commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
813ebbce962e7d59602a1f1693359b9d61bbef07
|
byceps/services/email/service.py
|
byceps/services/email/service.py
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
"""
byceps.services.email.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import List, Optional
from ... import email
from ...typing import BrandID
from .models import EmailConfig
def find_sender_address_for_brand(brand_id: BrandID) -> Optional[str]:
"""Return the configured sender e-mail address for the brand."""
config = EmailConfig.query.get(brand_id)
if config is None:
return None
return config.sender_address
def send_email(recipients: List[str], subject: str, body: str, *,
sender: Optional[str]=None) -> None:
"""Send an e-mail."""
email.send(recipients, subject, body, sender=sender)
|
Return `None` if no email configuration exists for brand instead of raising an exception
|
Return `None` if no email configuration exists for brand instead of raising an exception
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
7822598afa31f0f556766c53ecff27cb8aca76d6
|
chipy_org/apps/meetings/utils.py
|
chipy_org/apps/meetings/utils.py
|
from django.core.exceptions import ObjectDoesNotExist
import requests
from apps.meetings.models import Meeting, RSVP
def meetup_meeting_sync(api_key, meetup_event_id):
url = "http://api.meetup.com/2/rsvps"
params = dict(key=api_key, event_id=meetup_event_id)
api_response = requests.get(url, params=params)
chipy_meeting_instance = Meeting.objects.get(meetup_id=meetup_event_id)
response = api_response.json()
results = response['results']
for result in results:
meetup_user_id = result['member']['member_id']
try:
rsvp = RSVP.objects.get(meetup_user_id=meetup_user_id, meeting=chipy_meeting_instance)
except ObjectDoesNotExist:
rsvp = RSVP(meetup_user_id=meetup_user_id, meeting=chipy_meeting_instance)
rsvp.response = 'Y' if result['response'] == 'yes' else 'N'
rsvp.name = result['member']['name']
rsvp.save()
|
from django.core.exceptions import ObjectDoesNotExist
import requests
from apps.meetings.models import Meeting, RSVP
def get_rsvp(meeting, meetup_member):
"""
Handles getting the rsvp instance to update from Meetup.
Will return a new instance if needed.
If there is a name collision, it will update the current RSVP with the Meetup Info. This isn't perfect by any
stretch, but for our uses it should be good enough.
"""
meetup_user_id = meetup_member['member_id']
name_collisions = RSVP.objects.filter(name=meetup_member['name'])
if name_collisions:
rsvp = name_collisions[0]
rsvp.meetup_user_id=meetup_user_id
else:
try:
rsvp = RSVP.objects.get(meetup_user_id=meetup_user_id, meeting=meeting)
except ObjectDoesNotExist:
rsvp = RSVP(meetup_user_id=meetup_user_id, meeting=meeting)
return rsvp
def meetup_meeting_sync(api_key, meetup_event_id):
url = "http://api.meetup.com/2/rsvps"
params = dict(key=api_key, event_id=meetup_event_id)
api_response = requests.get(url, params=params)
meeting = Meeting.objects.get(meetup_id=meetup_event_id)
response = api_response.json()
results = response['results']
for result in results:
rsvp = get_rsvp(meeting, result['member'])
rsvp.response = 'Y' if result['response'] == 'yes' else 'N'
rsvp.name = result['member']['name']
rsvp.save()
|
Handle name collisions from meetup by attaching the meetup id to the user. While not perfect, should be good enough.
|
Handle name collisions from meetup by attaching the meetup id to the user.
While not perfect, should be good enough.
|
Python
|
mit
|
brianray/chipy.org,brianray/chipy.org,bharathelangovan/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,agfor/chipy.org,brianray/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,agfor/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,chicagopython/chipy.org,agfor/chipy.org,tanyaschlusser/chipy.org,bharathelangovan/chipy.org
|
fb719c54a41d90e07b62c82d1995f9c3149b68ec
|
adhocracy4/projects/views.py
|
adhocracy4/projects/views.py
|
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
@property
def project(self):
"""
Emulate ProjectMixin interface for template sharing.
"""
return self.get_object()
|
from django.views import generic
from rules.contrib import views as rules_views
from . import mixins, models
class ProjectDetailView(rules_views.PermissionRequiredMixin,
mixins.PhaseDispatchMixin,
generic.DetailView):
model = models.Project
permission_required = 'a4projects.view_project'
@property
def raise_exception(self):
return self.request.user.is_authenticated()
|
Remove project property which is already set by the PhaseDispatchMixin
|
Remove project property which is already set by the PhaseDispatchMixin
|
Python
|
agpl-3.0
|
liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4
|
a910295b2cbffdcb78c99218a48cb0120ecc3085
|
ynr/apps/ynr_refactoring/settings.py
|
ynr/apps/ynr_refactoring/settings.py
|
from .helpers.popolo_fields import simple_fields
SIMPLE_POPOLO_FIELDS = simple_fields
|
from enum import Enum, unique
from .helpers.popolo_fields import simple_fields
SIMPLE_POPOLO_FIELDS = simple_fields
@unique
class PersonIdentifierFields(Enum):
email = "Email"
facebook_page_url = "Facebook Page"
facebook_personal_url = "Facebook Personal"
homepage_url = "Homepage"
linkedin_url = "Linkedin"
party_ppc_page_url = "Party PPC Page"
twitter_username = "Twitter"
wikipedia_url = "Wikipedia"
# party_candidate_page = "Party Candidate Page"
# other = "Other"
|
Add Enum for PersonIdentifier Fields
|
Add Enum for PersonIdentifier Fields
This is similar to the work to remove SimplePopoloFields from the
database layer. The values here are all the values we want in the
initial version of the "n-links" on the person page model.
One day we want to enable any type of link, specified by the user.
That's too big a change to do all at once, so step one is to remove
ComplexPopoloFields in the database but still use a fixed list of
fields.
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
c8785033d313f0b6b78eff4bb01c7fa2df330f0d
|
djpjax/middleware.py
|
djpjax/middleware.py
|
import re
from djpjax.utils import strip_pjax_parameter
from django.conf import settings
import djpjax
class DjangoPJAXMiddleware(object):
def __init__(self):
djpjax_setting = getattr(settings, 'DJPJAX_DECORATE_URLS', [])
self.decorated_urls = tuple(
(re.compile(url_regex), getattr(djpjax, decorator)(**kwargs))
for url_regex, (decorator, kwargs) in reversed(djpjax_setting))
def process_request(self, request):
strip_pjax_parameter(request)
def process_template_response(self, request, response):
for url_regex, decorator in self.decorated_urls:
if url_regex.match(request.path):
fake_view = lambda _: response
response = decorator(fake_view)(request)
return response
|
import re
from djpjax.utils import strip_pjax_parameter
from django.conf import settings
import djpjax
class DjangoPJAXMiddleware(object):
def __init__(self):
djpjax_setting = getattr(settings, 'DJPJAX_DECORATED_URLS', [])
self.decorated_urls = tuple(
(re.compile(url_regex), getattr(djpjax, decorator)(**kwargs))
for url_regex, (decorator, kwargs) in reversed(djpjax_setting))
def process_request(self, request):
strip_pjax_parameter(request)
def process_template_response(self, request, response):
for url_regex, decorator in self.decorated_urls:
if url_regex.match(request.path):
fake_view = lambda _: response
response = decorator(fake_view)(request)
return response
|
Change setting name to DJPJAX_DECORATED_URLS.
|
Change setting name to DJPJAX_DECORATED_URLS.
|
Python
|
bsd-3-clause
|
AlexHill/djpj,AlexHill/djpj
|
0b3499beebeb789323d293513fdfc98565f6e02a
|
examples/IPLoM_example.py
|
examples/IPLoM_example.py
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
RawLogPath = '/home/hudan/Git/labeled-authlog/dataset/161.166.232.17/'
RawLogFile = 'auth.log.anon'
OutputPath = './results'
para = Para(path=RawLogPath, logname=RawLogFile, save_path=OutputPath)
myparser = IPLoM(para)
time = myparser.main_process()
print ('The running time of IPLoM is', time)
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
sys.path.insert(0, '../pygraphc/clustering')
from ClusterUtility import *
from ClusterEvaluation import *
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address + '/'
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.get_logs()
ClusterUtility.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ClusterEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
Add evaluation metrics to example
|
Add evaluation metrics to example
|
Python
|
mit
|
studiawan/pygraphc
|
f3f210b523f1733e48bb6316ecbb15e198dd503c
|
examples/field_example.py
|
examples/field_example.py
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print(result.data['patron'])
|
import graphene
class Patron(graphene.ObjectType):
id = graphene.ID()
name = graphene.String()
age = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
print(result.data['patron'])
|
Modify the field example to remove the use of interface
|
Modify the field example to remove the use of interface
|
Python
|
mit
|
sjhewitt/graphene,Globegitter/graphene,Globegitter/graphene,graphql-python/graphene,graphql-python/graphene,sjhewitt/graphene
|
ed4666b0d1bf5b8f82e298dfb043cce158c4ba2f
|
morepath/tests/fixtures/template_unknown_extension_no_render.py
|
morepath/tests/fixtures/template_unknown_extension_no_render.py
|
import morepath
import os
from .template_engine import FormatLoader
class App(morepath.App):
pass
@App.path(path='{name}')
class Person(object):
def __init__(self, name):
self.name = name
@App.template_loader(extension='.unknown')
def get_template_loader(template_directories, settings):
return FormatLoader(template_directories)
@App.html(model=Person, template='person.unknown')
def person_default(self, request):
return { 'name': self.name }
|
import morepath
import os
from .template_engine import FormatLoader
class App(morepath.App):
pass
@App.path(path='{name}')
class Person(object):
def __init__(self, name):
self.name = name
@App.template_loader(extension='.unknown')
def get_template_loader(template_directories, settings):
return FormatLoader(template_directories)
@App.template_directory()
def get_template_directory():
return 'templates'
@App.html(model=Person, template='person.unknown')
def person_default(self, request):
return { 'name': self.name }
|
Fix so that error under test actually gets triggered.
|
Fix so that error under test actually gets triggered.
|
Python
|
bsd-3-clause
|
taschini/morepath,faassen/morepath,morepath/morepath
|
27fe9d6531a2e76affd9388db53c0433062a9cfa
|
photonix/photos/management/commands/create_library.py
|
photonix/photos/management/commands/create_library.py
|
import os
from pathlib import Path
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from photonix.photos.models import Library, LibraryPath, LibraryUser
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, download_file
User = get_user_model()
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name):
# Get user
user = User.objects.get(username=username)
# Create Library
library, _ = Library.objects.get_or_create(
name=library_name,
)
library_path, _ = LibraryPath.objects.get_or_create(
library=library,
type='St',
backend_type='Lo',
path='/data/photos/',
url='/photos/',
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
user=user,
owner=True,
)
print(f'Library "{library_name}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', nargs='+', type=str)
parser.add_argument('library_name', nargs='+', type=str)
def handle(self, *args, **options):
self.create_library(options['username'][0], options['library_name'][0])
|
import os
from pathlib import Path
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from photonix.photos.models import Library, LibraryPath, LibraryUser
from photonix.photos.utils.db import record_photo
from photonix.photos.utils.fs import determine_destination, download_file
User = get_user_model()
class Command(BaseCommand):
help = 'Create a library for a user'
def create_library(self, username, library_name, path):
# Get user
user = User.objects.get(username=username)
# Create Library
library, _ = Library.objects.get_or_create(
name=library_name,
)
library_path, _ = LibraryPath.objects.get_or_create(
library=library,
type='St',
backend_type='Lo',
path=path,
)
library_user, _ = LibraryUser.objects.get_or_create(
library=library,
user=user,
owner=True,
)
print(f'Library "{library_name}" with path "{path}" created successfully for user "{username}"')
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('username', type=str)
parser.add_argument('library_name', type=str)
parser.add_argument('--path', type=str, default='/data/photos')
def handle(self, *args, **options):
self.create_library(options['username'], options['library_name'], options['path'])
|
Fix path cannot be set when creating new library
|
Fix path cannot be set when creating new library
|
Python
|
agpl-3.0
|
damianmoore/photo-manager,damianmoore/photo-manager,damianmoore/photo-manager,damianmoore/photo-manager
|
93756f6f72d9f797c058bedcb3d6d7546a5a83f3
|
server/management/commands/friendly_model_name.py
|
server/management/commands/friendly_model_name.py
|
'''
Retrieves the firendly model name for machines that don't have one yet.
'''
from django.core.management.base import BaseCommand, CommandError
from server.models import Machine
from django.db.models import Q
import server.utils as utils
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}'
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
|
"""Retrieves the friendly model name for machines that don't have one yet."""
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
import server.utils as utils
from server.models import Machine
class Command(BaseCommand):
help = 'Retrieves friendly model names for machines without one'
def handle(self, *args, **options):
# Get all the machines without a friendly model name and have a model
no_friendly = Machine.objects.filter(
Q(machine_model_friendly__isnull=True) |
Q(machine_model_friendly='')
).exclude(machine_model__isnull=True).exclude(machine_model='').filter(os_family='Darwin')
for machine in no_friendly[:100]:
print(f'Processing {machine}')
machine.machine_model_friendly = utils.friendly_machine_model(machine)
machine.save()
|
Fix missing paren, imports, spelling.
|
Fix missing paren, imports, spelling.
|
Python
|
apache-2.0
|
sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal
|
ab41dfa53325ee90032c4ed1b2e6e3c90b808ecf
|
contact/views.py
|
contact/views.py
|
import binascii
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib import messages
from django.db import IntegrityError, transaction
from django.conf import settings
from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.http import Http404
from contact.forms import ContactForm
# Create your views here.
def contact(request):
form_init = {
'username': request.user.username,
'ip_address': request.META.get('REMOTE_ADDR'),
}
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
messages.success('Success!')
else:
messages.error('Boo! Hiss!')
else:
form = ContactForm(initial=form_init)
return render(request, 'contact/contact.html', {'form': form})
|
import binascii
from django.shortcuts import render
from django.contrib import messages
from django.core.mail import send_mail
from contact.forms import ContactForm
# Create your views here.
def contact(request):
form_init = {
'username': request.user.username,
'ip_address': request.META.get('REMOTE_ADDR'),
'email': request.user.email,
}
if request.method == 'POST':
form = ContactForm(request.POST)
if form.is_valid():
messages.success(request, 'Success!')
else:
messages.error(request, 'Boo! Hiss!')
else:
form = ContactForm(initial=form_init)
return render(request, 'contact/contact.html', {'form': form})
|
Clean up import, pre-fill email address
|
Clean up import, pre-fill email address
If user is logged in, their email address is automatically filled in for them.
Also fixed an error with the messages usage, namely I forgot to pass in the request object.
|
Python
|
mit
|
Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters
|
0f4dc67fb9522801f010853d25939c0ef531f8ff
|
lbrynet/__init__.py
|
lbrynet/__init__.py
|
import logging
__version__ = "0.16.0rc7"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
import logging
__version__ = "0.16.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Bump version 0.16.0rc7 --> 0.16.0rc8
|
Bump version 0.16.0rc7 --> 0.16.0rc8
Signed-off-by: Jack Robison <[email protected]>
|
Python
|
mit
|
lbryio/lbry,lbryio/lbry,lbryio/lbry
|
7641293947dba9f721cdd0364a638b9f7714033a
|
examples/pax_mininet_node.py
|
examples/pax_mininet_node.py
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate( self ):
# Remove iptables rules
for intf in self.intfList():
runCmd(net, nat0, "iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super( NAT, self ).terminate()
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
Fix bug in Pax Mininet node class
|
Fix bug in Pax Mininet node class
|
Python
|
apache-2.0
|
niksu/pax,TMVector/pax,niksu/pax,niksu/pax,TMVector/pax
|
bc979e995162c441cd37b3368154e5ed842694f5
|
cartoframes/core/logger.py
|
cartoframes/core/logger.py
|
import sys
import logging
def init_logger():
logging.basicConfig(
stream=sys.stdout,
format='%(asctime)s - %(levelname)s - %(message)s',
level=logging.INFO)
return logging.getLogger('CARTOframes')
def set_log_level(level):
"""Set the level of the log in the library.
Args:
level (str): log level name. By default it's set to "info". Valid log levels are:
critical, error, warning, info, debug, notset.
"""
levels = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'notset': logging.NOTSET
}
if level not in levels:
return ValueError('Wrong log level. Valid log levels are: critical, error, warning, info, debug, notset.')
log.setLevel(levels[level])
log = init_logger()
|
import sys
import logging
def init_logger(formatter):
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(formatter))
logging.basicConfig(
level=logging.INFO,
handlers=[handler])
return handler, logging.getLogger('CARTOframes')
handler, log = init_logger('%(message)s')
def set_log_level(level):
"""Set the level of the log in the library.
Args:
level (str): log level name. By default it's set to "info". Valid log levels are:
critical, error, warning, info, debug, notset.
"""
levels = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'notset': logging.NOTSET
}
if level not in levels:
return ValueError('Wrong log level. Valid log levels are: critical, error, warning, info, debug, notset.')
level = levels[level]
if level == logging.DEBUG:
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
else:
handler.setFormatter(logging.Formatter('%(message)s'))
log.setLevel(level)
|
Apply long log formatter only in debug
|
Apply long log formatter only in debug
|
Python
|
bsd-3-clause
|
CartoDB/cartoframes,CartoDB/cartoframes
|
09a78126595ff355f59f70e4f381c3e2c4bef045
|
apps/innovate/tests/test_views.py
|
apps/innovate/tests/test_views.py
|
from django.core.urlresolvers import reverse
from django.test import Client
from projects.models import Project
from innovate import urls
def test_routes():
c = Client()
for pattern in urls.urlpatterns:
response = c.get(reverse(pattern.name))
assert response.status_code == 301
assert response.has_header('location')
location = response.get('location', None)
assert location is not None
response = c.get(location)
assert response.status_code == 200
def test_featured():
project = Project.objects.create(
name=u'Test Project',
slug=u'test-project',
description=u'Blah',
featured=True
)
c = Client()
response = c.get('/en-US/')
assert response.status_code == 200
assert project.name in response.content
|
from django.core.urlresolvers import reverse
from django.test import Client
from django.test.client import RequestFactory
from projects.models import Project
from innovate import urls
from innovate.views import handle404, handle500
def test_routes():
c = Client()
for pattern in urls.urlpatterns:
response = c.get(reverse(pattern.name))
assert response.status_code == 301
assert response.has_header('location')
location = response.get('location', None)
assert location is not None
response = c.get(location)
assert response.status_code == 200
def test_featured():
project = Project.objects.create(
name=u'Test Project',
slug=u'test-project',
description=u'Blah',
featured=True
)
c = Client()
response = c.get('/en-US/')
assert response.status_code == 200
assert project.name in response.content
def test_404_handler():
"""Test that the 404 error handler renders and gives the correct code."""
response = handle404(RequestFactory().get('/not/a/real/path/'))
assert response.status_code == 404
def test_500_handler():
"""Test that the 500 error handler renders and gives the correct code."""
response = handle500(RequestFactory().get('/not/a/real/path/'))
assert response.status_code == 500
|
Add tests for the 404 and 500 error handlers.
|
Add tests for the 404 and 500 error handlers.
|
Python
|
bsd-3-clause
|
mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite
|
536746eaf35f4a2899a747bfe0b1e8918f9ec8c9
|
httoop/header/__init__.py
|
httoop/header/__init__.py
|
# -*- coding: utf-8 -*-
"""HTTP headers
.. seealso:: :rfc:`2616#section-2.2`
.. seealso:: :rfc:`2616#section-4.2`
.. seealso:: :rfc:`2616#section-14`
"""
__all__ = ['Headers']
# FIXME: python3?
# TODO: add a MAXIMUM of 500 headers?
import inspect
from httoop.header.element import HEADER, HeaderElement, HeaderType
from httoop.header.messaging import Server, UserAgent
from httoop.header.headers import Headers
from httoop.header import semantics
from httoop.header import messaging
from httoop.header import conditional
from httoop.header import range
from httoop.header import cache
from httoop.header import auth
types = (semantics, messaging, conditional, range, cache, auth)
for _, member in (member for type_ in types for member in inspect.getmembers(type_, inspect.isclass)):
if isinstance(member, HeaderType) and member is not HeaderElement:
HEADER[member.__name__] = member
|
# -*- coding: utf-8 -*-
"""HTTP headers
.. seealso:: :rfc:`2616#section-2.2`
.. seealso:: :rfc:`2616#section-4.2`
.. seealso:: :rfc:`2616#section-14`
"""
__all__ = ['Headers']
# FIXME: python3?
# TODO: add a MAXIMUM of 500 headers?
import inspect
from httoop.header.element import HEADER, HeaderElement, HeaderType
from httoop.header.messaging import Server, UserAgent
from httoop.header.headers import Headers
from httoop.header import semantics
from httoop.header import messaging
from httoop.header import conditional
from httoop.header import range
from httoop.header import cache
from httoop.header import auth
types = (semantics, messaging, conditional, range, cache, auth)
for _, member in (member for type_ in types for member in inspect.getmembers(type_, inspect.isclass)):
if isinstance(member, HeaderType) and member is not HeaderElement:
HEADER[member.__name__] = member
globals()[_] = member
|
Add all header elements to httoop.header
|
Add all header elements to httoop.header
|
Python
|
mit
|
spaceone/httoop,spaceone/httoop,spaceone/httoop
|
fe451116ffcb12621600310b6d4ca9b6316494ff
|
scripts/zpe.py
|
scripts/zpe.py
|
import logging
from vaspy.iter import OutCar
_logger = logging.getLogger("vaspy.script")
if "__main__" == __name__:
outcar = OutCar()
poscar = outcar.poscar
freq_types = outcar.freq_types
# Frequency info.
_logger.info("{:<10s}{:<20s}".format("atom", "freq_type"))
_logger.info("-"*25)
idx = 0
tfs = poscar.tf.tolist()
for atom_idx, tf in enumerate(tfs):
if tf == ["T", "T", "T"]:
_logger.info("{:<10d}{:<5s}{:<5s}{:<5s}".format(atom_idx+1, *freq_types[idx]))
idx += 1
# Zero point energy.
_logger.info("")
_logger.info("ZPE = {}".format(outcar.zpe))
|
import logging
from vaspy.iter import OutCar
_logger = logging.getLogger("vaspy.script")
if "__main__" == __name__:
outcar = OutCar()
poscar = outcar.poscar
freq_types = outcar.freq_types
# Frequency info.
_logger.info("{:<10s}{:<10s}{:<20s}".format("atom", "type", "freq_type"))
_logger.info("-"*35)
# Get atom types.
atom_types = []
for t, n in zip(poscar.atoms, poscar.atoms_num):
atom_types += [t]*n
idx = 0
tfs = poscar.tf.tolist()
for atom_idx, tf in enumerate(tfs):
if tf == ["T", "T", "T"]:
msg = "{:<10d}{:<10s}{:<5s}{:<5s}{:<5s}"
msg = msg.format(atom_idx+1, atom_types[atom_idx], *freq_types[idx])
_logger.info(msg)
idx += 1
# Zero point energy.
_logger.info("")
_logger.info("ZPE = {}".format(outcar.zpe))
|
Add atom type info output.
|
Add atom type info output.
|
Python
|
mit
|
PytLab/VASPy,PytLab/VASPy
|
24fe59c0f5df9343337549eb4495d6ca0e1e58d1
|
iconizer/iconizer_main.py
|
iconizer/iconizer_main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from iconizer.console.launcher import CrossGuiLauncher
from iconizer.qtconsole.PyQtGuiFactory import PyQtGuiFactory
class Iconizer(object):
def __init__(self):
self.server = CrossGuiLauncher(PyQtGuiFactory())
self.server.start()
self.server.start_cross_gui_launcher_no_return()
def execute(self, app_descriptor_dict):
#Send request to start a new app
self.server.launch(app_descriptor_dict)
def main():
Iconizer()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from iconizer.console.launcher import CrossGuiLauncher
from iconizer.qtconsole.PyQtGuiFactory import PyQtGuiFactory
class Iconizer(object):
def start_gui(self):
self.server = CrossGuiLauncher(PyQtGuiFactory())
self.server.start()
self.server.start_cross_gui_launcher_no_return()
def execute(self, app_descriptor_dict):
#Send request to start a new app
self.server.launch(app_descriptor_dict)
def main():
Iconizer().start_gui()
if __name__ == '__main__':
main()
|
Move GUI creation out of __init__ function.
|
Move GUI creation out of __init__ function.
|
Python
|
bsd-3-clause
|
weijia/iconizer
|
3b5feba4141dab974f7e6e343af76694e49246d4
|
app/settings/prod.py
|
app/settings/prod.py
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'https://agendaodonto.com',
'https://backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
Remove the scheme from the domain
|
fix: Remove the scheme from the domain
|
Python
|
agpl-3.0
|
agendaodonto/server,agendaodonto/server
|
dad3eb5c1b0e188671884e97260422a90bdd5c21
|
gitcommitautosave.py
|
gitcommitautosave.py
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
|
"""Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'):
view.run_command("save")
|
Fix 'NoneType' object has no attribute 'endswith'
|
Fix 'NoneType' object has no attribute 'endswith'
|
Python
|
mit
|
aristidesfl/sublime-git-commit-message-auto-save
|
c300453131360ef9c48586f48287d6c24b3a94a2
|
jsonrpcclient/__init__.py
|
jsonrpcclient/__init__.py
|
"""__init__.py"""
from .request import Request, Notification
|
"""__init__.py"""
from .request import Request, Notification
from .http_client import HTTPClient
def request(endpoint, method, *args, **kwargs):
"""
A convenience function. Instantiates and executes a HTTPClient request, then
throws it away.
"""
return HTTPClient(endpoint).request(method, *args, **kwargs)
def notify(endpoint, method, *args, **kwargs):
"""
A convenience function. Instantiates and executes a HTTPClient request, then
throws it away.
"""
return HTTPClient(endpoint).notify(method, *args, **kwargs)
|
Add convenience functions 'request' and 'notify'
|
Add convenience functions 'request' and 'notify'
Closes #54
|
Python
|
mit
|
bcb/jsonrpcclient
|
aecaecfa20caba28af1e8117ffc14eeb6460872e
|
deployment/config.py
|
deployment/config.py
|
class Azure:
resource_group = "MajavaShakki"
location = "northeurope"
cosmosdb_name = f"{resource_group}mongo".lower()
plan_name = f"{resource_group}Plan"
site_name = f"{resource_group}Site"
class Mongo:
database_name = "Majavashakki"
collection_throughput = 400
system_indexes_collection = "undefined" # https://github.com/Automattic/mongoose/issues/6989
collections = ["gamemodels", "sessions", "users", system_indexes_collection]
|
class Azure:
resource_group = "MajavaShakki"
location = "northeurope"
cosmosdb_name = f"{resource_group}mongo".lower()
plan_name = f"{resource_group}Plan"
site_name = f"{resource_group}Site"
class Mongo:
database_name = "Majavashakki"
collection_throughput = 400
system_indexes_collection = "undefined" # https://github.com/Automattic/mongoose/issues/6989
collections = ["games", "gamemodels", "sessions", "users", system_indexes_collection]
|
Configure throughput for games collection
|
Configure throughput for games collection
The collection gamemodels should be deleted when it is no longer in use
|
Python
|
mit
|
Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki
|
91b9e5d71c323be2e5e0d1aa16e47cc49d45acc4
|
likes/middleware.py
|
likes/middleware.py
|
try:
from hashlib import md5
except ImportError:
from md5 import md5
from django.http import HttpResponseBadRequest
from secretballot.middleware import SecretBallotIpUseragentMiddleware
class SecretBallotUserIpUseragentMiddleware(SecretBallotIpUseragentMiddleware):
def generate_token(self, request):
if request.user.is_authenticated():
return request.user.username
else:
try:
s = u''.join((request.META['REMOTE_ADDR'], request.META['HTTP_USER_AGENT']))
return md5(s).hexdigest()
except KeyError:
return None
|
try:
from hashlib import md5
except ImportError:
from md5 import md5
from django.http import HttpResponseBadRequest
from secretballot.middleware import SecretBallotIpUseragentMiddleware
class SecretBallotUserIpUseragentMiddleware(SecretBallotIpUseragentMiddleware):
def generate_token(self, request):
if request.user.is_authenticated():
return request.user.username
else:
try:
s = ''.join((request.META['REMOTE_ADDR'], request.META['HTTP_USER_AGENT']))
return md5(s.encode('utf-8')).hexdigest()
except KeyError:
return None
|
Fix hashing for Python 3
|
Fix hashing for Python 3
|
Python
|
bsd-3-clause
|
Afnarel/django-likes,Afnarel/django-likes,Afnarel/django-likes
|
c62e1b325a536294b3285f8cbcad7d66a415ee23
|
heat/objects/base.py
|
heat/objects/base.py
|
# Copyright 2015 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Heat common internal object model"""
from oslo_versionedobjects import base as ovoo_base
class HeatObject(ovoo_base.VersionedObject):
OBJ_PROJECT_NAMESPACE = 'heat'
VERSION = '1.0'
|
# Copyright 2015 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Heat common internal object model"""
import weakref
from oslo_versionedobjects import base as ovoo_base
class HeatObject(ovoo_base.VersionedObject):
OBJ_PROJECT_NAMESPACE = 'heat'
VERSION = '1.0'
@property
def _context(self):
if self._contextref is None:
return
ctxt = self._contextref()
assert ctxt is not None, "Need a reference to the context"
return ctxt
@_context.setter
def _context(self, context):
if context:
self._contextref = weakref.ref(context)
else:
self._contextref = None
|
Use a weakref for the data object context
|
Use a weakref for the data object context
There are no known circular reference issues caused by storing the
context in data objects, but the following changes will refer to data
objects in the context, so this change prevents any later issues.
Change-Id: I3680e5678003cf339a98fbb7a2b1b387fb2243c0
Related-Bug: #1578854
|
Python
|
apache-2.0
|
noironetworks/heat,openstack/heat,openstack/heat,cwolferh/heat-scratch,noironetworks/heat,cwolferh/heat-scratch
|
c6dae4cbd8d8dcbcd323526c2811fea9525bcb74
|
__init__.py
|
__init__.py
|
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
|
import spyral.memoize
import spyral.point
import spyral.camera
import spyral.util
import spyral.sprite
import spyral.gui
import spyral.scene
import spyral._lib
import spyral.event
import pygame
director = scene.Director()
def init():
pygame.init()
pygame.font.init()
|
Add an event module import
|
Add an event module import
|
Python
|
lgpl-2.1
|
platipy/spyral
|
42701f0ba5147bbd1aeeb2871f1b9970bcb7e01e
|
pygraphc/clustering/ClusterUtility.py
|
pygraphc/clustering/ClusterUtility.py
|
from itertools import combinations
class ClusterUtility(object):
@staticmethod
def get_geometric_mean(weights):
multiplication = 1
for weight in weights:
multiplication = multiplication * weight
gmean = 0.0
multiplication = round(multiplication, 5)
if multiplication > 0.0:
k = float(len(weights))
gmean = multiplication ** (1 / k)
return round(gmean, 5)
@staticmethod
def get_weighted_cliques(graph, cliques, threshold):
weighted_kcliques = []
for clique in cliques:
weights = []
for u, v in combinations(clique, 2):
reduced_precision = round(graph[u][v]['weight'], 5)
weights.append(reduced_precision)
gmean = ClusterUtility.get_geometric_mean(weights)
if gmean > threshold:
weighted_kcliques.append(frozenset(clique))
return weighted_kcliques
@staticmethod
def set_cluster_id(graph, clusters):
cluster_id = 0
for cluster in clusters:
for node in cluster:
graph.node[node]['cluster'] = cluster_id
cluster_id += 1
|
from itertools import combinations
class ClusterUtility(object):
@staticmethod
def get_geometric_mean(weights):
multiplication = 1
for weight in weights:
multiplication = multiplication * weight
gmean = 0.0
if multiplication > 0.0:
k = float(len(weights))
gmean = multiplication ** (1 / k)
return round(gmean, 5)
@staticmethod
def get_weighted_cliques(graph, cliques, threshold):
weighted_kcliques = []
for clique in cliques:
weights = []
for u, v in combinations(clique, 2):
reduced_precision = round(graph[u][v][0]['weight'], 5)
weights.append(reduced_precision)
gmean = ClusterUtility.get_geometric_mean(weights)
if gmean > threshold:
weighted_kcliques.append(frozenset(clique))
return weighted_kcliques
@staticmethod
def set_cluster_id(graph, clusters):
cluster_id = 0
for cluster in clusters:
for node in cluster:
graph.node[node]['cluster'] = cluster_id
cluster_id += 1
|
Remove round of multiplication result for get_geometric_mean and bug fix get edge weight
|
Remove round of multiplication result for get_geometric_mean and bug fix get edge weight
|
Python
|
mit
|
studiawan/pygraphc
|
039a19032bebd1e6852990f8aacf05042f000070
|
args.py
|
args.py
|
import inspect
def argspec_set(func):
if not hasattr(func, 'argspec'):
func.argspec = inspect.getargspec(func)
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else None
return lenargs >= minargs and (maxargs is None or lenargs <= maxargs)
class ArgCountError(Exception):
pass
|
import inspect
def argspec_set(func):
if not hasattr(func, 'argspec'):
func.argspec = inspect.getargspec(func)
def argspec_iscompat(func, lenargs):
spec = func.argspec
minargs = len(spec.args) - len(spec.defaults or ())
maxargs = len(spec.args) if spec.varargs is None else float("infinity")
return minargs <= lenargs <= maxargs
class ArgCountError(Exception):
pass
|
Simplify funcion arg compatibility check
|
Simplify funcion arg compatibility check
|
Python
|
mit
|
infogulch/pyspades-events
|
bdfc87ff9f9b67f038248052805327278309e558
|
openacademy/model/openacademy_session.py
|
openacademy/model/openacademy_session.py
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher"),
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
mapuerta/openacademy-proyect
|
8d50750ae94e2c94059dcbf1009dd46441d44842
|
__init__.py
|
__init__.py
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from flask.ext.mongoengine import MongoEngine, MongoEngineSessionInterface
import configparser
from .momentjs import momentjs
app = Flask(__name__)
# Security
WTF_CSRF_ENABLED = True
app.config['SECRET_KEY'] = '2bN9UUaBpcjrxR'
app.jinja_env.globals['momentjs'] = momentjs
# App Config
config = configparser.ConfigParser()
config.read('config/config.ini')
app.config['MONGODB_DB'] = config['MongoDB']['db_name']
app.config['MONGODB_HOST'] = config['MongoDB']['host']
app.config['MONGODB_PORT'] = int(config['MongoDB']['port'])
app.config['MONGODB_USERNAME'] = config['MongoDB']['username']
app.config['MONGODB_PASSWORD'] = config['MongoDB']['password']
db = MongoEngine(app)
def register_blueprints(app):
# Prevents circular imports
from weighttracker.views.measurement_views import measurements
app.register_blueprint(measurements)
from weighttracker.views.inspiration_views import inspirations
app.register_blueprint(inspirations)
from weighttracker.views.foodjournal_views import foodjournals
app.register_blueprint(foodjournals)
register_blueprints(app)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
return render_template('index.html')
if __name__ == '__main__':
app.run()
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from flask.ext.mongoengine import MongoEngine, MongoEngineSessionInterface
import configparser
from .momentjs import momentjs
app = Flask(__name__)
# Security
WTF_CSRF_ENABLED = True
app.config['SECRET_KEY'] = '2bN9UUaBpcjrxR'
app.jinja_env.globals['momentjs'] = momentjs
# App Config
config = configparser.ConfigParser()
config.read('config/config.ini')
app.config['MONGODB_DB_SETTINGS'] = {
'name': config['MongoDB']['db_name'],
'host': config['MongoDB']['host'],
'port': int(config['MongoDB']['port']),
'username': config['MongoDB']['username'],
'password': config['MongoDB']['password']}
db = MongoEngine(app)
def register_blueprints(app):
# Prevents circular imports
from weighttracker.views.measurement_views import measurements
app.register_blueprint(measurements)
from weighttracker.views.inspiration_views import inspirations
app.register_blueprint(inspirations)
from weighttracker.views.foodjournal_views import foodjournals
app.register_blueprint(foodjournals)
register_blueprints(app)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
return render_template('index.html')
if __name__ == '__main__':
app.run()
|
Update how we set the connection information for MongoDB to support Mongo 3.0.5
|
Update how we set the connection information for MongoDB to support Mongo 3.0.5
Signed-off-by: Robert Dempsey <[email protected]>
|
Python
|
mit
|
rdempsey/weight-tracker,rdempsey/weight-tracker,rdempsey/weight-tracker
|
5057b70a59c1a3c8301928c0297d4012bd96b21a
|
mapApp/views/index.py
|
mapApp/views/index.py
|
from django.shortcuts import render
from mapApp.models import Incident, Theft, Hazard, Official, AlertArea
from mapApp.forms import IncidentForm, HazardForm, TheftForm, GeofenceForm, EditForm
import datetime
def index(request, lat=None, lng=None, zoom=None):
incidents = Incident.objects.select_related('point').all()
now = datetime.datetime.now()
context = {
# Model data used by map
'collisions': incidents.filter(p_type__exact="collision"),
'nearmisses': incidents.filter(p_type__exact="nearmiss"),
'hazards': Hazard.objects.select_related('point').exclude(expires_date__lt=now),
'thefts': Theft.objects.select_related('point').all(),
# 'officials': officialResult,
"geofences": AlertArea.objects.filter(user=request.user.id),
# Form data used by map
"incidentForm": IncidentForm(),
"hazardForm": HazardForm(),
"theftForm": TheftForm(),
"geofenceForm": GeofenceForm(),
"editForm": EditForm()
}
# Add zoom and center data if present
if not None in [lat, lng, zoom]:
context['lat']= float(lat)
context['lng']= float(lng)
context['zoom']= int(zoom)
return render(request, 'mapApp/index.html', context)
|
from django.shortcuts import render
from mapApp.models import Incident, Theft, Hazard, Official, AlertArea
from mapApp.forms import IncidentForm, HazardForm, TheftForm, GeofenceForm, EditForm
import datetime
def index(request, lat=None, lng=None, zoom=None):
incidents = Incident.objects.select_related('point').all()
now = datetime.datetime.now()
context = {
# Model data used by map
'collisions': incidents.filter(p_type__exact="collision"),
'nearmisses': incidents.filter(p_type__exact="nearmiss"),
'hazards': Hazard.objects.select_related('point').exclude(expires_date__lt=now).exclude(hazard_fixed=True),
'thefts': Theft.objects.select_related('point').all(),
# 'officials': officialResult,
"geofences": AlertArea.objects.filter(user=request.user.id),
# Form data used by map
"incidentForm": IncidentForm(),
"hazardForm": HazardForm(),
"theftForm": TheftForm(),
"geofenceForm": GeofenceForm(),
"editForm": EditForm()
}
# Add zoom and center data if present
if not None in [lat, lng, zoom]:
context['lat']= float(lat)
context['lng']= float(lng)
context['zoom']= int(zoom)
return render(request, 'mapApp/index.html', context)
|
Remove expired hazards from main map
|
Remove expired hazards from main map
|
Python
|
mit
|
SPARLab/BikeMaps,SPARLab/BikeMaps,SPARLab/BikeMaps
|
19af4b621a50639a0c1156bab1c97c7b827b89a8
|
django_nose/tools.py
|
django_nose/tools.py
|
# vim: tabstop=4 expandtab autoindent shiftwidth=4 fileencoding=utf-8
"""
Assertions that sort of follow Python unittest/Django test cases
"""
from django.test.testcases import TestCase
import re
## Python
from nose import tools
for t in dir(tools):
if t.startswith('assert_'):
vars()[t] = getattr(tools, t)
## Django
caps = re.compile('([A-Z])')
def pep8(name):
return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
class Dummy(TestCase):
def nop():
pass
_t = Dummy('nop')
for at in [ at for at in dir(_t)
if at.startswith('assert') and not '_' in at ]:
pepd = pep8(at)
vars()[pepd] = getattr(_t, at)
del Dummy
del _t
del pep8
## New
def assert_code(response, status_code, msg_prefix=''):
"""Asserts the response was returned with the given status code
"""
if msg_prefix:
msg_prefix = '%s: ' % msg_prefix
assert response.status_code == status_code, \
'Response code was %d (expected %d)' % \
(response.status_code, status_code)
def assert_ok(response, msg_prefix=''):
"""Asserts the response was returned with status 200 (OK)
"""
return assert_code(response, 200, msg_prefix=msg_prefix)
# EOF
|
# vim: tabstop=4 expandtab autoindent shiftwidth=4 fileencoding=utf-8
"""
Provides Nose and Django test case assert functions
"""
from django.test.testcases import TestCase
import re
## Python
from nose import tools
for t in dir(tools):
if t.startswith('assert_'):
vars()[t] = getattr(tools, t)
## Django
caps = re.compile('([A-Z])')
def pep8(name):
return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
class Dummy(TestCase):
def nop():
pass
_t = Dummy('nop')
for at in [ at for at in dir(_t)
if at.startswith('assert') and not '_' in at ]:
pepd = pep8(at)
vars()[pepd] = getattr(_t, at)
del Dummy
del _t
del pep8
## New
def assert_code(response, status_code, msg_prefix=''):
"""Asserts the response was returned with the given status code
"""
if msg_prefix:
msg_prefix = '%s: ' % msg_prefix
assert response.status_code == status_code, \
'Response code was %d (expected %d)' % \
(response.status_code, status_code)
def assert_ok(response, msg_prefix=''):
"""Asserts the response was returned with status 200 (OK)
"""
return assert_code(response, 200, msg_prefix=msg_prefix)
# EOF
|
Make the heading doc string a bit more descriptive
|
Make the heading doc string a bit more descriptive
|
Python
|
bsd-3-clause
|
brilliant-org/django-nose,franciscoruiz/django-nose,sociateru/django-nose,millerdev/django-nose,fabiosantoscode/django-nose-123-fix,dgladkov/django-nose,aristiden7o/django-nose,krinart/django-nose,aristiden7o/django-nose,krinart/django-nose,fabiosantoscode/django-nose-123-fix,franciscoruiz/django-nose,millerdev/django-nose,harukaeru/django-nose,alexhayes/django-nose,Deepomatic/django-nose,harukaeru/django-nose,360youlun/django-nose,sociateru/django-nose,Deepomatic/django-nose,daineX/django-nose,brilliant-org/django-nose,360youlun/django-nose,daineX/django-nose,alexhayes/django-nose,dgladkov/django-nose
|
78ec1cffde6443016bae2c8aefdb67ab26bfab10
|
__init__.py
|
__init__.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
"name": "Wifi connection",
"author": "Ultimaker",
"description": catalog.i18nc("Wifi connection", "Wifi connection"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
}
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import OctoPrintOutputDevicePlugin
from . import DiscoverOctoPrintAction
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"type": "extension",
"plugin": {
"name": "OctoPrint connection",
"author": "fieldOfView",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Allows sending prints to OctoPrint and monitoring the progress"),
"api": 3
}
}
def register(app):
return {
"output_device": OctoPrintOutputDevicePlugin.OctoPrintOutputDevicePlugin(),
"machine_action": DiscoverOctoPrintAction.DiscoverOctoPrintAction()
}
|
Update plugin information (name, description, version, author)
|
Update plugin information (name, description, version, author)
|
Python
|
agpl-3.0
|
fieldOfView/OctoPrintPlugin
|
762ba71537cebac83970fbfb19725054b127191b
|
__init__.py
|
__init__.py
|
from .blendergltf import *
|
if 'loaded' in locals():
import imp
imp.reload(blendergltf)
from .blendergltf import *
else:
loaded = True
from .blendergltf import *
|
Improve reloading of the module
|
Improve reloading of the module
|
Python
|
apache-2.0
|
Kupoman/blendergltf,lukesanantonio/blendergltf
|
37069b80e9ab17f7d3cdbe8baf7085ff67780914
|
minimongo/__init__.py
|
minimongo/__init__.py
|
# -*- coding: utf-8 -*-
'''
minimongo
~~~~~~~~~
Minimongo is a lightweight, schemaless, Pythonic Object-Oriented
interface to MongoDB.
'''
from minimongo.index import Index
from minimongo.collection import Collection
from minimongo.model import Model
from minimongo.options import configure
__all__ = ('Collection', 'Index', 'Model', 'configure')
|
# -*- coding: utf-8 -*-
'''
minimongo
~~~~~~~~~
Minimongo is a lightweight, schemaless, Pythonic Object-Oriented
interface to MongoDB.
'''
from minimongo.index import Index
from minimongo.collection import Collection
from minimongo.model import Model, AttrDict
from minimongo.options import configure
__all__ = ('Collection', 'Index', 'Model', 'configure', 'AttrDict')
|
Add AttrDict to the top level
|
Add AttrDict to the top level
|
Python
|
bsd-2-clause
|
terianil/minimongo,slacy/minimongo,terianil/minimongo
|
68c0f77155f0cc77634a8d73b2a1ab6cbd66bac1
|
handlers/issue_handler.py
|
handlers/issue_handler.py
|
class IssueHandler:
def __init__(self, repos):
self.repos = repos
def handle(self, channel, prefix, issue):
repo = self.find_repo(prefix, channel)
if repo is not None:
return "https://github.com/" + repo + "/issues/" + issue
def find_repo(self, prefix, channel):
for repo in self.repos:
if prefix in repo["prefixes"] or (prefix is "" and channel in repo["channels"]):
return repo["name"]
|
class IssueHandler:
def __init__(self, repos):
self.repos = repos
def handle(self, channel, prefix, issue):
repo = self.find_repo(prefix, channel)
if repo is not None:
return "https://github.com/" + repo + "/issues/" + issue
def find_repo(self, prefix, channel):
for repo in self.repos:
if prefix is "" and channel in repo["channels"]:
# "Look at #123"
return repo["name"]
if prefix in repo["prefixes"] or prefix.lower() in repo["prefixes"]:
# "Look at bugs#123"
return repo["name"]
|
Make issue prefix matching case-insensitive
|
Make issue prefix matching case-insensitive
|
Python
|
mit
|
azeier/hearthbot
|
95e1f9517d79fb48bb9601e2d94419c6e2c984ca
|
tools/data2c.py
|
tools/data2c.py
|
#!/usr/bin/python
import sys
import os.path
import string
def path2varname(path):
path = os.path.basename(path)
s = ''
for c in path:
if c in string.ascii_letters or c in string.digits:
s += c
else:
s += '_'
return s
def main():
for path in sys.argv[1:]:
varname = path2varname(path)
with open(path, 'rb') as f:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
sys.stdout.write('\n};\n\n')
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import sys
import os.path
import string
import getopt
cflag = 0 # clean output: just the hexdump
def path2varname(path):
path = os.path.basename(path)
s = ''
for c in path:
if c in string.ascii_letters or c in string.digits:
s += c
else:
s += '_'
return s
def main():
global cflag
opts, args = getopt.getopt(sys.argv[1:], "c")
for (x, y) in opts:
if x == "-c":
cflag += 1
for path in args:
varname = path2varname(path)
with open(path, 'rb') as f:
if not cflag:
sys.stdout.write('static const char %s[] = {' % varname)
data = f.read()
i = 0
for c in data:
if i % 16 == 0:
sys.stdout.write('\n')
if not cflag:
sys.stdout.write('\t')
i += 1
sys.stdout.write('0x%02x, ' % ord(c))
if not cflag:
sys.stdout.write('\n};')
sys.stdout.write('\n')
if __name__ == '__main__':
main()
|
Add -c option: output only hexdump, without C variable wrapper.
|
Add -c option: output only hexdump, without C variable wrapper.
|
Python
|
isc
|
S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc,S010/misc
|
aedc449ec40a2c0407a38608e8e800b09c6c25b0
|
tests/model.py
|
tests/model.py
|
import mongomock
from mongo_thingy import Model
def test_collection_alias():
col = mongomock.MongoClient().db.collection
class Foo(Model):
_collection = col
assert Foo.collection == col
def test_get_database_from_table():
col = mongomock.MongoClient().db.collection
class Foo(Model):
_collection = col
assert isinstance(Foo.database, mongomock.Database)
|
import mongomock
from mongo_thingy import Model
def test_collection_alias():
col = mongomock.MongoClient().db.collection
class Foo(Model):
_collection = col
assert Foo.collection == col
def test_get_database_from_table():
col = mongomock.MongoClient().db.collection
class Foo(Model):
_collection = col
assert isinstance(Foo.database, mongomock.Database)
def test_get_table_from_database():
db = mongomock.MongoClient().db
class Foo(Model):
_database = db
assert Foo.collection_name == "foo"
|
Add a test for _get_table_from_database
|
Add a test for _get_table_from_database
|
Python
|
mit
|
numberly/mongo-thingy
|
7c5747688dffd41737c841ac354c4947939006c3
|
django_excel_tools/exceptions.py
|
django_excel_tools/exceptions.py
|
class BaseExcelError(Exception):
def __init__(self, message):
super(BaseExcelError, self).__init__()
self.message = message
class ValidationError(BaseExcelError):
pass
class ColumnNotEqualError(BaseExcelError):
pass
class FieldNotExist(BaseExcelError):
pass
class SerializerConfigError(BaseExcelError):
pass
|
class BaseExcelError(Exception):
def __init__(self, message):
super(BaseExcelError, self).__init__()
self.message = message
class ColumnNotEqualError(BaseExcelError):
pass
class FieldNotExist(BaseExcelError):
pass
class ImportOperationFailed(BaseExcelError):
pass
class SerializerConfigError(BaseExcelError):
pass
class ValidationError(BaseExcelError):
pass
|
Add exception for import operation error and validation error
|
Add exception for import operation error and validation error
|
Python
|
mit
|
NorakGithub/django-excel-tools
|
b4e879e6bbfae9bf5692df2fb3b715f9e376041d
|
app/priu.py
|
app/priu.py
|
import os, sys
import json as _json
from flask import Flask, Response, request
app = Flask(__name__)
app.debug = True
import lib
@app.route("/", methods=["HEAD", "GET", "POST", "DELETE", "PUT"])
def adapter():
json = request.get_data()
decoded = _json.loads(json)
docker_json = _json.loads(decoded['ClientRequest']['Body'])
image = docker_json['Image']
user = image.split("/")[0]
if user != app.config['ALLOWED_USER']:
return '', 403
response = lib.pre_hook_response(
decoded['ClientRequest']['Method'],
decoded['ClientRequest']['Request'],
decoded['ClientRequest']['Body'],
)
return Response(response, mimetype="application/json")
if __name__ == "__main__":
try:
app.config['ALLOWED_USER'] = os.environ['USER']
except KeyError:
sys.stdout.write("""Error: Configuration environment variable USER not provided.
Specify an image username on the Docker command-line by using docker run -e USER=<user>.
Use the user "_" to only allow official Docker images.
""")
sys.exit(1)
app.run(port=80)
|
import os, sys
import json as _json
from flask import Flask, Response, request
app = Flask(__name__)
app.debug = True
import lib
@app.route("/", methods=["HEAD", "GET", "POST", "DELETE", "PUT"])
def adapter():
json = request.get_data()
decoded = _json.loads(json)
docker_json = _json.loads(decoded['ClientRequest']['Body'])
image = docker_json['Image']
if "/" not in image:
user = "_"
else:
user = image.split("/")[0]
if user != app.config['ALLOWED_USER']:
return '', 403
response = lib.pre_hook_response(
decoded['ClientRequest']['Method'],
decoded['ClientRequest']['Request'],
decoded['ClientRequest']['Body'],
)
return Response(response, mimetype="application/json")
if __name__ == "__main__":
try:
app.config['ALLOWED_USER'] = os.environ['USER']
except KeyError:
sys.stdout.write("""Error: Configuration environment variable USER not provided.
Specify an image username on the Docker command-line by using docker run -e USER=<user>.
Use the user "_" to only allow official Docker images.
""")
sys.exit(1)
app.run(port=80)
|
Support for diallowing official images.
|
Support for diallowing official images.
|
Python
|
apache-2.0
|
robhaswell/powerstrip-restrict-image-user
|
8e4d77636a9846296225ddbfab872be4c7486261
|
dask_distance/_pycompat.py
|
dask_distance/_pycompat.py
|
# -*- coding: utf-8 -*-
try:
irange = xrange
except NameError:
irange = range
|
# -*- coding: utf-8 -*-
try:
irange = xrange
except NameError:
irange = range
try:
from itertools import izip
except ImportError:
izip = zip
|
Add izip for Python 2/3 compatibility
|
Add izip for Python 2/3 compatibility
Simply use `izip` from `itertools` on Python 2 and alias `izip` as `zip`
on Python 3. This way an iterable form of `zip` remains available on
both Python 2 and Python 3 that is named `izip`. Should help avoid
having the performance of the two implementations from diverging too
far.
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
114f6eb7fc6e7bf7e2cbc885ded0063adce3eccc
|
api/init/graphqlapi/batch.py
|
api/init/graphqlapi/batch.py
|
import docker
def execute_batch(batch_id: int):
container_name = f'mobydq-batch-{batch_id}'
client = docker.from_env()
client.containers.run(
name=container_name,
image='mobydq-scripts',
network='mobydq-network',
command=['python', 'run.py', 'execute_batch', batch_id],
remove=True,
detach=True
)
|
import docker
def execute_batch(batch_id: int):
container_name = f'mobydq-batch-{batch_id}'
client = docker.from_env()
client.containers.run(
name=container_name,
image='mobydq-scripts',
network='mobydq-network',
command=['python', 'run.py', 'execute_batch', batch_id],
remove=True,
detach=True
)
|
Add new line at the end of file
|
Add new line at the end of file
|
Python
|
apache-2.0
|
alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality
|
21541a4a1b2fcdfaa4bb7d2fbebf7db702eebacc
|
app/main.py
|
app/main.py
|
import csv
import yaml
import parse
import deparse
def main():
with open('data/features.csv', 'r') as f:
segments = [segment for segment in csv.DictReader(f)]
with open('data/diacritics.yaml') as f:
diacritics = yaml.load(f)
with open('data/feature-strings-with-diacritics.csv') as f:
feature_strings = [line for line in csv.reader(f)]
words = parse.parse_words(['bːɒtl', 'b\u02D0ɒtl'], segments, diacritics)
print(deparse.deparse_words(words, segments, feature_strings))
if __name__ == '__main__':
main()
|
import csv
import yaml
import parse
import evolve
import deparse
def evolve_words(words, available_rules, generations=5):
    """Evolve *words* by repeatedly applying rules drawn from *available_rules*.

    Runs for at most *generations* steps, stopping early when no applicable
    rule remains. Returns a (words, applied_rules) pair: the evolved word
    list and the rules that were applied, in order.
    """
    rules_used = []
    for _ in range(generations):
        try:
            # evolve.evolve raises StopIteration when no rule applies.
            rule, words = evolve.evolve(words, available_rules)
        except StopIteration:
            break
        rules_used.append(rule)
    return words, rules_used
def main():
with open('data/features.csv', 'r') as f:
segments = [segment for segment in csv.DictReader(f)]
with open('data/diacritics.yaml') as f:
diacritics = yaml.load(f)
with open('data/rules.yaml') as f:
rules = yaml.load(f)
with open('data/feature-strings-with-diacritics.csv') as f:
feature_strings = [line for line in csv.reader(f)]
word_strings = ['mːɒtl', 'b\u02D0ɒtl']
words = parse.parse_words(word_strings, segments, diacritics)
evolved_words, applied_rules = evolve_words(words, rules)
deparsed = deparse.deparse_words(evolved_words, segments, feature_strings)
for word, evolved_word in zip(word_strings, deparsed):
print('{0} -> {1}'.format(word, evolved_word))
if __name__ == '__main__':
main()
|
Create basic flow of evolution process.
|
Create basic flow of evolution process.
|
Python
|
mit
|
kdelwat/LangEvolve,kdelwat/LangEvolve,kdelwat/LangEvolve
|
04f8e26d2dfd29c0958372098f4199d833977c62
|
app/main/views/frameworks.py
|
app/main/views/frameworks.py
|
from flask import jsonify
from sqlalchemy.types import String
from sqlalchemy import func
import datetime
from .. import main
from ...models import db, Framework, DraftService, Service, User, Supplier, SelectionAnswers, AuditEvent
@main.route('/frameworks', methods=['GET'])
def list_frameworks():
frameworks = Framework.query.all()
return jsonify(
frameworks=[f.serialize() for f in frameworks]
)
@main.route('/frameworks/g-cloud-7/stats', methods=['GET'])
def get_framework_stats():
seven_days_ago = datetime.datetime.now() + datetime.timedelta(-7)
lot_column = DraftService.data['lot'].cast(String).label('lot')
return str({
'services_drafts': DraftService.query.filter(
DraftService.status == "not-submitted"
).count(),
'services_complete': DraftService.query.filter(
DraftService.status == "submitted"
).count(),
'services_by_lot': dict(db.session.query(
lot_column, func.count(lot_column)
).group_by(lot_column).all()),
'users': User.query.count(),
'active_users': User.query.filter(User.logged_in_at > seven_days_ago).count(),
'suppliers': Supplier.query.count(),
'suppliers_interested': AuditEvent.query.filter(AuditEvent.type == 'register_framework_interest').count(),
'suppliers_with_complete_declaration': SelectionAnswers.find_by_framework('g-cloud-7').count()
})
|
from flask import jsonify
from sqlalchemy.types import String
from sqlalchemy import func
import datetime
from .. import main
from ...models import db, Framework, DraftService, Service, User, Supplier, SelectionAnswers, AuditEvent
@main.route('/frameworks', methods=['GET'])
def list_frameworks():
frameworks = Framework.query.all()
return jsonify(
frameworks=[f.serialize() for f in frameworks]
)
@main.route('/frameworks/g-cloud-7/stats', methods=['GET'])
def get_framework_stats():
seven_days_ago = datetime.datetime.utcnow() + datetime.timedelta(-7)
lot_column = DraftService.data['lot'].cast(String).label('lot')
return str({
'services_drafts': DraftService.query.filter(
DraftService.status == "not-submitted"
).count(),
'services_complete': DraftService.query.filter(
DraftService.status == "submitted"
).count(),
'services_by_lot': dict(db.session.query(
lot_column, func.count(lot_column)
).group_by(lot_column).all()),
'users': User.query.count(),
'active_users': User.query.filter(User.logged_in_at > seven_days_ago).count(),
'suppliers': Supplier.query.count(),
'suppliers_interested': AuditEvent.query.filter(AuditEvent.type == 'register_framework_interest').count(),
'suppliers_with_complete_declaration': SelectionAnswers.find_by_framework('g-cloud-7').count()
})
|
Use UTC time (we want to standardise on this)
|
Use UTC time (we want to standardise on this)
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
fe4a0e0b86300f7da5f45a5541ee9c42c0a76ab7
|
docker_manager/dispatcher.py
|
docker_manager/dispatcher.py
|
import requests_unixsocket
import urllib
import json
import re
def dockerapi_dispatcher(app,request):
method = request.method
uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
session = requests_unixsocket.Session()
unix_socket = urllib.quote_plus( app.config['SOCKET'] )
return getattr(session,method.lower())('http+unix://{0}/{1}'.format(unix_socket,uri),json=request.json)
|
import requests_unixsocket
import urllib
import re
def dockerapi_dispatcher(app, request):
    """Forward an incoming /dockerapi/... HTTP request to the Docker daemon's
    unix socket (path taken from app.config['SOCKET']) and return the
    upstream response.
    """
    # Everything after "/dockerapi/" in the URL is the Docker API path.
    uri = re.match(r"^.+/dockerapi/(.+)", request.url).group(1)
    socket_path = urllib.quote_plus(app.config['SOCKET'])
    session = requests_unixsocket.Session()
    # Pick the session method matching the incoming HTTP verb (get/post/...).
    http_call = getattr(session, request.method.lower())
    return http_call('http+unix://{0}/{1}'.format(socket_path, uri),
                     json=request.json)
|
Remove unused json module import
|
Remove unused json module import
|
Python
|
apache-2.0
|
nathanIL/docker-manager,nathanIL/docker-manager,nathanIL/docker-manager
|
1b18d81f673e9f982e9778823198cbc336285db4
|
tests/grammar_creation_test/NonterminalAddingTest.py
|
tests/grammar_creation_test/NonterminalAddingTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class NonterminalAddingTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NotNonterminalException
class A(Nonterminal):
pass
class B(Nonterminal):
pass
class C(Nonterminal):
pass
class D(Nonterminal):
pass
class NonterminalAddingTest(TestCase):
def test_shouldAddOneNonterminal(self):
g = Grammar(nonterminals=[A])
self.assertTrue(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm([A, B]))
def test_shouldAddMoreNonterminals(self):
g = Grammar(nonterminals=[A, B, C])
self.assertTrue(g.have_nonterm(A))
self.assertTrue(g.have_nonterm([A, B, C]))
self.assertFalse(g.have_nonterm(D))
def test_shouldNotAddInvalidNonterminal(self):
with self.assertRaises(NotNonterminalException):
Grammar(nonterminals=[0])
def test_shouldNotAddOneInvalidNonterminal(self):
with self.assertRaises(NotNonterminalException):
Grammar(nonterminals=[A, B, 1])
if __name__ == '__main__':
main()
|
Add test of adding nonterminals when grammar is created
|
Add test of adding nonterminals when grammar is created
|
Python
|
mit
|
PatrikValkovic/grammpy
|
a81e3f43b83fb003b9708e3a7a581da1dc9190c1
|
django_project/api/urls.py
|
django_project/api/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from .views import LocalitiesAPI, LocalityAPI
urlpatterns = patterns(
'',
url(
r'^localities$', LocalitiesAPI.as_view(),
name='api_localities'
),
url(
r'^localitiy/(?P<uuid>\w{32})$', LocalityAPI.as_view(),
name='api_locality'
)
)
|
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from .views import LocalitiesAPI, LocalityAPI
urlpatterns = patterns(
'',
url(
r'^localities$', LocalitiesAPI.as_view(),
name='api_localities'
),
url(
r'^locality/(?P<uuid>\w{32})$', LocalityAPI.as_view(),
name='api_locality'
)
)
|
Fix spelling for api/locality/:uuid URL
|
Fix spelling for api/locality/:uuid URL
Closes #121
|
Python
|
bsd-2-clause
|
ismailsunni/healthsites,ismailsunni/healthsites,ismailsunni/healthsites,ismailsunni/healthsites
|
8b04e4de91f60a2727f9c7a6f5a56d86279c667d
|
avalonstar/apps/live/urls.py
|
avalonstar/apps/live/urls.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import AwayView, DiscussionView, GameView, PrologueView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Overlays.
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
PrologueView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Overlays.
url(r'^away/$', name='live-away', view=AwayView.as_view()),
url(r'^discussion/$', name='live-discussion', view=DiscussionView.as_view()),
url(r'^epilogue/$', name='live-epilogue', view=EpilogueView.as_view()),
url(r'^game/$', name='live-game', view=GameView.as_view()),
url(r'^prologue/$', name='live-prologue', view=PrologueView.as_view()),
)
|
Add a URL conf for the EpilogueView.
|
Add a URL conf for the EpilogueView.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
78ec6813df74c153501b109b31383a07f02bde07
|
furious/_pkg_meta.py
|
furious/_pkg_meta.py
|
version_info = (1, 2, 0)
version = '.'.join(map(str, version_info))
|
version_info = (1, 3, 0)
version = '.'.join(map(str, version_info))
|
Upgrade the version to 1.3.0
|
Upgrade the version to 1.3.0
|
Python
|
apache-2.0
|
beaulyddon-wf/furious,Workiva/furious,mattsanders-wf/furious,beaulyddon-wf/furious,mattsanders-wf/furious,andreleblanc-wf/furious,Workiva/furious,andreleblanc-wf/furious
|
5cbbe25d9bbfbea804d22f399ec16ed438c492e4
|
moss/plots.py
|
moss/plots.py
|
import os.path as op
import numpy as np
import nibabel as nib
import seaborn as sns
def plot_mask_distribution(fname, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
ax = sns.kdeplot(data, hist=True, label=axis, color=color)
else:
ax = sns.kdeplot(data, shade=True, label=axis, color=color)
ax.legend()
ax.set_title(op.basename(fname))
return ax
|
import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
|
Allow for shade or hist in mask plot
|
Allow for shade or hist in mask plot
|
Python
|
bsd-3-clause
|
mwaskom/moss,mwaskom/moss
|
77a6ff9fa19349bcb9428e79b7a7bf05cb4fb2a2
|
demo/apps/catalogue/migrations/0011_auto_20160616_1335.py
|
demo/apps/catalogue/migrations/0011_auto_20160616_1335.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0013_make_rendition_upload_callable'),
('catalogue', '0010_auto_20160616_1048'),
]
operations = [
migrations.AddField(
model_name='category',
name='description',
field=models.TextField(verbose_name='Description', blank=True),
),
migrations.AddField(
model_name='category',
name='image',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True),
),
migrations.AlterField(
model_name='category',
name='name',
field=models.CharField(max_length=255, verbose_name='Name', db_index=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0013_make_rendition_upload_callable'),
('catalogue', '0010_auto_20160616_1048'),
]
operations = [
migrations.AddField(
model_name='category',
name='description',
field=models.TextField(verbose_name='Description', blank=True),
),
migrations.AddField(
model_name='category',
name='image',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True),
),
migrations.AddField(
model_name='category',
name='name',
field=models.CharField(max_length=255, verbose_name='Name', db_index=True),
),
]
|
Add field instead of alter field
|
Add field instead of alter field
|
Python
|
mit
|
pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo
|
99f862b6c123b8c6d81e931254c061e64431bccc
|
pysingcells/logger.py
|
pysingcells/logger.py
|
# -*- coding: utf-8 -*-
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
    """Attach a stderr stream handler to the root logger.

    :param options: mapping that may contain a ``logging_level`` entry;
        when the key is absent the level defaults to 10 (DEBUG).
    """
    # Bug fix: the original ternary was inverted --
    # `10 if "logging_level" in options else options["logging_level"]`
    # returned the default when the key WAS present and raised KeyError
    # when it was absent. Use the supplied level, falling back to 10.
    log_level = options["logging_level"] if "logging_level" in options else 10
    log.setLevel(log_level)
    handler = logging.StreamHandler(sys.stderr)
    formatter = logging.Formatter('%(levelname)s :: %(message)s')
    handler.setFormatter(formatter)
    handler.setLevel(log_level)
    log.addHandler(handler)
|
# -*- coding: utf-8 -*-
""" Function related to logging """
# stp import
import sys
import logging
# fief import
from fief import filter_effective_parameters as fief
log = logging.getLogger()
@fief
def setup_logging(options):
    """Attach a stderr stream handler (with timestamps) to the root logger.

    :param options: mapping that may contain a ``logging_level`` entry;
        when the key is absent the level defaults to 10 (DEBUG).
    """
    # Bug fix: the original ternary was inverted --
    # `10 if "logging_level" in options else options["logging_level"]`
    # returned the default when the key WAS present and raised KeyError
    # when it was absent. Use the supplied level, falling back to 10.
    log_level = options["logging_level"] if "logging_level" in options else 10
    log.setLevel(log_level)
    handler = logging.StreamHandler(sys.stderr)
    formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
    handler.setFormatter(formatter)
    handler.setLevel(log_level)
    log.addHandler(handler)
|
Add time in logging trace
|
Add time in logging trace
|
Python
|
mit
|
Fougere87/pysingcells
|
1bc174d357423964191625faad6733466320face
|
application.py
|
application.py
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
Add RedisIntegration to Sentry trial
|
Add RedisIntegration to Sentry trial
This should expose additional performance stats.
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
6de1eb3e3279af6fa09ddafd0e220becac7edf21
|
author/urls.py
|
author/urls.py
|
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic.base import RedirectView
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.views import login
from django.contrib.auth.decorators import login_required
from author import views
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('author-login'),
permanent=False)),
url(r'^login/$',
login,
{'template_name': 'author/login.html'},
name='author-login'),
url(r'^panel/$',
login_required(function=views.PanelView.as_view(),
login_url=reverse_lazy('author-login')),
name='panel'),
)
|
from django.conf.urls import patterns
from django.conf.urls import url
from django.views.generic.base import RedirectView
from django.core.urlresolvers import reverse_lazy
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Permission
from django.contrib.auth.views import login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import permission_required
from author import views
def author_required(function=None, login_url=None):
author_permission = Permission(
content_type=ContentType.objects.get(app_label='game',
model='task'),
codename='add_task',
)
actual_decorator = permission_required(author_permission,
login_url=login_url)
if function is None:
return actual_decorator(login_required)
return actual_decorator(login_required(function))
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('author-login'),
permanent=False)),
url(r'^login/$',
login,
{'template_name': 'author/login.html'},
name='author-login'),
url(r'^panel/$',
author_required(function=views.PanelView.as_view(),
login_url=reverse_lazy('author-login')),
name='panel'),
)
|
Add permission checks in author app
|
Add permission checks in author app
|
Python
|
bsd-3-clause
|
stefantsov/blackbox3,stefantsov/blackbox3,stefantsov/blackbox3
|
c80c9aa63426f138d725eeecf74f386fbee61aed
|
gistmail.py
|
gistmail.py
|
"""\
GistMail
Email [email protected] with a link and get a response with that article's summary.
"""
from summarize import summarize_page
from flask import Flask, render_template
# Flask application
app = Flask(__name__)
# Configuration
app.config.from_object('settings')
app.config.from_envvar('SETTINGS_MODULE', silent=True)
app.config.from_pyfile('settings_local.py', silent=True)
# Views
@app.route('/')
def index():
return render_template('index.html')
@app.route('/incoming', methods=['POST'])
def incoming():
summary = summarize_page()
# TODO: Email summary
print summary
return 'TODO: Implement'
# Run development server
if __name__ == '__main__':
app.run(app.config['HOST'], app.config['PORT'], app.debug)
|
"""\
GistMail
Email [email protected] with a link and get a response with that article's summary.
"""
from summarize import summarize_page
from flask import Flask, render_template, request
# Flask application
app = Flask(__name__)
# Configuration
app.config.from_object('settings')
app.config.from_envvar('SETTINGS_MODULE', silent=True)
app.config.from_pyfile('settings_local.py', silent=True)
# Views
@app.route('/')
def index():
return render_template('index.html')
@app.route('/incoming', methods=['GET', 'POST'])
def incoming():
if request.method == 'POST':
summary = summarize_page()
# TODO: Email summary
print summary
return 'TODO: Implement'
# Run development server
if __name__ == '__main__':
app.run(app.config['HOST'], app.config['PORT'], app.debug)
|
Add GET handler to /incoming.
|
Add GET handler to /incoming.
|
Python
|
mit
|
joeyespo/gistmail.com,joeyespo/gistmail.com
|
69029424035c8506a46b94422fd347871ee51269
|
accelerator/tests/factories/program_family_factory.py
|
accelerator/tests/factories/program_family_factory.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
Add new field to factory
|
Add new field to factory
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
3860e0a9ac99c4c4422630e4133bf3b35927bf41
|
conf_site/sponsorship/tests/__init__.py
|
conf_site/sponsorship/tests/__init__.py
|
import factory
from symposion.schedule.tests.factories import ConferenceFactory
from symposion.sponsorship.models import SponsorLevel, Sponsor
class SponsorLevelFactory(factory.django.DjangoModelFactory):
conference = factory.SubFactory(ConferenceFactory)
name = factory.Faker("color_name")
order = factory.Faker("pyint")
cost = factory.Faker("pyint")
description = factory.Faker("paragraph")
class Meta:
model = SponsorLevel
class SponsorFactory(factory.django.DjangoModelFactory):
name = factory.Faker("company")
display_url = factory.Faker("uri")
external_url = factory.Faker("uri")
contact_name = factory.Faker("name")
contact_email = factory.Faker("company_email")
level = factory.SubFactory(SponsorLevelFactory)
class Meta:
model = Sponsor
|
import factory
from symposion.schedule.tests.factories import ConferenceFactory
from symposion.sponsorship.models import SponsorLevel, Sponsor
class SponsorLevelFactory(factory.django.DjangoModelFactory):
conference = factory.SubFactory(ConferenceFactory)
name = factory.Faker("color_name")
order = factory.Faker("pyint")
cost = factory.Faker("pyint")
description = factory.Faker("paragraph")
class Meta:
model = SponsorLevel
class SponsorFactory(factory.django.DjangoModelFactory):
name = factory.Faker("company")
display_url = factory.Faker("uri")
external_url = factory.Faker("uri")
contact_name = factory.Faker("name")
contact_email = factory.Faker("company_email")
level = factory.SubFactory(SponsorLevelFactory)
active = factory.Faker("boolean")
class Meta:
model = Sponsor
|
Allow SponsorFactory to create active sponsors.
|
Allow SponsorFactory to create active sponsors.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
58321c0471f10169ea5c0f705cdb35825036d77f
|
cat_leds.py
|
cat_leds.py
|
#!/usr/bin/env python
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [17, 23, 24, 22, 27, 25, 5]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins)
GPIO.cleanup()
|
#!/usr/bin/env python
"""Display standard input on LEDs."""
import sys
import RPi.GPIO as GPIO
from seven_segment import print_leds
pins = [6, 19, 5, 13, 20, 12, 16]
GPIO.setmode(GPIO.BCM)
GPIO.setup(pins, GPIO.OUT)
pipe_contents = sys.stdin.read()
print_leds(pipe_contents, pins, 1/2.0)
GPIO.cleanup()
|
Set up for my new 7 seg display.
|
Set up for my new 7 seg display.
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
caaf8e40c99f410b09580b81c4fa7a094395319c
|
test/test_progress_bar.py
|
test/test_progress_bar.py
|
import pytest
from downloads.download import _progress_bar
@pytest.mark.parametrize(
"current,block_size,total_size",
[
(
100,
32,
100 * 32,
),
(
75,
32,
100 * 32,
),
(
50,
32,
100 * 32,
),
(
25,
32,
100 * 32,
),
(
0,
32,
100 * 32,
),
],
)
def test_progress_bar(current, block_size, total_size):
bar = _progress_bar(
current=current, block_size=block_size, total_size=total_size
)
assert bar.count("#") == current
assert bar.split()[-1] == f"{current:.1f}%"
|
import pytest
from downloads.download import _progress_bar
@pytest.mark.parametrize(
"current,block_size,total_size",
[
(
100,
32,
100 * 32,
),
(
75,
32,
100 * 32,
),
(
50,
32,
100 * 32,
),
(
25,
32,
100 * 32,
),
(
0,
32,
100 * 32,
),
],
)
def test_progress_bar(current, block_size, total_size):
bar = _progress_bar(
current=current, block_size=block_size, total_size=total_size
)
assert bar.count("#") == current
assert bar.split()[-1] == f"{current:.1f}%"
assert len(bar) == 100 + 8
|
Test width of progress bar
|
Test width of progress bar
|
Python
|
mit
|
audy/downloads
|
6b777abcc694ff8991cd522300d8d545fb49586a
|
cct/core2/stronginfo.py
|
cct/core2/stronginfo.py
|
import logging
import colorlog
logging.STRONGINFO = logging.DEBUG + 5
logging.addLevelName(logging.STRONGINFO, 'STRONGINFO')
def logStrongInfo(msg, *args, **kwargs):
"""Log a message with severity 'STRONGINFO' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format."""
return logging.log(logging.STRONGINFO, msg, *args, **kwargs)
setattr(logging, 'stronginfo', logStrongInfo)
def logStrongInfoMethod(self, msg, *args, **kwargs):
"""Log 'msg % args' with severity 'STRONGINFO'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.stronginfo("Houston, we have a %s", "interesting problem", exc_info=1)
"""
return self.log(logging.STRONGINFO, msg, *args, **kwargs)
setattr(logging.getLoggerClass(), 'stronginfo', logStrongInfoMethod)
colorlog.default_log_colors['STRONGINFO'] = 'bold_green'
|
import logging
import colorlog
logging.STRONGINFO = logging.INFO + 5
logging.addLevelName(logging.STRONGINFO, 'STRONGINFO')
def logStrongInfo(msg, *args, **kwargs):
"""Log a message with severity 'STRONGINFO' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format."""
return logging.log(logging.STRONGINFO, msg, *args, **kwargs)
setattr(logging, 'stronginfo', logStrongInfo)
def logStrongInfoMethod(self, msg, *args, **kwargs):
"""Log 'msg % args' with severity 'STRONGINFO'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.stronginfo("Houston, we have a %s", "interesting problem", exc_info=1)
"""
return self.log(logging.STRONGINFO, msg, *args, **kwargs)
setattr(logging.getLoggerClass(), 'stronginfo', logStrongInfoMethod)
colorlog.default_log_colors['STRONGINFO'] = 'bold_green'
|
Correct priority for STRONGINFO log level
|
Correct priority for STRONGINFO log level
|
Python
|
bsd-3-clause
|
awacha/cct,awacha/cct,awacha/cct
|
d2079a26f1a259aa69dea061be2b192137030fd1
|
ckanext/nhm/lib/form.py
|
ckanext/nhm/lib/form.py
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-nhm
# Created by the Natural History Museum in London, UK
def list_to_form_options(values, allow_empty=False, allow_empty_text='None'):
'''Format a list of values into a list of dict suitable
for use in forms: [{value: x, name: y}]
:param values: list or list of tuples [(value, name)]
:param allow_empty: if true, will add none option (optional, default: False)
:param allow_empty_text: label for none value (optional, default: 'None')
'''
options = []
if allow_empty:
options.append({'value': None, 'text': allow_empty_text or None})
for value in values:
if isinstance(value, str):
name = value
else:
# If this is a tuple or list use (value, name)
name = value[1]
value = value[0]
options.append({'value': value, 'text': name})
return options
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-nhm
# Created by the Natural History Museum in London, UK
def list_to_form_options(values, allow_empty=False, allow_empty_text='None'):
    '''Format a list of values into a list of dicts suitable
    for use in forms: [{value: x, text: y}]

    :param values: list of plain values, or of (value, name) pairs
    :param allow_empty: if true, prepend an empty option (optional, default: False)
    :param allow_empty_text: label for the empty option (optional, default: 'None')
    '''
    # Seed with the empty option when requested; its value is the empty
    # string so it round-trips through HTML forms cleanly.
    options = [{'value': '', 'text': allow_empty_text or None}] if allow_empty else []
    for item in values:
        if isinstance(item, str):
            # Bare string: the value doubles as its display name.
            options.append({'value': item, 'text': item})
        else:
            # (value, name) pair.
            options.append({'value': item[0], 'text': item[1]})
    return options
Switch empty value to empty string rather than None
|
Switch empty value to empty string rather than None
When None is used this is rendered in the HTML as a string, which then when it is returned to the CKAN server becomes 'None' not None.
An empty string represents no value better.
|
Python
|
mit
|
NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm
|
f30d2f5b09ae3a62bff02165a173af6e25345a60
|
trevornet/nets/pynet/aimath.py
|
trevornet/nets/pynet/aimath.py
|
import math
import sys
def sigmoid(x):
try:
val = 1/(1 + math.exp(-x))
except OverflowError:
val = sys.float_info.max
return val
def sigmoidprime(x):
return (1 - sigmoid(x))*sigmoid(x)
|
import math
import sys
def sigmoid(x):
try:
val = 1/(1 + math.exp(-x))
except OverflowError:
val = 0.
return val
def sigmoidprime(x):
return (1 - sigmoid(x))*sigmoid(x)
|
Fix mistake in sigmoid function
|
Fix mistake in sigmoid function
|
Python
|
mit
|
tmerr/trevornet
|
4510a4a22965d002bd41293fd8fe629c8285800d
|
tests/test_errors.py
|
tests/test_errors.py
|
# coding: pyxl
import pytest
from pyxl.codec.register import pyxl_decode
from pyxl.codec.parser import ParseError
def test_malformed_if():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
this is incorrect!
<else>bar</else>
</frag>""")
def test_multiple_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else>bar</else>
<else>baz</else>
</frag>""")
def test_nested_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else><else>bar</else></else>
</frag>""")
|
# coding: pyxl
import pytest
from pyxl.codec.register import pyxl_decode
from pyxl.codec.parser import ParseError
from pyxl.codec.html_tokenizer import BadCharError
def test_malformed_if():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
this is incorrect!
<else>bar</else>
</frag>""")
def test_multiple_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else>bar</else>
<else>baz</else>
</frag>""")
def test_nested_else():
with pytest.raises(ParseError):
pyxl_decode(b"""
<frag>
<if cond="{true}">foo</if>
<else><else>bar</else></else>
</frag>""")
def test_bad_char():
with pytest.raises(BadCharError):
pyxl_decode(b"""<_bad_element></lm>""")
|
Add test for BadCharError exception.
|
Add test for BadCharError exception.
|
Python
|
apache-2.0
|
pyxl4/pyxl4
|
25e4730c4614a26cdecd60eb0846e69578353d2c
|
tomcrypt/__init__.py
|
tomcrypt/__init__.py
|
import os
import ctypes
# We need to manually load the _core SO the first time so that we can specify
# that it use the RTLD_GLOBAL flag. Otherwise (when not on a Mac) the runtime
# linker will not be able to resolve undefined symbols in the other modules.
_core_handle = ctypes.CDLL(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '_core.so'),
ctypes.RTLD_GLOBAL
)
class Error(ValueError):
def __init__(self, *args, **kwargs):
self.code = kwargs.get('code', -1)
ValueError.__init__(self, *args)
class LibError(Error):
pass
|
import ctypes
class Error(ValueError):
def __init__(self, *args, **kwargs):
self.code = kwargs.get('code', -1)
ValueError.__init__(self, *args)
class LibError(Error):
pass
# We need to manually load the _core the first time so that we can specify
# that it use the RTLD_GLOBAL flag. Otherwise (when not on a Mac) the runtime
# linker will not be able to resolve undefined symbols in the other modules.
# This must also be done after the above exceptions are defined so that they
# are availible to the core.
from . import _core
ctypes.PyDLL(_core.__file__, mode=ctypes.RTLD_GLOBAL)
|
Use import to locate _core
|
Linking: Use import to locate _core
Related to #8
|
Python
|
bsd-3-clause
|
mikeboers/PyTomCrypt,mikeboers/PyTomCrypt,mikeboers/PyTomCrypt
|
b4ce1a023bf047524f40ac63f40d46a70c8f6f77
|
src/dirtyfields/dirtyfields.py
|
src/dirtyfields/dirtyfields.py
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
Check primary keys for foreign key and one-to-one fields
|
Check primary keys for foreign key and one-to-one fields
|
Python
|
bsd-3-clause
|
stanhu/django-dirtyfields
|
625307615f0dac8801b35ac5689a92706556ad01
|
main.py
|
main.py
|
import struct
id_i = 0
value_i = 0.0
id_i_1 = 0
value_i_1 = 1.0
number = 0
with open("sort.dat", "r") as f:
for line in f:
line = line.replace('(', '')
line = line.replace(')', '')
line = line.replace(' ', '')
line = line.replace('\n', '')
# print line
[id, value] = line.split(',')
id = int(id)
id = id*128/1024
value = float(value)
# print(id)
# print(value)
if (id_i == id):
value_i = value_i + value
number = number + 1
else:
if (id_i > id_i_1 + 300 or value_i < value_i_1 - 0.09):
print '({0}, {1})'.format(id, value_i/number)
id_i_1 = id_i
value_i_1 = value_i/number
id_i = id
value_i = value
number = 1
print '({0}, {1})'.format(id_i, value_i/number)
# for line in open("sort.dat").readlines():
# print line
|
Update the code of visualization.
|
Update the code of visualization.
|
Python
|
mit
|
SigmaQuan/NTM-Keras
|
|
67ea33e9e46ff321c2915d46a354d66756ff9c2b
|
main.py
|
main.py
|
#!/usr/bin/env python
"""
This is the main file for a script that reads info off a game on kongregate.com and acts upon it.
"""
# import line/s for builtin modules
# import pyautogui
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "[email protected]"
__status__ = "Development"
# game located at http://www.kongregate.com/games/Volch/endless-expansion?haref=HP_TGTM_endless-expansion
def main():
"""
Just now runs main()
inputs: none
outputs: none
"""
pass
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""
This is the main file for a script that reads info off a game on kongregate.com and acts upon it.
"""
# import line/s for builtin modules
# import pyautogui
__author__ = "Alex Flores Escarcega"
__copyright__ = "Copyright 2007, Alex Flores Escarcega"
__credits__ = ["Alex Flores Escarcega"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "Alex Flores Escarcega"
__email__ = "[email protected]"
__status__ = "Development"
# game located at http://www.kongregate.com/games/Volch/endless-expansion?haref=HP_TGTM_endless-expansion
def find_game_region():
"""
Finds the top left coordinates of the game by substracting (700 + 300) from the location of the game.
The 300 comes from the width of the top_right_corner.png file that is used to locate the top right corner.
Input: None.
Output: the top left coordinates. Two elements in a tuple.
coors = pyautogui.locateOnScreen("images/top_right_corner.png")
return (coors[0],coors[1])
def main():
"""
Just now runs main()
inputs: none
outputs: none
"""
print find_game_region()
if __name__ == "__main__":
main()
|
Add method to find the coordinates of game
|
Add method to find the coordinates of game
|
Python
|
mit
|
hydrophilicsun/Automating-Endless-Expansion
|
8b6daefb98c20ddb288604f7aa2202c21df2c526
|
main.py
|
main.py
|
__author__ = 'alex'
print 45
print 54
|
_author__ = 'alex'
import sys
import xml.dom.minidom as dom
def get_Res_Matrix(length,nodes,nets_d,elem_type):
Res = [[[] for j in range(length)] for i in range(length)]
for i in range(nodes.length):
if nodes[i].nodeType != elem_type: continue
name = nodes[i].nodeName
if name == "diode":
net_from, net_to = nets_d[(int)(nodes[i].getAttribute("net_from"))], nets_d[(int)(nodes[i].getAttribute("net_to"))]
res, rev_res = (float)(nodes[i].getAttribute("resistance")), (float)(nodes[i].getAttribute("reverse_resistance"))
Res[net_from][net_to].append(res)
Res[net_to][net_from].append(rev_res)
else:
if name == "capactor" or name == "resistor":
net_from, net_to = nets_d[(int)(nodes[i].getAttribute("net_from"))], nets_d[(int)(nodes[i].getAttribute("net_to"))]
res = (float)(nodes[i].getAttribute("resistance"))
Res[net_from][net_to].append(res)
Res[net_to][net_from].append(res)
return Res
def parse_xml():
elem_type = dom.Element.ELEMENT_NODE
doc = dom.parse(sys.argv[1])
#parse xml
for node in doc.childNodes:
if node.nodeName == "schematics": break
nodes = node.childNodes
nets_d = {}
for i in range(nodes.length):
if nodes[i].nodeType != elem_type: continue
if nodes[i].nodeName != "net": continue
nets_d[(int)(nodes[i].getAttribute("id"))] = 0
length = 0
for x in sorted(nets_d):
nets_d[x] = length
length += 1
return nodes,nets_d,elem_type,length
if __name__ == "__main__":
if len(sys.argv) <> 3:
print("check the arguments")
exit()
nodes,nets_d,elem_type,length = parse_xml()
Res = get_Res_Matrix(length,nodes,nets_d,elem_type)
print Res
|
Read and parse xml to Matrix
|
Read and parse xml to Matrix
|
Python
|
mit
|
BaydinAlexey/proglangs_baydin,BaydinAlexey/proglangs_baydin
|
fa2fb3387912474eff2b6c2a14d6304fcf5cd1f8
|
erasmus/cogs/bible/testing_server_preferences_group.py
|
erasmus/cogs/bible/testing_server_preferences_group.py
|
from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group(self)
self.daily_bread.initialize_from_parent(self)
|
from __future__ import annotations
from typing import TYPE_CHECKING
from botus_receptus.app_commands import test_guilds_only
from discord import app_commands
from .daily_bread.daily_bread_preferences_group import DailyBreadPreferencesGroup
if TYPE_CHECKING:
from ...erasmus import Erasmus
from ...l10n import GroupLocalizer
from .types import ParentCog
@app_commands.default_permissions(administrator=True)
@app_commands.guild_only()
@test_guilds_only
class TestingServerPreferencesGroup(
app_commands.Group, name='test-server-prefs', description='Testing group'
):
bot: Erasmus
localizer: GroupLocalizer
daily_bread = DailyBreadPreferencesGroup()
def initialize_from_parent(self, parent: ParentCog, /) -> None:
self.bot = parent.bot
self.localizer = parent.localizer.for_group('serverprefs')
self.daily_bread.initialize_from_parent(self)
|
Use serverprefs localizer for TestingServerPreferencesGroup
|
Use serverprefs localizer for TestingServerPreferencesGroup
|
Python
|
bsd-3-clause
|
bryanforbes/Erasmus
|
d044576e08e06dd6a2c68ab7868c281cd2979764
|
wsgi.py
|
wsgi.py
|
from app import create_app
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_ADMIN_ENVIRONMENT'] = configs[environment]
application = create_app()
if __name__ == "__main__":
application.run()
|
from credstash import getAllSecrets
import os
default_env_file = '/home/ubuntu/environment'
environment = 'live'
if os.path.isfile(default_env_file):
with open(default_env_file, 'r') as environment_file:
environment = environment_file.readline().strip()
# on aws get secrets and export to env
os.environ.update(getAllSecrets(region="eu-west-1"))
from config import configs
os.environ['NOTIFY_ADMIN_ENVIRONMENT'] = configs[environment]
from app import create_app
application = create_app()
if __name__ == "__main__":
application.run()
|
Order of imports means the config doesn't get set up properly
|
Order of imports means the config doesn't get set up properly
|
Python
|
mit
|
gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin
|
f3ec0593bb67db25c4f5af4b3b00d82d5e4e0f04
|
csv2ofx/mappings/gls.py
|
csv2ofx/mappings/gls.py
|
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter('Auftraggeber/Empfänger'),
}
|
Add import for other python versions
|
Add import for other python versions
|
Python
|
mit
|
reubano/csv2ofx,reubano/csv2ofx
|
3e52078450a4205fdfaa2d4ba2448bce3d3d19d7
|
gpio_components/input_devices.py
|
gpio_components/input_devices.py
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
class InputDevice(object):
def __init__(self, pin):
self.pin = pin
GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP)
def is_pressed(self):
return GPIO.input(self.pin) == 0
class Button(InputDevice):
pass
|
from RPi import GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
class InputDevice(object):
def __init__(self, pin=None):
if pin is None:
raise InputDeviceError('No GPIO pin number given')
self.pin = pin
self.pull = GPIO.PUD_UP
self.edge = GPIO.FALLING
self.active = 0
self.inactive = 1
GPIO.setup(pin, GPIO.IN, self.pull)
def is_active(self):
return GPIO.input(self.pin) == self.active
def wait_for_input(self):
GPIO.wait_for_edge(self.pin, self.edge)
def add_callback(self, callback=None, bouncetime=1000):
if callback is None:
raise InputDeviceError('No callback function given')
GPIO.add_event_detect(self.pin, self.edge, callback, bouncetime)
def remove_callback(self):
GPIO.remove_event_detect(self.pin)
class Button(InputDevice):
pass
class InputDeviceError(Exception):
pass
|
Add wait_for_input and add_callback methods to InputDevice
|
Add wait_for_input and add_callback methods to InputDevice
|
Python
|
bsd-3-clause
|
lurch/python-gpiozero,waveform80/gpio-zero,agiledata/python-gpiozero,Gadgetoid/python-gpiozero,cymplecy/python-gpioone,RPi-Distro/python-gpiozero,MrHarcombe/python-gpiozero
|
2f55855ed1c1ea7862e239af76fab57f32affc0c
|
hamlpy/views/generic/__init__.py
|
hamlpy/views/generic/__init__.py
|
from __future__ import print_function, unicode_literals
import django.views.generic
pouet = [
'ArchiveIndexView', 'YearArchiveView', 'MonthArchiveView',
'WeekArchiveView', 'DayArchiveView', 'TodayArchiveView', 'DateDetailView',
'DetailView', 'CreateView', 'UpdateView', 'DeleteView', 'ListView',
]
class HamlExtensionTemplateView(object):
def get_template_names(self):
names = super(HamlExtensionTemplateView, self).get_template_names()
haml_names = []
for name in names:
if name.endswith((".html", ".htm", ".xml")):
haml_names.append(name[:-len(".html")] + ".haml")
haml_names.append(name[:-len(".html")] + ".hamlpy")
return haml_names + names
for view in pouet:
locals()[view] = type(view, (HamlExtensionTemplateView, getattr(django.views.generic, view)), {})
|
from __future__ import print_function, unicode_literals
import django.views.generic
pouet = [
'ArchiveIndexView', 'YearArchiveView', 'MonthArchiveView',
'WeekArchiveView', 'DayArchiveView', 'TodayArchiveView', 'DateDetailView',
'DetailView', 'CreateView', 'UpdateView', 'DeleteView', 'ListView',
]
class HamlExtensionTemplateView(object):
def get_template_names(self):
names = super(HamlExtensionTemplateView, self).get_template_names()
haml_names = []
for name in names:
if name.endswith((".html", ".htm", ".xml")):
haml_names.append(name[:-len(".html")] + ".haml")
haml_names.append(name[:-len(".html")] + ".hamlpy")
return haml_names + names
for view in pouet:
locals()[view] = type(str(view), (HamlExtensionTemplateView, getattr(django.views.generic, view)), {})
|
Fix views file Python 3 issue
|
Fix views file Python 3 issue
|
Python
|
mit
|
Psycojoker/HamlPy,nyaruka/django-hamlpy,Psycojoker/HamlPy,nyaruka/django-hamlpy,Psycojoker/HamlPy
|
f61aa6a8088721c31510beef2145e3de7a46b1f4
|
mako_pipeline/__init__.py
|
mako_pipeline/__init__.py
|
# coding: utf-8
ASSETS = {
'debug': False,
'media_url': '/',
}
def configure(data):
ASSETS.update(data)
|
# coding: utf-8
ASSETS = {
'debug': False,
'media_url': '/',
}
def configure(data=None):
if data:
ASSETS.update(data)
return ASSETS
|
Return config when calling configure def
|
Return config when calling configure def
|
Python
|
mit
|
rcmachado/mako-pipeline
|
ed437d9a5b4ccb29eb5240b176773a475ff371c3
|
grd/urls.py
|
grd/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from rest_framework.authtoken.views import obtain_auth_token
from grd import views
router = routers.SimpleRouter()
router.register(r'devices', views.DeviceView)
urlpatterns = [
# Examples:
# url(r'^$', 'ereuse.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(router.urls)),#, namespace='api')),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/register/$', views.Register.as_view(), name='do-register'),
url(r'^api/devices/(?P<pk>[^/.]+)/log/$', views.DeviceLog.as_view({'get': 'list'}), name='device-log'),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from rest_framework.authtoken.views import obtain_auth_token
from grd import views
router = routers.DefaultRouter()
router.register(r'devices', views.DeviceView)
urlpatterns = [
# Examples:
# url(r'^$', 'ereuse.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(router.urls)),#, namespace='api')),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/register/$', views.Register.as_view(), name='do-register'),
url(r'^api/devices/(?P<pk>[^/.]+)/log/$', views.DeviceLog.as_view({'get': 'list'}), name='device-log'),
]
|
Use DefaultRouter which includes a default API root view.
|
Use DefaultRouter which includes a default API root view.
|
Python
|
agpl-3.0
|
eReuse/grd,eReuse/grd
|
0a9f378784e8c30cdf16d4d1caaf3b98f112bb90
|
nap/meta.py
|
nap/meta.py
|
from __future__ import unicode_literals
class Meta(object):
'''Generic container for Meta classes'''
def __new__(cls, meta=None):
# Return a new class base on ourselves
attrs = dict(
(name, getattr(meta, name))
for name in dir(meta)
if not name[0] == '_'
)
return object.__new__(type(str('Meta'), (cls,), attrs))
|
from __future__ import unicode_literals
class Meta(object):
'''Generic container for Meta classes'''
def __new__(cls, meta=None):
# Return a new class base on ourselves
attrs = dict(
(name, getattr(meta, name))
for name in dir(meta)
if not name[0] == '_' and hasattr(cls, name)
)
return object.__new__(type(str('Meta'), (cls,), attrs))
|
Make Meta class enforce only known properties
|
Make Meta class enforce only known properties
|
Python
|
bsd-3-clause
|
MarkusH/django-nap,limbera/django-nap
|
ea5bfe240cc349144e089f606534726863f2c21b
|
media/sites/lapinkansa.py
|
media/sites/lapinkansa.py
|
import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
|
import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ = 'news-excerpt' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
|
Fix to new page layout
|
Fix to new page layout
|
Python
|
mit
|
HIIT/digivaalit-2015,HIIT/digivaalit-2015,HIIT/digivaalit-2015
|
c2355829706b2b728ca369474157cac58f9c35d1
|
examples/utils.py
|
examples/utils.py
|
import subprocess
import os
import sys
STEPS = "Step:"
RESULT = "Result:"
SETUP = "Setup:"
def enter_depend_test():
# Simple magic for using scripts within a source tree
BASEDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if os.path.isdir(os.path.join(BASEDIR, 'depend_test_framework')):
os.environ['PATH'] += ":" + os.path.join(BASEDIR, 'examples')
sys.path.insert(0, BASEDIR)
def run_cmd(cmd):
return subprocess.check_output(cmd.split())
|
import subprocess
import time
import re
import os
import sys
import paramiko
STEPS = "Step:"
RESULT = "Result:"
SETUP = "Setup:"
def enter_depend_test():
# Simple magic for using scripts within a source tree
BASEDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if os.path.isdir(os.path.join(BASEDIR, 'depend_test_framework')):
os.environ['PATH'] += ":" + os.path.join(BASEDIR, 'examples')
sys.path.insert(0, BASEDIR)
def run_cmd(cmd):
return subprocess.check_output(cmd.split())
def _get_guest_ip(guest_name):
# TODO: fallback to agent
ret = run_cmd("virsh -q domifaddr --source lease %s" % guest_name)
match = re.search(r"([\S]+)$", ret)
if not match:
return
return match.group(1).split("/")[0]
def login_guest_run(guest_name, cmd, passwd, retry=10):
"""
return stdout string and stderr string
"""
guest_ip = None
i = 0
while not guest_ip:
i += 1
guest_ip = _get_guest_ip(guest_name)
if not guest_ip:
time.sleep(5)
if i == retry:
raise Exception("Time out: %s cmd: %s" % (guest_name, cmd))
try:
client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(guest_ip, username="root", password=passwd)
_, stdout, stderr = client.exec_command(cmd)
return stdout.read(), stderr.read()
finally:
client.close()
|
Add help function to run command in guest
|
Add help function to run command in guest
Signed-off-by: Luyao Huang <[email protected]>
|
Python
|
mit
|
LuyaoHuang/depend-test-framework
|
24520fc982d0a1a2b1b6fa7b493888fbdd3ef6a8
|
util/versioncheck.py
|
util/versioncheck.py
|
#!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "egrep -or 'Mininet [0-9\.]+\w*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
|
#!/usr/bin/python
from subprocess import check_output as co
from sys import exit
# Actually run bin/mn rather than importing via python path
version = 'Mininet ' + co( 'PYTHONPATH=. bin/mn --version', shell=True )
version = version.strip()
# Find all Mininet path references
lines = co( "egrep -or 'Mininet [0-9\.\+]+\w*' *", shell=True )
error = False
for line in lines.split( '\n' ):
if line and 'Binary' not in line:
fname, fversion = line.split( ':' )
if version != fversion:
print "%s: incorrect version '%s' (should be '%s')" % (
fname, fversion, version )
error = True
if error:
exit( 1 )
|
Allow + in version number
|
Allow + in version number
|
Python
|
bsd-3-clause
|
mininet/mininet,mininet/mininet,mininet/mininet
|
7a5ad08262afa4b7f4b8f580755136145d681ace
|
tests/apps/tornado_server/__init__.py
|
tests/apps/tornado_server/__init__.py
|
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2020
import os
import sys
from ...helpers import testenv
from ..utils import launch_background_thread
app_thread = None
if not any((app_thread, os.environ.get('GEVENT_TEST'), os.environ.get('CASSANDRA_TEST')):
testenv["tornado_port"] = 10813
testenv["tornado_server"] = ("http://127.0.0.1:" + str(testenv["tornado_port"]))
# Background Tornado application
from .app import run_server
app_thread = launch_background_thread(run_server, "Tornado")
|
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2020
import os
import sys
from ...helpers import testenv
from ..utils import launch_background_thread
app_thread = None
if not any((app_thread, os.environ.get('GEVENT_TEST'), os.environ.get('CASSANDRA_TEST'))):
testenv["tornado_port"] = 10813
testenv["tornado_server"] = ("http://127.0.0.1:" + str(testenv["tornado_port"]))
# Background Tornado application
from .app import run_server
app_thread = launch_background_thread(run_server, "Tornado")
|
Fix expression in tornado_server app
|
fix(test): Fix expression in tornado_server app
Signed-off-by: Ferenc Géczi <[email protected]>
|
Python
|
mit
|
instana/python-sensor,instana/python-sensor
|
6d17a7becc0ddebceba0bd77f876bf9744a2d62e
|
server/patch_hosts.py
|
server/patch_hosts.py
|
#!/usr/bin/env python3
import argparse
from qlmdm import set_gpg
from qlmdm.server import patch_hosts
set_gpg('server')
def parse_args():
parser = argparse.ArgumentParser(description='Queue a patch for one or '
'more hosts')
parser.add_argument('--host', action='append', help='Host(s) on which to '
'execute command (default is all)')
parser.add_argument('--mode', type=lambda m: int(m, 8), help='Mode for '
'patched file (specify in octal, default 0755)')
parser.add_argument('target_path', help='Relative path of file on '
'destination systems')
parser.add_argument('source_file', help='Local file containing patch '
'content')
args = parser.parse_args()
return args
def main():
args = parse_args()
kwargs = {}
if args.mode:
kwargs['patch_mode'] = args.mode
kwargs['patch_content'] = open(args.source_file, 'rb').read()
kwargs['hosts'] = args.host if args.host else None
patch_hosts(args.target_path, **kwargs)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import argparse
from qlmdm import set_gpg
from qlmdm.server import patch_hosts
set_gpg('server')
def parse_args():
parser = argparse.ArgumentParser(description='Queue a patch for one or '
'more hosts')
parser.add_argument('--host', action='append', help='Host(s) on which to '
'execute command (default is all)')
parser.add_argument('--mode', type=lambda m: int(m, 8), help='Mode for '
'patched file (specify in octal, default 0755)')
parser.add_argument('target_path', help='Relative path of file on '
'destination systems')
parser.add_argument('source_file', help='Local file containing patch '
'content')
args = parser.parse_args()
return args
def main():
args = parse_args()
kwargs = {}
if args.mode:
kwargs['patch_mode'] = args.mode
kwargs['patch_content'] = open(args.source_file, 'rb').read()
kwargs['hosts'] = args.host if args.host else None
patch_hosts(args.target_path, **kwargs)
if __name__ == '__main__':
main()
|
Remove extra space in usage message text
|
Remove extra space in usage message text
|
Python
|
apache-2.0
|
quantopian/PenguinDome,quantopian/PenguinDome
|
b21d63d8df7d17e150702c531ef449f409100eff
|
wot_clan_battles/views_auth.py
|
wot_clan_battles/views_auth.py
|
import six
from django.http import HttpResponseRedirect
from django.shortcuts import reverse
from django.conf import settings
from openid.consumer import consumer
import wargaming
wot = wargaming.WoT(settings.WARGAMING_KEY, language='ru', region='ru')
def auth_callback(request):
oidconsumer = consumer.Consumer(request.session, None)
url = 'http://%s%s' % (request.META['HTTP_HOST'], reverse('auth_callback'))
result = oidconsumer.complete(request.GET, url)
if result.status == consumer.SUCCESS:
identifier = result.getDisplayIdentifier()
print identifier
user_id, username = six.moves.urllib_parse.urlparse(identifier).path.split('/')[2].split('-')
request.session['user_id'] = user_id
request.session['username'] = username
request.session['user_clan_id'] = wot.account.info(account_id=user_id)[str(user_id)]['clan_id']
return HttpResponseRedirect('/')
def auth_login(request):
oidconsumer = consumer.Consumer(dict(request.session), None)
openid_request = oidconsumer.begin(u'http://ru.wargaming.net/id/openid/')
trust_root = 'http://%s' % request.META['HTTP_HOST']
return_to = '%s%s' % (trust_root, reverse('auth_callback'))
redirect_to = openid_request.redirectURL(trust_root, return_to, immediate=False)
return HttpResponseRedirect(redirect_to)
|
import six
from django.http import HttpResponseRedirect
from django.shortcuts import reverse
from django.conf import settings
from openid.consumer import consumer
import wargaming
wot = wargaming.WoT(settings.WARGAMING_KEY, language='ru', region='ru')
def auth_callback(request):
oidconsumer = consumer.Consumer(request.session, None)
url = 'http://%s%s' % (request.META['HTTP_HOST'], reverse('auth_callback'))
result = oidconsumer.complete(request.GET, url)
if result.status == consumer.SUCCESS:
identifier = result.getDisplayIdentifier()
user_id, username = six.moves.urllib_parse.urlparse(identifier).path.split('/')[2].split('-')
request.session['user_id'] = user_id
request.session['username'] = username
request.session['user_clan_id'] = wot.account.info(account_id=user_id)[str(user_id)]['clan_id']
return HttpResponseRedirect('/')
def auth_login(request):
oidconsumer = consumer.Consumer(dict(request.session), None)
openid_request = oidconsumer.begin(u'http://ru.wargaming.net/id/openid/')
trust_root = 'http://%s' % request.META['HTTP_HOST']
return_to = '%s%s' % (trust_root, reverse('auth_callback'))
redirect_to = openid_request.redirectURL(trust_root, return_to, immediate=False)
return HttpResponseRedirect(redirect_to)
|
Remove debug print from view
|
Remove debug print from view
|
Python
|
mit
|
monester/wot-battles,monester/wot-battles,monester/wot-battles,monester/wot-battles
|
045c1405c4b8c7bc502814672c0da36e3e4a8a15
|
kolibri/deployment/default/settings/debug_panel.py
|
kolibri/deployment/default/settings/debug_panel.py
|
from __future__ import absolute_import, print_function, unicode_literals
from .base import * # noqa
INTERNAL_IPS = ['127.0.0.1']
DEBUG_TOOLBAR_CONFIG = {
"SHOW_TOOLBAR_CALLBACK": lambda x: True,
}
MIDDLEWARE_CLASSES = (
'debug_panel.middleware.DebugPanelMiddleware',
) + MIDDLEWARE_CLASSES # noqa
INSTALLED_APPS += [ # noqa
'debug_toolbar',
'debug_panel',
]
ENABLE_DATA_BOOTSTRAPPING = False
|
from __future__ import absolute_import, print_function, unicode_literals
from .base import * # noqa
INTERNAL_IPS = ['127.0.0.1']
DEBUG_TOOLBAR_CONFIG = {
"SHOW_TOOLBAR_CALLBACK": lambda x: True,
}
MIDDLEWARE_CLASSES = (
'debug_panel.middleware.DebugPanelMiddleware',
) + MIDDLEWARE_CLASSES # noqa
INSTALLED_APPS += [ # noqa
'debug_toolbar',
'debug_panel',
]
ENABLE_DATA_BOOTSTRAPPING = False
DEBUG_PANEL_ACTIVE = True
|
Add DEBUG_PANEL_ACTIVE setting back into custom DDT settings file
|
Add DEBUG_PANEL_ACTIVE setting back into custom DDT settings file
|
Python
|
mit
|
mrpau/kolibri,mrpau/kolibri,MingDai/kolibri,DXCanas/kolibri,DXCanas/kolibri,jonboiser/kolibri,lyw07/kolibri,benjaoming/kolibri,benjaoming/kolibri,christianmemije/kolibri,learningequality/kolibri,learningequality/kolibri,christianmemije/kolibri,lyw07/kolibri,MingDai/kolibri,jonboiser/kolibri,benjaoming/kolibri,mrpau/kolibri,jonboiser/kolibri,mrpau/kolibri,indirectlylit/kolibri,DXCanas/kolibri,lyw07/kolibri,MingDai/kolibri,rtibbles/kolibri,christianmemije/kolibri,rtibbles/kolibri,benjaoming/kolibri,DXCanas/kolibri,rtibbles/kolibri,MingDai/kolibri,indirectlylit/kolibri,learningequality/kolibri,rtibbles/kolibri,christianmemije/kolibri,indirectlylit/kolibri,jonboiser/kolibri,indirectlylit/kolibri,learningequality/kolibri,lyw07/kolibri
|
69a417c421d774c4998ae721b85211a60757ce85
|
fit_blackbody.py
|
fit_blackbody.py
|
import numpy as np
from scipy.optimize import curve_fit
from astropy import units as u
from planck import planck_function
def bb_flux(wavelength, temperature, angular_radius):
bb_flux = (np.pi) * planck_function(wavelength, temperature) * (angular_radius)**2
return bb_flux
def bb_flux_nounits(wavelength, temperature, angular_radius):
flux = bb_flux(wavelength, temperature, angular_radius)
return flux.value
def calculate_chisq(y_data, y_data_uncertainties, x_data, func, parameters):
chisq = np.sum(((y_data - func(x_data, *parameters))/y_data_uncertainties)**2)
return chisq
def bb_fit_parameters(wavelengths, fluxes, flux_uncertainties):
popt, pcov = curve_fit(bb_flux_nounits, wavelengths, fluxes, p0=[5000, 1.0e-10])
temperature = popt[0]
angular_radius = popt[1]
perr = np.sqrt(np.diag(pcov))
chisq = calculate_chisq(fluxes, flux_uncertainties, wavelengths, bb_flux_nounits, popt)
return temperature, angular_radius, perr
|
import numpy as np
from scipy.optimize import curve_fit
from astropy import units as u
from planck import planck_function, dplanck_dT
def bb_flux(wavelength, temperature, angular_radius):
bb_flux = (np.pi) * planck_function(wavelength, temperature) * (angular_radius)**2
return bb_flux
def bb_flux_nounits(wavelength, temperature, angular_radius):
flux = bb_flux(wavelength, temperature, angular_radius)
return flux.value
def dBB_dT(wavelength, temperature, angular_radius):
dBB_dT = (np.pi) * dplanck_dT(wavelength, temperature) * (angular_radius)**2
return dBB_dT
def dBB_dT_nounits(wavelength, temperature, angular_radius):
dBB_dT_nounits = dBB_dT(wavelength, temperature, angular_radius)
return dBB_dT_nounits
def calculate_chisq(y_data, y_data_uncertainties, x_data, func, parameters):
chisq = np.sum(((y_data - func(x_data, *parameters))/y_data_uncertainties)**2)
return chisq
def bb_fit_parameters(wavelengths, fluxes, flux_uncertainties):
popt, pcov = curve_fit(bb_flux_nounits, wavelengths, fluxes, p0=[5000, 1.0e-10])
temperature = popt[0]
angular_radius = popt[1]
perr = np.sqrt(np.diag(pcov))
return temperature, angular_radius, perr
|
Add derivative of BB flux with temperature
|
Add derivative of BB flux with temperature
Added functions which calculate the derivative of the blackbody flux
with temperature (with units and without units)
|
Python
|
mit
|
JALusk/SNoBoL,JALusk/SuperBoL,JALusk/SNoBoL
|
fcc4eb6feaf05c950bcb0cb3f5861e631dacd8d4
|
migrations/versions/16ef0d8ffae1_add_user_roles.py
|
migrations/versions/16ef0d8ffae1_add_user_roles.py
|
"""Add user roles
Revision ID: 16ef0d8ffae1
Revises: 8acb1453abb
Create Date: 2015-03-17 15:45:05.406297
"""
# revision identifiers, used by Alembic.
revision = '16ef0d8ffae1'
down_revision = '8acb1453abb'
from alembic import op
import sqlalchemy as sa
from findaconf import db
from findaconf.models import Group
def upgrade():
roles = ['user', 'moderator', 'admin']
[db.session.add(Group(title=role)) for role in roles]
db.session.commit()
def downgrade():
[db.session.delete(role) for role in Group.query.all()]
db.session.commit()
|
"""Add user roles
Revision ID: 16ef0d8ffae1
Revises: 8acb1453abb
Create Date: 2015-03-17 15:45:05.406297
"""
# revision identifiers, used by Alembic.
revision = '16ef0d8ffae1'
down_revision = '8acb1453abb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from findaconf.models import Group
def upgrade():
roles = ['user', 'moderator', 'admin']
data = [{'title': r} for r in roles]
# Create an ad-hoc table to use for the insert statement.
group_table = table('group',
column('title', sa.String),
)
# Insert data.
op.bulk_insert(group_table, data)
def downgrade():
op.execute(Group.__table__.delete())
|
Refactor data migration for Group model
|
Refactor data migration for Group model
Undelying explanation as in commit f580a63909166efccc8030eb36d8b2fe697f7236
|
Python
|
mit
|
koorukuroo/findaconf,cuducos/findaconf,koorukuroo/findaconf,cuducos/findaconf,cuducos/findaconf,koorukuroo/findaconf
|
eddf3ec729fd385fd3ec2ec425db1d777a302e46
|
tensorprob/distributions/exponential.py
|
tensorprob/distributions/exponential.py
|
import tensorflow as tf
from .. import config
from ..distribution import Distribution
@Distribution
def Exponential(lambda_, name=None):
X = tf.placeholder(config.dtype, name=name)
Distribution.logp = tf.log(lambda_) - lambda_*X
def cdf(lim):
return tf.constant(1, dtype=config.dtype) - tf.exp(-lambda_*lim)
Distribution.integral = lambda lower, upper: cdf(upper) - cdf(lower)
return X
|
import tensorflow as tf
from .. import config
from ..distribution import Distribution
@Distribution
def Exponential(lambda_, name=None):
X = tf.placeholder(config.dtype, name=name)
Distribution.logp = tf.log(lambda_) - lambda_*X
def integral(lower, upper):
return tf.exp(-lambda_*lower) - tf.exp(-lambda_*upper)
Distribution.integral = integral
return X
|
Correct integral used for Exponential distributon
|
Correct integral used for Exponential distributon
|
Python
|
mit
|
ibab/tensorprob,tensorprob/tensorprob,ibab/tensorfit
|
56902792b2a7fdd25bd64781e9e98a63db2ee348
|
all/__init__.py
|
all/__init__.py
|
###----------------------------------------------------------------------------
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
###----------------------------------------------------------------------------
|
###----------------------------------------------------------------------------
__version_tuple = (1, 0, 0)
__version__ = ".".join([str(num) for num in __version_tuple])
# These are exposed to Sublime to implement the core of the help system.
from .help import HyperHelpCommand, HyperHelpNavigateCommand
from .help import HyperHelpListener
# These are exposed to packages that may want to interface with the hyperhelp
# core for use in their own packages.
from .operations import package_help_scan
###----------------------------------------------------------------------------
def version():
"""
Get the currently installed version of hyperhelp as a tuple.
"""
return __version_tuple
###----------------------------------------------------------------------------
|
Include a package version number
|
Include a package version number
This includes in the core package the concept of a version number that
underlying code could use to determine what version of the core it is
interfacing with.
This is only really needed for packages that get at the underlying
core code in hyperhelp, which at the moment would only be the
companion HyperHelpAuthor package.
To this end (as an experiment) the code for loading in the help index
files is exposed to anyone that wants to import it as a test for how
this will eventually work. In particular, the idea is to put all of
the symbols meant to be accessible to outside code into the
hyperhelp.all module namespace (whicn is unfortunate but there seems
to be no satisfactory way around it).
|
Python
|
mit
|
OdatNurd/hyperhelp
|
28d48dd681dd20d839aa0748ee478947a0eb3da0
|
hiss/__init__.py
|
hiss/__init__.py
|
import pygame
class Sprite():
def __init__(self, name):
self.name = name
self.costumes = []
def addCostume(self, costumePath):
costume = pygame.image.load(costumePath)
self.costumes.append(costume)
|
import pygame, time, random
class Stage():
def __init__(self, name):
self.name = name
self.costumes = []
def addCostume(self, costumePath):
costume = pygame.image.load(costumePath)
self.costumes.append(costume)
class Sprite(Stage):
def __init__(self, name):
Stage.__init__(self, name)
self.xpos = 0
self.ypos = 0
def beginGame():
pygame.init()
screen = pygame.display.set_mode((800, 600)) # Add customizable dimensions later on?
caption = pygame.display.set_caption("Hiss Project")
|
Split Sprite and Stage, begin code
|
Split Sprite and Stage, begin code
The way I see it, sprites should be an extension of the stage. After all, the stage is like one big sprite that can't move.
Also, begin code has been added.
|
Python
|
mit
|
PySlither/Slither,PySlither/Slither
|
eef8c0c99a6a02602cc9da75eadf180e65ad55b0
|
collectd_haproxy/__init__.py
|
collectd_haproxy/__init__.py
|
version_info = (1, 0, 1)
__version__ = ".".join(map(str, version_info))
try:
import collectd
collectd_present = True
except ImportError:
collectd_present = False
from .plugin import HAProxyPlugin
if collectd_present:
HAProxyPlugin.register(collectd)
|
try:
import collectd
collectd_present = True
except ImportError:
collectd_present = False
from .plugin import HAProxyPlugin
version_info = (1, 0, 1)
__version__ = ".".join(map(str, version_info))
if collectd_present:
HAProxyPlugin.register(collectd)
|
Fix style test complaing about non-top import.
|
Fix style test complaing about non-top import.
|
Python
|
mit
|
wglass/collectd-haproxy
|
342515edc89d6666a5dc9064de7d2ceea9a7b468
|
accelerator/tests/test_program_cycle.py
|
accelerator/tests/test_program_cycle.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
from django.test import TestCase
from accelerator.tests.factories import ProgramCycleFactory
class TestProgramCycle(TestCase):
def test_display_name_no_short_name(self):
cycle = ProgramCycleFactory(short_name=None)
assert cycle.name in str(cycle)
def test_program_cycle_with_open_applications_has_default_application_type(self):
cycle = ProgramCycleFactory()
if (cycle.applications_open and
not cycle.default_application_type):
self.assertRaises("Open applications must have a default application type.")
def test_program_cycle_with_open_applications_has_default_application_type_and_associated_programs(self):
cycle = ProgramCycleFactory()
if (cycle.applications_open and
not cycle.default_application_type
and cycle.programs.exists()):
self.assertRaises("Default application type can’t be removed"
"from the cycle until the program cycle is"
"disassociated with all programs")
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
from django.test import TestCase
from accelerator.tests.factories import ProgramCycleFactory
class TestProgramCycle(TestCase):
def test_display_name_no_short_name(self):
cycle = ProgramCycleFactory(short_name=None)
assert cycle.name in str(cycle)
# def test_program_cycle_has_default_application_type(self):
# cycle = ProgramCycleFactory()
# if (cycle.applications_open and
# not cycle.default_application_type):
# self.assertRaises("Open applications must have"
# "a default application type.")
# def test_program_cycle_cannot_remove_default_application_type(self):
# cycle = ProgramCycleFactory()
# if (cycle.applications_open and
# not cycle.default_application_type
# and cycle.programs.exists()):
# self.assertRaises("Default application type can’t be removed"
# "from the cycle until the program cycle is"
# "disassociated with all programs")
|
Add tests for the functionality added
|
[AC-7049] Add tests for the functionality added
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
8e10801ab28b3db02b301c27966aeaabc154329b
|
opps/core/models/image.py
|
opps/core/models/image.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models.publishable import Publishable
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True)
image = models.ImageField(upload_to="uploads/")
description = models.CharField(_(u"Description"), max_length=255,
null=True, blank=True)
credit = models.CharField(_(u"Credit"), max_length=255, blank=False)
def __unicode__(self):
return self.title
class Meta:
app_label = 'core'
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models.publishable import Publishable
from opps.core.models import Source
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True)
image = models.ImageField(upload_to="uploads/")
description = models.CharField(_(u"Description"), max_length=255,
null=True, blank=True)
source = models.ForeignKey(Source, null=True, blank=True)
def __unicode__(self):
return self.title
class Meta:
app_label = 'core'
|
Change credit to source models
|
Change credit to source models
|
Python
|
mit
|
opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps
|
6dd1881fc2631602d7e34aede208abf42ed688aa
|
renderMenu.py
|
renderMenu.py
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for
from models import app, db, FoodMenu, FoodServices
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
@app.route('/')
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result
menu = json.loads(foodMenu)['response']['data']
serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result
locations = json.loads(serviceInfo)['response']['data']
return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
#!/usr/bin/env python
import json, os, requests
from awsauth import S3Auth
from datetime import datetime
from pytz import timezone
from flask import Flask, render_template, url_for, jsonify
from models import app, db, FoodMenu, FoodServices
MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN')
@app.route('/')
def renderMenu():
nowWaterloo = datetime.now(timezone('America/Toronto'))
foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result
menu = json.loads(foodMenu)['response']['data']
serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result
locations = json.loads(serviceInfo)['response']['data']
return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN)
@app.route('/foodmenu')
def foodmenu():
foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result
menu = json.loads(foodMenu)['response']['data']
return jsonify(menu)
@app.route('/foodservices')
def foodservices():
serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result
locations = json.loads(serviceInfo)['response']['data']
return jsonify(locations)
if __name__ == "__main__":
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Add API endpoints to serve data in JSON format.
|
Add API endpoints to serve data in JSON format.
|
Python
|
mit
|
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
|
3b79447e1027cc4965ab3272c34740b82d79c66c
|
tools/perf/benchmarks/start_with_url.py
|
tools/perf/benchmarks/start_with_url.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import startup
import page_sets
from telemetry import benchmark
@benchmark.Disabled
class StartWithUrlCold(benchmark.Benchmark):
"""Measure time to start Chrome cold with startup URLs"""
tag = 'cold'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'cold': True,
'pageset_repeat': 5}
@benchmark.Enabled('android', 'has tabs')
class StartWithUrlWarm(benchmark.Benchmark):
"""Measure time to start Chrome warm with startup URLs"""
tag = 'warm'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'warm': True,
'pageset_repeat': 10}
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import startup
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android', 'has tabs')
class StartWithUrlCold(benchmark.Benchmark):
"""Measure time to start Chrome cold with startup URLs"""
tag = 'cold'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'cold': True,
'pageset_repeat': 5}
@benchmark.Enabled('android', 'has tabs')
class StartWithUrlWarm(benchmark.Benchmark):
"""Measure time to start Chrome warm with startup URLs"""
tag = 'warm'
test = startup.StartWithUrl
page_set = page_sets.StartupPagesPageSet
options = {'warm': True,
'pageset_repeat': 10}
|
Enable statup_with_url.cold benchmark on android.
|
Enable statup_with_url.cold benchmark on android.
The benchmark works locally, and collects an important datapoint for our
current optimization work.
Review URL: https://codereview.chromium.org/508303004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#298526}
|
Python
|
bsd-3-clause
|
axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,M4sse/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,Just-D/chromium-1,Chilledheart/chromium,Jonekee/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,Fireblend/chromium
-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-cro
sswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk
|
f177e6acbafa514f6e6ac4563cb92c49f3213618
|
data_structures/Stack/Python/Stack.py
|
data_structures/Stack/Python/Stack.py
|
# Author: AlexBanks97
# Purpose: LIFO Stack implementation using python array.
# Date: October 15th 2017
class Stack(object):
def __init__(self):
# Initialize stack as empty array
self.stack = []
|
# Author: AlexBanks97
# Purpose: LIFO Stack implementation using python array.
# Date: October 15th 2017
class Stack(object):
def __init__(self):
# Initialize stack as empty array
self.stack = []
# Return and remove the last element of the stack array.
def pop(self):
# If the stack is not empty, pop.
if self.stack.length > 0:
return self.stack.pop()
|
Add pop method and implementation
|
Add pop method and implementation
|
Python
|
cc0-1.0
|
ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms
|
e0ce0095fd488852a7d565ecaf49eba0b8dbd7d5
|
db/sql_server/pyodbc.py
|
db/sql_server/pyodbc.py
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
Add column support for sql server
|
Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111
|
Python
|
apache-2.0
|
theatlantic/django-south,theatlantic/django-south
|
98405875fd8ec682caa04244a900e6ce9eac9acb
|
pavement.py
|
pavement.py
|
import sys
from paver.easy import task, needs, path, sh, cmdopts, options
from paver.setuputils import setup, install_distutils_tasks
from distutils.extension import Extension
from distutils.dep_util import newer
sys.path.insert(0, path('.').abspath())
import version
setup(name='microdrop-plugin-manager',
version=version.getVersion(),
description='Microdrop plugin manager.',
keywords='',
author='Christian Fobel',
author_email='[email protected]',
url='https://github.com/wheeler-microfluidics/mpm',
license='LGPLv2.1',
packages=['mpm', ],
install_requires=['configobj', 'path-helpers', 'pip-helpers>=0.6',
'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'],
# Install data listed in `MANIFEST.in`
include_package_data=True,
entry_points = {'console_scripts': ['mpm = mpm.bin:main']})
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
|
import platform
import sys
from paver.easy import task, needs, path, sh, cmdopts, options
from paver.setuputils import setup, install_distutils_tasks
from distutils.extension import Extension
from distutils.dep_util import newer
sys.path.insert(0, path('.').abspath())
import version
install_requires = ['configobj', 'path-helpers', 'pip-helpers>=0.6',
'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3']
if platform.system() == 'Windows':
install_requires += ['pywin32']
setup(name='microdrop-plugin-manager',
version=version.getVersion(),
description='Microdrop plugin manager.',
keywords='',
author='Christian Fobel',
author_email='[email protected]',
url='https://github.com/wheeler-microfluidics/mpm',
license='LGPLv2.1',
packages=['mpm', ],
install_requires=install_requires,
# Install data listed in `MANIFEST.in`
include_package_data=True,
entry_points = {'console_scripts': ['mpm = mpm.bin:main']})
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
|
Add pywin32 as Windows required package
|
[FIX] Add pywin32 as Windows required package
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/mpm
|
db2c50de660228745cc9a7fafaaa9b3d9e451aee
|
python/src/setup.py
|
python/src/setup.py
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.8.3.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="[email protected]",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
"""Setup specs for packaging, distributing, and installing gcs lib."""
import distribute_setup
distribute_setup.use_setuptools()
import setuptools
setuptools.setup(
name="GoogleAppEngineCloudStorageClient",
version="1.9.0.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="[email protected]",
keywords="google app engine cloud storage",
url="https://code.google.com/p/appengine-gcs-client/",
license="Apache License 2.0",
description=("This library is the preferred way of accessing Google "
"Cloud Storage from App Engine. It was designed to "
"replace the Files API. As a result it contains much "
"of the same functionality (streaming reads and writes but "
"not the complete set of GCS APIs). It also provides key "
"stability improvements and a better overall developer "
"experience."),
exclude_package_data={"": ["README"]},
zip_safe=True,
)
|
Create PyPI Release for 1.9.0.0.
|
PYTHON: Create PyPI Release for 1.9.0.0.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=6943
|
Python
|
apache-2.0
|
aozarov/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client
|
2b18bc0e0f3b9e0ca14935e2648fb9e6d637c8d0
|
backend/rest.py
|
backend/rest.py
|
#!/usr/bin/env python
from mcapi.mcapp import app
from mcapi import tservices, public, utils, private, access, process, machine, template, tservices
from mcapi.user import account, datadirs, datafiles, reviews, ud, usergroups, projects, conditions
from mcapi.stater import stater
import sys
if __name__ == '__main__':
if len(sys.argv) >= 2:
debug = True
else:
debug = False
if len(sys.argv) == 3:
app.run(debug=debug, host='0.0.0.0')
else:
app.run(debug=debug)
|
#!/usr/bin/env python
from mcapi.mcapp import app
from mcapi import tservices, public, utils, private, access, process, machine, template, tservices
from mcapi.user import account, datadirs, datafiles, reviews, ud, usergroups, projects, conditions
from mcapi.stater import stater
import sys
from os import environ
_HOST = environ.get('MC_SERVICE_HOST') or 'localhost'
_PORT = environ.get('MC_SERVICE_PORT') or '5000'
if __name__ == '__main__':
if len(sys.argv) >= 2:
debug = True
else:
debug = False
app.run(debug=debug, host=_HOST, port=int(_PORT))
|
Allow host and port that the service listens on to be set from environment variables.
|
Allow host and port that the service listens on to be set from environment variables.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
f764e9e0f3cba7c387952fe8f19aa702825e8378
|
tests/test_core/test_server.py
|
tests/test_core/test_server.py
|
from mock import patch
import sure # noqa
from moto.server import main
def test_wrong_arguments():
try:
main(["name", "test1", "test2", "test3"])
assert False, ("main() when called with the incorrect number of args"
" should raise a system exit")
except SystemExit:
pass
@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
main(["s3"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(5000)
@patch('moto.server.run_simple')
def test_port_argument(run_simple):
main(["s3", "--port", "8080"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(8080)
|
from mock import patch
import sure # noqa
from moto.server import main, create_backend_app, DomainDispatcherApplication
def test_wrong_arguments():
try:
main(["name", "test1", "test2", "test3"])
assert False, ("main() when called with the incorrect number of args"
" should raise a system exit")
except SystemExit:
pass
@patch('moto.server.run_simple')
def test_right_arguments(run_simple):
main(["s3"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(5000)
@patch('moto.server.run_simple')
def test_port_argument(run_simple):
main(["s3", "--port", "8080"])
func_call = run_simple.call_args[0]
func_call[0].should.equal("0.0.0.0")
func_call[1].should.equal(8080)
def test_domain_dispatched():
dispatcher = DomainDispatcherApplication(create_backend_app)
backend_app = dispatcher.get_application("email.us-east1.amazonaws.com")
backend_app.view_functions.keys()[0].should.equal('EmailResponse.dispatch')
def test_domain_without_matches():
dispatcher = DomainDispatcherApplication(create_backend_app)
dispatcher.get_application.when.called_with("not-matching-anything.com").should.throw(RuntimeError)
def test_domain_dispatched_with_service():
# If we pass a particular service, always return that.
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
backend_app = dispatcher.get_application("s3.us-east1.amazonaws.com")
backend_app.view_functions.keys()[0].should.equal('ResponseObject.key_response')
|
Add more test coverage for the server.
|
Add more test coverage for the server.
|
Python
|
apache-2.0
|
william-richard/moto,Affirm/moto,Brett55/moto,gjtempleton/moto,whummer/moto,Affirm/moto,rocky4570/moto,ZuluPro/moto,spulec/moto,Brett55/moto,gjtempleton/moto,okomestudio/moto,ZuluPro/moto,alexdebrie/moto,rouge8/moto,william-richard/moto,gjtempleton/moto,botify-labs/moto,braintreeps/moto,2rs2ts/moto,spulec/moto,okomestudio/moto,DataDog/moto,ZuluPro/moto,kefo/moto,ZuluPro/moto,botify-labs/moto,kennethd/moto,heddle317/moto,im-auld/moto,Affirm/moto,william-richard/moto,william-richard/moto,william-richard/moto,ImmobilienScout24/moto,heddle317/moto,whummer/moto,ZuluPro/moto,jrydberg/moto,dbfr3qs/moto,tootedom/moto,jszwedko/moto,spulec/moto,ZuluPro/moto,spulec/moto,mrucci/moto,Brett55/moto,gjtempleton/moto,whummer/moto,whummer/moto,zonk1024/moto,dbfr3qs/moto,2rs2ts/moto,Affirm/moto,botify-labs/moto,whummer/moto,behanceops/moto,dbfr3qs/moto,2rs2ts/moto,william-richard/moto,Affirm/moto,2rs2ts/moto,kefo/moto,araines/moto,heddle317/moto,riccardomc/moto,spulec/moto,okomestudio/moto,Brett55/moto,botify-labs/moto,rocky4570/moto,rocky4570/moto,whummer/moto,silveregg/moto,EarthmanT/moto,IlyaSukhanov/moto,ludia/moto,heddle317/moto,dbfr3qs/moto,okomestudio/moto,okomestudio/moto,Brett55/moto,andresriancho/moto,spulec/moto,kefo/moto,heddle317/moto,Affirm/moto,botify-labs/moto,dbfr3qs/moto,kefo/moto,gjtempleton/moto,rocky4570/moto,2rs2ts/moto,botify-labs/moto,rocky4570/moto,Brett55/moto,pior/moto,jotes/moto,dbfr3qs/moto,rocky4570/moto,kefo/moto,okomestudio/moto,2mf/moto
|
54add3fa95ab450e5afcbbf7fe8a3205bfc5889c
|
indra/tests/test_reading_scripts_aws.py
|
indra/tests/test_reading_scripts_aws.py
|
import boto3
from os import path, chdir
from subprocess import check_call
from nose.plugins.attrib import attr
from indra.tools.reading import submit_reading_pipeline as srp
s3 = boto3.client('s3')
HERE = path.dirname(path.abspath(__file__))
@attr('nonpublic')
def test_normal_pmid_reading_call():
chdir(path.expanduser('~'))
# Put an id file on s3
basename = 'local_pmid_test_run'
s3_prefix = 'reading_results/%s/' % basename
s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids',
Body='\n'.join(['PMID000test%d' % n for n in range(4)]))
# Call the reading tool
sub = srp.PmidSubmitter(basename, ['sparser'])
job_name, cmd = sub._make_command(0, 2)
check_call(cmd)
# Remove garbage on s3
res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix)
for entry in res['Contents']:
print("Removing %s..." % entry['Key'])
s3.delete_object(Bucket='bigmech', Key=entry['Key'])
return
|
import boto3
from os import path, chdir
from subprocess import check_call
from nose.plugins.attrib import attr
from indra.tools.reading import submit_reading_pipeline as srp
from indra.sources import sparser
s3 = boto3.client('s3')
HERE = path.dirname(path.abspath(__file__))
@attr('nonpublic')
def test_normal_pmid_reading_call():
chdir(path.expanduser('~'))
# Put an id file on s3
basename = 'local_pmid_test_run'
s3_prefix = 'reading_results/%s/' % basename
s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids',
Body='\n'.join(['PMID000test%d' % n for n in range(4)]))
# Call the reading tool
sub = srp.PmidSubmitter(basename, ['sparser'])
job_name, cmd = sub._make_command(0, 2)
check_call(cmd)
# Remove garbage on s3
res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix)
for entry in res['Contents']:
print("Removing %s..." % entry['Key'])
s3.delete_object(Bucket='bigmech', Key=entry['Key'])
return
@attr('nonpublic')
def test_bad_sparser():
txt = ('Disruption of the AP-1 binding site reversed the transcriptional '
'responses seen with Fos and Jun.')
sp = sparser.process_text(txt, timeout=1)
assert sp is None, "Reading succeeded unexpectedly."
|
Add test with currently known-stall sentance.
|
Add test with currently known-stall sentance.
|
Python
|
bsd-2-clause
|
bgyori/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra
|
fbe78315efcbf646710da6bf7d6a9d6c26fa8645
|
bayesian_jobs/handlers/clean_postgres.py
|
bayesian_jobs/handlers/clean_postgres.py
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
results = self.postgres.session.query(WorkerResult).join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None)
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
self.postgres.session.commit()
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
results = self.postgres.session.query(WorkerResult).join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None)
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
|
Mark error flag if an object is not present on S3
|
Mark error flag if an object is not present on S3
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.