commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
bc1c65315fe22146b2d9a0955acc6e286b069657 | Add problem 48 | mc10/project-euler | problem_48.py | problem_48.py | '''
Problem 48
@author: Kevin Ji
'''
def self_power_with_mod(number, mod):
    """Return number ** number reduced modulo mod.

    Uses the built-in three-argument pow(), which performs modular
    exponentiation in O(log number) multiplications instead of the
    original O(number) repeated-multiplication loop.

    number is expected to be a non-negative integer (the Euler-48 driver
    passes 1..1000); mod is a positive integer modulus.
    """
    return pow(number, number, mod)
# Project Euler 48: find the last ten digits of 1^1 + 2^2 + ... + 1000^1000.
MOD = 10000000000  # keep only the last ten decimal digits

number = sum(self_power_with_mod(power, MOD) for power in range(1, 1001)) % MOD
print(number)
| mit | Python |
|
008625fef55f8f58ab80b883d34ae5d40e55c721 | Add initial test for binheap | constanthatz/data-structures | test_binheap.py | test_binheap.py | import pytest
from binheap import Binheap
def test_init_bh():
    """Binheap() with no argument starts empty; an iterable seeds binlist."""
    b = Binheap()
    # BUG FIX: `b.binlist is []` always evaluates False, because a fresh
    # list literal is a brand-new object.  The intent is value equality,
    # so compare with == instead of identity.
    assert b.binlist == []
    c = Binheap([1, 2])
    assert c.binlist == [1, 2]
| mit | Python |
|
d43c67a59dcf6c43667d633df8b6f8a3eb84d611 | add HelloKhalaClient2.py | moyangvip/khala,moyangvip/khala,moyangvip/khala | examples/testClient/HelloKhalaClient2.py | examples/testClient/HelloKhalaClient2.py | #moss's HelloKhala Client
#add time type
import socket
import struct
import json
def _command(name):
    # Every request in the HelloKhala protocol is a JSON object with a
    # single "type" field; build that payload for the given command name.
    return {'type': name}

def login():
    """Payload for the 'login' command."""
    return _command('login')

def logout():
    """Payload for the 'logout' command."""
    return _command('logout')

def devType():
    """Payload for the device-type query ('dev')."""
    return _command('dev')

def isLogin():
    """Payload for the login-state query."""
    return _command('isLogin')

def nodeId():
    """Payload for the node-id query."""
    return _command('nodeId')

def time():
    """Payload for the server-time query."""
    return _command('time')

def default():
    """Sentinel returned for unrecognized commands (see the dispatch loop)."""
    return -1
# --- interactive client loop (Python 2) ---
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', 2007))

# Dispatch table: command name typed by the user -> payload builder.
operator = {'login':login,'logout':logout,'devType':devType,'isLogin':isLogin,'nodeId':nodeId,'time':time}

while True:
    input = raw_input('[input cmd]:')  # NOTE: shadows the built-in input()
    # Unknown commands fall through to default(), which returns -1.
    sendStr = operator.get(input,default)()
    if sendStr == -1:
        print 'err type:',input,'please input again!'
        continue
    strjson = json.dumps(sendStr)
    print '[send msg]:',strjson
    # Frame the message as a big-endian 4-byte length prefix + JSON body.
    inputLen = len(strjson)
    pstr = '>I'+ str(inputLen)+'s'
    bytes = struct.pack(pstr, inputLen,strjson)  # NOTE: shadows the built-in bytes
    s.send(bytes)
    d = s.recv(1024)
    if len(d) == 0:
        # An empty read means the server closed the connection.
        print 'exit'
        break
    print '[rev msg]:',d
    print ''
    if d == 'logout success!':
        print 'exit'
        break
s.close()
| bsd-2-clause | Python |
|
# Fixture for the type-inference tests: one module defining two classes
# that share the name ``A``.  The trailing ``## type`` markers are
# expectations consumed by the test harness -- do not edit them.
class A(object):
    def method(self):
        return 1

A().method() ## type int

class A(object):  # rebinds the module-level name ``A``
    def method(self):
        return "test"

A().method() ## type str
| lgpl-2.1 | Python |
|
4887a269a28656c288461165078943f99e2390be | add settings template for ansible later | naggie/crates,naggie/crates,naggie/crates | ansible/crates_settings.py | ansible/crates_settings.py | from base_settings import *
# NOTE(review): this file is an Ansible template -- the {{...}} placeholders
# below are substituted at deploy time.

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'zhj_+x#q-&vqh7&)7a3it@tcsf50@fh9$3&&j0*4pmt1x=ye+1'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

# NOTE(review): '.' is unusual for ALLOWED_HOSTS (a leading dot normally
# prefixes a domain, e.g. '.example.com') -- confirm the intended hosts.
ALLOWED_HOSTS = ['.']

# where will nginx look for static files for production?
# collect all static files by running ./manage.py collectstatic
STATIC_URL = '/static/'
STATIC_ROOT = '{{crates_dir}}'

# Directory for CAS files (served via X_SENDFILE below), made absolute.
CAS_DIRECTORY = abspath('{{cas_dir}}')

# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# http://wiki.nginx.org/XSendfile
# Faster serving of CAS files. Backed by nginx using Django to authenticate the
# request.
X_SENDFILE = True
| mit | Python |
|
4143f5381b8ff47a80a550065e831c306551cd77 | solve problem 035 | ringsd/projecteuler | python/035.py | python/035.py |
def base10_to_base2(n):
    """Return the binary representation of the non-negative integer n,
    encoded as a base-10 integer whose digits are the binary digits
    (e.g. 5 -> 101, 6 -> 110, 0 -> 0).
    """
    if n == 0:
        return 0
    # Floor division keeps the recursion on integers under both Python 2
    # and Python 3 (plain / yields a float on Python 3 and would break
    # the recursion).  Also dropped the unused local `base2n`.
    return base10_to_base2(n // 2) * 10 + n % 2
def palindromes(s):
    """Return True if the sequence s reads the same forwards and backwards.

    Equivalent to the original index-comparison loop, but the reversed-slice
    comparison is clearer, avoids the Python-2-only `str_len / 2` floor
    division, and is safe for the empty string (the old loop indexed
    s[str_len - i - 1] and raised IndexError on "").
    """
    return s == s[::-1]
def solve_35():
    # NOTE(review): despite the file/function name (problem 035), this
    # actually solves Project Euler problem 36 (double-base palindromes):
    # sum every number below one million that is palindromic in both
    # base 10 and base 2.
    sum = 0  # NOTE: shadows the built-in sum() within this function
    for i in range( 1, 1000001 ):
        if palindromes( str(i) ):
            #print i
            base2n = base10_to_base2( i )
            if palindromes( str(base2n) ):
                sum = sum + i
                print i
    print sum

solve_35()
| mit | Python |
|
a5a7d6c3097571a9ef050a75127a2eb24ad2746c | Remove test code. | pearsontechnology/st2contrib,pearsontechnology/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,pidah/st2contrib,armab/st2contrib,StackStorm/st2contrib,armab/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib | packs/alertlogic/actions/scan_list_scan_executions.py | packs/alertlogic/actions/scan_list_scan_executions.py | #!/usr/bin/env python
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import requests
import json
import os
import yaml
from getpass import getpass
from st2actions.runners.pythonrunner import Action
from lib import GetScanList
from lib import GetScanExecutions
class ListScanExecutions(Action):
    def run(self, scan_title, customer_id=None):
        """List the executions of the Alert Logic scan named *scan_title*.

        :param scan_title: title of the scan to look up.
        :param customer_id: optional Alert Logic customer id; ``None``
            (or ``0``, which ChatOps sends instead of ``None``) means the
            account from the pack configuration.
        :return: the scan-execution data from GetScanExecutions.
        :raises KeyError: if no scan titled *scan_title* exists.
        """
        # ChatOps is not passing None, so normalize 0 back to None.
        if customer_id == 0:
            customer_id = None

        # Removed the dead local `results = {}` and the placeholder
        # docstring left over from the action template.
        scans = GetScanList(self.config, customer_id)
        return GetScanExecutions(self.config, scans[scan_title]['id'])
| #!/usr/bin/env python
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import requests
import json
import os
import yaml
from getpass import getpass
from st2actions.runners.pythonrunner import Action
from lib import GetScanList
from lib import GetScanExecutions
class ListScanExecutions(Action):
def run(self, scan_title, customer_id=None):
"""
The template class for
Returns: An blank Dict.
Raises:
ValueError: On lack of key in config.
"""
# Set up the results
results = {}
# ChatOps is not passing None, so catch 0...
if customer_id == 0:
customer_id = None
scans = GetScanList(self.config, customer_id)
return GetScanExecutions(self.config, scans[scan_title]['id'])
if __name__ == '__main__':
config_file = "/home/jjm/src/our-configs/alertlogic.yaml"
with open(config_file) as f:
config = yaml.safe_load(f)
action = ListScanExecutions(config)
ScanId = action.run(scan_title="ACI - RDG3 - Martin")
print(json.dumps( ScanId,
sort_keys=True, indent=2))
| apache-2.0 | Python |
# Minimal Django settings used only for running the object_tools tests.
DEBUG = True

DATABASE_ENGINE = 'sqlite3'  # legacy setting kept alongside DATABASES
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',  # throwaway in-memory database per run
    }
}

SECRET_KEY = '123'  # dummy value; fine for tests, never for production

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    # NOTE(review): the fix commit deliberately moved object_tools above
    # django.contrib.admin -- the relative order of these two matters.
    'object_tools',
    'django.contrib.admin',
    'object_tools.tests'
]

ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| DEBUG = True
DATABASE_ENGINE = 'sqlite3'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = '123'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'object_tools',
'object_tools.tests',
]
ROOT_URLCONF = 'object_tools.tests.urls'
STATIC_URL = '/static/'
| bsd-3-clause | Python |
35296b1c87a86a87fbcf317e26a497fc91c287c7 | Update receiver to catch value error | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos | lexos/receivers/kmeans_receiver.py | lexos/receivers/kmeans_receiver.py | from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
    """Typed, immutable container for the K-means options collected
    from the front end (see KMeansReceiver.options_from_front_end)."""
    n_init: int  # number of runs with different centroid seeds.
    k_value: int  # k: the number of clusters to form.
    max_iter: int  # maximum number of iterations per run.
    tolerance: float  # relative inertia tolerance to declare convergence.
    init_method: str  # centroid initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
    def options_from_front_end(self) -> KMeansOption:
        """Collect the K-means options submitted by the front end.

        :return: a KMeansOption tuple holding every option.
        """
        data = self._front_end_data
        iterations = int(data['n_init'])
        iteration_cap = int(data['max_iter'])
        convergence_tol = float(data['tolerance'])
        chosen_init = data['init']
        # Fall back to half the number of active files when the k value
        # from the front end is empty or not a valid integer.
        try:
            clusters = int(data['nclusters'])
        except ValueError:
            active_files = FileManagerModel().load_file_manager().\
                get_active_files()
            clusters = int(len(active_files) / 2)
        return KMeansOption(n_init=iterations,
                            k_value=clusters,
                            max_iter=iteration_cap,
                            tolerance=convergence_tol,
                            init_method=chosen_init)
| from typing import NamedTuple
from lexos.models.filemanager_model import FileManagerModel
from lexos.receivers.base_receiver import BaseReceiver
class KMeansOption(NamedTuple):
"""The typed tuple to hold kmeans options."""
n_init: int # number of iterations with different centroids.
k_value: int # k value-for k-means analysis. (k groups)
max_iter: int # maximum number of iterations.
tolerance: float # relative tolerance, inertia to declare convergence.
init_method: str # method of initialization: "K++" or "random".
class KMeansReceiver(BaseReceiver):
def options_from_front_end(self) -> KMeansOption:
"""Get the K-means option from front end.
:return: a KmeansOption object to hold all the options.
"""
n_init = int(self._front_end_data['n_init'])
k_value = int(self._front_end_data['nclusters'])
max_iter = int(self._front_end_data['max_iter'])
tolerance = float(self._front_end_data['tolerance'])
init_method = self._front_end_data['init']
# Check if no input, use the default k value.
if k_value == '':
k_value = int(len(FileManagerModel().load_file_manager().
get_active_files()) / 2)
return KMeansOption(n_init=n_init,
k_value=k_value,
max_iter=max_iter,
tolerance=tolerance,
init_method=init_method)
| mit | Python |
89d83b9ca8c1c52537aae0c5339b0cb5ae64c6c4 | Add additional test for template filters: for filter queries and filter with variable argument | GrAndSE/lighty,GrAndSE/lighty-template | tests/filters.py | tests/filters.py | """Test cases for variable fields
"""
import unittest
from lighty.templates import Template
from lighty.templates.filter import filter_manager
def simple_filter(value):
    """Template filter: coerce the value to a string and upper-case it."""
    text = str(value)
    return text.upper()
filter_manager.register(simple_filter)
def argument_filter(value, arg):
    """Template filter: join the value and a single argument with ', '."""
    return ', '.join((str(value), str(arg)))
filter_manager.register(argument_filter)
def multiarg_filter(value, *args):
    """Template filter: join the value and every extra argument with ', '."""
    return ', '.join(str(part) for part in (value,) + args)
filter_manager.register(multiarg_filter)
class TemplateFiltersTestCase(unittest.TestCase):
    """Tests for applying template filters: no-argument, literal-argument,
    multi-argument, chained, and variable-argument forms.
    """

    def assertResult(self, result, value):
        # Helper assertion that reports both the actual and the expected
        # rendering in the failure message.
        assert result == value, 'Error template execution: %s' % ' '.join((
                result, 'except', value))

    def testSimpleFilter(self):
        # {{ var|filter }} -- filter takes no arguments.
        simple_template = Template(name='simple-filter.html')
        simple_template.parse("{{ simple_var|simple_filter }}")
        result = simple_template.execute({'simple_var': 'Hello'})
        self.assertResult(result, 'HELLO')

    def testArgFilter(self):
        # {{ var|filter:"literal" }} -- one quoted literal argument.
        argument_template = Template(name='argument-filter.html')
        argument_template.parse('{{ simple_var|argument_filter:"world" }}')
        result = argument_template.execute({'simple_var': 'Hello'})
        self.assertResult(result, 'Hello, world')

    def testMultiargFilter(self):
        # {{ var|filter:"a" "b" }} -- several space-separated arguments.
        multiarg_template = Template(name='multiarg-filter.html')
        multiarg_template.parse(
                '{{ simple_var|multiarg_filter:"John" "Peter" }}')
        result = multiarg_template.execute({'simple_var': 'Hello'})
        self.assertResult(result, 'Hello, John, Peter')

    def testMultiFilter(self):
        # Chained filters apply left to right.
        multifilter_template = Template(name='multifilter.html')
        multifilter_template.parse(
                '{{ simple_var|simple_filter|argument_filter:"world" }}')
        result = multifilter_template.execute({'simple_var': 'Hello'})
        self.assertResult(result, 'HELLO, world')

    def testVaribaleArgFilter(self):
        # Filter argument supplied from a context variable rather than a
        # literal.  (Method-name typo "Varibale" kept: test() below refers
        # to it by this exact name.)
        varargfilter_template = Template(name='vararg-filter.html')
        varargfilter_template.parse('{{ simple_var|argument_filter:arg }}')
        result = varargfilter_template.execute({
                'simple_var': 'Hello',
                'arg': 'world'
        })
        self.assertResult(result, 'Hello, world')
def test():
    """Build the suite containing every template-filter test case."""
    suite = unittest.TestSuite()
    case_names = ('testSimpleFilter', 'testArgFilter', 'testMultiargFilter',
                  'testMultiFilter', 'testVaribaleArgFilter')
    for case_name in case_names:
        suite.addTest(TemplateFiltersTestCase(case_name))
    return suite
| """Test cases for variable fields
"""
import unittest
from lighty.templates import Template
from lighty.templates.filter import filter_manager
def simple_filter(value):
return str(value).upper()
filter_manager.register(simple_filter)
def argument_filter(value, arg):
return str(value) + ', ' + str(arg)
filter_manager.register(argument_filter)
def multiarg_filter(value, *args):
return ', '.join([str(arg) for arg in (value, ) + args])
filter_manager.register(multiarg_filter)
class TemplateFiltersTestCase(unittest.TestCase):
"""Test case for block template tag
"""
def assertResult(self, result, value):
assert result == value, 'Error emplate execution: %s' % ' '.join((
result, 'except', value))
def testSimpleFilter(self):
simple_template = Template(name='simple-filter.html')
simple_template.parse("{{ simple_var|simple_filter }}")
result = simple_template.execute({'simple_var': 'Hello'})
self.assertResult(result, 'HELLO')
def testArgFilter(self):
argument_template = Template(name='argument-filter.html')
argument_template.parse('{{ simple_var|argument_filter:"world" }}')
result = argument_template.execute({'simple_var': 'Hello'})
self.assertResult(result, 'Hello, world')
def testMultiargFilter(self):
multiarg_template = Template(name='multiarg-filter.html')
multiarg_template.parse(
'{{ simple_var|multiarg_filter:"John" "Peter" }}')
result = multiarg_template.execute({'simple_var': 'Hello'})
self.assertResult(result, 'Hello, John, Peter')
def test():
suite = unittest.TestSuite()
suite.addTest(TemplateFiltersTestCase('testSimpleFilter'))
suite.addTest(TemplateFiltersTestCase('testArgFilter'))
suite.addTest(TemplateFiltersTestCase('testMultiargFilter'))
return suite
| bsd-3-clause | Python |
8874af7c0db371f63da687c5398db1c7b80f58cd | Fix import of django during install time (for environments like Heroku) (#120) | shacker/django-todo,shacker/django-todo,shacker/django-todo | todo/__init__.py | todo/__init__.py | """
A multi-user, multi-group task management and assignment system for Django.
"""
# Package metadata.
__version__ = "2.4.10"
__author__ = "Scot Hacker"
__email__ = "[email protected]"
__url__ = "https://github.com/shacker/django-todo"
__license__ = "BSD License"

# Importing .check pulls in Django; during installation (e.g. on Heroku)
# Django may not be installed yet, so tolerate a failed import.
try:
    from . import check
except ModuleNotFoundError:
    # this can happen during install time, if django is not installed yet!
    pass
| """
A multi-user, multi-group task management and assignment system for Django.
"""
__version__ = "2.4.10"
__author__ = "Scot Hacker"
__email__ = "[email protected]"
__url__ = "https://github.com/shacker/django-todo"
__license__ = "BSD License"
from . import check
| bsd-3-clause | Python |
4d2f3ee1343b9aef24f599b8acd07ed8340f0bff | convert that to a list so we can measure it's len in a template | tndatacommons/tndata_backend,izzyalonso/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend | tndata_backend/notifications/views.py | tndata_backend/notifications/views.py | from collections import defaultdict
from django.contrib.auth.decorators import user_passes_test
from django.contrib import messages
from django.shortcuts import render, redirect
from . import queue
from .models import GCMMessage
@user_passes_test(lambda u: u.is_staff, login_url='/')
def dashboard(request):
    """Render a staff-only dashboard of enqueued GCM notifications.

    Builds a list (not a generator) of (job, scheduled_for, message_data)
    triples so templates can measure its length.
    """
    jobs = queue.messages()  # enqueued (job, scheduled_for) pairs
    ids = [job.args[0] for job, _ in jobs]

    # Map each GCMMessage pk to the display fields the template needs.
    # NOTE: the rows variable was previously named `messages`, which
    # shadowed the django.contrib.messages import used in this module.
    message_data = defaultdict(dict)
    fields = ['id', 'title', 'user__email', 'message']
    rows = GCMMessage.objects.filter(pk__in=ids).values_list(*fields)
    for mid, title, email, message in rows:
        message_data[mid] = {
            'id': mid,
            'title': title,
            'email': email,
            'message': message,
        }

    # Materialized as a list so the template can call len() on it.
    jobs = [
        (job, scheduled_for, message_data[job.args[0]])
        for job, scheduled_for in jobs
    ]
    context = {
        'jobs': jobs,
        'metrics': ['GCM Message Sent', 'GCM Message Scheduled', ]
    }
    return render(request, "notifications/index.html", context)
@user_passes_test(lambda u: u.is_staff, login_url='/')
def cancel_job(request):
    """Look for an enqueued job with the given ID and cancel it.

    POST-only; always redirects back to the notifications dashboard.
    """
    job_id = request.POST.get('job_id', None)
    if request.method == "POST" and job_id:
        # Linear scan of the queue; stop at the first (unique) id match.
        for job, _ in queue.messages():
            if job.id == job_id:
                job.cancel()
                messages.success(request, "That notification has been cancelled")
                break
    return redirect("notifications:dashboard")
@user_passes_test(lambda u: u.is_staff, login_url='/')
def cancel_all_jobs(request):
    """Cancel every queued GCM message (POST-only), then redirect
    back to the notifications dashboard with a count of cancellations."""
    if request.method == "POST":
        count = 0
        for job, _ in queue.messages():
            job.cancel()
            count += 1
        messages.success(request, "Cancelled {} notifications.".format(count))
    return redirect("notifications:dashboard")
| from collections import defaultdict
from django.contrib.auth.decorators import user_passes_test
from django.contrib import messages
from django.shortcuts import render, redirect
from . import queue
from .models import GCMMessage
@user_passes_test(lambda u: u.is_staff, login_url='/')
def dashboard(request):
"""A simple dashboard for enqueued GCM notifications."""
jobs = queue.messages() # Get the enqueued messages
ids = [job.args[0] for job, _ in jobs]
message_data = defaultdict(dict)
fields = ['id', 'title', 'user__email', 'message']
messages = GCMMessage.objects.filter(pk__in=ids).values_list(*fields)
for msg in messages:
mid, title, email, message = msg
message_data[mid] = {
'id': mid,
'title': title,
'email': email,
'message': message,
}
jobs = (
(job, scheduled_for, message_data[job.args[0]])
for job, scheduled_for in jobs
)
context = {
'jobs': jobs,
'metrics': ['GCM Message Sent', 'GCM Message Scheduled', ]
}
return render(request, "notifications/index.html", context)
@user_passes_test(lambda u: u.is_staff, login_url='/')
def cancel_job(request):
"""Look for an enqueued job with the given ID and cancel it."""
job_id = request.POST.get('job_id', None)
if request.method == "POST" and job_id:
for job, _ in queue.messages():
if job.id == job_id:
job.cancel()
messages.success(request, "That notification has been cancelled")
break
return redirect("notifications:dashboard")
@user_passes_test(lambda u: u.is_staff, login_url='/')
def cancel_all_jobs(request):
"""Cancels all queued messages."""
if request.method == "POST":
count = 0
for job, _ in queue.messages():
job.cancel()
count += 1
messages.success(request, "Cancelled {} notifications.".format(count))
return redirect("notifications:dashboard")
| mit | Python |
6618ea7c1b67d87acff86338415e2a322a01cc3c | add loopback support | danudey/pypcap,danudey/pypcap | testsniff.py | testsniff.py | #!/usr/bin/env python
import getopt, sys
import dpkt, pcap
def usage():
print >>sys.stderr, 'usage: %s [-i device] [pattern]' % sys.argv[0]
sys.exit(1)
def main():
    # Parse '-i <device>' for the capture interface; any other flag
    # prints usage and exits.
    opts, args = getopt.getopt(sys.argv[1:], 'i:h')
    name = None
    for o, a in opts:
        if o == '-i': name = a
        else: usage()

    pc = pcap.pcap(name)
    pc.setfilter(' '.join(args))  # remaining args form the capture filter
    # Choose a dpkt decoder matching the datalink type, adding loopback
    # support (DLT_LOOP / DLT_NULL) alongside Ethernet.
    decode = { pcap.DLT_LOOP:dpkt.loopback.Loopback,
               pcap.DLT_NULL:dpkt.loopback.Loopback,
               pcap.DLT_EN10MB:dpkt.ethernet.Ethernet }[pc.datalink()]
    try:
        print 'listening on %s: %s' % (pc.name, pc.filter)
        for ts, pkt in pc:
            print ts, `decode(pkt)`
    except KeyboardInterrupt:
        # Ctrl-C ends the capture; report libpcap statistics.
        nrecv, ndrop, nifdrop = pc.stats()
        print '\n%d packets received by filter' % nrecv
        print '%d packets dropped by kernel' % ndrop
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import getopt, sys
import pcap
from dpkt.ethernet import Ethernet
def usage():
print >>sys.stderr, 'usage: %s [-i device] [pattern]' % sys.argv[0]
sys.exit(1)
def main():
opts, args = getopt.getopt(sys.argv[1:], 'i:h')
name = None
for o, a in opts:
if o == '-i': name = a
else: usage()
pc = pcap.pcap(name)
pc.setfilter(' '.join(args))
try:
print 'listening on %s: %s' % (pc.name, pc.filter)
for ts, pkt in pc:
print ts, `Ethernet(pkt)`
except KeyboardInterrupt:
nrecv, ndrop, nifdrop = pc.stats()
print '\n%d packets received by filter' % nrecv
print '%d packets dropped by kernel' % ndrop
if __name__ == '__main__':
main()
| bsd-3-clause | Python |
a1bda82bd06cbfd12e6074f22cb31d88f2abd96a | update py +x | ray26/google-hosts,ayyb1988/google-hosts,ChaneyZhao/google-hosts,SolaWing/google-hosts,ray26/google-hosts,bgarrels/google-hosts,xiaozihan2011/google-hosts,9618211/google-hosts,EasonYi/google-hosts,DavikChen/google-hosts,manfy/google-hosts,monkeytest15/google-hosts,peterdocter/google-hosts,shenleilei/google-hosts,fengshao0907/google-hosts,txthinking/google-hosts,peterdocter/google-hosts,wangsaizzy/google-hosts,DropFan/google-hosts,ghbhaha/google-hosts,wang1984717366/TTOO,bgarrels/google-hosts,takeshineshiro/google-hosts,sugar2010/google-hosts,qipa/google-hosts,everhopingandwaiting/google-hosts,IveWong/google-hosts,sndnvaps/google-hosts,wang1984717366/TTOO,jiachenning/google-hosts,IveWong/google-hosts,robert4571/google-hosts,takeshineshiro/google-hosts,jiachenning/google-hosts,kingideayou/google-hosts,kingideayou/google-hosts,qipa/google-hosts,danny200309/google-hosts,zhuyingtao/google-hosts,DavikChen/google-hosts,danny200309/google-hosts,DropFan/google-hosts,caijinyan/google-hosts,manfy/google-hosts,sndnvaps/google-hosts,sugar2010/google-hosts,shenleilei/google-hosts,ayyb1988/google-hosts,fengzai/google-hosts,DropFan/google-hosts,wangsaizzy/google-hosts,robert4571/google-hosts,fengshao0907/google-hosts,9618211/google-hosts,everhopingandwaiting/google-hosts,monkeytest15/google-hosts,txthinking/google-hosts,caijinyan/google-hosts,yuzaipiaofei/google-hosts,ray26/google-hosts,txthinking/google-hosts,zhuyingtao/google-hosts,yuzaipiaofei/google-hosts,EasonYi/google-hosts | tools/fuckGFW.py | tools/fuckGFW.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Update hosts for *nix
Author: [email protected]
Version: 0.0.1
Date: 2012-10-24 14:35:39
'''
import urllib2
import os
import sys
HOSTS_PATH = "/etc/hosts"
HOSTS_SOURCE = "http://tx.txthinking.com/hosts"
SEARCH_STRING = "#TX-HOSTS"
def GetRemoteHosts(url):
    """Download the hosts file at *url* and return it as a list of lines.

    Network failures propagate to the caller (main() catches IOError).
    """
    f = urllib2.urlopen(url, timeout=5)
    hosts = [line for line in f]
    f.close()
    return hosts
def main():
    # Fetch the remote hosts entries first; bail out on network failure.
    try:
        hosts = GetRemoteHosts(HOSTS_SOURCE)
    except IOError:
        print "Could't connect to %s. Try again." % HOSTS_SOURCE
        sys.exit(1)

    # Keep everything in the local hosts file that appears above the
    # SEARCH_STRING marker; the downloaded entries replace whatever
    # followed it.
    yours = ""
    if os.path.isfile(HOSTS_PATH):
        f = open(HOSTS_PATH, "r")
        for line in f:
            if SEARCH_STRING in line:
                break
            yours += line
        f.close()
        os.rename(HOSTS_PATH, HOSTS_PATH + ".BAK")  # keep a backup copy

    # Rewrite the hosts file: local prefix + marker + downloaded entries.
    yours += SEARCH_STRING + "\n"
    fp = open(HOSTS_PATH, "w")
    fp.write(yours)
    fp.writelines(hosts)
    fp.close()
    print "Success"
if __name__ == "__main__":
main()
| '''
Update hosts for *nix
Author: [email protected]
Version: 0.0.1
Date: 2012-10-24 14:35:39
'''
import urllib2
import os
import sys
HOSTS_PATH = "/etc/hosts"
HOSTS_SOURCE = "http://tx.txthinking.com/hosts"
SEARCH_STRING = "#TX-HOSTS"
def GetRemoteHosts(url):
f = urllib2.urlopen(url, timeout=5)
hosts = [line for line in f]
f.close()
return hosts
def main():
try:
hosts = GetRemoteHosts(HOSTS_SOURCE)
except IOError:
print "Could't connect to %s. Try again." % HOSTS_SOURCE
sys.exit(1)
yours = ""
if os.path.isfile(HOSTS_PATH):
f = open(HOSTS_PATH, "r")
for line in f:
if SEARCH_STRING in line:
break
yours += line
f.close()
os.rename(HOSTS_PATH, HOSTS_PATH + ".BAK")
yours += SEARCH_STRING + "\n"
fp = open(HOSTS_PATH, "w")
fp.write(yours)
fp.writelines(hosts)
fp.close()
print "Success"
if __name__ == "__main__":
main()
| mit | Python |
145e9141af1e1abdf0a9ab3c035ed8df6298ba0f | rebase migration dependency. | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/migrations/0015_expert_bio_add_max_length_validation.py | accelerator/migrations/0015_expert_bio_add_max_length_validation.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-25 15:00
from __future__ import unicode_literals
import django.core.validators
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
    # NOTE(review): rebased per the commit subject -- this migration now
    # follows 0014_alter_fluent_page_type_managers instead of
    # 0013_allocator.
    dependencies = [
        ('accelerator', '0014_alter_fluent_page_type_managers'),
    ]

    operations = [
        # Cap ExpertProfile.bio at 7500 characters.
        migrations.AlterField(
            model_name='expertprofile',
            name='bio',
            field=models.TextField(blank=True, default='', validators=[
                django.core.validators.MaxLengthValidator(7500)]),
        ),
    ]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-25 15:00
from __future__ import unicode_literals
import django.core.validators
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0013_allocator'),
]
operations = [
migrations.AlterField(
model_name='expertprofile',
name='bio',
field=models.TextField(blank=True, default='', validators=[
django.core.validators.MaxLengthValidator(7500)]),
),
]
| mit | Python |
837f05228fac7f6addd28069c6387f798e01ff8c | Add checksum test. | andrewguy9/farmfs,andrewguy9/farmfs | tests/test_fs.py | tests/test_fs.py | from farmfs.fs import normpath as _normalize
from farmfs.fs import userPath2Path as up2p
from farmfs.fs import Path
import pytest
def test_create_path():
    # Path() accepts a string, another Path, or (segment, parent-Path);
    # other combinations are rejected.
    p1 = Path("/")
    p2 = Path("/a")
    p2 = Path("/a/b")
    p3 = Path(p1)
    p4 = Path("a", p1)
    with pytest.raises(AssertionError):
        p5 = Path("/a/b", p2)  # absolute segment plus a parent is invalid
    with pytest.raises(ValueError):
        p6 = Path(None)
    with pytest.raises(ValueError):
        p7 = Path(None, p1)
    with pytest.raises(AssertionError):
        p8 = Path("a", "b")  # the parent must be a Path, not a str

def test_normalize_abs():
    # Absolute paths: trailing and doubled separators collapse.
    assert _normalize("/") == "/"
    assert _normalize("/a") == "/a"
    assert _normalize("/a/") == "/a"
    assert _normalize("/a/b") == "/a/b"
    assert _normalize("/a/b/") == "/a/b"
    assert _normalize("/a//b") == "/a/b"
    assert _normalize("/a//b//") == "/a/b"

def test_normalize_relative():
    # Relative paths get the same cleanup without gaining a leading slash.
    assert _normalize("a") == "a"
    assert _normalize("a/") == "a"
    assert _normalize("a/b") == "a/b"
    assert _normalize("a/b/") == "a/b"
    assert _normalize("a//b") == "a/b"
    assert _normalize("a//b//") == "a/b"

def test_userPath2Path():
    # Relative user input resolves against the base; absolute input wins.
    assert up2p("c", Path("/a/b")) == Path("/a/b/c")
    assert up2p("/c", Path("/a/b")) == Path("/c")

def test_cmp():
    # Paths order by comparison operators.
    assert Path("/a/b") < Path("/a/c")
    assert Path("/a/c") > Path("/a/b")
    assert Path("/a/2") < Path("/b/1")
    assert Path("/") < Path("/a")

@pytest.mark.skip(reason="bugs not impacting development at moment.")
def test_relative_to():
    # Documents the intended relative_to() behaviour, including the
    # '../' form for non-ancestors; skipped until the bugs are fixed.
    assert Path("/a/b").relative_to(Path("/")) == "a/b"
    assert Path("/a/b").relative_to(Path("/a")) == "b"
    assert Path("/a/b/c").relative_to(Path("/a")) == "b/c"
    assert Path("/a/b/c").relative_to(Path("/a/b")) == "c"
    assert Path("/a/b").relative_to(Path("/c")) == "../a/b"

@pytest.mark.parametrize(
    "input,expected",
    [
        (b'', u"d41d8cd98f00b204e9800998ecf8427e"),
        (b'abc', u"900150983cd24fb0d6963f7d28e17f72"),
        (b'\xea\x80\x80abcd\xde\xb4', u'b8c6dee81075e87d348522b146c95ae3'),
    ],)
def test_checksum_empty(tmp_path, input, expected):
    # Write the bytes to a temp file and compare the file checksum against
    # the expected hex digest.  NOTE(review): the name says "empty" but the
    # parametrized cases cover non-empty input too; `input` shadows the
    # built-in.
    tmp = Path(str(tmp_path))
    fp = tmp.join("empty.txt")
    with fp.open("wb") as fd:
        fd.write(input)
    assert fp.checksum() == expected
| from farmfs.fs import normpath as _normalize
from farmfs.fs import userPath2Path as up2p
from farmfs.fs import Path
import pytest
def test_create_path():
p1 = Path("/")
p2 = Path("/a")
p2 = Path("/a/b")
p3 = Path(p1)
p4 = Path("a", p1)
with pytest.raises(AssertionError):
p5 = Path("/a/b", p2)
with pytest.raises(ValueError):
p6 = Path(None)
with pytest.raises(ValueError):
p7 = Path(None, p1)
with pytest.raises(AssertionError):
p8 = Path("a", "b")
def test_normalize_abs():
assert _normalize("/") == "/"
assert _normalize("/a") == "/a"
assert _normalize("/a/") == "/a"
assert _normalize("/a/b") == "/a/b"
assert _normalize("/a/b/") == "/a/b"
assert _normalize("/a//b") == "/a/b"
assert _normalize("/a//b//") == "/a/b"
def test_normalize_relative():
assert _normalize("a") == "a"
assert _normalize("a/") == "a"
assert _normalize("a/b") == "a/b"
assert _normalize("a/b/") == "a/b"
assert _normalize("a//b") == "a/b"
assert _normalize("a//b//") == "a/b"
def test_userPath2Path():
assert up2p("c", Path("/a/b")) == Path("/a/b/c")
assert up2p("/c", Path("/a/b")) == Path("/c")
def test_cmp():
assert Path("/a/b") < Path("/a/c")
assert Path("/a/c") > Path("/a/b")
assert Path("/a/2") < Path("/b/1")
assert Path("/") < Path("/a")
| mit | Python |
9786c5f242f2b70240e7bb23c866c864cb4ed4ca | Add registrations to admin | small-worlds/expedition-manager,small-worlds/expedition-manager | expeditions/admin.py | expeditions/admin.py | from django.contrib import admin
from expeditions.models import Expedition, Waypoint, Registration
# Register your models here.
# Django admin configuration for the expedition manager models.
class ExpeditionAdmin(admin.ModelAdmin):
    list_display = ('id', 'name', 'start_date', 'end_date', 'published')
    list_display_links = ('id', 'name')  # both columns open the change form
    search_fields = ('name', 'start_date')
    list_filter = ('published', )

class WaypointAdmin(admin.ModelAdmin):
    list_display = ('id', 'expedition', 'name', 'system', 'planet', 'datetime')
    list_display_links = ('id', 'name')
    list_filter = ('expedition', )
    search_fields = ('name', 'expedition__name', 'system', 'planet', 'datetime')

class RegistrationAdmin(admin.ModelAdmin):
    list_display = ('id', 'user', 'expedition', 'registration_number')
    list_display_links = ('id', 'user')
    list_filter = ('expedition', 'user')
    # Double-underscore lookups search across the related user/expedition.
    search_fields = ('user__username', 'expedition__name')

admin.site.register(Expedition, ExpeditionAdmin)
admin.site.register(Waypoint, WaypointAdmin)
admin.site.register(Registration, RegistrationAdmin)
from expeditions.models import Expedition, Waypoint
# Register your models here.
class ExpeditionAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'start_date', 'end_date', 'published')
search_fields = ('name', 'start_date')
list_filter = ('published', )
class WaypointAdmin(admin.ModelAdmin):
list_display = ('id', 'expedition', 'name', 'system', 'planet', 'datetime')
list_filter = ('expedition', )
search_fields = ('name', 'expedition__name', 'system', 'planet', 'datetime')
admin.site.register(Expedition, ExpeditionAdmin)
admin.site.register(Waypoint, WaypointAdmin) | mit | Python |
ab23ea60457720d0a7414b1b84191945f529b23c | Update _version.py | theno/fabsetup,theno/fabsetup | fabsetup/_version.py | fabsetup/_version.py | __version__ = "0.7.9" # semantic versioning: https://semver.org
| __version__ = "0.7.9"
| mit | Python |
8c4edd4cc8fdd6c7c470e25436b6c6b4c146ad58 | Fix error casting datetime objects | dicortazar/xen-code-review-analysis | data-analysis/utils.py | data-analysis/utils.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Daniel Izquierdo <[email protected]>
# Santiago Dueñas <[email protected]>
#
from __future__ import absolute_import
try:
import configparser
except ImportError:
import ConfigParser as configparser
import elasticsearch
import numpy
try:
import pymysql as mysql
except ImportError:
import MySQLdb as mysql
def read_config_file(filepath):
"""Read configuration file"""
cfg_parser = configparser.SafeConfigParser()
cfg_parser.read(filepath)
config = {}
for section in ['mysql', 'elasticsearch']:
if section not in cfg_parser.sections():
cause = "Section %s not found in the %s file" % (section, filepath)
raise KeyError(cause)
config[section] = dict(cfg_parser.items(section))
return config
def to_dict(row, columns):
"""Translates from tuple to a dict"""
d = {}
for column in columns:
value = row[columns.index(column) + 1]
if isinstance(value, numpy.int64):
value = int(value)
elif isinstance(value, numpy.float64):
value = float(value)
d[column] = value
return d
def create_mysql_connection(user, password, host, db):
"""Connect to a MySQL server"""
db = mysql.connect(host=host, user=user, passwd=password, db=db,
charset='utf8')
return db.cursor()
def execute_mysql_query(conn, query):
"""Execute a MySQL query"""
n = int(conn.execute(query))
results = conn.fetchall() if n else []
return results
def create_elasticsearch_connection(url, user, password):
"""Connect to a ES server"""
conn = elasticsearch.Elasticsearch([url], http_auth=(user, password))
return conn
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Daniel Izquierdo <[email protected]>
# Santiago Dueñas <[email protected]>
#
from __future__ import absolute_import
try:
import configparser
except ImportError:
import ConfigParser as configparser
import datetime
import elasticsearch
import numpy
try:
import pymysql as mysql
except ImportError:
import MySQLdb as mysql
def read_config_file(filepath):
"""Read configuration file"""
cfg_parser = configparser.SafeConfigParser()
cfg_parser.read(filepath)
config = {}
for section in ['mysql', 'elasticsearch']:
if section not in cfg_parser.sections():
cause = "Section %s not found in the %s file" % (section, filepath)
raise KeyError(cause)
config[section] = dict(cfg_parser.items(section))
return config
def to_dict(row, columns):
"""Translates from tuple to a dict"""
d = {}
for column in columns:
value = row[columns.index(column) + 1]
if isinstance(value, numpy.int64):
value = int(value)
elif isinstance(value, numpy.float64):
value = float(value)
elif isinstance(value, datetime.datetime):
value = str(value)
else:
value = str(value)
d[column] = value
return d
def create_mysql_connection(user, password, host, db):
"""Connect to a MySQL server"""
db = mysql.connect(host=host, user=user, passwd=password, db=db,
charset='utf8')
return db.cursor()
def execute_mysql_query(conn, query):
"""Execute a MySQL query"""
n = int(conn.execute(query))
results = conn.fetchall() if n else []
return results
def create_elasticsearch_connection(url, user, password):
"""Connect to a ES server"""
conn = elasticsearch.Elasticsearch([url], http_auth=(user, password))
return conn
| artistic-2.0 | Python |
9f405a3b4e01ee0a42a8530cfc5b509a38067250 | Remove unused import | reinikai/mugloar | mugloar/dragon.py | mugloar/dragon.py |
class Dragon:
# By default, stay home.
scaleThickness = 0
clawSharpness = 0
wingStrength = 0
fireBreath = 0
def __init__(self, weather_code):
if weather_code == 'T E':
# Draught requires a 'balanced' dragon, ha ha
self.scaleThickness = 5
self.clawSharpness = 5
self.wingStrength = 5
self.fireBreath = 5
elif weather_code == 'FUNDEFINEDG':
# Fog means we're unseen, no need to fly
self.scaleThickness = 8
self.clawSharpness = 8
self.wingStrength = 0
self.fireBreath = 4
elif weather_code == 'NMR':
self.scaleThickness = 3
self.clawSharpness = 6
self.wingStrength = 5
self.fireBreath = 6
elif weather_code == 'SRO':
# Stay at home if there's a storm.
pass
else:
# Fire is useless in the rain. Additional claw-sharpening is needed to destroy the umbrellaboats
self.scaleThickness = 5
self.clawSharpness = 10
self.wingStrength = 5
self.fireBreath = 0
def get_json(self):
return {"dragon": {
"scaleThickness": self.scaleThickness,
"clawSharpness": self.clawSharpness,
"wingStrength": self.wingStrength,
"fireBreath": self.fireBreath}}
| import json
class Dragon:
# By default, stay home.
scaleThickness = 0
clawSharpness = 0
wingStrength = 0
fireBreath = 0
def __init__(self, weather_code):
if weather_code == 'T E':
# Draught requires a 'balanced' dragon, ha ha
self.scaleThickness = 5
self.clawSharpness = 5
self.wingStrength = 5
self.fireBreath = 5
elif weather_code == 'FUNDEFINEDG':
# Fog means we're unseen, no need to fly
self.scaleThickness = 8
self.clawSharpness = 8
self.wingStrength = 0
self.fireBreath = 4
elif weather_code == 'NMR':
self.scaleThickness = 6
self.clawSharpness = 6
self.wingStrength = 4
self.fireBreath = 4
elif weather_code == 'SRO':
# Stay at home if there's a storm.
pass
else:
# Fire is useless in the rain. Additional claw-sharpening is needed to destroy the umbrellaboats
self.scaleThickness = 5
self.clawSharpness = 10
self.wingStrength = 5
self.fireBreath = 0
def get_json(self):
return {"dragon": {
"scaleThickness": self.scaleThickness,
"clawSharpness": self.clawSharpness,
"wingStrength": self.wingStrength,
"fireBreath": self.fireBreath}}
| mit | Python |
b8701f04d049101c8c92b468b4fc3dc863f1e292 | Add bulk accept and reject for talks | pathunstrom/pygotham,pathunstrom/pygotham,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,PyGotham/pygotham,pathunstrom/pygotham,djds23/pygotham-1,PyGotham/pygotham,pathunstrom/pygotham,djds23/pygotham-1,djds23/pygotham-1,PyGotham/pygotham | pygotham/admin/talks.py | pygotham/admin/talks.py | """Admin for talk-related models."""
from flask.ext.admin import actions
from flask.ext.admin.contrib.sqla import ModelView
from pygotham.admin.utils import model_view
from pygotham.core import db
from pygotham.talks import models
__all__ = ('CategoryModelView', 'talk_model_view', 'TalkReviewModelView')
CATEGORY = 'Talks'
class TalkModelView(ModelView, actions.ActionsMixin):
"""Admin view for :class:`~pygotham.models.Talk`."""
column_filters = ('status', 'duration', 'level')
column_list = ('name', 'status', 'duration', 'level', 'type', 'user')
column_searchable_list = ('name',)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.init_actions()
@actions.action(
'accept', 'Accept', 'Are you sure you want to accept selected models?')
def approve(self, talks):
for pk in talks:
talk = models.Talk.query.get(pk)
talk.status = 'accepted'
self.session.commit()
@actions.action(
'reject', 'Reject', 'Are you sure you want to reject selected models?')
def reject(self, talks):
for pk in talks:
talk = models.Talk.query.get(pk)
talk.status = 'rejected'
self.session.commit()
CategoryModelView = model_view(
models.Category,
'Categories',
CATEGORY,
form_columns=('name', 'slug'),
)
talk_model_view = TalkModelView(
models.Talk, db.session, 'Talks', CATEGORY, 'talks')
TalkReviewModelView = model_view(
models.Talk,
'Review',
CATEGORY,
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
| """Admin for talk-related models."""
from pygotham.admin.utils import model_view
from pygotham.talks import models
__all__ = ('CategoryModelView', 'TalkModelView', 'TalkReviewModelView')
CategoryModelView = model_view(
models.Category,
'Categories',
'Talks',
form_columns=('name', 'slug'),
)
TalkModelView = model_view(
models.Talk,
'Talks',
'Talks',
column_filters=('status', 'duration', 'level'),
column_list=('name', 'status', 'duration', 'level', 'type', 'user'),
column_searchable_list=('name',),
)
TalkReviewModelView = model_view(
models.Talk,
'Review',
'Talks',
can_create=False,
can_delete=False,
column_list=('name', 'status', 'level', 'type', 'user'),
column_searchable_list=('name',),
edit_template='talks/review.html',
)
| bsd-3-clause | Python |
ec261fdaf41bd91558e4df143be8dfd9940bde81 | Rewrite bubble sort. | atariskorpion/simple-coding-solutions,atariskorpion/simple-coding-solutions | py/sorting/05_bubbleSort.py | py/sorting/05_bubbleSort.py | def bubbleSort(A):
for k in range(len(A)-1, 0, -1):
for i in range(k):
if A[i] > A[i+1]:
tempValue = A[i]
A[i] = A[i+1]
A[i+1] = tempValue
return A
print(bubbleSort([54,26,93,17,77,31,44,55,20]))
def bubbleSortReverse(A):
for k in range(len(A)-1, 0, -1):
for i in range(k):
if A[i+1] > A[i]:
tempValue = A[i+1]
A[i+1] = A[i]
A[i] = tempValue
return A
print(bubbleSortReverse([54,26,93,17,77,31,44,55,20]))
| def bubbleSort(A):
tempValue = 0
for k in range(1, len(A)):
flag = 0
for i in range(0, len(A) - k):
if A[i+1] > A[i]:
tempValue = A[i+1]
A[i+1] = A[i]
A[i] = tempValue
flag += 1
if flag == 0:
break
return A
print(bubbleSort([1,4,55,3]))
| mit | Python |
c2a99a33455e3b01ccce3faebd3a541b4a76e579 | Bump version | rigal-m/Yamale,23andMe/Yamale | yamale/__init__.py | yamale/__init__.py | from .yamale import make_schema, make_data, validate
VERSION = (1, 0, 1, 'final', 0)
# Dynamically calculate the version based on VERSION.
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
version = VERSION
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(main + sub)
__version__ = get_version()
| from .yamale import make_schema, make_data, validate
VERSION = (1, 0, 0, 'final', 0)
# Dynamically calculate the version based on VERSION.
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
version = VERSION
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(main + sub)
__version__ = get_version()
| mit | Python |
33b4c181b2d9a3d74f45ee1ced971b5bca58b35b | remove unused import | NetstationMurator/django-treenav,NetstationMurator/django-treenav,caktus/django-treenav,caktus/django-treenav | treenav/admin.py | treenav/admin.py | from django.contrib import admin
from django.contrib.contenttypes import generic
from treenav import models as treenav
from treenav.forms import MenuItemForm, GenericInlineMenuItemForm
class GenericMenuItemInline(generic.GenericStackedInline):
"""
Add this inline to your admin class to support editing related menu items
from that model's admin page.
"""
extra = 0
max_num = 1
model = treenav.MenuItem
form = GenericInlineMenuItemForm
class SubMenuItemInline(admin.TabularInline):
model = treenav.MenuItem
extra = 1
form = MenuItemForm
prepopulated_fields = {'slug': ('label',)}
exclude = ('new_parent',)
class MenuItemAdmin(admin.ModelAdmin):
list_display = (
'menu_items',
'slug',
'label',
'parent',
'link',
'href_link',
'order',
'is_enabled',
)
list_filter = ('parent', 'is_enabled')
raw_id_fields = ('parent',)
prepopulated_fields = {'slug': ('label',)}
inlines = (SubMenuItemInline, )
fieldsets = (
(None, {
'fields': ('new_parent', 'label', 'slug', 'order', 'is_enabled')
}),
('URL', {
'fields': ('link', ('content_type', 'object_id')),
'description': "The URL for this menu item, which can be a "
"fully qualified URL, an absolute URL, a named "
"URL, a path to a Django view, a regular "
"expression, or a generic relation to a model that "
"supports get_absolute_url()"
}),
)
list_editable = ('label',)
form = MenuItemForm
def menu_items(self, obj):
if obj.level == 0:
return obj.label
return ' '*obj.level + '- %s' % obj.label
menu_items.allow_tags = True
def href_link(self, obj):
return '<a href="%s">%s</a>' % (obj.href, obj.href)
href_link.short_description = 'HREF'
href_link.allow_tags = True
admin.site.register(treenav.MenuItem, MenuItemAdmin)
| from django.contrib import admin
from django import forms
from django.contrib.contenttypes import generic
from treenav import models as treenav
from treenav.forms import MenuItemForm, GenericInlineMenuItemForm
class GenericMenuItemInline(generic.GenericStackedInline):
"""
Add this inline to your admin class to support editing related menu items
from that model's admin page.
"""
extra = 0
max_num = 1
model = treenav.MenuItem
form = GenericInlineMenuItemForm
class SubMenuItemInline(admin.TabularInline):
model = treenav.MenuItem
extra = 1
form = MenuItemForm
prepopulated_fields = {'slug': ('label',)}
exclude = ('new_parent',)
class MenuItemAdmin(admin.ModelAdmin):
list_display = (
'menu_items',
'slug',
'label',
'parent',
'link',
'href_link',
'order',
'is_enabled',
)
list_filter = ('parent', 'is_enabled')
raw_id_fields = ('parent',)
prepopulated_fields = {'slug': ('label',)}
inlines = (SubMenuItemInline, )
fieldsets = (
(None, {
'fields': ('new_parent', 'label', 'slug', 'order', 'is_enabled')
}),
('URL', {
'fields': ('link', ('content_type', 'object_id')),
'description': "The URL for this menu item, which can be a "
"fully qualified URL, an absolute URL, a named "
"URL, a path to a Django view, a regular "
"expression, or a generic relation to a model that "
"supports get_absolute_url()"
}),
)
list_editable = ('label',)
form = MenuItemForm
def menu_items(self, obj):
if obj.level == 0:
return obj.label
return ' '*obj.level + '- %s' % obj.label
menu_items.allow_tags = True
def href_link(self, obj):
return '<a href="%s">%s</a>' % (obj.href, obj.href)
href_link.short_description = 'HREF'
href_link.allow_tags = True
admin.site.register(treenav.MenuItem, MenuItemAdmin)
| bsd-3-clause | Python |
6c8a9edb6d733ac680ea2cbcb1c8d12511aa72be | Update webserver.py | NeXTHorizon/nhzpool,NeXTHorizon/nhzpool | webserver.py | webserver.py | #!/usr/bin/env python
# author: [email protected]
import ConfigParser
from bottle import route, install, run, template, static_file, PasteServer
from bottle_sqlite import SQLitePlugin
import json
import urllib
import urllib2
import datetime
config = ConfigParser.RawConfigParser()
config.read('config.ini')
install(SQLitePlugin(dbfile=(config.get("pool", "database"))))
@route('/')
def default():
output = template('default')
return output
@route('/static/:path#.+#', name='static')
def static(path):
return static_file(path, root='static')
@route('/accounts')
def accounts():
poolAccount = json.loads(urllib2.urlopen(config.get("pool", "nhzhost")+"/nhz?requestType=getAccount&account="+config.get("pool", "poolaccount")).read())
clean = poolAccount["lessors"]
output = template('accounts', leased=clean)
return output
@route('/blocks')
def blocks(db):
c = db.execute("SELECT timestamp, block, totalfee FROM blocks WHERE totalfee > 0")
result = c.fetchall()
c.close()
payload = {
'requestType': 'getForging',
'secretPhrase': config.get("pool", "poolphrase")
}
opener = urllib2.build_opener(urllib2.HTTPHandler())
data = urllib.urlencode(payload)
forging = json.loads(opener.open(config.get("pool", "nhzhost")+'/nhz', data=data).read())
getdl = forging["deadline"]
deadline = str(datetime.timedelta(seconds=getdl))
output = template('blocks', rows=result, fg=deadline)
return output
@route('/payouts')
def payouts(db):
c = db.execute("SELECT account, percentage, amount, paid, blocktime FROM accounts")
result = c.fetchall()
output = template('payouts', rows=result)
return output
run(server=PasteServer, port=8888, host='0.0.0.0')
| #!/usr/bin/env python
# author: [email protected]
import ConfigParser
from bottle import route, install, run, template, static_file, PasteServer
from bottle_sqlite import SQLitePlugin
import json
import urllib
import urllib2
import datetime
config = ConfigParser.RawConfigParser()
config.read('config.ini')
install(SQLitePlugin(dbfile=(config.get("pool", "database"))))
@route('/')
def default():
output = template('default')
return output
@route('/static/:path#.+#', name='static')
def static(path):
return static_file(path, root='static')
@route('/accounts')
def accounts():
poolAccount = json.loads(urllib2.urlopen(config.get("pool", "nhzhost")+"/nhz?requestType=getAccount&account="+config.get("pool", "poolaccount")).read())
clean = poolAccount["lessors"]
output = template('accounts', leased=clean)
return output
@route('/blocks')
def blocks(db):
c = db.execute("SELECT timestamp, block, totalfee FROM blocks")
result = c.fetchall()
c.close()
payload = {
'requestType': 'getForging',
'secretPhrase': config.get("pool", "poolphrase")
}
opener = urllib2.build_opener(urllib2.HTTPHandler())
data = urllib.urlencode(payload)
forging = json.loads(opener.open(config.get("pool", "nhzhost")+'/nhz', data=data).read())
getdl = forging["deadline"]
deadline = str(datetime.timedelta(seconds=getdl))
output = template('blocks', rows=result, fg=deadline)
return output
@route('/payouts')
def payouts(db):
c = db.execute("SELECT account, percentage, amount, paid, blocktime FROM accounts")
result = c.fetchall()
output = template('payouts', rows=result)
return output
run(server=PasteServer, port=8888, host='0.0.0.0') | mit | Python |
6902b88472826f6042dda6acda6f8a22d2fef64f | Change food color. | Beskhue/enactive-agents,Beskhue/enactive-agents,Beskhue/enactive-agents | enactiveagents/model/structure.py | enactiveagents/model/structure.py | """
Module that holds classes that represent structures.
"""
import world
class Structure(world.Entity):
"""
Class representing structures in the world (i.e., static but potentially
interactable with by agents).
"""
def collidable(self):
return True
class Wall(Structure):
"""
Class representing a wall structure.
"""
def __init__(self):
super(Wall, self).__init__()
self.height = 1
self.width = 1
class Block(Structure):
"""
Class representing a block structure.
"""
color = (122, 179, 62, 255)
def collidable(self):
return False
class Food(Structure):
"""
Class representing food.
"""
color = (62, 179, 122, 255)
def collidable(self):
return False
| """
Module that holds classes that represent structures.
"""
import world
class Structure(world.Entity):
"""
Class representing structures in the world (i.e., static but potentially
interactable with by agents).
"""
def collidable(self):
return True
class Wall(Structure):
"""
Class representing a wall structure.
"""
def __init__(self):
super(Wall, self).__init__()
self.height = 1
self.width = 1
class Block(Structure):
"""
Class representing a block structure.
"""
color = (122, 179, 62, 255)
def collidable(self):
return False
class Food(Structure):
"""
Class representing food.
"""
color = (179, 122, 62, 255)
def collidable(self):
return False
| mit | Python |
738ec72f78847bb31c89305247fcbe2d994117f0 | Optimize case ObjectMixin.setUp | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | feder/cases/tests.py | feder/cases/tests.py | from django.core.urlresolvers import reverse
from django.test import TestCase
from feder.users.factories import UserFactory
from feder.main.mixins import PermissionStatusMixin
from .factories import CaseFactory
class ObjectMixin(object):
def setUp(self):
self.user = UserFactory(username="john")
self.case = CaseFactory()
self.permission_object = self.case.monitoring
class CaseListViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
status_anonymous = 200
status_no_permission = 200
def get_url(self):
return reverse('cases:list')
class CaseDetailViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
status_anonymous = 200
status_no_permission = 200
def get_url(self):
return reverse('cases:details', kwargs={'slug': self.case.slug})
class CaseCreateViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
permission = ['monitorings.add_case', ]
def get_url(self):
return reverse('cases:create', kwargs={'monitoring': str(self.case.monitoring.pk)})
class CaseUpdateViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
permission = ['monitorings.change_case', ]
def get_url(self):
return reverse('cases:update', kwargs={'slug': self.case.slug})
class CaseDeleteViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
permission = ['monitorings.delete_case', ]
def get_url(self):
return reverse('cases:delete', kwargs={'slug': self.case.slug})
| from django.core.urlresolvers import reverse
from django.test import RequestFactory, TestCase
from feder.monitorings.factories import MonitoringFactory
from feder.cases.models import Case
from feder.users.factories import UserFactory
from feder.institutions.factories import InstitutionFactory
from feder.main.mixins import PermissionStatusMixin
class ObjectMixin(object):
def setUp(self):
self.factory = RequestFactory()
self.user = UserFactory(username="john")
self.monitoring = self.permission_object = MonitoringFactory(user=self.user)
self.institution = InstitutionFactory()
self.case = Case.objects.create(name="blabla",
monitoring=self.monitoring,
institution=self.institution,
user=self.user)
class CaseListViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
status_anonymous = 200
status_no_permission = 200
def get_url(self):
return reverse('cases:list')
class CaseDetailViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
status_anonymous = 200
status_no_permission = 200
def get_url(self):
return reverse('cases:details', kwargs={'slug': self.case.slug})
class CaseCreateViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
permission = ['monitorings.add_case', ]
def get_url(self):
return reverse('cases:create', kwargs={'monitoring': str(self.monitoring.pk)})
class CaseUpdateViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
permission = ['monitorings.change_case', ]
def get_url(self):
return reverse('cases:update', kwargs={'slug': self.case.slug})
class CaseDeleteViewTestCase(ObjectMixin, PermissionStatusMixin, TestCase):
permission = ['monitorings.delete_case', ]
def get_url(self):
return reverse('cases:delete', kwargs={'slug': self.case.slug})
| mit | Python |
8f280cece4d59e36ebfeb5486f25c7ac92718c13 | Clean it up a bit | DoublePlusGood23/lc-president-challenge | third_problem.py | third_problem.py | not_vowel = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
# Remove sapces
phrase = phrase.replace(' ', '')
for char in phrase:
if char in not_vowel:
output += char # Add non vowel to output
else:
vowels += char # Add vowels to vowels
print(output)
print(vowels)
| letters = 'bcdfghjklmnpqrtvwxyzBCDFGHJKLMNPQRTVWXYZ'
phrase = input()
output = ''
vowels = ''
phrase = phrase.replace(' ', '')
for char in phrase:
if char in letters:
output += char
else:
vowels += char
print(output)
print(vowels) | mit | Python |
ac0d1036e56e8c24945abedbc372c717b5d7064a | improve imprort style. | lzkelley/zcode,lzkelley/zcode | zcode/constants.py | zcode/constants.py | """Common Numerical and Physical Constants.
"""
import numpy as np
import astropy as ap
import astropy.constants
import astropy.cosmology
# from astropy.cosmology import WMAP9 as cosmo
cosmo = astropy.cosmology.WMAP9
# Fundamental Constants
# ---------------------
NWTG = ap.constants.G.cgs.value
SPLC = ap.constants.c.cgs.value
MSOL = ap.constants.M_sun.cgs.value
LSOL = ap.constants.L_sun.cgs.value
RSOL = ap.constants.R_sun.cgs.value
PC = ap.constants.pc.cgs.value
AU = ap.constants.au.cgs.value
YR = ap.units.year.to(ap.units.s)
MELC = ap.constants.m_e.cgs.value
MPRT = ap.constants.m_p.cgs.value
H0 = cosmo.H0.cgs.value # Hubble Constants at z=0.0
HPAR = cosmo.H0.value/100.0
OMEGA_M = cosmo.Om0
OMEGA_B = cosmo.Ob0
OMEGA_DM = cosmo.Odm0
RHO_CRIT = cosmo.critical_density0.cgs.value
# Higher order constants
# ----------------------
# Thomson-Scattering (Electron-Scattering) cross-section
try:
SIGMA_T = ap.constants.sigma_T.cgs.value
except:
SIGMA_T = 6.652458734e-25 # cm^2 (i.e. per electron)
# Electron-Scattering Opacity ($\kappa_{es} = n_e \sigma_T / \rho = \mu_e \sigma_T / m_p$)
# Where $\mu_e$ is the mean-mass per electron, for a total mass-density $\rho$.
KAPPA_ES = SIGMA_T/MPRT
# Derived Constants
# -----------------
PIFT = 4.0*np.pi/3.0 # (4.0/3.0)*Pi
SCHW = 2*NWTG/(SPLC*SPLC) # Schwarzschild Constant (2*G/c^2)
HTAU = 1.0/H0 # Hubble Time - 1/H0 [sec]
MYR = 1.0e6*YR
GYR = 1.0e9*YR
KPC = 1.0e3*PC
MPC = 1.0e6*PC
GPC = 1.0e9*PC
| """Common Numerical and Physical Constants.
"""
import numpy as np
import astropy as ap
import astropy.constants
import astropy.cosmology
from astropy.cosmology import WMAP9 as cosmo
# Fundamental Constants
# ---------------------
NWTG = ap.constants.G.cgs.value
SPLC = ap.constants.c.cgs.value
MSOL = ap.constants.M_sun.cgs.value
LSOL = ap.constants.L_sun.cgs.value
RSOL = ap.constants.R_sun.cgs.value
PC = ap.constants.pc.cgs.value
AU = ap.constants.au.cgs.value
YR = ap.units.year.to(ap.units.s)
MELC = ap.constants.m_e.cgs.value
MPRT = ap.constants.m_p.cgs.value
H0 = cosmo.H0.cgs.value # Hubble Constants at z=0.0
HPAR = cosmo.H0.value/100.0
OMEGA_M = cosmo.Om0
OMEGA_B = cosmo.Ob0
OMEGA_DM = cosmo.Odm0
RHO_CRIT = cosmo.critical_density0.cgs.value
# Higher order constants
# ----------------------
# Thomson-Scattering (Electron-Scattering) cross-section
try:
SIGMA_T = ap.constants.sigma_T.cgs.value
except:
SIGMA_T = 6.652458734e-25 # cm^2 (i.e. per electron)
# Electron-Scattering Opacity ($\kappa_{es} = n_e \sigma_T / \rho = \mu_e \sigma_T / m_p$)
# Where $\mu_e$ is the mean-mass per electron, for a total mass-density $\rho$.
KAPPA_ES = SIGMA_T/MPRT
# Derived Constants
# -----------------
PIFT = 4.0*np.pi/3.0 # (4.0/3.0)*Pi
SCHW = 2*NWTG/(SPLC*SPLC) # Schwarzschild Constant (2*G/c^2)
HTAU = 1.0/H0 # Hubble Time - 1/H0 [sec]
MYR = 1.0e6*YR
GYR = 1.0e9*YR
KPC = 1.0e3*PC
MPC = 1.0e6*PC
GPC = 1.0e9*PC
| mit | Python |
b64e7714e581cfc0c0a0d0f055b22c5edca27e24 | Raise KeyboardInterrupt to allow the run to handle logout | gryffon/SusumuTakuan,gryffon/SusumuTakuan | susumutakuan.py | susumutakuan.py | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
async def sigterm_handler(signum, frame):
print("Logging out...")
raise KeyboardInterrupt
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | import discord
import asyncio
import os
import signal
import sys
#Set up Client State
CLIENT_TOKEN=os.environ['TOKEN']
#Create Discord client
client = discord.Client()
#Handle shutdown gracefully
def sigterm_handler(signum, frame):
print("Logging out...")
client.logout()
print('Shutting down...')
sys.exit(1)
#Register SIGTERM Handler
signal.signal(signal.SIGTERM, sigterm_handler)
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run(CLIENT_TOKEN) | mit | Python |
13e30fe6af93bbb48a4795ee22f4f3ba760adc14 | add get_session_names | sk1418/retmux | tmuxback/tmux.py | tmuxback/tmux.py | # -*- coding:utf-8 -*-
import subprocess
import re
#tmux commands
#list sessions
CMD_LIST_SESSIONS='tmux list-sessions -F#S'
def get_session_names():
""" return a list of tmux session names """
s = subprocess.check_output(CMD_LIST_SESSIONS.split(' '))
s = re.sub('\n$','',s)
return s.split('\n')
#if __name__ == '__main__':
# print get_session_names()
| # -*- coding:utf-8 -*-
def get_session_names():
"""get session names"""
pass
| mit | Python |
2e382c8bff2d0c3733b9b525168254971ca1175e | Update atexit function to avoid issues with late binding | milliman/spark,milliman/spark,milliman/spark,milliman/spark,milliman/spark,milliman/spark,milliman/spark | python/pyspark/shell.py | python/pyspark/shell.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An interactive shell.
This file is designed to be launched as a PYTHONSTARTUP script.
"""
import atexit
import os
import platform
import warnings
from pyspark.context import SparkContext
from pyspark.sql import SparkSession
if os.environ.get("SPARK_EXECUTOR_URI"):
    SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])

SparkContext._ensure_initialized()  # type: ignore

# Build the interactive session up front; the shell is unusable without it,
# so any failure is reported and the process exits immediately.
try:
    spark = SparkSession._create_shell_session()  # type: ignore
except Exception:
    import sys
    import traceback
    warnings.warn("Failed to initialize Spark session.")
    traceback.print_exc(file=sys.stderr)
    sys.exit(1)

sc = spark.sparkContext
sql = spark.sql

# The outer lambda is applied immediately, so the *current* SparkContext is
# captured as the inner closure's argument.  A plain ``lambda: sc.stop()``
# would look up the global name ``sc`` only at interpreter exit (late
# binding) and could therefore stop a different, later-bound context.
atexit.register((lambda sc: lambda: sc.stop())(sc))
# for compatibility: keep the legacy ``sqlContext``/``sqlCtx`` names resolvable
sqlContext = spark._wrapped
sqlCtx = sqlContext

print(r"""Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /__ / .__/\_,_/_/ /_/\_\   version %s
      /_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
    platform.python_version(),
    platform.python_build()[0],
    platform.python_build()[1]))
print("Spark context Web UI available at %s" % (sc.uiWebUrl))
print("Spark context available as 'sc' (master = %s, app id = %s)." % (sc.master, sc.applicationId))
print("SparkSession available as 'spark'.")

# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
    with open(_pythonstartup) as f:
        code = compile(f.read(), _pythonstartup, 'exec')
        exec(code)
| #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
An interactive shell.
This file is designed to be launched as a PYTHONSTARTUP script.
"""
import atexit
import os
import platform
import warnings
from pyspark.context import SparkContext
from pyspark.sql import SparkSession
if os.environ.get("SPARK_EXECUTOR_URI"):
    SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])

SparkContext._ensure_initialized()  # type: ignore

# Create the interactive session; abort the shell if Spark cannot start.
try:
    spark = SparkSession._create_shell_session()  # type: ignore
except Exception:
    import sys
    import traceback
    warnings.warn("Failed to initialize Spark session.")
    traceback.print_exc(file=sys.stderr)
    sys.exit(1)

sc = spark.sparkContext
sql = spark.sql

# NOTE(review): this lambda captures the global name ``sc`` by reference
# (late binding).  If ``sc`` is rebound before interpreter exit, the hook
# stops the newer object instead of this context -- bind it early, e.g.
# ``atexit.register((lambda sc: lambda: sc.stop())(sc))``.
atexit.register(lambda: sc.stop())
# for compatibility
sqlContext = spark._wrapped
sqlCtx = sqlContext
print(r"""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/__ / .__/\_,_/_/ /_/\_\ version %s
/_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
platform.python_version(),
platform.python_build()[0],
platform.python_build()[1]))
print("Spark context Web UI available at %s" % (sc.uiWebUrl))
print("Spark context available as 'sc' (master = %s, app id = %s)." % (sc.master, sc.applicationId))
print("SparkSession available as 'spark'.")
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
with open(_pythonstartup) as f:
code = compile(f.read(), _pythonstartup, 'exec')
exec(code)
| apache-2.0 | Python |
10e6c53a39d3ee57d855ada1aa6e9d620f094465 | add 'save' command | frans-fuerst/track,frans-fuerst/track | track-cli.py | track-cli.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
import zmq
import logging
log = logging.getLogger('track_cli')
def print_info():
    """Log the versions of the underlying zeromq library and the pyzmq binding."""
    log.info("zeromq version: %s" % zmq.zmq_version())
    log.info("pyzmq version: %s" % zmq.pyzmq_version())
def send_request(request):
    """Send *request* as JSON to the local track daemon and return the
    decoded JSON reply."""
    context = zmq.Context()
    socket = context.socket(zmq.REQ)
    socket.connect('tcp://127.0.0.1:3456')
    socket.send_json(request)
    return socket.recv_json()
def handle_result(result):
    """Print the server reply; raise if the server reported an error."""
    server_error = 'type' in result and result['type'] == 'error'
    if server_error:
        raise Exception('server replied with error: "%s"' % result['what'])
    print(result)
def main():
    """Parse the command line, send one request to the track daemon and
    display its reply.

    Exits via sys.exit() after printing help; logs and returns on zmq or
    keyboard-interrupt errors.
    """
    # Every entry maps 1:1 onto a request 'type' understood by the server.
    # Keeping one list means 'help' can never drift out of sync with the
    # dispatch again (the original help listing was missing 'save').
    known_commands = ['quit', 'version', 'apps', 'current', 'rules', 'save']
    args = sys.argv[1:]
    if args == []:
        print('no command provided')
        return
    elif args == ['help']:
        print(known_commands)
        sys.exit()
    elif len(args) == 1 and args[0] in known_commands:
        request = {'type': args[0]}
    else:
        raise Exception('command not handled: %s' % args)
    try:
        result = send_request(request)
        handle_result(result)
    except zmq.ZMQError as e:
        log.error(e)
        return
    except KeyboardInterrupt:
        log.info("got keyboard interrupt - exit")
if __name__ == '__main__':
    # configure root logging before dispatching the CLI command
    logging.basicConfig(level=logging.INFO)
    main()
| #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
import zmq
import logging
log = logging.getLogger('track_cli')
def print_info():
log.info("zeromq version: %s" % zmq.zmq_version())
log.info("pyzmq version: %s" % zmq.pyzmq_version())
def send_request(request):
context = zmq.Context()
req_socket = context.socket(zmq.REQ)
req_socket.connect('tcp://127.0.0.1:3456')
req_socket.send_json(request)
return req_socket.recv_json()
def handle_result(result):
if 'type' in result and result['type'] == 'error':
raise Exception('server replied with error: "%s"' % result['what'])
print(result)
def main():
args = sys.argv[1:]
if args == []:
print('no command provided')
return
elif args == ['quit']:
request = {'type': 'quit'}
elif args == ['version']:
request = {'type': 'version'}
elif args == ['apps']:
request = {'type': 'apps'}
elif args == ['current']:
request = {'type': 'current'}
elif args == ['rules']:
request = {'type': 'rules'}
elif args == ['help']:
print(['quit', 'version', 'apps', 'current', 'rules'])
sys.exit()
else:
raise Exception('command not handled: %s' % args)
try:
result = send_request(request)
handle_result(result)
except zmq.ZMQError as e:
log.error(e)
return
except KeyboardInterrupt:
log.info("got keyboard interrupt - exit")
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main()
| apache-2.0 | Python |
0dea5f2b6a2e6d702167c3415d10a47275e30601 | update the version to 0.6.0 | ronnyandersson/zignal | zignal/__init__.py | zignal/__init__.py | """
This is the zignal library
@author: Ronny Andersson ([email protected])
@copyright: (c) 2013 Ronny Andersson
@license: MIT
"""
__version__ = "0.6.0"

from .audio import *
from . import filters
from . import measure
from . import music
from . import sndcard

# Public API for ``from zignal import *``: the submodules listed here plus
# everything the audio module itself exports.
__all__ = [
    'filters',
    'measure',
    'music',
    'sndcard',
    ]
# ``audio`` is bound in this namespace as a side effect of the star import
# above; static analysers cannot see that, hence the suppression tag.
__all__.extend(audio.__all__) #@UndefinedVariable
| """
This is the zignal library
@author: Ronny Andersson ([email protected])
@copyright: (c) 2013 Ronny Andersson
@license: MIT
"""
__version__ = "0.5.0"
from .audio import *
from . import filters
from . import measure
from . import music
from . import sndcard
__all__ = [
'filters',
'measure',
'music',
'sndcard',
]
__all__.extend(audio.__all__) #@UndefinedVariable
| mit | Python |
c91240cd43c4f714a404cf5f2ce566dad290c0c5 | Add url mapping for ProjectEntrySumsAPIView | bjoernricks/trex,bjoernricks/trex | trex/urls.py | trex/urls.py | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <[email protected]>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from trex.views import project
urlpatterns = patterns(
    '',
    # frontend entry point
    url(r"^$",
        TemplateView.as_view(template_name="index.html"),
        name="index",
        ),
    # project collection and detail
    url(r"^api/1/projects/?$",
        project.ProjectListCreateAPIView.as_view(),
        name="project-list"),
    url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
        project.ProjectDetailAPIView.as_view(),
        name="project-detail"),
    # resources nested below a single project
    url(r"^api/1/projects/(?P<pk>[0-9]+)/entries/?$",
        project.ProjectEntriesListAPIView.as_view(),
        name="project-entries-list"),
    url(r"^api/1/projects/(?P<pk>[0-9]+)/entries/sums/?$",
        project.ProjectEntrySumsAPIView.as_view(),
        name="project-entries-sums"),
    url(r"^api/1/projects/(?P<pk>[0-9]+)/tags/?$",
        project.ProjectTagsListAPIView.as_view(),
        name="project-tags-list"),
    url(r"^api/1/projects/(?P<pk>[0-9]+)/users/?$",
        project.ProjectUsersListAPIView.as_view(),
        name="project-users-list"),
    url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/?$",
        project.ProjectZeiterfassungAPIView.as_view(),
        name="project-zeiterfassung"),
    # top-level detail views for single entries and tags
    url(r"^api/1/entries/(?P<pk>[0-9]+)/?$",
        project.EntryDetailAPIView.as_view(),
        name="entry-detail"),
    url(r"^api/1/tags/(?P<pk>[0-9]+)/?$",
        project.TagDetailAPIView.as_view(),
        name="tag-detail"),
)
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <[email protected]>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from trex.views import project
urlpatterns = patterns(
'',
url(r"^$",
TemplateView.as_view(template_name="index.html"),
name="index",
),
url(r"^api/1/projects/?$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries/?$",
project.ProjectEntriesListAPIView.as_view(),
name="project-entries-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/tags/?$",
project.ProjectTagsListAPIView.as_view(),
name="project-tags-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/users/?$",
project.ProjectUsersListAPIView.as_view(),
name="project-users-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/?$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/?$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
url(r"^api/1/tags/(?P<pk>[0-9]+)/?$",
project.TagDetailAPIView.as_view(),
name="tag-detail"),
)
| mit | Python |
7cbee5e817b6d2bbf4fbcbf8cf1cf327bdbabc9c | rename locator_string to package_id | gymnasium/edx-platform,zadgroup/edx-platform,Softmotions/edx-platform,zubair-arbi/edx-platform,jamesblunt/edx-platform,longmen21/edx-platform,morenopc/edx-platform,mjirayu/sit_academy,IONISx/edx-platform,shubhdev/edxOnBaadal,sameetb-cuelogic/edx-platform-test,4eek/edx-platform,vasyarv/edx-platform,ahmadiga/min_edx,eduNEXT/edx-platform,dcosentino/edx-platform,wwj718/ANALYSE,shubhdev/edx-platform,MSOpenTech/edx-platform,chudaol/edx-platform,martynovp/edx-platform,SivilTaram/edx-platform,dcosentino/edx-platform,dcosentino/edx-platform,ZLLab-Mooc/edx-platform,cognitiveclass/edx-platform,a-parhom/edx-platform,chauhanhardik/populo_2,simbs/edx-platform,EDUlib/edx-platform,beacloudgenius/edx-platform,sudheerchintala/LearnEraPlatForm,cpennington/edx-platform,rhndg/openedx,nanolearningllc/edx-platform-cypress-2,hastexo/edx-platform,waheedahmed/edx-platform,Semi-global/edx-platform,torchingloom/edx-platform,xinjiguaike/edx-platform,Ayub-Khan/edx-platform,sudheerchintala/LearnEraPlatForm,pomegranited/edx-platform,franosincic/edx-platform,shurihell/testasia,peterm-itr/edx-platform,cselis86/edx-platform,zofuthan/edx-platform,LICEF/edx-platform,louyihua/edx-platform,DefyVentures/edx-platform,OmarIthawi/edx-platform,antonve/s4-project-mooc,vasyarv/edx-platform,doganov/edx-platform,J861449197/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,proversity-org/edx-platform,cognitiveclass/edx-platform,polimediaupv/edx-platform,JioEducation/edx-platform,SravanthiSinha/edx-platform,beacloudgenius/edx-platform,ubc/edx-platform,chudaol/edx-platform,pku9104038/edx-platform,appliedx/edx-platform,mushtaqak/edx-platform,ovnicraft/edx-platform,Edraak/edx-platform,nanolearningllc/edx-platform-cypress-2,openfun/edx-platform,wwj718/edx-platform,jswope00/griffinx,don-github/edx-platform,rismalrv/edx-platform,prarthitm/edxplatform,auferack08/edx-platform,rismalrv/edx-platform,CredoReference/edx-platform,shub
hdev/openedx,zofuthan/edx-platform,kamalx/edx-platform,MSOpenTech/edx-platform,jbzdak/edx-platform,kmoocdev/edx-platform,Edraak/circleci-edx-platform,ampax/edx-platform,J861449197/edx-platform,jamesblunt/edx-platform,jswope00/griffinx,inares/edx-platform,EDUlib/edx-platform,dsajkl/reqiop,sameetb-cuelogic/edx-platform-test,wwj718/edx-platform,dsajkl/123,alexthered/kienhoc-platform,cyanna/edx-platform,10clouds/edx-platform,nagyistoce/edx-platform,abdoosh00/edraak,4eek/edx-platform,etzhou/edx-platform,doganov/edx-platform,nanolearningllc/edx-platform-cypress-2,playm2mboy/edx-platform,devs1991/test_edx_docmode,romain-li/edx-platform,itsjeyd/edx-platform,wwj718/ANALYSE,jazztpt/edx-platform,simbs/edx-platform,solashirai/edx-platform,zubair-arbi/edx-platform,prarthitm/edxplatform,jazkarta/edx-platform,dsajkl/123,cyanna/edx-platform,antonve/s4-project-mooc,UXE/local-edx,jswope00/griffinx,SravanthiSinha/edx-platform,marcore/edx-platform,jamesblunt/edx-platform,SivilTaram/edx-platform,ahmadiga/min_edx,eestay/edx-platform,rismalrv/edx-platform,franosincic/edx-platform,jazkarta/edx-platform-for-isc,dcosentino/edx-platform,jruiperezv/ANALYSE,shashank971/edx-platform,jazztpt/edx-platform,iivic/BoiseStateX,mahendra-r/edx-platform,msegado/edx-platform,zerobatu/edx-platform,mahendra-r/edx-platform,jamiefolsom/edx-platform,don-github/edx-platform,ESOedX/edx-platform,pabloborrego93/edx-platform,motion2015/edx-platform,prarthitm/edxplatform,Softmotions/edx-platform,LearnEra/LearnEraPlaftform,ak2703/edx-platform,cyanna/edx-platform,nikolas/edx-platform,valtech-mooc/edx-platform,cecep-edu/edx-platform,Lektorium-LLC/edx-platform,mbareta/edx-platform-ft,itsjeyd/edx-platform,zubair-arbi/edx-platform,B-MOOC/edx-platform,zhenzhai/edx-platform,Kalyzee/edx-platform,don-github/edx-platform,Edraak/edx-platform,angelapper/edx-platform,analyseuc3m/ANALYSE-v1,defance/edx-platform,louyihua/edx-platform,chrisndodge/edx-platform,jamesblunt/edx-platform,utecuy/edx-platform,nagyistoce/edx-platform,nanole
arningllc/edx-platform-cypress,openfun/edx-platform,mahendra-r/edx-platform,cyanna/edx-platform,chudaol/edx-platform,Semi-global/edx-platform,mbareta/edx-platform-ft,xinjiguaike/edx-platform,appsembler/edx-platform,Shrhawk/edx-platform,stvstnfrd/edx-platform,iivic/BoiseStateX,xinjiguaike/edx-platform,edx/edx-platform,louyihua/edx-platform,jonathan-beard/edx-platform,beni55/edx-platform,nttks/edx-platform,kursitet/edx-platform,arifsetiawan/edx-platform,CredoReference/edx-platform,JioEducation/edx-platform,beacloudgenius/edx-platform,Edraak/edraak-platform,kamalx/edx-platform,leansoft/edx-platform,jswope00/GAI,cpennington/edx-platform,4eek/edx-platform,appliedx/edx-platform,waheedahmed/edx-platform,martynovp/edx-platform,jbzdak/edx-platform,Unow/edx-platform,motion2015/a3,jolyonb/edx-platform,bigdatauniversity/edx-platform,nttks/jenkins-test,mcgachey/edx-platform,zerobatu/edx-platform,RPI-OPENEDX/edx-platform,UXE/local-edx,inares/edx-platform,vasyarv/edx-platform,vasyarv/edx-platform,waheedahmed/edx-platform,rue89-tech/edx-platform,beni55/edx-platform,bigdatauniversity/edx-platform,knehez/edx-platform,chauhanhardik/populo,marcore/edx-platform,bigdatauniversity/edx-platform,WatanabeYasumasa/edx-platform,antoviaque/edx-platform,ak2703/edx-platform,bitifirefly/edx-platform,yokose-ks/edx-platform,naresh21/synergetics-edx-platform,shubhdev/edx-platform,hastexo/edx-platform,hmcmooc/muddx-platform,tanmaykm/edx-platform,edry/edx-platform,solashirai/edx-platform,jolyonb/edx-platform,cognitiveclass/edx-platform,hkawasaki/kawasaki-aio8-2,lduarte1991/edx-platform,tiagochiavericosta/edx-platform,antonve/s4-project-mooc,halvertoluke/edx-platform,xingyepei/edx-platform,SravanthiSinha/edx-platform,gymnasium/edx-platform,romain-li/edx-platform,leansoft/edx-platform,jazkarta/edx-platform,Semi-global/edx-platform,shubhdev/edx-platform,antonve/s4-project-mooc,J861449197/edx-platform,hkawasaki/kawasaki-aio8-2,jazztpt/edx-platform,knehez/edx-platform,LearnEra/LearnEraPlaftform,kmoocdev/edx
-platform,devs1991/test_edx_docmode,nikolas/edx-platform,rue89-tech/edx-platform,morenopc/edx-platform,chauhanhardik/populo_2,Semi-global/edx-platform,bigdatauniversity/edx-platform,eestay/edx-platform,IONISx/edx-platform,edry/edx-platform,Shrhawk/edx-platform,TeachAtTUM/edx-platform,MSOpenTech/edx-platform,MSOpenTech/edx-platform,caesar2164/edx-platform,kxliugang/edx-platform,zerobatu/edx-platform,Endika/edx-platform,UXE/local-edx,alu042/edx-platform,DNFcode/edx-platform,Kalyzee/edx-platform,ahmadiga/min_edx,Semi-global/edx-platform,jbzdak/edx-platform,jazkarta/edx-platform-for-isc,kmoocdev/edx-platform,torchingloom/edx-platform,xuxiao19910803/edx-platform,cecep-edu/edx-platform,appliedx/edx-platform,nanolearning/edx-platform,LICEF/edx-platform,kxliugang/edx-platform,DNFcode/edx-platform,mushtaqak/edx-platform,LearnEra/LearnEraPlaftform,Unow/edx-platform,Edraak/edx-platform,zadgroup/edx-platform,kmoocdev2/edx-platform,iivic/BoiseStateX,motion2015/a3,appliedx/edx-platform,amir-qayyum-khan/edx-platform,fly19890211/edx-platform,knehez/edx-platform,Livit/Livit.Learn.EdX,TeachAtTUM/edx-platform,xinjiguaike/edx-platform,sudheerchintala/LearnEraPlatForm,Edraak/edraak-platform,fintech-circle/edx-platform,jelugbo/tundex,eestay/edx-platform,ovnicraft/edx-platform,doismellburning/edx-platform,nttks/edx-platform,IndonesiaX/edx-platform,zubair-arbi/edx-platform,xingyepei/edx-platform,IndonesiaX/edx-platform,halvertoluke/edx-platform,UXE/local-edx,romain-li/edx-platform,procangroup/edx-platform,deepsrijit1105/edx-platform,procangroup/edx-platform,nikolas/edx-platform,amir-qayyum-khan/edx-platform,DNFcode/edx-platform,mjirayu/sit_academy,chrisndodge/edx-platform,naresh21/synergetics-edx-platform,rue89-tech/edx-platform,nagyistoce/edx-platform,ZLLab-Mooc/edx-platform,ahmadiga/min_edx,jazkarta/edx-platform-for-isc,doganov/edx-platform,ovnicraft/edx-platform,edx/edx-platform,martynovp/edx-platform,nttks/edx-platform,kxliugang/edx-platform,mahendra-r/edx-platform,shabab12/edx-platfor
m,ahmadio/edx-platform,solashirai/edx-platform,ubc/edx-platform,nttks/jenkins-test,dsajkl/reqiop,tiagochiavericosta/edx-platform,peterm-itr/edx-platform,simbs/edx-platform,valtech-mooc/edx-platform,nttks/edx-platform,mjirayu/sit_academy,motion2015/a3,edx/edx-platform,stvstnfrd/edx-platform,JioEducation/edx-platform,pepeportela/edx-platform,teltek/edx-platform,WatanabeYasumasa/edx-platform,chauhanhardik/populo,mtlchun/edx,adoosii/edx-platform,marcore/edx-platform,hkawasaki/kawasaki-aio8-0,shabab12/edx-platform,leansoft/edx-platform,J861449197/edx-platform,CourseTalk/edx-platform,devs1991/test_edx_docmode,longmen21/edx-platform,UOMx/edx-platform,alexthered/kienhoc-platform,Edraak/circleci-edx-platform,WatanabeYasumasa/edx-platform,jazkarta/edx-platform,alu042/edx-platform,wwj718/edx-platform,alu042/edx-platform,dkarakats/edx-platform,romain-li/edx-platform,nanolearningllc/edx-platform-cypress,Softmotions/edx-platform,procangroup/edx-platform,vismartltd/edx-platform,ZLLab-Mooc/edx-platform,IndonesiaX/edx-platform,adoosii/edx-platform,LICEF/edx-platform,ampax/edx-platform-backup,Ayub-Khan/edx-platform,chauhanhardik/populo_2,OmarIthawi/edx-platform,pepeportela/edx-platform,zadgroup/edx-platform,adoosii/edx-platform,Edraak/circleci-edx-platform,eestay/edx-platform,defance/edx-platform,ahmadio/edx-platform,ferabra/edx-platform,UOMx/edx-platform,bitifirefly/edx-platform,amir-qayyum-khan/edx-platform,pabloborrego93/edx-platform,adoosii/edx-platform,beni55/edx-platform,tanmaykm/edx-platform,carsongee/edx-platform,iivic/BoiseStateX,shabab12/edx-platform,rhndg/openedx,EDUlib/edx-platform,simbs/edx-platform,ovnicraft/edx-platform,xuxiao19910803/edx,zadgroup/edx-platform,shashank971/edx-platform,nanolearningllc/edx-platform-cypress-2,jzoldak/edx-platform,Ayub-Khan/edx-platform,unicri/edx-platform,sameetb-cuelogic/edx-platform-test,alu042/edx-platform,jbassen/edx-platform,openfun/edx-platform,hkawasaki/kawasaki-aio8-2,B-MOOC/edx-platform,ampax/edx-platform,antonve/s4-project-mooc,
hkawasaki/kawasaki-aio8-0,chudaol/edx-platform,DefyVentures/edx-platform,andyzsf/edx,torchingloom/edx-platform,Edraak/circleci-edx-platform,4eek/edx-platform,eduNEXT/edunext-platform,shubhdev/openedx,devs1991/test_edx_docmode,jjmiranda/edx-platform,CredoReference/edx-platform,dsajkl/123,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,inares/edx-platform,jolyonb/edx-platform,SravanthiSinha/edx-platform,jazkarta/edx-platform-for-isc,yokose-ks/edx-platform,hkawasaki/kawasaki-aio8-1,jelugbo/tundex,zerobatu/edx-platform,jruiperezv/ANALYSE,wwj718/edx-platform,bdero/edx-platform,hmcmooc/muddx-platform,B-MOOC/edx-platform,pku9104038/edx-platform,zadgroup/edx-platform,jjmiranda/edx-platform,zubair-arbi/edx-platform,bitifirefly/edx-platform,alexthered/kienhoc-platform,ferabra/edx-platform,hkawasaki/kawasaki-aio8-1,fintech-circle/edx-platform,vikas1885/test1,yokose-ks/edx-platform,pabloborrego93/edx-platform,eemirtekin/edx-platform,cecep-edu/edx-platform,10clouds/edx-platform,adoosii/edx-platform,auferack08/edx-platform,knehez/edx-platform,4eek/edx-platform,Endika/edx-platform,ESOedX/edx-platform,wwj718/ANALYSE,franosincic/edx-platform,JCBarahona/edX,shubhdev/edx-platform,kxliugang/edx-platform,gsehub/edx-platform,zhenzhai/edx-platform,motion2015/a3,hmcmooc/muddx-platform,appliedx/edx-platform,JCBarahona/edX,kamalx/edx-platform,jamiefolsom/edx-platform,jruiperezv/ANALYSE,ahmadiga/min_edx,B-MOOC/edx-platform,nanolearning/edx-platform,deepsrijit1105/edx-platform,valtech-mooc/edx-platform,RPI-OPENEDX/edx-platform,jazkarta/edx-platform-for-isc,nanolearning/edx-platform,edry/edx-platform,kursitet/edx-platform,etzhou/edx-platform,xuxiao19910803/edx,ahmedaljazzar/edx-platform,kxliugang/edx-platform,eduNEXT/edx-platform,mitocw/edx-platform,tiagochiavericosta/edx-platform,benpatterson/edx-platform,a-parhom/edx-platform,gsehub/edx-platform,LICEF/edx-platform,MSOpenTech/edx-platform,utecuy/edx-platform,stvstnfrd/edx-platform,wwj718/ANALYSE,ak2703/edx-platform,arifsetiawan/edx-platform,
UOMx/edx-platform,arifsetiawan/edx-platform,xuxiao19910803/edx-platform,etzhou/edx-platform,defance/edx-platform,miptliot/edx-platform,rue89-tech/edx-platform,AkA84/edx-platform,lduarte1991/edx-platform,jswope00/GAI,stvstnfrd/edx-platform,pabloborrego93/edx-platform,hastexo/edx-platform,pomegranited/edx-platform,motion2015/edx-platform,simbs/edx-platform,jjmiranda/edx-platform,ESOedX/edx-platform,DefyVentures/edx-platform,defance/edx-platform,chudaol/edx-platform,playm2mboy/edx-platform,chand3040/cloud_that,jruiperezv/ANALYSE,DefyVentures/edx-platform,synergeticsedx/deployment-wipro,eduNEXT/edunext-platform,olexiim/edx-platform,msegado/edx-platform,hamzehd/edx-platform,Edraak/circleci-edx-platform,msegado/edx-platform,chand3040/cloud_that,y12uc231/edx-platform,zhenzhai/edx-platform,ampax/edx-platform,leansoft/edx-platform,analyseuc3m/ANALYSE-v1,jonathan-beard/edx-platform,louyihua/edx-platform,raccoongang/edx-platform,philanthropy-u/edx-platform,kursitet/edx-platform,eduNEXT/edx-platform,devs1991/test_edx_docmode,sudheerchintala/LearnEraPlatForm,shubhdev/openedx,dkarakats/edx-platform,nanolearningllc/edx-platform-cypress,gsehub/edx-platform,inares/edx-platform,philanthropy-u/edx-platform,fintech-circle/edx-platform,jelugbo/tundex,benpatterson/edx-platform,jazztpt/edx-platform,CourseTalk/edx-platform,abdoosh00/edraak,kmoocdev2/edx-platform,dkarakats/edx-platform,edry/edx-platform,jbassen/edx-platform,andyzsf/edx,mtlchun/edx,mjirayu/sit_academy,angelapper/edx-platform,hamzehd/edx-platform,amir-qayyum-khan/edx-platform,martynovp/edx-platform,openfun/edx-platform,jruiperezv/ANALYSE,Unow/edx-platform,auferack08/edx-platform,etzhou/edx-platform,nikolas/edx-platform,LearnEra/LearnEraPlaftform,Lektorium-LLC/edx-platform,antoviaque/edx-platform,dcosentino/edx-platform,Kalyzee/edx-platform,wwj718/ANALYSE,synergeticsedx/deployment-wipro,BehavioralInsightsTeam/edx-platform,nagyistoce/edx-platform,sameetb-cuelogic/edx-platform-test,ferabra/edx-platform,nanolearningllc/edx-platfo
rm-cypress,vasyarv/edx-platform,CredoReference/edx-platform,cselis86/edx-platform,mcgachey/edx-platform,Stanford-Online/edx-platform,dkarakats/edx-platform,rhndg/openedx,a-parhom/edx-platform,AkA84/edx-platform,RPI-OPENEDX/edx-platform,JCBarahona/edX,pepeportela/edx-platform,fintech-circle/edx-platform,edx/edx-platform,pomegranited/edx-platform,mushtaqak/edx-platform,procangroup/edx-platform,dsajkl/reqiop,dsajkl/123,andyzsf/edx,deepsrijit1105/edx-platform,chand3040/cloud_that,cselis86/edx-platform,appsembler/edx-platform,jazkarta/edx-platform,MakeHer/edx-platform,nanolearningllc/edx-platform-cypress-2,itsjeyd/edx-platform,sameetb-cuelogic/edx-platform-test,AkA84/edx-platform,openfun/edx-platform,Stanford-Online/edx-platform,solashirai/edx-platform,Livit/Livit.Learn.EdX,raccoongang/edx-platform,peterm-itr/edx-platform,jbzdak/edx-platform,jazkarta/edx-platform,rhndg/openedx,vikas1885/test1,pepeportela/edx-platform,shubhdev/edx-platform,Shrhawk/edx-platform,MakeHer/edx-platform,bitifirefly/edx-platform,chrisndodge/edx-platform,vismartltd/edx-platform,cyanna/edx-platform,atsolakid/edx-platform,longmen21/edx-platform,motion2015/edx-platform,kursitet/edx-platform,ahmadio/edx-platform,xuxiao19910803/edx,WatanabeYasumasa/edx-platform,Shrhawk/edx-platform,iivic/BoiseStateX,raccoongang/edx-platform,halvertoluke/edx-platform,xingyepei/edx-platform,ovnicraft/edx-platform,morenopc/edx-platform,zerobatu/edx-platform,kmoocdev2/edx-platform,MakeHer/edx-platform,doismellburning/edx-platform,xuxiao19910803/edx-platform,torchingloom/edx-platform,analyseuc3m/ANALYSE-v1,pku9104038/edx-platform,jonathan-beard/edx-platform,hamzehd/edx-platform,shubhdev/openedx,miptliot/edx-platform,pomegranited/edx-platform,mbareta/edx-platform-ft,rismalrv/edx-platform,fly19890211/edx-platform,Edraak/edraak-platform,IndonesiaX/edx-platform,arifsetiawan/edx-platform,analyseuc3m/ANALYSE-v1,y12uc231/edx-platform,J861449197/edx-platform,knehez/edx-platform,raccoongang/edx-platform,don-github/edx-platform,a-pa
rhom/edx-platform,TeachAtTUM/edx-platform,pku9104038/edx-platform,bitifirefly/edx-platform,motion2015/a3,utecuy/edx-platform,carsongee/edx-platform,Ayub-Khan/edx-platform,mbareta/edx-platform-ft,valtech-mooc/edx-platform,mushtaqak/edx-platform,hastexo/edx-platform,gymnasium/edx-platform,bdero/edx-platform,fly19890211/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,jonathan-beard/edx-platform,shurihell/testasia,jzoldak/edx-platform,dkarakats/edx-platform,caesar2164/edx-platform,teltek/edx-platform,naresh21/synergetics-edx-platform,ampax/edx-platform,unicri/edx-platform,jswope00/griffinx,ubc/edx-platform,BehavioralInsightsTeam/edx-platform,ESOedX/edx-platform,msegado/edx-platform,cpennington/edx-platform,inares/edx-platform,nttks/jenkins-test,edx-solutions/edx-platform,arbrandes/edx-platform,CourseTalk/edx-platform,EDUlib/edx-platform,kamalx/edx-platform,Ayub-Khan/edx-platform,xuxiao19910803/edx,jswope00/GAI,solashirai/edx-platform,vikas1885/test1,Edraak/edx-platform,B-MOOC/edx-platform,mcgachey/edx-platform,bdero/edx-platform,chand3040/cloud_that,mtlchun/edx,doismellburning/edx-platform,arbrandes/edx-platform,morenopc/edx-platform,BehavioralInsightsTeam/edx-platform,teltek/edx-platform,cognitiveclass/edx-platform,SravanthiSinha/edx-platform,UOMx/edx-platform,vismartltd/edx-platform,alexthered/kienhoc-platform,Edraak/edx-platform,cpennington/edx-platform,Kalyzee/edx-platform,mcgachey/edx-platform,SivilTaram/edx-platform,kmoocdev/edx-platform,miptliot/edx-platform,olexiim/edx-platform,cselis86/edx-platform,DefyVentures/edx-platform,bigdatauniversity/edx-platform,gsehub/edx-platform,marcore/edx-platform,jazztpt/edx-platform,polimediaupv/edx-platform,TeachAtTUM/edx-platform,hkawasaki/kawasaki-aio8-2,polimediaupv/edx-platform,nagyistoce/edx-platform,synergeticsedx/deployment-wipro,shurihell/testasia,jamesblunt/edx-platform,Stanford-Online/edx-platform,ampax/edx-platform-backup,Endika/edx-platform,Softmotions/edx-platform,eduNEXT/edunext-platform,cecep-edu/edx-pl
atform,tiagochiavericosta/edx-platform,kmoocdev/edx-platform,mushtaqak/edx-platform,tanmaykm/edx-platform,rhndg/openedx,JCBarahona/edX,synergeticsedx/deployment-wipro,Shrhawk/edx-platform,naresh21/synergetics-edx-platform,caesar2164/edx-platform,polimediaupv/edx-platform,rue89-tech/edx-platform,zhenzhai/edx-platform,zofuthan/edx-platform,nttks/jenkins-test,jbzdak/edx-platform,msegado/edx-platform,longmen21/edx-platform,shashank971/edx-platform,atsolakid/edx-platform,chauhanhardik/populo_2,ZLLab-Mooc/edx-platform,carsongee/edx-platform,10clouds/edx-platform,hkawasaki/kawasaki-aio8-1,philanthropy-u/edx-platform,shubhdev/edxOnBaadal,eduNEXT/edx-platform,Stanford-Online/edx-platform,eemirtekin/edx-platform,DNFcode/edx-platform,jamiefolsom/edx-platform,proversity-org/edx-platform,IONISx/edx-platform,fly19890211/edx-platform,edx-solutions/edx-platform,olexiim/edx-platform,jswope00/griffinx,jelugbo/tundex,jbassen/edx-platform,vikas1885/test1,miptliot/edx-platform,gymnasium/edx-platform,eemirtekin/edx-platform,ahmadio/edx-platform,nttks/jenkins-test,CourseTalk/edx-platform,chauhanhardik/populo,doismellburning/edx-platform,appsembler/edx-platform,Lektorium-LLC/edx-platform,xingyepei/edx-platform,beni55/edx-platform,jswope00/GAI,ak2703/edx-platform,mcgachey/edx-platform,kamalx/edx-platform,jamiefolsom/edx-platform,DNFcode/edx-platform,arifsetiawan/edx-platform,abdoosh00/edraak,shubhdev/edxOnBaadal,xuxiao19910803/edx,vismartltd/edx-platform,longmen21/edx-platform,leansoft/edx-platform,shashank971/edx-platform,MakeHer/edx-platform,Endika/edx-platform,mitocw/edx-platform,benpatterson/edx-platform,benpatterson/edx-platform,utecuy/edx-platform,zofuthan/edx-platform,nikolas/edx-platform,ferabra/edx-platform,pomegranited/edx-platform,itsjeyd/edx-platform,ampax/edx-platform-backup,andyzsf/edx,shubhdev/edxOnBaadal,fly19890211/edx-platform,lduarte1991/edx-platform,xuxiao19910803/edx-platform,waheedahmed/edx-platform,doismellburning/edx-platform,hkawasaki/kawasaki-aio8-0,yokose-ks/edx-p
latform,jamiefolsom/edx-platform,auferack08/edx-platform,JCBarahona/edX,jolyonb/edx-platform,shashank971/edx-platform,beacloudgenius/edx-platform,vikas1885/test1,shubhdev/edxOnBaadal,jelugbo/tundex,kursitet/edx-platform,jzoldak/edx-platform,bdero/edx-platform,eemirtekin/edx-platform,devs1991/test_edx_docmode,ahmedaljazzar/edx-platform,kmoocdev2/edx-platform,shurihell/testasia,ahmedaljazzar/edx-platform,beni55/edx-platform,don-github/edx-platform,zhenzhai/edx-platform,beacloudgenius/edx-platform,edry/edx-platform,jjmiranda/edx-platform,xinjiguaike/edx-platform,lduarte1991/edx-platform,arbrandes/edx-platform,ampax/edx-platform-backup,JioEducation/edx-platform,edx-solutions/edx-platform,nttks/edx-platform,10clouds/edx-platform,xingyepei/edx-platform,appsembler/edx-platform,alexthered/kienhoc-platform,edx-solutions/edx-platform,RPI-OPENEDX/edx-platform,doganov/edx-platform,antoviaque/edx-platform,LICEF/edx-platform,RPI-OPENEDX/edx-platform,peterm-itr/edx-platform,mtlchun/edx,halvertoluke/edx-platform,cognitiveclass/edx-platform,Softmotions/edx-platform,dsajkl/reqiop,vismartltd/edx-platform,morenopc/edx-platform,playm2mboy/edx-platform,MakeHer/edx-platform,Livit/Livit.Learn.EdX,olexiim/edx-platform,antoviaque/edx-platform,ak2703/edx-platform,olexiim/edx-platform,shabab12/edx-platform,caesar2164/edx-platform,ZLLab-Mooc/edx-platform,jbassen/edx-platform,mitocw/edx-platform,motion2015/edx-platform,Edraak/edraak-platform,utecuy/edx-platform,Unow/edx-platform,ampax/edx-platform-backup,angelapper/edx-platform,abdoosh00/edraak,jonathan-beard/edx-platform,ferabra/edx-platform,IONISx/edx-platform,deepsrijit1105/edx-platform,OmarIthawi/edx-platform,OmarIthawi/edx-platform,shubhdev/openedx,ubc/edx-platform,franosincic/edx-platform,angelapper/edx-platform,cselis86/edx-platform,ahmadio/edx-platform,wwj718/edx-platform,rismalrv/edx-platform,y12uc231/edx-platform,unicri/edx-platform,IndonesiaX/edx-platform,romain-li/edx-platform,proversity-org/edx-platform,AkA84/edx-platform,halvertolu
ke/edx-platform,playm2mboy/edx-platform,nanolearning/edx-platform,prarthitm/edxplatform,chauhanhardik/populo,chand3040/cloud_that,xuxiao19910803/edx-platform,waheedahmed/edx-platform,playm2mboy/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,tanmaykm/edx-platform,eemirtekin/edx-platform,hkawasaki/kawasaki-aio8-0,eduNEXT/edunext-platform,tiagochiavericosta/edx-platform,dsajkl/123,Livit/Livit.Learn.EdX,SivilTaram/edx-platform,hamzehd/edx-platform,teltek/edx-platform,martynovp/edx-platform,mtlchun/edx,nanolearning/edx-platform,chauhanhardik/populo,valtech-mooc/edx-platform,Kalyzee/edx-platform,SivilTaram/edx-platform,jbassen/edx-platform,hmcmooc/muddx-platform,zofuthan/edx-platform,eestay/edx-platform,torchingloom/edx-platform,philanthropy-u/edx-platform,arbrandes/edx-platform,nanolearningllc/edx-platform-cypress,AkA84/edx-platform,cecep-edu/edx-platform,shurihell/testasia,unicri/edx-platform,yokose-ks/edx-platform,etzhou/edx-platform,polimediaupv/edx-platform,doganov/edx-platform,proversity-org/edx-platform,motion2015/edx-platform,benpatterson/edx-platform,devs1991/test_edx_docmode,mjirayu/sit_academy,BehavioralInsightsTeam/edx-platform,hkawasaki/kawasaki-aio8-1,mahendra-r/edx-platform,chauhanhardik/populo_2,Lektorium-LLC/edx-platform,IONISx/edx-platform,jzoldak/edx-platform,mitocw/edx-platform,hamzehd/edx-platform,chrisndodge/edx-platform,ubc/edx-platform,atsolakid/edx-platform,carsongee/edx-platform,unicri/edx-platform | cms/djangoapps/contentstore/management/commands/migrate_to_split.py | cms/djangoapps/contentstore/management/commands/migrate_to_split.py | """
Django management command to migrate a course from the old Mongo modulestore
to the new split-Mongo modulestore.
"""
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from xmodule.modulestore import Location
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.split_migrator import SplitMigrator
from xmodule.modulestore import InvalidLocationError
from xmodule.modulestore.django import loc_mapper
def user_from_str(identifier):
"""
Return a user identified by the given string. The string could be an email
address, or a stringified integer corresponding to the ID of the user in
the database. If no user could be found, a User.DoesNotExist exception
will be raised.
"""
try:
user_id = int(identifier)
except ValueError:
return User.objects.get(email=identifier)
else:
return User.objects.get(id=user_id)
class Command(BaseCommand):
"Migrate a course from old-Mongo to split-Mongo"
help = "Migrate a course from old-Mongo to split-Mongo"
args = "location email <locator>"
def parse_args(self, *args):
"""
Return a three-tuple of (location, user, locator_string).
If the user didn't specify a locator string, the third return value
will be None.
"""
if len(args) < 2:
raise CommandError(
"migrate_to_split requires at least two arguments: "
"a location and a user identifier (email or ID)"
)
try:
location = Location(args[0])
except InvalidLocationError:
raise CommandError("Invalid location string {}".format(args[0]))
try:
user = user_from_str(args[1])
except User.DoesNotExist:
raise CommandError("No user found identified by {}".format(args[1]))
try:
package_id = args[2]
except IndexError:
package_id = None
return location, user, package_id
def handle(self, *args, **options):
location, user, package_id = self.parse_args(*args)
migrator = SplitMigrator(
draft_modulestore=modulestore('default'),
direct_modulestore=modulestore('direct'),
split_modulestore=modulestore('split'),
loc_mapper=loc_mapper(),
)
migrator.migrate_mongo_course(location, user, package_id)
| """
Django management command to migrate a course from the old Mongo modulestore
to the new split-Mongo modulestore.
"""
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from xmodule.modulestore import Location
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.split_migrator import SplitMigrator
from xmodule.modulestore import InvalidLocationError
from xmodule.modulestore.django import loc_mapper
def user_from_str(identifier):
"""
Return a user identified by the given string. The string could be an email
address, or a stringified integer corresponding to the ID of the user in
the database. If no user could be found, a User.DoesNotExist exception
will be raised.
"""
try:
user_id = int(identifier)
except ValueError:
return User.objects.get(email=identifier)
else:
return User.objects.get(id=user_id)
class Command(BaseCommand):
"Migrate a course from old-Mongo to split-Mongo"
help = "Migrate a course from old-Mongo to split-Mongo"
args = "location email <locator>"
def parse_args(self, *args):
"""
Return a three-tuple of (location, user, locator_string).
If the user didn't specify a locator string, the third return value
will be None.
"""
if len(args) < 2:
raise CommandError(
"migrate_to_split requires at least two arguments: "
"a location and a user identifier (email or ID)"
)
try:
location = Location(args[0])
except InvalidLocationError:
raise CommandError("Invalid location string {}".format(args[0]))
try:
user = user_from_str(args[1])
except User.DoesNotExist:
raise CommandError("No user found identified by {}".format(args[1]))
try:
locator_string = args[2]
except IndexError:
locator_string = None
return location, user, locator_string
def handle(self, *args, **options):
location, user, locator_string = self.parse_args(*args)
migrator = SplitMigrator(
draft_modulestore=modulestore('default'),
direct_modulestore=modulestore('direct'),
split_modulestore=modulestore('split'),
loc_mapper=loc_mapper(),
)
migrator.migrate_mongo_course(location, user, locator_string)
| agpl-3.0 | Python |
c61187382c968c3018f88637806874ddd0b63b71 | add log for debug | ieiayaobb/lushi8,ieiayaobb/lushi8,ieiayaobb/lushi8 | web/views.py | web/views.py | import requests
from django.http import Http404
from django.shortcuts import render, render_to_response, redirect
# Create your views here.
from django.template import RequestContext
from web.fetch import Fetcher
from settings import LEAN_CLOUD_ID, LEAN_CLOUD_SECRET
import leancloud
# @api_view(('GET',))
# def api_root(request, format=None):
# return Response({
# 'chairmans': reverse('chairman-list', request=request, format=format),
# })
def get_index(request):
# response = requests.get('http://127.0.0.1:8000/api/chairmans/')
# chairmans = response.json()
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
Chairman = leancloud.Object.extend('Chairman')
query = Chairman.query
query.select('type', 'href', 'id', 'title', 'img', 'name', 'num')
query.add_descending('num')
query_list = query.find()
chairmans = []
for chairman in query_list:
print chairman
chairman_view = {}
chairman_view.type = chairman.get('type')
chairman_view.href = chairman.get('href')
chairman_view.id = chairman.get('id')
chairman_view.title = chairman.get('title')
chairman_view.img = chairman.get('img')
chairman_view.name = chairman.get('name')
chairman_view.num = chairman.get('num')
chairmans.append(chairman_view)
return render_to_response('index.html', locals())
def fetch(request):
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
query = leancloud.Query('Chairman')
allDataCompleted = False
batch = 0
limit = 1000
while not allDataCompleted:
query.limit(limit)
query.skip(batch * limit)
query.add_ascending('createdAt')
resultList = query.find()
if len(resultList) < limit:
allDataCompleted = True
leancloud.Object.destroy_all(resultList)
batch += 1
fetcher = Fetcher()
fetcher.fetch_cc()
fetcher.fetch_douyu()
fetcher.fetch_longzhu()
fetcher.fetch_quanmin()
fetcher.fetch_xiongmao()
fetcher.fetch_zhanqi()
fetcher.fetch_huya()
for chairman in fetcher.chairmans:
try:
chairman.save()
except Exception, e:
print e
return redirect("/")
| import requests
from django.http import Http404
from django.shortcuts import render, render_to_response, redirect
# Create your views here.
from django.template import RequestContext
from web.fetch import Fetcher
from settings import LEAN_CLOUD_ID, LEAN_CLOUD_SECRET
import leancloud
# @api_view(('GET',))
# def api_root(request, format=None):
# return Response({
# 'chairmans': reverse('chairman-list', request=request, format=format),
# })
def get_index(request):
# response = requests.get('http://127.0.0.1:8000/api/chairmans/')
# chairmans = response.json()
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
Chairman = leancloud.Object.extend('Chairman')
query = Chairman.query
query.select('type', 'href', 'id', 'title', 'img', 'name', 'num')
query.add_descending('num')
query_list = query.find()
chairmans = []
for chairman in query_list:
chairman_view = {}
chairman_view.type = chairman.get('type')
chairman_view.href = chairman.get('href')
chairman_view.id = chairman.get('id')
chairman_view.title = chairman.get('title')
chairman_view.img = chairman.get('img')
chairman_view.name = chairman.get('name')
chairman_view.num = chairman.get('num')
chairmans.append(chairman_view)
return render_to_response('index.html', locals())
def fetch(request):
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
query = leancloud.Query('Chairman')
allDataCompleted = False
batch = 0
limit = 1000
while not allDataCompleted:
query.limit(limit)
query.skip(batch * limit)
query.add_ascending('createdAt')
resultList = query.find()
if len(resultList) < limit:
allDataCompleted = True
leancloud.Object.destroy_all(resultList)
batch += 1
fetcher = Fetcher()
fetcher.fetch_cc()
fetcher.fetch_douyu()
fetcher.fetch_longzhu()
fetcher.fetch_quanmin()
fetcher.fetch_xiongmao()
fetcher.fetch_zhanqi()
fetcher.fetch_huya()
for chairman in fetcher.chairmans:
try:
chairman.save()
except Exception, e:
print e
return redirect("/")
| mit | Python |
0921f78660b7b0784ebe2fa586dd54551704699e | Fix fix_gir.py to work with ginterfaces and to support delegates. | GNOME/caribou,GNOME/caribou,GNOME/caribou | tools/fix_gir.py | tools/fix_gir.py | #!/usr/bin/python
from xml.dom import minidom
def purge_white_space_and_fix_namespace(node, indent=0):
if getattr(node, "tagName", None) == "namespace":
name = node.getAttribute("name")
node.setAttribute("name", name.lstrip('_'))
for child in [c for c in node.childNodes]:
if child.nodeType == node.TEXT_NODE or \
getattr(child, "tagName", None) == "annotation":
node.removeChild(child)
continue
purge_white_space_and_fix_namespace(child, indent+1)
def find_ancestor(node, name):
if getattr(node, "tagName", None) == name:
return node
parent = getattr(node, "parentNode", None)
if not parent:
return None
return find_ancestor(parent, name)
def fix_vfuncs(dom):
for f in dom.getElementsByTagName("callback"):
record = find_ancestor(f, "record")
if not record:
continue
name = record.getAttribute("name")
cname = record.getAttribute("c:type")
assert(name.endswith("Class") or name.endswith("Iface"))
assert(cname.endswith("Class") or name.endswith("Iface"))
params = (f.getElementsByTagName("parameters") or [None])[0]
if not params:
params = dom.createElement("parameters")
f.insertBefore(params, f.firstChild)
param = dom.createElement("parameter")
param.setAttribute("name", "self")
param.setAttribute("transfer-ownership", "none")
ptype = dom.createElement("type")
ptype.setAttribute("name", name[:-5])
ptype.setAttribute("c:type", cname[:-5])
param.appendChild(ptype)
params.insertBefore(param, params.firstChild)
if __name__ == "__main__":
import sys
if len(sys.argv) != 2:
print "supply a gir file"
sys.exit(1)
dom = minidom.parse(sys.argv[-1])
purge_white_space_and_fix_namespace(dom)
fix_vfuncs(dom)
print dom.toprettyxml(indent=" ", newl="\n")
| #!/usr/bin/python
from xml.dom import minidom
def purge_white_space_and_fix_namespace(node, indent=0):
if getattr(node, "tagName", None) == "namespace":
name = node.getAttribute("name")
node.setAttribute("name", name.lstrip('_'))
for child in [c for c in node.childNodes]:
if child.nodeType == node.TEXT_NODE or \
getattr(child, "tagName", None) == "annotation":
node.removeChild(child)
continue
purge_white_space_and_fix_namespace(child, indent+1)
def find_ancestor(node, name):
if getattr(node, "tagName") == name:
return node
parent = getattr(node, "parentNode", None)
if not parent:
return None
return find_ancestor(parent, name)
def fix_vfuncs(dom):
for f in dom.getElementsByTagName("callback"):
record = find_ancestor(f, "record")
if not record:
continue
name = record.getAttribute("name")
cname = record.getAttribute("c:type")
assert(name.endswith("Class"))
assert(cname.endswith("Class"))
params = (f.getElementsByTagName("parameters") or [None])[0]
if not params:
params = dom.createElement("parameters")
f.insertBefore(params, f.firstChild)
param = dom.createElement("parameter")
param.setAttribute("name", "self")
param.setAttribute("transfer-ownership", "none")
ptype = dom.createElement("type")
ptype.setAttribute("name", name[:-5])
ptype.setAttribute("c:type", cname[:-5])
param.appendChild(ptype)
params.insertBefore(param, params.firstChild)
if __name__ == "__main__":
import sys
if len(sys.argv) != 2:
print "supply a gir file"
sys.exit(1)
dom = minidom.parse(sys.argv[-1])
purge_white_space_and_fix_namespace(dom)
fix_vfuncs(dom)
print dom.toprettyxml(indent=" ", newl="\n")
| lgpl-2.1 | Python |
2c4cf38b7251ddffaba954f71bbca9632123777c | Add start_wizbit_server function that registers and publishes a wizbit server. | wizbit-archive/wizbit,wizbit-archive/wizbit | wizd/wizd.py | wizd/wizd.py | #! /usr/bin/env python
import sys
import socket
import os
import SimpleXMLRPCServer
import gobject
from wizbit import ServicePublisher, ServiceBrowser
WIZBIT_SERVER_PORT = 3492
from wizbit import Shares, Directory
from wizbit import *
class WizbitServer():
def getShares(self):
shares = Shares.getShares()
return shares
def getPath(self, uuid):
shares = Shares.getShares()
for id, shareId, directory in shares:
if uuid == id:
break
return directory
def getLastConfSeen(self, uuid):
return "Not Implemented"
def setConf(self, uuid, confstring):
return "Not Implemented"
def getConf(self, uuid):
shares = Shares.getShares()
for id, shareId, directory in shares:
if uuid == id:
break
wizpath = Paths(directory)
file = open(wizpath.getWizconf(), "r")
conf = file.read()
file.close()
return conf
def pushNotify(self, dirId, remoteShareId, host):
#For every local directory with the same shareId, pull
#from the remote directory
shares = Shares.getShares()
for id, localShareId, directory in shares:
if localShareId == remoteShareId:
Directory.pull(directory, dirId, host)
def server_socket_error():
print "RPC server socket was disconnected, exiting"
global main_loop
main_loop.quit()
def server_callback(source, cb_condition, server):
server.handle_request()
def start_wizbit_server():
servinst = WizbitServer()
server = SimpleXMLRPCServer.SimpleXMLRPCServer(("", 0))
server.register_instance(servinst)
server.register_introspection_functions()
gobject.io_add_watch (server.fileno(), gobject.IO_IN, server_callback, server)
gobject.io_add_watch (server.fileno(), gobject.IO_HUP | gobject.IO_ERR, server_socket_error)
sp = ServicePublisher("Wizbit", "_wizbit._tcp", server.server_address[1])
sb = ServiceBrowser("_wizbit._tcp")
def main(args):
global main_loop
start_wizbit_server()
main_loop = gobject.MainLoop()
try:
main_loop.run()
except KeyboardInterrupt:
pass
if __name__ == '__main__':
sys.exit(main(sys.argv))
| #! /usr/bin/env python
import sys
import socket
import os
import SimpleXMLRPCServer
import gobject
from wizbit import ServicePublisher, ServiceBrowser
WIZBIT_SERVER_PORT = 3492
from wizbit import Shares, Directory
from wizbit import *
class WizbitServer():
def getShares(self):
shares = Shares.getShares()
return shares
def getPath(self, uuid):
shares = Shares.getShares()
for id, shareId, directory in shares:
if uuid == id:
break
return directory
def getLastConfSeen(self, uuid):
return "Not Implemented"
def setConf(self, uuid, confstring):
return "Not Implemented"
def getConf(self, uuid):
shares = Shares.getShares()
for id, shareId, directory in shares:
if uuid == id:
break
wizpath = Paths(directory)
file = open(wizpath.getWizconf(), "r")
conf = file.read()
file.close()
return conf
def pushNotify(self, dirId, remoteShareId, host):
#For every local directory with the same shareId, pull
#from the remote directory
shares = Shares.getShares()
for id, localShareId, directory in shares:
if localShareId == remoteShareId:
Directory.pull(directory, dirId, host)
def server_socket_error():
print "RPC server socket was disconnected, exiting"
global main_loop
main_loop.quit()
def server_callback(source, cb_condition, server):
server.handle_request()
def main(args):
servinst = WizbitServer()
server = SimpleXMLRPCServer.SimpleXMLRPCServer(("", 0))
server.register_instance(servinst)
server.register_introspection_functions()
gobject.io_add_watch (server.fileno(), gobject.IO_IN, server_callback, server)
gobject.io_add_watch (server.fileno(), gobject.IO_HUP | gobject.IO_ERR, server_socket_error)
sp = ServicePublisher("Wizbit", "_wizbit._tcp", server.server_address[1])
sb = ServiceBrowser("_wizbit._tcp")
global main_loop
main_loop = gobject.MainLoop()
try:
main_loop.run()
except KeyboardInterrupt:
pass
if __name__ == '__main__':
sys.exit(main(sys.argv))
| lgpl-2.1 | Python |
5258c7d70796a03361ad865a15fd3896bb7a95f1 | Fix tests | mhcomm/pypeman,Zluurk/pypeman,jrmi/pypeman,mhcomm/pypeman,Zluurk/pypeman,jrmi/pypeman,Zluurk/pypeman,jrmi/pypeman,mhcomm/pypeman | pypeman/tests/test_nodes.py | pypeman/tests/test_nodes.py | import unittest
import asyncio
import logging
class FakeChannel():
def __init__(self):
self.logger = logging.getLogger()
self.uuid = 'fakeChannel'
class NodesTests(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
#asyncio.set_event_loop(None)
def test_log_node(self):
""" if Log() node is functionnal """
from pypeman.nodes import Log
from pypeman import message
n = Log()
n.channel = FakeChannel()
m = message.Message()
@asyncio.coroutine
def go():
ret = yield from n.handle(m)
return ret
self.loop.run_until_complete(go())
def test_json_to_python_node(self):
""" if JsonToPython() node is functionnal """
from pypeman.nodes import JsonToPython
from pypeman import message
n = JsonToPython()
n.channel = FakeChannel()
m = message.Message()
m.payload = '{"test":2}'
@asyncio.coroutine
def go():
ret = yield from n.handle(m)
return ret
self.loop.run_until_complete(go())
| import unittest
import asyncio
class FakeChannel():
def __init__(self):
self.uuid = 'fakeChannel'
class NodesTests(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
#asyncio.set_event_loop(None)
def test_log_node(self):
""" if Log() node is functionnal """
from pypeman.nodes import Log
from pypeman import message
n = Log()
n.channel = FakeChannel()
m = message.Message()
@asyncio.coroutine
def go():
ret = yield from n.handle(m)
return ret
self.loop.run_until_complete(go())
def test_json_to_python_node(self):
""" if JsonToPython() node is functionnal """
from pypeman.nodes import JsonToPython
from pypeman import message
n = JsonToPython()
n.channel = FakeChannel()
m = message.Message()
m.payload = '{"test":2}'
@asyncio.coroutine
def go():
ret = yield from n.handle(m)
return ret
self.loop.run_until_complete(go())
| apache-2.0 | Python |
175cfe45aba554d1544be3ee71bdb8a7b499d879 | add radius in request | cyplp/wtm | wtm/views.py | wtm/views.py | import urllib2
from lxml import etree
from deform import Form
from pyramid.view import view_config
from wtm.schemas.home import HomeSchema
@view_config(route_name='home', renderer='templates/home.pt')
def home(request):
"""
home page
"""
homeForm = Form(HomeSchema(), buttons=('submit',), action=request.route_path('addContent'))
return {'form': homeForm.render()}
@view_config(route_name='addContent', renderer='json')
def addContent(request):
baseURL = 'http://www.overpass-api.de/api/interpreter'
data = 'node(around:%s.0,%s,%s)["amenity"="cafe"];out;' % (request.POST['dist'],
request.POST['lat'],
request.POST['lon'])
print data
url = urllib2.Request(baseURL, data)
xmlData = urllib2.urlopen(url).read()
xml = etree.fromstring(xmlData)
for node in xml.xpath('node/tag[@k="name"]'):
print node.get('v')
return ''
| import urllib2
from lxml import etree
from deform import Form
from pyramid.view import view_config
from wtm.schemas.home import HomeSchema
@view_config(route_name='home', renderer='templates/home.pt')
def home(request):
"""
home page
"""
homeForm = Form(HomeSchema(), buttons=('submit',), action=request.route_path('addContent'))
return {'form': homeForm.render()}
@view_config(route_name='addContent', renderer='json')
def addContent(request):
baseURL = 'http://www.overpass-api.de/api/interpreter'
data = 'node(around:250.0,%s,%s)["amenity"="cafe"];out;' % (request.POST['lat'], request.POST['lon'])
print data
url = urllib2.Request(baseURL, data)
xmlData = urllib2.urlopen(url).read()
xml = etree.fromstring(xmlData)
for node in xml.xpath('node/tag[@k="name"]'):
print node.get('v')
return ''
| mit | Python |
2313a796842cbe65563a62fe12edec06c4112531 | Add YEARS_PEY_DAY. | GeoscienceAustralia/PyRate,GeoscienceAustralia/PyRate | pyrate/core/ifgconstants.py | pyrate/core/ifgconstants.py | # This Python module is part of the PyRate software package.
#
# Copyright 2017 Geoscience Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This Python module contains a collection of constants used in
various components of the PyRate software
"""
# lookup keys for the metadata fields in PyRate GeoTIFF files
PYRATE_NCOLS = 'NCOLS'
PYRATE_NROWS = 'NROWS'
PYRATE_X_STEP = 'X_STEP'
PYRATE_Y_STEP = 'Y_STEP'
PYRATE_LAT = 'LAT'
PYRATE_LONG = 'LONG'
MASTER_DATE = 'MASTER_DATE'
MASTER_TIME = 'MASTER_TIME'
SLAVE_DATE = 'SLAVE_DATE'
SLAVE_TIME = 'SLAVE_TIME'
EPOCH_DATE = 'EPOCH_DATE'
PYRATE_DATUM = 'DATUM'
PYRATE_TIME_SPAN = 'TIME_SPAN_YEAR'
PYRATE_WAVELENGTH_METRES = 'WAVELENGTH_METRES'
PYRATE_INCIDENCE_DEGREES = 'INCIDENCE_DEGREES'
PYRATE_INSAR_PROCESSOR = 'INSAR_PROCESSOR'
PYRATE_WEATHER_ERROR = 'WEATHER_ERROR'
PYRATE_APS_ERROR = 'APS_ERROR'
PYRATE_MAXVAR = 'CVD_MAXVAR'
PYRATE_ALPHA = 'CVD_ALPHA'
COHERENCE = 'COHERENCE_MASKED_MULTILOOKED_IFG'
MULTILOOKED = 'MULTILOOKED_IFG'
ORIG = 'ORIGINAL_IFG'
DEM = 'ORIGINAL_DEM'
MLOOKED_DEM = 'MULTILOOKED_DEM'
INCIDENCE = 'INCIDENCE_ANGLE_MAP'
MLOOKED_INC = 'MULTILOOKED_INCIDENCE_ANGLE_MAP'
INCR = 'INCREMENTAL_TIME_SLICE'
CUML = 'CUMULATIVE_TIME_SLICE'
LINRATE = 'LINEAR_RATE_MAP'
LINERROR = 'LINEAR_RATE_ERROR_MAP'
LINSAMP = 'LINEAR_RATE_SAMPLES'
PYRATE_ORBITAL_ERROR = 'ORBITAL_ERROR'
ORB_REMOVED = 'REMOVED'
APS_REMOVED = 'REMOVED'
PYRATE_REF_PHASE = 'REFERENCE_PHASE'
REF_PHASE_REMOVED = 'REMOVED'
NAN_STATUS = 'NAN_STATUS'
NAN_CONVERTED = 'CONVERTED'
DATA_TYPE = 'DATA_TYPE'
DATA_UNITS = 'DATA_UNITS'
DAYS_PER_YEAR = 365.25 # span of year, not a calendar year
YEARS_PER_DAY = 1 / DAY_PER_YEAR
SPEED_OF_LIGHT_METRES_PER_SECOND = 3e8
MM_PER_METRE = 1000
| # This Python module is part of the PyRate software package.
#
# Copyright 2017 Geoscience Australia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This Python module contains a collection of constants used in
various components of the PyRate software
"""
# lookup keys for the metadata fields in PyRate GeoTIFF files
PYRATE_NCOLS = 'NCOLS'
PYRATE_NROWS = 'NROWS'
PYRATE_X_STEP = 'X_STEP'
PYRATE_Y_STEP = 'Y_STEP'
PYRATE_LAT = 'LAT'
PYRATE_LONG = 'LONG'
MASTER_DATE = 'MASTER_DATE'
MASTER_TIME = 'MASTER_TIME'
SLAVE_DATE = 'SLAVE_DATE'
SLAVE_TIME = 'SLAVE_TIME'
EPOCH_DATE = 'EPOCH_DATE'
PYRATE_DATUM = 'DATUM'
PYRATE_TIME_SPAN = 'TIME_SPAN_YEAR'
PYRATE_WAVELENGTH_METRES = 'WAVELENGTH_METRES'
PYRATE_INCIDENCE_DEGREES = 'INCIDENCE_DEGREES'
PYRATE_INSAR_PROCESSOR = 'INSAR_PROCESSOR'
PYRATE_WEATHER_ERROR = 'WEATHER_ERROR'
PYRATE_APS_ERROR = 'APS_ERROR'
PYRATE_MAXVAR = 'CVD_MAXVAR'
PYRATE_ALPHA = 'CVD_ALPHA'
COHERENCE = 'COHERENCE_MASKED_MULTILOOKED_IFG'
MULTILOOKED = 'MULTILOOKED_IFG'
ORIG = 'ORIGINAL_IFG'
DEM = 'ORIGINAL_DEM'
MLOOKED_DEM = 'MULTILOOKED_DEM'
INCIDENCE = 'INCIDENCE_ANGLE_MAP'
MLOOKED_INC = 'MULTILOOKED_INCIDENCE_ANGLE_MAP'
INCR = 'INCREMENTAL_TIME_SLICE'
CUML = 'CUMULATIVE_TIME_SLICE'
LINRATE = 'LINEAR_RATE_MAP'
LINERROR = 'LINEAR_RATE_ERROR_MAP'
LINSAMP = 'LINEAR_RATE_SAMPLES'
PYRATE_ORBITAL_ERROR = 'ORBITAL_ERROR'
ORB_REMOVED = 'REMOVED'
APS_REMOVED = 'REMOVED'
PYRATE_REF_PHASE = 'REFERENCE_PHASE'
REF_PHASE_REMOVED = 'REMOVED'
NAN_STATUS = 'NAN_STATUS'
NAN_CONVERTED = 'CONVERTED'
DATA_TYPE = 'DATA_TYPE'
DATA_UNITS = 'DATA_UNITS'
DAYS_PER_YEAR = 365.25 # span of year, not a calendar year
SPEED_OF_LIGHT_METRES_PER_SECOND = 3e8
MM_PER_METRE = 1000
| apache-2.0 | Python |
d43cf2adeb5bc5e5546dbf58532bfc283fc94ea8 | fix sort order of combined citation information | rafaqz/citation.vim | python/citation_vim/item.py | python/citation_vim/item.py | # -*- coding:utf-8 -*-
import collections
from citation_vim.utils import compat_str, is_current
class Item(object):
"""
Intermediary object between bibtex/zotero and unite source output.
"""
def combine(self):
pairs = collections.OrderedDict([
('Key', self.key),
('Title', self.title),
('Author(s)', self.author),
('Date', self.date),
('Tags', self.tags),
('Collections', ', '.join(self.collections)),
('Publication', self.publication),
('Issue', self.issue),
('Volume', self.volume),
('Pages', self.pages),
('Publisher', self.publisher),
('Language', self.language),
('Abstract', self.abstract),
('Notes', self.notes),
('File(s)', self.file),
('URL', self.url),
('DOI', self.doi),
('ISBN', self.isbn)
])
self.combined = u"Available citation information:\n"
for key, value in pairs.items():
if value:
self.combined += " " + key + " : " + compat_str(value) + "\n"
| # -*- coding:utf-8 -*-
from citation_vim.utils import compat_str, is_current
class Item(object):
"""
Intermediary object between bibtex/zotero and unite source output.
"""
def combine(self):
pairs = {
'Key': self.key,
'Title': self.title,
'Author(s)': self.author,
'Date': self.date,
'Tags': self.tags,
'Collections': ', '.join(self.collections),
'Publication': self.publication,
'Issue': self.issue,
'Volume': self.volume,
'Pages': self.pages,
'Publisher': self.publisher,
'Language': self.language,
'Abstract': self.abstract,
'Notes': self.notes,
'File(s)': self.file,
'URL': self.url,
'DOI': self.doi,
'ISBN': self.isbn}
self.combined = u"Available citation information:\n"
for key, value in pairs.items():
if value:
self.combined += " " + key + " : " + compat_str(value) + "\n"
| mit | Python |
fb786e6fa254bf9b041b58ae3ba524257892bea8 | Make payloads larger for tests. | mvaled/sentry,alexm92/sentry,looker/sentry,mvaled/sentry,mvaled/sentry,looker/sentry,beeftornado/sentry,mvaled/sentry,fotinakis/sentry,ifduyue/sentry,ifduyue/sentry,looker/sentry,JackDanger/sentry,JamesMura/sentry,BuildingLink/sentry,beeftornado/sentry,alexm92/sentry,nicholasserra/sentry,imankulov/sentry,JamesMura/sentry,alexm92/sentry,gencer/sentry,zenefits/sentry,nicholasserra/sentry,mvaled/sentry,daevaorn/sentry,BuildingLink/sentry,jean/sentry,mvaled/sentry,jean/sentry,gencer/sentry,ifduyue/sentry,jean/sentry,jean/sentry,jean/sentry,beeftornado/sentry,JackDanger/sentry,fotinakis/sentry,gencer/sentry,fotinakis/sentry,zenefits/sentry,JamesMura/sentry,looker/sentry,JamesMura/sentry,fotinakis/sentry,zenefits/sentry,ifduyue/sentry,looker/sentry,mitsuhiko/sentry,ifduyue/sentry,BuildingLink/sentry,nicholasserra/sentry,daevaorn/sentry,daevaorn/sentry,zenefits/sentry,imankulov/sentry,imankulov/sentry,JackDanger/sentry,gencer/sentry,daevaorn/sentry,BuildingLink/sentry,zenefits/sentry,mitsuhiko/sentry,BuildingLink/sentry,JamesMura/sentry,gencer/sentry | timelines.py | timelines.py | from sentry.utils.runner import configure
configure()
import contextlib
import functools
import logging
import random
import sys
import time
import uuid
from sentry.app import timelines
from sentry.timelines.redis import Record
logging.basicConfig(level=logging.DEBUG)
@contextlib.contextmanager
def timer(preamble):
start = time.time()
yield
print '{0} in {1} ms.'.format(preamble, (time.time() - start) * 1000)
# Load a bunch of records.
n_timelines = int(sys.argv[1])
n_records = int(sys.argv[2])
payload = ' ' * 12000
calls = []
with timer('Generated {0} records to be loaded into {1} timelines'.format(n_records, n_timelines)):
for i in xrange(0, n_records):
p = random.randint(1, n_timelines)
record = Record(uuid.uuid1().hex, payload, time.time())
calls.append(functools.partial(timelines.add, 'projects/{0}'.format(p), record))
with timer('Loaded {0} records'.format(len(calls))):
for call in calls:
call()
# Move them into the "ready" state.
ready = set()
with timer('Scheduled timelines for digestion'):
for chunk in timelines.schedule(time.time()):
for timeline, timestamp in chunk:
ready.add(timeline)
# Run them through the digestion process.
with timer('Digested {0} timelines'.format(len(ready))):
for timeline in ready:
with timelines.digest(timeline) as records:
i = 0
# Iterate through the records to ensure that all data is deserialized.
for i, record in enumerate(records, 1):
pass
# Run the scheduler again (using a future cutoff time to accomodate for backoff.)
ready.clear()
with timer('Scheduled timelines for digestion'):
for chunk in timelines.schedule(time.time() + timelines.backoff(1)):
for timeline, timestamp in chunk:
ready.add(timeline)
# Run them through the digestion process again (this should result in all of
# the items being taken out of the schedule.)
with timer('Digested {0} timelines'.format(len(ready))):
for timeline in ready:
with timelines.digest(timeline) as records:
i = 0
for i, record in enumerate(records, 1):
pass
# Check to make sure we're not leaking any data.
with timelines.cluster.all() as client:
result = client.keys('*')
for host, value in result.value.iteritems():
assert not value
| from sentry.utils.runner import configure
configure()
import contextlib
import functools
import logging
import random
import sys
import time
import uuid
from sentry.app import timelines
from sentry.timelines.redis import Record
logging.basicConfig(level=logging.DEBUG)
@contextlib.contextmanager
def timer(preamble):
start = time.time()
yield
print '{0} in {1} ms.'.format(preamble, (time.time() - start) * 1000)
# Load a bunch of records.
n_timelines = int(sys.argv[1])
n_records = int(sys.argv[2])
calls = []
with timer('Generated {0} records to be loaded into {1} timelines'.format(n_records, n_timelines)):
for i in xrange(0, n_records):
p = random.randint(1, n_timelines)
record = Record(uuid.uuid1().hex, 'payload', time.time())
calls.append(functools.partial(timelines.add, 'projects/{0}'.format(p), record))
with timer('Loaded {0} records'.format(len(calls))):
for call in calls:
call()
# Move them into the "ready" state.
ready = set()
with timer('Scheduled timelines for digestion'):
for chunk in timelines.schedule(time.time()):
for timeline, timestamp in chunk:
ready.add(timeline)
# Run them through the digestion process.
with timer('Digested {0} timelines'.format(len(ready))):
for timeline in ready:
with timelines.digest(timeline) as records:
i = 0
# Iterate through the records to ensure that all data is deserialized.
for i, record in enumerate(records, 1):
pass
# Run the scheduler again (using a future cutoff time to accomodate for backoff.)
ready.clear()
with timer('Scheduled timelines for digestion'):
for chunk in timelines.schedule(time.time() + timelines.backoff(1)):
for timeline, timestamp in chunk:
ready.add(timeline)
# Run them through the digestion process again (this should result in all of
# the items being taken out of the schedule.)
with timer('Digested {0} timelines'.format(len(ready))):
for timeline in ready:
with timelines.digest(timeline) as records:
i = 0
for i, record in enumerate(records, 1):
pass
# Check to make sure we're not leaking any data.
with timelines.cluster.all() as client:
result = client.keys('*')
for host, value in result.value.iteritems():
assert not value
| bsd-3-clause | Python |
4e3ebcf98e2bfb2cea1f92b66e5205194744482a | add level 11 | au9ustine/org.au9ustine.puzzles.pythonchallenge | pythonchallenge/level_11.py | pythonchallenge/level_11.py | import unittest
import urllib
import requests
import logging
import re
import urllib
import os
import os.path
import Image
import ImageDraw
from StringIO import StringIO
# Default is warning, it's to suppress requests INFO log
logging.basicConfig(format='%(message)s')
def solution():
url = 'http://www.pythonchallenge.com/pc/return/cave.jpg'
r = requests.get(url, auth=('huge', 'file'))
image_file = Image.open(StringIO(r.content))
new_image = Image.new('RGB', (640, 480), 'black')
new_image_stroke = ImageDraw.Draw(new_image)
for y in xrange(480):
for x in xrange(640):
if y % 2 == 0 and x % 2 == 0 or y % 2 == 1 and x % 2 == 1:
new_image.putpixel((x, y), image_file.getpixel((x, y)))
new_image.save('cave_edited.jpg')
return 'evil'
class SolutionTest(unittest.TestCase):
    """Checks solution() and discovers the URL of the next level."""
    def setUp(self):
        self.prefix = "http://www.pythonchallenge.com/pc/return/"
        self.suffix = ".html"
    def tearDown(self):
        # solution() leaves cave_edited.jpg behind as a side effect.
        os.remove('cave_edited.jpg')
    def test_solution(self):
        actual = solution()
        expected = 'evil'
        cred = ('huge', 'file')
        self.assertEquals(actual, expected)
        origin_url = ''.join([self.prefix, 'evil', self.suffix])
        # (the former try/except that only re-raised was a no-op)
        r = requests.get(origin_url, auth=cred)
        self.assertTrue(r.ok)
        # Extract the target of a meta-refresh redirect, if the page has one.
        next_entry = [re.sub(r'(.*)URL=(.*)\.html\"\>', r'\2', line)
                      for line in r.iter_lines() if re.match(r'.*URL.*', line)]
        r.close()
        if len(next_entry) != 0:
            # BUG FIX: ``auth`` was previously passed to str.join (a
            # TypeError at runtime) and with the wrong value ('evil');
            # it belongs on requests.get with the credential tuple.
            r = requests.get(
                ''.join([self.prefix, next_entry[0], self.suffix]), auth=cred)
            logging.warn('Level 12 is %s with %s' % (r.url, cred))
        else:
            logging.warn('Level 12 is %s with %s' % (origin_url, cred))
if __name__ == "__main__":
unittest.main(failfast=True)
| import unittest
import urllib
import requests
import logging
import re
import urllib
import os
import os.path
import Image
import ImageDraw
# Default is warning, it's to suppress requests INFO log
logging.basicConfig(format='%(message)s')
def solution():
    """Download cave.jpg and keep only the checkerboard half of its pixels.

    Writes the reconstructed image to cave_edited.jpg and returns the
    answer word for the level.
    """
    url = 'http://www.pythonchallenge.com/pc/return/cave.jpg'
    urllib.urlretrieve(url, 'cave.jpg')
    image_file = Image.open('cave.jpg')
    new_image = Image.new('RGB', (640, 480), 'black')
    new_image_stroke = ImageDraw.Draw(new_image)
    for y in range(480):
        for x in range(640):
            # Copy only pixels whose x/y coordinates share parity.
            if y % 2 == 0 and x % 2 == 0 or y % 2 == 1 and x % 2 == 1:
                # BUG FIX: Image.getpixel takes a single (x, y) tuple;
                # passing two positional arguments raises TypeError.
                new_image.putpixel((x, y), image_file.getpixel((x, y)))
    new_image.save('cave_edited.jpg')
    return 'evil'
class SolutionTest(unittest.TestCase):
    """Checks solution() and tries to discover the URL of the next level.

    NOTE(review): unlike later revisions there is no tearDown, so the
    cave_edited.jpg written by solution() is left on disk after the test.
    """
    def setUp(self):
        self.prefix = "http://www.pythonchallenge.com/pc/return/"
        self.suffix = ".html"
    def test_solution(self):
        actual = solution()
        expected = 'evil'
        cred = ('huge', 'file')
        self.assertEquals(actual, expected)
        origin_url = ''.join([self.prefix, 'evil', self.suffix])
        # The try/except below only re-raises, so it is a no-op wrapper.
        try:
            r = requests.get(origin_url, auth=cred)
        except:
            raise
        self.assertTrue(r.ok)
        # Extract the target of a meta-refresh redirect, if the page has one.
        next_entry = [re.sub(r'(.*)URL=(.*)\.html\"\>', r'\2', line)
                      for line in r.iter_lines() if re.match(r'.*URL.*', line)]
        r.close()
        if len(next_entry) != 0:
            # NOTE(review): ``auth`` is passed to str.join here -- a
            # TypeError at runtime -- and with the answer string rather
            # than the credential tuple; it belongs on requests.get.
            r = requests.get(
                ''.join([self.prefix, next_entry[0], self.suffix], auth=expected))
            logging.warn('Level 12 is %s with %s' % (r.url, cred))
        else:
            logging.warn('Level 12 is %s with %s' % (origin_url, cred))
if __name__ == "__main__":
unittest.main(failfast=True)
| mit | Python |
fb5ad293c34387b1ab7b7b7df3aed3942fdd9282 | Add default to max_places in proposal form | hirunatan/estelcon_web,hirunatan/estelcon_web,hirunatan/estelcon_web,hirunatan/estelcon_web | src/webapp/activities/forms.py | src/webapp/activities/forms.py | # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
    """Hidden-field form used to subscribe a user to an existing activity.

    Both fields render as hidden inputs: the page already displays the
    activity, so the form only carries its identity through the POST.
    """
    # Primary key of the activity being subscribed to.
    id = forms.IntegerField(
        min_value = 0, required=True,
        widget = forms.HiddenInput,
    )
    # Activity title, carried along with the submission.
    title = forms.CharField(
        max_length=100, required=True,
        widget = forms.HiddenInput,
    )
class ProposalForm(forms.Form):
    """Form for proposing a new activity (title, schedule, capacity, texts)."""
    # Activity title, mandatory, up to 100 characters.
    title = forms.CharField(
        max_length=100, required=True,
    )
    subtitle = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Free-text duration (e.g. "2h"), mandatory.
    duration = forms.CharField(
        max_length=50, required=True,
    )
    # Maximum number of places; 0 by default (presumably "no limit" --
    # TODO confirm against the view that consumes this form).
    max_places = forms.IntegerField(
        min_value = 0, required=True, initial = 0,
    )
    show_owners = forms.BooleanField(
        initial = False, required = False,
    )
    requires_inscription = forms.BooleanField(
        initial = False, required = False,
    )
    # Free-text lists of people involved.
    owners = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    organizers = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Public description of the activity.
    text = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Logistics needs (space, material, ...).
    logistics = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Internal notes for the organization team.
    notes_organization = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
| # -*- encoding: utf-8 -*-
from django import forms
class ActivitySubscribeForm(forms.Form):
id = forms.IntegerField(
min_value = 0, required=True,
widget = forms.HiddenInput,
)
title = forms.CharField(
max_length=100, required=True,
widget = forms.HiddenInput,
)
class ProposalForm(forms.Form):
    """Form for proposing a new activity (title, schedule, capacity, texts)."""
    # Activity title, mandatory, up to 100 characters.
    title = forms.CharField(
        max_length=100, required=True,
    )
    subtitle = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Free-text duration (e.g. "2h"), mandatory.
    duration = forms.CharField(
        max_length=50, required=True,
    )
    # NOTE(review): no ``initial`` here, so the field renders empty even
    # though it is required; consider defaulting it to 0.
    max_places = forms.IntegerField(
        min_value = 0, required=True,
    )
    show_owners = forms.BooleanField(
        initial = False, required = False,
    )
    requires_inscription = forms.BooleanField(
        initial = False, required = False,
    )
    # Free-text lists of people involved.
    owners = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    organizers = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Public description of the activity.
    text = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Logistics needs (space, material, ...).
    logistics = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
    # Internal notes for the organization team.
    notes_organization = forms.CharField(
        required = False,
        widget = forms.Textarea,
    )
| agpl-3.0 | Python |
3b5f322d8fe76251b322b2d81cecf6abbee5e4bd | rename python class method | intel-analytics/BigDL,intel-analytics/BigDL,yangw1234/BigDL,yangw1234/BigDL,yangw1234/BigDL,intel-analytics/BigDL,yangw1234/BigDL,intel-analytics/BigDL | python/dllib/src/bigdl/dllib/feature/image/imagePreprocessing.py | python/dllib/src/bigdl/dllib/feature/image/imagePreprocessing.py | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from bigdl.util.common import JavaValue
from bigdl.util.common import callBigDlFunc
from bigdl.util.common import *
from zoo.feature.common import Preprocessing
if sys.version >= '3':
long = int
unicode = str
class Resize(Preprocessing):
    """
    Image resize preprocessing step.

    Resizes each image to resizeH x resizeW; the arguments are forwarded
    to the backend via the Preprocessing constructor.  ``bigdl_type`` is
    the numeric type tag used on the backend side.
    """
    def __init__(self, resizeH, resizeW, bigdl_type="float"):
        super(Resize, self).__init__(bigdl_type, resizeH, resizeW)
class ChannelNormalizer(Preprocessing):
    """
    Per-channel image normalization step.

    Takes a mean and a standard deviation for each of the R, G and B
    channels and forwards them to the backend via Preprocessing.
    """
    def __init__(self, meanR, meanG, meanB, stdR, stdG, stdB, bigdl_type="float"):
        super(ChannelNormalizer, self).__init__(bigdl_type, meanR, meanG, meanB, stdR, stdG, stdB)
class MatToTensor(Preprocessing):
    """
    Conversion step from an OpenCV mat image to a tensor.

    Takes no parameters beyond the backend numeric type tag.
    """
    def __init__(self, bigdl_type="float"):
        super(MatToTensor, self).__init__(bigdl_type)
class CenterCrop(Preprocessing):
    """
    Center-crop preprocessing step.

    Crops a cropWidth x cropHeight region from the center of each image;
    the arguments are forwarded to the backend via Preprocessing.
    """
    def __init__(self, cropWidth, cropHeight, bigdl_type="float"):
        super(CenterCrop, self).__init__(bigdl_type, cropWidth, cropHeight)
| #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from bigdl.util.common import JavaValue
from bigdl.util.common import callBigDlFunc
from bigdl.util.common import *
from zoo.feature.common import Preprocessing
if sys.version >= '3':
long = int
unicode = str
class Resize(Preprocessing):
"""
image resize
"""
def __init__(self, resizeH, resizeW, bigdl_type="float"):
super(Resize, self).__init__(bigdl_type, resizeH, resizeW)
class ChannelNormalizer(Preprocessing):
"""
image norm
"""
def __init__(self, meanR, meanG, meanB, stdR, stdG, stdB, bigdl_type="float"):
super(ChannelNormalizer, self).__init__(bigdl_type, meanR, meanG, meanB, stdR, stdG, stdB)
class MatToTensor(Preprocessing):
"""
MatToTensor
"""
def __init__(self, bigdl_type="float"):
super(MatToTensor, self).__init__(bigdl_type)
class CenterCrop(Preprocessing):
"""
CenterCrop
"""
def __init__(self, cropWidth, cropHeight, bigdl_type="float"):
super(CenterCrop, self).__init__(bigdl_type, cropWidth, cropHeight) | apache-2.0 | Python |
6a84b885be67e8a9f424c2b36f50e8fe9347dbc9 | Remove duplicate constant in ipmi.py | eliran-stratoscale/rackattack-physical,Stratoscale/rackattack-physical,eliran-stratoscale/rackattack-physical,Stratoscale/rackattack-physical | rackattack/physical/ipmi.py | rackattack/physical/ipmi.py | import subprocess
import time
import logging
import multiprocessing.pool
class IPMI:
    """Asynchronous wrapper around the ``ipmitool`` command line tool.

    Power commands are fired on a small shared thread pool, so ``off`` and
    ``powerCycle`` return immediately (fire-and-forget).
    """
    IPMITOOL_FILENAME = "ipmitool"
    # Size of the shared worker pool used for all instances.
    _CONCURRENCY = 4
    # Lazily created by the first instance and shared by all of them.
    _pool = None
    def __init__(self, hostname, username, password):
        self._hostname = hostname
        self._username = username
        self._password = password
        if IPMI._pool is None:
            IPMI._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
    def off(self):
        # Fire-and-forget: the result of the power command is not awaited.
        IPMI._pool.apply_async(self._powerCommand, args=("off",))
    def powerCycle(self):
        IPMI._pool.apply_async(self._powerCycle)
    def _powerCycle(self):
        # Sequential off-then-on on the worker thread.
        self._powerCommand("off")
        self._powerCommand("on")
    def _powerCommand(self, command):
        """Run ``ipmitool power <command>``, retrying on any failure.

        Retries up to NUMBER_OF_RETRIES times with a short pause; the final
        attempt lets the CalledProcessError propagate after logging output.
        """
        NUMBER_OF_RETRIES = 10
        cmdLine = [
            self.IPMITOOL_FILENAME, "power", command,
            "-H", str(self._hostname), "-U", self._username, "-P", self._password]
        for i in xrange(NUMBER_OF_RETRIES - 1):
            try:
                return subprocess.check_output(cmdLine, stderr=subprocess.STDOUT, close_fds=True)
            except:
                time.sleep(0.1)
        try:
            return subprocess.check_output(cmdLine, stderr=subprocess.STDOUT, close_fds=True)
        except subprocess.CalledProcessError as e:
            logging.error("Output: %(output)s", dict(output=e.output))
            raise
| import subprocess
import time
import logging
import multiprocessing.pool
class IPMI:
    """Asynchronous wrapper around the ``ipmitool`` command line tool.

    Power commands are fired on a small shared thread pool, so ``off`` and
    ``powerCycle`` return immediately (fire-and-forget).
    """
    # BUG FIX: IPMITOOL_FILENAME was defined twice in the class body; the
    # duplicate assignment has been removed.
    IPMITOOL_FILENAME = "ipmitool"
    # Size of the shared worker pool used for all instances.
    _CONCURRENCY = 4
    # Lazily created by the first instance and shared by all of them.
    _pool = None
    def __init__(self, hostname, username, password):
        self._hostname = hostname
        self._username = username
        self._password = password
        if IPMI._pool is None:
            IPMI._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
    def off(self):
        # Fire-and-forget: the result of the power command is not awaited.
        IPMI._pool.apply_async(self._powerCommand, args=("off",))
    def powerCycle(self):
        IPMI._pool.apply_async(self._powerCycle)
    def _powerCycle(self):
        # Sequential off-then-on on the worker thread.
        self._powerCommand("off")
        self._powerCommand("on")
    def _powerCommand(self, command):
        """Run ``ipmitool power <command>``, retrying on any failure.

        Retries up to NUMBER_OF_RETRIES times with a short pause; the final
        attempt lets the CalledProcessError propagate after logging output.
        """
        NUMBER_OF_RETRIES = 10
        cmdLine = [
            self.IPMITOOL_FILENAME, "power", command,
            "-H", str(self._hostname), "-U", self._username, "-P", self._password]
        for i in xrange(NUMBER_OF_RETRIES - 1):
            try:
                return subprocess.check_output(cmdLine, stderr=subprocess.STDOUT, close_fds=True)
            except:
                time.sleep(0.1)
        try:
            return subprocess.check_output(cmdLine, stderr=subprocess.STDOUT, close_fds=True)
        except subprocess.CalledProcessError as e:
            logging.error("Output: %(output)s", dict(output=e.output))
            raise
| apache-2.0 | Python |
356a7c4d83a5289e7b30a07b0f76829e274b7481 | Fix Eventlet transport on Python 3 | lepture/raven-python,akalipetis/raven-python,jmp0xf/raven-python,getsentry/raven-python,jmagnusson/raven-python,recht/raven-python,johansteffner/raven-python,danriti/raven-python,getsentry/raven-python,dbravender/raven-python,jmagnusson/raven-python,ronaldevers/raven-python,johansteffner/raven-python,danriti/raven-python,percipient/raven-python,percipient/raven-python,jmp0xf/raven-python,recht/raven-python,Photonomie/raven-python,recht/raven-python,dbravender/raven-python,jmp0xf/raven-python,Photonomie/raven-python,getsentry/raven-python,danriti/raven-python,lepture/raven-python,akalipetis/raven-python,lepture/raven-python,jmagnusson/raven-python,akheron/raven-python,percipient/raven-python,ronaldevers/raven-python,ewdurbin/raven-python,akalipetis/raven-python,ewdurbin/raven-python,dbravender/raven-python,ronaldevers/raven-python,akheron/raven-python,akheron/raven-python,ewdurbin/raven-python,johansteffner/raven-python,Photonomie/raven-python | raven/transport/eventlet.py | raven/transport/eventlet.py | """
raven.transport.eventlet
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from raven.transport.http import HTTPTransport
try:
import eventlet
try:
from eventlet.green import urllib2 as eventlet_urllib2
except ImportError:
from eventlet.green.urllib import request as eventlet_urllib2
has_eventlet = True
except:
has_eventlet = False
class EventletHTTPTransport(HTTPTransport):
    """HTTP transport that sends payloads on an eventlet green thread."""
    scheme = ['eventlet+http', 'eventlet+https']
    def __init__(self, parsed_url, pool_size=100, **kwargs):
        if not has_eventlet:
            raise ImportError('EventletHTTPTransport requires eventlet.')
        super(EventletHTTPTransport, self).__init__(parsed_url, **kwargs)
        # remove the eventlet+ from the protocol, as it is not a real protocol
        self._url = self._url.split('+', 1)[-1]
    def _send_payload(self, payload):
        """POST one (data, headers) payload; returns the response body or
        the exception instead of raising, since this runs detached."""
        req = eventlet_urllib2.Request(self._url, headers=payload[1])
        try:
            # Python < 2.6 urlopen has no timeout parameter.
            if sys.version_info < (2, 6):
                response = eventlet_urllib2.urlopen(req, payload[0]).read()
            else:
                response = eventlet_urllib2.urlopen(req, payload[0],
                                                    self.timeout).read()
            return response
        except Exception as err:
            return err
    def send(self, data, headers):
        """
        Spawn an async request to a remote webserver.
        """
        eventlet.spawn(self._send_payload, (data, headers))
| """
raven.transport.eventlet
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from raven.transport.http import HTTPTransport
try:
import eventlet
from eventlet.green import urllib2 as eventlet_urllib2
has_eventlet = True
except:
has_eventlet = False
class EventletHTTPTransport(HTTPTransport):
scheme = ['eventlet+http', 'eventlet+https']
def __init__(self, parsed_url, pool_size=100, **kwargs):
if not has_eventlet:
raise ImportError('EventletHTTPTransport requires eventlet.')
super(EventletHTTPTransport, self).__init__(parsed_url, **kwargs)
# remove the eventlet+ from the protocol, as it is not a real protocol
self._url = self._url.split('+', 1)[-1]
def _send_payload(self, payload):
req = eventlet_urllib2.Request(self._url, headers=payload[1])
try:
if sys.version_info < (2, 6):
response = eventlet_urllib2.urlopen(req, payload[0]).read()
else:
response = eventlet_urllib2.urlopen(req, payload[0],
self.timeout).read()
return response
except Exception as err:
return err
def send(self, data, headers):
"""
Spawn an async request to a remote webserver.
"""
eventlet.spawn(self._send_payload, (data, headers))
| bsd-3-clause | Python |
b30befbf39009ed566dbb7ff725de05bad2be990 | Add link to permissions management doc for ExportTables. (#520) | all-of-us/raw-data-repository,all-of-us/raw-data-repository,all-of-us/raw-data-repository | rdr_client/export_tables.py | rdr_client/export_tables.py | # Exports the entire contents of database tables to Unicode CSV files stored in GCS.
# Used instead of Cloud SQL export because it handles newlines and null characters properly.
#
# Documentation of permissions management:
# https://docs.google.com/document/d/1vKiu2zcSy97DQTIuSezr030kTyeDthome9XzNy98B6M
#
# Usage: ./run_client.sh --project <PROJECT> --account <ACCOUNT> \
# --service_account exporter@<PROJECT>.iam.gserviceaccount.com export_tables.py \
# --database rdr --tables code,participant --directory test_directory
#
# "directory" indicates a directory inside the GCS bucket to write the files to
#
# If "rdr" is chosen for the database, the data will be written to <ENVIRONMENT>-rdr-export;
# If "cdm" or "voc" are chosen, the data will be written to <ENVIRONMENT>-cdm.
import logging
from client import Client
from main_util import get_parser, configure_logging
def export_tables(client):
    """Ask the offline API to export the requested tables to GCS.

    Reads the database, comma-separated table list and target directory
    from ``client.args``, posts an ExportTables request and logs the GCS
    destination reported by the server.
    """
    args = client.args
    tables = args.tables.split(',')
    logging.info('Exporting %s from %s to %s' % (tables, args.database, args.directory))
    payload = {
        'database': args.database,
        'tables': tables,
        'directory': args.directory,
    }
    result = client.request_json('ExportTables', 'POST', payload)
    logging.info('Data is being exported to: %s' % result['destination'])
if __name__ == '__main__':
configure_logging()
parser = get_parser()
parser.add_argument('--database', help='The database to export data from', required=True)
parser.add_argument('--tables', help='A comma-separated list of tables to export',
required=True)
parser.add_argument('--directory',
help='A directory to write CSV output to inside the GCS bucket',
required=True)
export_tables(Client(parser=parser, base_path='offline'))
| # Exports the entire contents of database tables to Unicode CSV files stored in GCS.
# Used instead of Cloud SQL export because it handles newlines and null characters properly.
#
# Usage: ./run_client.sh --project <PROJECT> --account <ACCOUNT> \
# --service_account exporter@<PROJECT>.iam.gserviceaccount.com export_tables.py \
# --database rdr --tables code,participant --directory test_directory
#
# "directory" indicates a directory inside the GCS bucket to write the files to
#
# If "rdr" is chosen for the database, the data will be written to <ENVIRONMENT>-rdr-export;
# If "cdm" or "voc" are chosen, the data will be written to <ENVIRONMENT>-cdm.
import logging
from client import Client
from main_util import get_parser, configure_logging
def export_tables(client):
table_names = client.args.tables.split(',')
logging.info('Exporting %s from %s to %s' % (table_names, client.args.database,
client.args.directory))
request_body = {'database': client.args.database,
'tables': table_names,
'directory': client.args.directory}
response = client.request_json('ExportTables', 'POST', request_body)
logging.info('Data is being exported to: %s' % response['destination'])
if __name__ == '__main__':
configure_logging()
parser = get_parser()
parser.add_argument('--database', help='The database to export data from', required=True)
parser.add_argument('--tables', help='A comma-separated list of tables to export',
required=True)
parser.add_argument('--directory',
help='A directory to write CSV output to inside the GCS bucket',
required=True)
export_tables(Client(parser=parser, base_path='offline'))
| bsd-3-clause | Python |
f210ef3e6b4122c75b4df9eee6be6ee4ac81efa4 | Remove a useless table from the db | dubzzz/py-mymoney,dubzzz/py-mymoney,dubzzz/py-mymoney | www/scripts/generate_db.py | www/scripts/generate_db.py | #!/usr/bin/python
# This script has to generate the sqlite database
#
# Requirements (import from):
# - sqlite3
#
# Syntax:
# ./generate_db.py
import sqlite3
import sys
from os import path
SCRIPT_PATH = path.dirname(__file__)
DEFAULT_DB = path.join(SCRIPT_PATH, "../mymoney.db")
def generate_tables(db=None):
    """(Re)create the application schema in the given sqlite database.

    Drops any existing ``node``, ``expense`` and ``node_expense`` tables
    and recreates them, so the call is destructive but idempotent.

    :param db: path to the sqlite database file; defaults to DEFAULT_DB.
               The default is now resolved at call time (late binding), so
               overriding the module constant affects later calls and the
               function no longer depends on DEFAULT_DB at definition time.
    """
    if db is None:
        db = DEFAULT_DB
    conn = sqlite3.connect(db)
    with conn:
        c = conn.cursor()
        # Drop tables if they exist
        c.execute('''DROP TABLE IF EXISTS node''')
        c.execute('''DROP TABLE IF EXISTS expense''')
        c.execute('''DROP TABLE IF EXISTS node_expense''')
        # Create tables
        c.execute('''CREATE TABLE IF NOT EXISTS node (
                id INTEGER PRIMARY KEY,
                parent_id INTEGER,
                title TEXT NOT NULL,
                FOREIGN KEY(parent_id) REFERENCES node(id))''')
        c.execute('''CREATE TABLE IF NOT EXISTS expense (
                id INTEGER PRIMARY KEY,
                title TEXT NOT NULL,
                date INTEGER NOT NULL,
                value REAL NOT NULL)''')
        c.execute('''CREATE TABLE IF NOT EXISTS node_expense (
                expense_id INTEGER,
                node_id INTEGER,
                PRIMARY KEY(expense_id, node_id),
                FOREIGN KEY(expense_id) REFERENCES expense(id),
                FOREIGN KEY(node_id) REFERENCES node(id))''')
        # Commit the changes (the ``with conn`` block also commits on
        # success; the explicit commit is kept for clarity and is harmless).
        conn.commit()
if __name__ == '__main__':
generate_tables(DEFAULT_DB)
| #!/usr/bin/python
# This script has to generate the sqlite database
#
# Requirements (import from):
# - sqlite3
#
# Syntax:
# ./generate_db.py
import sqlite3
import sys
from os import path
SCRIPT_PATH = path.dirname(__file__)
DEFAULT_DB = path.join(SCRIPT_PATH, "../mymoney.db")
def generate_tables(db=DEFAULT_DB):
conn = sqlite3.connect(db)
with conn:
c = conn.cursor()
# Drop tables if they exist
c.execute('''DROP TABLE IF EXISTS node''')
c.execute('''DROP TABLE IF EXISTS node_hierarchy''')
c.execute('''DROP TABLE IF EXISTS expense''')
c.execute('''DROP TABLE IF EXISTS node_expense''')
# Create tables
c.execute('''CREATE TABLE IF NOT EXISTS node (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL)''')
c.execute('''CREATE TABLE IF NOT EXISTS node_hierarchy (
id INTEGER PRIMARY KEY,
parent_id INTEGER,
child_id INTEGER,
FOREIGN KEY(parent_id) REFERENCES node(id),
FOREIGN KEY(child_id) REFERENCES node(id),
UNIQUE(child_id))''')
c.execute('''CREATE TABLE IF NOT EXISTS expense (
id INTEGER PRIMARY KEY,
title TEXT NOT NULL,
date INTEGER NOT NULL,
value REAL NOT NULL)''')
c.execute('''CREATE TABLE IF NOT EXISTS node_expense (
expense_id INTEGER,
node_id INTEGER,
PRIMARY KEY(expense_id, node_id),
FOREIGN KEY(expense_id) REFERENCES expense(id),
FOREIGN KEY(node_id) REFERENCES node(id))''')
# Commit the changes
conn.commit()
if __name__ == '__main__':
generate_tables(DEFAULT_DB)
| mit | Python |
c02036f26bfd1eb6b1fed2dc10c73c91e97dae0b | Update __init__.py | r0h4n/node-agent,Tendrl/node_agent,Tendrl/node_agent,Tendrl/node-agent,Tendrl/node-agent,Tendrl/node-agent,r0h4n/node-agent,r0h4n/node-agent | tendrl/node_agent/objects/cluster_message/__init__.py | tendrl/node_agent/objects/cluster_message/__init__.py | from tendrl.commons import etcdobj
from tendrl.commons.message import Message as message
from tendrl.commons import objects
class ClusterMessage(objects.BaseObject, message):
    """Message scoped to a cluster, persisted under clusters/<id>/Messages.

    Both base classes are initialized explicitly (rather than via super)
    so the Message payload and the BaseObject machinery are each set up
    exactly once regardless of MRO.
    """
    internal = True
    def __init__(self, **cluster_message):
        self._defs = {}
        message.__init__(self, **cluster_message)
        objects.BaseObject.__init__(self)
        # etcd key template, filled in with (cluster_id, message_id).
        self.value = 'clusters/%s/Messages/%s'
        self._etcd_cls = _ClusterMessageEtcd
class _ClusterMessageEtcd(etcdobj.EtcdObj):
    """Cluster message object, lazily updated

    etcd representation of ClusterMessage; render() substitutes the
    cluster id and message id into the key template before rendering.
    """
    __name__ = 'clusters/%s/Messages/%s'
    _tendrl_cls = ClusterMessage
    def render(self):
        self.__name__ = self.__name__ % (
            self.cluster_id, self.message_id
        )
        return super(_ClusterMessageEtcd, self).render()
| from tendrl.commons import etcdobj
from tendrl.commons.message import Message as message
from tendrl.commons import objects
class ClusterMessage(message, objects.BaseObject):
internal = True
def __init__(self, **cluster_message):
self._defs = {}
super(ClusterMessage, self).__init__(**cluster_message)
self.value = 'clusters/%s/Messages/%s'
self._etcd_cls = _ClusterMessageEtcd
class _ClusterMessageEtcd(etcdobj.EtcdObj):
"""Cluster message object, lazily updated
"""
__name__ = 'clusters/%s/Messages/%s'
_tendrl_cls = ClusterMessage
def render(self):
self.__name__ = self.__name__ % (
self.cluster_id, self.message_id
)
return super(_ClusterMessageEtcd, self).render()
| lgpl-2.1 | Python |
551dddbb80d512ec49d8a422b52c24e98c97b38c | Add waiting for new data to parse | m4tx/techswarm-receiver | tsparser/main.py | tsparser/main.py | from time import sleep
from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
    """
    Parse the file specified as input.

    Starts the background Sender thread, then loops forever reading lines
    and dispatching each one to the registered parsers.

    :param input_file: file to read input from. If None, then pipe specified
    in config is used
    :type input_file: file
    """
    Sender(daemon=True).start()
    if input_file is None:
        input_file = open(config.PIPE_NAME, 'r')
    parsers = _get_parsers()
    while True:
        line = input_file.readline()
        if not line:
            # No data yet: back off briefly instead of busy-spinning.
            sleep(0.01)
            continue
        _parse_line(parsers, line)
def _get_parsers():
    """Return the list of parser instances, in dispatch priority order."""
    return [
        IMUParser(),
        GPSParser()
    ]
def _parse_line(parsers, line):
    """Dispatch one comma-separated line to the first parser accepting it.

    The trailing field is the timestamp: it is stripped off and published
    on BaseParser before the remaining fields are offered to each parser.
    Raises ParseException when no parser claims the line.
    """
    fields = line.split(',')
    BaseParser.timestamp = fields.pop().strip()
    # any() short-circuits on the first parser that accepts the line,
    # mirroring the original for/else + break construct.
    if not any(candidate.parse(line, *fields) for candidate in parsers):
        raise ParseException('Output line was not parsed by any parser: {}'
                             .format(line))
| from tsparser import config
from tsparser.parser import BaseParser, ParseException
from tsparser.parser.gps import GPSParser
from tsparser.parser.imu import IMUParser
from tsparser.sender import Sender
def parse(input_file=None):
    """
    Parse the file specified as input.

    Starts the background Sender thread, then loops forever reading lines
    and dispatching each one to the registered parsers.

    :param input_file: file to read input from. If None, then pipe specified
    in config is used
    :type input_file: file
    """
    Sender(daemon=True).start()
    if input_file is None:
        input_file = open(config.PIPE_NAME, 'r')
    parsers = _get_parsers()
    while True:
        line = input_file.readline()
        if not line:
            # NOTE(review): this busy-waits at 100% CPU while no data is
            # available; consider sleeping briefly before continuing.
            continue
        _parse_line(parsers, line)
def _get_parsers():
return [
IMUParser(),
GPSParser()
]
def _parse_line(parsers, line):
values = line.split(',')
BaseParser.timestamp = values.pop().strip()
for parser in parsers:
if parser.parse(line, *values):
break
else:
raise ParseException('Output line was not parsed by any parser: {}'
.format(line))
| mit | Python |
d2a0c928b9cdb693ca75731e1ae2cefb4c7ae722 | fix Episode JSON export | tvd-dataset/tvd | tvd/core/json.py | tvd/core/json.py | #!/usr/bin/env python
# encoding: utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2013-2014 CNRS (Hervé BREDIN -- http://herve.niderb.fr/)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from __future__ import unicode_literals
import simplejson as json
import pyannote.core.json
TVD_JSON = 'tvd'
def object_hook(d):
    """
    JSON object hook reviving TVD and pyannote objects on load.

    Usage
    -----
    >>> with open('file.json', 'r') as f:
    ...     json.load(f, object_hook=object_hook)
    """
    # Imported here rather than at module level, presumably to avoid a
    # circular import -- confirm before moving it.
    from episode import Episode
    if TVD_JSON in d:
        # The marker value is the serialized type name.
        if d[TVD_JSON] == 'Episode':
            return Episode.from_json(d)
    # Fall back to pyannote's own object hook for its types.
    d = pyannote.core.json.object_hook(d)
    return d
def load(path):
    """Read a JSON file, reviving TVD/pyannote objects via object_hook."""
    with open(path, 'r') as f:
        data = json.load(f, encoding='utf-8', object_hook=object_hook)
    return data
def dump(data, path):
    """Write *data* to a JSON file, using each object's for_json() hook."""
    with open(path, 'w') as f:
        json.dump(data, f, encoding='utf-8', for_json=True)
| #!/usr/bin/env python
# encoding: utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2013-2014 CNRS (Hervé BREDIN -- http://herve.niderb.fr/)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from __future__ import unicode_literals
import simplejson as json
import pyannote.core.json
TVD_JSON = 'tvd'
def object_hook(d):
    """
    JSON object hook reviving TVD and pyannote objects on load.

    Usage
    -----
    >>> with open('file.json', 'r') as f:
    ...     json.load(f, object_hook=object_hook)
    """
    # Imported here rather than at module level, presumably to avoid a
    # circular import -- confirm before moving it.
    from episode import Episode
    if TVD_JSON in d:
        # BUG FIX: exported episodes carry the marker 'Episode' (the type
        # name); the previous lowercase 'episode' comparison never matched,
        # so Episode objects were silently not revived on load.
        if d[TVD_JSON] == 'Episode':
            return Episode.from_json(d)
    # Fall back to pyannote's own object hook for its types.
    d = pyannote.core.json.object_hook(d)
    return d
def load(path):
with open(path, 'r') as f:
data = json.load(f, encoding='utf-8', object_hook=object_hook)
return data
def dump(data, path):
with open(path, 'w') as f:
json.dump(data, f, encoding='utf-8', for_json=True)
| mit | Python |
cd4da2e0fbed7bbadd4b110f45b7356795075aeb | add min_level to Logger | wearpants/twiggy,wearpants/twiggy | twiggy/Logger.py | twiggy/Logger.py | from Message import Message
import Levels
class Logger(object):
__slots__ = ['_fields', 'emitters', 'min_level']
def __init__(self, fields = None, emitters = None, min_level = Levels.DEBUG):
self._fields = fields if fields is not None else {}
self.emitters = emitters if emitters is not None else {}
self.min_level = min_level
def fields(self, **kwargs):
new_fields = self._fields.copy().update(**kwargs)
return self.__class__(new_fields, self.emitters, self.min_level)
def name(self, name):
return self.fields(name=name)
def struct(self, **kwargs):
self.fields(**kwargs).info()
def _emit(self, level, format_spec = '', *args, **kwargs):
if level < self.min_level: return
msg = Message(level, format_spec, self._fields.copy(), *args, **kwargs)
for emitter in self.emitters.itervalues():
if emitter.min_level >= msg.level:
# XXX add appropriate error trapping & logging; watch for recursion
emitter.emit(msg)
def debug(self, *args, **kwargs):
self._emit(Levels.DEBUG, *args, **kwargs)
def info(self, *args, **kwargs):
self._emit(Levels.INFO, *args, **kwargs)
def warning(self, *args, **kwargs):
self._emit(Levels.WARNING, *args, **kwargs)
def error(self, *args, **kwargs):
self._emit(Levels.ERROR, *args, **kwargs)
def critical(self, *args, **kwargs):
self._emit(Levels.CRITICAL, *args, **kwargs) | from Message import Message
import Levels
class Logger(object):
    """Structured logger carrying bound fields; fans messages out to its
    registered emitters."""
    __slots__ = ['_fields', 'emitters']
    def __init__(self, fields = None, emitters = None):
        self._fields = fields if fields is not None else {}
        self.emitters = emitters if emitters is not None else {}
    def fields(self, **kwargs):
        # NOTE(review): dict.update() returns None, so ``new_fields`` is
        # always None here and the child logger loses its bound fields --
        # copy first, then update in place.
        new_fields = self._fields.copy().update(**kwargs)
        return self.__class__(new_fields, self.emitters)
    def name(self, name):
        return self.fields(name=name)
    def struct(self, **kwargs):
        # Emit the bound fields themselves as an INFO record.
        self.fields(**kwargs).info()
    def _emit(self, level, format_spec = '', *args, **kwargs):
        msg = Message(level, format_spec, self._fields.copy(), *args, **kwargs)
        for emitter in self.emitters.itervalues():
            # NOTE(review): the ``>=`` direction depends on how Levels
            # orders severities -- confirm against twiggy.Levels.
            if emitter.min_level >= msg.level:
                # XXX add appropriate error trapping & logging; watch for recursion
                emitter.emit(msg)
    def debug(self, *args, **kwargs):
        self._emit(Levels.DEBUG, *args, **kwargs)
    def info(self, *args, **kwargs):
        self._emit(Levels.INFO, *args, **kwargs)
    def warning(self, *args, **kwargs):
        self._emit(Levels.WARNING, *args, **kwargs)
    def error(self, *args, **kwargs):
        self._emit(Levels.ERROR, *args, **kwargs)
    def critical(self, *args, **kwargs):
        self._emit(Levels.CRITICAL, *args, **kwargs) | bsd-3-clause | Python |
04f2c9005a04559a48ad0919b840d709c0f4eeaa | Update version. | itdxer/neupy,itdxer/neupy,itdxer/neupy,stczhc/neupy,stczhc/neupy,stczhc/neupy,stczhc/neupy,itdxer/neupy | neupy/__init__.py | neupy/__init__.py | """
NeuPy is the Artificial Neural Network library implemented in Python.
"""
__version__ = '0.1.1'
| """
NeuPy is the Artificial Neural Network library implemented in Python.
"""
__version__ = '0.1.1a'
| mit | Python |
b2859bfde66d7d91f98e3cfb61e205c1d2f5dbfe | Make CommentFactory use fuzzy attrs | NiGhTTraX/hackernews-scraper | hackernews_scraper/test/factories.py | hackernews_scraper/test/factories.py | from datetime import datetime, timedelta
import factory
from factory.fuzzy import FuzzyText, FuzzyInteger
import time
class ItemFactory(factory.Factory):
    """Builds a minimal HN item dict (id, creation time, title)."""
    FACTORY_FOR = dict
    objectID = 21
    created_at_i = 42
    title = "Test item"
class CommentFactory(factory.Factory):
    """Builds HN comment dicts with fuzzy text fields and sequenced times."""
    FACTORY_FOR = dict
    # Each successive comment is stamped one minute earlier, so a batch of
    # comments gets strictly decreasing ISO creation times.
    @factory.sequence
    def created_at(n):
        return (datetime.now() - timedelta(minutes=n)).isoformat()
    # Matching epoch timestamp, also decreasing with the sequence counter.
    @factory.sequence
    def created_at_i(n):
        return time.time() - n
    title = FuzzyText(length=20)
    url = "www.google.com"
    comment_text = FuzzyText(length=300)
    story_id = 42
    story_title = "Bear kills man"
    story_url = "www.bing.com"
    author = FuzzyText(length=10)
    points = FuzzyInteger(100)
    objectID = FuzzyInteger(100)
    parent_id = FuzzyInteger(100)
class StoryFactory(factory.Factory):
FACTORY_FOR = dict
created_at_i = 42
title = "Test story"
url = "www.google.com"
author = "yourdad"
points = 42
story_text = "Fuzzy wuzzy had no hair"
story_id = 42
class ResponseFactory(factory.Factory):
FACTORY_FOR = dict
nbPages = 1
hits = [ItemFactory(), ItemFactory()]
nbHits = factory.LazyAttribute(lambda x: x.nbPages * len(x.hits))
hitsPerPage = factory.LazyAttribute(lambda x: len(x.hits))
| import factory
class ItemFactory(factory.Factory):
FACTORY_FOR = dict
objectID = 21
created_at_i = 42
title = "Test item"
class CommentFactory(factory.Factory):
FACTORY_FOR = dict
created_at = "2014-04-03T10:17:28.000Z"
title = "Test comment"
url = "www.google.com"
comment_text = "Fuzzy wuzzy was a bear"
story_id = 42
story_title = "Bear kills man"
story_url = "www.bing.com"
author = "yourmom"
points = 42
created_at_i = 42
objectID = 42
parent_id = 42
class StoryFactory(factory.Factory):
FACTORY_FOR = dict
created_at = "2014-04-03T10:17:28.000Z"
created_at_i = 42
title = "Test story"
url = "www.google.com"
author = "yourdad"
points = 42
story_text = "Fuzzy wuzzy had no hair"
story_id = 42
class ResponseFactory(factory.Factory):
FACTORY_FOR = dict
nbPages = 1
hits = [ItemFactory(), ItemFactory()]
nbHits = factory.LazyAttribute(lambda x: x.nbPages * len(x.hits))
hitsPerPage = factory.LazyAttribute(lambda x: len(x.hits))
| bsd-2-clause | Python |
5e1e0ba1dca301eb597fb319c68280f7ee761037 | Add twopeasandtheirpod and simplyrecipes to __init__ | hhursev/recipe-scraper | recipe_scrapers/__init__.py | recipe_scrapers/__init__.py | import re
from .allrecipes import AllRecipes
from .simplyrecipes import SimplyRecipes
from .twopeasandtheirpod import TwoPeasAndTheirPod
SCRAPERS = {
AllRecipes.host(): AllRecipes,
SimplyRecipes.host(): SimplyRecipes,
TwoPeasAndTheirPod.host(): TwoPeasAndTheirPod,
}
def url_path_to_dict(path):
pattern = (r'^'
r'((?P<schema>.+?)://)?'
r'((?P<user>.+?)(:(?P<password>.*?))?@)?'
r'(?P<host>.*?)'
r'(:(?P<port>\d+?))?'
r'(?P<path>/.*?)?'
r'(?P<query>[?].*?)?'
r'$'
)
regex = re.compile(pattern)
matches = regex.match(path)
url_dict = matches.groupdict() if matches is not None else None
return url_dict
def scrap_me(url_path):
return SCRAPERS[url_path_to_dict(url_path)['host']](url_path)
__all__ = ['scrap_me']
| import re
from .allrecipes import AllRecipes
SCRAPERS = {
AllRecipes.host(): AllRecipes,
}
def url_path_to_dict(path):
pattern = (r'^'
r'((?P<schema>.+?)://)?'
r'((?P<user>.+?)(:(?P<password>.*?))?@)?'
r'(?P<host>.*?)'
r'(:(?P<port>\d+?))?'
r'(?P<path>/.*?)?'
r'(?P<query>[?].*?)?'
r'$'
)
regex = re.compile(pattern)
matches = regex.match(path)
url_dict = matches.groupdict() if matches is not None else None
return url_dict
def scrap_me(url_path):
return SCRAPERS[url_path_to_dict(url_path)['host']](url_path)
__all__ = ['scrap_me']
| mit | Python |
5fd70e01f648da6dfc994bfe0e5c666c69fa9e45 | return None (null) in preference to empty string when recipe yield is unavailable | hhursev/recipe-scraper | recipe_scrapers/vegolosi.py | recipe_scrapers/vegolosi.py | from ._abstract import AbstractScraper
from ._utils import get_minutes, get_yields, normalize_string
class Vegolosi(AbstractScraper):
@classmethod
def host(cls):
return "vegolosi.it"
def title(self):
return self.soup.find("h1").get_text().strip()
def preparation_time(self):
possible_time_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-prep-time"}
)
return sum([get_minutes(element) for element in possible_time_info_elements])
def cooking_time(self):
possible_time_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-cook-time"}
)
return sum([get_minutes(element) for element in possible_time_info_elements])
def total_time(self):
possible_time_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-total-time"}
)
return sum([get_minutes(element) for element in possible_time_info_elements])
def yields(self):
possible_yields_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-yield"}
)
for element in possible_yields_info_elements:
if "persone" in element.get_text():
return get_yields(element)
def ingredients(self):
ingredients = self.soup.select(".tasty-recipe-ingredients > ul > li")
if not ingredients:
ingredients = self.soup.findAll("li", {"class": "ingredient"})
return [normalize_string(ingredient.get_text()) for ingredient in ingredients]
def instructions(self):
instructions = self.soup.findAll("div", {"class": "tasty-recipe-instructions"})
return "\n".join(
[normalize_string(instruction.get_text()) for instruction in instructions]
)
def ratings(self):
return round(
float(
self.soup.find("div", {"class": "tasty-recipe-rating rating_panel"})
.get("data-content-rate")
.replace(",", ".")
),
2,
)
| from ._abstract import AbstractScraper
from ._utils import get_minutes, get_yields, normalize_string
class Vegolosi(AbstractScraper):
@classmethod
def host(cls):
return "vegolosi.it"
def title(self):
return self.soup.find("h1").get_text().strip()
def preparation_time(self):
possible_time_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-prep-time"}
)
return sum([get_minutes(element) for element in possible_time_info_elements])
def cooking_time(self):
possible_time_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-cook-time"}
)
return sum([get_minutes(element) for element in possible_time_info_elements])
def total_time(self):
possible_time_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-total-time"}
)
return sum([get_minutes(element) for element in possible_time_info_elements])
def yields(self):
possible_yields_info_elements = self.soup.findAll(
"span", {"class": "tasty-recipes-yield"}
)
for element in possible_yields_info_elements:
if "persone" in element.get_text():
return get_yields(element)
return ""
def ingredients(self):
ingredients = self.soup.select(".tasty-recipe-ingredients > ul > li")
if not ingredients:
ingredients = self.soup.findAll("li", {"class": "ingredient"})
return [normalize_string(ingredient.get_text()) for ingredient in ingredients]
def instructions(self):
instructions = self.soup.findAll("div", {"class": "tasty-recipe-instructions"})
return "\n".join(
[normalize_string(instruction.get_text()) for instruction in instructions]
)
def ratings(self):
return round(
float(
self.soup.find("div", {"class": "tasty-recipe-rating rating_panel"})
.get("data-content-rate")
.replace(",", ".")
),
2,
)
| mit | Python |
631270eeafad8fd6b20973673f6d6e8b733e9029 | enable email | doubleDragon/QuantBot | quant/tool/email_box.py | quant/tool/email_box.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from email.mime.text import MIMEText
from quant import config
import smtplib
mail_to = ["[email protected]"]
mail_host = "smtp.163.com"
mail_user = "[email protected]"
'''163邮箱smtp生成的密码'''
mail_pass = config.EMAIL_PASSWORD_163
mail_subject = 'logging'
def send_mail(content):
# pass
me = "QuantBot" + "<" + mail_user + ">"
msg = MIMEText(_text=content, _subtype='plain', _charset='utf-8')
msg['Subject'] = mail_subject
msg['From'] = me
msg['To'] = ";".join(mail_to)
try:
server = smtplib.SMTP()
server.connect(mail_host)
server.login(mail_user, mail_pass)
server.sendmail(me, mail_to, msg.as_string())
server.close()
return True
except Exception as e:
print (e)
return False
if __name__ == '__main__':
# for test
send_mail('content')
| #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from email.mime.text import MIMEText
from quant import config
import smtplib
mail_to = ["[email protected]"]
mail_host = "smtp.163.com"
mail_user = "[email protected]"
'''163邮箱smtp生成的密码'''
mail_pass = config.EMAIL_PASSWORD_163
mail_subject = 'logging'
def send_mail(content):
pass
# me = "QuantBot" + "<" + mail_user + ">"
# msg = MIMEText(_text=content, _subtype='plain', _charset='utf-8')
# msg['Subject'] = mail_subject
# msg['From'] = me
# msg['To'] = ";".join(mail_to)
# try:
# server = smtplib.SMTP()
# server.connect(mail_host)
# server.login(mail_user, mail_pass)
# server.sendmail(me, mail_to, msg.as_string())
# server.close()
# return True
# except Exception as e:
# print (e)
# return False
if __name__ == '__main__':
# for test
send_mail('content')
| mit | Python |
0caec903579e4cf3f22ea3e5ea1df3ecd8ad0fe3 | remove nigthly test hgemm_asm | ROCmSoftwarePlatform/Tensile,ROCmSoftwarePlatform/Tensile,ROCmSoftwarePlatform/Tensile | test/nightly.py | test/nightly.py | #
# These nightly tests are slow but have good coverage. Fast tests with less coverage are in pre_checkin.py.
#
# To execute this test file, apt-get install python-pytest, then
# PYTHONPATH=. py.test -v test/nightly.py
#
# To run test directly, with complete output:
# mkdir build && cd build
# python ../Tensile/Tensile.py ../Tensile/Configs/test_hgemm_defaults.yaml ./
#
import Tensile.Tensile as Tensile
# defaults
def test_hgemm_defaults(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm_defaults.yaml"), tmpdir.strpath])
def test_sgemm_defaults(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm_defaults.yaml"), tmpdir.strpath])
def test_dgemm_defaults(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_dgemm_defaults.yaml"), tmpdir.strpath])
# thorough tests
def test_hgemm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm.yaml"), tmpdir.strpath])
def test_sgemm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm.yaml"), tmpdir.strpath])
# vectors
def test_hgemm_vectors(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm_vectors.yaml"), tmpdir.strpath])
def test_sgemm_vectors(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm_vectors.yaml"), tmpdir.strpath])
# tensor convolution
def test_tensor_convolution(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_convolution.yaml"), tmpdir.strpath])
# tensor contractions
def test_tensor_contraction(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_tensor_contraction.yaml"), tmpdir.strpath])
# assembly
def test_sgemm_asm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm_asm.yaml"), tmpdir.strpath])
def test_dgemm_asm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_dgemm_asm.yaml"), tmpdir.strpath])
| #
# These nightly tests are slow but have good coverage. Fast tests with less coverage are in pre_checkin.py.
#
# To execute this test file, apt-get install python-pytest, then
# PYTHONPATH=. py.test -v test/nightly.py
#
# To run test directly, with complete output:
# mkdir build && cd build
# python ../Tensile/Tensile.py ../Tensile/Configs/test_hgemm_defaults.yaml ./
#
import Tensile.Tensile as Tensile
# defaults
def test_hgemm_defaults(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm_defaults.yaml"), tmpdir.strpath])
def test_sgemm_defaults(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm_defaults.yaml"), tmpdir.strpath])
def test_dgemm_defaults(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_dgemm_defaults.yaml"), tmpdir.strpath])
# thorough tests
def test_hgemm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm.yaml"), tmpdir.strpath])
def test_sgemm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm.yaml"), tmpdir.strpath])
# vectors
def test_hgemm_vectors(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm_vectors.yaml"), tmpdir.strpath])
def test_sgemm_vectors(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm_vectors.yaml"), tmpdir.strpath])
# tensor convolution
def test_tensor_convolution(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_convolution.yaml"), tmpdir.strpath])
# tensor contractions
def test_tensor_contraction(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_tensor_contraction.yaml"), tmpdir.strpath])
# assembly
def test_hgemm_asm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_hgemm_asm.yaml"), tmpdir.strpath])
def test_sgemm_asm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_sgemm_asm.yaml"), tmpdir.strpath])
def test_dgemm_asm(tmpdir):
Tensile.Tensile([Tensile.TensileConfigPath("test_dgemm_asm.yaml"), tmpdir.strpath])
| mit | Python |
94fc7881052fea4e7d83f35e41fab4f5ed108f34 | fix styling | rmorshea/spectate | spectate/utils.py | spectate/utils.py | class Sentinel:
__slots__ = "_name"
def __init__(self, name):
self._name = name
def __repr__(self):
return self._name # pragma: no cover
| from collections.abc import Mapping
class Sentinel:
__slots__ = "_name"
def __init__(self, name):
self._name = name
def __repr__(self):
return self._name # pragma: no cover
| mit | Python |
7329757e1ad30e327c1ae823a8302c79482d6b9c | Update BUILD_OSS to 4632 | fcitx/mozc,google/mozc,google/mozc,google/mozc,fcitx/mozc,fcitx/mozc,google/mozc,google/mozc,fcitx/mozc,fcitx/mozc | src/data/version/mozc_version_template.bzl | src/data/version/mozc_version_template.bzl | # Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4632
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that loose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
| # Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4624
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that loose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
| bsd-3-clause | Python |
e243e907e58047e18c0a16e061f7aa718e3b5854 | Remove unavailable imports | bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,jseabold/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,jseabold/statsmodels | statsmodels/compat/__init__.py | statsmodels/compat/__init__.py | from .python import ( # noqa:F401
PY3, PY37,
bytes, str, unicode, string_types,
asunicode, asbytes, asstr, asstr2,
range, zip, filter, map,
lrange, lzip, lmap, lfilter,
cStringIO, StringIO, BytesIO,
cPickle, pickle,
iteritems, iterkeys, itervalues,
urlopen, urljoin, urlencode, HTTPError, URLError,
reduce, long, unichr, zip_longest,
builtins,
getargspec,
next, get_class
)
__all__ = ['PY3', 'PY37', 'bytes', 'str', 'unicode', 'string_types',
'asunicode', 'asbytes', 'asstr', 'asstr2', 'range', 'zip',
'filter', 'map', 'lrange', 'lzip', 'lmap', 'lfilter', 'cStringIO',
'StringIO', 'BytesIO', 'cPickle', 'pickle', 'iteritems',
'iterkeys', 'itervalues', 'urlopen', 'urljoin', 'urlencode',
'HTTPError', 'URLError', 'reduce', 'long', 'unichr', 'zip_longest',
'builtins', 'getargspec', 'next', 'get_class']
| from .python import ( # noqa:F401
PY3, PY37,
bytes, str, unicode, string_types,
asunicode, asbytes, asstr, asstr2, asunicode_nested, asbytes_nested,
range, zip, filter, map,
lrange, lzip, lmap, lfilter,
cStringIO, StringIO, BytesIO,
cPickle, pickle,
iteritems, iterkeys, itervalues,
urlopen, urljoin, urlencode, HTTPError, URLError,
reduce, long, unichr, zip_longest,
strchar,
isfileobj,
open_latin1,
builtins,
getargspec,
input,
getexception,
advance_iterator, next,
callable,
get_function_name, get_class
)
| bsd-3-clause | Python |
0629b30ade8b619697e8cc28d651904e742cd70e | Correct inst method names in system info, add Docker version (#36360) | GenericStudent/home-assistant,tboyce1/home-assistant,home-assistant/home-assistant,nkgilley/home-assistant,soldag/home-assistant,Danielhiversen/home-assistant,partofthething/home-assistant,mezz64/home-assistant,pschmitt/home-assistant,w1ll1am23/home-assistant,w1ll1am23/home-assistant,mKeRix/home-assistant,mKeRix/home-assistant,jawilson/home-assistant,tchellomello/home-assistant,Danielhiversen/home-assistant,turbokongen/home-assistant,titilambert/home-assistant,mezz64/home-assistant,tboyce021/home-assistant,tboyce1/home-assistant,rohitranjan1991/home-assistant,tboyce021/home-assistant,tchellomello/home-assistant,kennedyshead/home-assistant,partofthething/home-assistant,nkgilley/home-assistant,mKeRix/home-assistant,toddeye/home-assistant,pschmitt/home-assistant,FreekingDean/home-assistant,robbiet480/home-assistant,tboyce1/home-assistant,turbokongen/home-assistant,home-assistant/home-assistant,balloob/home-assistant,sander76/home-assistant,adrienbrault/home-assistant,lukas-hetzenecker/home-assistant,robbiet480/home-assistant,sander76/home-assistant,aronsky/home-assistant,sdague/home-assistant,jawilson/home-assistant,tboyce1/home-assistant,titilambert/home-assistant,balloob/home-assistant,toddeye/home-assistant,aronsky/home-assistant,rohitranjan1991/home-assistant,lukas-hetzenecker/home-assistant,kennedyshead/home-assistant,rohitranjan1991/home-assistant,sdague/home-assistant,GenericStudent/home-assistant,balloob/home-assistant,soldag/home-assistant,FreekingDean/home-assistant,adrienbrault/home-assistant,mKeRix/home-assistant | homeassistant/helpers/system_info.py | homeassistant/helpers/system_info.py | """Helper to gather system info."""
import os
import platform
from typing import Dict
from homeassistant.const import __version__ as current_version
from homeassistant.loader import bind_hass
from homeassistant.util.package import is_virtual_env
from .typing import HomeAssistantType
@bind_hass
async def async_get_system_info(hass: HomeAssistantType) -> Dict:
"""Return info about the system."""
info_object = {
"installation_type": "Unknown",
"version": current_version,
"dev": "dev" in current_version,
"hassio": hass.components.hassio.is_hassio(),
"virtualenv": is_virtual_env(),
"python_version": platform.python_version(),
"docker": False,
"arch": platform.machine(),
"timezone": str(hass.config.time_zone),
"os_name": platform.system(),
"os_version": platform.release(),
}
if platform.system() == "Windows":
info_object["os_version"] = platform.win32_ver()[0]
elif platform.system() == "Darwin":
info_object["os_version"] = platform.mac_ver()[0]
elif platform.system() == "Linux":
info_object["docker"] = os.path.isfile("/.dockerenv")
# Determine installation type on current data
if info_object["docker"]:
info_object["installation_type"] = "Home Assistant Container"
elif is_virtual_env():
info_object["installation_type"] = "Home Assistant Core"
# Enrich with Supervisor information
if hass.components.hassio.is_hassio():
info = hass.components.hassio.get_info()
host = hass.components.hassio.get_host_info()
info_object["supervisor"] = info.get("supervisor")
info_object["host_os"] = host.get("operating_system")
info_object["chassis"] = host.get("chassis")
info_object["docker_version"] = info.get("docker")
if info.get("hassos") is not None:
info_object["installation_type"] = "Home Assistant"
else:
info_object["installation_type"] = "Home Assistant Supervised"
return info_object
| """Helper to gather system info."""
import os
import platform
from typing import Dict
from homeassistant.const import __version__ as current_version
from homeassistant.loader import bind_hass
from homeassistant.util.package import is_virtual_env
from .typing import HomeAssistantType
@bind_hass
async def async_get_system_info(hass: HomeAssistantType) -> Dict:
"""Return info about the system."""
info_object = {
"installation_type": "Unknown",
"version": current_version,
"dev": "dev" in current_version,
"hassio": hass.components.hassio.is_hassio(),
"virtualenv": is_virtual_env(),
"python_version": platform.python_version(),
"docker": False,
"arch": platform.machine(),
"timezone": str(hass.config.time_zone),
"os_name": platform.system(),
"os_version": platform.release(),
}
if platform.system() == "Windows":
info_object["os_version"] = platform.win32_ver()[0]
elif platform.system() == "Darwin":
info_object["os_version"] = platform.mac_ver()[0]
elif platform.system() == "Linux":
info_object["docker"] = os.path.isfile("/.dockerenv")
# Determine installation type on current data
if info_object["docker"]:
info_object["installation_type"] = "Home Assistant Core on Docker"
elif is_virtual_env():
info_object[
"installation_type"
] = "Home Assistant Core in a Python Virtual Environment"
# Enrich with Supervisor information
if hass.components.hassio.is_hassio():
info = hass.components.hassio.get_info()
host = hass.components.hassio.get_host_info()
info_object["supervisor"] = info.get("supervisor")
info_object["host_os"] = host.get("operating_system")
info_object["chassis"] = host.get("chassis")
if info.get("hassos") is not None:
info_object["installation_type"] = "Home Assistant"
else:
info_object["installation_type"] = "Home Assistant Supervised"
return info_object
| apache-2.0 | Python |
c1b19af7229d582f7bd474a05a679cf45e3c9bf8 | add proxy + fix import modules | nrivet84/ig | tests/basics.py | tests/basics.py | # -*- coding: utf-8 -*-
"""
@author: Nicolas Rivet
test the connection to IG API
do some basic operations
"""
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'ig')))
import ig_service as igs
import ig_tools as igt
def main():
"""Main module for testing."""
#get config for demo API
proxy_user, proxy_password, api_key, username, password, account = \
igt.getconfig('demo')
#login demo API
service=igs.IGservice(username, password, api_key, account, 'demo', proxy_user, proxy_password)
log=service.login()
print('\n', 'login', '\n', log)
#get newest bidask
instrument='CS.D.EURUSD.CFD.IP'
bidask=service.get_bidask(instrument)
print('\n', 'get_bidask of EURUSD', '\n', bidask)
#get historical closes
resolution='MINUTE'
max_size=10
closes=service.get_closes(instrument, resolution, max_size)
print('\n', 'get_closes of EURUSD for the last 10 minutes', '\n', closes)
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
"""
@author: Nicolas Rivet
test the connection to IG API
do some basic operations
"""
from ig.ig_service import IGservice as igs
import ig.ig_tools as igt
def main():
"""Main module for testing."""
#get config for demo API
proxy_user, proxy_password, api_key, username, password, account = \
igt.getconfig('demo')
#login demo API
service=igs(username, password, api_key, account, 'demo')
log=igs.login(service)
print(log[0])
#get newest bidask
instrument='CS.D.EURUSD.CFD.IP'
bidask=igs.get_bidask(service, instrument)
print(bidask)
#get historical closes
resolution='MINUTE'
max_size=10
closes=igs.get_closes(service, instrument, resolution, max_size)
print(closes)
if __name__ == '__main__':
main() | mit | Python |
d23a68d464c62cdefb76dbe5855110374680ae61 | Add coverage metrics to python code | willbarton/regulations-site,ascott1/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,grapesmoker/regulations-site,adderall/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,EricSchles/regulations-site,EricSchles/regulations-site,grapesmoker/regulations-site,eregs/regulations-site,adderall/regulations-site,eregs/regulations-site,eregs/regulations-site,willbarton/regulations-site,EricSchles/regulations-site,willbarton/regulations-site,jeremiak/regulations-site,18F/regulations-site,jeremiak/regulations-site,ascott1/regulations-site,EricSchles/regulations-site,willbarton/regulations-site,tadhg-ohiggins/regulations-site,adderall/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,jeremiak/regulations-site,ascott1/regulations-site,18F/regulations-site,grapesmoker/regulations-site,jeremiak/regulations-site,adderall/regulations-site | regulations/settings/dev.py | regulations/settings/dev.py | from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--with-coverage',
'--cover-package=regulations',
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| from .base import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
STATICFILES_DIRS = (
root('static'),
)
OFFLINE_OUTPUT_DIR = '/tmp/'
INSTALLED_APPS += (
'django_nose',
)
NOSE_ARGS = [
'--exclude-dir=regulations/uitests'
]
try:
from local_settings import *
except ImportError:
pass
| cc0-1.0 | Python |
8cc36a325e8bedb7894f31fe049aee1aef903811 | remove unused code | dennisobrien/bokeh,timsnyder/bokeh,aavanian/bokeh,philippjfr/bokeh,ericmjl/bokeh,schoolie/bokeh,ptitjano/bokeh,stonebig/bokeh,rs2/bokeh,jakirkham/bokeh,gpfreitas/bokeh,percyfal/bokeh,aavanian/bokeh,stonebig/bokeh,msarahan/bokeh,ptitjano/bokeh,schoolie/bokeh,quasiben/bokeh,azjps/bokeh,percyfal/bokeh,jakirkham/bokeh,htygithub/bokeh,mindriot101/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,gpfreitas/bokeh,maxalbert/bokeh,stonebig/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,aiguofer/bokeh,DuCorey/bokeh,dennisobrien/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,quasiben/bokeh,DuCorey/bokeh,ericmjl/bokeh,clairetang6/bokeh,phobson/bokeh,maxalbert/bokeh,justacec/bokeh,maxalbert/bokeh,draperjames/bokeh,dennisobrien/bokeh,aiguofer/bokeh,jakirkham/bokeh,rs2/bokeh,ptitjano/bokeh,phobson/bokeh,KasperPRasmussen/bokeh,azjps/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,philippjfr/bokeh,bokeh/bokeh,draperjames/bokeh,aavanian/bokeh,Karel-van-de-Plassche/bokeh,KasperPRasmussen/bokeh,philippjfr/bokeh,timsnyder/bokeh,azjps/bokeh,msarahan/bokeh,jakirkham/bokeh,percyfal/bokeh,ptitjano/bokeh,timsnyder/bokeh,mindriot101/bokeh,timsnyder/bokeh,stonebig/bokeh,philippjfr/bokeh,htygithub/bokeh,rs2/bokeh,phobson/bokeh,gpfreitas/bokeh,rs2/bokeh,bokeh/bokeh,htygithub/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,mindriot101/bokeh,ericmjl/bokeh,jakirkham/bokeh,msarahan/bokeh,schoolie/bokeh,clairetang6/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,aavanian/bokeh,htygithub/bokeh,msarahan/bokeh,phobson/bokeh,justacec/bokeh,schoolie/bokeh,ptitjano/bokeh,bokeh/bokeh,schoolie/bokeh,maxalbert/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,aavanian/bokeh,aiguofer/bokeh,clairetang6/bokeh,azjps/bokeh,rs2/bokeh,DuCorey/bokeh,bokeh/bokeh,aiguofer/bokeh,gpfreitas/bokeh,DuCorey/bokeh,phobson/bokeh,philippjfr/bokeh,mindriot101/bokeh,justacec/bokeh,bokeh/bokeh,percyfal/bokeh,azjps/bokeh,ericmjl/bokeh,draperjames/bokeh,clair
etang6/bokeh,dennisobrien/bokeh,dennisobrien/bokeh,DuCorey/bokeh | examples/glyphs/buttons_server.py | examples/glyphs/buttons_server.py | from __future__ import print_function
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.plotting import curdoc
from bokeh.models.widgets import (
VBox, Icon,
Button, Toggle, Dropdown,
CheckboxGroup, RadioGroup,
CheckboxButtonGroup, RadioButtonGroup,
)
from bokeh.client import push_session
def button_handler():
print("button_handler: click")
def toggle_handler(active):
print("toggle_handler: %s" % active)
def dropdown_handler(value):
print("dropdown_handler: %s" % value)
def split_handler(value):
print("split_handler: %s" % value)
def checkbox_group_handler(active):
print("checkbox_group_handler: %s" % active)
def radio_group_handler(active):
print("radio_group_handler: %s" % active)
def checkbox_button_group_handler(active):
print("checkbox_button_group_handler: %s" % active)
def radio_button_group_handler(active):
print("radio_button_group_handler: %s" % active)
button = Button(label="Push button", icon=Icon(name="check"), type="primary")
button.on_click(button_handler)
toggle = Toggle(label="Toggle button", type="success")
toggle.on_click(toggle_handler)
menu = [("Item 1", "item_1"), ("Item 2", "item_2"), None, ("Item 3", "item_3")]
dropdown = Dropdown(label="Dropdown button", type="warning", menu=menu)
dropdown.on_click(dropdown_handler)
menu = [("Item 1", "foo"), ("Item 2", "bar"), None, ("Item 3", "baz")]
split = Dropdown(label="Split button", type="danger", menu=menu, default_value="baz")
split.on_click(split_handler)
checkbox_group = CheckboxGroup(labels=["Option 1", "Option 2", "Option 3"], active=[0, 1])
checkbox_group.on_click(checkbox_group_handler)
radio_group = RadioGroup(labels=["Option 1", "Option 2", "Option 3"], active=0)
radio_group.on_click(radio_group_handler)
checkbox_button_group = CheckboxButtonGroup(labels=["Option 1", "Option 2", "Option 3"], active=[0, 1])
checkbox_button_group.on_click(checkbox_button_group_handler)
radio_button_group = RadioButtonGroup(labels=["Option 1", "Option 2", "Option 3"], active=0)
radio_button_group.on_click(radio_button_group_handler)
vbox = VBox(children=[button, toggle, dropdown, split, checkbox_group, radio_group, checkbox_button_group, radio_button_group])
document = Document()
document.add(vbox)
session = push_session(document)
session.show()
if __name__ == "__main__":
session.loop_until_closed()
| from __future__ import print_function
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.plotting import curdoc
from bokeh.models.widgets import (
VBox, Icon,
Button, Toggle, Dropdown,
CheckboxGroup, RadioGroup,
CheckboxButtonGroup, RadioButtonGroup,
)
from bokeh.models import Plot
from bokeh.client import push_session
def button_handler():
print("button_handler: click")
def toggle_handler(active):
print("toggle_handler: %s" % active)
def dropdown_handler(value):
print("dropdown_handler: %s" % value)
def split_handler(value):
print("split_handler: %s" % value)
def checkbox_group_handler(active):
print("checkbox_group_handler: %s" % active)
def radio_group_handler(active):
print("radio_group_handler: %s" % active)
def checkbox_button_group_handler(active):
print("checkbox_button_group_handler: %s" % active)
def radio_button_group_handler(active):
print("radio_button_group_handler: %s" % active)
button = Button(label="Push button", icon=Icon(name="check"), type="primary")
button.on_click(button_handler)
toggle = Toggle(label="Toggle button", type="success")
toggle.on_click(toggle_handler)
menu = [("Item 1", "item_1"), ("Item 2", "item_2"), None, ("Item 3", "item_3")]
dropdown = Dropdown(label="Dropdown button", type="warning", menu=menu)
dropdown.on_click(dropdown_handler)
menu = [("Item 1", "foo"), ("Item 2", "bar"), None, ("Item 3", "baz")]
split = Dropdown(label="Split button", type="danger", menu=menu, default_value="baz")
split.on_click(split_handler)
checkbox_group = CheckboxGroup(labels=["Option 1", "Option 2", "Option 3"], active=[0, 1])
checkbox_group.on_click(checkbox_group_handler)
radio_group = RadioGroup(labels=["Option 1", "Option 2", "Option 3"], active=0)
radio_group.on_click(radio_group_handler)
checkbox_button_group = CheckboxButtonGroup(labels=["Option 1", "Option 2", "Option 3"], active=[0, 1])
checkbox_button_group.on_click(checkbox_button_group_handler)
radio_button_group = RadioButtonGroup(labels=["Option 1", "Option 2", "Option 3"], active=0)
radio_button_group.on_click(radio_button_group_handler)
vbox = VBox(children=[button, toggle, dropdown, split, checkbox_group, radio_group, checkbox_button_group, radio_button_group])
document = Document()
document.add(vbox)
session = push_session(document)
session.show()
if __name__ == "__main__":
session.loop_until_closed()
| bsd-3-clause | Python |
93cab6327aef7386dba6f293a22099272af6af10 | create resouce only if not exist | oVirt/ovirt-engine-sdk-tests | src/infrastructure/annotations/requires.py | src/infrastructure/annotations/requires.py | '''
Created on Jun 19, 2013
@author: mpastern
'''
from src.resource.resourcemanager import ResourceManager
from src.errors.resourcemanagernotfounderror import ResourceManagerNotFoundError
class resources(object):
def __init__(self, params):
self.params = params
def __call__(self, original_func):
decorator_self = self
def wrappee(*args, **kwargs):
for resource in decorator_self.params:
rm = (resource.__name__ + 'ResourceManager')
rm_class = ResourceManager.getResourceManager(rm)
if rm_class:
rm_instance = rm_class()
if not rm_instance.get(get_only=True):
rm_instance.add(**kwargs)
# TODO: use **kwargs for private init
else:
raise ResourceManagerNotFoundError(rm)
return original_func(*args, **kwargs)
return wrappee
| '''
Created on Jun 19, 2013
@author: mpastern
'''
from src.resource.resourcemanager import ResourceManager
from src.errors.resourcemanagernotfounderror import ResourceManagerNotFoundError
class resources(object):
def __init__(self, params):
self.params = params
def __call__(self, original_func):
decorator_self = self
def wrappee(*args, **kwargs):
for resource in decorator_self.params:
rm = (resource.__name__ + 'ResourceManager')
rm_class = ResourceManager.getResourceManager(rm)
if rm_class:
rm_instance = rm_class()
# if not rm_instance.get():
# rm_instance.add()
rm_instance.add(**kwargs)
# TODO: use **kwargs for private init
else:
raise ResourceManagerNotFoundError(rm)
return original_func(*args, **kwargs)
return wrappee
| apache-2.0 | Python |
71b4c326e18ce7e3d0b6aaab5203b3a403a85810 | Update solution_2.py | DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler | Problem033/Python/solution_2.py | Problem033/Python/solution_2.py | import math
frac=1.0
for b in range(1,10):
for a in range(1,b):
for c in range(1,10):
if (a*10+b)/(b*10+c)==a/c:
frac*=(a/c)
print(math.ceil(1/frac))
| import math
frac=1.0
for b in range(1,10):
for a in range(1,b):
for c in range(1,10):
if (a*10+b)/(b*10+c)==a/c:
frac*=(a/c)
print(math.ceil(1/frac))
| mit | Python |
6d7e597ce216093d52ecdcb7db5c087dc6040bb1 | Fix initiation of settings object | jonge-democraten/mezzanine-fullcalendar | fullcalendar/conf.py | fullcalendar/conf.py | from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = type('SettingsDummy', (), default)
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| from datetime import timedelta
from django.conf import settings as django_settings
default = {
'FULLCALENDAR_FIRST_WEEKDAY': 0,
'FULLCALENDAR_OCCURRENCE_DURATION': timedelta(hours=1),
'FULLCALENDAR_SITE_COLORS': {}
}
settings = object()
for key, value in default.items():
setattr(settings, key,
getattr(django_settings, key, value))
| mit | Python |
52e614f811fb9dfcd0dde46de43f13731a3717a5 | Reformat doc string for txStatHat.__init__ | hynek/txStatHat | txstathat.py | txstathat.py | # -*- coding: utf-8 -*-
"""StatHat bindings"""
from __future__ import division, print_function, unicode_literals
import urllib
from twisted.web.client import getPage
try:
from OpenSSL import SSL # noqa
have_ssl = True
except:
have_ssl = False
API_URI = b'http{}://api.stathat.com/ez'.format(b's' if have_ssl else b'')
class txStatHat(object):
"""An API wrapper for StatHat.com."""
def __init__(self, ezkey):
"""Initialize a txStatHat instance.
Does no network activity.
:param ezkey: your API key, i.e. your e-mail address by default.
"""
self.default_args = {'ezkey': ezkey}
def _make_call(self, args):
"""Build postdata using ezkey and supplied dict *args* and post it."""
post_dict = self.default_args.copy()
post_dict.update(args)
d = getPage(
API_URI,
method=b'POST',
postdata=urllib.urlencode(post_dict),
headers={
b'Content-Type': b'application/x-www-form-urlencoded'
},
)
return d
def count(self, stat, count=1):
"""Add *count* to *stat*.
:param stat: a StatHat counter stat
:param count: the value to add to the counter. 1 by default.
:type count: integer
:rtype: twisted.internet.defer.Deferred
"""
return self._make_call({'stat': stat, 'count': unicode(count)})
def value(self, stat, value):
"""Submit *value* to *stat*.
:param stat: a StatHat value stat
:param value: the value to submit
:type value: float or decimal.Decimal
:rtype: twisted.internet.defer.Deferred
"""
return self._make_call({'stat': stat, 'value': unicode(value)})
| # -*- coding: utf-8 -*-
"""StatHat bindings"""
from __future__ import division, print_function, unicode_literals
import urllib
from twisted.web.client import getPage
try:
from OpenSSL import SSL # noqa
have_ssl = True
except:
have_ssl = False
API_URI = b'http{}://api.stathat.com/ez'.format(b's' if have_ssl else b'')
class txStatHat(object):
"""An API wrapper for StatHat.com."""
def __init__(self, ezkey):
"""Initialize a txStatHat instance.
*ezkey* is you API key, i.e. your e-mail address by default. Does no
network activity.
"""
self.default_args = {'ezkey': ezkey}
def _make_call(self, args):
"""Build postdata using ezkey and supplied dict *args* and post it."""
post_dict = self.default_args.copy()
post_dict.update(args)
d = getPage(
API_URI,
method=b'POST',
postdata=urllib.urlencode(post_dict),
headers={
b'Content-Type': b'application/x-www-form-urlencoded'
},
)
return d
def count(self, stat, count=1):
"""Add *count* to *stat*.
:param stat: a StatHat counter stat
:param count: the value to add to the counter. 1 by default.
:type count: integer
:rtype: twisted.internet.defer.Deferred
"""
return self._make_call({'stat': stat, 'count': unicode(count)})
def value(self, stat, value):
"""Submit *value* to *stat*.
:param stat: a StatHat value stat
:param value: the value to submit
:type value: float or decimal.Decimal
:rtype: twisted.internet.defer.Deferred
"""
return self._make_call({'stat': stat, 'value': unicode(value)})
| mit | Python |
b2b123b15f178e81737127a4dda399a31ebb5240 | Update Dice_Probability.py | LamaHamadeh/Harvard-PH526x | Week2-Python-Libraries-and-Concepts-Used-in-Research/Dice_Probability.py | Week2-Python-Libraries-and-Concepts-Used-in-Research/Dice_Probability.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 17 16:26:48 2017
@author: lamahamadeh
"""
#First: Python-based implementation
#------------------------------------
'''
source:
-------
Video 2.4.2: Examples Involving Randomness
Week 2 Overview/Python Libraries and Concepts Used in Research
Using python for research
Harvard
online course provided by edx.org
url: https://courses.edx.org/courses/course-v1:HarvardX+PH526x+3T2016/courseware/317ce880d7644d35840b1f734be76b06/391063d8f58242e892efafc9903b36e8/
'''
#roll a dice 100 times and plot a histogram of the outcomes
#meaning: a histogram that shows how frequent the numbers from 1 to 6 appeared in the 100 samples
import random
import matplotlib.pyplot as plt
random.choice([1,2,3,4,5,6]) #this line throws the dice one time
rolls = []
for k in range(100):#we can try 1000, 10000000 times. We can notice that the histogram gets more flat when the number of rolling times increases.
rolls.append(random.choice([1,2,3,4,5,6]))#in this case, after using for loop, we wre rolling the dice 100 times
print(len(rolls))
#draw a histogram
plt.figure()
plt.hist(rolls, bins = np.linspace(0.5,6.5,7));
plt.show()
#This time we will roll 10 dice not jsut one
ys = []
for rep in range(100):#By increasing the number of dice rolls for each dice the distrbution follows the central limit theorem
#The central limit theorem (CLT) states that the sum of a large number of random variables regardless of their distribution will
#approximately follow a normal distribution (or Gaussian distribution).
y = 0
for k in range (10):
x = random.choice([1,2,3,4,5,6])
y = y + x
ys.append(y)
print(len(ys)) #100
print(min(ys))
print(max(ys))
plt.figure()
plt.hist(ys); #the semicolon suppresses the output
plt.show()
#------------------------------------------------------------------
#Second: NumPy implementation
#---------------------------
'''
source:
-------
Video 2.4.3: using the NumPy Random Module
Week 2 Overview/Python Libraries and Concepts Used in Research
Using python for research
Harvard
online course provided by edx.org
url: https://courses.edx.org/courses/course-v1:HarvardX+PH526x+3T2016/courseware/317ce880d7644d35840b1f734be76b06/391063d8f58242e892efafc9903b36e8/
'''
import numpy as np
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 17 16:26:48 2017
@author: lamahamadeh
source:
-------
Video 2.4.2: Examples Involving Randomness
Week 2 Overview/Python Libraries and Concepts Used in Research
Using python for research
Harvard
online course provided by edx.org
url: https://courses.edx.org/courses/course-v1:HarvardX+PH526x+3T2016/courseware/317ce880d7644d35840b1f734be76b06/391063d8f58242e892efafc9903b36e8/
"""
#roll a dice 100 times and plot a histogram of the outcomes
#meaning: a histogram that shows how frequent the numbers from 1 to 6 appeared in the 100 samples
import numpy as np
import random
import matplotlib.pyplot as plt
random.choice([1,2,3,4,5,6]) #this line throws the dice one time
rolls = []
for k in range(100):#we can try 1000, 10000000 times. We can notice that the histogram gets more flat when the number of rolling times increases.
rolls.append(random.choice([1,2,3,4,5,6]))#in this case, after using for loop, we wre rolling the dice 100 times
print(len(rolls))
#draw a histogram
plt.figure()
plt.hist(rolls, bins = np.linspace(0.5,6.5,7));
plt.show()
#This time we will roll 10 dice not jsut one
ys = []
for rep in range(100):#By increasing the number of dice rolls for each dice the distrbution follows the central limit theorem
#The central limit theorem (CLT) states that the sum of a large number of random variables regardless of their distribution will
#approximately follow a normal distribution (or Gaussian distribution).
y = 0
for k in range (10):
x = random.choice([1,2,3,4,5,6])
y = y + x
ys.append(y)
print(len(ys)) #100
print(min(ys))
print(max(ys))
plt.figure()
plt.hist(ys); #the semicolon suppresses the output
plt.show()
| mit | Python |
67c4d077ee4693290bf9883e90e4ed381b3cd227 | Fix a mistake. | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/matplotlib/hist_logscale_xy.py | python/matplotlib/hist_logscale_xy.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# See:
# -
import numpy as np
import matplotlib.pyplot as plt
# SETUP #######################################################################
# histtype : [‘bar’ | ‘barstacked’ | ‘step’ | ‘stepfilled’]
HIST_TYPE='bar'
ALPHA=0.5
# MAKE DATA ###################################################################
data = np.random.exponential(size=1000000)
#data = np.abs(np.random.normal(size=1000000) * 10000.)
#data = np.random.chisquare(10, size=1000000)
# INIT FIGURE #################################################################
fig = plt.figure(figsize=(8.0, 6.0))
# AX1 #########################################################################
ax1 = fig.add_subplot(211)
res_tuple = ax1.hist(data,
bins=50,
histtype=HIST_TYPE,
alpha=ALPHA)
ax1.set_title("Normal scale")
ax1.set_xlabel("Value")
ax1.set_ylabel("Count")
# AX2 #########################################################################
ax2 = fig.add_subplot(212)
vmin = np.log10(data.min())
vmax = np.log10(data.max())
bins = np.logspace(vmin, vmax, 50) # <- make a range from 10**vmin to 10**vmax
print(bins)
res_tuple = ax2.hist(data,
log=True, # <- Activate log scale on Y axis
bins=bins,
histtype=HIST_TYPE,
alpha=ALPHA)
ax2.set_xscale("log") # <- Activate log scale on Y axis
ax2.set_title("Log scale")
ax2.set_xlabel("Value")
ax2.set_ylabel("Count")
# SHOW AND SAVE FILE ##########################################################
plt.tight_layout()
plt.savefig("hist_logscale_xy.png")
plt.show()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# See:
# -
import numpy as np
import matplotlib.pyplot as plt
# SETUP #######################################################################
# histtype : [‘bar’ | ‘barstacked’ | ‘step’ | ‘stepfilled’]
HIST_TYPE='bar'
ALPHA=0.5
# MAKE DATA ###################################################################
data = np.random.exponential(size=1000000)
#data = np.abs(np.random.normal(size=1000000) * 10000.)
#data = np.random.chisquare(10, size=1000000)
# INIT FIGURE #################################################################
fig = plt.figure(figsize=(8.0, 6.0))
# AX1 #########################################################################
ax1 = fig.add_subplot(211)
res_tuple = ax1.hist(data,
bins=50,
histtype=HIST_TYPE,
alpha=ALPHA)
ax1.set_title("Normal scale")
ax1.set_xlabel("Value")
ax1.set_ylabel("Count")
# AX2 #########################################################################
ax2 = fig.add_subplot(212)
min = np.log10(data.min())
max = np.log10(data.max())
bins = np.logspace(min, max, 50) # <- create a range from 10**min to 10**max
print(bins)
res_tuple = ax2.hist(data,
log=True, # <- Activate log scale on Y axis
bins=bins,
histtype=HIST_TYPE,
alpha=ALPHA)
ax2.set_xscale("log") # <- Activate log scale on Y axis
ax2.set_title("Log scale")
ax2.set_xlabel("Value")
ax2.set_ylabel("Count")
# SHOW AND SAVE FILE ##########################################################
plt.tight_layout()
plt.savefig("hist_logscale_xy.png")
plt.show()
| mit | Python |
e300d739bf0040b76a0deee75cc01b1410ba8953 | change image field to name in CatalogoLandsat serializer | ibamacsr/indicar_process,ibamacsr/indicar_process,ibamacsr/indicar_process,ibamacsr/indicar-process,ibamacsr/indicar-process | indicarprocess/tmsapi/serializers.py | indicarprocess/tmsapi/serializers.py | # -*- coding: utf-8 -*-
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from catalogo.models import CatalogoLandsat
class LandsatSerializer(ModelSerializer):
southwest = SerializerMethodField()
northeast = SerializerMethodField()
name = SerializerMethodField()
class Meta:
model = CatalogoLandsat
fields = ['name', 'data', 'southwest', 'northeast']
def get_bounds(self, obj):
lats = []
lons = []
for lat, lon in obj.shape.coords[0]:
lats.append(lat)
lons.append(lon)
lats.sort()
lons.sort()
return [[lats[-1], lons[-1]], [lats[0], lons[0]]]
def get_southwest(self, obj):
return self.get_bounds(obj)[-1]
def get_northeast(self, obj):
return self.get_bounds(obj)[0]
def get_name(self, obj):
return obj.image.replace('.tif', '')
| # -*- coding: utf-8 -*-
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from catalogo.models import CatalogoLandsat
class LandsatSerializer(ModelSerializer):
southwest = SerializerMethodField()
northeast = SerializerMethodField()
class Meta:
model = CatalogoLandsat
fields = ['image', 'data', 'southwest', 'northeast']
def get_bounds(self, obj):
lats = []
lons = []
for lat, lon in obj.shape.coords[0]:
lats.append(lat)
lons.append(lon)
lats.sort()
lons.sort()
return [[lats[-1], lons[-1]], [lats[0], lons[0]]]
def get_southwest(self, obj):
return self.get_bounds(obj)[-1]
def get_northeast(self, obj):
return self.get_bounds(obj)[0]
| agpl-3.0 | Python |
05a8f2a2e499b25472fbaf1b06e899f589a7101f | fix migration | CivicKnowledge/metaeditor,CivicKnowledge/metaeditor,CivicKnowledge/metaeditor | editor/migrations/0003_auto_20150125_0430.py | editor/migrations/0003_auto_20150125_0430.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from editor.models import Source, Category, Format
# add root data for Source and Category model
def add_root_data(apps, schema_editor):
cat = Category(name ="root", parent=None)
cat.save()
source = Source(
name = "root",
abbreviation = "root",
domain = "",
homepage = "",
about = "",
parent = None,
)
source.save()
source.categories.add(cat)
f = Format(name ="root", parent=None)
f.save()
def revert(apps, schema_editor):
for source in Source.objects.all():
source.delete()
for category in Category.objects.all():
category.delete()
for f in Format.objects.all():
f.delete()
class Migration(migrations.Migration):
dependencies = [
('editor', '0002_auto_20150124_1912'),
]
operations = [
migrations.RunPython(add_root_data, reverse_code=revert),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from editor.models import Source, Category
# add root data for Source and Category model
def add_root_data(apps, schema_editor):
cat = Category(name ="root", parent=None)
cat.save()
source = Source(
name = "root",
abbreviation = "root",
domain = "",
homepage = "",
about = "",
parent = None,
)
source.save()
source.categories.add(cat)
def revert(apps, schema_editor):
for source in Source.objects.all():
source.delete()
for category in Category.objects.all():
category.delete()
class Migration(migrations.Migration):
dependencies = [
('editor', '0002_auto_20150124_1912'),
]
operations = [
migrations.RunPython(add_root_data, reverse_code=revert),
]
| mit | Python |
53df723a1574e62b4a74d56667c131793cf6c506 | add retrieve all users and one user queries | sebasvega95/dist-systems-chat,sebasvega95/dist-systems-chat,sebasvega95/dist-systems-chat | users_handler.py | users_handler.py | from models.users import User
import logging
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
class UsersHandler:
def __init__(self, DB):
self.db = DB
def create_user(self, user_data):
collection = self.db.users
user = collection.find_one({"username": user_data["username"]})
if not user:
new_user = User(user_data)
collection.insert_one(new_user.__dict__)
logging.info("User Created")
return True
logging.warning("User already exists")
return False
def retrieve_users(self):
collection= self.db.users
users = collection.find()
logging.info("All users retrived successfully")
return users
def retrieve_user(self, username):
collection = self.db.users
user = collection.find_one({'username': username})
if user:
logging.info("Users retrived successfully")
return user
logging.error("User <{}> does not exists".format(username))
return None
| from models.users import User
class UsersHandler:
def __init__(self, DB):
self.db = DB
def create_user(self, user_data):
collection = self.db.users
new_user = User(user_data)
collection.insert_one(new_user.__dict__)
| mit | Python |
7674437d752be0791688533dd1409fa083672bb2 | Switch from dictionary to namedtuple | hatchery/Genepool2,hatchery/genepool | genes/java/config.py | genes/java/config.py | #!/usr/bin/env python
from collections import namedtuple
JavaConfig = namedtuple('JavaConfig', ['is_oracle', 'version'])
def config():
return JavaConfig(
is_oracle=True,
version='oracle-java8',
)
| #!/usr/bin/env python
def config():
return {
'is-oracle': True,
'version': 'oracle-java8',
}
| mit | Python |
becf684fc06890679f4c0cdfed1761962e16a343 | Make extra_context at browse_repository view not overriding provided variables | codeinn/vcs,codeinn/vcs | vcs/web/simplevcs/views/repository.py | vcs/web/simplevcs/views/repository.py | from django.contrib import messages
from django.template import RequestContext
from django.shortcuts import render_to_response
from vcs.exceptions import VCSError
def browse_repository(request, repository, template_name, revision=None,
node_path='', extra_context={}):
"""
Generic repository browser.
Provided context variables:
- ``repository``: same what was given
- ``changeset``: based on the given ``revision`` or tip if none given
- ``root``: repositorie's node on the given ``node_path``
"""
context = {}
for key, value in extra_context.items():
context[key] = callable(value) and value() or value
try:
context.update(dict(
changeset = repository.get_changeset(),
root = repository.request(node_path, revision=revision),
))
except VCSError, err:
messages.error(request, str(err))
return render_to_response(template_name, context, RequestContext(request))
| from django.contrib import messages
from django.template import RequestContext
from django.shortcuts import render_to_response
from vcs.exceptions import VCSError
def browse_repository(request, repository, template_name, revision=None,
node_path='', extra_context={}):
"""
Generic repository browser.
Provided context variables:
- ``repository``: same what was given
- ``changeset``: based on the given ``revision`` or tip if none given
- ``root``: repositorie's node on the given ``node_path``
"""
context = {}
try:
context.update(dict(
changeset = repository.get_changeset(),
root = repository.request(node_path, revision=revision),
))
except VCSError, err:
messages.error(request, str(err))
for key, value in extra_context.items():
context[key] = callable(value) and value() or value
return render_to_response(template_name, context, RequestContext(request))
| mit | Python |
7ecaeba33a4fe559f6122953581e533720cb2404 | Add select mkl libs (#22580) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py | var/spack/repos/builtin/packages/intel-oneapi-mkl/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from sys import platform
from spack import *
class IntelOneapiMkl(IntelOneApiLibraryPackage):
"""Intel oneAPI MKL."""
maintainers = ['rscohn2']
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html'
if platform == 'linux':
version('2021.1.1',
sha256='818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/17402/l_onemkl_p_2021.1.1.52_offline.sh',
expand=False)
depends_on('intel-oneapi-tbb')
provides('fftw-api@3')
provides('scalapack')
provides('mkl')
provides('lapack')
provides('blas')
@property
def component_dir(self):
return 'mkl'
@property
def libs(self):
lib_path = '{0}/{1}/latest/lib/intel64'.format(self.prefix, self.component_dir)
mkl_libs = ['libmkl_intel_ilp64', 'libmkl_sequential', 'libmkl_core']
return find_libraries(mkl_libs, root=lib_path, shared=True, recursive=False)
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from sys import platform
from spack import *
class IntelOneapiMkl(IntelOneApiLibraryPackage):
"""Intel oneAPI MKL."""
maintainers = ['rscohn2']
homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html'
if platform == 'linux':
version('2021.1.1',
sha256='818b6bd9a6c116f4578cda3151da0612ec9c3ce8b2c8a64730d625ce5b13cc0c',
url='https://registrationcenter-download.intel.com/akdlm/irc_nas/17402/l_onemkl_p_2021.1.1.52_offline.sh',
expand=False)
depends_on('intel-oneapi-tbb')
provides('fftw-api@3')
provides('scalapack')
provides('mkl')
provides('lapack')
provides('blas')
@property
def component_dir(self):
return 'mkl'
| lgpl-2.1 | Python |
67cea85323195440330580cc3731447956a4ad32 | add default user settings packet | nullpixel/litecord,nullpixel/litecord | litecord/managers/user_settings.py | litecord/managers/user_settings.py |
class SettingsManager:
"""User settings manager.
Provides functions for users to change their settings and retrieve them back.
Attributes
----------
server: :class:`LitecordServer`
Litecord server instance.
settings_coll: `mongo collection`
User settings MongoDB collection.
"""
def __init__(self, server):
self.server = server
self.guild_man = server.guild_man
self.settings_coll = self.server.settings_coll
async def get_settings(self, user):
"""Get a settings object from a User ID.
Parameters
----------
user_id: :class:`User`
User ID to be get settings from.
"""
if user.bot:
return {}
settings = await self.settings_coll.find_one({'user_id': user.id})
if settings is None:
settings = {
'timezone_offset': 0,
'theme': 'dark',
'status': 'online',
'show_current_game': False,
'restricted_guilds': [],
'render_reactions': True,
'render_embeds:': True,
'message_display_compact': True,
'locale': 'en-US',
'inline_embed_media': False,
'inline_attachment_media': False,
'guild_positions': [],
'friend_source_flags': {
'all': True,
},
'explicit_content_filter': 1,
'enable_tts_command': False,
'developer_mode': False,
'detect_platform_accounts': False,
'default_guilds_restricted': False,
'convert_emoticons': True,
'afk_timeout': 600,
}
return settings
async def get_guild_settings(self, user):
"""Get a User Guild Settings object to be used
in READY payloads.
Parameters
----------
user_id: :class:`User`
User ID to get User Guild Settings payload for.
Returns
-------
list
The User Guild Settings payload.
"""
if user.bot:
return []
res = []
async for guild in self.guild_man.yield_guilds(user.id):
res.append(guild.default_settings)
return res
|
class SettingsManager:
"""User settings manager.
Provides functions for users to change their settings and retrieve them back.
Attributes
----------
server: :class:`LitecordServer`
Litecord server instance.
settings_coll: `mongo collection`
User settings MongoDB collection.
"""
def __init__(self, server):
self.server = server
self.guild_man = server.guild_man
self.settings_coll = self.server.settings_coll
async def get_settings(self, user):
"""Get a settings object from a User ID.
Parameters
----------
user_id: :class:`User`
User ID to be get settings from.
"""
if user.bot:
return {}
settings = await self.settings_coll.find_one({'user_id': user.id})
if settings is None:
settings = {}
return settings
async def get_guild_settings(self, user):
"""Get a User Guild Settings object to be used
in READY payloads.
Parameters
----------
user_id: :class:`User`
User ID to get User Guild Settings payload for.
Returns
-------
list
The User Guild Settings payload.
"""
if user.bot:
return []
res = []
async for guild in self.guild_man.yield_guilds(user.id):
res.append(guild.default_settings)
return res
| mit | Python |
603a59785f24aa98662e72d954b3aa0521ad0629 | Make repeatability tests for severities specified by CLI | RianFuro/vint,Kuniwak/vint,Kuniwak/vint,RianFuro/vint | test/unit/vint/linting/config/test_config_cmdargs_source.py | test/unit/vint/linting/config/test_config_cmdargs_source.py | import unittest
from test.asserting.config_source import ConfigSourceAssertion
from vint.linting.config.config_cmdargs_source import ConfigCmdargsSource
from vint.linting.level import Level
class TestConfigFileSource(ConfigSourceAssertion, unittest.TestCase):
def test_get_config_dict(self):
env = {
'cmdargs': {
'verbose': True,
'style': True,
'warning': True,
'max-violations': 10,
},
}
expected_config_dict = {
'cmdargs': {
'verbose': True,
'severity': Level.WARNING,
'max-violations': 10,
},
}
config_source = self.initialize_config_source_with_env(ConfigCmdargsSource, env)
self.assertConfigDict(config_source, expected_config_dict)
def test_get_config_dict_with_no_severity(self):
env = {'cmdargs': {}}
expected_config_dict = {'cmdargs': {}}
config_source = self.initialize_config_source_with_env(ConfigCmdargsSource, env)
self.assertConfigDict(config_source, expected_config_dict)
def test_get_config_dict_with_severity_style_problem(self):
env = {
'cmdargs': {
'style_problem': True,
},
}
expected_config_dict = {
'cmdargs': {
'severity': Level.STYLE_PROBLEM,
},
}
config_source = self.initialize_config_source_with_env(ConfigCmdargsSource, env)
self.assertConfigDict(config_source, expected_config_dict)
def test_get_config_dict_with_severity_warning(self):
env = {
'cmdargs': {
'warning': True,
},
}
expected_config_dict = {
'cmdargs': {
'severity': Level.WARNING,
},
}
config_source = self.initialize_config_source_with_env(ConfigCmdargsSource, env)
self.assertConfigDict(config_source, expected_config_dict)
def test_get_config_dict_with_severity_error(self):
env = {
'cmdargs': {
'error': True,
},
}
expected_config_dict = {
'cmdargs': {
'severity': Level.ERROR,
},
}
config_source = self.initialize_config_source_with_env(ConfigCmdargsSource, env)
self.assertConfigDict(config_source, expected_config_dict)
if __name__ == '__main__':
unittest.main()
| import unittest
from test.asserting.config_source import ConfigSourceAssertion
from vint.linting.config.config_cmdargs_source import ConfigCmdargsSource
from vint.linting.level import Level
class TestConfigFileSource(ConfigSourceAssertion, unittest.TestCase):
def test_get_config_dict(self):
expected_config_dict = {
'cmdargs': {
'verbose': True,
'severity': Level.WARNING,
'max-violations': 10,
},
}
env = {
'cmdargs': {
'verbose': True,
'style': True,
'warning': True,
'max-violations': 10,
},
}
config_source = self.initialize_config_source_with_env(ConfigCmdargsSource, env)
self.assertConfigDict(config_source, expected_config_dict)
if __name__ == '__main__':
unittest.main()
| mit | Python |
54cea5e302820c35025e1afc64b2058a48c5b174 | Implement pop in the data storage module | DesertBot/DesertBot | desertbot/datastore.py | desertbot/datastore.py | import json
import os
class DataStore(object):
def __init__(self, storagePath, defaultsPath):
self.storagePath = storagePath
self.defaultsPath = defaultsPath
self.data = {}
self.load()
def load(self):
# if a file data/defaults/<module>.json exists, it has priority on load
if os.path.exists(self.defaultsPath):
with open(self.defaultsPath) as storageFile:
self.data = json.load(storageFile)
# if not, use data/<network>/<module>.json instead
elif os.path.exists(self.storagePath):
with open(self.storagePath) as storageFile:
self.data = json.load(storageFile)
# if there's nothing, make sure the folder at least exists for the server-specific data files
else:
os.makedirs(os.path.dirname(self.storagePath), exist_ok=True)
def save(self):
# don't save empty files, to keep the data directories from filling up with pointless files
if len(self.data) != 0:
tmpFile = f"{self.storagePath}.tmp"
with open(tmpFile, "w") as storageFile:
storageFile.write(json.dumps(self.data, indent=4))
os.rename(tmpFile, self.storagePath)
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __getitem__(self, item):
return self.data[item]
def __setitem__(self, key, value):
self.data[key] = value
self.save()
def __contains__(self, key):
return key in self.data
def __delitem__(self, key):
del self.data[key]
def items(self):
return self.data.items()
def values(self):
return self.data.values()
def keys(self):
return self.data.keys()
def get(self, key, defaultValue=None):
return self.data.get(key, defaultValue)
def pop(self, key):
data = self.data.pop(key)
self.save()
return data
| import json
import os
class DataStore(object):
def __init__(self, storagePath, defaultsPath):
self.storagePath = storagePath
self.defaultsPath = defaultsPath
self.data = {}
self.load()
def load(self):
# if a file data/defaults/<module>.json exists, it has priority on load
if os.path.exists(self.defaultsPath):
with open(self.defaultsPath) as storageFile:
self.data = json.load(storageFile)
# if not, use data/<network>/<module>.json instead
elif os.path.exists(self.storagePath):
with open(self.storagePath) as storageFile:
self.data = json.load(storageFile)
# if there's nothing, make sure the folder at least exists for the server-specific data files
else:
os.makedirs(os.path.dirname(self.storagePath), exist_ok=True)
def save(self):
# don't save empty files, to keep the data directories from filling up with pointless files
if len(self.data) != 0:
tmpFile = f"{self.storagePath}.tmp"
with open(tmpFile, "w") as storageFile:
storageFile.write(json.dumps(self.data, indent=4))
os.rename(tmpFile, self.storagePath)
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __getitem__(self, item):
return self.data[item]
def __setitem__(self, key, value):
self.data[key] = value
self.save()
def __contains__(self, key):
return key in self.data
def __delitem__(self, key):
del self.data[key]
def items(self):
return self.data.items()
def values(self):
return self.data.values()
def keys(self):
return self.data.keys()
def get(self, key, defaultValue=None):
return self.data.get(key, defaultValue)
| mit | Python |
e3753ac4b2c24c43014aab8121a34b9ad76d6b7a | update tests to v2.1.1 (#1597) (#1597) | smalley/python,N-Parsons/exercism-python,behrtam/xpython,jmluy/xpython,smalley/python,behrtam/xpython,exercism/xpython,exercism/python,N-Parsons/exercism-python,exercism/python,exercism/xpython,jmluy/xpython | exercises/hamming/hamming_test.py | exercises/hamming/hamming_test.py | import unittest
import hamming
# Tests adapted from `problem-specifications//canonical-data.json` @ v2.1.1
class HammingTest(unittest.TestCase):
def test_empty_strands(self):
self.assertEqual(hamming.distance("", ""), 0)
def test_identical_strands(self):
self.assertEqual(hamming.distance("A", "A"), 0)
def test_long_identical_strands(self):
self.assertEqual(hamming.distance("GGACTGA", "GGACTGA"), 0)
def test_complete_distance_in_single_nucleotide_strands(self):
self.assertEqual(hamming.distance("A", "G"), 1)
def test_complete_distance_in_small_strands(self):
self.assertEqual(hamming.distance("AG", "CT"), 2)
def test_small_distance_in_small_strands(self):
self.assertEqual(hamming.distance("AT", "CT"), 1)
def test_small_distance(self):
self.assertEqual(hamming.distance("GGACG", "GGTCG"), 1)
def test_small_distance_in_long_strands(self):
self.assertEqual(hamming.distance("ACCAGGG", "ACTATGG"), 2)
def test_non_unique_character_in_first_strand(self):
self.assertEqual(hamming.distance("AAG", "AAA"), 1)
def test_non_unique_character_in_second_strand(self):
self.assertEqual(hamming.distance("AAA", "AAG"), 1)
def test_same_nucleotides_in_different_positions(self):
self.assertEqual(hamming.distance("TAG", "GAT"), 2)
def test_large_distance(self):
self.assertEqual(hamming.distance("GATACA", "GCATAA"), 4)
def test_large_distance_in_off_by_one_strand(self):
self.assertEqual(hamming.distance("GGACGGATTCTG", "AGGACGGATTCT"), 9)
def test_disallow_first_strand_longer(self):
with self.assertRaisesWithMessage(ValueError):
hamming.distance("AATG", "AAA")
def test_disallow_second_strand_longer(self):
with self.assertRaisesWithMessage(ValueError):
hamming.distance("ATA", "AGTG")
# Utility functions
def setUp(self):
try:
self.assertRaisesRegex
except AttributeError:
self.assertRaisesRegex = self.assertRaisesRegexp
def assertRaisesWithMessage(self, exception):
return self.assertRaisesRegex(exception, r".+")
if __name__ == '__main__':
unittest.main()
| import unittest
import hamming
# Tests adapted from `problem-specifications//canonical-data.json` @ v2.1.0
class HammingTest(unittest.TestCase):
def test_empty_strands(self):
self.assertEqual(hamming.distance("", ""), 0)
def test_identical_strands(self):
self.assertEqual(hamming.distance("A", "A"), 0)
def test_long_identical_strands(self):
self.assertEqual(hamming.distance("GGACTGA", "GGACTGA"), 0)
def test_complete_distance_in_single_nucleotide_strands(self):
self.assertEqual(hamming.distance("A", "G"), 1)
def test_complete_distance_in_small_strands(self):
self.assertEqual(hamming.distance("AG", "CT"), 2)
def test_small_distance_in_small_strands(self):
self.assertEqual(hamming.distance("AT", "CT"), 1)
def test_small_distance(self):
self.assertEqual(hamming.distance("GGACG", "GGTCG"), 1)
def test_small_distance_in_long_strands(self):
self.assertEqual(hamming.distance("ACCAGGG", "ACTATGG"), 2)
def test_non_unique_character_in_first_strand(self):
self.assertEqual(hamming.distance("AAG", "AAA"), 1)
def test_non_unique_character_in_second_strand(self):
self.assertEqual(hamming.distance("AAA", "AAG"), 1)
def test_same_nucleotides_in_different_positions(self):
self.assertEqual(hamming.distance("TAG", "GAT"), 2)
def test_large_distance(self):
self.assertEqual(hamming.distance("GATACA", "GCATAA"), 4)
def test_large_distance_in_off_by_one_strand(self):
self.assertEqual(hamming.distance("GGACGGATTCTG", "AGGACGGATTCT"), 9)
def test_disallow_first_strand_longer(self):
with self.assertRaisesWithMessage(ValueError):
hamming.distance("AATG", "AAA")
def test_disallow_second_strand_longer(self):
with self.assertRaisesWithMessage(ValueError):
hamming.distance("ATA", "AGTG")
# Utility functions
def setUp(self):
try:
self.assertRaisesRegex
except AttributeError:
self.assertRaisesRegex = self.assertRaisesRegexp
def assertRaisesWithMessage(self, exception):
return self.assertRaisesRegex(exception, r".+")
if __name__ == '__main__':
unittest.main()
| mit | Python |
d47cfd7c1a4dd22ab175539dcb0e3702a21f8bb7 | Move scaling factors to constant and explain | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | ynr/apps/moderation_queue/management/commands/moderation_queue_detect_faces_in_queued_images.py | ynr/apps/moderation_queue/management/commands/moderation_queue_detect_faces_in_queued_images.py | import json
import boto3
from django.core.management.base import BaseCommand, CommandError
from moderation_queue.models import QueuedImage
# These magic values are because the AWS API crops faces quite tightly by
# default, meaning we literally just get the face. These values are about
# right or, they are more right than the default crop.
MIN_SCALING_FACTOR = 0.3
MAX_SCALING_FACTOR = 2
class Command(BaseCommand):
def handle(self, **options):
rekognition = boto3.client("rekognition", "eu-west-1")
attributes = ["ALL"]
any_failed = False
qs = QueuedImage.objects.filter(decision="undecided").exclude(
face_detection_tried=True
)
for qi in qs:
try:
detected = rekognition.detect_faces(
Image={"Bytes": qi.image.file.read()}, Attributes=attributes
)
self.set_x_y_from_response(qi, detected, options["verbosity"])
except Exception as e:
msg = "Skipping QueuedImage{id}: {error}"
self.stdout.write(msg.format(id=qi.id, error=e))
any_failed = True
qi.face_detection_tried = True
qi.save()
if any_failed:
raise CommandError("Broken images found (see above)")
def set_x_y_from_response(self, qi, detected, verbosity=0):
if detected and detected["FaceDetails"]:
im_width = qi.image.width
im_height = qi.image.height
bounding_box = detected["FaceDetails"][0]["BoundingBox"]
qi.crop_min_x = bounding_box["Left"] * im_width * MIN_SCALING_FACTOR
qi.crop_min_y = bounding_box["Top"] * im_height * MIN_SCALING_FACTOR
qi.crop_max_x = (
bounding_box["Width"] * im_width * MAX_SCALING_FACTOR
)
qi.crop_max_y = (
bounding_box["Height"] * im_height * MAX_SCALING_FACTOR
)
qi.detection_metadata = json.dumps(detected, indent=4)
if int(verbosity) > 1:
self.stdout.write("Set bounds of {}".format(qi))
else:
self.stdout.write("Couldn't find a face in {}".format(qi))
| import json
import boto3
from django.core.management.base import BaseCommand, CommandError
from moderation_queue.models import QueuedImage
class Command(BaseCommand):
def handle(self, **options):
rekognition = boto3.client("rekognition", "eu-west-1")
attributes = ["ALL"]
any_failed = False
qs = QueuedImage.objects.filter(decision="undecided").exclude(
face_detection_tried=True
)
for qi in qs:
try:
detected = rekognition.detect_faces(
Image={"Bytes": qi.image.file.read()}, Attributes=attributes
)
self.set_x_y_from_response(qi, detected, options["verbosity"])
except Exception as e:
msg = "Skipping QueuedImage{id}: {error}"
self.stdout.write(msg.format(id=qi.id, error=e))
any_failed = True
qi.face_detection_tried = True
qi.save()
if any_failed:
raise CommandError("Broken images found (see above)")
def set_x_y_from_response(self, qi, detected, verbosity=0):
if detected and detected["FaceDetails"]:
im_width = qi.image.width
im_height = qi.image.height
bounding_box = detected["FaceDetails"][0]["BoundingBox"]
qi.crop_min_x = bounding_box["Left"] * im_width * 0.3
qi.crop_min_y = bounding_box["Top"] * im_height * 0.3
qi.crop_max_x = bounding_box["Width"] * im_width * 2
qi.crop_max_y = bounding_box["Height"] * im_height * 2
qi.detection_metadata = json.dumps(detected, indent=4)
if int(verbosity) > 1:
self.stdout.write("Set bounds of {}".format(qi))
else:
self.stdout.write("Couldn't find a face in {}".format(qi))
| agpl-3.0 | Python |
a0e0f7867e8e9805fb035a8db75e9d187fc06f3b | fix merge | PhilipGarnero/django-rest-framework-social-oauth2,villoid/django-rest-framework-social-oauth2 | rest_framework_social_oauth2/views.py | rest_framework_social_oauth2/views.py | # -*- coding: utf-8 -*-
import json
from braces.views import CsrfExemptMixin
from oauth2_provider.ext.rest_framework import OAuth2Authentication
from oauth2_provider.models import Application, AccessToken
from oauth2_provider.settings import oauth2_settings
from oauth2_provider.views.mixins import OAuthLibMixin
from rest_framework import permissions
from rest_framework import status
from rest_framework.decorators import api_view, authentication_classes, permission_classes
from rest_framework.response import Response
from rest_framework.views import APIView
from .oauth2_backends import KeepRequestCore
from .oauth2_endpoints import SocialTokenServer
class ConvertTokenView(CsrfExemptMixin, OAuthLibMixin, APIView):
"""
Implements an endpoint to provide access tokens
The endpoint is used in the following flows:
* Authorization code
* Password
* Client credentials
"""
server_class = SocialTokenServer
validator_class = oauth2_settings.OAUTH2_VALIDATOR_CLASS
oauthlib_backend_class = KeepRequestCore
permission_classes = (permissions.AllowAny,)
def post(self, request, *args, **kwargs):
# Use the rest framework `.data` to fake the post body of the django request.
request._request.POST = request._request.POST.copy()
for key, value in request.data.iteritems():
request._request.POST[key] = value
url, headers, body, status = self.create_token_response(request._request)
response = Response(data=json.loads(body), status=status)
for k, v in headers.items():
response[k] = v
return response
@api_view(['POST'])
@authentication_classes([OAuth2Authentication])
@permission_classes([permissions.IsAuthenticated])
def invalidate_sessions(request):
client_id = request.POST.get("client_id", None)
if client_id is None:
return Response({
"client_id": ["This field is required."]
}, status=status.HTTP_400_BAD_REQUEST)
try:
app = Application.objects.get(client_id=client_id)
except Application.DoesNotExist:
return Response({
"detail": "The application linked to the provided client_id could not be found."
}, status=status.HTTP_400_BAD_REQUEST)
tokens = AccessToken.objects.filter(user=request.user, application=app)
tokens.delete()
return Response({}, status=status.HTTP_204_NO_CONTENT)
| # -*- coding: utf-8 -*-
import json
from braces.views import CsrfExemptMixin
from oauth2_provider.ext.rest_framework import OAuth2Authentication
from oauth2_provider.models import Application, AccessToken
from oauth2_provider.settings import oauth2_settings
from oauth2_provider.views.mixins import OAuthLibMixin
from rest_framework import permissions
from rest_framework import status
from rest_framework.decorators import api_view, authentication_classes, permission_classes
from rest_framework.response import Response
from rest_framework.views import APIView
from .oauth2_backends import KeepRequestCore
from .oauth2_endpoints import SocialTokenServer
class ConvertTokenView(CsrfExemptMixin, OAuthLibMixin, APIView):
"""
Implements an endpoint to provide access tokens
The endpoint is used in the following flows:
* Authorization code
* Password
* Client credentials
"""
server_class = SocialTokenServer
validator_class = oauth2_settings.OAUTH2_VALIDATOR_CLASS
oauthlib_backend_class = KeepRequestCore
def post(self, request, *args, **kwargs):
# Use the rest framework `.data` to fake the post body of the django request.
request._request.POST = request._request.POST.copy()
for key, value in request.data.iteritems():
request._request.POST[key] = value
url, headers, body, status = self.create_token_response(request._request)
response = Response(data=json.loads(body), status=status)
for k, v in headers.items():
response[k] = v
return response
@api_view(['POST'])
@authentication_classes([OAuth2Authentication])
@permission_classes([permissions.IsAuthenticated])
def invalidate_sessions(request):
client_id = request.POST.get("client_id", None)
if client_id is None:
return Response({
"client_id": ["This field is required."]
}, status=status.HTTP_400_BAD_REQUEST)
try:
app = Application.objects.get(client_id=client_id)
except Application.DoesNotExist:
return Response({
"detail": "The application linked to the provided client_id could not be found."
}, status=status.HTTP_400_BAD_REQUEST)
tokens = AccessToken.objects.filter(user=request.user, application=app)
tokens.delete()
return Response({}, status=status.HTTP_204_NO_CONTENT)
| mit | Python |
e13a74ae4e1884017593143e01e8882d7e802d7b | clean up imports | compas-dev/compas | src/compas_rhino/geometry/__init__.py | src/compas_rhino/geometry/__init__.py | """
********************************************************************************
geometry
********************************************************************************
.. currentmodule:: compas_rhino.geometry
"""
from __future__ import absolute_import
__all__ = []
| """
********************************************************************************
geometry
********************************************************************************
.. currentmodule:: compas_rhino.geometry
Classes
=======
.. autosummary::
:toctree: generated/
:nosignatures:
RhinoGeometry
RhinoBox
RhinoCircle
RhinoCone
RhinoCurve
RhinoCylinder
RhinoEllipse
RhinoLine
RhinoMesh
RhinoPlane
RhinoPoint
RhinoPolyline
RhinoSphere
RhinoSurface
RhinoVector
"""
from __future__ import absolute_import
from ._geometry import RhinoGeometry
from .box import RhinoBox
from .circle import RhinoCircle
from .cone import RhinoCone
from .curve import RhinoCurve
from .cylinder import RhinoCylinder
from .ellipse import RhinoEllipse
from .line import RhinoLine
from .mesh import RhinoMesh
from .plane import RhinoPlane
from .point import RhinoPoint
from .polyline import RhinoPolyline
from .sphere import RhinoSphere
from .surface import RhinoSurface
from .vector import RhinoVector
BaseRhinoGeometry = RhinoGeometry
__all__ = [
'RhinoGeometry',
'RhinoBox',
'RhinoCircle',
'RhinoCone',
'RhinoCurve',
'RhinoCylinder',
'RhinoEllipse',
'RhinoLine',
'RhinoMesh',
'RhinoPlane',
'RhinoPoint',
'RhinoPolyline',
'RhinoSphere',
'RhinoSurface',
'RhinoVector',
]
| mit | Python |
3fb93c4b839457430180f65f1feae4c7abdba0ac | tag celery syslog messages | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/dbaas/celery.py | dbaas/dbaas/celery.py | from __future__ import absolute_import
import os
import logging
from datetime import timedelta
from celery import Celery
from django.conf import settings
from dbaas import celeryconfig
from logging.handlers import SysLogHandler
from celery.log import redirect_stdouts_to_logger
from celery.signals import after_setup_task_logger, after_setup_logger
def setup_log(**args):
# redirect stdout and stderr to logger
redirect_stdouts_to_logger(args['logger'])
# logs to local syslog
syslog = SysLogHandler(address=settings.SYSLOG_FILE, facility=logging.handlers.SysLogHandler.LOG_LOCAL3)
# setting log level
syslog.setLevel(args['loglevel'])
# setting log format
formatter = logging.Formatter('dbaas: #celery %(name)s %(message)s')
syslog.setFormatter(formatter)
# add new handler to logger
args['logger'].addHandler(syslog)
after_setup_logger.connect(setup_log)
after_setup_task_logger.connect(setup_log)
LOG = logging.getLogger(__name__)
#set this variable to True to run celery tasks synchronously
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dbaas.settings')
app = Celery('dbaas')
app.config_from_object(celeryconfig)
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
LOG.debug('Request: {0!r}'.format(self.request)) | from __future__ import absolute_import
import os
import logging
from datetime import timedelta
from celery import Celery
from django.conf import settings
from dbaas import celeryconfig
from logging.handlers import SysLogHandler
from celery.log import redirect_stdouts_to_logger
from celery.signals import after_setup_task_logger, after_setup_logger
def setup_log(**args):
# redirect stdout and stderr to logger
redirect_stdouts_to_logger(args['logger'])
# logs to local syslog
syslog = SysLogHandler(address=settings.SYSLOG_FILE, facility=logging.handlers.SysLogHandler.LOG_LOCAL3)
# setting log level
syslog.setLevel(args['loglevel'])
# setting log format
formatter = logging.Formatter('dbaas: %(name)s %(message)s')
syslog.setFormatter(formatter)
# add new handler to logger
args['logger'].addHandler(syslog)
after_setup_logger.connect(setup_log)
after_setup_task_logger.connect(setup_log)
LOG = logging.getLogger(__name__)
#set this variable to True to run celery tasks synchronously
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dbaas.settings')
app = Celery('dbaas')
app.config_from_object(celeryconfig)
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
LOG.debug('Request: {0!r}'.format(self.request)) | bsd-3-clause | Python |
c0f917c6098b18479a69fe129a0fd19d11f67df7 | Fix startup | paulkramme/btsoot | src/btsoot.py | src/btsoot.py | #!/usr/bin/env python3.5
#MIT License
#
#Copyright (c) 2016 Paul Kramme
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
def main():
print("BTSOOT 0.1.0")
if __name__ == __name__:
try:
main()
except KeyboardInterrupt:
print("Stopping program.")
exit()
except Exception:
print("Unknown Critical Exception")
print("Quitting...")
| bsd-3-clause | Python |
|
21620653125f33fd0d19c1bb2f16b51ec3c853f9 | fix tmin/tmax | teuben/masc,teuben/pyASC,teuben/masc,warnerem/pyASC,teuben/pyASC,teuben/pyASC,teuben/pyASC,warnerem/pyASC,warnerem/pyASC,warnerem/pyASC,teuben/pyASC,teuben/pyASC,warnerem/pyASC,teuben/pyASC,warnerem/pyASC,teuben/masc,warnerem/pyASC | ASC/SkyPie.py | ASC/SkyPie.py | #! /usr/bin/env python
#
# Takes about 15" for 1400 images on laptop with a local fast disk (100% cpu)
# But 60" on the Xeon, but at 300% cpu
#
import matplotlib.pyplot as plt
import numpy as np
import sys
table = sys.argv[1]
png = table + '.png'
twopi = 2*np.pi
# table of decimal hour time and median sky brightness (50,000 is very bright)
(t,s) = np.loadtxt(table).T
print("Sky: ",s.min(),s.max())
print("Time:",t.min(),t.max())
t0 = t[0]
t1 = t[-1]
print(t0,t1)
# tmin is the sunrise, from t1 (6), should be near 90
# tmax is the sunset, from t0 (18) 270
tmin = (6-t1)*15 + 90
tmax = (18-t0)*15 + 270
print(tmin,tmax)
x = (12-t) * twopi / 24.0
y = s.max()-s
print(x.min(),x.max())
print(y.min(),y.max())
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection='polar'))
ax.plot(x, y)
ax.set_theta_zero_location('S')
ax.xaxis.set_major_formatter(plt.NullFormatter())
ax.xaxis.set_major_formatter(plt.NullFormatter())
ax.yaxis.set_major_formatter(plt.NullFormatter())
ax.yaxis.set_major_formatter(plt.NullFormatter())
if False:
# always same pie, an extra hour either side
tmin=75
tmax=285
print(tmin,tmax)
ax.set_thetamin(tmin)
ax.set_thetamax(tmax)
ya = 0.2 * y
yb = 0.4 * y
yc = 0.8 * y
yd = 0.8 * y
ye = 0.9 * y
ax.fill_between(x,0, ya,facecolor='green',alpha=0.1)
ax.fill_between(x,ya,yb,facecolor='green',alpha=0.3)
ax.fill_between(x,yb,yc,facecolor='green',alpha=0.5)
ax.fill_between(x,yc,yd,facecolor='green',alpha=0.7)
ax.fill_between(x,yd,ye,facecolor='green',alpha=0.85)
ax.fill_between(x,ye,y ,facecolor='green',alpha=1)
# needs tweaking
plt.text(3.14,50000,'midnight',horizontalalignment='center')
plt.text(1.1,42000,'sunrise')
plt.text(5.1,48000,'sunset')
plt.text(5.5,20000,'imagine a moon')
plt.title("%s sky: %g %g %g-%g h" % (table,s.min(),s.max(),t0,t1))
plt.savefig(png)
plt.show()
print("Written ",png)
| #! /usr/bin/env python
#
# Takes about 15" fpr 1400 images on laptop with a local fast disk
#
import matplotlib.pyplot as plt
import numpy as np
import sys
date = ''
table = sys.argv[1]
png = table + '.png'
twopi = 2*np.pi
# table of time index (1...N) and median sky brightness (50,000 is very bright)
(t,s) = np.loadtxt(table).T
print("Sky: ",s.min(),s.max())
print("Time:",t.min(),t.max())
t0 = t[0]
t1 = t[-1]
print(t0,t1)
# degrees for polar plot
tmin = (t0-12.0)*180/12.0
tmax = 360 - (12-t1)*180/12.0
x = (12+24-t) * twopi / 24.0
y = s.max()-s
print(x.min(),x.max())
print(y.min(),y.max())
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection='polar'))
ax.plot(x, y)
ax.set_theta_zero_location('S')
ax.xaxis.set_major_formatter(plt.NullFormatter())
ax.xaxis.set_major_formatter(plt.NullFormatter())
ax.yaxis.set_major_formatter(plt.NullFormatter())
ax.yaxis.set_major_formatter(plt.NullFormatter())
ax.set_thetamin(tmin)
ax.set_thetamax(tmax)
ya = 0.2 * y
yb = 0.4 * y
yc = 0.8 * y
yd = 0.8 * y
ye = 0.9 * y
ax.fill_between(x,0, ya,facecolor='green',alpha=0.1)
ax.fill_between(x,ya,yb,facecolor='green',alpha=0.3)
ax.fill_between(x,yb,yc,facecolor='green',alpha=0.5)
ax.fill_between(x,yc,yd,facecolor='green',alpha=0.7)
ax.fill_between(x,yd,ye,facecolor='green',alpha=0.85)
ax.fill_between(x,ye,y ,facecolor='green',alpha=1)
# needs tweaking
plt.text(3.14,50000,'midnight',horizontalalignment='center')
plt.text(1.1,42000,'sunrise')
plt.text(5.1,48000,'sunset')
plt.text(5.5,20000,'imagine a moon')
plt.title("%s sky: %g %g %g-%g h" % (table,s.min(),s.max(),t0,t1))
plt.savefig(png)
plt.show()
print("Written ",png)
| mit | Python |
d84034db71abac46ef765f1640f3efa6712f5c42 | Update RegisterHandler.py | emeric254/gala-stri-website,emeric254/gala-stri-website,emeric254/gala-stri-website | Handlers/RegisterHandler.py | Handlers/RegisterHandler.py | # -*- coding: utf-8 -*-
import logging
from Handlers.BaseHandler import BaseHandler
from Tools import PostgreSQL, VerifyFields
logger = logging.getLogger(__name__)
class RegisterHandler(BaseHandler):
"""handle / endpoint"""
def get(self):
"""Serve Get and return main page"""
self.render('register.html')
def post(self):
"""Get user completed form and verify it before save it"""
prenom = self.get_body_argument('prenom')
nom = self.get_body_argument('nom')
courriel = self.get_body_argument('courriel')
genre = self.get_body_argument('genre')
promotion = int(self.get_body_argument('promotion'))
if VerifyFields.verify_all(prenom, nom, courriel, genre, promotion):
PostgreSQL.insert_inscrit(prenom, nom, genre, courriel, promotion)
self.render('registered.html')
else:
self.send_error(status_code=400)
| # -*- coding: utf-8 -*-
import logging
from Handlers.BaseHandler import BaseHandler
from Tools import PostgreSQL, VerifyFields
logger = logging.getLogger(__name__)
class RegisterHandler(BaseHandler):
"""handle / endpoint"""
def initialize(self):
self.conn = PostgreSQL.get_session()
def get(self):
"""Serve Get and return main page"""
self.render('register.html')
def post(self):
"""Get user completed form and verify it before save it"""
prenom = self.get_body_argument('prenom')
nom = self.get_body_argument('nom')
courriel = self.get_body_argument('courriel')
genre = self.get_body_argument('genre')
promotion = int(self.get_body_argument('promotion'))
if VerifyFields.verify_all(prenom, nom, courriel, genre, promotion):
PostgreSQL.insert_inscrit(prenom, nom, genre, courriel, promotion)
self.render('registered.html')
else:
self.send_error(status_code=400)
| mit | Python |
0aaa9000f8cf545bd5bfa41b6538d56c91dbde97 | Update base box in sample config too | f-droid/fdroid-server,f-droid/fdroid-server,f-droid/fdroid-server,matlink/fdroidserver,matlink/fdroidserver,fdroidtravis/fdroidserver,matlink/fdroidserver,OneEducation/AppUniverse_Server,f-droid/fdroid-server,OneEducation/AppUniverse_Server,f-droid/fdroidserver,f-droid/fdroidserver,matlink/fdroidserver,f-droid/fdroidserver,f-droid/fdroidserver,fdroidtravis/fdroidserver,fdroidtravis/fdroidserver,OneEducation/AppUniverse_Server,OneEducation/AppUniverse_Server,fantastico/fdroidserver,f-droid/fdroid-server,f-droid/fdroidserver,OneEducation/AppUniverse_Server,fantastico/fdroidserver,fantastico/fdroidserver,fantastico/fdroidserver,fantastico/fdroidserver,matlink/fdroidserver,fdroidtravis/fdroidserver | sampleconfigs/makebs.config.sample.py | sampleconfigs/makebs.config.sample.py | #!/usr/bin/env python2
# You will need to alter these before running ./makebuildserver
# Name of the base box to use...
basebox = "testing32"
# Location where raring32.box can be found, if you don't already have
# it. For security reasons, it's recommended that you make your own
# in a secure environment using trusted media (see the manual) but
# you can use this default if you like...
baseboxurl = "https://f-droid.org/testing32.box"
memory = 3584
# Debian package proxy server - set this to None unless you have one...
aptproxy = "http://192.168.0.19:8000"
# Set to True if your base box is 64 bit...
arch64 = False
| #!/usr/bin/env python2
# You will need to alter these before running ./makebuildserver
# Name of the base box to use...
basebox = "raring32"
# Location where raring32.box can be found, if you don't already have
# it. Could be set to https://f-droid.org/raring32.box if you like...
baseboxurl = "/shares/software/OS and Boot/raring32.box"
memory = 3584
# Debian package proxy server - set this to None unless you have one...
aptproxy = "http://192.168.0.19:8000"
# Set to True if your base box is 64 bit...
arch64 = False
| agpl-3.0 | Python |
32aec3e5595fe0868b77260cb64be718d4e7f3b8 | Update Keras.py | paperrune/Neural-Networks,paperrune/Neural-Networks | Momentum/Keras.py | Momentum/Keras.py | from keras.datasets import mnist
from keras.initializers import RandomUniform
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import SGD
from keras.utils import to_categorical
batch_size = 128
epochs = 30
learning_rate = 0.1
momentum = 0.9
num_classes = 10
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784).astype('float32') / 255
y_train = to_categorical(y_train, num_classes)
x_test = x_test.reshape(10000, 784).astype('float32') / 255
y_test = to_categorical(y_test, num_classes)
model = Sequential()
model.add(Dense(512,
activation='relu',
input_shape=(784,),
kernel_initializer=RandomUniform(minval=-0.01, maxval=0.01)))
model.add(Dense(512,
activation='relu',
input_shape=(784,),
kernel_initializer=RandomUniform(minval=-0.01, maxval=0.01)))
model.add(Dense(num_classes,
activation='softmax',
input_shape=(784,),
kernel_initializer=RandomUniform(minval=-0.01, maxval=0.01)))
model.summary()
model.compile(loss='categorical_crossentropy',
optimizer=SGD(lr=learning_rate, momentum=momentum),
metrics=['accuracy'])
history = model.fit(x_train,
y_train,
batch_size=batch_size,
epochs=epochs,
validation_data=(x_test, y_test))
| from keras.datasets import mnist
from keras.initializers import RandomUniform
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import SGD
from keras.utils import to_categorical
batch_size = 128
epochs = 30
learning_rate = 0.1
momentum = 0.9
num_classes = 10
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784).astype('float32') / 255
y_train = to_categorical(y_train, num_classes)
x_test = x_test.reshape(10000, 784).astype('float32') / 255
y_test = to_categorical(y_test, num_classes)
model = Sequential()
model.add(Dense(512,
activation='tanh',
input_shape=(784,),
kernel_initializer=RandomUniform(minval=-0.01, maxval=0.01)))
model.add(Dense(512,
activation='tanh',
input_shape=(784,),
kernel_initializer=RandomUniform(minval=-0.01, maxval=0.01)))
model.add(Dense(num_classes,
activation='softmax',
input_shape=(784,),
kernel_initializer=RandomUniform(minval=-0.01, maxval=0.01)))
model.summary()
model.compile(loss='categorical_crossentropy',
optimizer=SGD(lr=learning_rate, momentum=momentum),
metrics=['accuracy'])
history = model.fit(x_train,
y_train,
batch_size=batch_size,
epochs=epochs,
validation_data=(x_test, y_test)) | mit | Python |
aa6090b69f64721391dec38de04e8d01d23c48bf | Add tests for differential calculus methods | kaichogami/sympy,lindsayad/sympy,jbbskinny/sympy,Titan-C/sympy,Titan-C/sympy,wanglongqi/sympy,Vishluck/sympy,Shaswat27/sympy,Shaswat27/sympy,drufat/sympy,debugger22/sympy,rahuldan/sympy,Davidjohnwilson/sympy,sahmed95/sympy,sampadsaha5/sympy,Shaswat27/sympy,ahhda/sympy,AkademieOlympia/sympy,farhaanbukhsh/sympy,skidzo/sympy,saurabhjn76/sympy,kevalds51/sympy,rahuldan/sympy,jerli/sympy,jaimahajan1997/sympy,Vishluck/sympy,chaffra/sympy,saurabhjn76/sympy,ahhda/sympy,sampadsaha5/sympy,kaushik94/sympy,ahhda/sympy,MechCoder/sympy,mcdaniel67/sympy,moble/sympy,pandeyadarsh/sympy,abhiii5459/sympy,madan96/sympy,wanglongqi/sympy,postvakje/sympy,mafiya69/sympy,Titan-C/sympy,Davidjohnwilson/sympy,iamutkarshtiwari/sympy,jerli/sympy,Davidjohnwilson/sympy,rahuldan/sympy,maniteja123/sympy,emon10005/sympy,cswiercz/sympy,kaichogami/sympy,lindsayad/sympy,Arafatk/sympy,moble/sympy,wanglongqi/sympy,ChristinaZografou/sympy,abhiii5459/sympy,Curious72/sympy,jbbskinny/sympy,mafiya69/sympy,lindsayad/sympy,mcdaniel67/sympy,hargup/sympy,sahmed95/sympy,sampadsaha5/sympy,cswiercz/sympy,jerli/sympy,VaibhavAgarwalVA/sympy,kevalds51/sympy,chaffra/sympy,saurabhjn76/sympy,moble/sympy,farhaanbukhsh/sympy,hargup/sympy,maniteja123/sympy,souravsingh/sympy,cswiercz/sympy,postvakje/sympy,madan96/sympy,MechCoder/sympy,oliverlee/sympy,atreyv/sympy,AkademieOlympia/sympy,ga7g08/sympy,yashsharan/sympy,yashsharan/sympy,hargup/sympy,yukoba/sympy,skidzo/sympy,kaushik94/sympy,shikil/sympy,maniteja123/sympy,atreyv/sympy,Arafatk/sympy,Curious72/sympy,souravsingh/sympy,aktech/sympy,souravsingh/sympy,pandeyadarsh/sympy,Arafatk/sympy,abhiii5459/sympy,pandeyadarsh/sympy,mcdaniel67/sympy,oliverlee/sympy,ChristinaZografou/sympy,oliverlee/sympy,farhaanbukhsh/sympy,atreyv/sympy,madan96/sympy,sahmed95/sympy,iamutkarshtiwari/sympy,drufat/sympy,MechCoder/sympy,skidzo/sympy,kaushik94/sympy,jbbskinny/sympy,ChristinaZografou/sympy,Vishluck/sympy,V
aibhavAgarwalVA/sympy,jaimahajan1997/sympy,debugger22/sympy,AkademieOlympia/sympy,chaffra/sympy,VaibhavAgarwalVA/sympy,yashsharan/sympy,yukoba/sympy,aktech/sympy,jaimahajan1997/sympy,kevalds51/sympy,ga7g08/sympy,kaichogami/sympy,ga7g08/sympy,postvakje/sympy,mafiya69/sympy,emon10005/sympy,yukoba/sympy,shikil/sympy,drufat/sympy,shikil/sympy,Curious72/sympy,emon10005/sympy,aktech/sympy,debugger22/sympy,iamutkarshtiwari/sympy | sympy/calculus/tests/test_singularities.py | sympy/calculus/tests/test_singularities.py | from sympy import Symbol, exp, log
from sympy.calculus.singularities import (singularities, is_increasing,
is_strictly_increasing, is_decreasing,
is_strictly_decreasing)
from sympy.sets import Interval
from sympy import oo, S
from sympy.utilities.pytest import XFAIL
x = Symbol('x')
def test_singularities():
x = Symbol('x', real=True)
assert singularities(x**2, x) == ()
assert singularities(x/(x**2 + 3*x + 2), x) == (-2, -1)
@XFAIL
def test_singularities_non_rational():
x = Symbol('x', real=True)
assert singularities(exp(1/x), x) == (0)
assert singularities(log((x - 2)**2), x) == (2)
def test_is_increasing():
assert is_increasing(x**3 - 3*x**2 + 4*x, S.Reals)
assert is_increasing(-x**2, Interval(-oo, 0))
assert not is_increasing(-x**2, Interval(0, oo))
assert not is_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval(-2, 3))
def test_is_strictly_increasing():
assert is_strictly_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval.Ropen(-oo, -2))
assert is_strictly_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval.Lopen(3, oo))
assert not is_strictly_increasing(4*x**3 - 6*x**2 - 72*x + 30, Interval.open(-2, 3))
assert not is_strictly_increasing(-x**2, Interval(0, oo))
def test_is_decreasing():
assert is_decreasing(1/(x**2 - 3*x), Interval.open(1.5, 3))
assert is_decreasing(1/(x**2 - 3*x), Interval.Lopen(3, oo))
assert not is_decreasing(1/(x**2 - 3*x), Interval.Ropen(-oo, S(3)/2))
assert not is_decreasing(-x**2, Interval(-oo, 0))
def test_is_strictly_decreasing():
assert is_decreasing(1/(x**2 - 3*x), Interval.open(1.5, 3))
assert is_decreasing(1/(x**2 - 3*x), Interval.Lopen(3, oo))
assert not is_decreasing(1/(x**2 - 3*x), Interval.Ropen(-oo, S(3)/2))
assert not is_decreasing(-x**2, Interval(-oo, 0))
| from sympy import Symbol, exp, log
from sympy.calculus.singularities import singularities
from sympy.utilities.pytest import XFAIL
def test_singularities():
x = Symbol('x', real=True)
assert singularities(x**2, x) == ()
assert singularities(x/(x**2 + 3*x + 2), x) == (-2, -1)
@XFAIL
def test_singularities_non_rational():
x = Symbol('x', real=True)
assert singularities(exp(1/x), x) == (0)
assert singularities(log((x - 2)**2), x) == (2)
@XFAIL
def test_is_increasing():
pass
@XFAIL
def test_is_strictly_increasing():
pass
@XFAIL
def test_is_decreasing():
pass
@XFAIL
def test_is_strictly_decreasing():
pass
@XFAIL
def is_monotonic():
pass
| bsd-3-clause | Python |
f84f7e9091725d638e93d1dc14b830118a1833c8 | add returns for views | crucl0/gps_tracker,crucl0/gps_tracker,crucl0/gps_tracker | gps_tracker/views.py | gps_tracker/views.py | from pyramid.view import view_config
points_list = [
{"_id": 'ObjectId("52e3eb56a7cade5d0898e012")', "latitude": "45.215",
"longitude": "14.131", "gas_station": "Lukoil", "odometer": "24100",
"description": "Bad coffee"},
{"_id": 'ObjectId("52e3eb79a7cade5d0898e013")', "latitude": "47.412",
"longitude": "16.112", "gas_station": "Shell", "odometer": "24300",
"description": "Nice service, but fuel is more expensive"},
{"_id": 'ObjectId("52e3eba5a7cade5d0898e014")', "latitude": "48.544",
"longitude": "17.001", "gas_station": "Руснефть", "odometer": "24500",
"description": "На заправке есть гостиница и кафе. Очень хорошо"},
{"_id": 'ObjectId("52e3ec19a7cade5d0898e015")', "latitude": "49.165",
"longitude": "18.125", "gas_station": "Татнефть", "odometer": "24750",
"description": "Есть стоянка кемпинг-стоянка. Дешёвая незамерзайка."},
{"_id": 'ObjectId("52f3aaf0a7cade0d846d00d7")', "gas_station": "Газпром",
"odometer": "28400", "latitude": "49.249",
"description": "Хорошее кафе, есть душ!", "longitude": "19.100"}
]
@view_config(route_name='points', request_method='GET', renderer='json')
def points_get_all(request):
return points_list
@view_config(route_name='points', request_method='POST', renderer='json')
def point_add_new(request):
return points_list[2]
@view_config(route_name='point', request_method='GET', renderer='json')
def point_get_one(request):
return points_list[0]
@view_config(route_name='point', request_method='PATCH', renderer='json')
def point_edit_one(request):
return points_list[1]
@view_config(route_name='point', request_method='DELETE', renderer='json')
def point_delete_one(request):
return {}
| from pyramid.view import view_config
points_list = [
{"_id": 'ObjectId("52e3eb56a7cade5d0898e012")', "latitude": "45.215",
"longitude": "14.131", "gas_station": "Lukoil", "odometer": "24100",
"description": "Bad coffee"},
{"_id": 'ObjectId("52e3eb79a7cade5d0898e013")', "latitude": "47.412",
"longitude": "16.112", "gas_station": "Shell", "odometer": "24300",
"description": "Nice service, but fuel is more expensive"},
{"_id": 'ObjectId("52e3eba5a7cade5d0898e014")', "latitude": "48.544",
"longitude": "17.001", "gas_station": "Руснефть", "odometer": "24500",
"description": "На заправке есть гостиница и кафе. Очень хорошо"},
{"_id": 'ObjectId("52e3ec19a7cade5d0898e015")', "latitude": "49.165",
"longitude": "18.125", "gas_station": "Татнефть", "odometer": "24750",
"description": "Есть стоянка кемпинг-стоянка. Дешёвая незамерзайка."},
{"_id": 'ObjectId("52f3aaf0a7cade0d846d00d7")', "gas_station": "Газпром",
"odometer": "28400", "latitude": "49.249",
"description": "Хорошее кафе, есть душ!", "longitude": "19.100"}
]
@view_config(route_name='points', request_method='GET', renderer='json')
def points_get_all(request):
return points_list
@view_config(route_name='points', request_method='POST', renderer='json')
def point_add_new(request):
return points_list[2]
@view_config(route_name='point', request_method='GET', renderer='json')
def point_get_one(request):
return points_list[0]
@view_config(route_name='point', request_method='PATCH', renderer='json')
def point_edit_one(request):
return {'response': 'point edited'}
@view_config(route_name='point', request_method='DELETE', renderer='json')
def point_delete_one(request):
return {}
| mit | Python |
d5f84783c376906dd5733391593ceae792b5edda | Bump version to 0.1.0 | dbcli/vcli,dbcli/vcli | vcli/__init__.py | vcli/__init__.py | __version__ = '0.1.0'
| __version__ = '0.0.1'
| bsd-3-clause | Python |
42e26737d083b82716c3adb8c19fb66a5063dc65 | change version number to v3.0.1 | civalin/cmdlr,civalin/cmdlr | src/cmdlr/info.py | src/cmdlr/info.py | """Cmdlr infomation files."""
VERSION = '3.0.1'
DESCRIPTION = ('An extensible comic subscriber.')
LICENSE = 'MIT'
AUTHOR = 'Civalin'
AUTHOR_EMAIL = '[email protected]'
PROJECT_URL = 'https://github.com/civalin/cmdlr'
PROJECT_NAME = 'cmdlr'
| """Cmdlr infomation files."""
VERSION = '3.0.0'
DESCRIPTION = ('An extensible comic subscriber.')
LICENSE = 'MIT'
AUTHOR = 'Civalin'
AUTHOR_EMAIL = '[email protected]'
PROJECT_URL = 'https://github.com/civalin/cmdlr'
PROJECT_NAME = 'cmdlr'
| mit | Python |
49e301ac6a74a30cfdf00bf4178889f9ecb74889 | Patch release for bug-fix #166 | akaszynski/vtkInterface | vtki/_version.py | vtki/_version.py | """ version info for vtki """
# major, minor, patch
version_info = 0, 18, 2
# Nice string for the version
__version__ = '.'.join(map(str, version_info))
| """ version info for vtki """
# major, minor, patch
version_info = 0, 18, 1
# Nice string for the version
__version__ = '.'.join(map(str, version_info))
| mit | Python |
4c6ec1413d1a12165c1231095783aa94d235389a | Add __version__ to vumi package. | harrissoerja/vumi,harrissoerja/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,TouK/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi | vumi/__init__.py | vumi/__init__.py | """
Vumi scalable text messaging engine.
"""
__version__ = "0.5.0a"
| bsd-3-clause | Python |
|
3bb474a4506abb569d5c54703ba3bf2c9c933fd9 | Add tof-server to path | P1X-in/Tanks-of-Freedom-Server | tof-server.wsgi | tof-server.wsgi | import sys
activate_this = '/var/www/tof-server/flask/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
sys.path.append('/var/www/tof-server')
#activator = 'some/path/to/activate_this.py'
#with open(activator) as f:
# exec(f.read(), {'__file__': activator})
from tof_server import app as application | activate_this = '/var/www/tof-server/flask/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
#activator = 'some/path/to/activate_this.py'
#with open(activator) as f:
# exec(f.read(), {'__file__': activator})
from tof_server import app as application | mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.