commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
06fa3a4625576a0d7d4897dabcc2979c36d62ce1
|
Remove unused code
|
dtroyer/dwarf,dtroyer/dwarf,juergh/dwarf,juergh/dwarf
|
dwarf/image/api_response.py
|
dwarf/image/api_response.py
|
#!/usr/bin/env python
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dwarf.utils import template
DETAILS = ('created_at', 'deleted', 'deleted_at', 'updated_at')
# -----------------------------------------------------------------------------
# Images API responses
IMAGE = DETAILS + ('checksum', 'container_format', 'disk_format', 'id',
'is_public', 'location', 'min_disk', 'min_ram', 'name',
'owner', 'protected', 'size', 'status')
IMAGE_PROPERTIES = {'properties': {}}
def images_create(data):
return {"image": template(IMAGE, data, add=IMAGE_PROPERTIES)}
def images_list(data):
return {"images": template(IMAGE, data, add=IMAGE_PROPERTIES)}
def images_update(data):
return {"image": template(IMAGE, data, add=IMAGE_PROPERTIES)}
|
#!/usr/bin/env python
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dwarf.utils import template
DETAILS = ('created_at', 'deleted', 'deleted_at', 'updated_at')
# -----------------------------------------------------------------------------
# Images API responses
IMAGE = DETAILS + ('checksum', 'container_format', 'disk_format', 'id',
'is_public', 'location', 'min_disk', 'min_ram', 'name',
'owner', 'protected', 'size', 'status')
IMAGE_PROPERTIES = {'properties': {}}
def images_create(data):
return {"image": template(IMAGE, data, add=IMAGE_PROPERTIES)}
def images_list(data):
return {"images": template(IMAGE, data, add=IMAGE_PROPERTIES)}
def images_show(data):
return {"image": template(IMAGE, data, add=IMAGE_PROPERTIES)}
def images_update(data):
return {"image": template(IMAGE, data, add=IMAGE_PROPERTIES)}
|
apache-2.0
|
Python
|
1334c8fa989981e3c917cdc16869b04ad1c2f6e0
|
add --g-fatal-warnings gtk option
|
baverman/snaked,baverman/snaked
|
snaked/core/run.py
|
snaked/core/run.py
|
from optparse import OptionParser
import os
def get_manager():
parser = OptionParser()
parser.add_option('-s', '--session', dest='session',
help="Open snaked with specified session", default='default')
parser.add_option('', '--select-session', action="store_true", dest='select_session',
help="Show dialog to select session at startup", default=False)
parser.add_option('-d', '--debug', action="store_true", dest='debug',
help="Run embedded drainhunter", default=False)
parser.add_option('', '--g-fatal-warnings', action="store_true")
options, args = parser.parse_args()
if options.select_session:
from snaked.core.gui import session_selector
options.session = session_selector.select_session()
from .app import is_master, serve
master, conn = is_master(options.session)
if master:
import gobject
gobject.threads_init()
from .manager import EditorManager
manager = EditorManager(options.session)
manager.start(args)
serve(manager, conn)
if options.debug:
import drainhunter.server
drainhunter.server.run()
return manager
else:
conn.send(['OPEN'] + list(map(os.path.abspath, args)))
conn.send(['END'])
conn.close()
return None
def run():
manager = get_manager()
if not manager:
return
import gtk
try:
gtk.main()
except KeyboardInterrupt:
manager.quit()
|
from optparse import OptionParser
import os
def get_manager():
parser = OptionParser()
parser.add_option('-s', '--session', dest='session',
help="Open snaked with specified session", default='default')
parser.add_option('', '--select-session', action="store_true", dest='select_session',
help="Show dialog to select session at startup", default=False)
parser.add_option('-d', '--debug', action="store_true", dest='debug',
help="Run embedded drainhunter", default=False)
options, args = parser.parse_args()
if options.select_session:
from snaked.core.gui import session_selector
options.session = session_selector.select_session()
from .app import is_master, serve
master, conn = is_master(options.session)
if master:
import gobject
gobject.threads_init()
from .manager import EditorManager
manager = EditorManager(options.session)
manager.start(args)
serve(manager, conn)
if options.debug:
import drainhunter.server
drainhunter.server.run()
return manager
else:
conn.send(['OPEN'] + list(map(os.path.abspath, args)))
conn.send(['END'])
conn.close()
return None
def run():
manager = get_manager()
if not manager:
return
import gtk
try:
gtk.main()
except KeyboardInterrupt:
manager.quit()
|
mit
|
Python
|
6670fe1d081e27417a3d340e2c12c061078582af
|
Bump version (pre-release)
|
chrisglass/django-xhtml2pdf,chrisglass/django-xhtml2pdf
|
django_xhtml2pdf/__init__.py
|
django_xhtml2pdf/__init__.py
|
# -*- coding: utf-8 -*-
"""
See PEP 386 (http://www.python.org/dev/peps/pep-0386/)
Release logic:
1. Remove "dev" from current.
2. git commit
3. git tag <version>
4. push to pypi + push to github
5. bump the version, append '.dev0'
6. git commit
7. push to github (to avoid confusion)
"""
__version__ = '0.0.3'
|
# -*- coding: utf-8 -*-
"""
See PEP 386 (http://www.python.org/dev/peps/pep-0386/)
Release logic:
1. Remove "dev" from current.
2. git commit
3. git tag <version>
4. push to pypi + push to github
5. bump the version, append '.dev0'
6. git commit
7. push to github (to avoid confusion)
"""
__version__ = '0.0.3.dev0'
|
bsd-3-clause
|
Python
|
94d18ba6ede9dc58a558c68fd3af9bbcadc7f189
|
Update urls.py For Django 1.6
|
rashoodkhan/DjangoBB,hsoft/slimbb,hsoft/DjangoBB,rashoodkhan/DjangoBB,slav0nic/DjangoBB,hsoft/DjangoBB,hsoft/DjangoBB,agepoly/DjangoBB,saifrahmed/DjangoBB,slav0nic/DjangoBB,saifrahmed/DjangoBB,hsoft/slimbb,agepoly/DjangoBB,slav0nic/DjangoBB,saifrahmed/DjangoBB,agepoly/DjangoBB,hsoft/slimbb
|
djangobb_forum/tests/urls.py
|
djangobb_forum/tests/urls.py
|
from django.conf.urls import patterns, include
urlpatterns = patterns('',
(r'^forum/', include('djangobb_forum.urls', namespace='djangobb')),
)
|
from django.conf.urls.defaults import patterns, include
urlpatterns = patterns('',
(r'^forum/', include('djangobb_forum.urls', namespace='djangobb')),
)
|
bsd-3-clause
|
Python
|
bdceb4c7bc0b71755d9f63974a5597e29fd94e75
|
comment test code
|
nomemo/ProxyPool
|
tester.py
|
tester.py
|
import urllib2
from socket import p
import settings
import random
import threading
import Queue
import json
import requests
from settings import USER_AGENTS
def makeRequest(proxy, target):
i_headers = {'User-Agent': random.choice(USER_AGENTS)}
print("\n")
try:
r = requests.get(target, proxies=proxy, headers=i_headers, timeout=5)
except Exception, e:
print "Test Failed: %s By %s \nException: %s" % (target, str(proxy), str(e))
return False
else:
print "Test Successed: %s By %s" % (target, str(proxy))
return True
def makeAProxyRequest(proxy, testTarget):
i_headers = {'User-Agent':random.choice(settings.USER_AGENTS)}
url = testTarget
print("\n")
try:
r = requests.get(url, proxies=proxy, headers = i_headers, timeout=5)
except Exception, e:
print "Test Failed: %s By %s \nException: %s" % (testTarget, str(proxy), str(e))
return False
else:
print "Test Successed: %s By %s" % (testTarget, str(proxy))
return True
def makeFullTestForOneProxy(proxy, type = 'ALL'):
checkedCount = 0
for testTarget in settings.TestTargetsCN:
connected = makeAProxyRequest(proxy, testTarget)
if connected == True:
checkedCount += 1
quality = checkedCount * 1.0 / len(settings.TestTargetsCN)
return quality
class WorkThread(threading.Thread):
def __init__(self, name, workQueue, aa=None):
super(WorkThread, self).__init__()
self.queue = workQueue
self.name = name
self.aa = aa
def run(self):
print "Starting " + self.name
while True:
if self.queue.empty():
print "Exiting " + self.name
break
proxy = self.queue.get()
if proxy != None:
print "Thread: " + self.name + " Size: " + str(self.queue.qsize())
if self.aa == None:
makeFullTestForOneProxy(proxy)
else:
makeAProxyRequest(proxy, self.aa)
self.queue.task_done()
# makeFullTestForOneProxy({"http":"115.218.126.59:9000"})
# makeAProxyRequest({"http":"115.218.216.90:9000"}, 'http://www.woshipm.com/')
# makeAProxyRequest({"http":"115.218.216.90:9000"}, 'https://www.baidu.com/')
# makeAProxyRequest({"http":"115.218.216.90:9000"}, 'http://www.v2ex.com/')
# jsonFile = "proxy.json"
# f = open(jsonFile)
# fileData = f.read()
# f.close()
# proxys = json.loads(fileData)
#
#
# workQueue = Queue.Queue(0)
#
# for proxy in proxys:
# workQueue.put(proxy)
#
# for i in range(10):
# name = "Thread " + str(i)
# thread = WorkThread(name, workQueue)
# thread.start()
# workQueue.join()
|
import urllib2
from socket import p
import settings
import random
import threading
import Queue
import json
import requests
from settings import USER_AGENTS
def makeRequest(proxy, target):
i_headers = {'User-Agent': random.choice(USER_AGENTS)}
print("\n")
try:
r = requests.get(target, proxies=proxy, headers=i_headers, timeout=5)
except Exception, e:
print "Test Failed: %s By %s \nException: %s" % (target, str(proxy), str(e))
return False
else:
print "Test Successed: %s By %s" % (target, str(proxy))
return True
def makeAProxyRequest(proxy, testTarget):
i_headers = {'User-Agent':random.choice(settings.USER_AGENTS)}
url = testTarget
print("\n")
try:
r = requests.get(url, proxies=proxy, headers = i_headers, timeout=5)
except Exception, e:
print "Test Failed: %s By %s \nException: %s" % (testTarget, str(proxy), str(e))
return False
else:
print "Test Successed: %s By %s" % (testTarget, str(proxy))
return True
def makeFullTestForOneProxy(proxy, type = 'ALL'):
checkedCount = 0
for testTarget in settings.TestTargetsCN:
connected = makeAProxyRequest(proxy, testTarget)
if connected == True:
checkedCount += 1
quality = checkedCount * 1.0 / len(settings.TestTargetsCN)
return quality
class WorkThread(threading.Thread):
def __init__(self, name, workQueue, aa=None):
super(WorkThread, self).__init__()
self.queue = workQueue
self.name = name
self.aa = aa
def run(self):
print "Starting " + self.name
while True:
if self.queue.empty():
print "Exiting " + self.name
break
proxy = self.queue.get()
if proxy != None:
print "Thread: " + self.name + " Size: " + str(self.queue.qsize())
if self.aa == None:
makeFullTestForOneProxy(proxy)
else:
makeAProxyRequest(proxy, self.aa)
self.queue.task_done()
# makeFullTestForOneProxy({"http":"115.218.126.59:9000"})
# makeAProxyRequest({"http":"115.218.216.90:9000"}, 'http://www.woshipm.com/')
# makeAProxyRequest({"http":"115.218.216.90:9000"}, 'https://www.baidu.com/')
# makeAProxyRequest({"http":"115.218.216.90:9000"}, 'http://www.v2ex.com/')
jsonFile = "proxy.json"
f = open(jsonFile)
fileData = f.read()
f.close()
proxys = json.loads(fileData)
workQueue = Queue.Queue(0)
for proxy in proxys:
workQueue.put(proxy)
for i in range(10):
name = "Thread " + str(i)
thread = WorkThread(name, workQueue)
thread.start()
workQueue.join()
|
apache-2.0
|
Python
|
c6cde6a72204a9e688ea0d6dfe9550f2cb39a0fc
|
resolve incorrect merge conflict resolution
|
hkawasaki/kawasaki-aio8-0,mjirayu/sit_academy,kursitet/edx-platform,fly19890211/edx-platform,longmen21/edx-platform,halvertoluke/edx-platform,bitifirefly/edx-platform,ahmadio/edx-platform,zofuthan/edx-platform,longmen21/edx-platform,bitifirefly/edx-platform,pelikanchik/edx-platform,appsembler/edx-platform,morenopc/edx-platform,unicri/edx-platform,zerobatu/edx-platform,unicri/edx-platform,lduarte1991/edx-platform,UOMx/edx-platform,arbrandes/edx-platform,ahmadio/edx-platform,alexthered/kienhoc-platform,ahmadiga/min_edx,EduPepperPD/pepper2013,utecuy/edx-platform,jazkarta/edx-platform,mbareta/edx-platform-ft,edx-solutions/edx-platform,antonve/s4-project-mooc,etzhou/edx-platform,chudaol/edx-platform,philanthropy-u/edx-platform,pdehaye/theming-edx-platform,LICEF/edx-platform,y12uc231/edx-platform,leansoft/edx-platform,morpheby/levelup-by,DefyVentures/edx-platform,don-github/edx-platform,Kalyzee/edx-platform,jruiperezv/ANALYSE,marcore/edx-platform,motion2015/edx-platform,franosincic/edx-platform,shashank971/edx-platform,andyzsf/edx,proversity-org/edx-platform,bigdatauniversity/edx-platform,zhenzhai/edx-platform,jamiefolsom/edx-platform,morenopc/edx-platform,jolyonb/edx-platform,cecep-edu/edx-platform,nikolas/edx-platform,abdoosh00/edx-rtl-final,benpatterson/edx-platform,mushtaqak/edx-platform,valtech-mooc/edx-platform,beni55/edx-platform,carsongee/edx-platform,waheedahmed/edx-platform,yokose-ks/edx-platform,y12uc231/edx-platform,playm2mboy/edx-platform,ampax/edx-platform-backup,utecuy/edx-platform,ahmedaljazzar/edx-platform,shubhdev/edx-platform,eduNEXT/edx-platform,Endika/edx-platform,ubc/edx-platform,Stanford-Online/edx-platform,fly19890211/edx-platform,lduarte1991/edx-platform,vikas1885/test1,dkarakats/edx-platform,nanolearningllc/edx-platform-cypress-2,etzhou/edx-platform,dcosentino/edx-platform,PepperPD/edx-pepper-platform,cecep-edu/edx-platform,AkA84/edx-platform,UOMx/edx-platform,knehez/edx-platform,ferabra/edx-platform,iivic/BoiseStateX,shubhdev/edxOnBaadal,wwj718/
ANALYSE,beacloudgenius/edx-platform,itsjeyd/edx-platform,kamalx/edx-platform,hastexo/edx-platform,SravanthiSinha/edx-platform,RPI-OPENEDX/edx-platform,morpheby/levelup-by,ESOedX/edx-platform,solashirai/edx-platform,OmarIthawi/edx-platform,devs1991/test_edx_docmode,Ayub-Khan/edx-platform,appliedx/edx-platform,nanolearning/edx-platform,alexthered/kienhoc-platform,chand3040/cloud_that,JioEducation/edx-platform,zadgroup/edx-platform,kmoocdev/edx-platform,sameetb-cuelogic/edx-platform-test,abdoosh00/edx-rtl-final,EDUlib/edx-platform,Endika/edx-platform,nikolas/edx-platform,cselis86/edx-platform,shurihell/testasia,chudaol/edx-platform,ferabra/edx-platform,shubhdev/openedx,nttks/jenkins-test,simbs/edx-platform,inares/edx-platform,hmcmooc/muddx-platform,zhenzhai/edx-platform,xingyepei/edx-platform,DNFcode/edx-platform,pelikanchik/edx-platform,morenopc/edx-platform,jbzdak/edx-platform,OmarIthawi/edx-platform,abdoosh00/edraak,fintech-circle/edx-platform,SravanthiSinha/edx-platform,defance/edx-platform,Livit/Livit.Learn.EdX,msegado/edx-platform,peterm-itr/edx-platform,rhndg/openedx,Softmotions/edx-platform,jjmiranda/edx-platform,hamzehd/edx-platform,morenopc/edx-platform,carsongee/edx-platform,ferabra/edx-platform,atsolakid/edx-platform,jamesblunt/edx-platform,arifsetiawan/edx-platform,hkawasaki/kawasaki-aio8-2,bitifirefly/edx-platform,rismalrv/edx-platform,pomegranited/edx-platform,shubhdev/edx-platform,hkawasaki/kawasaki-aio8-2,vismartltd/edx-platform,antonve/s4-project-mooc,atsolakid/edx-platform,edx/edx-platform,procangroup/edx-platform,solashirai/edx-platform,vikas1885/test1,waheedahmed/edx-platform,zhenzhai/edx-platform,jonathan-beard/edx-platform,shabab12/edx-platform,raccoongang/edx-platform,carsongee/edx-platform,amir-qayyum-khan/edx-platform,edry/edx-platform,abdoosh00/edraak,auferack08/edx-platform,knehez/edx-platform,Softmotions/edx-platform,playm2mboy/edx-platform,benpatterson/edx-platform,yokose-ks/edx-platform,JioEducation/edx-platform,chand3040/cloud_that,cogni
tiveclass/edx-platform,mbareta/edx-platform-ft,chauhanhardik/populo,motion2015/a3,hmcmooc/muddx-platform,doganov/edx-platform,stvstnfrd/edx-platform,nanolearning/edx-platform,IONISx/edx-platform,Stanford-Online/edx-platform,ZLLab-Mooc/edx-platform,jolyonb/edx-platform,antoviaque/edx-platform,raccoongang/edx-platform,nanolearningllc/edx-platform-cypress,dkarakats/edx-platform,TeachAtTUM/edx-platform,ZLLab-Mooc/edx-platform,fintech-circle/edx-platform,Livit/Livit.Learn.EdX,kmoocdev/edx-platform,romain-li/edx-platform,xinjiguaike/edx-platform,y12uc231/edx-platform,leansoft/edx-platform,IndonesiaX/edx-platform,tiagochiavericosta/edx-platform,hkawasaki/kawasaki-aio8-0,franosincic/edx-platform,cognitiveclass/edx-platform,vasyarv/edx-platform,amir-qayyum-khan/edx-platform,proversity-org/edx-platform,cognitiveclass/edx-platform,pepeportela/edx-platform,jonathan-beard/edx-platform,mushtaqak/edx-platform,mushtaqak/edx-platform,jazkarta/edx-platform-for-isc,eduNEXT/edunext-platform,kmoocdev2/edx-platform,UOMx/edx-platform,cselis86/edx-platform,zubair-arbi/edx-platform,Livit/Livit.Learn.EdX,ampax/edx-platform-backup,devs1991/test_edx_docmode,xingyepei/edx-platform,J861449197/edx-platform,miptliot/edx-platform,JioEducation/edx-platform,wwj718/edx-platform,nikolas/edx-platform,jzoldak/edx-platform,olexiim/edx-platform,nagyistoce/edx-platform,playm2mboy/edx-platform,sameetb-cuelogic/edx-platform-test,Softmotions/edx-platform,tanmaykm/edx-platform,nttks/edx-platform,zubair-arbi/edx-platform,jswope00/GAI,hamzehd/edx-platform,inares/edx-platform,beacloudgenius/edx-platform,leansoft/edx-platform,angelapper/edx-platform,deepsrijit1105/edx-platform,Lektorium-LLC/edx-platform,kxliugang/edx-platform,Kalyzee/edx-platform,utecuy/edx-platform,TeachAtTUM/edx-platform,mbareta/edx-platform-ft,vismartltd/edx-platform,vismartltd/edx-platform,eduNEXT/edx-platform,a-parhom/edx-platform,DefyVentures/edx-platform,jazkarta/edx-platform,IndonesiaX/edx-platform,andyzsf/edx,DNFcode/edx-platform,J86144919
7/edx-platform,rhndg/openedx,kursitet/edx-platform,martynovp/edx-platform,chauhanhardik/populo_2,xingyepei/edx-platform,edry/edx-platform,valtech-mooc/edx-platform,martynovp/edx-platform,jbzdak/edx-platform,JCBarahona/edX,nanolearningllc/edx-platform-cypress,sameetb-cuelogic/edx-platform-test,BehavioralInsightsTeam/edx-platform,playm2mboy/edx-platform,syjeon/new_edx,mahendra-r/edx-platform,prarthitm/edxplatform,Unow/edx-platform,adoosii/edx-platform,vismartltd/edx-platform,vikas1885/test1,vikas1885/test1,raccoongang/edx-platform,peterm-itr/edx-platform,procangroup/edx-platform,jazztpt/edx-platform,sudheerchintala/LearnEraPlatForm,antoviaque/edx-platform,alexthered/kienhoc-platform,cognitiveclass/edx-platform,tanmaykm/edx-platform,cselis86/edx-platform,peterm-itr/edx-platform,playm2mboy/edx-platform,yokose-ks/edx-platform,CredoReference/edx-platform,simbs/edx-platform,caesar2164/edx-platform,hkawasaki/kawasaki-aio8-1,jbassen/edx-platform,dkarakats/edx-platform,pku9104038/edx-platform,zadgroup/edx-platform,romain-li/edx-platform,pabloborrego93/edx-platform,vasyarv/edx-platform,jjmiranda/edx-platform,shashank971/edx-platform,ahmadio/edx-platform,morpheby/levelup-by,jazkarta/edx-platform-for-isc,ferabra/edx-platform,polimediaupv/edx-platform,nttks/edx-platform,romain-li/edx-platform,morpheby/levelup-by,CourseTalk/edx-platform,chudaol/edx-platform,mcgachey/edx-platform,nikolas/edx-platform,bigdatauniversity/edx-platform,shashank971/edx-platform,SivilTaram/edx-platform,prarthitm/edxplatform,abdoosh00/edx-rtl-final,torchingloom/edx-platform,xuxiao19910803/edx,bdero/edx-platform,EduPepperPD/pepper2013,eemirtekin/edx-platform,vasyarv/edx-platform,CourseTalk/edx-platform,zofuthan/edx-platform,Stanford-Online/edx-platform,Endika/edx-platform,pabloborrego93/edx-platform,cognitiveclass/edx-platform,rhndg/openedx,cyanna/edx-platform,tiagochiavericosta/edx-platform,cselis86/edx-platform,motion2015/a3,inares/edx-platform,rismalrv/edx-platform,WatanabeYasumasa/edx-platform,zhenzhai/
edx-platform,JCBarahona/edX,waheedahmed/edx-platform,jzoldak/edx-platform,deepsrijit1105/edx-platform,eestay/edx-platform,mjg2203/edx-platform-seas,angelapper/edx-platform,jruiperezv/ANALYSE,xuxiao19910803/edx-platform,msegado/edx-platform,MakeHer/edx-platform,ahmadio/edx-platform,doismellburning/edx-platform,pomegranited/edx-platform,halvertoluke/edx-platform,beni55/edx-platform,TsinghuaX/edx-platform,mtlchun/edx,Shrhawk/edx-platform,synergeticsedx/deployment-wipro,MSOpenTech/edx-platform,Edraak/edx-platform,gsehub/edx-platform,Shrhawk/edx-platform,dkarakats/edx-platform,hamzehd/edx-platform,appsembler/edx-platform,edx-solutions/edx-platform,mitocw/edx-platform,mtlchun/edx,ak2703/edx-platform,WatanabeYasumasa/edx-platform,chrisndodge/edx-platform,jazztpt/edx-platform,dsajkl/123,chudaol/edx-platform,franosincic/edx-platform,shurihell/testasia,Edraak/edx-platform,kmoocdev2/edx-platform,PepperPD/edx-pepper-platform,rhndg/openedx,louyihua/edx-platform,defance/edx-platform,deepsrijit1105/edx-platform,eduNEXT/edx-platform,hamzehd/edx-platform,teltek/edx-platform,y12uc231/edx-platform,dcosentino/edx-platform,DNFcode/edx-platform,jazkarta/edx-platform,lduarte1991/edx-platform,apigee/edx-platform,caesar2164/edx-platform,kursitet/edx-platform,arifsetiawan/edx-platform,JCBarahona/edX,bdero/edx-platform,cpennington/edx-platform,zhenzhai/edx-platform,jelugbo/tundex,jbassen/edx-platform,fly19890211/edx-platform,eestay/edx-platform,arbrandes/edx-platform,antoviaque/edx-platform,10clouds/edx-platform,SravanthiSinha/edx-platform,mjirayu/sit_academy,halvertoluke/edx-platform,dsajkl/reqiop,openfun/edx-platform,Edraak/circleci-edx-platform,philanthropy-u/edx-platform,naresh21/synergetics-edx-platform,hastexo/edx-platform,RPI-OPENEDX/edx-platform,analyseuc3m/ANALYSE-v1,chand3040/cloud_that,edry/edx-platform,mcgachey/edx-platform,auferack08/edx-platform,EduPepperPD/pepper2013,IONISx/edx-platform,kamalx/edx-platform,halvertoluke/edx-platform,Edraak/circleci-edx-platform,devs1991/test_edx
_docmode,ESOedX/edx-platform,cecep-edu/edx-platform,a-parhom/edx-platform,cyanna/edx-platform,beacloudgenius/edx-platform,bdero/edx-platform,jswope00/GAI,philanthropy-u/edx-platform,jbzdak/edx-platform,TeachAtTUM/edx-platform,zubair-arbi/edx-platform,chudaol/edx-platform,10clouds/edx-platform,zofuthan/edx-platform,ESOedX/edx-platform,mjg2203/edx-platform-seas,mjirayu/sit_academy,Ayub-Khan/edx-platform,Softmotions/edx-platform,deepsrijit1105/edx-platform,adoosii/edx-platform,knehez/edx-platform,yokose-ks/edx-platform,amir-qayyum-khan/edx-platform,peterm-itr/edx-platform,B-MOOC/edx-platform,doismellburning/edx-platform,bitifirefly/edx-platform,jamiefolsom/edx-platform,nagyistoce/edx-platform,TsinghuaX/edx-platform,utecuy/edx-platform,jswope00/griffinx,itsjeyd/edx-platform,alexthered/kienhoc-platform,olexiim/edx-platform,UOMx/edx-platform,andyzsf/edx,ZLLab-Mooc/edx-platform,longmen21/edx-platform,shubhdev/edx-platform,jazztpt/edx-platform,analyseuc3m/ANALYSE-v1,chand3040/cloud_that,jbassen/edx-platform,martynovp/edx-platform,marcore/edx-platform,jswope00/griffinx,praveen-pal/edx-platform,syjeon/new_edx,jamiefolsom/edx-platform,sudheerchintala/LearnEraPlatForm,IndonesiaX/edx-platform,miptliot/edx-platform,chauhanhardik/populo,morenopc/edx-platform,jolyonb/edx-platform,zubair-arbi/edx-platform,beni55/edx-platform,stvstnfrd/edx-platform,mcgachey/edx-platform,dsajkl/reqiop,jonathan-beard/edx-platform,Edraak/edx-platform,prarthitm/edxplatform,jswope00/griffinx,auferack08/edx-platform,valtech-mooc/edx-platform,xingyepei/edx-platform,mahendra-r/edx-platform,Edraak/circleci-edx-platform,alu042/edx-platform,franosincic/edx-platform,kmoocdev/edx-platform,sameetb-cuelogic/edx-platform-test,kmoocdev/edx-platform,IONISx/edx-platform,kmoocdev2/edx-platform,praveen-pal/edx-platform,EDUlib/edx-platform,eemirtekin/edx-platform,eestay/edx-platform,AkA84/edx-platform,pelikanchik/edx-platform,ahmadiga/min_edx,bdero/edx-platform,kursitet/edx-platform,itsjeyd/edx-platform,simbs/edx-platform
,gsehub/edx-platform,vasyarv/edx-platform,kmoocdev/edx-platform,simbs/edx-platform,EduPepperPDTesting/pepper2013-testing,arifsetiawan/edx-platform,olexiim/edx-platform,pomegranited/edx-platform,SravanthiSinha/edx-platform,PepperPD/edx-pepper-platform,mtlchun/edx,OmarIthawi/edx-platform,shashank971/edx-platform,ferabra/edx-platform,chrisndodge/edx-platform,jelugbo/tundex,10clouds/edx-platform,nanolearningllc/edx-platform-cypress-2,ovnicraft/edx-platform,devs1991/test_edx_docmode,unicri/edx-platform,hkawasaki/kawasaki-aio8-0,BehavioralInsightsTeam/edx-platform,rismalrv/edx-platform,vasyarv/edx-platform,jzoldak/edx-platform,hamzehd/edx-platform,Semi-global/edx-platform,JCBarahona/edX,IONISx/edx-platform,pdehaye/theming-edx-platform,marcore/edx-platform,syjeon/new_edx,eduNEXT/edunext-platform,jswope00/griffinx,leansoft/edx-platform,hkawasaki/kawasaki-aio8-2,SivilTaram/edx-platform,jbassen/edx-platform,hkawasaki/kawasaki-aio8-0,ubc/edx-platform,mushtaqak/edx-platform,jamesblunt/edx-platform,edry/edx-platform,LearnEra/LearnEraPlaftform,caesar2164/edx-platform,kamalx/edx-platform,Edraak/edx-platform,shubhdev/edxOnBaadal,fintech-circle/edx-platform,antonve/s4-project-mooc,chrisndodge/edx-platform,motion2015/edx-platform,torchingloom/edx-platform,cecep-edu/edx-platform,DefyVentures/edx-platform,synergeticsedx/deployment-wipro,don-github/edx-platform,analyseuc3m/ANALYSE-v1,chauhanhardik/populo_2,IONISx/edx-platform,naresh21/synergetics-edx-platform,Shrhawk/edx-platform,cpennington/edx-platform,fly19890211/edx-platform,jelugbo/tundex,nanolearning/edx-platform,a-parhom/edx-platform,arbrandes/edx-platform,carsongee/edx-platform,Unow/edx-platform,JCBarahona/edX,cselis86/edx-platform,motion2015/a3,shurihell/testasia,ahmadiga/min_edx,dsajkl/123,cyanna/edx-platform,mitocw/edx-platform,unicri/edx-platform,syjeon/new_edx,dsajkl/123,wwj718/edx-platform,rismalrv/edx-platform,chauhanhardik/populo,MakeHer/edx-platform,jswope00/GAI,shubhdev/edxOnBaadal,ahmedaljazzar/edx-platform,nttks/jenk
ins-test,mbareta/edx-platform-ft,Softmotions/edx-platform,benpatterson/edx-platform,EduPepperPDTesting/pepper2013-testing,nanolearningllc/edx-platform-cypress-2,ahmadiga/min_edx,shashank971/edx-platform,edx/edx-platform,zadgroup/edx-platform,ovnicraft/edx-platform,jazkarta/edx-platform-for-isc,jazkarta/edx-platform-for-isc,nttks/edx-platform,analyseuc3m/ANALYSE-v1,Ayub-Khan/edx-platform,xinjiguaike/edx-platform,stvstnfrd/edx-platform,pku9104038/edx-platform,LICEF/edx-platform,wwj718/edx-platform,rue89-tech/edx-platform,mtlchun/edx,cyanna/edx-platform,nagyistoce/edx-platform,Unow/edx-platform,UXE/local-edx,ESOedX/edx-platform,bigdatauniversity/edx-platform,BehavioralInsightsTeam/edx-platform,msegado/edx-platform,Stanford-Online/edx-platform,shubhdev/openedx,doganov/edx-platform,wwj718/ANALYSE,mjg2203/edx-platform-seas,EduPepperPD/pepper2013,zerobatu/edx-platform,kxliugang/edx-platform,sameetb-cuelogic/edx-platform-test,doganov/edx-platform,Endika/edx-platform,B-MOOC/edx-platform,J861449197/edx-platform,PepperPD/edx-pepper-platform,knehez/edx-platform,teltek/edx-platform,ZLLab-Mooc/edx-platform,pepeportela/edx-platform,tanmaykm/edx-platform,motion2015/edx-platform,miptliot/edx-platform,valtech-mooc/edx-platform,knehez/edx-platform,chauhanhardik/populo,MSOpenTech/edx-platform,JioEducation/edx-platform,eestay/edx-platform,polimediaupv/edx-platform,olexiim/edx-platform,zerobatu/edx-platform,pku9104038/edx-platform,pelikanchik/edx-platform,mitocw/edx-platform,hkawasaki/kawasaki-aio8-2,abdoosh00/edx-rtl-final,waheedahmed/edx-platform,Lektorium-LLC/edx-platform,polimediaupv/edx-platform,hastexo/edx-platform,iivic/BoiseStateX,nanolearningllc/edx-platform-cypress,RPI-OPENEDX/edx-platform,longmen21/edx-platform,caesar2164/edx-platform,dsajkl/123,xinjiguaike/edx-platform,wwj718/edx-platform,olexiim/edx-platform,nanolearningllc/edx-platform-cypress-2,Shrhawk/edx-platform,kmoocdev2/edx-platform,jamesblunt/edx-platform,vismartltd/edx-platform,jelugbo/tundex,jswope00/griffinx,jazka
rta/edx-platform,nagyistoce/edx-platform,auferack08/edx-platform,Semi-global/edx-platform,jruiperezv/ANALYSE,xuxiao19910803/edx-platform,atsolakid/edx-platform,SivilTaram/edx-platform,itsjeyd/edx-platform,Ayub-Khan/edx-platform,appsembler/edx-platform,nttks/jenkins-test,kamalx/edx-platform,cpennington/edx-platform,gsehub/edx-platform,alexthered/kienhoc-platform,pomegranited/edx-platform,shubhdev/edx-platform,UXE/local-edx,4eek/edx-platform,jonathan-beard/edx-platform,stvstnfrd/edx-platform,prarthitm/edxplatform,chand3040/cloud_that,Unow/edx-platform,unicri/edx-platform,hkawasaki/kawasaki-aio8-1,bigdatauniversity/edx-platform,romain-li/edx-platform,mitocw/edx-platform,chauhanhardik/populo_2,shubhdev/openedx,dkarakats/edx-platform,ampax/edx-platform,ak2703/edx-platform,jamiefolsom/edx-platform,leansoft/edx-platform,devs1991/test_edx_docmode,B-MOOC/edx-platform,jazztpt/edx-platform,EduPepperPD/pepper2013,UXE/local-edx,y12uc231/edx-platform,IndonesiaX/edx-platform,gymnasium/edx-platform,4eek/edx-platform,ahmadio/edx-platform,cyanna/edx-platform,jonathan-beard/edx-platform,mahendra-r/edx-platform,miptliot/edx-platform,wwj718/ANALYSE,apigee/edx-platform,shabab12/edx-platform,mcgachey/edx-platform,motion2015/edx-platform,inares/edx-platform,devs1991/test_edx_docmode,adoosii/edx-platform,louyihua/edx-platform,PepperPD/edx-pepper-platform,kamalx/edx-platform,CredoReference/edx-platform,don-github/edx-platform,openfun/edx-platform,angelapper/edx-platform,Edraak/edraak-platform,CourseTalk/edx-platform,SivilTaram/edx-platform,angelapper/edx-platform,praveen-pal/edx-platform,eduNEXT/edunext-platform,shubhdev/openedx,CredoReference/edx-platform,mjg2203/edx-platform-seas,mtlchun/edx,wwj718/edx-platform,LICEF/edx-platform,hmcmooc/muddx-platform,Edraak/edraak-platform,nanolearning/edx-platform,tanmaykm/edx-platform,chauhanhardik/populo_2,tiagochiavericosta/edx-platform,ovnicraft/edx-platform,louyihua/edx-platform,waheedahmed/edx-platform,EduPepperPDTesting/pepper2013-testing,EDUlib/
edx-platform,iivic/BoiseStateX,sudheerchintala/LearnEraPlatForm,shabab12/edx-platform,yokose-ks/edx-platform,appliedx/edx-platform,gymnasium/edx-platform,rue89-tech/edx-platform,shurihell/testasia,marcore/edx-platform,zadgroup/edx-platform,Ayub-Khan/edx-platform,ak2703/edx-platform,pabloborrego93/edx-platform,romain-li/edx-platform,beni55/edx-platform,jswope00/GAI,shubhdev/edx-platform,jbassen/edx-platform,atsolakid/edx-platform,ZLLab-Mooc/edx-platform,iivic/BoiseStateX,rue89-tech/edx-platform,vikas1885/test1,AkA84/edx-platform,MakeHer/edx-platform,ampax/edx-platform,ahmedaljazzar/edx-platform,iivic/BoiseStateX,nttks/jenkins-test,pku9104038/edx-platform,xingyepei/edx-platform,LICEF/edx-platform,dsajkl/reqiop,etzhou/edx-platform,chauhanhardik/populo_2,fintech-circle/edx-platform,eemirtekin/edx-platform,hastexo/edx-platform,ahmedaljazzar/edx-platform,martynovp/edx-platform,alu042/edx-platform,jazztpt/edx-platform,cecep-edu/edx-platform,doganov/edx-platform,SravanthiSinha/edx-platform,xuxiao19910803/edx,Edraak/edraak-platform,DefyVentures/edx-platform,motion2015/a3,TeachAtTUM/edx-platform,TsinghuaX/edx-platform,kxliugang/edx-platform,zofuthan/edx-platform,hkawasaki/kawasaki-aio8-1,ovnicraft/edx-platform,devs1991/test_edx_docmode,jjmiranda/edx-platform,mjirayu/sit_academy,ubc/edx-platform,chauhanhardik/populo,Kalyzee/edx-platform,simbs/edx-platform,UXE/local-edx,MakeHer/edx-platform,xuxiao19910803/edx,B-MOOC/edx-platform,4eek/edx-platform,xuxiao19910803/edx-platform,torchingloom/edx-platform,philanthropy-u/edx-platform,doismellburning/edx-platform,edx/edx-platform,dsajkl/123,J861449197/edx-platform,defance/edx-platform,10clouds/edx-platform,don-github/edx-platform,4eek/edx-platform,wwj718/ANALYSE,jamesblunt/edx-platform,kxliugang/edx-platform,longmen21/edx-platform,LearnEra/LearnEraPlaftform,eduNEXT/edunext-platform,ubc/edx-platform,openfun/edx-platform,chrisndodge/edx-platform,nanolearning/edx-platform,kxliugang/edx-platform,naresh21/synergetics-edx-platform,kmoocdev2/
edx-platform,DNFcode/edx-platform,dcosentino/edx-platform,arbrandes/edx-platform,mahendra-r/edx-platform,shubhdev/edxOnBaadal,defance/edx-platform,pomegranited/edx-platform,RPI-OPENEDX/edx-platform,Edraak/edraak-platform,jjmiranda/edx-platform,rhndg/openedx,WatanabeYasumasa/edx-platform,proversity-org/edx-platform,shubhdev/openedx,ampax/edx-platform,pepeportela/edx-platform,rue89-tech/edx-platform,valtech-mooc/edx-platform,appliedx/edx-platform,zofuthan/edx-platform,CourseTalk/edx-platform,EduPepperPDTesting/pepper2013-testing,rismalrv/edx-platform,mushtaqak/edx-platform,eestay/edx-platform,doismellburning/edx-platform,xuxiao19910803/edx-platform,don-github/edx-platform,eemirtekin/edx-platform,Shrhawk/edx-platform,edx/edx-platform,doismellburning/edx-platform,ampax/edx-platform-backup,jruiperezv/ANALYSE,shubhdev/edxOnBaadal,ampax/edx-platform-backup,ahmadiga/min_edx,procangroup/edx-platform,xuxiao19910803/edx,BehavioralInsightsTeam/edx-platform,apigee/edx-platform,atsolakid/edx-platform,synergeticsedx/deployment-wipro,jbzdak/edx-platform,abdoosh00/edraak,adoosii/edx-platform,Edraak/edx-platform,xuxiao19910803/edx,a-parhom/edx-platform,antoviaque/edx-platform,openfun/edx-platform,nttks/jenkins-test,shurihell/testasia,mahendra-r/edx-platform,teltek/edx-platform,motion2015/edx-platform,xinjiguaike/edx-platform,etzhou/edx-platform,nanolearningllc/edx-platform-cypress-2,torchingloom/edx-platform,LearnEra/LearnEraPlaftform,teltek/edx-platform,rue89-tech/edx-platform,LICEF/edx-platform,ak2703/edx-platform,appliedx/edx-platform,jamiefolsom/edx-platform,WatanabeYasumasa/edx-platform,devs1991/test_edx_docmode,andyzsf/edx,beni55/edx-platform,AkA84/edx-platform,tiagochiavericosta/edx-platform,nikolas/edx-platform,louyihua/edx-platform,msegado/edx-platform,hkawasaki/kawasaki-aio8-1,proversity-org/edx-platform,openfun/edx-platform,IndonesiaX/edx-platform,dcosentino/edx-platform,pepeportela/edx-platform,RPI-OPENEDX/edx-platform,utecuy/edx-platform,Semi-global/edx-platform,tiagochi
avericosta/edx-platform,jruiperezv/ANALYSE,amir-qayyum-khan/edx-platform,appsembler/edx-platform,ak2703/edx-platform,pdehaye/theming-edx-platform,benpatterson/edx-platform,naresh21/synergetics-edx-platform,mjirayu/sit_academy,zubair-arbi/edx-platform,doganov/edx-platform,torchingloom/edx-platform,ampax/edx-platform,alu042/edx-platform,edx-solutions/edx-platform,EduPepperPDTesting/pepper2013-testing,gymnasium/edx-platform,kursitet/edx-platform,inares/edx-platform,zerobatu/edx-platform,Kalyzee/edx-platform,DNFcode/edx-platform,MakeHer/edx-platform,MSOpenTech/edx-platform,ampax/edx-platform-backup,nanolearningllc/edx-platform-cypress,Livit/Livit.Learn.EdX,bitifirefly/edx-platform,LearnEra/LearnEraPlaftform,edx-solutions/edx-platform,raccoongang/edx-platform,Edraak/circleci-edx-platform,4eek/edx-platform,eemirtekin/edx-platform,nanolearningllc/edx-platform-cypress,lduarte1991/edx-platform,msegado/edx-platform,TsinghuaX/edx-platform,J861449197/edx-platform,halvertoluke/edx-platform,antonve/s4-project-mooc,fly19890211/edx-platform,Semi-global/edx-platform,jzoldak/edx-platform,beacloudgenius/edx-platform,alu042/edx-platform,SivilTaram/edx-platform,Semi-global/edx-platform,procangroup/edx-platform,adoosii/edx-platform,Lektorium-LLC/edx-platform,martynovp/edx-platform,wwj718/ANALYSE,jolyonb/edx-platform,Lektorium-LLC/edx-platform,edry/edx-platform,arifsetiawan/edx-platform,gymnasium/edx-platform,franosincic/edx-platform,jamesblunt/edx-platform,mcgachey/edx-platform,B-MOOC/edx-platform,eduNEXT/edx-platform,motion2015/a3,jbzdak/edx-platform,DefyVentures/edx-platform,pdehaye/theming-edx-platform,polimediaupv/edx-platform,benpatterson/edx-platform,nttks/edx-platform,gsehub/edx-platform,zerobatu/edx-platform,MSOpenTech/edx-platform,pabloborrego93/edx-platform,solashirai/edx-platform,EDUlib/edx-platform,polimediaupv/edx-platform,xuxiao19910803/edx-platform,EduPepperPDTesting/pepper2013-testing,antonve/s4-project-mooc,nagyistoce/edx-platform,sudheerchintala/LearnEraPlatForm,shabab1
2/edx-platform,etzhou/edx-platform,solashirai/edx-platform,beacloudgenius/edx-platform,MSOpenTech/edx-platform,cpennington/edx-platform,jazkarta/edx-platform,zadgroup/edx-platform,synergeticsedx/deployment-wipro,nttks/edx-platform,ubc/edx-platform,jelugbo/tundex,arifsetiawan/edx-platform,xinjiguaike/edx-platform,dsajkl/reqiop,OmarIthawi/edx-platform,bigdatauniversity/edx-platform,Edraak/circleci-edx-platform,ovnicraft/edx-platform,apigee/edx-platform,appliedx/edx-platform,praveen-pal/edx-platform,jazkarta/edx-platform-for-isc,AkA84/edx-platform,solashirai/edx-platform,dcosentino/edx-platform,Kalyzee/edx-platform,CredoReference/edx-platform,hmcmooc/muddx-platform,abdoosh00/edraak
|
common/lib/xmodule/xmodule/modulestore/tests/test_xml.py
|
common/lib/xmodule/xmodule/modulestore/tests/test_xml.py
|
import os.path
from nose.tools import assert_raises, assert_equals
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.xml import XMLModuleStore
from xmodule.modulestore import XML_MODULESTORE_TYPE
from .test_modulestore import check_path_to_location
from xmodule.tests import DATA_DIR
class TestXMLModuleStore(object):
    """Tests for XMLModuleStore loading courses from the shared XML DATA_DIR."""

    def test_path_to_location(self):
        """Make sure that path_to_location works properly"""
        print "Starting import"
        modulestore = XMLModuleStore(DATA_DIR, course_dirs=['toy', 'simple'])
        print "finished import"
        check_path_to_location(modulestore)

    def test_xml_modulestore_type(self):
        # Any course id should report the XML store type, even one not loaded.
        store = XMLModuleStore(DATA_DIR, course_dirs=['toy', 'simple'])
        assert_equals(store.get_modulestore_type('foo/bar/baz'), XML_MODULESTORE_TYPE)

    def test_unicode_chars_in_xml_content(self):
        # edX/full/6.002_Spring_2012 has non-ASCII chars, and during
        # uniquification of names, would raise a UnicodeError. It no longer does.
        # Ensure that there really is a non-ASCII character in the course.
        with open(os.path.join(DATA_DIR, "toy/sequential/vertical_sequential.xml")) as xmlf:
            xml = xmlf.read()
        with assert_raises(UnicodeDecodeError):
            xml.decode('ascii')
        # Load the course, but don't make error modules. This will succeed,
        # but will record the errors.
        modulestore = XMLModuleStore(DATA_DIR, course_dirs=['toy'], load_error_modules=False)
        # Look up the errors during load. There should be none.
        location = CourseDescriptor.id_to_location("edX/toy/2012_Fall")
        errors = modulestore.get_item_errors(location)
        assert errors == []
|
import os.path
from nose.tools import assert_raises, assert_equals
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.xml import XMLModuleStore
from xmodule.modulestore import XML_MODULESTORE_TYPE
from .test_modulestore import check_path_to_location
from . import DATA_DIR
class TestXMLModuleStore(object):
    """Tests for XMLModuleStore loading courses from the shared XML DATA_DIR."""

    def test_path_to_location(self):
        """Make sure that path_to_location works properly"""
        print "Starting import"
        modulestore = XMLModuleStore(DATA_DIR, course_dirs=['toy', 'simple'])
        print "finished import"
        check_path_to_location(modulestore)

    def test_xml_modulestore_type(self):
        # Any course id should report the XML store type, even one not loaded.
        store = XMLModuleStore(DATA_DIR, course_dirs=['toy', 'simple'])
        assert_equals(store.get_modulestore_type('foo/bar/baz'), XML_MODULESTORE_TYPE)

    def test_unicode_chars_in_xml_content(self):
        # edX/full/6.002_Spring_2012 has non-ASCII chars, and during
        # uniquification of names, would raise a UnicodeError. It no longer does.
        # Ensure that there really is a non-ASCII character in the course.
        with open(os.path.join(DATA_DIR, "toy/sequential/vertical_sequential.xml")) as xmlf:
            xml = xmlf.read()
        with assert_raises(UnicodeDecodeError):
            xml.decode('ascii')
        # Load the course, but don't make error modules. This will succeed,
        # but will record the errors.
        modulestore = XMLModuleStore(DATA_DIR, course_dirs=['toy'], load_error_modules=False)
        # Look up the errors during load. There should be none.
        location = CourseDescriptor.id_to_location("edX/toy/2012_Fall")
        errors = modulestore.get_item_errors(location)
        assert errors == []
|
agpl-3.0
|
Python
|
2b3667dfc4fbd6571da288146d4e8f8f8f2d51a1
|
Fix broken sorted set unit test.
|
4degrees/clique
|
test/unit/test_sorted_set.py
|
test/unit/test_sorted_set.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
from clique.sorted_set import SortedSet
# Shared fixture: unsorted input containing one duplicate (1 appears twice).
@pytest.fixture
def standard_set(request):
    '''Return sorted set.'''
    return SortedSet([4, 5, 6, 7, 2, 1, 1])
@pytest.mark.parametrize(('item', 'expected'), [
    (1, True),
    (10, False)
], ids=[
    'item present',
    'item not present'
])
def test_contains(item, expected, standard_set):
    '''Check item membership.'''
    # `is` also verifies __contains__ returns a genuine bool, not a truthy value.
    assert (item in standard_set) is expected
@pytest.mark.parametrize(('sorted_set', 'expected'), [
    (SortedSet(), 0),
    (SortedSet([]), 0),
    (SortedSet([1]), 1),
    (SortedSet([1, 2, 3]), 3),
    # Set semantics: duplicates collapse, so six inputs yield three members.
    (SortedSet([1, 1, 2, 2, 3, 3]), 3)
], ids=[
    'no iterable',
    'empty iterable',
    'single item',
    'multiple items',
    'duplicate multiple items'
])
def test_len(sorted_set, expected):
    '''Calculate set length.'''
    assert len(sorted_set) == expected
# NOTE(review): exact duplicate of the `standard_set` fixture defined earlier
# in this module; this redefinition shadows it — consider removing one copy.
@pytest.fixture
def standard_set(request):
    '''Return sorted set.'''
    return SortedSet([4, 5, 6, 7, 2, 1, 1])
@pytest.mark.parametrize(('sorted_set', 'item', 'expected'), [
    (SortedSet(), 1, 1),
    (SortedSet([1]), 1, 1),
    (SortedSet([1]), 2, 2)
], ids=[
    'item',
    'existing item',
    'new item'
])
def test_add(sorted_set, item, expected):
    '''Add item.'''
    # Adding an existing item must be a no-op for the length.
    sorted_set.add(item)
    assert item in sorted_set
    assert len(sorted_set) == expected
@pytest.mark.parametrize(('sorted_set', 'item'), [
    (SortedSet([1]), 1),
    (SortedSet(), 1)
], ids=[
    'present item',
    'missing item'
])
def test_discard(sorted_set, item):
    '''Discard item.'''
    # Discarding a missing item must not raise (unlike remove()).
    sorted_set.discard(item)
    assert item not in sorted_set
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import pytest
from clique.sorted_set import SortedSet
# Shared fixture: unsorted input containing one duplicate (1 appears twice).
@pytest.fixture
def standard_set(request):
    '''Return sorted set.'''
    return SortedSet([4, 5, 6, 7, 2, 1, 1])
@pytest.mark.parametrize(('item', 'expected'), [
    (1, True),
    (10, False)
], ids=[
    'item present',
    'item not present'
])
def test_contains(item, expected, standard_set):
    '''Check item membership.'''
    # `is` also verifies __contains__ returns a genuine bool, not a truthy value.
    assert (item in standard_set) is expected
@pytest.mark.parametrize(('sorted_set', 'expected'), [
    (SortedSet(), 0),
    (SortedSet([]), 0),
    (SortedSet([1]), 1),
    (SortedSet([1, 2, 3]), 3),
    # BUG FIX: a set collapses duplicates, so [1, 1, 2, 2, 3, 3] has 3
    # members, not 4 — the previous expectation of 4 made this case fail.
    (SortedSet([1, 1, 2, 2, 3, 3]), 3)
], ids=[
    'no iterable',
    'empty iterable',
    'single item',
    'multiple items',
    'duplicate multiple items'
])
def test_len(sorted_set, expected):
    '''Calculate set length.'''
    assert len(sorted_set) == expected
# NOTE(review): exact duplicate of the `standard_set` fixture defined earlier
# in this module; this redefinition shadows it — consider removing one copy.
@pytest.fixture
def standard_set(request):
    '''Return sorted set.'''
    return SortedSet([4, 5, 6, 7, 2, 1, 1])
@pytest.mark.parametrize(('sorted_set', 'item', 'expected'), [
    (SortedSet(), 1, 1),
    (SortedSet([1]), 1, 1),
    (SortedSet([1]), 2, 2)
], ids=[
    'item',
    'existing item',
    'new item'
])
def test_add(sorted_set, item, expected):
    '''Add item.'''
    # Adding an existing item must be a no-op for the length.
    sorted_set.add(item)
    assert item in sorted_set
    assert len(sorted_set) == expected
@pytest.mark.parametrize(('sorted_set', 'item'), [
    (SortedSet([1]), 1),
    (SortedSet(), 1)
], ids=[
    'present item',
    'missing item'
])
def test_discard(sorted_set, item):
    '''Discard item.'''
    # Discarding a missing item must not raise (unlike remove()).
    sorted_set.discard(item)
    assert item not in sorted_set
apache-2.0
|
Python
|
db0e6265892231ecf10244eb7ddcddc62a12b82b
|
Fix bug where cached items in subfolders would be re-read.
|
ollien/PyConfigManager
|
configmanager.py
|
configmanager.py
|
import json
import os
import os.path
class ConfigManager():
    """Load and cache JSON config files from a directory tree.

    Configs are keyed by their path relative to the config root, minus the
    ``.json`` suffix (e.g. ``sub/name``).  Parsed contents are shared between
    instances through the class-level ``_cache``.
    """

    # Shared across all instances so repeated construction doesn't re-read
    # files from disk.
    _cache = {}

    def __init__(self, configPath = "configs/"):
        if os.path.isdir(configPath):
            self.configPath = configPath
        else:
            raise IOError("Config Path does not exist")  # fixed typo "eixst"
        self._configs = {}
        self._syncCache()
        self.getConfigs()

    def __getitem__(self, key):
        try:
            return self._configs[key]
        except KeyError:
            # Another instance may have populated the shared cache since we
            # last synced; retry after syncing.
            # BUG FIX: this previously called the non-existent
            # self.syncCache(), raising AttributeError instead of KeyError.
            self._syncCache()
            return self._configs[key]

    #Recursive function to get all files. Sub is the relative path from the root config dir.
    def getConfigs(self, path = None, sub = "", overrideCache = False):
        if path == None:
            path = self.configPath
        files = os.listdir(path)
        for item in files:
            #Ignore hidden files.
            if item[0] == ".":
                continue
            #Remove the .json handle from the name
            name = item.replace(".json", "")
            finalName = os.path.join(sub, name)
            #If it's a directory, run this function again within that directory
            if os.path.isdir(os.path.join(path, item)):
                self.getConfigs(path = os.path.join(path, item), sub = os.path.join(sub, item))
            #If we already have something from the cache, or added in previously, skip it.
            elif overrideCache or finalName not in self._configs:
                #Read via a context manager so the file is closed even if
                #parsing raises.
                with open(os.path.join(path, item), "r") as f:
                    parsed = self.parseConfig(f.read())
                if parsed != None:
                    self.addConfig(finalName, parsed)

    #Returns parsed JSON if config is valid JSON, otherwise, return None
    def parseConfig(self, config):
        try:
            return json.loads(config)
        except ValueError:
            return None

    def addConfig(self, name, contents):
        #Record in both the per-instance map and the shared class cache.
        self._configs[name] = contents
        ConfigManager._cache[name] = contents

    def _syncCache(self):
        #Pull in any shared-cache entries this instance doesn't have yet.
        unmatchedKeys = [key for key in ConfigManager._cache.keys() if key not in self._configs]
        for key in unmatchedKeys:
            self._configs[key] = ConfigManager._cache[key]
|
import json
import os
import os.path
class ConfigManager():
    """Load and cache JSON config files from a directory tree.

    Configs are keyed by their path relative to the config root, minus the
    ``.json`` suffix (e.g. ``sub/name``).  Parsed contents are shared between
    instances through the class-level ``_cache``.
    """

    # Shared across all instances so repeated construction doesn't re-read
    # files from disk.
    _cache = {}

    def __init__(self, configPath = "configs/"):
        if os.path.isdir(configPath):
            self.configPath = configPath
        else:
            raise IOError("Config Path does not exist")  # fixed typo "eixst"
        self._configs = {}
        self._syncCache()
        self.getConfigs()

    def __getitem__(self, key):
        try:
            return self._configs[key]
        except KeyError:
            # BUG FIX: this previously called the non-existent
            # self.syncCache(), raising AttributeError instead of KeyError.
            self._syncCache()
            return self._configs[key]

    #Recursive function to get all files. Sub is the relative path from the root config dir.
    def getConfigs(self, path = None, sub = "", overrideCache = False):
        if path == None:
            path = self.configPath
        files = os.listdir(path)
        for item in files:
            #Ignore hidden files.
            if item[0] == ".":
                continue
            #Remove the .json handle from the name
            name = item.replace(".json", "")
            finalPath = os.path.join(sub, name)
            #If it's a directory, run this function again within that directory
            if os.path.isdir(os.path.join(path, item)):
                self.getConfigs(path = os.path.join(path, item), sub = os.path.join(sub, item))
            #BUG FIX: the membership check used the bare `name` while configs
            #are stored under `finalPath` (with the subfolder prefix), so
            #items in subfolders were re-read and re-parsed on every pass.
            elif overrideCache or finalPath not in self._configs:
                #Read via a context manager so the file is closed even if
                #parsing raises.
                with open(os.path.join(path, item), "r") as f:
                    parsed = self.parseConfig(f.read())
                if parsed != None:
                    self.addConfig(finalPath, parsed)

    #Returns parsed JSON if config is valid JSON, otherwise, return None
    def parseConfig(self, config):
        try:
            return json.loads(config)
        except ValueError:
            return None

    def addConfig(self, name, contents):
        #Record in both the per-instance map and the shared class cache.
        self._configs[name] = contents
        ConfigManager._cache[name] = contents

    def _syncCache(self):
        #Pull in any shared-cache entries this instance doesn't have yet.
        unmatchedKeys = [key for key in ConfigManager._cache.keys() if key not in self._configs]
        for key in unmatchedKeys:
            self._configs[key] = ConfigManager._cache[key]
|
mit
|
Python
|
17af071faa70d3dc4a884f62fb50f34e8621ac6d
|
Update watchman/constants.py
|
JBKahn/django-watchman,mwarkentin/django-watchman,mwarkentin/django-watchman,JBKahn/django-watchman
|
watchman/constants.py
|
watchman/constants.py
|
# Dotted paths of the checks run by default.
DEFAULT_CHECKS = (
    'watchman.checks.caches',
    'watchman.checks.databases',
    'watchman.checks.storage',
)

# NOTE(review): how "paid" checks are gated is not visible here — confirm
# against the code that consumes this constant.
PAID_CHECKS = (
    'watchman.checks.email',
)
|
# Dotted paths of the checks run by default.
DEFAULT_CHECKS = (
    'watchman.checks.caches',
    'watchman.checks.databases',
    'watchman.checks.storage',
)

# NOTE(review): how "paid" checks are gated is not visible here — confirm
# against the code that consumes this constant.
PAID_CHECKS = (
    'watchman.checks.email',
)
|
bsd-3-clause
|
Python
|
e1a7e4535e64c005fb508ba6d3fed021bbd40a62
|
Update only tables in visible schemas
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
oedb_datamodels/versions/1a73867b1e79_add_meta_search.py
|
oedb_datamodels/versions/1a73867b1e79_add_meta_search.py
|
"""Add meta_search table
Revision ID: 1a73867b1e79
Revises: 1c6e2fb3d3b6
Create Date: 2019-04-29 11:47:04.783168
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm.session import sessionmaker
from api.actions import update_meta_search
from dataedit.views import schema_whitelist
# revision identifiers, used by Alembic.
revision = "1a73867b1e79"
down_revision = "1c6e2fb3d3b6"
branch_labels = None
depends_on = None
def upgrade():
    """Create public.meta_search and index tables from whitelisted schemas."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "meta_search",
        sa.Column("schema", sa.String(length=100), nullable=False),
        sa.Column("table", sa.String(length=100), nullable=False),
        sa.Column("comment", postgresql.TSVECTOR(), nullable=True),
        sa.PrimaryKeyConstraint("schema", "table"),
        schema="public",
    )
    # Reflect every table reachable on the migration connection, but only
    # index those whose schema is visible on the platform.
    bind = op.get_bind()
    metadata = sa.MetaData(bind=bind)
    metadata.reflect()
    for tbl in metadata.tables.values():
        if tbl.schema not in schema_whitelist:
            continue
        update_meta_search(tbl.name, tbl.schema)
def downgrade():
    # Reverse of upgrade(): drop the search-index table.
    op.drop_table("meta_search", schema="public")
|
"""Add meta_search table
Revision ID: 1a73867b1e79
Revises: 1c6e2fb3d3b6
Create Date: 2019-04-29 11:47:04.783168
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm.session import sessionmaker
from api.actions import update_meta_search
# revision identifiers, used by Alembic.
revision = "1a73867b1e79"
down_revision = "1c6e2fb3d3b6"
branch_labels = None
depends_on = None
def upgrade():
    """Create public.meta_search and populate it from reflected tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "meta_search",
        sa.Column("schema", sa.String(length=100), nullable=False),
        sa.Column("table", sa.String(length=100), nullable=False),
        sa.Column("comment", postgresql.TSVECTOR(), nullable=True),
        sa.PrimaryKeyConstraint("schema", "table"),
        schema="public",
    )
    # NOTE(review): this indexes tables from *every* schema visible on the
    # connection, including ones presumably not exposed by the platform —
    # consider restricting to a whitelist of visible schemas.
    conn = op.get_bind()
    meta = sa.MetaData(bind=conn)
    meta.reflect()
    for table in meta.tables.values():
        update_meta_search(table.name, table.schema)
def downgrade():
    # Reverse of upgrade(): drop the search-index table.
    op.drop_table("meta_search", schema="public")
|
agpl-3.0
|
Python
|
f1fec3790fee11ff3d83c272e3a2aa7bb548ddfa
|
Remove print
|
deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel,deepmind/open_spiel
|
open_spiel/python/algorithms/expected_game_score_test.py
|
open_spiel/python/algorithms/expected_game_score_test.py
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.algorithms.policy_value."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import numpy as np
from open_spiel.python import policy
from open_spiel.python.algorithms import expected_game_score
import open_spiel.python.games
import pyspiel
class PolicyValueTest(absltest.TestCase):
  """Checks expected_game_score.policy_value on games with known values."""

  def test_expected_game_score_uniform_random_kuhn_poker(self):
    game = pyspiel.load_game("kuhn_poker")
    uniform_policy = policy.UniformRandomPolicy(game)
    uniform_policy_values = expected_game_score.policy_value(
        game.new_initial_state(), [uniform_policy] * 2)
    # Asserted value: 1/8 for player 0, -1/8 for player 1.
    self.assertTrue(np.allclose(uniform_policy_values, [1 / 8, -1 / 8]))

  def test_expected_game_score_uniform_random_iterated_prisoner_dilemma(self):
    game = pyspiel.load_game(
        "python_iterated_prisoners_dilemma(max_game_length=6)")
    uniform_policy = policy.UniformRandomPolicy(game)
    uniform_policy_values = expected_game_score.policy_value(
        game.new_initial_state(), uniform_policy)
    # NOTE(review): expected values presumably taken from a reference run —
    # confirm if the game implementation changes.
    self.assertTrue(
        np.allclose(uniform_policy_values, [17.6385498, 17.6385498]))
# Run the tests when executed directly.
if __name__ == "__main__":
  absltest.main()
|
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for open_spiel.python.algorithms.policy_value."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import numpy as np
from open_spiel.python import policy
from open_spiel.python.algorithms import expected_game_score
import open_spiel.python.games
import pyspiel
class PolicyValueTest(absltest.TestCase):
  """Checks expected_game_score.policy_value on games with known values."""

  def test_expected_game_score_uniform_random_kuhn_poker(self):
    game = pyspiel.load_game("kuhn_poker")
    uniform_policy = policy.UniformRandomPolicy(game)
    uniform_policy_values = expected_game_score.policy_value(
        game.new_initial_state(), [uniform_policy] * 2)
    # Asserted value: 1/8 for player 0, -1/8 for player 1.
    self.assertTrue(np.allclose(uniform_policy_values, [1 / 8, -1 / 8]))

  def test_expected_game_score_uniform_random_iterated_prisoner_dilemma(self):
    game = pyspiel.load_game(
        "python_iterated_prisoners_dilemma(max_game_length=6)")
    uniform_policy = policy.UniformRandomPolicy(game)
    uniform_policy_values = expected_game_score.policy_value(
        game.new_initial_state(), uniform_policy)
    # Removed leftover debug print of uniform_policy_values: tests should
    # not write to stdout on the success path.
    self.assertTrue(
        np.allclose(uniform_policy_values, [17.6385498, 17.6385498]))
# Run the tests when executed directly.
if __name__ == "__main__":
  absltest.main()
|
apache-2.0
|
Python
|
fa52bbde01f62bb0816e71970ac50761947afa72
|
Improve comment
|
lee101/retaining-wall
|
retaining_wall.py
|
retaining_wall.py
|
class RetainingWallSolver(object):
    """Assigns required plank lengths to stock wood with a minimal number of cuts.

    ``retaining_wall`` returns ``{'cuts': [...]}`` describing which stock piece
    each cut is taken from, or ``False`` when the requirements cannot all be
    satisfied.  A saw cut is only counted when the stock piece is not consumed
    exactly (i.e. a leftover remains).
    """

    def retaining_wall(self, wood_lengths, required_lengths):
        """Solve for ``required_lengths`` using the stock in ``wood_lengths``."""
        self.required_lengths = required_lengths
        return self.retaining_wall_recursive(wood_lengths, len(required_lengths) - 1)

    def retaining_wall_recursive(self, wood_lengths, required_length_idx):
        """Recursively satisfy requirements ``[0 .. required_length_idx]``."""
        # Base case: every requirement has been assigned.
        if required_length_idx <= -1:
            return {'cuts': []}

        needed = self.required_lengths[required_length_idx]
        candidates = []
        # Try each stock piece (highest index first, matching search order).
        for stock_idx, stock in reversed(list(enumerate(wood_lengths))):
            if stock < needed:
                # This piece is too short for the current requirement.
                continue
            # Tentatively take `needed` out of this piece and solve the rest.
            remaining = list(wood_lengths)
            remaining[stock_idx] = stock - needed
            partial = self.retaining_wall_recursive(remaining, required_length_idx - 1)
            if not partial:
                continue
            if remaining[stock_idx] != 0:
                # An inexact fit requires an actual saw cut.
                partial['cuts'].append({
                    'wood_num': stock_idx,
                    'cut_amount': needed
                })
            candidates.append(partial)

        if not candidates:
            return False
        # Keep the assignment needing the fewest cuts.
        return min(candidates, key=lambda sol: len(sol['cuts']))
|
class RetainingWallSolver(object):
    """Assigns required plank lengths to stock wood, minimising saw cuts.

    Returns ``{'cuts': [...]}`` or ``False`` when the requirements cannot be
    met; a cut is only counted when a stock piece is not consumed exactly.
    """

    def retaining_wall(self, wood_lengths, required_lengths):
        # Entry point: store requirements, then solve from the last one down.
        self.required_lengths = required_lengths
        return self.retaining_wall_recursive(wood_lengths, len(required_lengths) - 1)

    def retaining_wall_recursive(self, wood_lengths, required_length_idx):
        # Base case: all requirements satisfied.
        if required_length_idx <= -1:
            return {
                'cuts': []
            }
        current_required_length = self.required_lengths[required_length_idx]
        possible_subsolutions = []
        for wood_length_idx in range(len(wood_lengths) - 1, -1, -1):
            if wood_lengths[wood_length_idx] < current_required_length:
                # cant cut from this length
                continue
            # what if we chose to cut this required length out of this wood length
            new_wood_lengths = list(wood_lengths)
            new_wood_lengths[wood_length_idx] -= current_required_length
            subsolution = self.retaining_wall_recursive(new_wood_lengths, required_length_idx - 1)
            if not subsolution:
                continue
            if new_wood_lengths[wood_length_idx] != 0:
                # An inexact fit requires an actual saw cut.
                subsolution['cuts'].append({
                    'wood_num': wood_length_idx,
                    'cut_amount': current_required_length
                })
            possible_subsolutions.append(subsolution)
        if len(possible_subsolutions) == 0:
            return False
        # return the solution with the least number of cuts
        return min(possible_subsolutions, key=lambda s: len(s['cuts']))
|
mit
|
Python
|
b4a9380c73dd367c2cf6249cdf4cdbbdfdbc7907
|
fix example
|
estin/pomp
|
examples/pythonnews.py
|
examples/pythonnews.py
|
"""
Extract python news from python.org
"""
import re
import logging
from pomp.core.base import BaseCrawler, BasePipeline
from pomp.core.item import Item, Field
from pomp.contrib import SimpleDownloader
logging.basicConfig(level=logging.DEBUG)
news_re = re.compile(r'<h2 class="news">(.*?)</h2>([\s\S]*?)<div class="pubdate">(.*?)</div>')
class PythonNewsItem(Item):
    # Extracted fields: headline text and publication-date string.
    title = Field()
    published = Field()

    def __repr__(self):
        return '%s\n\t%s\n' % (
            self.title,
            self.published,
        )
class PythonNewsCrawler(BaseCrawler):
    """Single-page crawler for the python.org news listing."""
    ENTRY_URL = 'http://python.org/news/'

    def extract_items(self, response):
        # Regex groups: 0 = title, 1 = body (unused), 2 = publication date.
        for i in news_re.findall(response.body.decode('utf-8')):
            item = PythonNewsItem()
            item.title, item.published = i[0], i[2]
            yield item

    def next_url(self, response):
        return None # one page crawler
class PrintPipeline(BasePipeline):
    def process(self, crawler, item):
        # Pipeline hook: emit each extracted item to stdout.
        print(item)
# Wire a simple downloader to the print pipeline and run the crawl.
if __name__ == '__main__':
    from pomp.core.engine import Pomp

    pomp = Pomp(
        downloader=SimpleDownloader(),
        pipelines=[PrintPipeline()],
    )

    pomp.pump(PythonNewsCrawler())
|
"""
Extract python news from python.org
"""
import re
import logging
from pomp.core.base import BaseCrawler, BasePipeline
from pomp.core.item import Item, Field
from pomp.contrib import SimpleDownloader
logging.basicConfig(level=logging.DEBUG)
news_re = re.compile(r'<h2 class="news">(.*?)</h2>([\s\S]*?)<div class="pubdate">(.*?)</div>')
class PythonNewsItem(Item):
    # Extracted fields: headline text and publication-date string.
    title = Field()
    published = Field()

    def __repr__(self):
        return '%s\n\t%s\n' % (
            self.title,
            self.published,
        )
class PythonNewsCrawler(BaseCrawler):
    """Single-page crawler for the python.org news listing."""
    ENTRY_URL = 'http://python.org/news/'

    def extract_items(self, response):
        # Regex groups: 0 = title, 1 = body (unused), 2 = publication date.
        for i in news_re.findall(response.body.decode('utf-8')):
            item = PythonNewsItem()
            item.title, item.published = i[0], i[2]
            yield item

    def next_url(self, response):
        return None # one page crawler
class PrintPipeline(BasePipeline):
    # NOTE(review): elsewhere this pipeline is defined as
    # process(self, crawler, item) — confirm which signature the installed
    # pomp engine actually calls; a mismatch would raise TypeError at runtime.
    def process(self, item):
        print(item)
# Wire a simple downloader to the print pipeline and run the crawl.
if __name__ == '__main__':
    from pomp.core.engine import Pomp

    pomp = Pomp(
        downloader=SimpleDownloader(),
        pipelines=[PrintPipeline()],
    )

    pomp.pump(PythonNewsCrawler())
|
bsd-3-clause
|
Python
|
e3c42442f090b8b6982f7ff8c93632c43cfa80b3
|
use insights landing for offseason
|
verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance
|
tba_config.py
|
tba_config.py
|
import os
# True when running under a dev server (SERVER_SOFTWARE starts with "Dev").
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')

MAX_YEAR = 2015

# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
INSIGHTS = 5
CHAMPS = 6

# The CONFIG variables should have exactly the same structure between
# environments. Eventually a test environment should be added.
# -gregmarra 17 Jul 2012
CONFIG = {
    "env": "dev" if DEBUG else "prod",
    "memcache": not DEBUG,
    "response_cache": not DEBUG,
    "firebase-url": (
        "https://thebluealliance-dev.firebaseio.com/{}.json?auth={}"
        if DEBUG else
        "https://thebluealliance.firebaseio.com/{}.json?auth={}"
    ),
}

# Landing page currently shows the insights view.
CONFIG['landing_handler'] = INSIGHTS
CONFIG["static_resource_version"] = 7
|
import os
# True when running under a dev server (SERVER_SOFTWARE starts with "Dev").
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')

MAX_YEAR = 2015

# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
INSIGHTS = 5
CHAMPS = 6

# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
    CONFIG = {
        "env": "dev",
        "memcache": False,
        "response_cache": False,
        "firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?auth={}"
    }
else:
    CONFIG = {
        "env": "prod",
        "memcache": True,
        "response_cache": True,
        "firebase-url": "https://thebluealliance.firebaseio.com/{}.json?auth={}"
    }

# Landing page currently shows the offseason view.
CONFIG['landing_handler'] = OFFSEASON
CONFIG["static_resource_version"] = 7
|
mit
|
Python
|
b860d7cb81488f5ebbe7e9e356a6d4f140c33df5
|
update to follow python 2to3 changes
|
sassy/FikaNote,gmkou/FikaNote,sassy/FikaNote,gmkou/FikaNote,gmkou/FikaNote,sassy/FikaNote
|
tests/__init__.py
|
tests/__init__.py
|
# Re-export all test modules so the test runner discovers them from the
# package root. Explicit relative imports keep this working on Python 3.
from .test_home import *
from .test_feed import *
from .test_shownote import *
from .test_agenda import *
from .test_episode import *
|
# Re-export all test modules so the test runner discovers them from the
# package root.
# BUG FIX: the implicit relative form (`from test_home import *`) was removed
# in Python 3 (PEP 328) and raises ImportError there; use explicit relative
# imports, which work identically and export the same names.
from .test_home import *
from .test_feed import *
from .test_shownote import *
from .test_agenda import *
from .test_episode import *
|
mit
|
Python
|
d77256d1964354eb7dd178f383dd3254c3b4d975
|
Fix source docs page
|
sncosmo/sncosmo,sncosmo/sncosmo,sncosmo/sncosmo
|
docs/_helpers/source_page.py
|
docs/_helpers/source_page.py
|
"""Generate a restructured text document that describes built-in sources
and save it to this module's docstring for the purpose of including in
sphinx documentation via the automodule directive."""
import string
from sncosmo.models import _SOURCES
# RST grid-table header: column widths 30/7/10/27/30/7 plus a Notes column.
lines = [
    '',
    ' '.join([30*'=', 7*'=', 10*'=', 27*'=', 30*'=', 7*'=', 20*'=']),
    '{0:30} {1:7} {2:10} {3:27} {4:30} {5:7} {6:50}'.format(
        'Name', 'Version', 'Type', 'Subclass', 'Reference', 'Website', 'Notes')
]
lines.append(lines[1])

# Accumulated across all sources: URL -> footnote index, note texts, refs.
urlnums = {}
allnotes = []
allrefs = []
for m in _SOURCES.get_loaders_metadata():
    reflink = ''
    urllink = ''
    notelink = ''
    if 'note' in m:
        if m['note'] not in allnotes:
            allnotes.append(m['note'])
        notenum = allnotes.index(m['note'])
        notelink = '[{0}]_'.format(notenum + 1)
    if 'reference' in m:
        reflink = '[{0}]_'.format(m['reference'][0])
        if m['reference'] not in allrefs:
            allrefs.append(m['reference'])
    if 'url' in m:
        url = m['url']
        if url not in urlnums:
            # Label URLs a, b, c, ... in first-seen order.
            if len(urlnums) == 0:
                urlnums[url] = 0
            else:
                urlnums[url] = max(urlnums.values()) + 1
        urllink = '`{0}`_'.format(string.ascii_letters[urlnums[url]])
    lines.append("{0!r:30} {1!r:7} {2:10} {3:27} {4:30} {5:7} {6:50}"
                 .format(m['name'], m['version'], m['type'], m['subclass'],
                         reflink, urllink, notelink))
lines.extend([lines[1], ''])

# Emit the footnote / link targets referenced from the table body.
for refkey, ref in allrefs:
    lines.append('.. [{0}] `{1}`__'.format(refkey, ref))
lines.append('')
for url, urlnum in urlnums.items():
    lines.append('.. _`{0}`: {1}'.format(string.ascii_letters[urlnum], url))
lines.append('')
for i, note in enumerate(allnotes):
    lines.append('.. [{0}] {1}'.format(i + 1, note))
lines.append('')

# Expose the generated table as this module's docstring for automodule.
__doc__ = '\n'.join(lines)
|
"""Generate a restructured text document that describes built-in sources
and save it to this module's docstring for the purpose of including in
sphinx documentation via the automodule directive."""
import string
from sncosmo.models import _SOURCES
lines = [
'',
' '.join([20*'=', 7*'=', 10*'=', 27*'=', 30*'=', 7*'=', 20*'=']),
'{0:20} {1:7} {2:10} {3:27} {4:30} {5:7} {6:50}'.format(
'Name', 'Version', 'Type', 'Subclass', 'Reference', 'Website', 'Notes')
]
lines.append(lines[1])
urlnums = {}
allnotes = []
allrefs = []
for m in _SOURCES.get_loaders_metadata():
reflink = ''
urllink = ''
notelink = ''
if 'note' in m:
if m['note'] not in allnotes:
allnotes.append(m['note'])
notenum = allnotes.index(m['note'])
notelink = '[{0}]_'.format(notenum + 1)
if 'reference' in m:
reflink = '[{0}]_'.format(m['reference'][0])
if m['reference'] not in allrefs:
allrefs.append(m['reference'])
if 'url' in m:
url = m['url']
if url not in urlnums:
if len(urlnums) == 0:
urlnums[url] = 0
else:
urlnums[url] = max(urlnums.values()) + 1
urllink = '`{0}`_'.format(string.ascii_letters[urlnums[url]])
lines.append("{0!r:20} {1!r:7} {2:10} {3:27} {4:30} {5:7} {6:50}"
.format(m['name'], m['version'], m['type'], m['subclass'],
reflink, urllink, notelink))
lines.extend([lines[1], ''])
for refkey, ref in allrefs:
lines.append('.. [{0}] `{1}`__'.format(refkey, ref))
lines.append('')
for url, urlnum in urlnums.items():
lines.append('.. _`{0}`: {1}'.format(string.ascii_letters[urlnum], url))
lines.append('')
for i, note in enumerate(allnotes):
lines.append('.. [{0}] {1}'.format(i + 1, note))
lines.append('')
__doc__ = '\n'.join(lines)
|
bsd-3-clause
|
Python
|
5f5a7ec9460d60a964663ace670529813a41a9d9
|
Update bluetooth_ping_test.py
|
daveol/Fedora-Test-Laptop,daveol/Fedora-Test-Laptop
|
tests/bluetooth_ping_test.py
|
tests/bluetooth_ping_test.py
|
#!/usr/bin/env python
import os
import subprocess as subp
from subprocess import *
from avocado import Test
#I have used my Samsung Galaxy S7 Edge as target device
class WifiScanAP(Test):
def test():
targetDeviceMac = '8C:1A:BF:0D:31:A9'
bluetoothChannel = '2'
port = 1
print("Bluetooth ping test: testing " + targetDeviceMac)
p = subp.Popen(['sudo', 'l2ping', '8C:1A:BF:0D:31:A9','-c', '5'], stdout=subp.PIPE, stderr=subp.PIPE)
stdout, stderr = p.communicate()
res = stdout.rstrip()
if "5 sent, 5 received" in res:
self.log.debug("Bluetooth ping test succeeded: + res")
else:
self.fail("Bluetooth ping test: pinging " + targetDeviceMac + " failed")
|
#!/usr/bin/env python
import os
import subprocess as subp
from subprocess import *
from avocado import Test
class WifiScanAP(Test):
def test():
targetDeviceMac = '8C:1A:BF:0D:31:A9'
bluetoothChannel = '2'
port = 1
print("Bluetooth ping test: testing " + targetDeviceMac)
p = subp.Popen(['sudo', 'l2ping', '8C:1A:BF:0D:31:A9','-c', '5'], stdout=subp.PIPE, stderr=subp.PIPE)
stdout, stderr = p.communicate()
res = stdout.rstrip()
if "5 sent, 5 received" in res:
self.log.debug("Bluetooth ping test succeeded: + res")
else:
self.fail("Bluetooth ping test: pinging " + targetDeviceMac + " failed")
|
mit
|
Python
|
9cc45f750c0860715e66c085895611984531c48c
|
update standalone disclosure url
|
mistergone/college-costs,mistergone/college-costs,mistergone/college-costs,mistergone/college-costs
|
paying_for_college/config/urls.py
|
paying_for_college/config/urls.py
|
from django.conf.urls import url, include
from django.conf import settings
from paying_for_college.views import LandingView, StandAloneView
from django.contrib import admin
from django.conf import settings
try:
STANDALONE = settings.STANDALONE
except AttributeError: # pragma: no cover
STANDALONE = False
urlpatterns = [
url(r'^$',
LandingView.as_view(), name='pfc-landing'),
url(r'^understanding-financial-aid-offers/',
include('paying_for_college.disclosures.urls',
namespace='disclosures')),
url(r'^repaying-student-debt/$',
StandAloneView.as_view(template_name='repay_student_debt.html'),
name='pfc-repay'),
url(r'^choosing-a-student-loan/$',
StandAloneView.as_view(template_name='choose_a_loan.html'),
name='pfc-choose'),
url(r'^managing-college-money/$',
StandAloneView.as_view(template_name='manage_your_money.html'),
name='pfc-manage'),
]
if STANDALONE:
urlpatterns += [
url(r'^paying-for-college/admin/', include(admin.site.urls)),
url(r'^paying-for-college/$',
LandingView.as_view(), name='standalone:pfc-landing'),
url(r'^paying-for-college/understanding-financial-aid-offers/',
include('paying_for_college.disclosures.urls',
namespace='standalone-disclosures')),
url(r'^paying-for-college/repaying-student-debt/',
StandAloneView.as_view(template_name='repay_student_debt.html'),
name='standalone-pfc-repay'),
url(r'^paying-for-college/choosing-a-student-loan/$',
StandAloneView.as_view(template_name='choose_a_loan.html'),
name='standalone-pfc-choose'),
url(r'^paying-for-college/managing-college-money/$',
StandAloneView.as_view(template_name='manage_your_money.html'),
name='standalone-pfc-manage'),
]
|
from django.conf.urls import url, include
from django.conf import settings
from paying_for_college.views import LandingView, StandAloneView
from django.contrib import admin
from django.conf import settings
try:
STANDALONE = settings.STANDALONE
except AttributeError: # pragma: no cover
STANDALONE = False
urlpatterns = [
url(r'^$',
LandingView.as_view(), name='pfc-landing'),
url(r'^understanding-financial-aid-offers/',
include('paying_for_college.disclosures.urls',
namespace='disclosures')),
url(r'^repaying-student-debt/$',
StandAloneView.as_view(template_name='repay_student_debt.html'),
name='pfc-repay'),
url(r'^choosing-a-student-loan/$',
StandAloneView.as_view(template_name='choose_a_loan.html'),
name='pfc-choose'),
url(r'^managing-college-money/$',
StandAloneView.as_view(template_name='manage_your_money.html'),
name='pfc-manage'),
]
if STANDALONE:
urlpatterns += [
url(r'^paying-for-college/admin/', include(admin.site.urls)),
url(r'^paying-for-college/$',
LandingView.as_view(), name='standalone:pfc-landing'),
url(r'^paying-for-college/compare-financial-aid-and-college-cost/',
include('paying_for_college.disclosures.urls',
namespace='standalone-disclosures')),
url(r'^paying-for-college/repaying-student-debt/',
StandAloneView.as_view(template_name='repay_student_debt.html'),
name='standalone-pfc-repay'),
url(r'^paying-for-college/choosing-a-student-loan/$',
StandAloneView.as_view(template_name='choose_a_loan.html'),
name='standalone-pfc-choose'),
url(r'^paying-for-college/managing-college-money/$',
StandAloneView.as_view(template_name='manage_your_money.html'),
name='standalone-pfc-manage'),
]
|
cc0-1.0
|
Python
|
43b4910e004e7096addb3d50e8a0a6c307a669c6
|
Remove dead get_body_parameter_name_override
|
akx/lepo,akx/lepo
|
lepo/apidef/operation/openapi.py
|
lepo/apidef/operation/openapi.py
|
from lepo.apidef.operation.base import Operation
from lepo.apidef.parameter.openapi import OpenAPI3BodyParameter, OpenAPI3Parameter
from lepo.utils import maybe_resolve
class OpenAPI3Operation(Operation):
parameter_class = OpenAPI3Parameter
body_parameter_class = OpenAPI3BodyParameter
def _get_body_parameter(self):
for source in (
self.path.mapping.get('requestBody'),
self.data.get('requestBody'),
):
if source:
source = maybe_resolve(source, self.api.resolve_reference)
body_parameter = self.body_parameter_class(data=source, operation=self, api=self.api)
# TODO: Document x-lepo-body-name
body_parameter.name = self.data.get('x-lepo-body-name', body_parameter.name)
return body_parameter
def get_parameter_dict(self):
parameter_dict = super().get_parameter_dict()
for parameter in parameter_dict.values():
if parameter.in_body: # pragma: no cover
raise ValueError('Regular parameter declared to be in body while parsing OpenAPI 3')
body_parameter = self._get_body_parameter()
if body_parameter:
parameter_dict[body_parameter.name] = body_parameter
return parameter_dict
|
from lepo.apidef.operation.base import Operation
from lepo.apidef.parameter.openapi import OpenAPI3BodyParameter, OpenAPI3Parameter
from lepo.utils import maybe_resolve
class OpenAPI3Operation(Operation):
parameter_class = OpenAPI3Parameter
body_parameter_class = OpenAPI3BodyParameter
def _get_body_parameter(self):
for source in (
self.path.mapping.get('requestBody'),
self.data.get('requestBody'),
):
if source:
source = maybe_resolve(source, self.api.resolve_reference)
body_parameter = self.body_parameter_class(data=source, operation=self, api=self.api)
# TODO: Document x-lepo-body-name
body_parameter.name = self.data.get('x-lepo-body-name', body_parameter.name)
return body_parameter
def get_body_parameter_name_override(self):
return
def get_parameter_dict(self):
parameter_dict = super().get_parameter_dict()
for parameter in parameter_dict.values():
if parameter.in_body: # pragma: no cover
raise ValueError('Regular parameter declared to be in body while parsing OpenAPI 3')
body_parameter = self._get_body_parameter()
if body_parameter:
parameter_dict[body_parameter.name] = body_parameter
return parameter_dict
|
mit
|
Python
|
b5a6d540f5fdef37b1d58fc45921737e3c77ae96
|
fix user autocomplete
|
oleg-chubin/let_me_play,oleg-chubin/let_me_play,oleg-chubin/let_me_play,oleg-chubin/let_me_play
|
let_me_app/views/autocomplete.py
|
let_me_app/views/autocomplete.py
|
from dal import autocomplete
from slugify import slugify
from let_me_auth.models import User
from let_me_app.models import Equipment, StaffRole
from let_me_auth.social.pipeline import ABSENT_MAIL_HOST
import re
class UserAutocomplete(autocomplete.Select2QuerySetView):
create_field = 'username'
def create_object(self, text):
cell_phone = re.findall(r'\+?(\d{9,12})', text)
if cell_phone:
cell_phone = cell_phone[0]
text = re.sub(r'\+?(\d{9,12})', '', text).strip()
parts = text.split(' ', 1)
first_name = parts[0].strip()
email_parts = [slugify(first_name)]
defaults = {'first_name': first_name}
if len(parts) > 1:
last_name = parts[1].strip()
defaults['last_name'] = last_name
email_parts.append(slugify(last_name))
email = '@'.join(['.'.join(email_parts), ABSENT_MAIL_HOST])
if cell_phone:
required = {'cell_phone': cell_phone}
defaults.update({'email': email})
else:
required = {'email': email}
user, _ = User.objects.get_or_create(defaults=defaults, **required)
return user
def get_queryset(self):
# Don't forget to filter out results depending on the visitor !
if not self.request.user.is_authenticated():
return User.objects.none()
qs = User.objects.all()
if self.q:
qs = User.objects.filter(first_name__istartswith=self.q)
qs = qs | User.objects.filter(last_name__istartswith=self.q)
qs = qs | User.objects.filter(email__istartswith=self.q)
return qs
class EquipmentAutocomplete(autocomplete.Select2QuerySetView):
def get_queryset(self):
# Don't forget to filter out results depending on the visitor !
if not self.request.user.is_authenticated():
return Equipment.objects.none()
qs = Equipment.objects.all()
if self.q:
qs = Equipment.objects.filter(name__istartswith=self.q)
return qs
class StaffRoleAutocomplete(autocomplete.Select2QuerySetView):
def get_queryset(self):
# Don't forget to filter out results depending on the visitor !
if not self.request.user.is_authenticated():
return StaffRole.objects.none()
qs = StaffRole.objects.all()
if self.q:
qs = StaffRole.objects.filter(name__istartswith=self.q)
return qs
|
from dal import autocomplete
from slugify import slugify
from let_me_auth.models import User
from let_me_app.models import Equipment, StaffRole
class UserAutocomplete(autocomplete.Select2QuerySetView):
def get_queryset(self):
# Don't forget to filter out results depending on the visitor !
if not self.request.user.is_authenticated():
return User.objects.none()
qs = User.objects.all()
if self.q:
qs = User.objects.filter(first_name__istartswith=self.q)
qs = qs | User.objects.filter(last_name__istartswith=self.q)
qs = qs | User.objects.filter(email__istartswith=self.q)
return qs
class EquipmentAutocomplete(autocomplete.Select2QuerySetView):
def get_queryset(self):
# Don't forget to filter out results depending on the visitor !
if not self.request.user.is_authenticated():
return Equipment.objects.none()
qs = Equipment.objects.all()
if self.q:
qs = Equipment.objects.filter(name__istartswith=self.q)
return qs
class StaffRoleAutocomplete(autocomplete.Select2QuerySetView):
def get_queryset(self):
# Don't forget to filter out results depending on the visitor !
if not self.request.user.is_authenticated():
return StaffRole.objects.none()
qs = StaffRole.objects.all()
if self.q:
qs = StaffRole.objects.filter(name__istartswith=self.q)
return qs
|
apache-2.0
|
Python
|
060576768e02c0499282770dd22e35048d62b12e
|
Improve clarity of session finish function
|
jakirkham/bokeh,jakirkham/bokeh,rs2/bokeh,stonebig/bokeh,dennisobrien/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,DuCorey/bokeh,timsnyder/bokeh,aavanian/bokeh,ptitjano/bokeh,schoolie/bokeh,ericmjl/bokeh,clairetang6/bokeh,ptitjano/bokeh,justacec/bokeh,aiguofer/bokeh,dennisobrien/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,justacec/bokeh,DuCorey/bokeh,rs2/bokeh,ptitjano/bokeh,timsnyder/bokeh,mindriot101/bokeh,msarahan/bokeh,jakirkham/bokeh,bokeh/bokeh,philippjfr/bokeh,phobson/bokeh,draperjames/bokeh,percyfal/bokeh,aavanian/bokeh,stonebig/bokeh,quasiben/bokeh,percyfal/bokeh,aiguofer/bokeh,azjps/bokeh,aiguofer/bokeh,rs2/bokeh,ericmjl/bokeh,mindriot101/bokeh,stonebig/bokeh,dennisobrien/bokeh,azjps/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,phobson/bokeh,philippjfr/bokeh,philippjfr/bokeh,timsnyder/bokeh,dennisobrien/bokeh,msarahan/bokeh,dennisobrien/bokeh,KasperPRasmussen/bokeh,schoolie/bokeh,phobson/bokeh,justacec/bokeh,percyfal/bokeh,phobson/bokeh,msarahan/bokeh,schoolie/bokeh,justacec/bokeh,rs2/bokeh,clairetang6/bokeh,DuCorey/bokeh,clairetang6/bokeh,mindriot101/bokeh,bokeh/bokeh,jakirkham/bokeh,ericmjl/bokeh,aiguofer/bokeh,aiguofer/bokeh,mindriot101/bokeh,philippjfr/bokeh,stonebig/bokeh,Karel-van-de-Plassche/bokeh,quasiben/bokeh,rs2/bokeh,ericmjl/bokeh,jakirkham/bokeh,Karel-van-de-Plassche/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,schoolie/bokeh,aavanian/bokeh,timsnyder/bokeh,ptitjano/bokeh,msarahan/bokeh,draperjames/bokeh,philippjfr/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,clairetang6/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,azjps/bokeh,bokeh/bokeh,bokeh/bokeh,azjps/bokeh,aavanian/bokeh,ptitjano/bokeh,draperjames/bokeh,bokeh/bokeh,draperjames/bokeh,timsnyder/bokeh,schoolie/bokeh
|
tests/conftest.py
|
tests/conftest.py
|
from __future__ import print_function
import os
import boto
import pytest
from boto.s3.key import Key as S3Key
from boto.exception import NoAuthHandlerFound
from os.path import join
s3_bucket = "bokeh-travis"
s3 = "https://s3.amazonaws.com/%s" % s3_bucket
build_id = os.environ.get("TRAVIS_BUILD_ID")
# Can we make this not hard coded and read in the report location from pytest?
report_file = "tests/pytest-report.html"
def pytest_sessionfinish(session, exitstatus):
if os.environ.get("UPLOAD_PYTEST_HTML", "False") != "True":
return
if hasattr(session.config, 'slaveinput'):
# when slave nodes (xdist) finish, the report won't be ready
return
try:
conn = boto.connect_s3()
bucket = conn.get_bucket(s3_bucket)
with open(report_file, "r") as f:
html = f.read()
filename = join(build_id, "report.html")
key = S3Key(bucket, filename)
key.set_metadata("Content-Type", "text/html")
key.set_contents_from_string(html, policy="public-read")
print("\n%s Access report at: %s" % ("---", join(s3, filename)))
except NoAuthHandlerFound:
print("Upload was requested but could not connect to S3.")
except OSError:
print("Upload was requested but report was not generated.")
@pytest.fixture(scope="session")
def capabilities(capabilities):
capabilities["browserName"] = "firefox"
capabilities["tunnel-identifier"] = os.environ.get("TRAVIS_JOB_NUMBER")
return capabilities
|
from __future__ import print_function
import os
import boto
import pytest
from boto.s3.key import Key as S3Key
from boto.exception import NoAuthHandlerFound
from os.path import join, isfile
s3_bucket = "bokeh-travis"
s3 = "https://s3.amazonaws.com/%s" % s3_bucket
build_id = os.environ.get("TRAVIS_BUILD_ID")
# Can we make this not hard coded and read in the report location from pytest?
report_file = "tests/pytest-report.html"
def pytest_sessionfinish(session, exitstatus):
try_upload = os.environ.get("UPLOAD_PYTEST_HTML", "False") == "True"
report_ready = isfile(report_file)
if try_upload and report_ready:
try:
conn = boto.connect_s3()
bucket = conn.get_bucket(s3_bucket)
upload = True
except NoAuthHandlerFound:
print("Upload was requested but could not connect to S3.")
upload = False
if upload is True:
with open(report_file, "r") as f:
html = f.read()
filename = join(build_id, "report.html")
key = S3Key(bucket, filename)
key.set_metadata("Content-Type", "text/html")
key.set_contents_from_string(html, policy="public-read")
print("\n%s Access report at: %s" % ("---", join(s3, filename)))
@pytest.fixture(scope="session")
def capabilities(capabilities):
capabilities["browserName"] = "firefox"
capabilities["tunnel-identifier"] = os.environ.get("TRAVIS_JOB_NUMBER")
return capabilities
|
bsd-3-clause
|
Python
|
36a00bd6ece27b89843a856cd2b99d25a1d0e4d3
|
Modify conftest.py to support Python 3.5 only
|
wikimedia/pywikibot-core,wikimedia/pywikibot-core
|
tests/conftest.py
|
tests/conftest.py
|
# -*- coding: utf-8 -*-
"""Used by pytest to do some preparation work before running tests."""
#
# (C) Pywikibot team, 2016-2020
#
# Distributed under the terms of the MIT license.
#
import sys
def pytest_configure(config):
"""Set the sys._test_runner_pytest flag to True, if pytest is used."""
sys._test_runner_pytest = True
|
# -*- coding: utf-8 -*-
"""Used by pytest to do some preparation work before running tests."""
#
# (C) Pywikibot team, 2016-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
import sys
def pytest_configure(config):
"""Set the sys._test_runner_pytest flag to True, if pytest is used."""
sys._test_runner_pytest = True
|
mit
|
Python
|
d3fbe9934329df1b1c5f752e4a43981b4fc8beae
|
Use pathlib.Path
|
tony/vcspull,tony/vcspull
|
tests/conftest.py
|
tests/conftest.py
|
import pathlib
import pytest
from _pytest.compat import LEGACY_PATH
from libvcs.shortcuts import create_repo_from_pip_url
from libvcs.util import run
@pytest.fixture(scope="function")
def tmpdir_repoparent(tmp_path: pathlib.Path):
"""Return temporary directory for repository checkout guaranteed unique."""
fn = tmp_path
return fn
@pytest.fixture
def git_repo_kwargs(tmpdir_repoparent: pathlib.Path, git_dummy_repo_dir):
"""Return kwargs for :func:`create_repo_from_pip_url`."""
repo_name = "repo_clone"
return {
"url": "git+file://" + git_dummy_repo_dir,
"parent_dir": str(tmpdir_repoparent),
"name": repo_name,
}
@pytest.fixture
def git_repo(git_repo_kwargs):
"""Create an git repository for tests. Return repo."""
git_repo = create_repo_from_pip_url(**git_repo_kwargs)
git_repo.obtain(quiet=True)
return git_repo
@pytest.fixture
def create_git_dummy_repo(tmpdir_repoparent):
def fn(repo_name, testfile_filename="testfile.test"):
repo_path = str(tmpdir_repoparent / repo_name)
run(["git", "init", repo_name], cwd=str(tmpdir_repoparent))
run(["touch", testfile_filename], cwd=repo_path)
run(["git", "add", testfile_filename], cwd=repo_path)
run(["git", "commit", "-m", "test file for %s" % repo_name], cwd=repo_path)
return repo_path
yield fn
@pytest.fixture
def git_dummy_repo_dir(tmpdir_repoparent, create_git_dummy_repo):
"""Create a git repo with 1 commit, used as a remote."""
return create_git_dummy_repo("dummyrepo")
@pytest.fixture
def config_dir(tmpdir: LEGACY_PATH):
conf_dir = tmpdir.join(".vcspull")
conf_dir.ensure(dir=True)
return conf_dir
|
import pytest
from _pytest.compat import LEGACY_PATH
from libvcs.shortcuts import create_repo_from_pip_url
from libvcs.util import run
@pytest.fixture(scope="function")
def tmpdir_repoparent(tmpdir_factory):
"""Return temporary directory for repository checkout guaranteed unique."""
fn = tmpdir_factory.mktemp("repo")
return fn
@pytest.fixture
def git_repo_kwargs(tmpdir_repoparent, git_dummy_repo_dir):
"""Return kwargs for :func:`create_repo_from_pip_url`."""
repo_name = "repo_clone"
return {
"url": "git+file://" + git_dummy_repo_dir,
"parent_dir": str(tmpdir_repoparent),
"name": repo_name,
}
@pytest.fixture
def git_repo(git_repo_kwargs):
"""Create an git repository for tests. Return repo."""
git_repo = create_repo_from_pip_url(**git_repo_kwargs)
git_repo.obtain(quiet=True)
return git_repo
@pytest.fixture
def create_git_dummy_repo(tmpdir_repoparent):
def fn(repo_name, testfile_filename="testfile.test"):
repo_path = str(tmpdir_repoparent.join(repo_name))
run(["git", "init", repo_name], cwd=str(tmpdir_repoparent))
run(["touch", testfile_filename], cwd=repo_path)
run(["git", "add", testfile_filename], cwd=repo_path)
run(["git", "commit", "-m", "test file for %s" % repo_name], cwd=repo_path)
return repo_path
yield fn
@pytest.fixture
def git_dummy_repo_dir(tmpdir_repoparent, create_git_dummy_repo):
"""Create a git repo with 1 commit, used as a remote."""
return create_git_dummy_repo("dummyrepo")
@pytest.fixture
def config_dir(tmpdir: LEGACY_PATH):
conf_dir = tmpdir.join(".vcspull")
conf_dir.ensure(dir=True)
return conf_dir
|
mit
|
Python
|
8913f5d6a06e0f25d1c8c1a45e0f5b4da8cbf421
|
bump version
|
prateeknepaliya09/rodeo,prateeknepaliya09/rodeo,Cophy08/rodeo,gef756/rodeo,gef756/rodeo,chengjunjian/rodeo,prateeknepaliya09/rodeo,nvoron23/rodeo,nagasuga/rodeo,varunjois111/rodeo,varunjois111/rodeo,prateeknepaliya09/rodeo,Cophy08/rodeo,atsuyim/rodeo,atsuyim/rodeo,chengjunjian/rodeo,gef756/rodeo,nagasuga/rodeo,varunjois111/rodeo,nvoron23/rodeo,chengjunjian/rodeo,gef756/rodeo,chengjunjian/rodeo,Cophy08/rodeo,atsuyim/rodeo,nvoron23/rodeo,Cophy08/rodeo,nagasuga/rodeo,nagasuga/rodeo,nvoron23/rodeo,varunjois111/rodeo,atsuyim/rodeo
|
rodeo/__init__.py
|
rodeo/__init__.py
|
__version__ = "0.1.0"
|
__version__ = "0.0.2"
|
bsd-2-clause
|
Python
|
7eb10376b585e56faad4672959f6654f2500a38d
|
Add `one` as shortcut to `dimensionless_unscaled`
|
kelle/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,kelle/astropy,mhvk/astropy,funbaker/astropy,larrybradley/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,saimn/astropy,DougBurke/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,astropy/astropy,lpsinger/astropy,AustereCuriosity/astropy,funbaker/astropy,funbaker/astropy,kelle/astropy,dhomeier/astropy,pllim/astropy,DougBurke/astropy,StuartLittlefair/astropy,funbaker/astropy,pllim/astropy,DougBurke/astropy,lpsinger/astropy,saimn/astropy,lpsinger/astropy,astropy/astropy,StuartLittlefair/astropy,saimn/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,stargaser/astropy,joergdietrich/astropy,joergdietrich/astropy,stargaser/astropy,StuartLittlefair/astropy,tbabej/astropy,dhomeier/astropy,lpsinger/astropy,kelle/astropy,dhomeier/astropy,saimn/astropy,larrybradley/astropy,dhomeier/astropy,astropy/astropy,stargaser/astropy,MSeifert04/astropy,MSeifert04/astropy,StuartLittlefair/astropy,larrybradley/astropy,joergdietrich/astropy,kelle/astropy,stargaser/astropy,bsipocz/astropy,DougBurke/astropy,mhvk/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,saimn/astropy,tbabej/astropy,mhvk/astropy,astropy/astropy,joergdietrich/astropy,tbabej/astropy,tbabej/astropy,larrybradley/astropy,AustereCuriosity/astropy,lpsinger/astropy,mhvk/astropy,tbabej/astropy,larrybradley/astropy,pllim/astropy,astropy/astropy,bsipocz/astropy,pllim/astropy,bsipocz/astropy,mhvk/astropy,AustereCuriosity/astropy,pllim/astropy,MSeifert04/astropy
|
astropy/units/__init__.py
|
astropy/units/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for defining and converting
between different physical units.
This code is adapted from the `pynbody
<http://code.google.com/p/pynbody/>`_ units module written by Andrew
Pontzen, who has granted the Astropy project permission to use the
code under a BSD license.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from .core import *
from .quantity import *
from . import si
from . import cgs
from . import astrophys
from .si import *
from .astrophys import *
from .cgs import *
from .physical import *
from .equivalencies import *
del bases
one = dimensionless_unscaled
# Enable the set of default units. This notably does *not* include
# Imperial units.
set_enabled_units([si, cgs, astrophys])
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for defining and converting
between different physical units.
This code is adapted from the `pynbody
<http://code.google.com/p/pynbody/>`_ units module written by Andrew
Pontzen, who has granted the Astropy project permission to use the
code under a BSD license.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from .core import *
from .quantity import *
from . import si
from . import cgs
from . import astrophys
from .si import *
from .astrophys import *
from .cgs import *
from .physical import *
from .equivalencies import *
del bases
# Enable the set of default units. This notably does *not* include
# Imperial units.
set_enabled_units([si, cgs, astrophys])
|
bsd-3-clause
|
Python
|
865940bd126c7c45b7c615f751244a46176aca4d
|
Update version to 2.3b2-dev
|
emanuelschuetze/OpenSlides,normanjaeckel/OpenSlides,jwinzer/OpenSlides,OpenSlides/OpenSlides,jwinzer/OpenSlides,tsiegleauq/OpenSlides,jwinzer/OpenSlides,boehlke/OpenSlides,OpenSlides/OpenSlides,ostcar/OpenSlides,boehlke/OpenSlides,emanuelschuetze/OpenSlides,emanuelschuetze/OpenSlides,boehlke/OpenSlides,boehlke/OpenSlides,CatoTH/OpenSlides,FinnStutzenstein/OpenSlides,ostcar/OpenSlides,CatoTH/OpenSlides,normanjaeckel/OpenSlides,tsiegleauq/OpenSlides,normanjaeckel/OpenSlides,FinnStutzenstein/OpenSlides,FinnStutzenstein/OpenSlides,CatoTH/OpenSlides,CatoTH/OpenSlides,emanuelschuetze/OpenSlides,tsiegleauq/OpenSlides,jwinzer/OpenSlides,ostcar/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,FinnStutzenstein/OpenSlides
|
openslides/__init__.py
|
openslides/__init__.py
|
__author__ = 'OpenSlides Team <[email protected]>'
__description__ = 'Presentation and assembly system'
__version__ = '2.3b2-dev'
__license__ = 'MIT'
__url__ = 'https://openslides.org'
args = None
|
__author__ = 'OpenSlides Team <[email protected]>'
__description__ = 'Presentation and assembly system'
__version__ = '2.3b1'
__license__ = 'MIT'
__url__ = 'https://openslides.org'
args = None
|
mit
|
Python
|
8a9fa06c36a89e3fde93059cfbe827506d5b8b62
|
Disable exception logging of status code 500 during testing.
|
BMeu/Orchard,BMeu/Orchard
|
orchard/errors/e500.py
|
orchard/errors/e500.py
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page for internal errors and send a mail to all administrators
information them of this error.
:return: A page explaining the error.
"""
message = ('Time: {time}\n' +
'Request: {method} {path}\n' +
'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
'Raw Agent: {agent}\n\n'
).format(time = datetime.datetime.now(),
method = flask.request.method,
path = flask.request.path,
agent_platform = flask.request.user_agent.platform,
agent_browser = flask.request.user_agent.browser,
agent_browser_version = flask.request.user_agent.version,
agent = flask.request.user_agent.string)
if not flask.current_app.testing: # pragma: no cover.
flask.current_app.logger.exception(message)
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page for internal errors and send a mail to all administrators
information them of this error.
:return: A page explaining the error.
"""
message = ('Time: {time}\n' +
'Request: {method} {path}\n' +
'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
'Raw Agent: {agent}\n\n'
).format(time = datetime.datetime.now(),
method = flask.request.method,
path = flask.request.path,
agent_platform = flask.request.user_agent.platform,
agent_browser = flask.request.user_agent.browser,
agent_browser_version = flask.request.user_agent.version,
agent = flask.request.user_agent.string)
flask.current_app.logger.exception(message)
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
mit
|
Python
|
b8d693a8fd2e0fb9fa8592b9672bc71e874547d3
|
Bump version to 0.1.1
|
tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages
|
fancypages/__init__.py
|
fancypages/__init__.py
|
import os
__version__ = (0, 1, 1, 'alpha', 1)
def get_fancypages_paths(path):
""" Get absolute paths for *path* relative to the project root """
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
def get_apps():
return (
'django_extensions',
# used for image thumbnailing
'sorl.thumbnail',
# framework used for the internal API
'rest_framework',
# provides a convenience layer around model inheritance
# that makes lookup of nested models easier. This is used
# for the content block hierarchy.
'model_utils',
# static file compression and collection
'compressor',
# migration handling
'south',
# package used for twitter block
'twitter_tag',
# actual apps provided by fancypages
'fancypages.assets',
'fancypages',
)
|
import os
__version__ = (0, 1, 0, 'alpha', 1)
def get_fancypages_paths(path):
""" Get absolute paths for *path* relative to the project root """
return [os.path.join(os.path.dirname(os.path.abspath(__file__)), path)]
def get_apps():
return (
'django_extensions',
# used for image thumbnailing
'sorl.thumbnail',
# framework used for the internal API
'rest_framework',
# provides a convenience layer around model inheritance
# that makes lookup of nested models easier. This is used
# for the content block hierarchy.
'model_utils',
# static file compression and collection
'compressor',
# migration handling
'south',
# package used for twitter block
'twitter_tag',
# actual apps provided by fancypages
'fancypages.assets',
'fancypages',
)
|
bsd-3-clause
|
Python
|
b699f950eebbe10c400e9867ce8bead02d2f651c
|
Remove another thing.
|
mithrandi/txacme
|
src/txacme/interfaces.py
|
src/txacme/interfaces.py
|
# -*- coding: utf-8 -*-
"""
Interface definitions for txacme.
"""
from zope.interface import Interface
class ITLSSNI01Responder(Interface):
"""
Configuration for a tls-sni-01 challenge responder.
The actual responder may exist somewhere else, this interface is merely for
an object that knows how to configure it.
"""
def start_responding(server_name):
"""
Start responding for a particular challenge.
.. seealso:: `txacme.util.generate_tls_sni_01_cert`
:param str server_name: The server name to respond to: ie.
`u'<hex>.<hex>.acme.invalid'`.
:rtype: `~twisted.internet.defer.Deferred`
:return: A deferred firing when the given hostname is ready to respond
with the given authorization.
"""
def stop_responding(server_name):
"""
Stop responding for a particular challenge.
May be a noop if a particular responder does not need or implement
explicit cleanup; implementations should not rely on this method always
being called.
:param str server_name: The server name to stop responding for: ie.
`u'<hex>.<hex>.acme.invalid'`.
"""
__all__ = ['ITLSSNI01Responder']
|
# -*- coding: utf-8 -*-
"""
Interface definitions for txacme.
"""
from zope.interface import Interface
class ITLSSNI01Responder(Interface):
"""
Configuration for a tls-sni-01 challenge responder.
The actual responder may exist somewhere else, this interface is merely for
an object that knows how to configure it.
"""
def start_responding(server_name):
"""
Start responding for a particular challenge.
.. seealso:: `txacme.util.generate_tls_sni_01_cert`
:param str server_name: The server name to respond to: ie.
`u'<hex>.<hex>.acme.invalid'`.
:rtype: `~twisted.internet.defer.Deferred`
:return: A deferred firing when the given hostname is ready to respond
with the given authorization.
"""
def stop_responding(server_name):
"""
Stop responding for a particular challenge.
May be a noop if a particular responder does not need or implement
explicit cleanup; implementations should not rely on this method always
being called.
:param str server_name: The server name to stop responding for: ie.
`u'<hex>.<hex>.acme.invalid'`.
"""
class ICertificateStore(Interface):
"""
A store of certificate/keys/chains.
"""
def get(self, server_name):
"""
Retrieve the current PEM objects for the given server name.
:param str server_name: The server name.
:raises KeyError: if the given name does not exist in the store.
:return: ``Deferred[List[:ref:`pem-objects`]]``
"""
def store(self, server_name, pem_objects):
"""
Store PEM objects for the given server name.
Implementations do not have to permit invoking this with a server name
that was not already present in the store.
:param str server_name: The server name to update.
:param pem_objects: A list of :ref:`pem-objects`; must contain exactly
one private key, a certificate corresponding to that private key,
and zero or more chain certificates.
:rtype: ``Deferred``
"""
def as_dict(self):
"""
Get all certificates in the store.
:rtype: ``Deferred[Dict[str, List[:ref:`pem-objects`]]]``
:return: A deferred firing with a dict mapping server names to
:ref:`pem-objects`.
"""
__all__ = ['ITLSSNI01Responder']
|
mit
|
Python
|
8112440223e2e8e4f5d8cb93b28fd846dd59418b
|
Add logout view.
|
repocracy/repocracy,codysoyland/snowman,repocracy/repocracy,repocracy/repocracy,codysoyland/snowman,codysoyland/snowman
|
repocracy/repo/urls.py
|
repocracy/repo/urls.py
|
from django.conf.urls.defaults import *
from django.conf import settings
import os
urlpatterns = patterns('repocracy.repo.views',
url(r'^$', 'home', name='home'),
url(r'^claim/(?P<pk>\d+)/(?P<claim_hash>[a-fA-F\d]{40})/$', 'repo_claim', name='repo_claim'),
url(r'^users/(?P<name>[\-_\d\w\\\.]+)/$', 'repo_owner', name='repo_owner'),
url(r'^repos/(?P<name>[/\-_\d\w\\\.]+)/$', 'repo_detail', name='repo_detail'),
url(r'^post-receive/(?P<pk>\d+)/$', 'post_receive', name='post_receive'),
url(r'^status/(?P<pk>\d+)/$', 'repo_status', name='repo_status'),
)
urlpatterns += patterns('',
# Not a smart way to serve repos (very slow).
# Serve with nginx using static http, or preferably the CGI hgwebdir script
url(r'^hg(?P<path>.*)$', 'django.views.static.serve',
{'show_indexes': True, 'document_root': os.path.join(settings.REPOCRACY_BASE_REPO_PATH, 'public_hg')}),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'redirect_field_name': 'next'}),
)
|
from django.conf.urls.defaults import *
from django.conf import settings
import os
urlpatterns = patterns('repocracy.repo.views',
url(r'^$', 'home', name='home'),
url(r'^claim/(?P<pk>\d+)/(?P<claim_hash>[a-fA-F\d]{40})/$', 'repo_claim', name='repo_claim'),
url(r'^users/(?P<name>[\-_\d\w\\\.]+)/$', 'repo_owner', name='repo_owner'),
url(r'^repos/(?P<name>[/\-_\d\w\\\.]+)/$', 'repo_detail', name='repo_detail'),
url(r'^post-receive/(?P<pk>\d+)/$', 'post_receive', name='post_receive'),
url(r'^status/(?P<pk>\d+)/$', 'repo_status', name='repo_status'),
)
urlpatterns += patterns('',
# Not a smart way to serve repos (very slow).
# Serve with nginx using static http, or preferably the CGI hgwebdir script
url(r'^hg(?P<path>.*)$', 'django.views.static.serve',
{'show_indexes': True, 'document_root': os.path.join(settings.REPOCRACY_BASE_REPO_PATH, 'public_hg')}),
)
|
bsd-3-clause
|
Python
|
8b008968e92cabf1022dff6edb37f38c3aaa5214
|
Update merge_filter.py
|
ctsit/vivo-pump,ctsit/vivo-pump,mconlon17/vivo-pump
|
uf_examples/courses/merge_filter.py
|
uf_examples/courses/merge_filter.py
|
#!/usr/bin/env/python
"""
merge_filter.py -- find the courses in VIVO, and match them to the courses in the source. They
must match on ccn
There are two inputs:
1. Courses in VIVO. Keyed by ccn
2. UF courses in the source. Keyed the same.
There are three cases
1. Course in VIVO and in Source => Update VIVO from source
1. Course in VIVO, not in source => nothing to do
1. Course not in VIVO, is in source => Add to VIVO
See CHANGELOG.md for history
"""
__author__ = "Michael Conlon"
__copyright__ = "Copyright 2016 (c) Michael Conlon"
__license__ = "New BSD License"
__version__ = "0.02"
import sys
from pump.vivopump import read_csv_fp, write_csv_fp, get_vivo_ccn, get_parms
parms = get_parms()
data_in = read_csv_fp(sys.stdin)
print >>sys.stderr, len(data_in)
data_out = {}
vivo_courses = get_vivo_ccn(parms) # get dictionary of course uri keyed by ccn
print >>sys.stderr, 'VIVO courses', len(vivo_courses)
for row, data in data_in.items():
new_data = dict(data)
if data['ccn'] in vivo_courses: # ccn is in vivo and source
new_data['uri'] = vivo_courses[data['ccn']]
else: # key is in source, not in vivo
new_data['uri'] = ''
data_out[row] = new_data
print >>sys.stderr, 'data out', len(data_out)
write_csv_fp(sys.stdout, data_out)
|
#!/usr/bin/env/python
"""
merge_filter.py -- find the courses in VIVO, and match them to the courses in the source. They
must match on ccn
There are two inputs:
1. Courses in VIVO. Keyed by ccn
2. UF courses in the source. Keyed the same.
There are three cases
1. Course in VIVO and in Source => Update VIVO from source
1. Course in VIVO, not in source => nothing to do
1. Course not in VIVO, is in source => Add to VIVO
See CHANGELOG.md for history
"""
__author__ = "Michael Conlon"
__copyright__ = "Copyright 2015 (c) Michael Conlon"
__license__ = "New BSD License"
__version__ = "0.02"
import sys
from pump.vivopump import read_csv_fp, write_csv_fp, get_vivo_ccn, get_parms
parms = get_parms()
data_in = read_csv_fp(sys.stdin)
print >>sys.stderr, len(data_in)
data_out = {}
vivo_courses = get_vivo_ccn(parms) # get dictionary of course uri keyed by ccn
print >>sys.stderr, 'VIVO courses', len(vivo_courses)
for row, data in data_in.items():
new_data = dict(data)
if data['ccn'] in vivo_courses: # ccn is in vivo and source
new_data['uri'] = vivo_courses[data['ccn']]
else: # key is in source, not in vivo
new_data['uri'] = ''
data_out[row] = new_data
print >>sys.stderr, 'data out', len(data_out)
write_csv_fp(sys.stdout, data_out)
|
bsd-2-clause
|
Python
|
672876c172d9bba9e2f29707f9fdd95e0ff10f9f
|
put data early in Redis at hourly recache
|
mctenthij/hortiradar,mctenthij/hortiradar,mctenthij/hortiradar,mctenthij/hortiradar,mctenthij/hortiradar
|
hortiradar/website/refresh_cache.py
|
hortiradar/website/refresh_cache.py
|
import argparse
from datetime import datetime
import flask
import ujson as json
from app import app, get_period
from hortiradar import time_format
from processing import get_cache_key, get_process_top_params, process_details, process_top, redis
def main():
parser = argparse.ArgumentParser(description="Refresh the cache for hortiradar analytics.")
parser.add_argument("--verbose", "-v", action="store_true")
args = parser.parse_args()
# bigger than usual time for when the hourly recache is too slow
cache_time = 120 * 60
groups = ["bloemen", "groente_en_fruit"]
get_time = lambda: datetime.now().strftime("%H:%M")
start_time = get_time()
max_amount = 10
group_data = []
for group in groups:
if args.verbose:
print("Caching group: {}".format(group))
arguments = (group, max_amount, get_process_top_params(group))
key = get_cache_key(process_top, *arguments)
data = process_top(*arguments, force_refresh=True, cache_time=cache_time)
group_data.append((key, data))
redis.set(key, json.dumps(data), ex=cache_time)
with app.test_request_context("/?period=week"):
_, start, end, _ = get_period(flask.request, "week")
params = {"start": start.strftime(time_format), "end": end.strftime(time_format)}
for (_, group) in group_data:
for keyword in group:
prod = keyword["label"]
if args.verbose:
print("Caching keyword: {}".format(prod))
key = get_cache_key(process_details, prod, params)
data = process_details(prod, params, force_refresh=True, cache_time=cache_time)
redis.set(key, json.dumps(data), ex=cache_time)
end_time = get_time()
sync_time = "{} - {}".format(start_time, end_time) if start_time != end_time else start_time
redis.set("sync_time", sync_time)
if __name__ == "__main__":
main()
|
import argparse
from datetime import datetime
import flask
import ujson as json
from app import app, get_period
from hortiradar import time_format
from processing import get_cache_key, get_process_top_params, process_details, process_top, redis
def main():
parser = argparse.ArgumentParser(description="Refresh the cache for hortiradar analytics.")
parser.add_argument("--verbose", "-v", action="store_true")
args = parser.parse_args()
# bigger than usual time for when the hourly recache is too slow
cache_time = 120 * 60
groups = ["bloemen", "groente_en_fruit"]
get_time = lambda: datetime.now().strftime("%H:%M")
start_time = get_time()
max_amount = 10
group_data = []
for group in groups:
if args.verbose:
print("Caching group: {}".format(group))
arguments = (group, max_amount, get_process_top_params(group))
key = get_cache_key(process_top, *arguments)
data = process_top(*arguments, force_refresh=True, cache_time=cache_time)
group_data.append((key, data))
with app.test_request_context("/?period=week"):
_, start, end, _ = get_period(flask.request, "week")
params = {"start": start.strftime(time_format), "end": end.strftime(time_format)}
keyword_data = []
for (_, group) in group_data:
for keyword in group:
prod = keyword["label"]
if args.verbose:
print("Caching keyword: {}".format(prod))
key = get_cache_key(process_details, prod, params)
data = process_details(prod, params, force_refresh=True, cache_time=cache_time)
keyword_data.append((key, data))
end_time = get_time()
# Now populate the cache with the new data
for (key, data) in group_data + keyword_data:
redis.set(key, json.dumps(data), ex=cache_time)
sync_time = "{} - {}".format(start_time, end_time) if start_time != end_time else start_time
redis.set("sync_time", sync_time)
if __name__ == "__main__":
main()
|
apache-2.0
|
Python
|
b6098d5b4578547fea192fe96998dbc43ef9dcb0
|
upgrade values check
|
nitely/http-lazy-headers
|
http_lazy_headers/fields/upgrade.py
|
http_lazy_headers/fields/upgrade.py
|
# -*- coding: utf-8 -*-
from ..shared.utils import constraints
from ..shared.utils import assertions
from ..shared import bases
def upgrade(name, version=None):
return name, version
class ProtocolName:
# http://www.iana.org/assignments/http-upgrade-tokens/http-upgrade-tokens.xml
http = 'HTTP'
tls = 'TLS'
web_socket = 'WebSocket'
h2c = 'h2c'
class Upgrade(bases.MultiHeaderBase):
"""
The ``Upgrade`` header field is intended to\
provide a simple mechanism for transitioning\
from HTTP/1.1 to some other protocol on the\
same connection. A client MAY send a list of\
protocols in the Upgrade header field of a\
request to invite the server to switch to\
one or more of those protocols, in order of\
descending preference, before sending the\
final response. A server MAY ignore a\
received Upgrade header field if it wishes\
to continue using the current protocol on\
that connection. Upgrade cannot be used to\
insist on a protocol change.
Example::
Upgrade([
upgrade(ProtocolName.http, '2.0')
])
Upgrade([
upgrade(ProtocolName.web_socket)
])
Upgrade([
('HTTP', '2.0'),
('SHTTP', '1.3'),
('IRC', '6.9'),
('RTA', 'x11')
])
`Ref. <http://httpwg.org/specs/rfc7230.html#header.upgrade>`_
"""
name = 'upgrade'
def check_value(self, value):
assertions.must_be_tuple_of(value, 2)
protocol, version = value
assertions.must_be_token(protocol)
version is None or assertions.must_be_token(version)
def value_str(self, value):
protocol, version = value
if version:
return '{}/{}'.format(protocol, version)
return protocol
def values_str(self, values):
return ', '.join(
self.value_str(v)
for v in values)
def clean_value(self, raw_value):
try:
protocol_name, protocol_version = raw_value.split('/', 1)
except ValueError:
constraints.must_be_token(raw_value) # Just name
return raw_value, None
else:
constraints.must_be_token(protocol_name)
constraints.must_be_token(protocol_version)
return protocol_name, protocol_version
|
# -*- coding: utf-8 -*-
from ..shared.utils import constraints
from ..shared import bases
def upgrade(name, version=None):
return name, version
class ProtocolName:
# http://www.iana.org/assignments/http-upgrade-tokens/http-upgrade-tokens.xml
http = 'HTTP'
tls = 'TLS'
web_socket = 'WebSocket'
h2c = 'h2c'
class Upgrade(bases.MultiHeaderBase):
"""
The ``Upgrade`` header field is intended to\
provide a simple mechanism for transitioning\
from HTTP/1.1 to some other protocol on the\
same connection. A client MAY send a list of\
protocols in the Upgrade header field of a\
request to invite the server to switch to\
one or more of those protocols, in order of\
descending preference, before sending the\
final response. A server MAY ignore a\
received Upgrade header field if it wishes\
to continue using the current protocol on\
that connection. Upgrade cannot be used to\
insist on a protocol change.
Example::
Upgrade([
upgrade(ProtocolName.http, '2.0')
])
Upgrade([
upgrade(ProtocolName.web_socket)
])
Upgrade([
('HTTP', '2.0'),
('SHTTP', '1.3'),
('IRC', '6.9'),
('RTA', 'x11')
])
`Ref. <http://httpwg.org/specs/rfc7230.html#header.upgrade>`_
"""
name = 'upgrade'
def value_str(self, value):
protocol, version = value
if version:
return '{}/{}'.format(protocol, version)
return protocol
def values_str(self, values):
return ', '.join(
self.value_str(v)
for v in values)
def clean_value(self, raw_value):
try:
protocol_name, protocol_version = raw_value.split('/', 1)
except ValueError:
constraints.must_be_token(raw_value) # Just name
return raw_value, None
else:
constraints.must_be_token(protocol_name)
constraints.must_be_token(protocol_version)
return protocol_name, protocol_version
|
mit
|
Python
|
471e0f4e91eb4513315193ce2b2b0f13e2c9724c
|
remove stray "
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/util/datadog/gauges.py
|
corehq/util/datadog/gauges.py
|
from functools import wraps
from celery.task import periodic_task
from corehq.util.datadog import statsd
from corehq.util.soft_assert import soft_assert
def datadog_gauge_task(name, fn, run_every, enforce_prefix='commcare'):
"""
helper for easily registering datadog gauges to run periodically
To update a datadog gauge on a schedule based on the result of a function
just add to your app's tasks.py:
my_calculation = datadog_gauge_task('my.datadog.metric', my_calculation_function,
run_every=crontab(minute=0))
"""
soft_assert(fail_if_debug=True).call(
not enforce_prefix or name.split('.')[0] == enforce_prefix,
"Did you mean to call your gauge 'commcare.{}'? "
"If you're sure you want to forgo the prefix, you can "
"pass enforce_prefix=None".format(name))
datadog_gauge = _DatadogGauge(name, fn, run_every)
return datadog_gauge.periodic_task()
class _DatadogGauge(object):
def __init__(self, name, fn, run_every):
self.name = name
self.fn = fn
self.run_every = run_every
def periodic_task(self):
@periodic_task('background_queue', run_every=self.run_every,
acks_late=True, ignore_result=True)
@wraps(self.fn)
def inner(*args, **kwargs):
statsd.gauge(self.name, self.fn(*args, **kwargs))
return inner
|
from functools import wraps
from celery.task import periodic_task
from corehq.util.datadog import statsd
from corehq.util.soft_assert import soft_assert
def datadog_gauge_task(name, fn, run_every, enforce_prefix='commcare'):
""""
helper for easily registering datadog gauges to run periodically
To update a datadog gauge on a schedule based on the result of a function
just add to your app's tasks.py:
my_calculation = datadog_gauge_task('my.datadog.metric', my_calculation_function,
run_every=crontab(minute=0))
"""
soft_assert(fail_if_debug=True).call(
not enforce_prefix or name.split('.')[0] == enforce_prefix,
"Did you mean to call your gauge 'commcare.{}'? "
"If you're sure you want to forgo the prefix, you can "
"pass enforce_prefix=None".format(name))
datadog_gauge = _DatadogGauge(name, fn, run_every)
return datadog_gauge.periodic_task()
class _DatadogGauge(object):
def __init__(self, name, fn, run_every):
self.name = name
self.fn = fn
self.run_every = run_every
def periodic_task(self):
@periodic_task('background_queue', run_every=self.run_every,
acks_late=True, ignore_result=True)
@wraps(self.fn)
def inner(*args, **kwargs):
statsd.gauge(self.name, self.fn(*args, **kwargs))
return inner
|
bsd-3-clause
|
Python
|
213ddc9ffbb171c17c051c6394baa0499abfc820
|
fix UnboundLocalError
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
corehq/util/tests/test_log.py
|
corehq/util/tests/test_log.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import six
from django.test import SimpleTestCase
from ..log import clean_exception
class TestLogging(SimpleTestCase):
def test_bad_traceback(self):
result = "JJackson's SSN: 555-55-5555"
exception = None
try:
# copied from couchdbkit/client.py
assert isinstance(result, dict), 'received an invalid ' \
'response of type %s: %s' % (type(result), repr(result))
except AssertionError as e:
exception = e
self.assertIn(result, six.text_type(exception))
self.assertNotIn(result, six.text_type(clean_exception(exception)))
def test_that_I_didnt_break_anything(self):
exception = AssertionError("foo")
cleaned_exception = clean_exception(exception)
self.assertEqual(exception.__class__, cleaned_exception.__class__)
self.assertEqual(six.text_type(exception), six.text_type(cleaned_exception))
|
from __future__ import absolute_import
from __future__ import unicode_literals
import six
from django.test import SimpleTestCase
from ..log import clean_exception
class TestLogging(SimpleTestCase):
def test_bad_traceback(self):
result = "JJackson's SSN: 555-55-5555"
try:
# copied from couchdbkit/client.py
assert isinstance(result, dict), 'received an invalid ' \
'response of type %s: %s' % (type(result), repr(result))
except AssertionError as e:
pass
self.assertIn(result, six.text_type(e))
self.assertNotIn(result, six.text_type(clean_exception(e)))
def test_that_I_didnt_break_anything(self):
exception = AssertionError("foo")
cleaned_exception = clean_exception(exception)
self.assertEqual(exception.__class__, cleaned_exception.__class__)
self.assertEqual(six.text_type(exception), six.text_type(cleaned_exception))
|
bsd-3-clause
|
Python
|
24b85059dcc5c17d21011bc7d1975f519e09837d
|
Improve formatting
|
hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem
|
netsecus/__init__.py
|
netsecus/__init__.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
import imaplib
import logging
import time
import helper
import rules
# useful for debugging: $ openssl s_client -crlf -connect imap.gmail.com:993
#
# core functions
#
def main():
# patching imaplib
imaplib.Commands["MOVE"] = ("SELECTED",)
imaplib.Commands["IDLE"] = ("AUTH", "SELECTED",)
imaplib.Commands["DONE"] = ("AUTH", "SELECTED",)
helper.setupLogging()
imapmail = loginIMAP(
helper.getConfigValue("login", "imapmail_server"),
helper.getConfigValue("login", "mail_address"),
helper.getConfigValue("login", "mail_password"))
imapmail._command("IDLE")
if "idling" in imapmail.readline().decode("utf-8"):
logging.debug("Server supports IDLE.")
firstRun = True
while True:
if firstRun or "EXISTS" in imapmail.readline().decode("utf-8"):
imapmail._command("DONE")
imapmail.readline()
ruleLoop(imapmail)
imapmail._command("IDLE")
logging.debug("Entering IDLE state.")
firstRun = False
else:
logging.debug("Server lacks support for IDLE... Falling back to delay.")
while True:
ruleLoop(imapmail)
time.sleep(helper.getConfigValue("settings", "delay"))
def ruleLoop(imapmail):
for rule in helper.getConfigValue("rules"):
processRule(imapmail, rule)
def processRule(imapmail, rule):
logging.debug("**** rule: '%s'" % rule["title"])
mails = []
for step in rule["steps"]:
logging.debug("* exec: %s" % step[0])
mails = getattr(rules, step[0])(imapmail, mails, *step[1:])
if not isinstance(mails, list):
mails = [mails]
if not mails:
logging.debug("* ret no mails")
break
logging.debug("* ret %d mail(s)" % len(mails))
logging.debug("**** done: '%s'" % rule["title"])
def loginIMAP(server, address, password):
imapmail = imaplib.IMAP4_SSL(server)
imapmail.login(address, password)
imapmail.select()
logging.info("IMAP login (%s on %s)" % (address, server))
return imapmail
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from __future__ import unicode_literals
import imaplib
import logging
import time
import helper
import rules
# useful for debugging: $ openssl s_client -crlf -connect imap.gmail.com:993
#
# core functions
#
def main():
# patching imaplib
imaplib.Commands["MOVE"] = ("SELECTED",)
imaplib.Commands["IDLE"] = ("AUTH", "SELECTED",)
imaplib.Commands["DONE"] = ("AUTH", "SELECTED",)
helper.setupLogging()
imapmail = loginIMAP(helper.getConfigValue("login", "imapmail_server"), helper.getConfigValue(
"login", "mail_address"), helper.getConfigValue("login", "mail_password"))
imapmail._command("IDLE")
if "idling" in imapmail.readline().decode("utf-8"):
logging.debug("Server supports IDLE.")
firstRun = True
while True:
if firstRun or "EXISTS" in imapmail.readline().decode("utf-8"):
imapmail._command("DONE")
imapmail.readline()
ruleLoop(imapmail)
imapmail._command("IDLE")
logging.debug("Entering IDLE state.")
firstRun = False
else:
logging.debug("Server lacks support for IDLE... Falling back to delay.")
while True:
ruleLoop(imapmail)
time.sleep(helper.getConfigValue("settings", "delay"))
def ruleLoop(imapmail):
for rule in helper.getConfigValue("rules"):
processRule(imapmail, rule)
def processRule(imapmail, rule):
logging.debug("**** rule: '%s'" % rule["title"])
mails = []
for step in rule["steps"]:
logging.debug("* exec: %s" % step[0])
mails = getattr(rules, step[0])(imapmail, mails, *step[1:])
if not isinstance(mails, list):
mails = [mails]
if not mails:
logging.debug("* ret no mails")
break
logging.debug("* ret %d mail(s)" % len(mails))
logging.debug("**** done: '%s'" % rule["title"])
def loginIMAP(server, address, password):
imapmail = imaplib.IMAP4_SSL(server)
imapmail.login(address, password)
imapmail.select()
logging.info("IMAP login (%s on %s)" % (address, server))
return imapmail
if __name__ == "__main__":
main()
|
mit
|
Python
|
c492c42639f7a487dc27a95a5a785dd9c62ecdb7
|
Change project status formatting
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
clowder/utility/print_utilities.py
|
clowder/utility/print_utilities.py
|
"""Print utilities"""
import os
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output)
print(current_ref_output + ' ' + path_output)
|
"""Print utilities"""
import os
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' @ ' + path_output)
print(current_ref_output)
|
mit
|
Python
|
70021d5df6beb0e8eb5b78a6484cbb650a7a1fb6
|
fix docs
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
cupyx/distributed/__init__.py
|
cupyx/distributed/__init__.py
|
from cupyx.distributed._init import init_process_group # NOQA
from cupyx.distributed._comm import Backend # NOQA
from cupyx.distributed._nccl_comm import NCCLBackend # NOQA
|
from cupyx.distributed._init import init_process_group # NOQA
from cupyx.distributed._nccl_comm import NCCLBackend # NOQA
|
mit
|
Python
|
3061affd313aff39f722e6e5846a3191d6592a7d
|
fix FaqQuestionSitemap URLs
|
edoburu/django-fluent-faq,edoburu/django-fluent-faq
|
fluent_faq/sitemaps.py
|
fluent_faq/sitemaps.py
|
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import NoReverseMatch
from fluent_faq.models import FaqCategory, FaqQuestion
from fluent_faq.urlresolvers import faq_reverse
def _url_patterns_installed():
# This module can use normal Django urls.py URLs, or mount the "FaqPage" in the page tree.
# Check whether the URLs are installed, so the `sitemap.xml` can be generated nevertheless.
# This issue will pop up elsewhere too, so there is no need to raise an error here.
try:
faq_reverse('faqcategory_detail', kwargs={'slug': 'category'}, ignore_multiple=True)
except NoReverseMatch:
return False
else:
return True
class FaqQuestionSitemap(Sitemap):
"""
Sitemap for FAQ questions
"""
def items(self):
if not _url_patterns_installed():
return None
return FaqQuestion.objects.published().select_related('category')
def lastmod(self, question):
"""Return the last modification of the object."""
return question.modification_date
def location(self, question):
"""Return url of an question."""
return faq_reverse('faqquestion_detail', kwargs={'cat_slug': question.category.slug, 'slug': question.slug}, ignore_multiple=True)
class FaqCategorySitemap(Sitemap):
"""
Sitemap for FAQ categories.
"""
def items(self):
if not _url_patterns_installed():
return None
return FaqCategory.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an category."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
|
from django.contrib.sitemaps import Sitemap
from django.core.urlresolvers import NoReverseMatch
from fluent_faq.models import FaqCategory, FaqQuestion
from fluent_faq.urlresolvers import faq_reverse
def _url_patterns_installed():
# This module can use normal Django urls.py URLs, or mount the "FaqPage" in the page tree.
# Check whether the URLs are installed, so the `sitemap.xml` can be generated nevertheless.
# This issue will pop up elsewhere too, so there is no need to raise an error here.
try:
faq_reverse('faqcategory_detail', kwargs={'slug': 'category'}, ignore_multiple=True)
except NoReverseMatch:
return False
else:
return True
class FaqQuestionSitemap(Sitemap):
"""
Sitemap for FAQ questions
"""
def items(self):
if not _url_patterns_installed():
return None
return FaqQuestion.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an question."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
class FaqCategorySitemap(Sitemap):
"""
Sitemap for FAQ categories.
"""
def items(self):
if not _url_patterns_installed():
return None
return FaqCategory.objects.published()
def lastmod(self, category):
"""Return the last modification of the object."""
return category.modification_date
def location(self, category):
"""Return url of an category."""
return faq_reverse('faqcategory_detail', kwargs={'slug': category.slug}, ignore_multiple=True)
|
apache-2.0
|
Python
|
79e7ef509e4757c29d6fa0bd9161410aadbd305a
|
fix os.path.expand [sic] typo, and refactor
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
salt/utils/xdg.py
|
salt/utils/xdg.py
|
# -*- coding: utf-8 -*-
'''
Create an XDG function to get the config dir
'''
import os
def xdg_config_dir(config_dir=None):
'''
Check xdg locations for config files
'''
xdg_config = os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
xdg_config_dir = os.path.join(xdg_config, 'salt')
if os.path.isdir(xdg_config_dir):
return xdg_config_dir
else:
if config_dir is None:
config_dir = '~/.'
else:
config_dir = os.path.join('~/.', config_dir)
return os.path.expanduser(config_dir)
|
# -*- coding: utf-8 -*-
'''
Create an XDG function to get the config dir
'''
import os
def xdg_config_dir(config_dir=None):
'''
Check xdg locations for config files
'''
xdg_config = os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
xdg_config_dir = os.path.join(xdg_config, 'salt')
if os.path.isdir(xdg_config_dir):
return xdg_config_dir
else:
if config_dir is None:
return os.path.expanduser('~/.')
else:
return os.path.expand(os.path.join('~/.', config_dir))
|
apache-2.0
|
Python
|
e6db95cce0239d9e8ce33aec5cf21aa1bd19df03
|
Add __str__ method
|
jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager
|
imagersite/imager_profile/models.py
|
imagersite/imager_profile/models.py
|
import six
from django.db import models
from django.contrib.auth.models import User
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
fav_camera = models.CharField(max_length=30)
address = models.CharField(max_length=100)
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
def __str__(self):
return "{}'s profile".format(self.user.username)
|
from django.db import models
from django.contrib.auth.models import User
import six
@six.python_2_unicode_compatible
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
fav_camera = models.CharField(max_length=30)
address = models.CharField(max_length=100)
web_url = models.URLField()
type_photography = models.CharField(max_length=30)
|
mit
|
Python
|
9a21c446f1236e1b89663c991ea354d8e473b3b9
|
Fix a copyright and pep8 issues in lanzano_luzi_2019_test.py
|
gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine
|
openquake/hazardlib/tests/gsim/lanzano_luzi_2019_test.py
|
openquake/hazardlib/tests/gsim/lanzano_luzi_2019_test.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2019 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Implements the tests for the set of GMPE classes included within the GMPE
of Lanzano and Luzi (2019). Test tables were created by an excel spreadsheet
that calculates expected values provided by the original authors.
"""
from openquake.hazardlib.gsim.lanzano_luzi_2019 import (LanzanoLuzi2019shallow,
LanzanoLuzi2019deep)
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
# Discrepancy percentages to be applied to all tests
class LanzanoLuzi2019shallowTestCase(BaseGSIMTestCase):
    """
    Tests the Lanzano and Luzi (2019) GMPE for the case of shallow events.
    """
    GSIM_CLASS = LanzanoLuzi2019shallow
    # File containing the results for the Mean
    MEAN_FILE = "LL19/LanzanoLuzi2019shallow_MEAN.csv"
    # File containing the results for the Total Standard Deviation
    STD_FILE = "LL19/LanzanoLuzi2019shallow_STD_TOTAL.csv"

    def test_mean(self):
        # Compare computed means against the author-provided table.
        self.check(self.MEAN_FILE,
                   max_discrep_percentage=0.1)

    def test_std_total(self):
        # Compare computed total sigma against the author-provided table.
        self.check(self.STD_FILE,
                   max_discrep_percentage=0.1)
class LanzanoLuzi2019deepTestCase(BaseGSIMTestCase):
    """
    Tests the Lanzano and Luzi (2019) GMPE for the case of deep events.
    """
    GSIM_CLASS = LanzanoLuzi2019deep
    # Expected-value tables for the mean and the total standard deviation.
    MEAN_FILE = "LL19/LanzanoLuzi2019deep_MEAN.csv"
    STD_FILE = "LL19/LanzanoLuzi2019deep_STD_TOTAL.csv"

    def test_mean(self):
        # Compare computed means against the author-provided table.
        self.check(self.MEAN_FILE,
                   max_discrep_percentage=0.1)

    def test_std_total(self):
        # Compare computed total sigma against the author-provided table.
        self.check(self.STD_FILE,
                   max_discrep_percentage=0.1)
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2015-2019 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Implements the tests for the set of GMPE classes included within the GMPE
of Lanzano and Luzi (2019). Test tables were created by an excel spreadsheet
that calculates expected values provided by the original authors.
"""
from openquake.hazardlib.gsim.lanzano_luzi_2019 import (LanzanoLuzi2019shallow,
LanzanoLuzi2019deep)
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
# Discrepancy percentages to be applied to all tests
class LanzanoLuzi2019shallowTestCase(BaseGSIMTestCase):
    """
    Tests the Lanzano and Luzi (2019) GMPE for the case of shallow events.
    """
    GSIM_CLASS = LanzanoLuzi2019shallow
    # File containing the results for the Mean
    MEAN_FILE = "LL19/LanzanoLuzi2019shallow_MEAN.csv"
    # File containing the results for the Total Standard Deviation
    STD_FILE = "LL19/LanzanoLuzi2019shallow_STD_TOTAL.csv"

    def test_mean(self):
        # Compare computed means against the author-provided table.
        self.check(self.MEAN_FILE,
                   max_discrep_percentage=0.1)

    def test_std_total(self):
        # Compare computed total sigma against the author-provided table.
        self.check(self.STD_FILE,
                   max_discrep_percentage=0.1)
class LanzanoLuzi2019deepTestCase(BaseGSIMTestCase):
    """
    Tests the Lanzano and Luzi (2019) GMPE for the case of deep events.
    """
    GSIM_CLASS = LanzanoLuzi2019deep
    # Expected-value tables for the mean and the total standard deviation.
    MEAN_FILE = "LL19/LanzanoLuzi2019deep_MEAN.csv"
    STD_FILE = "LL19/LanzanoLuzi2019deep_STD_TOTAL.csv"

    def test_mean(self):
        # Compare computed means against the author-provided table.
        self.check(self.MEAN_FILE,
                   max_discrep_percentage=0.1)

    def test_std_total(self):
        # Compare computed total sigma against the author-provided table.
        self.check(self.STD_FILE,
                   max_discrep_percentage=0.1)
|
agpl-3.0
|
Python
|
03ed43d7d8867ba066d9eea3b3fc7cfe557a31d9
|
Use C++
|
encukou/py3c,encukou/py3c,encukou/py3c
|
test/setup.py
|
test/setup.py
|
from distutils.core import setup, Extension

# C extension that exercises the py3c compatibility headers.
test_py3c_module = Extension(
    'test_py3c',
    sources=['test_py3c.c'],
    include_dirs=['../include'],
    # NOTE(review): '-l mylib' is a linker option passed as a *compile*
    # argument; libraries are normally declared via `libraries=` -- confirm.
    extra_compile_args = ['--std=c++0x', '-l mylib'],
)
setup_args = dict(
    name='test_py3c',
    version='0.0',
    description = '',
    ext_modules = [test_py3c_module]
)
if __name__ == '__main__':
    setup(**setup_args)
|
from distutils.core import setup, Extension

# C extension that exercises the py3c compatibility headers.
test_py3c_module = Extension(
    'test_py3c',
    sources=['test_py3c.c'],
    include_dirs=['../include'],
)
setup_args = dict(
    name='test_py3c',
    version='0.0',
    description = '',
    ext_modules = [test_py3c_module]
)
if __name__ == '__main__':
    setup(**setup_args)
|
mit
|
Python
|
2543709c204f1dd6aca5d012e7c28193631bb74c
|
Use postgres standard env vars
|
eirki/gargbot_3000,eirki/gargbot_3000,eirki/gargbot_3000,eirki/gargbot_3000
|
gargbot_3000/config.py
|
gargbot_3000/config.py
|
#! /usr/bin/env python3.6
# coding: utf-8
import os
import datetime as dt
from pathlib import Path
import pytz
from dotenv import load_dotenv

# Load settings from a local .env file, if present; missing required
# variables raise KeyError at import time (fail fast).
env_path = Path(".") / ".env"
load_dotenv(dotenv_path=env_path)

# Slack credentials and bot identity.
slack_verification_token = os.environ["slack_verification_token"]
slack_bot_user_token = os.environ["slack_bot_user_token"]
bot_id = os.environ["bot_id"]
bot_name = os.environ["bot_name"]

# Working directory (defaults to the current directory).
home = Path(os.getenv("home_folder", os.getcwd()))

# Database connection -- standard Postgres environment variable names.
db_name = os.environ["POSTGRES_DB"]
db_user = os.environ["POSTGRES_USER"]
db_password = os.environ["POSTGRES_PASSWORD"]
db_host = os.environ["POSTGRES_HOST"]

# Dropbox storage for pictures.
dropbox_token = os.environ["dropbox_token"]
dbx_pic_folder = os.environ["dbx_pic_folder"]

# Timezone and Slack channels.
tz = pytz.timezone(os.environ["tz"])
test_channel = os.environ["test_channel"]
main_channel = os.environ["main_channel"]

# Countdown feature configuration.
countdown_message = os.environ["countdown_message"]
ongoing_message = os.environ["ongoing_message"]
finished_message = os.environ["finished_message"]
forum_url = os.environ["forum_url"]
countdown_date = dt.datetime.fromtimestamp(int(os.environ["countdown_date"]), tz=tz)
countdown_args = os.environ["countdown_args"].split(", ")
|
#! /usr/bin/env python3.6
# coding: utf-8
import os
import datetime as dt
from pathlib import Path
import pytz
from dotenv import load_dotenv

# Load settings from a local .env file, if present; missing required
# variables raise KeyError at import time (fail fast).
env_path = Path(".") / ".env"
load_dotenv(dotenv_path=env_path)
slack_verification_token = os.environ["slack_verification_token"]
slack_bot_user_token = os.environ["slack_bot_user_token"]
bot_id = os.environ["bot_id"]
bot_name = os.environ["bot_name"]
home = Path(os.getenv("home_folder", os.getcwd()))
# NOTE(review): ad-hoc db_* variable names; the conventional Postgres env
# names (POSTGRES_DB, POSTGRES_USER, ...) would interoperate better.
db_name = os.environ["db_name"]
db_user = os.environ["db_user"]
db_password = os.environ["db_password"]
db_host = os.environ["db_host"]
dropbox_token = os.environ["dropbox_token"]
dbx_pic_folder = os.environ["dbx_pic_folder"]
tz = pytz.timezone(os.environ["tz"])
test_channel = os.environ["test_channel"]
main_channel = os.environ["main_channel"]
countdown_message = os.environ["countdown_message"]
ongoing_message = os.environ["ongoing_message"]
finished_message = os.environ["finished_message"]
forum_url = os.environ["forum_url"]
countdown_date = dt.datetime.fromtimestamp(int(os.environ["countdown_date"]), tz=tz)
countdown_args = os.environ["countdown_args"].split(", ")
|
mit
|
Python
|
281fd926786186e8f0b1ebc7d8aeb1c362310fc1
|
Remove unused variable
|
godlygeek/LightRender,MaddAddaM/LightRender
|
viewer.py
|
viewer.py
|
import sys
import pygame
import pygame.locals

pygame.init()
# Square window; `width` also bounds the y axis in get_screen_pos.
size = width, height = 575, 575
screen = pygame.display.set_mode(size)
# Set True to draw each light's index next to it.
label_lights = False
def up_row(x_offset):
    """Yield 20 (x, y) grid points climbing a zig-zag strip from y=0 to y=9.5.

    Odd-indexed lights sit half a unit to the right of even-indexed ones.
    """
    for step in range(20):
        yield x_offset + 0.5 * (step % 2), 0.5 * step
def down_row(x_offset):
    """Yield 20 (x, y) grid points descending a zig-zag strip from y=9.5 to y=0.

    The lateral offset pattern is the mirror of up_row, so adjacent strips
    interlock.
    """
    for step in range(20):
        yield x_offset + 0.5 * ((step + 1) % 2), 9.5 - 0.5 * step
# Build the physical layout: 5 pairs of (up, down) zig-zag strips of
# 20 lights each -> 200 lights total.
pos_list = []
for strip_pair in range(5):
    pos_list += list(up_row(2 * strip_pair))
    pos_list += list(down_row(2 * strip_pair + 1))
# Light index -> (x, y) grid position.
positions = {i: v for i, v in enumerate(pos_list)}
def get_color(i):
    """Map light index 0..199 onto a blue-to-red gradient (no green)."""
    red_level = 255 * (i / 199.0)
    blue_level = 255 * ((199 - i) / 199.0)
    return (int(red_level), 0, int(blue_level))
def get_screen_pos(x, y):
    """Map a grid coordinate to a pixel position.

    The window origin is the top-left corner, so the y axis is flipped;
    both axes get a 50px margin and a 50px-per-unit scale. Reads the
    module-level ``width``.
    """
    pixel_x = int(50 * x + 50)
    pixel_y = int(width - 50 - 50 * y)
    return (pixel_x, pixel_y)
myfont = pygame.font.SysFont("monospace", 15)
import struct
data = open('Resources/video.bin', 'rb')
import time
# Playback loop: every frame reads one RGB byte-triple per light from the
# pre-rendered video file and draws a circle at the light's screen position.
# NOTE(review): reaching EOF makes ord() fail -- presumably the file is
# expected to be long enough; confirm.
while True:
    for event in pygame.event.get():
        if event.type == pygame.locals.QUIT:
            sys.exit(0)
    for k, v in positions.items():
        x, y = v
        pos = get_screen_pos(x, y)
        r = ord(data.read(1))
        g = ord(data.read(1))
        b = ord(data.read(1))
        color = (r,g,b)
        pygame.draw.circle(screen, color, pos, 10)
        if label_lights:
            label = myfont.render(str(k), 1, (255, 255, 255))
            screen.blit(label, pos)
    pygame.display.update()
    time.sleep(0.05)  # ~20 frames per second
|
import sys
import pygame
import pygame.locals

pygame.init()
# Square window; `width` also bounds the y axis in get_screen_pos.
size = width, height = 575, 575
screen = pygame.display.set_mode(size)
# Set True to draw each light's index next to it.
label_lights = False
def up_row(x_offset):
    # Yield 20 (x, y) points climbing a zig-zag strip from y=0 to y=9.5.
    for i in range(20):
        x = x_offset + (i % 2) * 0.5
        y = i * 0.5
        yield x, y
def down_row(x_offset):
    # Yield 20 (x, y) points descending a zig-zag strip from y=9.5 to y=0.
    for i in range(20):
        x = x_offset + ((i+ 1) % 2) * 0.5
        y = 9.5 - (i * 0.5)
        yield x, y
# Build the physical layout: 5 pairs of (up, down) zig-zag strips,
# 20 lights each -> 200 lights total.
pos_list = []
for strip_pair in range(5):
    pos_list += list(up_row(2 * strip_pair))
    pos_list += list(down_row(2 * strip_pair + 1))
positions = {i: v for i, v in enumerate(pos_list)}
# NOTE(review): `red` is never read (get_color shadows the name locally);
# it can be removed.
red = (255, 0, 0)
def get_color(i):
    # Map light index 0..199 onto a blue-to-red gradient (no green).
    red = 255 * (i / 199.0)
    green = 0
    blue = 255 * ((199-i) / 199.0)
    c = (int(red), int(green), int(blue))
    return c
def get_screen_pos(x, y):
    # upper_left is 0,0
    # bottom left is 0, width
    # Scale by 50px per grid unit with a 50px margin; flip the y axis.
    scaled_x = (int)(50*x+50)
    scaled_y = (int)(width - 50 - (50*y))
    return (scaled_x, scaled_y)
myfont = pygame.font.SysFont("monospace", 15)
import struct
data = open('Resources/video.bin', 'rb')
import time
# Playback loop: every frame reads one RGB byte-triple per light from the
# pre-rendered video file and draws a circle at the light's screen position.
while True:
    for event in pygame.event.get():
        if event.type == pygame.locals.QUIT:
            sys.exit(0)
    for k, v in positions.items():
        x, y = v
        pos = get_screen_pos(x, y)
        r = ord(data.read(1))
        g = ord(data.read(1))
        b = ord(data.read(1))
        color = (r,g,b)
        pygame.draw.circle(screen, color, pos, 10)
        if label_lights:
            label = myfont.render(str(k), 1, (255, 255, 255))
            screen.blit(label, pos)
    pygame.display.update()
    time.sleep(0.05)  # ~20 frames per second
|
mit
|
Python
|
2321dd5b0afedb9bb4a6e894149dd636174adf2c
|
Bump version to 4.0.1
|
oasis-open/cti-stix-elevator
|
stix2elevator/version.py
|
stix2elevator/version.py
|
__version__ = "4.0.1"
|
__version__ = "4.0.0"
|
bsd-3-clause
|
Python
|
282ac04e49c6adef237ea30fa4dcae64e6f959d8
|
Support for non-blank server roots
|
mgrouchy/django-stronghold,SunilMohanAdapa/django-stronghold,SunilMohanAdapa/django-stronghold
|
stronghold/middleware.py
|
stronghold/middleware.py
|
from django.contrib.auth.decorators import login_required
from stronghold import conf
class LoginRequiredMiddleware(object):
    """
    Force all views to use login required
    View is deemed to be public if the @public decorator is applied to the view
    View is also deemed to be Public if listed in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex
    """
    def __init__(self, *args, **kwargs):
        # Compiled regexes of URLs exempt from authentication.
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
        """Wrap view_func in login_required unless the request/view is exempt."""
        # if request is authenticated, dont process it
        if request.user.is_authenticated():
            return None
        # if its a public view, don't process it
        is_public = getattr(view_func, 'STRONGHOLD_IS_PUBLIC', None)
        if is_public:
            return None
        # if this view matches a whitelisted regex, don't process it
        # (path_info excludes any script prefix, so matching works when the
        # app is served from a non-blank server root)
        for view_url in self.public_view_urls:
            if view_url.match(request.path_info):
                return None
        return login_required(view_func)(request, *view_args, **view_kwargs)
|
from django.contrib.auth.decorators import login_required
from stronghold import conf
class LoginRequiredMiddleware(object):
    """
    Force all views to use login required
    View is deemed to be public if the @public decorator is applied to the view
    View is also deemed to be Public if listed in django settings in the
    STRONGHOLD_PUBLIC_URLS dictionary
    each url in STRONGHOLD_PUBLIC_URLS must be a valid regex
    """
    def __init__(self, *args, **kwargs):
        # Compiled regexes of URLs exempt from authentication.
        self.public_view_urls = getattr(conf, 'STRONGHOLD_PUBLIC_URLS', ())

    def process_view(self, request, view_func, view_args, view_kwargs):
        """Wrap view_func in login_required unless the request/view is exempt."""
        # if request is authenticated, dont process it
        if request.user.is_authenticated():
            return None
        # if its a public view, don't process it
        is_public = getattr(view_func, 'STRONGHOLD_IS_PUBLIC', None)
        if is_public:
            return None
        # if this view matches a whitelisted regex, don't process it
        # NOTE(review): request.path includes the script prefix; using
        # request.path_info would also support non-blank server roots.
        for view_url in self.public_view_urls:
            if view_url.match(request.path):
                return None
        return login_required(view_func)(request, *view_args, **view_kwargs)
|
mit
|
Python
|
35de4045bc30a1ee0e9aaa17f0b3f370ad95d6c8
|
Bump (#16)
|
Netflix-Skunkworks/swag-client
|
swag_client/__about__.py
|
swag_client/__about__.py
|
from __future__ import absolute_import, division, print_function

# Package metadata; single source of truth for packaging and docs.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "swag-client"
__summary__ = ("Cloud multi-account metadata management tool.")
__uri__ = "https://github.com/Netflix-Skunkworks/swag-client"
__version__ = "0.2.5"
__author__ = "The swag developers"
__email__ = "[email protected]"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2017 {0}".format(__author__)
|
from __future__ import absolute_import, division, print_function

# Package metadata; single source of truth for packaging and docs.
__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "swag-client"
__summary__ = ("Cloud multi-account metadata management tool.")
__uri__ = "https://github.com/Netflix-Skunkworks/swag-client"
__version__ = "0.2.3"
__author__ = "The swag developers"
__email__ = "[email protected]"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2017 {0}".format(__author__)
|
apache-2.0
|
Python
|
67614fb784dca6166b112ddc60254ef5e493541d
|
Change 9/5 to 1.8
|
enzo-code/RPI.GPIO-W-D
|
wfinal.py
|
wfinal.py
|
import RPi.GPIO as GPIO
import pywapi
import string
import time

# LED pins, BCM numbering.
channels = [4, 7, 8, 9, 10, 14, 15, 17, 18, 22, 23, 24, 25]
GPIO.setwarnings(True)
GPIO.setmode(GPIO.BCM)
GPIO.setup(channels, GPIO.OUT)
GPIO.output(channels, 0)  # start with every LED off

# Fetch current conditions for ZIP 33020 from weather.com.
weather = pywapi.get_weather_from_weather_com('33020')
temperature = int(weather['current_conditions']['temperature'])
temp_f = temperature*(1.8)+32  # Celsius -> Fahrenheit
humidity = int(weather['current_conditions']['humidity'])
cc = (weather['current_conditions']['text'].lower())

# Humidity bands: one LED each for high / medium / low.
if humidity >= 80:
    GPIO.output(7, 1)
if humidity <= 79 and humidity >= 60:
    GPIO.output(18, 1)
if humidity <= 59:
    GPIO.output(25, 1)

# Temperature bands (Fahrenheit).
if temp_f >= 90:
    GPIO.output(14, 1)
if temp_f <= 89 and temp_f >= 80:
    GPIO.output(14, 1)
if temp_f <= 79 and temp_f >= 70:
    GPIO.output(18, 1)
if temp_f <= 69 and temp_f >= 40:
    GPIO.output(23, 1)
if temp_f <= 39:
    GPIO.output(24, 1)

# Condition-text LEDs.
# BUG FIX: the original tests were written as `cc == 'a' or 'b' or ...`,
# which is always true because a non-empty string literal is truthy.
# Membership tests express the intended "cc is one of these" check.
if cc in ('drizzle', 'light drizzle', 'heavy drizzle'):
    GPIO.output(4, 1)
if cc in ('rain', 'light rain'):
    GPIO.output(4, 1)
if cc == 'heavy rain':
    GPIO.output(17, 1)
if cc in ('light rain mist', 'rain mist', 'heavy rain mist'):
    GPIO.output(4, 1)
if cc in ('rain shower', 'light rain showers'):
    GPIO.output(4, 1)
if cc == 'heavy rain shower':
    GPIO.output(17, 1)
if cc in ('light thunderstorm', 'heavy thunderstorm', 'thunderstorm'):
    GPIO.output(17, 1)
    GPIO.output(10, 1)
    GPIO.output(9, 1)
if cc in ('light freezing drizzle', 'heavy freezing drizzle', 'freezing drizzle'):
    GPIO.output(4, 1)
if cc in ('light freezing rain', 'heavy freezing rain', 'freezing rain'):
    GPIO.output(17, 1)
if cc in ('patches of fog', 'shallow fog', 'partial fog', 'light fog'):
    GPIO.output(22, 1)
if cc in ('fog', 'heavy fog', 'heavy fog patches', 'light fog patches'):
    GPIO.output(10, 1)
if cc == 'overcast':
    GPIO.output(10, 1)
if cc in ('partly cloudy', 'scattered clouds'):
    GPIO.output(22, 1)
if cc == 'mostly cloudy':
    GPIO.output(10, 1)
|
import RPi.GPIO as GPIO
import pywapi
import string
import time
# LED pins, BCM numbering.
channels = [4, 7, 8, 9, 10, 14, 15, 17, 18, 22, 23, 24, 25]
GPIO.setwarnings(True)
GPIO.setmode(GPIO.BCM)
GPIO.setup(channels, GPIO.OUT)
GPIO.output(channels, 0)
# Fetch current conditions for ZIP 33020 from weather.com.
weather = pywapi.get_weather_from_weather_com('33020')
temperature = int(weather['current_conditions']['temperature'])
# NOTE(review): under Python 2, 9/5 is integer division and equals 1, so
# this conversion is wrong there; 1.8 (or 9.0/5) is the safe form.
temp_f = temperature * (9/5)+32
humidity = int(weather['current_conditions']['humidity'])
cc = (weather['current_conditions']['text'].lower())
# Humidity bands.
if humidity >= 80:
    GPIO.output(7, 1)
if humidity <= 79 and humidity >= 60:
    GPIO.output(18, 1)
if humidity <= 59:
    GPIO.output(25, 1)
# Temperature bands (Fahrenheit).
if temp_f >= 90:
    GPIO.output(14, 1)
if temp_f <= 89 and temp_f >= 80:
    GPIO.output(14, 1)
if temp_f <= 79 and temp_f >= 70:
    GPIO.output(18, 1)
if temp_f <= 69 and temp_f >= 40:
    GPIO.output(23, 1)
if temp_f <= 39:
    GPIO.output(24, 1)
# NOTE(review): every `cc == 'a' or 'b' or ...` test below is always true,
# because a non-empty string literal is truthy; membership tests
# (`cc in ('a', 'b', ...)`) were presumably intended.
if cc == 'drizzle' or 'light drizzle' or 'heavy drizzle':
    GPIO.output(4, 1)
if cc == 'rain' or 'light rain':
    GPIO.output(4, 1)
if cc == 'heavy rain':
    GPIO.output(17, 1)
if cc == 'light rain mist' or 'rain mist' or 'heavy rain mist':
    GPIO.output(4, 1)
if cc == 'rain shower' or 'light rain showers':
    GPIO.output(4, 1)
if cc == 'heavy rain shower':
    GPIO.output(17, 1)
if cc == 'light thunderstorm' or 'heavy thunderstorm' or 'thunderstorm':
    GPIO.output(17, 1)
    GPIO.output(10, 1)
    GPIO.output(9, 1)
if cc == 'light freezing drizzle' or 'heavy freezing drizzle' or 'freezing drizzle':
    GPIO.output(4, 1)
if cc == 'light freezing rain' or 'heavy freezing rain' or 'freezing rain':
    GPIO.output(17, 1)
if cc == 'patches of fog' or 'shallow fog' or 'partial fog' or 'light fog':
    GPIO.output(22, 1)
if cc == 'fog' or 'heavy fog' or 'heavy fog patches' or 'light fog patches':
    GPIO.output(10, 1)
if cc == 'overcast':
    GPIO.output(10, 1)
if cc == 'partly cloudy' or 'scattered clouds':
    GPIO.output(22, 1)
if cc == 'mostly cloudy':
    GPIO.output(10, 1)
|
mit
|
Python
|
07f8f44fc5f69c71922bb3b85d621867d0df49fa
|
Support core logger as a property on the main scraper.
|
pombredanne/scrapekit,pudo/scrapekit,pombredanne/scrapekit,pudo/scrapekit
|
scrapekit/core.py
|
scrapekit/core.py
|
from uuid import uuid4
from time import time
from datetime import datetime
from threading import local
from scrapekit.config import Config
from scrapekit.tasks import TaskManager, Task
from scrapekit.http import make_session
from scrapekit.logs import make_logger
class Scraper(object):
    """ Scraper application object which handles resource management
    for a variety of related functions. """

    def __init__(self, name, config=None):
        self.name = name
        # Unique id and start time identify a single scraper run.
        self.id = uuid4()
        self.start_time = datetime.utcnow()
        self.config = Config(self, config)
        self._task_manager = None
        # Thread-local storage for per-task context.
        self.task_ctx = local()
        self.log = make_logger(self)
        self.log.info("Starting %s, %d threads.", self.name,
                      self.config.threads)

    @property
    def task_manager(self):
        """ Lazily-created manager for the scraper's worker threads. """
        if self._task_manager is None:
            self._task_manager = \
                TaskManager(threads=self.config.threads)
        return self._task_manager

    def task(self, fn):
        """ Decorate a function as a task in the scraper framework.
        This will enable the function to be queued and executed in
        a separate thread, allowing for the execution of the scraper
        to be asynchronous.
        """
        return Task(self, fn)

    def Session(self):
        """ Create a pre-configured ``requests`` session instance
        that can be used to run HTTP requests. This instance will
        potentially be cached, or a stub, depending on the
        configuration of the scraper. """
        return make_session(self)

    def head(self, url, **kwargs):
        """ HTTP HEAD via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.head
        """
        # BUG FIX: this previously issued a GET; use HEAD as documented.
        return self.Session().head(url, **kwargs)

    def get(self, url, **kwargs):
        """ HTTP GET via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.get
        """
        return self.Session().get(url, **kwargs)

    def post(self, url, **kwargs):
        """ HTTP POST via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.post
        """
        return self.Session().post(url, **kwargs)

    def put(self, url, **kwargs):
        """ HTTP PUT via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.put
        """
        return self.Session().put(url, **kwargs)

    def __repr__(self):
        return '<Scraper(%s)>' % self.name
|
from scrapekit.config import Config
from scrapekit.tasks import TaskManager, Task
from scrapekit.http import make_session
class Scraper(object):
    """ Scraper application object which handles resource management
    for a variety of related functions. """

    def __init__(self, name, config=None):
        self.name = name
        self.config = Config(self, config)
        # Created lazily by the task_manager property.
        self._task_manager = None

    @property
    def task_manager(self):
        """ Lazily-created manager for the scraper's worker threads. """
        if self._task_manager is None:
            self._task_manager = \
                TaskManager(threads=self.config.threads)
        return self._task_manager

    def task(self, fn):
        """ Decorate a function as a task in the scraper framework.
        This will enable the function to be queued and executed in
        a separate thread, allowing for the execution of the scraper
        to be asynchronous.
        """
        return Task(self, fn)

    def Session(self):
        """ Create a pre-configured ``requests`` session instance
        that can be used to run HTTP requests. This instance will
        potentially be cached, or a stub, depending on the
        configuration of the scraper. """
        return make_session(self)

    def head(self, url, **kwargs):
        """ HTTP HEAD via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.head
        """
        # NOTE(review): this issues a GET despite being documented as HEAD;
        # Session().head(...) looks intended.
        return self.Session().get(url, **kwargs)

    def get(self, url, **kwargs):
        """ HTTP GET via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.get
        """
        return self.Session().get(url, **kwargs)

    def post(self, url, **kwargs):
        """ HTTP POST via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.post
        """
        return self.Session().post(url, **kwargs)

    def put(self, url, **kwargs):
        """ HTTP PUT via ``requests``.
        See: http://docs.python-requests.org/en/latest/api/#requests.put
        """
        return self.Session().put(url, **kwargs)

    def __repr__(self):
        return '<Scraper(%s)>' % self.name
|
mit
|
Python
|
6edd4114c4e715a3a0c440af455fff089a099620
|
Clarify comment about Pyhton versions
|
pablohoffman/scrapy,pawelmhm/scrapy,finfish/scrapy,Ryezhang/scrapy,ssteo/scrapy,pawelmhm/scrapy,ssteo/scrapy,scrapy/scrapy,pawelmhm/scrapy,starrify/scrapy,ArturGaspar/scrapy,ssteo/scrapy,wujuguang/scrapy,dangra/scrapy,pablohoffman/scrapy,dangra/scrapy,elacuesta/scrapy,starrify/scrapy,scrapy/scrapy,kmike/scrapy,pablohoffman/scrapy,starrify/scrapy,finfish/scrapy,Ryezhang/scrapy,wujuguang/scrapy,elacuesta/scrapy,finfish/scrapy,eLRuLL/scrapy,ArturGaspar/scrapy,Ryezhang/scrapy,wujuguang/scrapy,eLRuLL/scrapy,eLRuLL/scrapy,dangra/scrapy,kmike/scrapy,elacuesta/scrapy,scrapy/scrapy,ArturGaspar/scrapy,kmike/scrapy
|
scrapy/squeues.py
|
scrapy/squeues.py
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
class SerializableQueue(queue_class):
def push(self, obj):
s = serialize(obj)
super(SerializableQueue, self).push(s)
def pop(self):
s = super(SerializableQueue, self).pop()
if s:
return deserialize(s)
return SerializableQueue
def _pickle_serialize(obj):
try:
return pickle.dumps(obj, protocol=2)
# Python <= 3.4 raises pickle.PicklingError here while
# 3.5 <= Python < 3.6 raises AttributeError and
# Python >= 3.6 raises TypeError
except (pickle.PicklingError, AttributeError, TypeError) as e:
raise ValueError(str(e))
# Disk-backed queues persisted with pickle (arbitrary objects) or marshal
# (faster, but limited to marshal-compatible types), plus in-memory variants.
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    _pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    _pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
"""
Scheduler queues
"""
import marshal
from six.moves import cPickle as pickle
from queuelib import queue
def _serializable_queue(queue_class, serialize, deserialize):
    """Build a queue_class subclass that serializes on push and
    deserializes on pop (a falsy popped value yields None)."""
    class SerializableQueue(queue_class):

        def push(self, obj):
            s = serialize(obj)
            super(SerializableQueue, self).push(s)

        def pop(self):
            s = super(SerializableQueue, self).pop()
            if s:
                return deserialize(s)

    return SerializableQueue
def _pickle_serialize(obj):
    """pickle.dumps(protocol=2), normalizing pickling failures to ValueError."""
    try:
        return pickle.dumps(obj, protocol=2)
    # Python <= 3.4 raises pickle.PicklingError here while
    # 3.5 <= Python < 3.6 raises AttributeError and
    # Python >= 3.6 raises TypeError
    except (pickle.PicklingError, AttributeError, TypeError) as e:
        raise ValueError(str(e))
# Disk-backed queues persisted with pickle (arbitrary objects) or marshal
# (faster, but limited to marshal-compatible types), plus in-memory variants.
PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    _pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    _pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
|
bsd-3-clause
|
Python
|
d6cdf99d87b23cd6bfd8fd7079919d89d6496501
|
Complete incomplete sentence
|
diagramsoftware/partner-contact,open-synergy/partner-contact,acsone/partner-contact
|
partner_identification/models/res_partner_id_category.py
|
partner_identification/models/res_partner_id_category.py
|
# -*- coding: utf-8 -*-
#
# © 2004-2010 Tiny SPRL http://tiny.be
# © 2010-2012 ChriCar Beteiligungs- und Beratungs- GmbH
# http://www.camptocamp.at
# © 2015 Antiun Ingenieria, SL (Madrid, Spain)
# http://www.antiun.com
# Antonio Espinosa <[email protected]>
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, fields
from openerp.exceptions import ValidationError, UserError
from openerp.tools.safe_eval import safe_eval
from openerp.tools.translate import _
class ResPartnerIdCategory(models.Model):
    """Category (type) of partner identification number, with optional
    Python code evaluated to validate each number of that category."""
    _name = "res.partner.id_category"
    _order = "name"

    code = fields.Char(string="Code", size=16, required=True)
    name = fields.Char(string="ID name", required=True, translate=True)
    active = fields.Boolean(string="Active", default=True)
    validation_code = fields.Text(
        'Python validation code',
        help="Python code called to validate an id number.",
        default="""
# Python code. Use failed = True to specify that the id number is not valid.
# You can use the following variables :
#  - self: browse_record of the current ID Category browse_record
#  - id_number: browse_record of ID number to validate
"""
    )

    @api.multi
    def _validation_eval_context(self, id_number):
        """Build the local namespace exposed to the validation code."""
        self.ensure_one()
        return {'self': self,
                'id_number': id_number,
                }

    @api.multi
    def validate_id_number(self, id_number):
        """Validate the given ID number
        The method raises an openerp.exceptions.ValidationError if the eval of
        python validation code fails
        """
        self.ensure_one()
        eval_context = self._validation_eval_context(id_number)
        try:
            safe_eval(self.validation_code,
                      eval_context,
                      mode='exec',
                      nocopy=True)
        except Exception as e:
            # BUG FIX: the two concatenated literals previously rendered a
            # doubled colon ("...validation code::"); keep a single one.
            raise UserError(
                _('Error when evaluating the id_category validation code:'
                  '\n %s \n(%s)') % (self.name, e))
        if eval_context.get('failed', False):
            raise ValidationError(
                _("%s is not a valid %s identifier") % (
                    id_number.name, self.name))
|
# -*- coding: utf-8 -*-
#
# © 2004-2010 Tiny SPRL http://tiny.be
# © 2010-2012 ChriCar Beteiligungs- und Beratungs- GmbH
# http://www.camptocamp.at
# © 2015 Antiun Ingenieria, SL (Madrid, Spain)
# http://www.antiun.com
# Antonio Espinosa <[email protected]>
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import api, models, fields
from openerp.exceptions import ValidationError, UserError
from openerp.tools.safe_eval import safe_eval
from openerp.tools.translate import _
class ResPartnerIdCategory(models.Model):
    """Category (type) of partner identification number, with optional
    Python code evaluated to validate each number of that category."""
    _name = "res.partner.id_category"
    _order = "name"

    code = fields.Char(string="Code", size=16, required=True)
    name = fields.Char(string="ID name", required=True, translate=True)
    active = fields.Boolean(string="Active", default=True)
    # NOTE(review): the default's first comment line is an unfinished
    # sentence ("Use failed = True to .") and "validte" is a typo.
    validation_code = fields.Text(
        'Python validation code',
        help="Python code called to validate an id number.",
        default="""
# Python code. Use failed = True to .
# You can use the following variables :
#  - self: browse_record of the current ID Category browse_record
#  - id_number: browse_record of ID number to validte
"""
    )

    @api.multi
    def _validation_eval_context(self, id_number):
        """Build the local namespace exposed to the validation code."""
        self.ensure_one()
        return {'self': self,
                'id_number': id_number,
                }

    @api.multi
    def validate_id_number(self, id_number):
        """Validate the given ID number
        The method raises an openerp.exceptions.ValidationError if the eval of
        python validation code fails
        """
        self.ensure_one()
        eval_context = self._validation_eval_context(id_number)
        try:
            safe_eval(self.validation_code,
                      eval_context,
                      mode='exec',
                      nocopy=True)
        except Exception as e:
            # NOTE(review): the concatenated literals render a doubled
            # colon ("code::") in the message.
            raise UserError(
                _('Error when evaluating the id_category validation code:'
                  ':\n %s \n(%s)') % (self.name, e))
        if eval_context.get('failed', False):
            raise ValidationError(
                _("%s is not a valid %s identifier") % (
                    id_number.name, self.name))
|
agpl-3.0
|
Python
|
573d3a7411a1653f64b901077264ecb98c1f9673
|
Use subprocess.check_call replace os.system
|
abersheeran/a2wsgi
|
script/version.py
|
script/version.py
|
import importlib
import os
import sys
import subprocess
here = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_version() -> str:
    """
    Return version.

    Prepends the repository root to sys.path and reads the local
    ``a2wsgi`` package's ``__version__`` attribute.
    """
    sys.path.insert(0, here)
    return importlib.import_module("a2wsgi").__version__
os.chdir(here)
# Sync pyproject's version with the package, commit, tag and push the
# release; check_call raises CalledProcessError if any step fails.
subprocess.check_call(f"poetry version {get_version()}", shell=True)
subprocess.check_call("git add a2wsgi/__init__.py pyproject.toml", shell=True)
subprocess.check_call(f'git commit -m "v{get_version()}"', shell=True)
subprocess.check_call("git push", shell=True)
subprocess.check_call("git tag v{0}".format(get_version()), shell=True)
subprocess.check_call("git push --tags", shell=True)
|
import importlib
import os
import sys
here = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def get_version() -> str:
    """
    Return version.

    Prepends the repository root to sys.path and reads the local
    ``a2wsgi`` package's ``__version__`` attribute.
    """
    sys.path.insert(0, here)
    return importlib.import_module("a2wsgi").__version__
os.chdir(here)
# Sync pyproject's version with the package, commit, tag and push.
# NOTE(review): os.system ignores failures, so a failed step does not stop
# the release; subprocess.check_call would abort on the first error.
os.system(f"poetry version {get_version()}")
os.system("git add a2wsgi/* pyproject.toml")
os.system(f'git commit -m "v{get_version()}"')
os.system("git push")
os.system("git tag v{0}".format(get_version()))
os.system("git push --tags")
|
apache-2.0
|
Python
|
487f7a2235e8541670fc0e9949dd3c0fb80eb932
|
fix formatting
|
subutai/nupic.research,mrcslws/nupic.research,numenta/nupic.research,mrcslws/nupic.research,subutai/nupic.research,numenta/nupic.research
|
projects/dendrites/permutedMNIST/experiments/__init__.py
|
projects/dendrites/permutedMNIST/experiments/__init__.py
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .base import CONFIGS as BASE
from .batch import CONFIGS as BATCH
from .batch_mnist import CONFIGS as BATCH_MNIST
from .centroid import CONFIGS as CENTROID
from .hyperparameter_search import CONFIGS as HYPERPARAMETERSEARCH
from .no_dendrites import CONFIGS as NO_DENDRITES
from .si_centroid import CONFIGS as SI_CENTROID
from .sp_context import CONFIGS as SP_CONTEXT
from .sp_context_search import CONFIGS as SP_PROTO
"""
Import and collect all experiment configurations into one CONFIG
"""
__all__ = ["CONFIGS"]
# Collect all configurations
CONFIGS = dict()
CONFIGS.update(BASE)
CONFIGS.update(BATCH)
CONFIGS.update(BATCH_MNIST)
CONFIGS.update(CENTROID)
CONFIGS.update(HYPERPARAMETERSEARCH)
CONFIGS.update(NO_DENDRITES)
CONFIGS.update(SI_CENTROID)
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .base import CONFIGS as BASE
from .batch import CONFIGS as BATCH
from .batch_mnist import CONFIGS as BATCH_MNIST
from .centroid import CONFIGS as CENTROID
from .hyperparameter_search import CONFIGS as HYPERPARAMETERSEARCH
from .no_dendrites import CONFIGS as NO_DENDRITES
from .si_centroid import CONFIGS as SI_CENTROID
from .sp_context import CONFIGS as SP_CONTEXT
from .sp_context_search import CONFIGS as SP_PROTO
"""
Import and collect all experiment configurations into one CONFIG
"""
__all__ = ["CONFIGS"]
# Collect all configurations
CONFIGS = dict()
CONFIGS.update(BASE)
CONFIGS.update(CENTROID)
CONFIGS.update(HYPERPARAMETERSEARCH)
CONFIGS.update(BATCH)
CONFIGS.update(BATCH_MNIST)
CONFIGS.update(CENTROID)
CONFIGS.update(NO_DENDRITES)
CONFIGS.update(SI_CENTROID)
|
agpl-3.0
|
Python
|
6fbfa11a6f13f8271687a83fc4de68f62d4a4501
|
Fix encrpytion with custom salt
|
dgengtek/scripts,dgengtek/scripts
|
crypto/encrypt.py
|
crypto/encrypt.py
|
#!/bin/env python3
"""
Encrypt password with salt for unix
Usage:
encrypt.py [options] [--rounds <count>] [--sha512 | --sha256 | --md5 | --crypt] [<salt>]
Options:
--sha512
--sha256
--md5
--crypt
-r, --rounds <count> rounds[default: 1000]
"""
import sys
import crypt
from getpass import getpass
from docopt import docopt
# docopt(doc, argv=None, help=True, version=None, options_first=False))
default_flag = "--sha512"
methods = {
"--sha512" : {
"method": crypt.METHOD_SHA512,
"id": "6",
},
"--sha256" : {
"method": crypt.METHOD_SHA256,
"id": "5",
},
"--md5" : {
"method": crypt.METHOD_MD5,
"id": "1",
},
"--crypt" : {
"method": crypt.METHOD_CRYPT,
"id": "",
},
}
def get_method(opt, default=default_flag):
    """Return the hashing-method entry selected by the CLI options.

    :param opt: docopt options dict mapping flag names (e.g. ``--sha512``)
        to booleans.
    :param default: flag name whose entry is returned when no method flag
        is set; defaults to the module-level ``default_flag``.
    :return: dict with ``method`` (a ``crypt.METHOD_*``) and ``id``
        (modular-crypt salt id prefix) keys.
    """
    for key in methods:  # iterating the dict yields its keys; .keys() was redundant
        if opt.get(key, False):
            return methods.get(key)
    # BUG FIX: previously ignored the ``default`` argument and always fell
    # back to the module-level ``default_flag``.
    return methods.get(default)
def main():
    """Read a password (piped stdin or interactive prompt) and print its
    crypt(3) hash.

    Exits with status 1 when no password was supplied.
    """
    opt = docopt(__doc__, sys.argv[1:])
    rounds = opt.get("--rounds")
    # FIX: this local was previously named ``methods``, shadowing the
    # module-level ``methods`` dict; renamed for clarity.
    entry = get_method(opt)
    method = entry.get("method")
    id_prefix = entry.get("id")
    salt = opt.get("<salt>")
    if not salt:
        # No user-supplied salt: let the stdlib generate one for the method.
        salt = crypt.mksalt(method)
    else:
        # Build a modular-crypt salt string, e.g. "$6$rounds=1000$abc$".
        # NOTE(review): for --crypt the id prefix is empty, producing
        # "$$rounds=..." — confirm that is intended.
        salt = "${}$rounds={}${}$".format(id_prefix, rounds, salt)
    password = ""
    if not sys.stdin.isatty():
        # Piped input: take the first line.
        # NOTE(review): readline() keeps the trailing newline, which then
        # becomes part of the hashed password — confirm intended.
        password = sys.stdin.readline()
    else:
        password = getpass()
    if not password:
        sys.exit(1)
    shadow = crypt.crypt(password, salt)
    print(shadow)
def usage():
    """Placeholder usage/help hook; intentionally does nothing."""
    return None
if __name__ == "__main__":
main()
|
#!/bin/env python3
"""
Encrypt password with salt for unix
Usage:
encrypt.py [options] [--sha512 | --sha256 | --md5 | --crypt] [<salt>]
Options:
--sha512
--sha256
--md5
--crypt
"""
import sys
import crypt
from getpass import getpass
from docopt import docopt
# docopt(doc, argv=None, help=True, version=None, options_first=False))
default_flag = {"--sha512":True}
methods = {
"--sha512" : crypt.METHOD_SHA512,
"--sha256" : crypt.METHOD_SHA256,
"--md5" : crypt.METHOD_MD5,
"--crypt" : crypt.METHOD_CRYPT,
}
def get_method(opt):
for key in methods.keys():
if opt.get(key, False):
return methods.get(key)
def main():
opt = docopt(__doc__, sys.argv[1:])
method = get_method(opt)
salt = opt.get("<salt>")
if not salt:
salt = crypt.mksalt(method)
password = ""
if not sys.stdin.isatty():
password = sys.stdin.readline()
else:
password = getpass()
if not password:
sys.exit(1)
shadow = crypt.crypt(password, salt)
print(shadow)
def usage():
pass
if __name__ == "__main__":
main()
|
mit
|
Python
|
4c2f6372bb5c1db18998626049aa8e53e9889452
|
Fix an invalid build dependency.
|
google/syzygy,Eloston/syzygy,google/syzygy,google/syzygy,google/syzygy,Eloston/syzygy
|
syzygy/trace/rpc/rpc.gyp
|
syzygy/trace/rpc/rpc.gyp
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
'chromium_code': 1,
'midl_out_dir': '<(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc',
},
'target_defaults': {
'all_dependent_settings': {
'include_dirs': ['<(SHARED_INTERMEDIATE_DIR)'],
},
},
'targets': [
{
'target_name': 'call_trace_rpc_lib',
'type': 'static_library',
# Build our IDL file to the shared intermediate directory using the
# midl_rpc.gypi include (because the default rules for .idl files are
# specific to COM interfaces). This include expects the prefix and
# midl_out_dir variables to be defined.
'variables': {
'prefix': 'CallTrace',
},
'includes': ['../../build/midl_rpc.gypi'],
'sources': ['call_trace_rpc.idl'],
'dependencies': [
'<(src)/syzygy/common/rpc/rpc.gyp:common_rpc_lib',
'<(src)/syzygy/trace/protocol/protocol.gyp:protocol_lib',
],
'outputs': [
'<(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc/call_trace_rpc.h',
'<(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc/call_trace_rpc_c.cc',
'<(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc/call_trace_rpc_s.cc',
],
'process_outputs_as_sources': 1,
},
{
'target_name': 'logger_rpc_lib',
'type': 'static_library',
# Build our IDL file to the shared intermediate directory using the
# midl_rpc.gypi include (because the default rules for .idl files are
# specific to COM interfaces). This include expects the prefix and
# midl_out_dir variables to be defined.
'variables': {
'prefix': 'Logger',
},
'includes': ['../../build/midl_rpc.gypi'],
'sources': ['logger_rpc.idl'],
'dependencies': [
'<(src)/syzygy/common/rpc/rpc.gyp:common_rpc_lib',
'<(src)/syzygy/trace/protocol/protocol.gyp:protocol_lib',
],
},
],
}
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
'chromium_code': 1,
'midl_out_dir': '<(SHARED_INTERMEDIATE_DIR)/syzygy/trace/rpc',
},
'target_defaults': {
'all_dependent_settings': {
'include_dirs': ['<(SHARED_INTERMEDIATE_DIR)'],
},
},
'targets': [
{
'target_name': 'call_trace_rpc_lib',
'type': 'static_library',
# Build our IDL file to the shared intermediate directory using the
# midl_rpc.gypi include (because the default rules for .idl files are
# specific to COM interfaces). This include expects the prefix and
# midl_out_dir variables to be defined.
'variables': {
'prefix': 'CallTrace',
},
'includes': ['../../build/midl_rpc.gypi'],
'sources': ['call_trace_rpc.idl'],
'dependencies': [
'<(src)/syzygy/common/rpc/rpc.gyp:common_rpc_lib',
'<(src)/syzygy/trace/protocol/protocol.gyp:protocol_lib',
],
},
{
'target_name': 'logger_rpc_lib',
'type': 'static_library',
# Build our IDL file to the shared intermediate directory using the
# midl_rpc.gypi include (because the default rules for .idl files are
# specific to COM interfaces). This include expects the prefix and
# midl_out_dir variables to be defined.
'variables': {
'prefix': 'Logger',
},
'includes': ['../../build/midl_rpc.gypi'],
'sources': ['logger_rpc.idl'],
'dependencies': [
'<(src)/syzygy/common/rpc/rpc.gyp:common_rpc_lib',
'<(src)/syzygy/trace/protocol/protocol.gyp:protocol_lib',
],
},
],
}
|
apache-2.0
|
Python
|
9843dab8e7e5c3cf7087cadf095339fb1f590ee3
|
Sort files by uploading date
|
pavelulyashev/django-mes-fichiers,pavelulyashev/django-mes-fichiers
|
src/apps/file_uploader/models.py
|
src/apps/file_uploader/models.py
|
from django.contrib.auth.models import User
from django.db import models
from easy_thumbnails.fields import ThumbnailerField
from easy_thumbnails.alias import aliases
class MonFileManager(models.Manager):
    """Default manager returning files newest-first by upload date."""
    def get_query_set(self):
        # Sort by creation timestamp, most recent first.
        queryset = super(MonFileManager, self).get_query_set()
        return queryset.order_by('-created_at')
class MonFile(models.Model):
    """An uploaded file with thumbnail support, owned by a user."""
    name = models.CharField(max_length=100, blank=True)
    # Files land in date-stamped directories, e.g. monfile/2014-01-31.
    file = ThumbnailerField(max_length=100, upload_to='monfile/%Y-%m-%d')
    description = models.TextField(blank=True)
    # NOTE(review): default=1 assumes a user with pk 1 always exists — confirm.
    user = models.ForeignKey(User, default=1, related_name='user_files')
    # automatic fields
    created_at = models.DateTimeField(auto_now_add=True, help_text=u'Created')
    updated_at = models.DateTimeField(auto_now=True, help_text=u'Updated')
    # Custom manager: default queryset is ordered newest-first.
    objects = MonFileManager()
    class Meta:
        verbose_name_plural = 'Mes Files'
    def __unicode__(self):
        return self.name
class MonAlbum(models.Model):
    """A named collection of MonFile objects with an optional cover image."""
    name = models.CharField(max_length=250)
    description = models.TextField(blank=True)
    # Cover is cleared (SET_NULL), not cascaded, when the file is deleted.
    cover = models.ForeignKey(MonFile,
                              null=True,
                              blank=True,
                              on_delete=models.SET_NULL)
    files = models.ManyToManyField(MonFile,
                                   related_name='albums',
                                   blank=True)
    class Meta:
        verbose_name_plural = 'Mes Albums'
    def __unicode__(self):
        return self.name
# App label used as the easy-thumbnails alias target namespace.
_app_label = MonAlbum._meta.app_label
def populate_aliases():
    """Register this app's thumbnail size aliases with easy-thumbnails."""
    THUMBNAIL_ALIASES = {
        _app_label: {
            'cover_medium': {'size': (270, 230), 'crop': True},
            'cover_small': {'size': (140, 140), 'crop': True},
            'preview': {'size': (158, 140), 'crop': True},
        }
    }
    # NOTE: dict.iteritems() is Python 2 only.
    for target, target_aliases in THUMBNAIL_ALIASES.iteritems():
        for alias, options in target_aliases.iteritems():
            aliases.set(alias, options, target=target)
# Run once at import time, unless the aliases are already registered.
if not aliases.all(target=_app_label):
    populate_aliases()
|
from django.contrib.auth.models import User
from django.db import models
from easy_thumbnails.fields import ThumbnailerField
from easy_thumbnails.alias import aliases
class MonFile(models.Model):
name = models.CharField(max_length=100, blank=True)
file = ThumbnailerField(max_length=100, upload_to='monfile/%Y-%m-%d')
description = models.TextField(blank=True)
user = models.ForeignKey(User, default=1, related_name='user_files')
# automatic fields
created_at = models.DateTimeField(auto_now_add=True, help_text=u'Created')
updated_at = models.DateTimeField(auto_now=True, help_text=u'Updated')
class Meta:
verbose_name_plural = 'Mes Files'
def __unicode__(self):
return self.name
class MonAlbum(models.Model):
name = models.CharField(max_length=250)
description = models.TextField(blank=True)
cover = models.ForeignKey(MonFile,
null=True,
blank=True,
on_delete=models.SET_NULL)
files = models.ManyToManyField(MonFile,
related_name='albums',
blank=True)
class Meta:
verbose_name_plural = 'Mes Albums'
def __unicode__(self):
return self.name
_app_label = MonAlbum._meta.app_label
def populate_aliases():
THUMBNAIL_ALIASES = {
_app_label: {
'cover_medium': {'size': (270, 230), 'crop': True},
'cover_small': {'size': (140, 140), 'crop': True},
'preview': {'size': (158, 140), 'crop': True},
}
}
for target, target_aliases in THUMBNAIL_ALIASES.iteritems():
for alias, options in target_aliases.iteritems():
aliases.set(alias, options, target=target)
if not aliases.all(target=_app_label):
populate_aliases()
|
bsd-3-clause
|
Python
|
bf87d7a60f20d9811fe2ff2c579f52b3e77a1ed3
|
Remove unneeded print statement.
|
ucb-sejits/ctree,ucb-sejits/ctree,mbdriscoll/ctree
|
ctree/c/dotgen.py
|
ctree/c/dotgen.py
|
"""
DOT generator for C constructs.
"""
from ctree.dotgen import DotGenLabeller
from ctree.types import codegen_type
class CDotGenLabeller(DotGenLabeller):
    """
    Manages generation of DOT.

    Each ``visit_*`` method returns the DOT label text for one C AST
    node type.
    """
    def visit_SymbolRef(self, node):
        """Label: address-space qualifiers, optional type, then the name."""
        s = r""
        if node._global:
            s += r"__global "
        if node._local:
            s += r"__local "
        if node._const:
            s += r"__const "
        if node.type is not None:
            s += r"%s " % codegen_type(node.type)
        s += r"%s" % node.name
        return s
    def visit_FunctionDecl(self, node):
        """Label: storage qualifiers plus ``<return-type> <name>(...)``."""
        s = r""
        if node.static:
            s += r"static "
        if node.inline:
            s += r"inline "
        if node.kernel:
            s += r"__kernel "
        s += r"%s %s(...)" % (codegen_type(node.return_type), node.name)
        return s
    def visit_Constant(self, node):
        """Label: the literal value."""
        return str(node.value)
    def visit_String(self, node):
        """Label: the string fragments joined with escaped quotes."""
        return r'\" \"'.join(node.values)
    def visit_CFile(self, node):
        """Label: the file's name."""
        return node.get_filename()
    def visit_NdPointer(self, node):
        """Label: dtype, ndim/shape and flags of the wrapped array pointer."""
        s = "dtype: %s\n" % node.ptr.dtype
        s += "ndim, shape: %s, %s\n" % (node.ptr.ndim, node.ptr.shape)
        s += "flags: %s" % node.ptr.flags
        return s
    def visit_BinaryOp(self, node):
        """Label: the operator's class name (e.g. Add, Mul)."""
        return type(node.op).__name__
    def visit_UnaryOp(self, node):
        """Label: the operator's class name."""
        return type(node.op).__name__
|
"""
DOT generator for C constructs.
"""
from ctree.dotgen import DotGenLabeller
from ctree.types import codegen_type
class CDotGenLabeller(DotGenLabeller):
"""
Manages generation of DOT.
"""
def visit_SymbolRef(self, node):
s = r""
if node._global:
s += r"__global "
if node._local:
s += r"__local "
if node._const:
s += r"__const "
if node.type is not None:
s += r"%s " % codegen_type(node.type)
print(node.type)
s += r"%s" % node.name
return s
def visit_FunctionDecl(self, node):
s = r""
if node.static:
s += r"static "
if node.inline:
s += r"inline "
if node.kernel:
s += r"__kernel "
s += r"%s %s(...)" % (codegen_type(node.return_type), node.name)
return s
def visit_Constant(self, node):
return str(node.value)
def visit_String(self, node):
return r'\" \"'.join(node.values)
def visit_CFile(self, node):
return node.get_filename()
def visit_NdPointer(self, node):
s = "dtype: %s\n" % node.ptr.dtype
s += "ndim, shape: %s, %s\n" % (node.ptr.ndim, node.ptr.shape)
s += "flags: %s" % node.ptr.flags
return s
def visit_BinaryOp(self, node):
return type(node.op).__name__
def visit_UnaryOp(self, node):
return type(node.op).__name__
|
bsd-2-clause
|
Python
|
50861c6d256438afd880aebbb3a19ea360367fac
|
upgrade IdentityDetailSerializer to DRF3
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
api/serializers/identity_detail_serializer.py
|
api/serializers/identity_detail_serializer.py
|
from core.models.identity import Identity
from rest_framework import serializers
class IdentityDetailSerializer(serializers.ModelSerializer):
    """Read-only detail serializer for an Identity (id, provider, quota)."""
    # created_by = serializers.CharField(source='creator_name')
    quota = serializers.ReadOnlyField(source='get_quota_dict')
    provider_id = serializers.ReadOnlyField(source='provider.uuid')
    # Expose the model's uuid as the public ``id`` field.
    id = serializers.ReadOnlyField(source="uuid")
    class Meta:
        model = Identity
        fields = ('id', 'provider_id', 'quota')
|
from core.models.identity import Identity
from rest_framework import serializers
class IdentityDetailSerializer(serializers.ModelSerializer):
created_by = serializers.CharField(source='creator_name')
quota = serializers.Field(source='get_quota_dict')
provider_id = serializers.Field(source='provider.uuid')
id = serializers.Field(source="uuid")
class Meta:
model = Identity
exclude = ('credentials', 'created_by', 'provider', 'uuid')
|
apache-2.0
|
Python
|
1e4d80c50aaf253fd2bad9a2139737d8bf8dc927
|
fix escape sequence DeprecationWarning (#1595)
|
Farama-Foundation/Gymnasium,Farama-Foundation/Gymnasium
|
gym/spaces/discrete.py
|
gym/spaces/discrete.py
|
import numpy as np
from .space import Space
class Discrete(Space):
    r"""A discrete space in :math:`\{ 0, 1, \\dots, n-1 \}`.
    Example::
        >>> Discrete(2)
    """
    def __init__(self, n):
        # n: number of elements; n == 0 yields an empty space.
        assert n >= 0
        self.n = n
        # Scalar (shape ()) space of dtype int64.
        super(Discrete, self).__init__((), np.int64)
    def sample(self):
        """Draw a uniformly random element of {0, ..., n-1}."""
        return self.np_random.randint(self.n)
    def contains(self, x):
        """Return True iff ``x`` is an integer (Python int, or a 0-d
        integer numpy scalar/array) within [0, n)."""
        if isinstance(x, int):
            as_int = x
        elif isinstance(x, (np.generic, np.ndarray)) and (x.dtype.kind in np.typecodes['AllInteger'] and x.shape == ()):
            as_int = int(x)
        else:
            return False
        return as_int >= 0 and as_int < self.n
    def __repr__(self):
        return "Discrete(%d)" % self.n
    def __eq__(self, other):
        # Two Discrete spaces are equal iff they have the same size.
        return isinstance(other, Discrete) and self.n == other.n
|
import numpy as np
from .space import Space
class Discrete(Space):
"""A discrete space in :math:`\{ 0, 1, \dots, n-1 \}`.
Example::
>>> Discrete(2)
"""
def __init__(self, n):
assert n >= 0
self.n = n
super(Discrete, self).__init__((), np.int64)
def sample(self):
return self.np_random.randint(self.n)
def contains(self, x):
if isinstance(x, int):
as_int = x
elif isinstance(x, (np.generic, np.ndarray)) and (x.dtype.kind in np.typecodes['AllInteger'] and x.shape == ()):
as_int = int(x)
else:
return False
return as_int >= 0 and as_int < self.n
def __repr__(self):
return "Discrete(%d)" % self.n
def __eq__(self, other):
return isinstance(other, Discrete) and self.n == other.n
|
mit
|
Python
|
50248c3989624f935a4ff2a80229b997ca77f5c2
|
fix generator issue
|
sobhe/hazm,sobhe/hazm,hesamd/hazm,sobhe/hazm
|
hazm/SequenceTagger.py
|
hazm/SequenceTagger.py
|
# coding: utf8
from __future__ import unicode_literals
from nltk.tag.api import TaggerI
from wapiti import Model
class SequenceTagger(TaggerI):
    """ wrapper for [Wapiti](http://wapiti.limsi.fr) sequence tagger
    >>> tagger = SequenceTagger(patterns=['*', 'U:word-%x[0,0]'])
    >>> tagger.train([[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]])
    >>> tagger.tag_sents([['من', 'به', 'مدرسه', 'رفته_بودم', '.']])
    [[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]]
    >>> tagger.save_model('test.tagger')
    >>> SequenceTagger(model='test.tagger').tag_sents([['من', 'به', 'مدرسه', 'رفته_بودم', '.']])
    [[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]]
    """
    def __init__(self, patterns=None, **options):
        """Create the tagger.

        :param patterns: iterable of Wapiti pattern strings; joined with
            newlines and handed to the underlying model.
        :param options: forwarded to ``wapiti.Model`` (e.g. ``model=path``).
        """
        # FIX: avoid a mutable default argument (was ``patterns=[]``);
        # behavior is unchanged ('\n'.join of an empty sequence is '').
        if patterns is None:
            patterns = []
        self.model = Model(patterns='\n'.join(patterns), **options)
    def train(self, sentences):
        """Train on sentences given as lists of (word, tag) pairs."""
        self.model.train(['\n'.join([' '.join(word) for word in sentence]) for sentence in sentences])
    def save_model(self, filename):
        """Persist the trained Wapiti model to ``filename``."""
        self.model.save(filename)
    def tag_sents(self, sentences):
        """Tag tokenized sentences; returns lists of (word, tag) pairs."""
        # Materialize first: ``sentences`` may be a generator and is
        # iterated twice below (once for input lines, once for pairing).
        sentences = list(sentences)
        lines = '\n\n'.join(['\n'.join(sentence) for sentence in sentences])
        results = self.model.label_sequence(lines).decode('utf8')
        tags = iter(results.strip().split('\n'))
        return [[(word, next(tags)) for word in sentence] for sentence in sentences]
|
# coding: utf8
from __future__ import unicode_literals
from nltk.tag.api import TaggerI
from wapiti import Model
class SequenceTagger(TaggerI):
""" wrapper for [Wapiti](http://wapiti.limsi.fr) sequence tagger
>>> tagger = SequenceTagger(patterns=['*', 'U:word-%x[0,0]'])
>>> tagger.train([[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]])
>>> tagger.tag_sents([['من', 'به', 'مدرسه', 'رفته_بودم', '.']])
[[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]]
>>> tagger.save_model('test.tagger')
>>> SequenceTagger(model='test.tagger').tag_sents([['من', 'به', 'مدرسه', 'رفته_بودم', '.']])
[[('من', 'PRO'), ('به', 'P'), ('مدرسه', 'N'), ('رفته_بودم', 'V'), ('.', 'PUNC')]]
"""
def __init__(self, patterns=[], **options):
self.model = Model(patterns='\n'.join(patterns), **options)
def train(self, sentences):
self.model.train(['\n'.join([' '.join(word) for word in sentence]) for sentence in sentences])
def save_model(self, filename):
self.model.save(filename)
def tag_sents(self, sentences):
lines = '\n\n'.join(['\n'.join(sentence) for sentence in sentences])
results = self.model.label_sequence(lines).decode('utf8')
tags = iter(results.strip().split('\n'))
return [[(word, next(tags)) for word in sentence] for sentence in sentences]
|
mit
|
Python
|
649a70d825d2182e3d5a4f42a83f377b66043e09
|
bump version
|
yandex/yandex-tank,yandex/yandex-tank
|
yandextank/version.py
|
yandextank/version.py
|
VERSION = '1.17.2'
|
VERSION = '1.17.1'
|
lgpl-2.1
|
Python
|
60d93c3ade6f465e627c6c47c17d9c86e2b52f2a
|
Handle None challenge
|
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
|
app/grandchallenge/core/context_processors.py
|
app/grandchallenge/core/context_processors.py
|
import logging
from django.conf import settings
from guardian.shortcuts import get_perms
from guardian.utils import get_anonymous_user
logger = logging.getLogger(__name__)
def challenge(request):
    """Template context processor exposing the request's challenge.

    Returns an empty context when the request has no ``challenge``
    attribute or when the challenge is ``None``; falls back to the
    anonymous user when the request carries no ``user`` attribute.
    """
    try:
        challenge = request.challenge
        # A request may resolve with challenge set to None; treat it as
        # "no challenge" but without logging the warning below.
        if challenge is None:
            return {}
    except AttributeError:
        logger.warning(f"Could not get challenge for request: {request}")
        return {}
    try:
        user = request.user
    except AttributeError:
        user = get_anonymous_user()
    return {
        "challenge": challenge,
        "challenge_perms": get_perms(user, challenge),
        "user_is_participant": challenge.is_participant(user),
        "pages": challenge.page_set.all(),
    }
def google_keys(*_, **__):
    """Expose the Google Analytics / Maps API keys from settings."""
    return {
        "google_analytics_id": settings.GOOGLE_ANALYTICS_ID,
        "geochart_api_key": settings.GOOGLE_MAPS_API_KEY,
    }
def debug(*_, **__):
    """Expose the DEBUG flag from settings to templates."""
    return {"DEBUG": settings.DEBUG}
|
import logging
from django.conf import settings
from guardian.shortcuts import get_perms
from guardian.utils import get_anonymous_user
logger = logging.getLogger(__name__)
def challenge(request):
try:
challenge = request.challenge
except AttributeError:
logger.warning(f"Could not get challenge for request: {request}")
return {}
try:
user = request.user
except AttributeError:
user = get_anonymous_user()
return {
"challenge": challenge,
"challenge_perms": get_perms(user, challenge),
"user_is_participant": challenge.is_participant(user),
"pages": challenge.page_set.all(),
}
def google_keys(*_, **__):
return {
"google_analytics_id": settings.GOOGLE_ANALYTICS_ID,
"geochart_api_key": settings.GOOGLE_MAPS_API_KEY,
}
def debug(*_, **__):
return {"DEBUG": settings.DEBUG}
|
apache-2.0
|
Python
|
1518347c2c1ceb482031ca091d54dcae25eed083
|
Refactor flip
|
jkoelker/zl.indicators
|
zl/indicators/flip.py
|
zl/indicators/flip.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Jason Koelker
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import numbers
from zipline.transforms import utils as transforms
BULL = 'Bull'
BEAR = 'Bear'


def flip(events, field):
    """Detect a flip over the event window.

    Compares the two oldest values of ``field`` against the two newest
    ones and returns ``BEAR``, ``BULL``, or ``None`` when neither
    pattern holds.
    """
    oldest = events[0][field]
    second_oldest = events[1][field]
    newest = events[-1][field]
    second_newest = events[-2][field]
    if second_newest > oldest and newest < second_oldest:
        return BEAR
    elif second_newest < oldest and newest > second_oldest:
        return BULL
    return None
class Flip(object):
    """Per-sid zipline transform reporting price flips.

    Keeps one ``FlipWindow`` per security id and routes each incoming
    event to its sid's window.
    """
    __metaclass__ = transforms.TransformMeta
    def __init__(self, period=4, field='close_price'):
        self.period = period
        self.field = field
        # defaultdict lazily creates a window per sid on first access.
        self.sid_windows = collections.defaultdict(self.create_window)
    def create_window(self):
        """Factory for a per-sid event window."""
        return FlipWindow(self.period, self.field)
    def update(self, event):
        """Feed ``event`` to its sid's window; return BULL/BEAR or None."""
        window = self.sid_windows[event.sid]
        window.update(event)
        return window()
class FlipWindow(transforms.EventWindow):
    """Sliding event window that evaluates ``flip`` once full.

    The window holds ``period + 2`` events so the comparison in ``flip``
    can span the two oldest and two newest events.
    """
    def __init__(self, period, field):
        transforms.EventWindow.__init__(self, window_length=period + 2)
        self.period = period
        self.field = field
    def handle_add(self, event):
        # Every event must carry a numeric value under ``field``.
        assert self.field in event
        assert isinstance(event[self.field], numbers.Number)
    def handle_remove(self, event):
        # Nothing to clean up when an event leaves the window.
        pass
    def __call__(self):
        """Return the flip result, or None while the window is not full."""
        if len(self.ticks) < self.window_length:
            return
        return flip(self.ticks, self.field)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Jason Koelker
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import numbers
from zipline.transforms import utils as transforms
BULL = 'Bull'
BEAR = 'Bear'
class Flip(object):
__metaclass__ = transforms.TransformMeta
def __init__(self, period=4, setup_price='close_price'):
self.period = period
self.setup_price = setup_price
self.sid_windows = collections.defaultdict(self.create_window)
def create_window(self):
return FlipWindow(self.period, self.setup_price)
def update(self, event):
window = self.sid_windows[event.sid]
window.update(event)
return window()
class FlipWindow(transforms.EventWindow):
def __init__(self, period, setup_price):
transforms.EventWindow.__init__(self, window_length=period + 2)
self.period = period
self.setup_price = setup_price
def handle_add(self, event):
assert self.setup_price in event
assert isinstance(event[self.setup_price], numbers.Number)
def handle_remove(self, event):
pass
def __call__(self):
if len(self.ticks) < self.window_length:
return
Yp = self.ticks[-1][self.setup_price]
Xp = self.ticks[-2][self.setup_price]
X = self.ticks[0][self.setup_price]
Y = self.ticks[1][self.setup_price]
if (Xp > X) and (Yp < Y):
return BEAR
if (Xp < X) and (Yp > Y):
return BULL
|
apache-2.0
|
Python
|
0be54cb28387c535bea17e6c3a1a277151b9648a
|
Add the url name for students_info view to gci.views.helper.url_names.
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
app/soc/modules/gci/views/helper/url_names.py
|
app/soc/modules/gci/views/helper/url_names.py
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for storing GCI related URL names.
"""
GCI_LIST_ORG_INVITES = 'gci_list_org_invites'
GCI_LIST_INVITES = 'gci_list_invites'
GCI_MANAGE_INVITE = 'gci_manage_invite'
GCI_RESPOND_INVITE = 'gci_respond_invite'
GCI_SEND_INVITE = 'gci_send_invite'
GCI_MANAGE_REQUEST = 'gci_manage_request'
GCI_RESPOND_REQUEST = 'gci_respond_request'
GCI_SEND_REQUEST = 'gci_send_request'
GCI_LEADERBOARD = 'gci_leaderboard'
GCI_STUDENT_TASKS = 'gci_student_tasks'
GCI_STUDENT_FORM_DOWNLOAD = 'gci_student_form_download'
CREATE_GCI_ORG_PROFILE = 'create_gci_org_profile'
EDIT_GCI_ORG_PROFILE = 'edit_gci_org_profile'
GCI_ORG_HOME = 'gci_org_home'
GCI_VIEW_TASK = 'gci_view_task'
# GET PARAMETERS WHICH ARE USED THROUGHOUT THE MODULE
#TODO(dhans): consider creation of a separate module for that
"""GET parameter which should be set in order to download Consent Form.
"""
CONSENT_FORM_GET_PARAM = 'consent_form'
"""GET parameter which should be set in order to download Student ID Form.
"""
STUDENT_ID_FORM_GET_PARAM = 'student_id_form'
GCI_STUDENTS_INFO = 'gci_students_info'
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for storing GCI related URL names.
"""
GCI_LIST_ORG_INVITES = 'gci_list_org_invites'
GCI_LIST_INVITES = 'gci_list_invites'
GCI_MANAGE_INVITE = 'gci_manage_invite'
GCI_RESPOND_INVITE = 'gci_respond_invite'
GCI_SEND_INVITE = 'gci_send_invite'
GCI_MANAGE_REQUEST = 'gci_manage_request'
GCI_RESPOND_REQUEST = 'gci_respond_request'
GCI_SEND_REQUEST = 'gci_send_request'
GCI_LEADERBOARD = 'gci_leaderboard'
GCI_STUDENT_TASKS = 'gci_student_tasks'
GCI_STUDENT_FORM_DOWNLOAD = 'gci_student_form_download'
CREATE_GCI_ORG_PROFILE = 'create_gci_org_profile'
EDIT_GCI_ORG_PROFILE = 'edit_gci_org_profile'
GCI_ORG_HOME = 'gci_org_home'
GCI_VIEW_TASK = 'gci_view_task'
# GET PARAMETERS WHICH ARE USED THROUGHOUT THE MODULE
#TODO(dhans): consider creation of a separate module for that
"""GET parameter which should be set in order to download Consent Form.
"""
CONSENT_FORM_GET_PARAM = 'consent_form'
"""GET parameter which should be set in order to download Student ID Form.
"""
STUDENT_ID_FORM_GET_PARAM = 'student_id_form'
|
apache-2.0
|
Python
|
65c6c0b5ac47caac71c6c1284d84c1004d348c01
|
Fix imports at top of file.
|
Therp/partner-contact,acsone/partner-contact,open-synergy/partner-contact,diagramsoftware/partner-contact
|
partner_relations/model/__init__.py
|
partner_relations/model/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import res_partner
from . import res_partner_relation
from . import res_partner_relation_type
from . import res_partner_relation_all
from . import res_partner_relation_type_selection
PADDING = 10
def get_partner_type(partner):
    """Classify a partner for relation purposes.

    :param partner: a res.partner record, company or individual
    :return: 'c' when the partner is a company, otherwise 'p' (person)
    :rtype: str
    """
    if partner.is_company:
        return 'c'
    return 'p'
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
PADDING = 10
def get_partner_type(partner):
"""Get partner type for relation.
:param partner: a res.partner either a company or not
:return: 'c' for company or 'p' for person
:rtype: str
"""
return 'c' if partner.is_company else 'p'
from . import res_partner
from . import res_partner_relation
from . import res_partner_relation_type
from . import res_partner_relation_all
from . import res_partner_relation_type_selection
|
agpl-3.0
|
Python
|
a056ddc885d7eb333ab323f7552bfffd35635a8a
|
Add period at end of plug-in description
|
ynotstartups/Wanhao,senttech/Cura,fieldOfView/Cura,ynotstartups/Wanhao,hmflash/Cura,totalretribution/Cura,fieldOfView/Cura,Curahelper/Cura,senttech/Cura,totalretribution/Cura,Curahelper/Cura,hmflash/Cura
|
plugins/ChangeLogPlugin/__init__.py
|
plugins/ChangeLogPlugin/__init__.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import ChangeLog
from UM.i18n import i18nCatalog
# Translation catalog used for all user-visible plug-in metadata strings.
catalog = i18nCatalog("cura")


def getMetaData():
    """Return the plug-in metadata dictionary consumed by the plug-in registry."""
    return {
        "plugin": {
            "name": catalog.i18nc("@label", "Changelog"),
            "author": "Ultimaker",
            "version": "1.0",
            "description": catalog.i18nc("@info:whatsthis", "Shows changes since latest checked version."),
            "api": 2
        }
    }


def register(app):
    """Instantiate the ChangeLog extension and expose it to the application."""
    return {"extension": ChangeLog.ChangeLog()}
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import ChangeLog
from UM.i18n import i18nCatalog
# Translation catalog used for all user-visible plug-in metadata strings.
catalog = i18nCatalog("cura")


def getMetaData():
    """Return the plug-in metadata dictionary consumed by the plug-in registry."""
    return {
        "plugin": {
            "name": catalog.i18nc("@label", "Changelog"),
            "author": "Ultimaker",
            "version": "1.0",
            "description": catalog.i18nc("@info:whatsthis", "Shows changes since latest checked version"),
            "api": 2
        }
    }


def register(app):
    """Instantiate the ChangeLog extension and expose it to the application."""
    return {"extension": ChangeLog.ChangeLog()}
|
agpl-3.0
|
Python
|
9d92862f903b4683f1365e7ae82dd48d60e86d34
|
Add new urls, login and register
|
SuperNovaPOLIUSP/supernova,SuperNovaPOLIUSP/supernova,SuperNovaPOLIUSP/supernova,SuperNovaPOLIUSP/supernova
|
aeSupernova/urls.py
|
aeSupernova/urls.py
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from django.contrib import admin
from login import views
import login
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Root URL configuration: each sub-application is mounted under its own prefix.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'aeSupernova.views.home', name='home'),
    # url(r'^aeSupernova/', include('aeSupernova.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
    #url(r'^$', 'aeSupernova.view.index'),
    url(r'^index/$', TemplateView.as_view(template_name='index.html')),
    url(r'^opticalSheet/', include('aeSupernova.opticalSheet.urls')),
    url(r'^datafile/', include('aeSupernova.datafile.urls')),
    url(r'^header/', include('aeSupernova.header.urls')),
    url(r'^generator/', include('aeSupernova.generator.urls')),
    url(r'^control/', include('aeSupernova.control.urls')),
    url(r'^presentation/', include('aeSupernova.presentation.urls')),
    url(r'^encoder/', include('aeSupernova.encoder.urls')),
    url(r'^lerJupiter/', include('aeSupernova.lerJupiter.urls')),
    url(r'^algeLin/', include('aeSupernova.algeLin.urls')),
    # Authentication: login app URLs plus a direct route to its register view.
    url(r'^login/', include('login.urls')),
    url(r'^register/$', views.register, name='register'),
)
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Root URL configuration: each sub-application is mounted under its own prefix.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'aeSupernova.views.home', name='home'),
    # url(r'^aeSupernova/', include('aeSupernova.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
    #url(r'^$', 'aeSupernova.view.index'),
    url(r'^index/$', TemplateView.as_view(template_name='index.html')),
    url(r'^opticalSheet/', include('aeSupernova.opticalSheet.urls')),
    url(r'^datafile/', include('aeSupernova.datafile.urls')),
    url(r'^header/', include('aeSupernova.header.urls')),
    url(r'^generator/', include('aeSupernova.generator.urls')),
    url(r'^control/', include('aeSupernova.control.urls')),
    url(r'^presentation/', include('aeSupernova.presentation.urls')),
    url(r'^encoder/', include('aeSupernova.encoder.urls')),
    url(r'^lerJupiter/', include('aeSupernova.lerJupiter.urls')),
    url(r'^algeLin/', include('aeSupernova.algeLin.urls')),
)
|
agpl-3.0
|
Python
|
1fb2a774765bc46e1bc2474136f135c59006c787
|
Return ConversationType in serializer
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend
|
yunity/conversations/serializers.py
|
yunity/conversations/serializers.py
|
from rest_framework import serializers
from rest_framework.fields import CharField, DateTimeField, SerializerMethodField
from rest_framework.relations import PrimaryKeyRelatedField
from yunity.api.serializers import UserSerializer
from yunity.conversations.models import ConversationMessage as MessageModel, ConversationType
from yunity.conversations.models import Conversation as ConversationModel
from yunity.users.models import User as UserModel
class MessageSerializer(serializers.Serializer):
    """Serializes a single conversation message (content, author, timestamp)."""

    content = CharField(max_length=100000)
    author = PrimaryKeyRelatedField(read_only=True)
    # Exposed as 'time' but backed by the model's created_at column.
    time = DateTimeField(read_only=True, source='created_at')

    def create(self, validated_data):
        """Persist a new message authored by the requesting user.

        NOTE(review): the target conversation id is read from the raw
        request payload, not from validated_data.
        """
        message = MessageModel.objects.create(
            sent_by_id=self.context['request'].user.id,
            in_conversation_id=self.context['request'].data['in_conversation_id'],
            **validated_data)
        return message
class ConversationSerializer(serializers.Serializer):
    """Serializes conversations; creating one also stores its first message."""

    topic = CharField(max_length=150, required=False)

    # Writing
    with_participants = PrimaryKeyRelatedField(many=True, write_only=True, queryset=UserModel.objects.all())
    message = CharField(max_length=100000, write_only=True)

    # Reading
    id = PrimaryKeyRelatedField(read_only=True)
    type = SerializerMethodField(read_only=True)
    participants = UserSerializer(many=True, read_only=True)
    messages = MessageSerializer(many=True, read_only=True)

    def get_type(self, obj):
        # Translate the stored type value into its symbolic ConversationType name.
        return ConversationType.name(obj.type)

    def create(self, validated_data):
        """
        Create new conversation with other users and a message
        """
        # The requesting user is always added as a participant.
        participant_ids = [_.id for _ in validated_data['with_participants']] + \
            [self.context['request'].user.id, ]
        # More than two participants makes this a group chat.
        if len(participant_ids) > 2:
            chat_type = ConversationType.MULTICHAT
        else:
            chat_type = ConversationType.ONE_ON_ONE
        chat = ConversationModel.objects.create(type=chat_type)
        chat.participants = participant_ids
        chat.save()
        # Store the initial message inside the new conversation.
        MessageModel.objects.create(
            sent_by_id=self.context['request'].user.id,
            in_conversation_id=chat.id,
            content=validated_data['message']['content'],
        )
        return chat

    def update(self, conversation, validated_data):
        """Rename the conversation; other fields are not updatable here."""
        conversation.name = validated_data.get('name', conversation.name)
        conversation.save()
        return conversation

    def validate_with_participants(self, value):
        """Reject empty participant lists and conversations with only the requester."""
        if len(value) < 1:
            raise serializers.ValidationError("No chat participants given")
        if len(value) == 1 and self.context['request'].user.id in value:
            raise serializers.ValidationError("Requesting user is only participant")
        return value
|
from rest_framework import serializers
from rest_framework.fields import CharField, DateTimeField
from rest_framework.relations import PrimaryKeyRelatedField
from yunity.api.serializers import UserSerializer
from yunity.conversations.models import ConversationMessage as MessageModel, ConversationType
from yunity.conversations.models import Conversation as ConversationModel
from yunity.users.models import User as UserModel
class MessageSerializer(serializers.Serializer):
    """Serializes a single conversation message (content, author, timestamp)."""

    content = CharField(max_length=100000)
    author = PrimaryKeyRelatedField(read_only=True)
    # Exposed as 'time' but backed by the model's created_at column.
    time = DateTimeField(read_only=True, source='created_at')

    def create(self, validated_data):
        """Persist a new message authored by the requesting user.

        NOTE(review): the target conversation id is read from the raw
        request payload, not from validated_data.
        """
        message = MessageModel.objects.create(
            sent_by_id=self.context['request'].user.id,
            in_conversation_id=self.context['request'].data['in_conversation_id'],
            **validated_data)
        return message
class ConversationSerializer(serializers.Serializer):
    """Serializes conversations; creating one also stores its first message."""

    topic = CharField(max_length=150, required=False)

    # Writing
    with_participants = PrimaryKeyRelatedField(many=True, write_only=True, queryset=UserModel.objects.all())
    message = CharField(max_length=100000, write_only=True)

    # Reading
    id = PrimaryKeyRelatedField(read_only=True)
    participants = UserSerializer(many=True, read_only=True)
    messages = MessageSerializer(many=True, read_only=True)

    def create(self, validated_data):
        """
        Create new conversation with other users and a message
        """
        # The requesting user is always added as a participant.
        participant_ids = [_.id for _ in validated_data['with_participants']] + \
            [self.context['request'].user.id, ]
        # More than two participants makes this a group chat.
        if len(participant_ids) > 2:
            chat_type = ConversationType.MULTICHAT
        else:
            chat_type = ConversationType.ONE_ON_ONE
        chat = ConversationModel.objects.create(type=chat_type)
        chat.participants = participant_ids
        chat.save()
        # Store the initial message inside the new conversation.
        MessageModel.objects.create(
            sent_by_id=self.context['request'].user.id,
            in_conversation_id=chat.id,
            content=validated_data['message']['content'],
        )
        return chat

    def update(self, conversation, validated_data):
        """Rename the conversation; other fields are not updatable here."""
        conversation.name = validated_data.get('name', conversation.name)
        conversation.save()
        return conversation

    def validate_with_participants(self, value):
        """Reject empty participant lists and conversations with only the requester."""
        if len(value) < 1:
            raise serializers.ValidationError("No chat participants given")
        if len(value) == 1 and self.context['request'].user.id in value:
            raise serializers.ValidationError("Requesting user is only participant")
        return value
|
agpl-3.0
|
Python
|
255ddb1a6910e590cb454a0d4e03f51b8d7b2092
|
Update setup.py console script to use cli instead of main
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
{{cookiecutter.repo_name}}/setup.py
|
{{cookiecutter.repo_name}}/setup.py
|
import sys
import os
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
def read(fname):
    """Return the text of *fname*, resolved relative to this file's directory.

    Uses a context manager so the file handle is closed promptly instead of
    leaking until garbage collection (the original open() was never closed).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
class PyTest(TestCommand):
    """setuptools 'test' command that delegates test running to py.test."""

    # Allows `python setup.py test --pytest-args="..."`.
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = []

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here because pytest may not be installed at setup time.
        import pytest
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
# Packaging metadata for the generated project; {{cookiecutter.*}} variables
# are rendered when the template is instantiated.
setup(
    name='{{cookiecutter.repo_name}}',
    version='{{cookiecutter.version}}',
    author='{{cookiecutter.full_name}}',
    author_email='{{cookiecutter.email}}',
    description='{{cookiecutter.short_description}}',
    long_description=read('README.rst'),
    license='MIT',
    keywords=(
        "Python, cookiecutter, kivy, buildozer, pytest, projects, project "
        "templates, example, documentation, tutorial, setup.py, package, "
        "android, touch, mobile, NUI"
    ),
    url='https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}',
    install_requires=['kivy>={{cookiecutter.kivy_version}}'],
    zip_safe=False,
    packages=find_packages(),
    include_package_data=True,
    # Expose the app as a console command pointing at the package's cli module.
    entry_points={
        'console_scripts': [
            '{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.cli:main'
        ]
    },
    tests_require=['pytest'],
    # Wire the custom PyTest command in as `setup.py test`.
    cmdclass={'test': PyTest},
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: X11 Applications',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Topic :: Artistic Software',
        'Topic :: Multimedia :: Graphics :: Presentation',
        'Topic :: Software Development :: User Interfaces',
    ],
)
|
import sys
import os
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
def read(fname):
    """Return the text of *fname*, resolved relative to this file's directory.

    Uses a context manager so the file handle is closed promptly instead of
    leaking until garbage collection (the original open() was never closed).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
class PyTest(TestCommand):
    """setuptools 'test' command that delegates test running to py.test."""

    # Allows `python setup.py test --pytest-args="..."`.
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = []

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here because pytest may not be installed at setup time.
        import pytest
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
# Packaging metadata for the generated project; {{cookiecutter.*}} variables
# are rendered when the template is instantiated.
setup(
    name='{{cookiecutter.repo_name}}',
    version='{{cookiecutter.version}}',
    author='{{cookiecutter.full_name}}',
    author_email='{{cookiecutter.email}}',
    description='{{cookiecutter.short_description}}',
    long_description=read('README.rst'),
    license='MIT',
    keywords=(
        "Python, cookiecutter, kivy, buildozer, pytest, projects, project "
        "templates, example, documentation, tutorial, setup.py, package, "
        "android, touch, mobile, NUI"
    ),
    url='https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}',
    install_requires=['kivy>={{cookiecutter.kivy_version}}'],
    zip_safe=False,
    packages=find_packages(),
    include_package_data=True,
    # Expose the app as a console command pointing at the package's main module.
    entry_points={
        'console_scripts': [
            '{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
        ]
    },
    tests_require=['pytest'],
    # Wire the custom PyTest command in as `setup.py test`.
    cmdclass={'test': PyTest},
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: X11 Applications',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Topic :: Artistic Software',
        'Topic :: Multimedia :: Graphics :: Presentation',
        'Topic :: Software Development :: User Interfaces',
    ],
)
|
mit
|
Python
|
2a4e5ad6ac5e5400564d0dc9306c2ab30b9dba98
|
bump version
|
jacobwegner/pinax-theme-bootstrap,druss16/danslist,jacobwegner/pinax-theme-bootstrap,foraliving/foraliving,foraliving/foraliving,grahamu/pinax-theme-bootstrap,jacobwegner/pinax-theme-bootstrap,grahamu/pinax-theme-bootstrap,druss16/danslist,druss16/danslist,grahamu/pinax-theme-bootstrap,foraliving/foraliving
|
pinax_theme_bootstrap/__init__.py
|
pinax_theme_bootstrap/__init__.py
|
# Package version string; bumped on each release.
__version__ = "0.1.4"
|
# Package version string; bumped on each release.
__version__ = "0.1.3"
|
mit
|
Python
|
7faa33c1eff79223252d6a7c4fe5ad033383df6c
|
Bump version
|
BT-ojossen/l10n-switzerland,open-net-sarl/l10n-switzerland,open-net-sarl/l10n-switzerland,BT-ojossen/l10n-switzerland
|
l10n_ch_payment_slip/__openerp__.py
|
l10n_ch_payment_slip/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - Payment Slip (BVR/ESR)',
'summary': 'Print ESR/BVR payment slip with your invoices',
'description': """
Swiss Payment slip known as ESR/BVR
===================================
This addon allows you to print the ESR/BVR report Using Qweb report.
The ESR/BVR is grenerated as an image and is availabe in a fields
of the `l10n_ch.payment_slip` Model.
The ESR/BVR is created each time an invoice is validated.
To modify it you have to cancel it and reconfirm the invoice.
You can adjust the print out of ESR/BVR, which depend on each printer,
for every company in the "BVR Data" tab.
This is especialy useful when using pre-printed paper.
An option also allow you to print the ESR/BVR in background when using
white paper.
This module will also allows you to import v11 files provided
by financial institute into a bank statement
To do so, use the wizard provided in bank statement.
This module also adds transaction_ref field on entries in order to manage
reconciliation in multi payment context (unique reference needed on
account.move.line). Many BVR can now be printed from on invoice for each
payment terms.
""",
'version': '8.0.2.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': ['base',
'account',
'account_payment',
'report',
'l10n_ch_base_bank',
'base_transaction_id'],
'data': ["company_view.xml",
"bank_view.xml",
"account_invoice_view.xml",
"wizard/bvr_import_view.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Switzerland - Payment Slip (BVR/ESR)',
'summary': 'Print ESR/BVR payment slip with your invoices',
'description': """
Swiss Payment slip known as ESR/BVR
===================================
This addon allows you to print the ESR/BVR report Using Qweb report.
The ESR/BVR is grenerated as an image and is availabe in a fields
of the `l10n_ch.payment_slip` Model.
The ESR/BVR is created each time an invoice is validated.
To modify it you have to cancel it and reconfirm the invoice.
You can adjust the print out of ESR/BVR, which depend on each printer,
for every company in the "BVR Data" tab.
This is especialy useful when using pre-printed paper.
An option also allow you to print the ESR/BVR in background when using
white paper.
This module will also allows you to import v11 files provided
by financial institute into a bank statement
To do so, use the wizard provided in bank statement.
This module also adds transaction_ref field on entries in order to manage
reconciliation in multi payment context (unique reference needed on
account.move.line). Many BVR can now be printed from on invoice for each
payment terms.
""",
'version': '8.0.2.1.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': ['base',
'account',
'account_payment',
'report',
'l10n_ch_base_bank',
'base_transaction_id'],
'data': ["company_view.xml",
"bank_view.xml",
"account_invoice_view.xml",
"wizard/bvr_import_view.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
|
agpl-3.0
|
Python
|
2e608036c8611026f9fb47a762901700891e284e
|
use BufferedWriter for gzip files -- 30% faster writing
|
Chris7/cutadapt,marcelm/cutadapt
|
cutadapt/xopen.py
|
cutadapt/xopen.py
|
"""
Open compressed files transparently.
"""
import gzip
import sys
import io
__author__ = 'Marcel Martin'
import sys
if sys.version_info[0] >= 3:
basestring = str
from codecs import getreader, getwriter
if sys.version_info < (2, 7):
buffered_reader = lambda x: x
buffered_writer = lambda x: x
else:
buffered_reader = io.BufferedReader
buffered_writer = io.BufferedWriter
def xopen(filename, mode='r'):
"""
Replacement for the "open" function that can also open
files that have been compressed with gzip. If the filename ends with .gz,
the file is opened with gzip.open(). If it doesn't, the regular open()
is used. If the filename is '-', standard output (mode 'w') or input
(mode 'r') is returned.
"""
assert isinstance(filename, basestring)
if filename == '-':
return sys.stdin if 'r' in mode else sys.stdout
if filename.endswith('.gz'):
if sys.version_info[0] < 3:
if 'r' in mode:
return buffered_reader(gzip.open(filename, mode))
else:
return buffered_writer(gzip.open(filename, mode))
else:
if 'r' in mode:
return getreader('ascii')(gzip.open(filename, mode))
else:
return getwriter('ascii')(gzip.open(filename, mode))
else:
return open(filename, mode)
|
"""
Open compressed files transparently.
"""
import gzip
import sys
import io
__author__ = 'Marcel Martin'
import sys
if sys.version_info[0] >= 3:
basestring = str
from codecs import getreader, getwriter
if sys.version_info < (2, 7):
buffered_reader = lambda x: x
else:
buffered_reader = io.BufferedReader
def xopen(filename, mode='r'):
"""
Replacement for the "open" function that can also open
files that have been compressed with gzip. If the filename ends with .gz,
the file is opened with gzip.open(). If it doesn't, the regular open()
is used. If the filename is '-', standard output (mode 'w') or input
(mode 'r') is returned.
"""
assert isinstance(filename, basestring)
if filename == '-':
return sys.stdin if 'r' in mode else sys.stdout
if filename.endswith('.gz'):
if sys.version_info[0] < 3:
if 'r' in mode:
return buffered_reader(gzip.open(filename, mode))
else:
return gzip.open(filename, mode)
else:
if 'r' in mode:
return getreader('ascii')(gzip.open(filename, mode))
else:
return getwriter('ascii')(gzip.open(filename, mode))
else:
return open(filename, mode)
|
mit
|
Python
|
fc683685d7df05ee0acc63a216c5b8fd99462219
|
use f strings
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
metaci/plan/templatetags/templatehelpers.py
|
metaci/plan/templatetags/templatehelpers.py
|
"""
https://simpleisbetterthancomplex.com/snippet/2016/08/22/dealing-with-querystring-parameters.html
"""
from django import template
register = template.Library()


@register.simple_tag
def relative_url(value, field_name, urlencode=None):
    """Build a querystring setting *field_name* to *value*.

    Any other parameters present in *urlencode* (a raw querystring) are
    preserved; existing occurrences of *field_name* are dropped so the new
    value replaces them.
    """
    url = f"?{field_name}={value}"
    if urlencode:
        querystring = urlencode.split("&")
        # Drop any existing value for field_name before re-appending the rest.
        filtered_querystring = [p for p in querystring if p.split("=")[0] != field_name]
        encoded_querystring = "&".join(filtered_querystring)
        url = f"{url}&{encoded_querystring}"
    return url
|
"""
https://simpleisbetterthancomplex.com/snippet/2016/08/22/dealing-with-querystring-parameters.html
"""
from django import template
register = template.Library()


@register.simple_tag
def relative_url(value, field_name, urlencode=None):
    """Build a querystring setting *field_name* to *value*.

    Any other parameters present in *urlencode* (a raw querystring) are
    preserved; existing occurrences of *field_name* are dropped so the new
    value replaces them.
    """
    url = "?{}={}".format(field_name, value)
    if urlencode:
        querystring = urlencode.split("&")
        # Drop any existing value for field_name before re-appending the rest.
        filtered_querystring = [p for p in querystring if p.split("=")[0] != field_name]
        encoded_querystring = "&".join(filtered_querystring)
        url = "{}&{}".format(url, encoded_querystring)
    return url
|
bsd-3-clause
|
Python
|
0430957f2b65ee0e14821027a15cfb956e976c62
|
make method static
|
StegSchreck/RatS,StegSchreck/RatS,StegSchreck/RatS
|
RatS/tmdb/tmdb_ratings_inserter.py
|
RatS/tmdb/tmdb_ratings_inserter.py
|
import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
    """Uploads ratings to themoviedb.org through its CSV import page."""

    def __init__(self, args):
        super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
        self.url_for_csv_file_upload = self._get_url_for_csv_upload()
        # Selectors for the CSV upload form on the import page.
        self.css_id_of_file_input_element = 'csv_file'
        self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"

    @staticmethod
    def _get_url_for_csv_upload():
        # TMDB settings page that accepts the exported ratings CSV.
        return 'https://www.themoviedb.org/settings/import-list'

    def pre_upload_action(self):
        """Dismiss the cookie notice if shown, so it cannot block the form."""
        cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
            .find_elements_by_class_name('accept')
        if cookie_accept_button is not None and len(cookie_accept_button) > 0:
            cookie_accept_button[0].click()
            # Give the notice time to disappear before interacting further.
            time.sleep(1)
|
import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
    """Uploads ratings to themoviedb.org through its CSV import page."""

    def __init__(self, args):
        super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
        self.url_for_csv_file_upload = self._get_url_for_csv_upload()
        # Selectors for the CSV upload form on the import page.
        self.css_id_of_file_input_element = 'csv_file'
        self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"

    def _get_url_for_csv_upload(self):
        # TMDB settings page that accepts the exported ratings CSV.
        return 'https://www.themoviedb.org/settings/import-list'

    def pre_upload_action(self):
        """Dismiss the cookie notice if shown, so it cannot block the form."""
        cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
            .find_elements_by_class_name('accept')
        if cookie_accept_button is not None and len(cookie_accept_button) > 0:
            cookie_accept_button[0].click()
            # Give the notice time to disappear before interacting further.
            time.sleep(1)
|
agpl-3.0
|
Python
|
76f5e98aec0024fb6d015004e1f3f26434a01fc2
|
Update _version.py
|
4dn-dcic/tibanna,4dn-dcic/tibanna,4dn-dcic/tibanna
|
core/_version.py
|
core/_version.py
|
"""Version information."""
# The following line *must* be the last in the module, exactly as formatted:
__version__ = "0.5.3"
|
"""Version information."""
# The following line *must* be the last in the module, exactly as formatted:
__version__ = "0.5.2"
|
mit
|
Python
|
92febbffb91943f13cfac8c00e55103b20645b70
|
Update [MediaContainer] children with the correct `section` object
|
fuzeman/plex.py
|
plex/objects/library/container.py
|
plex/objects/library/container.py
|
from plex.objects.core.base import Property
from plex.objects.container import Container
from plex.objects.library.section import Section
class MediaContainer(Container):
    """Top-level Plex response container; stamps its section onto children."""

    # Resolver is a lambda so the staticmethod can be referenced before the
    # class body has finished executing.
    section = Property(resolver=lambda: MediaContainer.construct_section)

    title1 = Property
    title2 = Property

    identifier = Property

    art = Property
    thumb = Property

    view_group = Property('viewGroup')
    view_mode = Property('viewMode', int)

    media_tag_prefix = Property('mediaTagPrefix')
    media_tag_version = Property('mediaTagVersion')

    no_cache = Property('nocache', bool)

    allow_sync = Property('allowSync', bool)
    mixed_parents = Property('mixedParents', bool)

    @staticmethod
    def construct_section(client, node):
        # Map the container-level librarySection* XML attributes onto a Section.
        attribute_map = {
            'key': 'librarySectionID',
            'uuid': 'librarySectionUUID',
            'title': 'librarySectionTitle'
        }
        return Section.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        # Attach this container's section to each child before yielding it.
        for item in super(MediaContainer, self).__iter__():
            item.section = self.section
            yield item
|
from plex.objects.core.base import Property
from plex.objects.container import Container
from plex.objects.library.section import Section
class MediaContainer(Container):
    """Top-level Plex response container with library-section metadata."""

    # Resolver is a lambda so the staticmethod can be referenced before the
    # class body has finished executing.
    section = Property(resolver=lambda: MediaContainer.construct_section)

    title1 = Property
    title2 = Property

    identifier = Property

    art = Property
    thumb = Property

    view_group = Property('viewGroup')
    view_mode = Property('viewMode', int)

    media_tag_prefix = Property('mediaTagPrefix')
    media_tag_version = Property('mediaTagVersion')

    no_cache = Property('nocache', bool)

    allow_sync = Property('allowSync', bool)
    mixed_parents = Property('mixedParents', bool)

    @staticmethod
    def construct_section(client, node):
        # Map the container-level librarySection* XML attributes onto a Section.
        attribute_map = {
            'key': 'librarySectionID',
            'uuid': 'librarySectionUUID',
            'title': 'librarySectionTitle'
        }
        return Section.construct(client, node, attribute_map, child=True)
|
mit
|
Python
|
d4aa45b39eab5ce4b06d6343344afb05a0bf8582
|
Fix pep8.
|
tryfer/tryfer
|
tryfer/tests/test_formatters.py
|
tryfer/tests/test_formatters.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from twisted.trial.unittest import TestCase
from tryfer import formatters
class TestFormatters(TestCase):
    """Unit tests for tryfer.formatters."""

    def test_ipv4_to_int(self):
        """ Thrift expects ipv4 address to be a signed 32-bit integer.

        Previously this function converted ip addresses to an unsigned 32-bit
        int. struct.pack is strict about integer overflows for signed 32-bit
        integers, so this function very much needs to produce a signed integer
        to allow IP addresses in the upper half to work
        """
        # ip that doesn't overflow in signed 32-bit
        low_ip = '127.0.0.1'
        # ip that does overflow in signed 32-bit
        high_ip = '172.17.1.1'

        low_ip_as_int = formatters.ipv4_to_int(low_ip)
        high_ip_as_int = formatters.ipv4_to_int(high_ip)

        # both parsed ips should be packable as signed 32-bit int
        # (struct.pack raises struct.error on overflow, failing the test)
        struct.pack('!i', low_ip_as_int)
        struct.pack('!i', high_ip_as_int)
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from twisted.trial.unittest import TestCase
from tryfer import formatters
class TestFormatters(TestCase):
    """Unit tests for tryfer.formatters."""

    def test_ipv4_to_int(self):
        """ Thrift expects ipv4 address to be a signed 32-bit integer.

        Previously this function converted ip addresses to an unsigned 32-bit
        int. struct.pack is strict about integer overflows for signed 32-bit
        integers, so this function very much needs to produce a signed integer
        to allow IP addresses in the upper half to work
        """
        # ip that doesn't overflow in signed 32-bit
        low_ip = '127.0.0.1'
        # ip that does overflow in signed 32-bit
        high_ip = '172.17.1.1'

        low_ip_as_int = formatters.ipv4_to_int(low_ip)
        high_ip_as_int = formatters.ipv4_to_int(high_ip)

        # both parsed ips should be packable as signed 32-bit int
        # (struct.pack raises struct.error on overflow, failing the test)
        struct.pack('!i', low_ip_as_int)
        struct.pack('!i', high_ip_as_int)
|
apache-2.0
|
Python
|
35293cecc99a629b3a185e69cf9ed3a339d9d1cf
|
Remove indentation level for easier review
|
glyph/automat
|
automat/_introspection.py
|
automat/_introspection.py
|
"""
Python introspection helpers.
"""
from types import CodeType as code, FunctionType as function
def copycode(template, changes):
    """Return a copy of code object *template* with selected fields replaced.

    *changes* maps short field names (e.g. 'name') to new values; each is
    applied as the corresponding co_* attribute.
    """
    if hasattr(code, "replace"):
        # Python 3.8+: CodeType.replace does this natively.
        return template.replace(**{"co_" + k : v for k, v in changes.items()})
    # Fallback: rebuild via the CodeType constructor. Field order must match
    # the constructor's positional signature for this interpreter version.
    names = [
        "argcount", "nlocals", "stacksize", "flags", "code", "consts",
        "names", "varnames", "filename", "name", "firstlineno", "lnotab",
        "freevars", "cellvars"
    ]
    if hasattr(code, "co_kwonlyargcount"):
        # Python 3 added a keyword-only argument count after argcount.
        names.insert(1, "kwonlyargcount")
    if hasattr(code, "co_posonlyargcount"):
        # PEP 570 added "positional only arguments"
        names.insert(1, "posonlyargcount")
    values = [
        changes.get(name, getattr(template, "co_" + name))
        for name in names
    ]
    return code(*values)


def copyfunction(template, funcchanges, codechanges):
    """Return a copy of function *template*, applying *funcchanges* to the
    function object and *codechanges* to its code object."""
    names = [
        "globals", "name", "defaults", "closure",
    ]
    values = [
        funcchanges.get(name, getattr(template, "__" + name + "__"))
        for name in names
    ]
    return function(copycode(template.__code__, codechanges), *values)


def preserveName(f):
    """
    Preserve the name of the given function on the decorated function.
    """
    def decorator(decorated):
        # Copy the decorated function, overriding both the function-level
        # and code-level name with f's name.
        return copyfunction(decorated,
                            dict(name=f.__name__), dict(name=f.__name__))
    return decorator
|
"""
Python introspection helpers.
"""
from types import CodeType as code, FunctionType as function
def copycode(template, changes):
    """Return a copy of code object *template* with selected fields replaced.

    *changes* maps short field names (e.g. 'name') to new values; each is
    applied as the corresponding co_* attribute.
    """
    if hasattr(code, "replace"):
        # Python 3.8+: CodeType.replace does this natively.
        return template.replace(**{"co_" + k : v for k, v in changes.items()})
    else:
        # Fallback: rebuild via the CodeType constructor. Field order must
        # match the constructor's positional signature for this interpreter.
        names = [
            "argcount", "nlocals", "stacksize", "flags", "code", "consts",
            "names", "varnames", "filename", "name", "firstlineno", "lnotab",
            "freevars", "cellvars"
        ]
        if hasattr(code, "co_kwonlyargcount"):
            # Python 3 added a keyword-only argument count after argcount.
            names.insert(1, "kwonlyargcount")
        if hasattr(code, "co_posonlyargcount"):
            # PEP 570 added "positional only arguments"
            names.insert(1, "posonlyargcount")
        values = [
            changes.get(name, getattr(template, "co_" + name))
            for name in names
        ]
        return code(*values)


def copyfunction(template, funcchanges, codechanges):
    """Return a copy of function *template*, applying *funcchanges* to the
    function object and *codechanges* to its code object."""
    names = [
        "globals", "name", "defaults", "closure",
    ]
    values = [
        funcchanges.get(name, getattr(template, "__" + name + "__"))
        for name in names
    ]
    return function(copycode(template.__code__, codechanges), *values)


def preserveName(f):
    """
    Preserve the name of the given function on the decorated function.
    """
    def decorator(decorated):
        # Copy the decorated function, overriding both the function-level
        # and code-level name with f's name.
        return copyfunction(decorated,
                            dict(name=f.__name__), dict(name=f.__name__))
    return decorator
|
mit
|
Python
|
9a1eb2dbe37c13c82477ed5787eeb985994cac8f
|
add Python2 shebang to helper.py
|
Lujeni/matterllo,Lujeni/matterllo,Lujeni/matterllo,Lujeni/matterllo
|
scripts/helper.py
|
scripts/helper.py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
scripts.init_webhook
~~~~~~~~~~~~~~~~~~~~
A simple script to manage the webhook.
:copyright: (c) 2016 by Lujeni.
:license: BSD, see LICENSE for more details.
"""
import argparse
import sys
from trello import TrelloClient
from slugify import slugify
from matterllo.utils import config
from matterllo.utils import logger
SETTINGS = config()
LOGGING = logger()
def main():
    """Command-line entry point: create, update or delete Trello webhooks.

    Flags (combinable):
      --cleanup  delete every existing webhook
      --update   create webhooks for the boards listed in SETTINGS
      --init     cleanup followed by create
    """
    try:
        parser = argparse.ArgumentParser(description="Webhook helpers")
        parser.add_argument('--cleanup', dest='cleanup', action='store_true', help='delete webhook from your SETTINGS.')
        parser.add_argument('--update', dest='update', action='store_true', help='upsert webhook from your SETTINGS.')
        parser.add_argument('--init', dest='init', action='store_true', help='delete and create webhook from your SETTINGS.')
        args = parser.parse_args()
        if not args.cleanup and not args.update and not args.init:
            # NOTE(review): Python 2 print statement; print_help() prints
            # the help itself and returns None, so this also emits "None".
            print parser.print_help()
            sys.exit(0)
        client = TrelloClient(api_key=SETTINGS['trello_api_key'], token=SETTINGS['trello_api_token'])
        trello_boards = client.list_boards()
        # Board names from SETTINGS, slugified for comparison below.
        boards_name = [slugify(b['name']) for b in SETTINGS.get('boards', {}).values()]
        # cleanup part
        if args.cleanup or args.init:
            result = [h.delete() for h in client.list_hooks()]
            LOGGING.info('delete {} webhook'.format(len(result)))
        # update / init part
        if args.update or args.init:
            for board in trello_boards:
                board_name = slugify(board.name)
                if board_name not in boards_name:
                    continue
                LOGGING.info('try to create webhook board :: {}'.format(board_name))
                url = SETTINGS['callback_url'] + '/trelloCallbacks/'
                result = client.create_hook(url, board.id)
                LOGGING.info('create webhook board :: {} :: {}'.format(board_name, result))
    except Exception as e:
        # Any failure (Trello API, bad settings, ...) is logged and fatal.
        LOGGING.error('unable init webhook :: {}'.format(e))
        sys.exit(1)
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
"""
scripts.init_webhook
~~~~~~~~~~~~~~~~~~~~
A simple script to manage the webhook.
:copyright: (c) 2016 by Lujeni.
:license: BSD, see LICENSE for more details.
"""
import argparse
import sys
from trello import TrelloClient
from slugify import slugify
from matterllo.utils import config
from matterllo.utils import logger
SETTINGS = config()
LOGGING = logger()
def main():
try:
parser = argparse.ArgumentParser(description="Webhook helpers")
parser.add_argument('--cleanup', dest='cleanup', action='store_true', help='delete webhook from your SETTINGS.')
parser.add_argument('--update', dest='update', action='store_true', help='upsert webhook from your SETTINGS.')
parser.add_argument('--init', dest='init', action='store_true', help='delete and create webhook from your SETTINGS.')
args = parser.parse_args()
if not args.cleanup and not args.update and not args.init:
print parser.print_help()
sys.exit(0)
client = TrelloClient(api_key=SETTINGS['trello_api_key'], token=SETTINGS['trello_api_token'])
trello_boards = client.list_boards()
boards_name = [slugify(b['name']) for b in SETTINGS.get('boards', {}).values()]
# cleanup part
if args.cleanup or args.init:
result = [h.delete() for h in client.list_hooks()]
LOGGING.info('delete {} webhook'.format(len(result)))
# update / init part
if args.update or args.init:
for board in trello_boards:
board_name = slugify(board.name)
if board_name not in boards_name:
continue
LOGGING.info('try to create webhook board :: {}'.format(board_name))
url = SETTINGS['callback_url'] + '/trelloCallbacks/'
result = client.create_hook(url, board.id)
LOGGING.info('create webhook board :: {} :: {}'.format(board_name, result))
except Exception as e:
LOGGING.error('unable init webhook :: {}'.format(e))
sys.exit(1)
if __name__ == '__main__':
main()
|
mit
|
Python
|
35ee18926743b6ab0356ef278da9cb14a3263246
|
Print field in output
|
justinccdev/jjvm
|
jjvm.py
|
jjvm.py
|
#!/usr/bin/python
import argparse
import os
import struct
import sys
CP_STRUCT_SIZES = { 7:3, 10:5 }
###############
### CLASSES ###
###############
class MyParser(argparse.ArgumentParser):
    """ArgumentParser variant that shows the full help text on bad arguments."""

    def error(self, message):
        """Write *message* to stderr, print the help text, then exit(2)."""
        stream = sys.stderr
        stream.write('error: %s\n' % message)
        self.print_help()
        sys.exit(2)
###################
### SUBROUTINES ###
###################
def lenCpStruct(tag):
    """Return the byte size of the constant-pool entry for *tag*, or -1
    when the tag is not (yet) supported."""
    # dict.get with a default replaces the manual membership test.
    return CP_STRUCT_SIZES.get(tag, -1)
############
### MAIN ###
############
parser = MyParser('Run bytecode in jjvm')
parser.add_argument('path', help='path to class')
args = parser.parse_args()
with open(args.path, "rb") as c:
    # Skip the first 8 bytes (presumably the .class magic number and
    # version fields -- TODO confirm) to land on the constant pool count.
    c.seek(8)
    cpCount = struct.unpack(">H", c.read(2))[0] - 1
    cpIndex = 1
    print "Constant pool count: %d" % cpCount;
    # Walk the constant pool: one tag byte, then a tag-dependent payload.
    while cpIndex <= cpCount:
        cpTag = ord(c.read(1))
        print "Field %d: %d" % (cpIndex, cpTag)
        cpStructSize = lenCpStruct(cpTag)
        if cpStructSize < 0:
            # Unknown tag: bail out rather than misparse the rest.
            print "ERROR: cpStructSize %d for tag %d" % (cpStructSize, cpTag)
            sys.exit(1)
        # print "Size: %d" % cpStructSize
        cpIndex += 1
        # The tag byte was already consumed, so advance size-1 more bytes.
        c.seek(cpStructSize - 1, os.SEEK_CUR)
|
#!/usr/bin/python
import argparse
import os
import struct
import sys
CP_STRUCT_SIZES = { 7:3, 10:5 }
###############
### CLASSES ###
###############
class MyParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
###################
### SUBROUTINES ###
###################
def lenCpStruct(tag):
if tag in CP_STRUCT_SIZES:
return CP_STRUCT_SIZES[tag]
else:
return -1
############
### MAIN ###
############
parser = MyParser('Run bytecode in jjvm')
parser.add_argument('path', help='path to class')
args = parser.parse_args()
with open(args.path, "rb") as c:
c.seek(8)
cpCount = struct.unpack(">H", c.read(2))[0] - 1
print "Constant pool count: %d" % cpCount;
while cpCount >= 0:
cpTag = ord(c.read(1))
print "Got tag: %d" % cpTag
cpStructSize = lenCpStruct(cpTag)
if cpStructSize < 0:
print "ERROR: cpStructSize %d for tag %d" % (cpStructSize, cpTag)
sys.exit(1)
print "Size: %d" % cpStructSize
cpCount -= 1
c.seek(cpStructSize - 1, os.SEEK_CUR)
|
apache-2.0
|
Python
|
b7cdab4dea63b91bbc4840ec4f0f147ac9fce7b0
|
Make tests for EvapReadFile
|
mdpiper/topoflow-cmi-testing
|
tests/test_evap_read_file.py
|
tests/test_evap_read_file.py
|
#!/usr/bin/env python
# Nosetests for the TopoFlow EvapReadFile component.
import os
from nose.tools import assert_is_not_none, assert_equals
from cmt.components import EvapReadFile as Component
from . import example_dir
cfg_file = os.path.join(example_dir, 'June_20_67_evap_read_file.cfg')
var_name = 'land_surface_water__evaporation_volume_flux'
def setup_module():
    """Create one shared EvapReadFile component for the whole module."""
    global component
    component = Component()
def teardown_module():
    # Nothing to clean up; the component is finalized inside test_irf.
    pass
# The file June_20_67_2D-ETrate-in.nc is missing; I had to generate it
# with the EvapEnergyBalance component.
def test_irf():
    # Initialize-run-finalize smoke test.
    component.initialize(cfg_file)
    component.update(1.0)
    component.finalize()
# --- BMI metadata getters ---
def test_get_component_name():
    x = component.get_component_name()
    assert_equals(x, 'TopoFlow_Evaporation_Read_File')
def test_get_start_time():
    x = component.get_start_time()
    assert_equals(x, 0.0)
def test_get_end_time():
    x = component.get_end_time()
    assert_equals(x, 36000.0)
def test_get_var_type():
    x = component.get_var_type(var_name)
    assert_equals(x, 'float64')
def test_get_var_units():
    x = component.get_var_units(var_name)
    assert_equals(x, 'm s-1')
def test_get_var_itemsize():
    x = component.get_var_itemsize(var_name)
    assert_equals(x, 8)
# The get_var_nbytes method isn't implemented in TopoFlow.
# def test_get_var_nbytes():
#     x = component.get_var_nbytes(var_name)
def test_get_value():
    x = component.get_value(var_name)
    assert_is_not_none(x)
# --- grid description getters (expected values match the test fixture) ---
def test_get_var_grid():
    x = component.get_var_grid(var_name)
    assert_equals(x, 0)
def test_get_grid_type():
    grid_id = component.get_var_grid(var_name)
    x = component.get_grid_type(grid_id)
    assert_equals(x, 'uniform')
def test_get_grid_rank():
    grid_id = component.get_var_grid(var_name)
    x = component.get_grid_rank(grid_id)
    assert_equals(x, 2)
def test_get_grid_shape():
    grid_id = component.get_var_grid(var_name)
    x = component.get_grid_shape(grid_id)
    assert_equals(x[0], 44)
    assert_equals(x[1], 29)
def test_get_grid_size():
    grid_id = component.get_var_grid(var_name)
    x = component.get_grid_size(grid_id)
    assert_equals(x, 44*29)
def test_get_grid_spacing():
    grid_id = component.get_var_grid(var_name)
    x = component.get_grid_spacing(grid_id)
    assert_equals(x[0], 30.0)
    assert_equals(x[1], 30.0)
def test_get_grid_origin():
    grid_id = component.get_var_grid(var_name)
    x = component.get_grid_origin(grid_id)
    assert_equals(x[0], 4560090.42)
    assert_equals(x[1], 277850.358)
|
#!/usr/bin/env python
import os
from cmt.components import EvapReadFile as Component
from . import example_dir
cfg_file = os.path.join(example_dir, 'June_20_67_evap_read_file.cfg')
# Fails because June_20_67_2D-ETrate-in.nc is missing
def test_irf():
component = Component()
component.initialize(cfg_file)
component.update(1.0)
component.finalize()
|
mit
|
Python
|
2c6ccdacc2c4e54cf0a12618d60c963d9c67ef62
|
Fix for DjangoCMS 3.5: get_cms_setting
|
nephila/djangocms-page-sitemap
|
djangocms_page_sitemap/settings.py
|
djangocms_page_sitemap/settings.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from cms.sitemaps import CMSSitemap
from cms.utils.conf import get_cms_setting
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
# Default change-frequency choices (value -> translated label); presumably
# the keys are the literal values emitted in the sitemap -- confirm.
PAGE_SITEMAP_CHANGEFREQ_DEFAULT_LIST = {
    'always': _('always'),
    'hourly': _('hourly'),
    'daily': _('daily'),
    'weekly': _('weekly'),
    'monthly': _('monthly'),
    'yearly': _('yearly'),
    'never': _('never'),
}
# Both settings below may be overridden from the Django project settings.
PAGE_SITEMAP_CHANGEFREQ_LIST = getattr(
    settings, 'PAGE_SITEMAP_CHANGEFREQ_LIST', PAGE_SITEMAP_CHANGEFREQ_DEFAULT_LIST
)
PAGE_SITEMAP_DEFAULT_CHANGEFREQ = getattr(
    settings, 'PAGE_SITEMAP_DEFAULT_CHANGEFREQ', CMSSitemap.changefreq
)
# Reuse django CMS's menu cache duration for the sitemap cache.
PAGE_SITEMAP_CACHE_DURATION = get_cms_setting('CACHE_DURATIONS')['menus']
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from cms.sitemaps import CMSSitemap
from cms.utils import get_cms_setting
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
PAGE_SITEMAP_CHANGEFREQ_DEFAULT_LIST = {
'always': _('always'),
'hourly': _('hourly'),
'daily': _('daily'),
'weekly': _('weekly'),
'monthly': _('monthly'),
'yearly': _('yearly'),
'never': _('never'),
}
PAGE_SITEMAP_CHANGEFREQ_LIST = getattr(
settings, 'PAGE_SITEMAP_CHANGEFREQ_LIST', PAGE_SITEMAP_CHANGEFREQ_DEFAULT_LIST
)
PAGE_SITEMAP_DEFAULT_CHANGEFREQ = getattr(
settings, 'PAGE_SITEMAP_DEFAULT_CHANGEFREQ', CMSSitemap.changefreq
)
PAGE_SITEMAP_CACHE_DURATION = get_cms_setting('CACHE_DURATIONS')['menus']
|
bsd-3-clause
|
Python
|
f603e8b394ea2b3ed9329b6948119970eb6aaa46
|
add test for transition
|
vicalloy/django-lb-workflow,vicalloy/django-lb-workflow,vicalloy/django-lb-workflow
|
lbworkflow/tests/test_transition.py
|
lbworkflow/tests/test_transition.py
|
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from lbworkflow.core.transition import TransitionExecutor
from lbworkflow.views.helper import user_wf_info_as_dict
from .test_base import BaseTests
from .leave.models import Leave
User = get_user_model()
class TransitionExecutorTests(BaseTests):
    """Drive the sample leave process through its activities with
    TransitionExecutor."""

    def test_submit(self):
        leave = self.leave
        instance = self.leave.pinstance
        leave.submit_process()
        # A1 will auto agree
        self.assertEqual(leave.pinstance.cur_activity.name, 'A2')
        self.assertEqual(leave.pinstance.get_operators_display(), 'tom')
        # A3 not auto agree
        workitem = instance.get_todo_workitem()
        transition = instance.get_agree_transition()
        # Executing the agree transition as operator 'tom' moves A2 -> A3.
        TransitionExecutor(self.users['tom'], instance, workitem, transition).execute()
        self.assertEqual(leave.pinstance.cur_activity.name, 'A3')
class ViewTests(BaseTests):
    """HTTP-level tests for executing workflow transitions."""

    def setUp(self):
        super(ViewTests, self).setUp()
        self.leave.submit_process()
        leave = self.leave
        ctx = user_wf_info_as_dict(leave, self.users['tom'])
        transitions = ctx['transitions']
        transition = transitions[0]
        self.transition_url = transition.get_app_url(ctx['workitem'])
        self.workitem = ctx['workitem']
        self.client.login(username='tom', password='password')

    def test_show_transition_form(self):
        # Bug fix: this method was previously also named
        # test_execute_transition and was shadowed by the identically named
        # method below, so the GET case never ran.
        resp = self.client.get(self.transition_url)
        self.assertEqual(resp.status_code, 200)

    def test_execute_transition(self):
        # POSTing the transition form advances the process to A3.
        resp = self.client.post(self.transition_url)
        self.assertRedirects(resp, '/wf/todo/')
        leave = Leave.objects.get(pk=self.leave.pk)
        self.assertEqual('A3', leave.pinstance.cur_activity.name)

    def test_simple_agree(self):
        # The simple agree endpoint should advance the process just like
        # posting the transition form.
        url = reverse('wf_agree')
        resp = self.client.post('%s?wi_id=%s' % (url, self.workitem.pk))
        self.assertRedirects(resp, '/wf/todo/')
        leave = Leave.objects.get(pk=self.leave.pk)
        self.assertEqual('A3', leave.pinstance.cur_activity.name)
|
from django.contrib.auth import get_user_model
from lbworkflow.core.transition import TransitionExecutor
from lbworkflow.views.helper import user_wf_info_as_dict
from .test_base import BaseTests
User = get_user_model()
class TransitionExecutorTests(BaseTests):
def test_submit(self):
leave = self.leave
instance = self.leave.pinstance
leave.submit_process()
# A1 will auto agree
self.assertEqual(leave.pinstance.cur_activity.name, 'A2')
self.assertEqual(leave.pinstance.get_operators_display(), 'tom')
# A3 not auto agree
workitem = instance.get_todo_workitem()
transition = instance.get_agree_transition()
TransitionExecutor(self.users['tom'], instance, workitem, transition).execute()
self.assertEqual(leave.pinstance.cur_activity.name, 'A3')
class ViewTests(BaseTests):
def setUp(self):
super(ViewTests, self).setUp()
self.leave.submit_process()
def test_execute_transition(self):
self.client.login(username='tom', password='password')
leave = self.leave
ctx = user_wf_info_as_dict(leave, self.users['tom'])
transitions = ctx['transitions']
transition = transitions[0]
url = transition.get_app_url(ctx['workitem'])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
|
mit
|
Python
|
51d8d354f1a75b83becad880eec7cbac86d52e74
|
Convert test to pytest syntax
|
cichm/cookiecutter,audreyr/cookiecutter,kkujawinski/cookiecutter,drgarcia1986/cookiecutter,christabor/cookiecutter,stevepiercy/cookiecutter,atlassian/cookiecutter,tylerdave/cookiecutter,terryjbates/cookiecutter,lucius-feng/cookiecutter,letolab/cookiecutter,vintasoftware/cookiecutter,michaeljoseph/cookiecutter,agconti/cookiecutter,benthomasson/cookiecutter,atlassian/cookiecutter,drgarcia1986/cookiecutter,terryjbates/cookiecutter,janusnic/cookiecutter,ramiroluz/cookiecutter,takeflight/cookiecutter,cichm/cookiecutter,vincentbernat/cookiecutter,stevepiercy/cookiecutter,moi65/cookiecutter,foodszhang/cookiecutter,luzfcb/cookiecutter,willingc/cookiecutter,foodszhang/cookiecutter,Vauxoo/cookiecutter,Vauxoo/cookiecutter,luzfcb/cookiecutter,janusnic/cookiecutter,dajose/cookiecutter,dajose/cookiecutter,kkujawinski/cookiecutter,lucius-feng/cookiecutter,venumech/cookiecutter,jhermann/cookiecutter,christabor/cookiecutter,vincentbernat/cookiecutter,sp1rs/cookiecutter,nhomar/cookiecutter,agconti/cookiecutter,audreyr/cookiecutter,tylerdave/cookiecutter,ionelmc/cookiecutter,ionelmc/cookiecutter,sp1rs/cookiecutter,venumech/cookiecutter,jhermann/cookiecutter,cguardia/cookiecutter,Springerle/cookiecutter,hackebrot/cookiecutter,vintasoftware/cookiecutter,ramiroluz/cookiecutter,pjbull/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,takeflight/cookiecutter,letolab/cookiecutter,0k/cookiecutter,hackebrot/cookiecutter,nhomar/cookiecutter,moi65/cookiecutter,cguardia/cookiecutter,Springerle/cookiecutter,lgp171188/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,lgp171188/cookiecutter,michaeljoseph/cookiecutter
|
tests/test_generate_files.py
|
tests/test_generate_files.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
TestGenerateFiles.test_generate_files
"""
from __future__ import unicode_literals
import os
import io
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
    """
    Use the global clean_system fixture and run additional teardown code to
    remove some special folders.
    For a better understanding - order of fixture calls:
    clean_system setup code
    clean_system_remove_additional_folders setup code
    clean_system_remove_additional_folders teardown code
    clean_system teardown code
    """
    # Teardown: remove every output directory a generate test may create.
    def remove_additional_folders():
        if os.path.exists('inputpizzä'):
            utils.rmtree('inputpizzä')
        if os.path.exists('inputgreen'):
            utils.rmtree('inputgreen')
        if os.path.exists('inputbinary_files'):
            utils.rmtree('inputbinary_files')
        if os.path.exists('tests/custom_output_dir'):
            utils.rmtree('tests/custom_output_dir')
        if os.path.exists('inputpermissions'):
            utils.rmtree('inputpermissions')
    request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
    """A repo dir without a templated top-level folder must raise."""
    with pytest.raises(exceptions.NonTemplatedInputDirException):
        generate.generate_files(
            context={
                'cookiecutter': {'food': 'pizza'}
            },
            repo_dir='tests/test-generate-files-nontemplated'
        )
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files():
    """Generated file exists and its template variable is rendered."""
    generate.generate_files(
        context={
            'cookiecutter': {'food': 'pizzä'}
        },
        repo_dir='tests/test-generate-files'
    )
    simple_file = 'inputpizzä/simple.txt'
    assert os.path.isfile(simple_file)
    simple_text = io.open(simple_file, 'rt', encoding='utf-8').read()
    assert simple_text == u'I eat pizzä'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_generate_files
-------------------
Test formerly known from a unittest residing in test_generate.py named
TestGenerateFiles.test_generate_files_nontemplated_exception
TestGenerateFiles.test_generate_files
"""
from __future__ import unicode_literals
import os
import io
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
from cookiecutter import utils
@pytest.fixture(scope="function")
def clean_system_remove_additional_folders(request, clean_system):
"""
Use the global clean_system fixture and run additional teardown code to
remove some special folders.
For a better understanding - order of fixture calls:
clean_system setup code
clean_system_remove_additional_folders setup code
clean_system_remove_additional_folders teardown code
clean_system teardown code
"""
def remove_additional_folders():
if os.path.exists('inputpizzä'):
utils.rmtree('inputpizzä')
if os.path.exists('inputgreen'):
utils.rmtree('inputgreen')
if os.path.exists('inputbinary_files'):
utils.rmtree('inputbinary_files')
if os.path.exists('tests/custom_output_dir'):
utils.rmtree('tests/custom_output_dir')
if os.path.exists('inputpermissions'):
utils.rmtree('inputpermissions')
request.addfinalizer(remove_additional_folders)
@pytest.mark.usefixtures("clean_system_remove_additional_folders")
def test_generate_files_nontemplated_exception():
with pytest.raises(exceptions.NonTemplatedInputDirException):
generate.generate_files(
context={'cookiecutter': {'food': 'pizza'}},
repo_dir='tests/test-generate-files-nontemplated'
)
def test_generate_files(self):
generate.generate_files(
context={
'cookiecutter': {'food': 'pizzä'}
},
repo_dir='tests/test-generate-files'
)
self.assertTrue(os.path.isfile('inputpizzä/simple.txt'))
simple_text = io.open('inputpizzä/simple.txt', 'rt', encoding='utf-8').read()
self.assertEqual(simple_text, u'I eat pizzä')
|
bsd-3-clause
|
Python
|
c4963df740e82d476500d2d998b288d0213806ee
|
Allow searching in the authorization code admin.
|
cc-archive/commoner,cc-archive/commoner
|
src/commoner/promocodes/admin.py
|
src/commoner/promocodes/admin.py
|
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.forms.widgets import HiddenInput
from commoner.promocodes.models import PromoCode
class PromoCodeAdminForm(forms.ModelForm):
    """Admin form for PromoCode: pre-fills a fresh unique code and can
    optionally send an invitation e-mail on save."""
    # The code is generated server-side; keep the field hidden in the admin.
    code = forms.CharField(initial='', widget=HiddenInput())
    send_email = forms.BooleanField(label=_(u'Send invitation letter?'), required=False)
    def __init__(self, *args, **kwargs):
        # if not done here, unique_code_string is only loaded when admin is bootstrapped
        if 'instance' not in kwargs:
            kwargs['initial'] = {'code': PromoCode.objects.unique_code_string()}
        super(PromoCodeAdminForm, self).__init__(*args, **kwargs)
    def save(self, force_insert=False, force_update=False, commit=True):
        code = super(PromoCodeAdminForm, self).save(commit)
        if self.cleaned_data['send_email']:
            PromoCode.objects.send_invite_letter(code)
        return code
    class Meta:
        # NOTE(review): modern Django requires an explicit ``fields`` or
        # ``exclude`` on ModelForm Meta; confirm before a Django upgrade.
        model = PromoCode
class PromoCodeAdmin(admin.ModelAdmin):
    """Admin options for PromoCode, using the code-generating form above."""
    form = PromoCodeAdminForm
    list_display = ('recipient', 'code', 'created', 'used')
    fields = ('code', 'recipient', 'expires', 'transaction_id', 'contribution_id', 'send_email',)
    ordering = ('-created',)
    search_fields = ('recipient', 'transaction_id', 'contribution_id',)
    # get the pretty admin boolean icons, still no filter abilities
    def used(self, object):
        """Expose the model's ``used`` state as a boolean admin column."""
        return object.used
    used.short_description = _(u'Redeemed code')
    used.boolean = True
admin.site.register(PromoCode, PromoCodeAdmin)
|
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.forms.widgets import HiddenInput
from commoner.promocodes.models import PromoCode
class PromoCodeAdminForm(forms.ModelForm):
code = forms.CharField(initial='', widget=HiddenInput())
send_email = forms.BooleanField(label=_(u'Send invitation letter?'), required=False)
def __init__(self, *args, **kwargs):
# if not done here, unique_code_string is only loaded when admin is bootstrapped
if 'instance' not in kwargs:
kwargs['initial'] = {'code': PromoCode.objects.unique_code_string()}
super(PromoCodeAdminForm, self).__init__(*args, **kwargs)
def save(self, force_insert=False, force_update=False, commit=True):
code = super(PromoCodeAdminForm, self).save(commit)
if self.cleaned_data['send_email']:
PromoCode.objects.send_invite_letter(code)
return code
class Meta:
model = PromoCode
class PromoCodeAdmin(admin.ModelAdmin):
form = PromoCodeAdminForm
list_display = ('recipient', 'code', 'created', 'used')
fields = ('code', 'recipient', 'expires', 'transaction_id', 'contribution_id', 'send_email',)
ordering = ('-created',)
# get the pretty admin boolean icons, still no filter abilities
def used(self, object):
return object.used
used.short_description = _(u'Redeemed code')
used.boolean = True
admin.site.register(PromoCode, PromoCodeAdmin)
|
agpl-3.0
|
Python
|
442f6c9eae5c64c3438f89c2968b0343c1f4ed6e
|
Revise script docstring
|
bowen0701/algorithms_data_structures
|
alg_find_peak_1D.py
|
alg_find_peak_1D.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
"""Find a peak in 1D array.
Support a is an array of length n.
If a is an array of length 1, a[0] is a peak.
In general, a[k] is a peak iff a[k] >= a[k - 1] and a[k] >= a[k + 1].
If a[0] >= a[1], then a[0] is a peak.
If a[n - 1] >= a[n - 2], then a[n - 1] is a peak.
"""
def find_peak_naive(arr):
    """Find a peak by naive linear scan.

    A peak arr[k] satisfies arr[k] >= arr[k - 1] and arr[k] >= arr[k + 1];
    boundary elements are only compared against their single neighbour.

    Time complexity: O(n).
    """
    n = len(arr)
    # A single element is trivially a peak (the original indexed arr[1]
    # here and raised IndexError for length-1 input).
    if n == 1:
        return arr[0]
    for i in range(n):
        if i == 0:
            if arr[i] >= arr[i + 1]:
                return arr[i]
        elif i == (n - 1):
            if arr[i] >= arr[i - 1]:
                return arr[i]
        else:
            if arr[i] >= arr[i - 1] and arr[i] >= arr[i + 1]:
                return arr[i]
def find_peak(arr):
    """Find a peak by divide-and-conquer: follow the rising slope.

    Time complexity: O(log n) comparisons (the slices copy, so pass index
    bounds instead to make the whole call truly logarithmic).
    """
    if len(arr) == 1:
        return arr[0]
    mid = len(arr) // 2
    if arr[mid] <= arr[mid - 1]:
        # Peak lies in the left half *including* arr[mid - 1]. The
        # original sliced arr[:mid-1], dropping that element and possibly
        # recursing onto an empty list.
        return find_peak(arr[:mid])
    if mid + 1 < len(arr) and arr[mid] <= arr[mid + 1]:
        # Rising to the right: peak lies strictly right of mid.
        return find_peak(arr[mid + 1:])
    # arr[mid] is >= both existing neighbours (the bounds check above also
    # fixes an IndexError for even-length inputs where mid is the last index).
    return arr[mid]
def main():
    """Time both peak finders on a small list and a large random array."""
    import time
    # Array of length 5 with peak 4.
    arr = [0, 1, 4, 3, 2]
    time_start = time.time()
    peak = find_peak_naive(arr)
    time_run = time.time() - time_start
    print('Peak: {}'.format(peak))
    print('Time for find_peak_naive(): {}'.format(time_run))
    time_start = time.time()
    peak = find_peak(arr)
    time_run = time.time() - time_start
    print('Peak: {}'.format(peak))
    # Bug fix: this timing belongs to find_peak(), not find_peak_naive().
    print('Time for find_peak(): {}'.format(time_run))
    # Array of long length.
    arr = np.random.permutation(10000000)
    time_start = time.time()
    peak = find_peak_naive(arr)
    time_run = time.time() - time_start
    print('Peak: {}'.format(peak))
    print('Time for find_peak_naive(): {}'.format(time_run))
    time_start = time.time()
    peak = find_peak(arr)
    time_run = time.time() - time_start
    print('Peak: {}'.format(peak))
    print('Time for find_peak(): {}'.format(time_run))
if __name__ == '__main__':
    main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
"""Find a peak in 1D array.
Support a is an array of length n.
If a is an array of length 1, a[0] is a peak.
In general k, a[k] is a peak iff a[k] >= a[k - 1] and a[k] >= a[k + 1].
If a[0] >= a[1], then a[0] is a peak.
If a[n - 1] >= a[n - 2], then a[n - 1] is a peak.
"""
def find_peak_naive(arr):
    """Find a peak by naive linear scan.

    A peak arr[k] satisfies arr[k] >= arr[k - 1] and arr[k] >= arr[k + 1];
    boundary elements are only compared against their single neighbour.

    Time complexity: O(n).
    """
    n = len(arr)
    # A single element is trivially a peak (the original indexed arr[1]
    # here and raised IndexError for length-1 input).
    if n == 1:
        return arr[0]
    for i in range(n):
        if i == 0:
            if arr[i] >= arr[i + 1]:
                return arr[i]
        elif i == (n - 1):
            if arr[i] >= arr[i - 1]:
                return arr[i]
        else:
            if arr[i] >= arr[i - 1] and arr[i] >= arr[i + 1]:
                return arr[i]
def find_peak(arr):
    """Find a peak by divide-and-conquer: follow the rising slope.

    Time complexity: O(log n) comparisons (slices copy; pass index bounds
    to make the whole call truly logarithmic).
    """
    if len(arr) == 1:
        return arr[0]
    mid = len(arr) // 2
    if arr[mid] <= arr[mid - 1]:
        # Peak lies in the left half *including* arr[mid - 1]; the original
        # arr[:mid-1] dropped that element and could recurse onto [].
        return find_peak(arr[:mid])
    if mid + 1 < len(arr) and arr[mid] <= arr[mid + 1]:
        return find_peak(arr[mid + 1:])
    # arr[mid] is >= both existing neighbours; the bounds check above also
    # avoids an IndexError when mid is the last index.
    return arr[mid]
def main():
import time
# Array of length 5 with peak 4.
arr = [0, 1, 4, 3, 2]
time_start = time.time()
peak = find_peak_naive(arr)
time_run = time.time() - time_start
print('Peak: {}'.format(peak))
print('Time for find_peak_naive(): {}'.format(time_run))
time_start = time.time()
peak = find_peak(arr)
time_run = time.time() - time_start
print('Peak: {}'.format(peak))
print('Time for find_peak_naive(): {}'.format(time_run))
# Array of long length.
arr = np.random.permutation(10000000)
time_start = time.time()
peak = find_peak_naive(arr)
time_run = time.time() - time_start
print('Peak: {}'.format(peak))
print('Time for find_peak_naive(): {}'.format(time_run))
time_start = time.time()
peak = find_peak(arr)
time_run = time.time() - time_start
print('Peak: {}'.format(peak))
print('Time for find_peak_naive(): {}'.format(time_run))
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
545f04982267a34daaacc3afb94cd50db3821550
|
Update ghost.py
|
MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab
|
home/Humanoid/ghost.py
|
home/Humanoid/ghost.py
|
###################################################
# This is a basic script to carry on a conversation
# with ghost (wires MyRobotLab services together).
###################################################
# Create the services: web UI, speech recognition, chatbot, HTML filter
# and text-to-speech output.
ghost = Runtime.start("ghost", "WebGui")
ear = Runtime.start("ear", "WebkitSpeechRecognition")
ghostchat = Runtime.start("ghostchat", "ProgramAB")
htmlfilter = Runtime.start("htmlfilter", "HtmlFilter")
mouth = Runtime.start("mouth", "NaturalReaderSpeech")
# Wire the text pipeline: ear -> chatbot -> html filter -> mouth.
# - I'll need to check on these - might
# need to just "attach" some services together
ear.addTextListener(ghostchat)
ghostchat.addTextListener(htmlfilter)
htmlfilter.addTextListener(mouth)
# start a chatbot session
ghostchat.startSession("ProgramAB/bots", "ghostchat")
voices = mouth.getVoices()
# I've also tried removing this because I got an iteration error for this line
# for voice in voices:
#    NaturalReaderSpeech.setVoice("Ryan")
|
###################################################
# This is a basic script to carry on a conversation
# with ghost
###################################################
# create service
ghost = Runtime.start("ghost", "WebGui")
ear = Runtime.start("ear", "WebkitSpeechRecognition")
ghostchat = Runtime.start("ghostchat", "ProgramAB")
htmlfilter = Runtime.start("htmlfilter", "HtmlFilter")
mouth = Runtime.start("mouth", "NaturalReaderSpeech")
# start a chatbot session
ghostchat.startSession("ProgramAB/bots", "ghostchat")
voices = mouth.getVoices()
# I've also tried removing this because I got an iteration error for this line
# for voice in voices:
# NaturalReaderSpeech.setVoice("Ryan")
# - I'll need to check on these - might
# need to just "attach" some services together
ear.addTextListener(ghostchat)
ghostchat.addTextListener(htmlfilter)
htmlfilter.addTextListener(mouth)
|
apache-2.0
|
Python
|
64938b5bb185f7f38716c166a2aa59a0713bc989
|
fix for sqlite test db
|
zbyte64/django-hyperadmin
|
tests/runtests.py
|
tests/runtests.py
|
"""
Test support harness for doing setup.py test.
See http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/.
"""
import sys
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.test_settings'
# Bootstrap Django's settings.
from django.conf import settings
settings.DATABASES = {
'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:'}
}
settings.TEST_RUNNER = "django_nose.NoseTestSuiteRunner"
settings.NOSE_PLUGINS = ['tests.noseplugins.TestDiscoveryPlugin']
def runtests():
"""Test runner for setup.py test."""
# Run you some tests.
import django.test.utils
runner_class = django.test.utils.get_runner(settings)
test_runner = runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests(['hyperadmin'])
# Okay, so this is a nasty hack. If this isn't here, `setup.py test` craps out
# when generating a coverage report via Nose. I have no idea why, or what's
# supposed to be going on here, but this seems to fix the problem, and I
# *really* want coverage, so, unless someone can tell me *why* I shouldn't
# do this, I'm going to just whistle innocently and keep on doing this.
sys.exitfunc = lambda: 0
sys.exit(failures)
|
"""
Test support harness for doing setup.py test.
See http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/.
"""
import sys
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.test_settings'
# Bootstrap Django's settings.
from django.conf import settings
settings.DATABASES = {
    # Bug fix: the name must be ':memory:' (not ':memory;'); sqlite treats
    # any other string as an on-disk database file name.
    'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:'}
}
settings.TEST_RUNNER = "django_nose.NoseTestSuiteRunner"
settings.NOSE_PLUGINS = ['tests.noseplugins.TestDiscoveryPlugin']
def runtests():
"""Test runner for setup.py test."""
# Run you some tests.
import django.test.utils
runner_class = django.test.utils.get_runner(settings)
test_runner = runner_class(verbosity=1, interactive=True)
failures = test_runner.run_tests(['hyperadmin'])
# Okay, so this is a nasty hack. If this isn't here, `setup.py test` craps out
# when generating a coverage report via Nose. I have no idea why, or what's
# supposed to be going on here, but this seems to fix the problem, and I
# *really* want coverage, so, unless someone can tell me *why* I shouldn't
# do this, I'm going to just whistle innocently and keep on doing this.
sys.exitfunc = lambda: 0
sys.exit(failures)
|
bsd-3-clause
|
Python
|
05f28064187c56d70d8f50c920676b81b7eb9f32
|
make test run faster
|
waylonflinn/bvec,tailwind/bdot
|
bdot/tests/test_carray.py
|
bdot/tests/test_carray.py
|
import nose
import bdot
import bcolz
import numpy as np
from numpy.testing import assert_array_equal
def test_dot_int64():
    """bdot.carray.dot must match numpy's dense dot product."""
    # NOTE(review): np.random.random_integers is deprecated in modern
    # numpy (np.random.randint is the replacement) -- confirm the pinned
    # numpy version before changing.
    matrix = np.random.random_integers(0, 12000, size=(30000, 100))
    bcarray = bdot.carray(matrix, chunklen=2**13, cparams=bcolz.cparams(clevel=2))
    v = bcarray[0]
    result = bcarray.dot(v)
    expected = matrix.dot(v)
    assert_array_equal(expected, result)
|
import nose
import bdot
import bcolz
import numpy as np
from numpy.testing import assert_array_equal
def test_dot_int64():
    # Regression test: bdot's chunked dot product must equal NumPy's result.
    # NOTE(review): np.random.random_integers is deprecated in NumPy (randint
    # replaces it), and the 300000x100 matrix makes this test slow -- confirm
    # this size is actually needed.
    matrix = np.random.random_integers(0, 12000, size=(300000, 100))
    bcarray = bdot.carray(matrix, chunklen=2**13, cparams=bcolz.cparams(clevel=2))
    v = bcarray[0]
    result = bcarray.dot(v)
    expected = matrix.dot(v)
    assert_array_equal(expected, result)
|
mit
|
Python
|
5e2f393238d976e576b390b668c7ce2f13a1e0c1
|
Update to use Py3 print() (#1142)
|
flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc,flutter-webrtc/flutter-webrtc
|
example/scripts/add-line.py
|
example/scripts/add-line.py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import sys
import getopt
import re
def findLine(pattern, fp):
    """Return the 1-based number of the first line of *fp* containing *pattern*, or -1."""
    number = 1
    current = fp.readline()
    # readline() (rather than iterating fp) keeps the file position exactly
    # where callers expect it when they seek afterwards.
    while current:
        if pattern in current:
            return number
        number += 1
        current = fp.readline()
    return -1
def insertBefore(filename, pattern, text):
    """Insert *text* as a new line directly above the first line containing *pattern*."""
    with open(filename, 'r+') as fp:
        line_number = findLine(pattern, fp)
        if line_number > 0:
            print('Insert', text, 'to line', line_number)
            fp.seek(0)
            lines = fp.readlines()
            fp.seek(0)
            lines.insert(line_number - 1, text + '\n')
            fp.writelines(lines)
            return
        # Bug fix: report the missing *pattern*; the old message printed the
        # replacement *text* instead, which mislabels what was not found.
        print('pattern', pattern, 'not found!')
def replaceText(filename, pattern, text):
    """Replace every regex match of *pattern* in *filename* with *text*, in place."""
    with open(filename, 'r') as fp:
        contents = fp.read()
    contents = re.sub(pattern, text, contents)
    print('Replace', pattern, 'to', text)
    # Write through a context manager so the file is closed even if write()
    # fails (the original reopened the file without one and double-closed it).
    with open(filename, 'w') as fp:
        fp.write(contents)
def main(argv):
    """Parse -i/-s/-t/-r options and dispatch to replaceText or insertBefore."""
    usage = 'add-line.py -i <inputfile> -s <string> -t <text>'
    target_file = ''
    search = ''
    payload = ''
    do_replace = False
    try:
        options, _ = getopt.getopt(argv, "hi:s:t:r")
    except getopt.GetoptError:
        print(usage)
        sys.exit(2)
    for flag, value in options:
        if flag == '-h':
            print(usage)
            sys.exit()
        elif flag == '-i':
            target_file = value
        elif flag == '-s':
            search = value
        elif flag == '-t':
            payload = value
        elif flag == '-r':
            do_replace = True
    if do_replace:
        replaceText(target_file, search, payload)
    else:
        insertBefore(target_file, search, payload)
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
import getopt
import re
def findLine(pattern, fp):
    # Return the 1-based number of the first line of *fp* containing
    # *pattern*, or -1 when no line matches.
    line = fp.readline()
    line_number = 1
    while line:
        #print("Line {}: {}".format(line_number, line.strip()))
        if pattern in line:
            return line_number
        line = fp.readline()
        line_number += 1
    return -1
def insertBefore(filename, pattern, text):
    # Insert *text* as a new line just above the first line containing *pattern*.
    with open(filename, 'r+') as fp:
        line_number = findLine(pattern, fp)
        if(line_number > 0):
            print 'Insert', text,'to line', line_number
            fp.seek(0)
            lines = fp.readlines()
            fp.seek(0)
            lines.insert(line_number - 1, text + '\n')
            fp.writelines(lines)
            return
        # NOTE(review): this prints the replacement *text*, but the value that
        # was not found is *pattern* -- confirm which should be reported.
        print 'pattern',text,'not found!'
def replaceText(filename, pattern, text):
    # Regex-substitute every match of *pattern* in *filename* with *text*.
    with open(filename, 'r') as fp:
        lines = fp.read()
        # NOTE(review): the with-block already closes fp; this explicit close
        # (and the context-manager-less reopen below) is cleanup-worthy.
        fp.close()
    lines = (re.sub(pattern, text, lines))
    print 'Replace', pattern ,'to', text
    fp = open(filename, 'w')
    fp.write(lines)
    fp.close()
def main(argv):
    # Parse -i/-s/-t/-r options and dispatch to replaceText or insertBefore.
    inputfile = ''
    string = ''
    text = ''
    replace = False
    try:
        opts, args = getopt.getopt(argv, "hi:s:t:r")
    except getopt.GetoptError:
        print 'add-line.py -i <inputfile> -s <string> -t <text>'
        sys.exit(2)
    # NOTE(review): ("-i") is a plain string, so `in` does substring matching;
    # it works here only because getopt yields complete flags like '-i'.
    for opt, arg in opts:
        if opt == '-h':
            print 'add-line.py -i <inputfile> -s <string> -t <text>'
            sys.exit()
        elif opt in ("-i"):
            inputfile = arg
        elif opt in ("-s"):
            string = arg
        elif opt in ("-t"):
            text = arg
        elif opt in ("-r"):
            replace = True
    if(replace):
        replaceText(inputfile, string, text)
    else:
        insertBefore(inputfile, string, text)
if __name__ == "__main__":
main(sys.argv[1:])
|
mit
|
Python
|
06cb55639d2bc504d0ec1b9fb073c40e00751328
|
Disable output example_pic.png if exists
|
tosh1ki/pyogi,tosh1ki/pyogi
|
doc/sample_code/demo_plot_state.py
|
doc/sample_code/demo_plot_state.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from pyogi.board import Board
from pyogi.plot import plot_board
if __name__ == '__main__':
    # Demo: set up a shogi board, play a short opening, render it two ways.
    board = Board()
    board.set_initial_state()
    board.players = ['先手', '後手']  # player names (sente / gote)
    # Moves presumably in CSA-style notation (+/- side, from/to, piece).
    board.move('+7776FU')
    board.move('-3334FU')
    board.move('+2868HI')
    board.move('-2288UM')
    board.move('+7988GI')
    # Plot by materials
    # Skip writing example_pic.png when the file already exists.
    savepath = 'example_pic.png'
    if os.path.exists(savepath):
        savepath = None
    plot_board(board, savepath=savepath, mode='pic')
    # Plot using matplotlib
    board.plot_state_mpl(figsize=(8, 9))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyogi.board import Board
from pyogi.plot import plot_board
if __name__ == '__main__':
    # Demo: set up a shogi board, play a short opening, render it two ways.
    board = Board()
    board.set_initial_state()
    board.players = ['先手', '後手']  # player names (sente / gote)
    board.move('+7776FU')
    board.move('-3334FU')
    board.move('+2868HI')
    board.move('-2288UM')
    board.move('+7988GI')
    # Plot by materials
    plot_board(board, savepath='example_pic.png', mode='pic')
    # Plot using matplotlib
    board.plot_state_mpl(figsize=(8, 9))
|
mit
|
Python
|
75f8a41c00e06f52102bf5f87a093d4ffef34f97
|
simplify the saving/loading of the lists
|
StoDevX/course-data-tools,StoDevX/course-data-tools
|
lib/maintain_lists_of_entries.py
|
lib/maintain_lists_of_entries.py
|
from .load_data_from_file import load_data_from_file
from .save_data import save_data
from .paths import mappings_path
import json
import os
def maintain_lists_of_entries(all_courses):
    """Merge values seen on *all_courses* into the persisted valid_*.json lists."""
    categories = ['departments', 'instructors', 'times', 'locations',
                  'gereqs', 'types']
    # Seed each category with the values already stored on disk.
    data_sets = {}
    for category in categories:
        filename = os.path.join(mappings_path, 'valid_%s.json' % category)
        data_sets[category] = set(json.loads(load_data_from_file(filename)))
    # Fold in every value present on the courses.
    for course in all_courses:
        data_sets['departments'].update(course.get('depts', []))
        data_sets['instructors'].update(course.get('instructors', []))
        data_sets['times'].update(course.get('times', []))
        data_sets['locations'].update(course.get('places', []))
        data_sets['gereqs'].update(course.get('gereqs', []))
        data_sets['types'].add(course.get('type', ''))
    # Persist each category as a sorted JSON list for stable, diff-friendly files.
    for category, values in data_sets.items():
        filename = os.path.join(mappings_path, 'valid_%s.json' % category)
        json_data = json.dumps(sorted(values), indent='\t', separators=(',', ': '))
        save_data(json_data + '\n', filename)
|
from .load_data_from_file import load_data_from_file
from .save_data import save_data
from .paths import mappings_path
import json
import os
def maintain_lists_of_entries(all_courses):
    # Merge values seen on *all_courses* into the persisted valid_*.json files.
    data_sets = {
        'departments': set(),
        'instructors': set(),
        'times': set(),
        'locations': set(),
        'gereqs': set(),
        'types': set(),
    }
    # Seed each category from disk; each file wraps its list under the
    # category name, i.e. {key: [...]}.
    for key in data_sets:
        filename = os.path.join(mappings_path, 'valid_%s.json' % key)
        data = load_data_from_file(filename)
        data_sets[key] = set(json.loads(data)[key])
    # Fold in every value present on the courses.
    for course in all_courses:
        data_sets['departments'].update(course.get('depts', []))
        data_sets['instructors'].update(course.get('instructors', []))
        data_sets['times'].update(course.get('times', []))
        data_sets['locations'].update(course.get('places', []))
        data_sets['gereqs'].update(course.get('gereqs', []))
        data_sets['types'].add(course.get('type', ''))
    # Sort for stable, diff-friendly output.
    for key in data_sets:
        data_sets[key] = sorted(data_sets[key])
    for key, data in data_sets.items():
        filename = os.path.join(mappings_path, 'valid_%s.json' % key)
        json_data = json.dumps({key: data},
                               indent='\t', separators=(',', ': '))
        save_data(json_data, filename)
|
mit
|
Python
|
f3c6a888b4462e2fab43faba6dbe2af4bafff1bb
|
Update add-snmpproxy-collector.py
|
Orange-OpenSource/opnfv-cloudify-clearwater,Orange-OpenSource/opnfv-cloudify-clearwater
|
scripts/monitoring/proxy_snmp/add-snmpproxy-collector.py
|
scripts/monitoring/proxy_snmp/add-snmpproxy-collector.py
|
from cloudify import ctx
from cloudify import exceptions
import diamond_agent.tasks as diamond
import os
workdir = ctx.plugin.workdir
# Diamond keeps its files in a sibling plugin workdir; swap the plugin name in
# the path (NOTE(review): assumes the path contains "script" -- confirm).
paths = diamond.get_paths(workdir.replace("script","diamond"))
name = 'SNMPProxyCollector'
# Install the collector source into diamond's collectors directory.
collector_dir = os.path.join(paths['collectors'], name)
if not os.path.exists(collector_dir):
    os.mkdir(collector_dir)
collector_file = os.path.join(collector_dir, '{0}.py'.format(name))
ctx.download_resource('scripts/monitoring/proxy_snmp/snmpproxy.py', collector_file)
# Enable the collector and tag its metrics with the target host identity.
config = ctx.target.instance.runtime_properties.get('snmp_collector_config', {})
config.update({'enabled': True,
               'hostname': '{0}.{1}.{2}'.format(diamond.get_host_id(ctx.target),
                                                ctx.target.node.name,
                                                ctx.target.instance.id)
               })
config_full_path = os.path.join(paths['collectors_config'], '{0}.conf'.format(name))
diamond.write_config(config_full_path, config)
# Restart diamond so the new collector is picked up; stop failures are
# tolerated because diamond may not be running yet.
try:
    diamond.stop_diamond(paths['config'])
except:
    pass
try:
    diamond.start_diamond(paths['config'])
except:
    # NOTE(review): RecoverableError is constructed but never raised, so a
    # start failure is effectively swallowed -- confirm whether this should be
    # `raise exceptions.RecoverableError(...)`.
    exceptions.RecoverableError("Failed to start diamond", 30)
    pass
|
from cloudify import ctx
from cloudify import exceptions
import diamond_agent.tasks as diamond
import os
# Install the SNMP proxy collector into diamond's workdir and restart diamond.
paths = diamond.get_paths(ctx.plugin.workdir)
name = 'SNMPProxyCollector'
collector_dir = os.path.join(paths['collectors'], name)
if not os.path.exists(collector_dir):
    os.mkdir(collector_dir)
collector_file = os.path.join(collector_dir, '{0}.py'.format(name))
ctx.download_resource('scripts/monitoring/proxy_snmp/snmpproxy.py', collector_file)
# Enable the collector and tag its metrics with the target host identity.
config = ctx.target.instance.runtime_properties.get('snmp_collector_config', {})
config.update({'enabled': True,
               'hostname': '{0}.{1}.{2}'.format(diamond.get_host_id(ctx.target),
                                                ctx.target.node.name,
                                                ctx.target.instance.id)
               })
config_full_path = os.path.join(paths['collectors_config'], '{0}.conf'.format(name))
diamond.write_config(config_full_path, config)
# Stop failures are tolerated because diamond may not be running yet.
try:
    diamond.stop_diamond(paths['config'])
except:
    pass
try:
    diamond.start_diamond(paths['config'])
except:
    # NOTE(review): RecoverableError is constructed but never raised -- confirm.
    exceptions.RecoverableError("Failed to start diamond", 30)
    pass
|
apache-2.0
|
Python
|
c9027e8aebe853d1c85fcac24b09caeb8ea5f403
|
Bump version to 0.3.0
|
Z2PackDev/bands_inspect,Z2PackDev/bands_inspect
|
bands_inspect/__init__.py
|
bands_inspect/__init__.py
|
# -*- coding: utf-8 -*-
# (c) 2017-2019, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
"""
A tool for modifying, comparing and plotting electronic bandstructures.
"""
from . import kpoints
from . import eigenvals
from . import compare
from . import lattice
from . import plot
__version__ = '0.3.0'
|
# -*- coding: utf-8 -*-
# (c) 2017-2019, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
"""
A tool for modifying, comparing and plotting electronic bandstructures.
"""
from . import kpoints
from . import eigenvals
from . import compare
from . import lattice
from . import plot
__version__ = '0.2.3'
|
apache-2.0
|
Python
|
889a2608d1d4038a8c7ee1c445530fd1750c00e0
|
Optimize styling according to pylint
|
aleju/ner-crf
|
preprocessing/collect_unigrams.py
|
preprocessing/collect_unigrams.py
|
# -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yestarday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from model.unigrams import Unigrams
# All capitalized constants come from this file
import config as cfg
def main():
    """Gather all unigrams and PER-labeled unigrams; see the module docstring."""
    # Pass 1: every unigram, regardless of label (including "O").
    print("Collecting unigrams...")
    unigrams = Unigrams()
    unigrams.fill_from_articles(cfg.ARTICLES_FILEPATH, verbose=True)
    unigrams.write_to_file(cfg.UNIGRAMS_FILEPATH)
    unigrams = None  # release the full table before the second pass
    # Pass 2: only unigrams labeled PER (person names).
    print("Collecting person names (label=PER)...")
    name_unigrams = Unigrams()
    name_unigrams.fill_from_articles_labels(cfg.ARTICLES_FILEPATH, ["PER"], verbose=True)
    name_unigrams.write_to_file(cfg.UNIGRAMS_PERSON_FILEPATH)
    print("Finished.")
# ---------------
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""
File to collect all unigrams and all name-unigrams (label PER) from a corpus file.
The corpus file must have one document/article per line. The words must be labeled in the
form word/LABEL.
Example file content:
Yestarday John/PER Doe/PER said something amazing.
Washington/LOC D.C./LOC is the capital of the U.S.
The foobird is a special species of birds. It's commonly found on mars.
...
Execute via:
python -m preprocessing/collect_unigrams
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from model.unigrams import Unigrams
# All capitalized constants come from this file
from config import *
def main():
    """Main function. Gathers all unigrams and name-unigrams, see documentation at the top."""
    # collect all unigrams (all labels, including "O")
    print("Collecting unigrams...")
    ug_all = Unigrams()
    ug_all.fill_from_articles(ARTICLES_FILEPATH, verbose=True)
    ug_all.write_to_file(UNIGRAMS_FILEPATH)
    # Drop the reference so the full table can be garbage-collected before pass 2.
    ug_all = None
    # collect only unigrams of label PER
    print("Collecting person names (label=PER)...")
    ug_names = Unigrams()
    ug_names.fill_from_articles_labels(ARTICLES_FILEPATH, ["PER"], verbose=True)
    ug_names.write_to_file(UNIGRAMS_PERSON_FILEPATH)
    print("Finished.")
# ---------------
if __name__ == "__main__":
main()
|
mit
|
Python
|
9b5fd8dba4885cd0cc2de10f7ff6c8066aee0277
|
Fix possibles issues with pulseaudiowidget
|
Anthony25/barython
|
barython/widgets/audio.py
|
barython/widgets/audio.py
|
#!/usr/bin/env python3
import logging
from .base import SubprocessWidget
from barython.hooks.audio import PulseAudioHook
logger = logging.getLogger("barython")
class PulseAudioWidget(SubprocessWidget):
    """Widget displaying the PulseAudio volume, refreshed via pulseaudio-ctl."""

    def handler(self, event, *args, **kwargs):
        """
        Filter events sent by notifications
        """
        # Only notify if there is something changes in pulseaudio
        event_change_msg = "Event 'change' on destination"
        if event_change_msg in event:
            logger.debug("PA: line \"{}\" catched.".format(event))
            return self.update()

    def organize_result(self, volume, output_mute=None, input_mute=None,
                        *args, **kwargs):
        """
        Override this method to change the infos to print
        """
        return "{}".format(volume)

    def handle_result(self, output=None, *args, **kwargs):
        # As pulseaudio-ctl add events in pactl subscribe, flush output
        try:
            if output != "" and output is not None:
                output = self.organize_result(*output.split())
            super().handle_result(output=output)
        except Exception as e:
            # Bug fix: logging uses %-style lazy formatting, so the previous
            # "{}" placeholder was never interpolated and the exception text
            # was silently dropped from the log message.
            logger.error("Error in PulseAudioWidget: %s", e)

    def __init__(self, cmd=["pulseaudio-ctl", "full-status"],
                 *args, **kwargs):
        # NOTE(review): mutable default argument; safe only if nothing mutates
        # cmd -- confirm SubprocessWidget does not modify it.
        super().__init__(*args, **kwargs, cmd=cmd, infinite=False)
        # Update the widget when PA volume changes
        self.hooks.subscribe(self.handler, PulseAudioHook)
|
#!/usr/bin/env python3
import logging
from .base import SubprocessWidget
from barython.hooks.audio import PulseAudioHook
logger = logging.getLogger("barython")
class PulseAudioWidget(SubprocessWidget):
    # Widget displaying the PulseAudio volume via pulseaudio-ctl.
    def handler(self, event, *args, **kwargs):
        """
        Filter events sent by the notifications
        """
        # Only notify if there is something changes in pulseaudio
        event_change_msg = "Event 'change' on destination"
        if event_change_msg in event:
            logger.debug("PA: line \"{}\" catched.".format(event))
            return self.update()

    def organize_result(self, volume, output_mute=None, input_mute=None,
                        *args, **kwargs):
        """
        Override this method to change the infos to print
        """
        return "{}".format(volume)

    def handle_result(self, output=None, *args, **kwargs):
        # As pulseaudio-ctl add events in pactl subscribe, flush output
        try:
            if output != "" and output is not None:
                output = self.organize_result(*output.split())
            super().handle_result(output=output)
        except Exception as e:
            # NOTE(review): logger.error uses %-style lazy formatting, so the
            # "{}" placeholder is never filled in -- confirm "%s" was intended.
            logger.error("Error in PulseAudioWidget: {}", e)

    def __init__(self, cmd=["pulseaudio-ctl", "full-status"],
                 *args, **kwargs):
        # NOTE(review): positional *args land after cmd here, so callers using
        # positional arguments may have them shifted -- confirm the ordering.
        super().__init__(cmd, infinite=False, *args, **kwargs)
        # Update the widget when PA volume changes
        self.hooks.subscribe(self.handler, PulseAudioHook)
|
bsd-3-clause
|
Python
|
7f113399e4277ecbbfdde41d683c22082f7e19bd
|
Add DOI parsing to identifiers
|
CenterForOpenScience/scrapi,alexgarciac/scrapi,jeffreyliu3230/scrapi,felliott/scrapi,fabianvf/scrapi,mehanig/scrapi,mehanig/scrapi,fabianvf/scrapi,erinspace/scrapi,erinspace/scrapi,felliott/scrapi,CenterForOpenScience/scrapi
|
scrapi/harvesters/smithsonian.py
|
scrapi/harvesters/smithsonian.py
|
'''
Harvester for the Smithsonian Digital Repository for the SHARE project
Example API call: http://repository.si.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
import re
from scrapi.base import helpers
from scrapi.base import OAIHarvester
class SiHarvester(OAIHarvester):
    """OAI-PMH harvester for the Smithsonian Digital Repository."""
    short_name = 'smithsonian'
    long_name = 'Smithsonian Digital Repository'
    url = 'http://repository.si.edu/oai/request'

    @property
    def schema(self):
        # Extend the base OAI schema: also mine dc:identifier values for DOIs.
        return helpers.updated_schema(self._schema, {
            "uris": {
                "objectUris": [('//dc:identifier/node()', get_doi_from_identifier)]
            }
        })

    base_url = 'http://repository.si.edu/oai/request'
    property_list = ['date', 'identifier', 'type', 'format', 'setSpec']
    timezone_granularity = True
def get_doi_from_identifier(identifiers):
    """Return the first DOI found in *identifiers* as a dx.doi.org URL, else None.

    *identifiers* may be a single string or a list of strings.
    """
    # A DOI is "10.<prefix>/<suffix>"; require at least one non-space character
    # on each side of the slash (the old \S* allowed empty prefix/suffix).
    doi_re = re.compile(r'10\.\S+/\S+')
    if not isinstance(identifiers, list):
        identifiers = [identifiers]
    for identifier in identifiers:
        match = doi_re.search(identifier)
        if match:
            return 'http://dx.doi.org/{}'.format(match.group())
    return None
|
'''
Harvester for the Smithsonian Digital Repository for the SHARE project
Example API call: http://repository.si.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class SiHarvester(OAIHarvester):
    # OAI-PMH harvester configuration for the Smithsonian Digital Repository.
    short_name = 'smithsonian'
    long_name = 'Smithsonian Digital Repository'
    url = 'http://repository.si.edu/oai/request'
    base_url = 'http://repository.si.edu/oai/request'
    # Extra OAI record properties to carry through; dates keep timezone info.
    property_list = ['date', 'identifier', 'type', 'format', 'setSpec']
    timezone_granularity = True
|
apache-2.0
|
Python
|
b65a2ee41d16efd1a056727e59c229eb8258070f
|
set deafult DB_host as localhost
|
carljm/django-model-utils,carljm/django-model-utils
|
tests/settings.py
|
tests/settings.py
|
import os
# Apps under test: model_utils itself plus its test app.
INSTALLED_APPS = (
    'model_utils',
    'tests',
)
# PostgreSQL test database; every field can be overridden through DB_* env
# vars, and HOST defaults to localhost for local development.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": os.environ.get("DB_NAME", "modelutils"),
        "USER": os.environ.get("DB_USER", 'postgres'),
        "PASSWORD": os.environ.get("DB_PASSWORD", ""),
        "HOST": os.environ.get("DB_HOST", "localhost"),
        "PORT": os.environ.get("DB_PORT", 5432)
    },
}
SECRET_KEY = 'dummy'  # tests only; never a real secret
# In-memory cache keeps the test run self-contained.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    }
}
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
|
import os
# Apps under test: model_utils itself plus its test app.
INSTALLED_APPS = (
    'model_utils',
    'tests',
)
# PostgreSQL test database, configurable through DB_* env vars.
# NOTE(review): an empty HOST makes psycopg2 use the local Unix socket --
# confirm that is intended for CI.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": os.environ.get("DB_NAME", "modelutils"),
        "USER": os.environ.get("DB_USER", 'postgres'),
        "PASSWORD": os.environ.get("DB_PASSWORD", ""),
        "HOST": os.environ.get("DB_HOST", ""),
        "PORT": os.environ.get("DB_PORT", 5432)
    },
}
SECRET_KEY = 'dummy'  # tests only; never a real secret
# In-memory cache keeps the test run self-contained.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    }
}
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
|
bsd-3-clause
|
Python
|
dcf7af23fa237cd761f1a589e2e268875d296841
|
Test settings updated
|
slasyz/django-precise-bbcode,slasyz/django-precise-bbcode
|
tests/settings.py
|
tests/settings.py
|
# -*- coding: utf-8 -*-
# Standard library imports
import os
# Third party imports
from django.conf import global_settings as default_settings
from django.conf import settings
# Local application / specific library imports
TEST_ROOT = os.path.abspath(os.path.dirname(__file__))
# Minimal Django settings applied when running the precise_bbcode test suite.
TEST_SETTINGS = {
    'DEBUG': False,
    'TEMPLATE_DEBUG': False,
    # In-memory SQLite keeps the test run self-contained.
    'DATABASES': {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': ':memory:'
        }
    },
    'TEMPLATE_CONTEXT_PROCESSORS': default_settings.TEMPLATE_CONTEXT_PROCESSORS,
    'INSTALLED_APPS': (
        'django.contrib.auth',
        'django.contrib.admin',
        'django.contrib.contenttypes',
        'django.contrib.messages',
        'django.contrib.sessions',
        'django.contrib.sites',
        'precise_bbcode',
        'tests',
    ),
    'ROOT_URLCONF': 'tests._testsite.urls',
    'MIDDLEWARE_CLASSES': default_settings.MIDDLEWARE_CLASSES,
    'ADMINS': ('[email protected]',),
    'MEDIA_ROOT': os.path.join(TEST_ROOT, '_testdata/media/'),
    'SITE_ID': 1,
    # Setting this explicitly prevents Django 1.7+ from showing a
    # warning regarding a changed default test runner. The test
    # suite is run with nose, so it does not matter.
    'SILENCED_SYSTEM_CHECKS': ['1_6.W001'],
}
def configure():
    """Apply TEST_SETTINGS to Django's global settings, exactly once."""
    if settings.configured:
        return
    settings.configure(**TEST_SETTINGS)
|
# -*- coding: utf-8 -*-
# Standard library imports
import os
# Third party imports
from django.conf import global_settings as default_settings
from django.conf import settings
# Local application / specific library imports
TEST_ROOT = os.path.abspath(os.path.dirname(__file__))
# Minimal Django settings applied when running the precise_bbcode test suite;
# the database is an in-memory SQLite so the run is self-contained.
TEST_SETTINGS = {
    'DEBUG': False,
    'TEMPLATE_DEBUG': False,
    'DATABASES': {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': ':memory:'
        }
    },
    'TEMPLATE_CONTEXT_PROCESSORS': default_settings.TEMPLATE_CONTEXT_PROCESSORS,
    'INSTALLED_APPS': (
        'django.contrib.auth',
        'django.contrib.admin',
        'django.contrib.contenttypes',
        'django.contrib.messages',
        'django.contrib.sessions',
        'django.contrib.sites',
        'precise_bbcode',
        'tests',
    ),
    'ROOT_URLCONF': 'tests._testsite.urls',
    'MIDDLEWARE_CLASSES': default_settings.MIDDLEWARE_CLASSES,
    'ADMINS': ('[email protected]',),
    'MEDIA_ROOT': os.path.join(TEST_ROOT, '_testdata/media/'),
    'SITE_ID': 1,
}
def configure():
    # Apply TEST_SETTINGS to Django's global settings, but only once.
    if not settings.configured:
        settings.configure(**TEST_SETTINGS)
|
bsd-3-clause
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.