commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
483800541ee66de006392c361e06177bc9db4784
|
kboard/board/urls.py
|
kboard/board/urls.py
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
# Namespace used when reversing these routes, e.g. reverse('board:post_list').
app_name = 'board'

urlpatterns = [
    # Site index: list every board.
    url(r'^$', views.board_list, name='board_list'),
    # Board-level routes, keyed by a lowercase/hyphen slug.
    url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
    url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
    # Post-level routes, keyed by numeric post id.  More specific paths
    # come before the bare detail view so they match first.
    url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
    url(r'^(?P<post_id>\d+)/like/$', views.like_post, name='like_post'),
    url(r'^(?P<post_id>\d+)/edit/$', views.edit_post, name='edit_post'),
    url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
    # Comment routes nested under a post.
    url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
    url(r'^(?P<post_id>\d+)/comment/(?P<comment_id>\d+)/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from django.conf import settings
from django.conf.urls.static import static
from . import views
# Namespace used when reversing these routes, e.g. reverse('board:post_list').
app_name = 'board'

urlpatterns = [
    # Site index: list every board.
    url(r'^$', views.board_list, name='board_list'),
    # Board-level routes, keyed by a lowercase/hyphen slug.
    url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
    url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
    # Post-level routes, keyed by numeric post id.  More specific paths
    # come before the bare detail view so they match first.
    url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
    url(r'^(?P<post_id>\d+)/like/$', views.like_post, name='like_post'),
    url(r'^(?P<post_id>\d+)/edit/$', views.edit_post, name='edit_post'),
    url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
    # Comment routes nested under a post.
    url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
    url(r'^(?P<post_id>\d+)/comment/(?P<comment_id>\d+)/delete/$', views.delete_comment, name='delete_comment'),
    # Serve uploaded files from MEDIA_ROOT during development; Django's
    # static() helper returns an empty pattern list when DEBUG is off.
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Set url to serve uploaded file during development
|
Set url to serve uploaded file during development
|
Python
|
mit
|
guswnsxodlf/k-board,kboard/kboard,cjh5414/kboard,kboard/kboard,hyesun03/k-board,hyesun03/k-board,hyesun03/k-board,guswnsxodlf/k-board,cjh5414/kboard,darjeeling/k-board,cjh5414/kboard,kboard/kboard,guswnsxodlf/k-board
|
e09214068a12768e9aafd04363d353359ca7e1f3
|
src/actions/actions/timetracking/__init__.py
|
src/actions/actions/timetracking/__init__.py
|
#!/usr/bin/python
######################################################################
# Cloud Routes Bridge
# -------------------------------------------------------------------
# Actions Module
######################################################################
import stathat
import time
import syslog
def action(**kwargs):
    """Entry point called to run this reaction.

    Expects ``kwargs['jdata']`` (the monitor's JSON payload), forwards it
    to updateStathat(), and always reports success.
    """
    jdata = kwargs['jdata']
    updateStathat(jdata)
    return True
def updateStathat(jdata):
    """Report the end-to-end transaction time for this check to StatHat
    and note the submission in syslog.

    ``jdata['time_tracking']`` must carry the StatHat ``ez_key``, the
    environment name ``env``, and the start timestamp ``control``.
    """
    tracking = jdata['time_tracking']
    # Elapsed wall-clock time since the monitor stamped the payload.
    elapsed = time.time() - tracking['control']
    stat_name = "[%s] End to End Monitor transaction time" % tracking[
        'env']
    stathat.ez_value(tracking['ez_key'], stat_name, elapsed)
    line = "timetracker: Sent stat to StatHat for %s" % jdata['cid']
    syslog.syslog(syslog.LOG_INFO, line)
|
#!/usr/bin/python
######################################################################
# Cloud Routes Bridge
# -------------------------------------------------------------------
# Actions Module
######################################################################
import stathat
import time
def action(**kwargs):
    """Entry point called to run this reaction.

    Pulls the logger and the JSON payload out of ``kwargs``, hands them
    to updateStathat(), and always reports success.
    """
    updateStathat(kwargs['jdata'], kwargs['logger'])
    return True
def updateStathat(jdata, logger):
    """Report the end-to-end transaction time for this check to StatHat
    and record the submission via the supplied logger.

    ``jdata['time_tracking']`` must carry the StatHat ``ez_key``, the
    environment name ``env``, and the start timestamp ``control``.
    """
    tracking = jdata['time_tracking']
    # Elapsed wall-clock time since the monitor stamped the payload.
    elapsed = time.time() - tracking['control']
    stat_name = "[%s] End to End Monitor transaction time" % tracking[
        'env']
    stathat.ez_value(tracking['ez_key'], stat_name, elapsed)
    line = "timetracker: Sent stat to StatHat for %s" % jdata['cid']
    logger.info(line)
|
Convert reactions syslog to logger: timetracking
|
Convert reactions syslog to logger: timetracking
|
Python
|
unknown
|
dethos/cloudroutes-service,asm-products/cloudroutes-service,rbramwell/runbook,codecakes/cloudroutes-service,codecakes/cloudroutes-service,asm-products/cloudroutes-service,madflojo/cloudroutes-service,Runbook/runbook,codecakes/cloudroutes-service,asm-products/cloudroutes-service,codecakes/cloudroutes-service,madflojo/cloudroutes-service,rbramwell/runbook,madflojo/cloudroutes-service,Runbook/runbook,rbramwell/runbook,rbramwell/runbook,Runbook/runbook,dethos/cloudroutes-service,madflojo/cloudroutes-service,dethos/cloudroutes-service,asm-products/cloudroutes-service,Runbook/runbook,dethos/cloudroutes-service
|
e399c0b1988ed8b2981ddc684a0a3652a73ea31e
|
pavelib/utils/test/utils.py
|
pavelib/utils/test/utils.py
|
"""
Helper functions for test tasks
"""
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
    """
    Remove fixture files left behind by tests, stray .pyc files, and old
    automatic screenshots.
    """
    # Each command is a best-effort cleanup step, run in order via paver's sh().
    for command in (
        "git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads",
        "find . -type f -name \"*.pyc\" -delete",
        "rm -rf test_root/log/auto_screenshots/*",
    ):
        sh(command)
def clean_dir(directory):
    """
    Delete every file beneath ``directory`` while preserving the directory
    tree itself, so coverage.py still has a place to write its reports.
    """
    command = 'find {dir} -type f -delete'.format(dir=directory)
    sh(command)
@task
def clean_reports_dir():
    """
    Empty the report directory so reports are never generated from stale
    coverage data.
    """
    # makedirs_p() guarantees the directory exists before it is emptied.
    clean_dir(Env.REPORT_DIR.makedirs_p())
@task
def clean_mongo():
    """
    Drop the mongo test databases by running the bundled cleanup script.
    """
    command = "mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT)
    sh(command)
|
"""
Helper functions for test tasks
"""
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
    """
    Remove fixture files left behind by tests, stray .pyc files, old
    automatic screenshots, and the mako template cache directories.
    """
    # Each command is a best-effort cleanup step, run in order via paver's sh().
    for command in (
        "git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads",
        "find . -type f -name \"*.pyc\" -delete",
        "rm -rf test_root/log/auto_screenshots/*",
        # [cl]ms matches both the /tmp/mako_cms and /tmp/mako_lms cache dirs.
        "rm -rf /tmp/mako_[cl]ms",
    ):
        sh(command)
def clean_dir(directory):
    """
    Delete every file beneath ``directory`` while preserving the directory
    tree itself, so coverage.py still has a place to write its reports.
    """
    command = 'find {dir} -type f -delete'.format(dir=directory)
    sh(command)
@task
def clean_reports_dir():
    """
    Empty the report directory so reports are never generated from stale
    coverage data.
    """
    # makedirs_p() guarantees the directory exists before it is emptied.
    clean_dir(Env.REPORT_DIR.makedirs_p())
@task
def clean_mongo():
    """
    Drop the mongo test databases by running the bundled cleanup script.
    """
    command = "mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT)
    sh(command)
|
Clean out the mako temp dirs before running tests
|
Clean out the mako temp dirs before running tests
|
Python
|
agpl-3.0
|
zofuthan/edx-platform,eemirtekin/edx-platform,TeachAtTUM/edx-platform,synergeticsedx/deployment-wipro,doismellburning/edx-platform,OmarIthawi/edx-platform,jolyonb/edx-platform,appliedx/edx-platform,philanthropy-u/edx-platform,msegado/edx-platform,pepeportela/edx-platform,JCBarahona/edX,lduarte1991/edx-platform,jamesblunt/edx-platform,vasyarv/edx-platform,valtech-mooc/edx-platform,jonathan-beard/edx-platform,beacloudgenius/edx-platform,DefyVentures/edx-platform,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,zubair-arbi/edx-platform,ampax/edx-platform-backup,chauhanhardik/populo,ovnicraft/edx-platform,synergeticsedx/deployment-wipro,antoviaque/edx-platform,Lektorium-LLC/edx-platform,marcore/edx-platform,Endika/edx-platform,vikas1885/test1,unicri/edx-platform,Edraak/edraak-platform,Endika/edx-platform,ampax/edx-platform,motion2015/edx-platform,EDUlib/edx-platform,mtlchun/edx,shabab12/edx-platform,ESOedX/edx-platform,chrisndodge/edx-platform,antonve/s4-project-mooc,CredoReference/edx-platform,dkarakats/edx-platform,cecep-edu/edx-platform,polimediaupv/edx-platform,chauhanhardik/populo,4eek/edx-platform,wwj718/ANALYSE,ahmadiga/min_edx,appsembler/edx-platform,antonve/s4-project-mooc,gsehub/edx-platform,jazkarta/edx-platform-for-isc,xuxiao19910803/edx,ahmedaljazzar/edx-platform,dkarakats/edx-platform,LearnEra/LearnEraPlaftform,kursitet/edx-platform,shubhdev/edxOnBaadal,dsajkl/123,defance/edx-platform,angelapper/edx-platform,edx/edx-platform,devs1991/test_edx_docmode,jamiefolsom/edx-platform,jzoldak/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress,xuxiao19910803/edx-platform,franosincic/edx-platform,DefyVentures/edx-platform,ferabra/edx-platform,cselis86/edx-platform,hastexo/edx-platform,UXE/local-edx,mjirayu/sit_academy,valtech-mooc/edx-platform,dsajkl/reqiop,cecep-edu/edx-platform,marcore/edx-platform,hamzehd/edx-platform,atsolakid/edx-platform,bdero/edx-platform,mitocw/edx-platform,philanthropy-u/edx-platform,SivilTaram/edx-platform
,playm2mboy/edx-platform,zadgroup/edx-platform,cyanna/edx-platform,cyanna/edx-platform,olexiim/edx-platform,don-github/edx-platform,Stanford-Online/edx-platform,xinjiguaike/edx-platform,fintech-circle/edx-platform,benpatterson/edx-platform,adoosii/edx-platform,xuxiao19910803/edx-platform,doismellburning/edx-platform,ovnicraft/edx-platform,motion2015/a3,naresh21/synergetics-edx-platform,tiagochiavericosta/edx-platform,shurihell/testasia,dcosentino/edx-platform,AkA84/edx-platform,ubc/edx-platform,andyzsf/edx,gsehub/edx-platform,xinjiguaike/edx-platform,ubc/edx-platform,JCBarahona/edX,ampax/edx-platform,4eek/edx-platform,TeachAtTUM/edx-platform,Edraak/edx-platform,andyzsf/edx,edx/edx-platform,cognitiveclass/edx-platform,EDUlib/edx-platform,AkA84/edx-platform,olexiim/edx-platform,cognitiveclass/edx-platform,mahendra-r/edx-platform,franosincic/edx-platform,miptliot/edx-platform,jruiperezv/ANALYSE,jolyonb/edx-platform,vikas1885/test1,Lektorium-LLC/edx-platform,simbs/edx-platform,BehavioralInsightsTeam/edx-platform,jamesblunt/edx-platform,Softmotions/edx-platform,amir-qayyum-khan/edx-platform,unicri/edx-platform,waheedahmed/edx-platform,xinjiguaike/edx-platform,rhndg/openedx,arifsetiawan/edx-platform,etzhou/edx-platform,xinjiguaike/edx-platform,MakeHer/edx-platform,gsehub/edx-platform,jelugbo/tundex,jazkarta/edx-platform-for-isc,ovnicraft/edx-platform,hamzehd/edx-platform,xingyepei/edx-platform,ferabra/edx-platform,Kalyzee/edx-platform,LearnEra/LearnEraPlaftform,procangroup/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,kxliugang/edx-platform,jazztpt/edx-platform,dsajkl/123,wwj718/edx-platform,Kalyzee/edx-platform,ahmadio/edx-platform,J861449197/edx-platform,jazztpt/edx-platform,Edraak/circleci-edx-platform,utecuy/edx-platform,zerobatu/edx-platform,beni55/edx-platform,knehez/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,bigdatauniversity/edx-platform,kursitet/edx-platform,rhndg/openedx,pabloborrego93/edx-platform,simbs/
edx-platform,alexthered/kienhoc-platform,cpennington/edx-platform,gymnasium/edx-platform,zadgroup/edx-platform,fly19890211/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,shurihell/testasia,Shrhawk/edx-platform,zubair-arbi/edx-platform,LearnEra/LearnEraPlaftform,alu042/edx-platform,chand3040/cloud_that,ahmedaljazzar/edx-platform,ferabra/edx-platform,ubc/edx-platform,shubhdev/edx-platform,fintech-circle/edx-platform,zerobatu/edx-platform,waheedahmed/edx-platform,jonathan-beard/edx-platform,shabab12/edx-platform,ak2703/edx-platform,nanolearningllc/edx-platform-cypress-2,jzoldak/edx-platform,franosincic/edx-platform,proversity-org/edx-platform,UOMx/edx-platform,defance/edx-platform,leansoft/edx-platform,Edraak/circleci-edx-platform,cselis86/edx-platform,DNFcode/edx-platform,mitocw/edx-platform,SravanthiSinha/edx-platform,motion2015/a3,devs1991/test_edx_docmode,synergeticsedx/deployment-wipro,synergeticsedx/deployment-wipro,motion2015/edx-platform,chauhanhardik/populo_2,xingyepei/edx-platform,beni55/edx-platform,nttks/jenkins-test,DefyVentures/edx-platform,knehez/edx-platform,rismalrv/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo_2,solashirai/edx-platform,ESOedX/edx-platform,etzhou/edx-platform,doismellburning/edx-platform,jamiefolsom/edx-platform,halvertoluke/edx-platform,peterm-itr/edx-platform,shubhdev/openedx,marcore/edx-platform,inares/edx-platform,vismartltd/edx-platform,chauhanhardik/populo,jamesblunt/edx-platform,JCBarahona/edX,bdero/edx-platform,lduarte1991/edx-platform,openfun/edx-platform,ahmedaljazzar/edx-platform,arbrandes/edx-platform,a-parhom/edx-platform,Semi-global/edx-platform,B-MOOC/edx-platform,shubhdev/openedx,chrisndodge/edx-platform,4eek/edx-platform,SivilTaram/edx-platform,jbassen/edx-platform,kamalx/edx-platform,longmen21/edx-platform,CredoReference/edx-platform,waheedahmed/edx-platform,doganov/edx-platform,ZLLab-Mooc/edx-platform,jbzdak/edx-platform,openfun/edx-platform,jazkarta/edx-platform,philanthropy-u/ed
x-platform,adoosii/edx-platform,proversity-org/edx-platform,zofuthan/edx-platform,bitifirefly/edx-platform,OmarIthawi/edx-platform,eemirtekin/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,jamiefolsom/edx-platform,shubhdev/edxOnBaadal,eduNEXT/edunext-platform,chand3040/cloud_that,shabab12/edx-platform,fly19890211/edx-platform,Ayub-Khan/edx-platform,mahendra-r/edx-platform,ahmadio/edx-platform,deepsrijit1105/edx-platform,inares/edx-platform,shurihell/testasia,eemirtekin/edx-platform,kamalx/edx-platform,kmoocdev/edx-platform,etzhou/edx-platform,don-github/edx-platform,amir-qayyum-khan/edx-platform,mcgachey/edx-platform,beni55/edx-platform,DefyVentures/edx-platform,chudaol/edx-platform,sameetb-cuelogic/edx-platform-test,polimediaupv/edx-platform,RPI-OPENEDX/edx-platform,nanolearningllc/edx-platform-cypress-2,cselis86/edx-platform,caesar2164/edx-platform,leansoft/edx-platform,longmen21/edx-platform,itsjeyd/edx-platform,franosincic/edx-platform,prarthitm/edxplatform,jamiefolsom/edx-platform,mushtaqak/edx-platform,jswope00/griffinx,ahmadiga/min_edx,cyanna/edx-platform,appsembler/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,ampax/edx-platform,iivic/BoiseStateX,polimediaupv/edx-platform,nttks/edx-platform,edx-solutions/edx-platform,DNFcode/edx-platform,stvstnfrd/edx-platform,jelugbo/tundex,motion2015/edx-platform,nttks/jenkins-test,jazkarta/edx-platform,IndonesiaX/edx-platform,vasyarv/edx-platform,Endika/edx-platform,philanthropy-u/edx-platform,10clouds/edx-platform,jruiperezv/ANALYSE,Kalyzee/edx-platform,ak2703/edx-platform,fly19890211/edx-platform,fly19890211/edx-platform,Shrhawk/edx-platform,beni55/edx-platform,nikolas/edx-platform,alexthered/kienhoc-platform,MSOpenTech/edx-platform,appliedx/edx-platform,waheedahmed/edx-platform,openfun/edx-platform,marcore/edx-platform,Lektorium-LLC/edx-platform,edx-solutions/edx-platform,y12uc231/edx-platform,wwj718/edx-platform,don-github/edx-platform,eemirtekin/edx-platform,deepsrijit1105/edx-platform,mushtaqa
k/edx-platform,a-parhom/edx-platform,shashank971/edx-platform,vikas1885/test1,martynovp/edx-platform,zerobatu/edx-platform,Ayub-Khan/edx-platform,chand3040/cloud_that,shubhdev/edx-platform,tiagochiavericosta/edx-platform,MakeHer/edx-platform,vasyarv/edx-platform,jzoldak/edx-platform,Edraak/circleci-edx-platform,analyseuc3m/ANALYSE-v1,kxliugang/edx-platform,Endika/edx-platform,jonathan-beard/edx-platform,Shrhawk/edx-platform,TeachAtTUM/edx-platform,antonve/s4-project-mooc,ahmadio/edx-platform,chudaol/edx-platform,don-github/edx-platform,andyzsf/edx,romain-li/edx-platform,iivic/BoiseStateX,hastexo/edx-platform,unicri/edx-platform,stvstnfrd/edx-platform,dsajkl/reqiop,chand3040/cloud_that,utecuy/edx-platform,shashank971/edx-platform,shashank971/edx-platform,JioEducation/edx-platform,SivilTaram/edx-platform,zhenzhai/edx-platform,benpatterson/edx-platform,ovnicraft/edx-platform,bigdatauniversity/edx-platform,nanolearningllc/edx-platform-cypress-2,itsjeyd/edx-platform,chrisndodge/edx-platform,Stanford-Online/edx-platform,alu042/edx-platform,Kalyzee/edx-platform,prarthitm/edxplatform,10clouds/edx-platform,xingyepei/edx-platform,nanolearningllc/edx-platform-cypress,jbzdak/edx-platform,LearnEra/LearnEraPlaftform,mahendra-r/edx-platform,xuxiao19910803/edx,utecuy/edx-platform,cyanna/edx-platform,UXE/local-edx,ahmadiga/min_edx,pomegranited/edx-platform,peterm-itr/edx-platform,gymnasium/edx-platform,adoosii/edx-platform,mcgachey/edx-platform,tanmaykm/edx-platform,zerobatu/edx-platform,ahmadiga/min_edx,louyihua/edx-platform,franosincic/edx-platform,gymnasium/edx-platform,jazkarta/edx-platform-for-isc,arifsetiawan/edx-platform,xuxiao19910803/edx,doganov/edx-platform,IONISx/edx-platform,eestay/edx-platform,angelapper/edx-platform,solashirai/edx-platform,tiagochiavericosta/edx-platform,zofuthan/edx-platform,jazkarta/edx-platform-for-isc,Edraak/edx-platform,openfun/edx-platform,tanmaykm/edx-platform,solashirai/edx-platform,ESOedX/edx-platform,knehez/edx-platform,xuxiao19910803/edx,UOM
x/edx-platform,a-parhom/edx-platform,playm2mboy/edx-platform,edx-solutions/edx-platform,gsehub/edx-platform,cognitiveclass/edx-platform,JioEducation/edx-platform,itsjeyd/edx-platform,wwj718/edx-platform,Stanford-Online/edx-platform,defance/edx-platform,JioEducation/edx-platform,doganov/edx-platform,mitocw/edx-platform,hastexo/edx-platform,kxliugang/edx-platform,dsajkl/reqiop,zhenzhai/edx-platform,zofuthan/edx-platform,Semi-global/edx-platform,MSOpenTech/edx-platform,olexiim/edx-platform,msegado/edx-platform,simbs/edx-platform,cpennington/edx-platform,mjirayu/sit_academy,vikas1885/test1,jjmiranda/edx-platform,UOMx/edx-platform,tanmaykm/edx-platform,jbzdak/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,longmen21/edx-platform,zhenzhai/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,Kalyzee/edx-platform,ovnicraft/edx-platform,sudheerchintala/LearnEraPlatForm,jazztpt/edx-platform,beacloudgenius/edx-platform,Semi-global/edx-platform,4eek/edx-platform,bigdatauniversity/edx-platform,motion2015/a3,Livit/Livit.Learn.EdX,J861449197/edx-platform,xuxiao19910803/edx-platform,wwj718/ANALYSE,ahmadio/edx-platform,Ayub-Khan/edx-platform,OmarIthawi/edx-platform,xuxiao19910803/edx,mcgachey/edx-platform,shubhdev/openedx,playm2mboy/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,fintech-circle/edx-platform,nttks/jenkins-test,DefyVentures/edx-platform,mjirayu/sit_academy,longmen21/edx-platform,rue89-tech/edx-platform,J861449197/edx-platform,J861449197/edx-platform,nanolearningllc/edx-platform-cypress,chand3040/cloud_that,peterm-itr/edx-platform,shashank971/edx-platform,bigdatauniversity/edx-platform,Softmotions/edx-platform,rue89-tech/edx-platform,atsolakid/edx-platform,raccoongang/edx-platform,shubhdev/edxOnBaadal,vismartltd/edx-platform,knehez/edx-platform,RPI-OPENEDX/edx-platform,mtlchun/edx,antonve/s4-project-mooc,martynovp/edx-platform,analyseuc3m/ANALYSE-v1,IndonesiaX/edx-platform,ESOedX/edx-platform,kamalx/edx-platform,kmoocdev2/edx-platform,alexthere
d/kienhoc-platform,CredoReference/edx-platform,bigdatauniversity/edx-platform,angelapper/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,RPI-OPENEDX/edx-platform,xuxiao19910803/edx-platform,bitifirefly/edx-platform,B-MOOC/edx-platform,utecuy/edx-platform,kamalx/edx-platform,leansoft/edx-platform,louyihua/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,jbassen/edx-platform,rismalrv/edx-platform,y12uc231/edx-platform,ampax/edx-platform,DNFcode/edx-platform,shubhdev/openedx,jruiperezv/ANALYSE,rue89-tech/edx-platform,mtlchun/edx,doismellburning/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,procangroup/edx-platform,chauhanhardik/populo_2,pabloborrego93/edx-platform,stvstnfrd/edx-platform,edry/edx-platform,chauhanhardik/populo_2,teltek/edx-platform,xingyepei/edx-platform,leansoft/edx-platform,mbareta/edx-platform-ft,kxliugang/edx-platform,zadgroup/edx-platform,caesar2164/edx-platform,analyseuc3m/ANALYSE-v1,IONISx/edx-platform,kmoocdev/edx-platform,jazkarta/edx-platform,shubhdev/edx-platform,beacloudgenius/edx-platform,Semi-global/edx-platform,vasyarv/edx-platform,polimediaupv/edx-platform,shubhdev/edx-platform,Softmotions/edx-platform,nikolas/edx-platform,playm2mboy/edx-platform,dcosentino/edx-platform,kmoocdev2/edx-platform,vikas1885/test1,tanmaykm/edx-platform,motion2015/edx-platform,SravanthiSinha/edx-platform,4eek/edx-platform,dsajkl/reqiop,jswope00/griffinx,nagyistoce/edx-platform,motion2015/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,BehavioralInsightsTeam/edx-platform,valtech-mooc/edx-platform,rismalrv/edx-platform,halvertoluke/edx-platform,antoviaque/edx-platform,shurihell/testasia,devs1991/test_edx_docmode,edx/edx-platform,Livit/Livit.Learn.EdX,lduarte1991/edx-platform,shubhdev/edxOnBaadal,cyanna/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,iivic/BoiseStateX,nikolas/edx-platform,jonathan-beard/edx-platform,ampax/edx-platform-backup,Shrhawk/edx-platform,inares/edx-platform,jazkarta/edx-platform-for-
isc,devs1991/test_edx_docmode,ak2703/edx-platform,UXE/local-edx,nttks/edx-platform,JCBarahona/edX,cecep-edu/edx-platform,mjirayu/sit_academy,nikolas/edx-platform,jazkarta/edx-platform,motion2015/a3,mitocw/edx-platform,Softmotions/edx-platform,kmoocdev2/edx-platform,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,cselis86/edx-platform,solashirai/edx-platform,inares/edx-platform,beacloudgenius/edx-platform,Ayub-Khan/edx-platform,hamzehd/edx-platform,shubhdev/edx-platform,jamesblunt/edx-platform,atsolakid/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,CourseTalk/edx-platform,edry/edx-platform,edry/edx-platform,iivic/BoiseStateX,pomegranited/edx-platform,nttks/edx-platform,IndonesiaX/edx-platform,tiagochiavericosta/edx-platform,msegado/edx-platform,jbassen/edx-platform,dkarakats/edx-platform,JCBarahona/edX,DNFcode/edx-platform,10clouds/edx-platform,martynovp/edx-platform,ubc/edx-platform,appliedx/edx-platform,jruiperezv/ANALYSE,beni55/edx-platform,mcgachey/edx-platform,amir-qayyum-khan/edx-platform,sudheerchintala/LearnEraPlatForm,wwj718/edx-platform,edx-solutions/edx-platform,proversity-org/edx-platform,angelapper/edx-platform,pabloborrego93/edx-platform,arifsetiawan/edx-platform,zubair-arbi/edx-platform,pomegranited/edx-platform,ampax/edx-platform-backup,jelugbo/tundex,prarthitm/edxplatform,AkA84/edx-platform,dcosentino/edx-platform,jruiperezv/ANALYSE,romain-li/edx-platform,eestay/edx-platform,UXE/local-edx,leansoft/edx-platform,RPI-OPENEDX/edx-platform,TeachAtTUM/edx-platform,B-MOOC/edx-platform,tiagochiavericosta/edx-platform,jjmiranda/edx-platform,devs1991/test_edx_docmode,dsajkl/123,rhndg/openedx,OmarIthawi/edx-platform,prarthitm/edxplatform,jamiefolsom/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edraak-platform,jswope00/griffinx,xingyepei/edx-platform,utecuy/edx-platform,mushtaqak/edx-platform,jbassen/edx-platform,louyihua/edx-platform,mbareta/edx-platform-ft,eduNEXT/edx-platform,jonathan-beard/edx-platform,ubc/edx-platform,JioEducation/
edx-platform,cecep-edu/edx-platform,nagyistoce/edx-platform,vasyarv/edx-platform,bitifirefly/edx-platform,shubhdev/edxOnBaadal,ahmadio/edx-platform,SravanthiSinha/edx-platform,nanolearningllc/edx-platform-cypress-2,eestay/edx-platform,adoosii/edx-platform,mjirayu/sit_academy,mushtaqak/edx-platform,antonve/s4-project-mooc,a-parhom/edx-platform,bitifirefly/edx-platform,IONISx/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,bdero/edx-platform,shubhdev/openedx,fly19890211/edx-platform,jbassen/edx-platform,alu042/edx-platform,naresh21/synergetics-edx-platform,Edraak/circleci-edx-platform,sudheerchintala/LearnEraPlatForm,dcosentino/edx-platform,unicri/edx-platform,alexthered/kienhoc-platform,knehez/edx-platform,nikolas/edx-platform,rue89-tech/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,pepeportela/edx-platform,rue89-tech/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,inares/edx-platform,Edraak/edraak-platform,sameetb-cuelogic/edx-platform-test,eduNEXT/edunext-platform,defance/edx-platform,procangroup/edx-platform,MakeHer/edx-platform,dsajkl/123,Edraak/edraak-platform,kmoocdev/edx-platform,itsjeyd/edx-platform,vismartltd/edx-platform,eduNEXT/edunext-platform,pepeportela/edx-platform,SravanthiSinha/edx-platform,kmoocdev/edx-platform,ahmadiga/min_edx,Edraak/edx-platform,edx/edx-platform,zerobatu/edx-platform,J861449197/edx-platform,arbrandes/edx-platform,nagyistoce/edx-platform,shurihell/testasia,y12uc231/edx-platform,kursitet/edx-platform,mahendra-r/edx-platform,procangroup/edx-platform,fintech-circle/edx-platform,zadgroup/edx-platform,DNFcode/edx-platform,halvertoluke/edx-platform,xinjiguaike/edx-platform,jazkarta/edx-platform,iivic/BoiseStateX,pomegranited/edx-platform,ZLLab-Mooc/edx-platform,y12uc231/edx-platform,martynovp/edx-platform,dkarakats/edx-platform,kxliugang/edx-platform,Edraak/circleci-edx-platform,wwj718/ANALYSE,nttks/jenkins-test,mbareta/edx-platform-ft,andyzsf/edx,MakeHer/edx-platform,chauhanhardik/popu
lo,dsajkl/123,ampax/edx-platform-backup,zhenzhai/edx-platform,benpatterson/edx-platform,miptliot/edx-platform,jelugbo/tundex,beacloudgenius/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,B-MOOC/edx-platform,nttks/jenkins-test,teltek/edx-platform,UOMx/edx-platform,arifsetiawan/edx-platform,jelugbo/tundex,AkA84/edx-platform,jjmiranda/edx-platform,arifsetiawan/edx-platform,msegado/edx-platform,jbzdak/edx-platform,kamalx/edx-platform,BehavioralInsightsTeam/edx-platform,mbareta/edx-platform-ft,etzhou/edx-platform,kmoocdev2/edx-platform,nttks/edx-platform,halvertoluke/edx-platform,naresh21/synergetics-edx-platform,jolyonb/edx-platform,MSOpenTech/edx-platform,mahendra-r/edx-platform,ZLLab-Mooc/edx-platform,devs1991/test_edx_docmode,arbrandes/edx-platform,cselis86/edx-platform,IndonesiaX/edx-platform,B-MOOC/edx-platform,chudaol/edx-platform,Ayub-Khan/edx-platform,hastexo/edx-platform,Semi-global/edx-platform,Shrhawk/edx-platform,jamesblunt/edx-platform,EDUlib/edx-platform,nanolearningllc/edx-platform-cypress,devs1991/test_edx_docmode,chudaol/edx-platform,halvertoluke/edx-platform,CourseTalk/edx-platform,mtlchun/edx,MSOpenTech/edx-platform,jbzdak/edx-platform,kmoocdev/edx-platform,shashank971/edx-platform,jazztpt/edx-platform,jjmiranda/edx-platform,ferabra/edx-platform,romain-li/edx-platform,jswope00/griffinx,wwj718/edx-platform,playm2mboy/edx-platform,antoviaque/edx-platform,polimediaupv/edx-platform,zubair-arbi/edx-platform,naresh21/synergetics-edx-platform,ak2703/edx-platform,zofuthan/edx-platform,Livit/Livit.Learn.EdX,AkA84/edx-platform,simbs/edx-platform,olexiim/edx-platform,vismartltd/edx-platform,mtlchun/edx,valtech-mooc/edx-platform,rhndg/openedx,alu042/edx-platform,romain-li/edx-platform,rhndg/openedx,eduNEXT/edx-platform,teltek/edx-platform,pomegranited/edx-platform,vismartltd/edx-platform,amir-qayyum-khan/edx-platform,edry/edx-platform,raccoongang/edx-platform,eestay/edx-platform,appliedx/edx-platform,solashirai/edx-platform,bdero/edx-platform,ferab
ra/edx-platform,louyihua/edx-platform,CourseTalk/edx-platform,wwj718/ANALYSE,kursitet/edx-platform,chrisndodge/edx-platform,cecep-edu/edx-platform,pabloborrego93/edx-platform,dcosentino/edx-platform,kursitet/edx-platform,caesar2164/edx-platform,etzhou/edx-platform,MSOpenTech/edx-platform,deepsrijit1105/edx-platform,10clouds/edx-platform,cognitiveclass/edx-platform,SivilTaram/edx-platform,deepsrijit1105/edx-platform,eemirtekin/edx-platform,jazztpt/edx-platform,motion2015/a3,eestay/edx-platform,pepeportela/edx-platform,ampax/edx-platform-backup,appliedx/edx-platform,alexthered/kienhoc-platform,jzoldak/edx-platform,RPI-OPENEDX/edx-platform,IONISx/edx-platform,CourseTalk/edx-platform,analyseuc3m/ANALYSE-v1,chudaol/edx-platform,nttks/edx-platform,Livit/Livit.Learn.EdX,waheedahmed/edx-platform,Lektorium-LLC/edx-platform,cognitiveclass/edx-platform,doganov/edx-platform,MakeHer/edx-platform,ZLLab-Mooc/edx-platform,nagyistoce/edx-platform,shabab12/edx-platform,atsolakid/edx-platform,Edraak/edx-platform,zhenzhai/edx-platform,cpennington/edx-platform,peterm-itr/edx-platform,Softmotions/edx-platform,openfun/edx-platform,dkarakats/edx-platform,arbrandes/edx-platform,IndonesiaX/edx-platform,jswope00/griffinx,ak2703/edx-platform,simbs/edx-platform,IONISx/edx-platform,Stanford-Online/edx-platform,valtech-mooc/edx-platform,BehavioralInsightsTeam/edx-platform,teltek/edx-platform,mushtaqak/edx-platform,don-github/edx-platform,zadgroup/edx-platform,stvstnfrd/edx-platform,unicri/edx-platform,bitifirefly/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,xuxiao19910803/edx-platform,doganov/edx-platform,SivilTaram/edx-platform,nagyistoce/edx-platform,mcgachey/edx-platform,sudheerchintala/LearnEraPlatForm,raccoongang/edx-platform,olexiim/edx-platform,chauhanhardik/populo_2
|
f931a434839222bb00282a432d6d6a0c2c52eb7d
|
numpy/array_api/_typing.py
|
numpy/array_api/_typing.py
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
# Public names exported by this typing helper module.
__all__ = [
    "Array",
    "Device",
    "Dtype",
    "SupportsDLPack",
    "SupportsBufferProtocol",
    "PyCapsule",
]

import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar

from ._array_object import Array
from numpy import (
    dtype,
    int8,
    int16,
    int32,
    int64,
    uint8,
    uint16,
    uint32,
    uint64,
    float32,
    float64,
)

# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
# Approximation of "sequence nested to arbitrary depth": only two levels deep.
NestedSequence = Sequence[Sequence[_T]]

# Only a CPU device is defined here.
Device = Literal["cpu"]

# ``dtype`` is only subscriptable at runtime on Python 3.9+, so fall back to
# the plain (unparameterized) class on older interpreters; type checkers
# always see the precise parameterized form.
if TYPE_CHECKING or sys.version_info >= (3, 9):
    Dtype = dtype[Union[
        int8,
        int16,
        int32,
        int64,
        uint8,
        uint16,
        uint32,
        uint64,
        float32,
        float64,
    ]]
else:
    Dtype = dtype

# Placeholder aliases: no precise protocol is spelled out for these yet.
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
# Postponed evaluation of annotations: allows the ``X | Y`` union syntax and
# the forward reference to NestedSequence inside its own method signatures.
from __future__ import annotations

# Public names exported by this typing helper module.
__all__ = [
    "Array",
    "Device",
    "Dtype",
    "SupportsDLPack",
    "SupportsBufferProtocol",
    "PyCapsule",
]

import sys
from typing import (
    Any,
    Literal,
    Sequence,
    Type,
    Union,
    TYPE_CHECKING,
    TypeVar,
    Protocol,
)

from ._array_object import Array
from numpy import (
    dtype,
    int8,
    int16,
    int32,
    int64,
    uint8,
    uint16,
    uint32,
    uint64,
    float32,
    float64,
)

# Covariant element type used by the NestedSequence protocol.
_T_co = TypeVar("_T_co", covariant=True)
class NestedSequence(Protocol[_T_co]):
    """Structural type for a sequence nested to arbitrary depth.

    Any object that supports integer indexing — yielding either an element
    or a further nested sequence — and reports a length matches this
    protocol.
    """

    def __getitem__(self, key: int, /) -> _T_co | NestedSequence[_T_co]: ...

    def __len__(self, /) -> int: ...
# Only a CPU device is defined here.
Device = Literal["cpu"]

# ``dtype`` is only subscriptable at runtime on Python 3.9+, so fall back to
# the plain (unparameterized) class on older interpreters; type checkers
# always see the precise parameterized form.
if TYPE_CHECKING or sys.version_info >= (3, 9):
    Dtype = dtype[Union[
        int8,
        int16,
        int32,
        int64,
        uint8,
        uint16,
        uint32,
        uint64,
        float32,
        float64,
    ]]
else:
    Dtype = dtype

# Placeholder aliases: no precise protocol is spelled out for these yet.
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Replace `NestedSequence` with a proper nested sequence protocol
|
ENH: Replace `NestedSequence` with a proper nested sequence protocol
|
Python
|
bsd-3-clause
|
numpy/numpy,endolith/numpy,charris/numpy,rgommers/numpy,numpy/numpy,jakirkham/numpy,seberg/numpy,mattip/numpy,jakirkham/numpy,endolith/numpy,rgommers/numpy,mattip/numpy,mattip/numpy,seberg/numpy,pdebuyl/numpy,pdebuyl/numpy,endolith/numpy,endolith/numpy,mattip/numpy,jakirkham/numpy,charris/numpy,seberg/numpy,mhvk/numpy,pdebuyl/numpy,rgommers/numpy,mhvk/numpy,numpy/numpy,jakirkham/numpy,mhvk/numpy,mhvk/numpy,charris/numpy,anntzer/numpy,pdebuyl/numpy,charris/numpy,jakirkham/numpy,numpy/numpy,rgommers/numpy,anntzer/numpy,anntzer/numpy,anntzer/numpy,seberg/numpy,mhvk/numpy
|
718a04f14f3ede084a2d9391e187b4d943463c6f
|
yanico/session/__init__.py
|
yanico/session/__init__.py
|
"""Handle nicovideo.jp user_session."""
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class UserSessionNotFoundError(Exception):
"""Firefox profile exists, buf user_session is not found."""
|
"""Handle nicovideo.jp user_session."""
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class UserSessionNotFoundError(Exception):
"""Profile exists, but user_session is not found."""
|
Fix a typo of class docstring
|
Fix a typo of class docstring
|
Python
|
apache-2.0
|
ma8ma/yanico
|
889eed552f4e17797764a9d9a2da6bbaa6d5dd33
|
admin_panel/views.py
|
admin_panel/views.py
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return "/administration/panel"
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
from django.urls import reverse
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return reverse("admin:Panel")
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
Use django reverse function to obtain url instead of hard-coding
|
Use django reverse function to obtain url instead of hard-coding
|
Python
|
mpl-2.0
|
Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog
|
bb732b97536bd1bec605c96440ce1f18d5edb59a
|
features/steps/install_steps.py
|
features/steps/install_steps.py
|
from behave import given
from captainhook.checkers.utils import bash
@given('I have installed captainhook')
def step_impl(context):
bash('captainhook')
|
from behave import given
from captainhook.checkers.utils import bash
@given('I have installed captainhook')
def step_impl(context):
bash('captainhook install')
|
Fix behave tests to use install command.
|
Fix behave tests to use install command.
|
Python
|
bsd-3-clause
|
Friz-zy/captainhook,alexcouper/captainhook,pczerkas/captainhook
|
0f35ed05d335e7c126675bc913b72aac3ac916df
|
project/apps/api/signals.py
|
project/apps/api/signals.py
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from django.conf import settings
from .models import (
Contest,
)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def user_post_save(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
@receiver(post_save, sender=Contest)
def contest_post_save(sender, instance=None, created=False, **kwargs):
if created:
instance.build()
instance.save()
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from django.conf import settings
from .models import (
Contest,
)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def user_post_save(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
@receiver(post_save, sender=Contest)
def contest_post_save(sender, instance=None, created=False, raw=False, **kwargs):
if not raw:
if created:
instance.build()
instance.save()
|
Add check for fixture loading
|
Add check for fixture loading
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api
|
f277007e46b7c6d8c978011d7356b7527ba91133
|
axes/utils.py
|
axes/utils.py
|
from axes.models import AccessAttempt
def reset(ip=None, username=None):
"""Reset records that match ip or username, and
return the count of removed attempts.
"""
count = 0
attempts = AccessAttempt.objects.all()
if ip:
attempts = attempts.filter(ip_address=ip)
if username:
attempts = attempts.filter(username=username)
if attempts:
count = attempts.count()
attempts.delete()
return count
def iso8601(timestamp):
"""Returns datetime.timedelta translated to ISO 8601 formatted duration.
"""
seconds = timestamp.total_seconds()
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
date = '{:.0f}D'.format(days) if days else ''
time_values = hours, minutes, seconds
time_designators = 'H', 'M', 'S'
time = ''.join([
('{:.0f}'.format(value) + designator)
for value, designator in zip(time_values, time_designators)
if value]
)
return u'P' + date + (u'T' + time if time else '')
|
from django.core.cache import cache
from axes.models import AccessAttempt
def reset(ip=None, username=None):
"""Reset records that match ip or username, and
return the count of removed attempts.
"""
count = 0
attempts = AccessAttempt.objects.all()
if ip:
attempts = attempts.filter(ip_address=ip)
if username:
attempts = attempts.filter(username=username)
if attempts:
count = attempts.count()
from axes.decorators import get_cache_key
for attempt in attempts:
cache_hash_key = get_cache_key(attempt)
if cache.get(cache_hash_key):
cache.delete(cache_hash_key)
attempts.delete()
return count
def iso8601(timestamp):
"""Returns datetime.timedelta translated to ISO 8601 formatted duration.
"""
seconds = timestamp.total_seconds()
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
date = '{:.0f}D'.format(days) if days else ''
time_values = hours, minutes, seconds
time_designators = 'H', 'M', 'S'
time = ''.join([
('{:.0f}'.format(value) + designator)
for value, designator in zip(time_values, time_designators)
if value]
)
return u'P' + date + (u'T' + time if time else '')
|
Delete cache key in reset command line
|
Delete cache key in reset command line
|
Python
|
mit
|
jazzband/django-axes,django-pci/django-axes
|
a3c131776678b8e91e1179cd0f3c3b4b3fbbf6fb
|
openid_provider/tests/test_code_flow.py
|
openid_provider/tests/test_code_flow.py
|
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from openid_provider.tests.utils import *
from openid_provider.views import *
class CodeFlowTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def test_authorize_invalid_parameters(self):
"""
If the request fails due to a missing, invalid, or mismatching
redirection URI, or if the client identifier is missing or invalid,
the authorization server SHOULD inform the resource owner of the error.
See: https://tools.ietf.org/html/rfc6749#section-4.1.2.1
"""
url = reverse('openid_provider:authorize')
request = self.factory.get(url)
response = AuthorizeView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
|
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from openid_provider.tests.utils import *
from openid_provider.views import *
import urllib
class CodeFlowTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def test_authorize_invalid_parameters(self):
"""
If the request fails due to a missing, invalid, or mismatching
redirection URI, or if the client identifier is missing or invalid,
the authorization server SHOULD inform the resource owner of the error.
See: https://tools.ietf.org/html/rfc6749#section-4.1.2.1
"""
url = reverse('openid_provider:authorize')
request = self.factory.get(url)
response = AuthorizeView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_authorize_invalid_response_type(self):
"""
The OP informs the RP by using the Error Response parameters defined
in Section 4.1.2.1 of OAuth 2.0.
See: http://openid.net/specs/openid-connect-core-1_0.html#AuthError
"""
# Create an authorize request with an unsupported response_type.
url = reverse('openid_provider:authorize')
url += '?client_id={0}&response_type=code%20id_token&scope=openid%20email' \
'&redirect_uri={1}&state=abcdefg'.format(
self.client.client_id,
urllib.quote(self.client.default_redirect_uri),
)
request = self.factory.get(url)
response = AuthorizeView.as_view()(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.has_header('Location'), True)
# Check query component in the redirection URI.
correct_query = 'error=' in response['Location']
self.assertEqual(correct_query, True)
|
Add another test for Code Flow.
|
Add another test for Code Flow.
|
Python
|
mit
|
wayward710/django-oidc-provider,bunnyinc/django-oidc-provider,wayward710/django-oidc-provider,juanifioren/django-oidc-provider,ByteInternet/django-oidc-provider,wojtek-fliposports/django-oidc-provider,ByteInternet/django-oidc-provider,django-py/django-openid-provider,torreco/django-oidc-provider,django-py/django-openid-provider,torreco/django-oidc-provider,wojtek-fliposports/django-oidc-provider,nmohoric/django-oidc-provider,Sjord/django-oidc-provider,Sjord/django-oidc-provider,juanifioren/django-oidc-provider,nmohoric/django-oidc-provider,bunnyinc/django-oidc-provider
|
15c0ecf0e8ecd71017d8a1f2414204944b134fbd
|
lib/jasy/__init__.py
|
lib/jasy/__init__.py
|
#
# Jasy - JavaScript Tooling Refined
# Copyright 2010 Sebastian Werner
#
#
# Configure logging
#
import logging
logging.basicConfig(filename="log.txt", level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
# Define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter('>>> %(message)s', '%H:%M:%S'))
logging.getLogger('').addHandler(console)
#
# Import core classes
#
from jasy.Session import *
from jasy.Project import *
from jasy.Resolver import *
from jasy.Sorter import *
from jasy.Combiner import *
from jasy.Assets import *
from jasy.Optimization import *
from jasy.Format import *
from jasy.File import *
from jasy.Task import *
|
#
# Jasy - JavaScript Tooling Refined
# Copyright 2010 Sebastian Werner
#
#
# Configure logging
#
import logging
logging.basicConfig(filename="log.txt", level=logging.DEBUG, format="%(asctime)s - %(levelname)s - %(message)s")
# Define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter('>>> %(message)s', '%H:%M:%S'))
logging.getLogger('').addHandler(console)
#
# Import core classes
#
from jasy.Session import *
from jasy.Project import *
from jasy.Resolver import *
from jasy.Sorter import *
from jasy.Combiner import *
from jasy.Assets import *
from jasy.Optimization import *
from jasy.Format import *
from jasy.File import *
from jasy.Task import *
|
Enable debug level for log file
|
Enable debug level for log file
|
Python
|
mit
|
sebastian-software/jasy,zynga/jasy,sebastian-software/jasy,zynga/jasy
|
aaaaa179f43b720d207377cf17dcd6cec0c19321
|
falcom/tree/mutable_tree.py
|
falcom/tree/mutable_tree.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class MutableTree:
def __init__ (self, value = None):
self.child = None
self.children = [ ]
self.value = value
@property
def value (self):
return self.__value
@value.setter
def value (self, x):
self.__value = x
def __len__ (self):
return 0 if self.child is None else 1
def full_length (self):
return len(self)
def __iter__ (self):
if self:
return iter((self.child,))
else:
return iter(())
def walk (self):
return iter(self)
def __getitem__ (self, index):
if self and index == 0:
return self.child
else:
raise IndexError("tree index out of range")
def insert (self, index, node):
self.child = node
self.children.insert(index, node)
def __repr__ (self):
debug = self.__class__.__name__
if self.value is not None:
debug += " " + repr(self.value)
return "<{}>".format(debug)
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class MutableTree:
def __init__ (self, value = None):
self.child = None
self.children = [ ]
self.value = value
@property
def value (self):
return self.__value
@value.setter
def value (self, x):
self.__value = x
def __len__ (self):
return 0 if self.child is None else 1
def full_length (self):
return len(self)
def __iter__ (self):
if self:
return iter((self.child,))
else:
return iter(())
def walk (self):
return iter(self)
def __getitem__ (self, index):
if self and index == 0:
return self.child
else:
raise IndexError("tree index out of range")
def insert (self, index, node):
self.child = node
self.children.insert(index, node)
def __repr__ (self):
debug = self.__class__.__name__
if self.value is not None:
debug += " " + repr(self.value)
return "<{} {}>".format(debug, repr(self.children))
|
Add children to debug str
|
Add children to debug str
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
4a38d0df3d72494e2a96ac776f13ce685b537561
|
lokar/bib.py
|
lokar/bib.py
|
# coding=utf-8
from __future__ import unicode_literals
from io import BytesIO
from .marc import Record
from .util import etree, parse_xml, show_diff
class Bib(object):
""" An Alma Bib record """
def __init__(self, alma, xml):
self.alma = alma
self.orig_xml = xml.encode('utf-8')
self.init(xml)
def init(self, xml):
self.doc = parse_xml(xml)
self.mms_id = self.doc.findtext('mms_id')
self.marc_record = Record(self.doc.find('record'))
self.cz_link = self.doc.findtext('linked_record_id[@type="CZ"]') or None
def save(self, diff=False, dry_run=False):
# Save record back to Alma
post_data = ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'.encode('utf-8') +
etree.tostring(self.doc, encoding='UTF-8'))
if diff:
show_diff(self.orig_xml, post_data)
if not dry_run:
response = self.alma.put('/bibs/{}'.format(self.mms_id),
data=BytesIO(post_data),
headers={'Content-Type': 'application/xml'})
self.init(response)
def dump(self, filename):
# Dump record to file
with open(filename, 'wb') as f:
f.write(etree.tostring(self.doc, pretty_print=True))
|
# coding=utf-8
from __future__ import unicode_literals
from io import BytesIO
from .marc import Record
from .util import etree, parse_xml, show_diff
class Bib(object):
""" An Alma Bib record """
def __init__(self, alma, xml):
self.alma = alma
self.orig_xml = xml
self.init(xml)
def init(self, xml):
self.doc = parse_xml(xml)
self.mms_id = self.doc.findtext('mms_id')
self.marc_record = Record(self.doc.find('record'))
self.cz_link = self.doc.findtext('linked_record_id[@type="CZ"]') or None
def save(self, diff=False, dry_run=False):
# Save record back to Alma
post_data = ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' +
etree.tounicode(self.doc))
if diff:
show_diff(self.orig_xml, post_data)
if not dry_run:
response = self.alma.put('/bibs/{}'.format(self.mms_id),
data=BytesIO(post_data.encode('utf-8')),
headers={'Content-Type': 'application/xml'})
self.init(response)
def dump(self, filename):
# Dump record to file
with open(filename, 'wb') as f:
f.write(etree.tostring(self.doc, pretty_print=True))
|
Fix diffing on Py3 by comparing unicode strings
|
Fix diffing on Py3 by comparing unicode strings
|
Python
|
agpl-3.0
|
scriptotek/almar,scriptotek/lokar
|
f4eff6d839d05731a6e29d3e769363e981a32739
|
test/run.py
|
test/run.py
|
#!/bin/env python2.7
import argparse
import os
parser = argparse.ArgumentParser(description="Run unit tests")
parser.add_argument("-g", "--gui", help="start in GUI mode",
action="store_true")
parser.add_argument("-t", "--test", help="run only selected test(s)",
action="append")
args = parser.parse_args()
command = ['runSVUnit']
command.append('-s ius')
if args.gui:
command.append('-c -linedebug')
command.append('-r -gui')
if args.test:
for test in args.test:
command.append('-t ' + test)
os.system(' '.join(command))
|
#!/bin/env python2.7
import argparse
import os
parser = argparse.ArgumentParser(description="Run unit tests")
parser.add_argument("-g", "--gui", help="start in GUI mode",
action="store_true")
parser.add_argument("-t", "--test", help="run only selected test(s)",
action="append")
parser.add_argument("--uvm-version", help="run with selected UVM version (only supported for ius and xcelium",
choices=['1.1d', '1.2'])
args = parser.parse_args()
command = ['runSVUnit']
command.append('-s ius')
if args.gui:
command.append('-c -linedebug')
command.append('-r -gui')
if args.uvm_version:
print(args.uvm_version)
print(args.test)
command.append('-r "-uvmhome CDNS-{version}"'.format(version=args.uvm_version))
if args.test:
for test in args.test:
command.append('-t ' + test)
print(' '.join(command))
os.system(' '.join(command))
|
Add ability to select UVM version in unit tests
|
Add ability to select UVM version in unit tests
|
Python
|
apache-2.0
|
tudortimi/vgm_svunit_utils,tudortimi/vgm_svunit_utils
|
842441bebc328329caef0a7e7aae6d8594318097
|
tests/test_error_handling.py
|
tests/test_error_handling.py
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
Add a test that these calls dont fail.
|
Add a test that these calls dont fail.
|
Python
|
mit
|
jwg4/flask-autodoc,jwg4/flask-autodoc
|
1312dc95d9c25897c11c8e818edcb9cd2b6a32f7
|
ecommerce/extensions/app.py
|
ecommerce/extensions/app.py
|
from oscar import app
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
application = EdxShop()
|
from oscar import app
from oscar.core.application import Application
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
# Override core app instances with blank application instances to exclude their URLs.
promotions_app = Application()
catalogue_app = Application()
offer_app = Application()
search_app = Application()
application = EdxShop()
|
Move the security fix into Eucalyptus
|
Move the security fix into Eucalyptus
|
Python
|
agpl-3.0
|
mferenca/HMS-ecommerce,mferenca/HMS-ecommerce,mferenca/HMS-ecommerce
|
286422d95355fc55c1745731732d763ca1bdb7e5
|
shap/__init__.py
|
shap/__init__.py
|
# flake8: noqa
__version__ = '0.28.5k'
from .explainers.kernel import KernelExplainer, kmeans
from .explainers.sampling import SamplingExplainer
from .explainers.tree import TreeExplainer, Tree
from .explainers.deep import DeepExplainer
from .explainers.gradient import GradientExplainer
from .explainers.linear import LinearExplainer
from .plots.summary import summary_plot
from .plots.dependence import dependence_plot
from .plots.force import force_plot, initjs, save_html
from .plots.image import image_plot
from .plots.monitoring import monitoring_plot
from .plots.embedding import embedding_plot
from . import datasets
from . import benchmark
from .explainers import other
from .common import approximate_interactions, hclust_ordering
|
# flake8: noqa
__version__ = '0.28.6'
from .explainers.kernel import KernelExplainer, kmeans
from .explainers.sampling import SamplingExplainer
from .explainers.tree import TreeExplainer, Tree
from .explainers.deep import DeepExplainer
from .explainers.gradient import GradientExplainer
from .explainers.linear import LinearExplainer
from .plots.summary import summary_plot
from .plots.dependence import dependence_plot
from .plots.force import force_plot, initjs, save_html
from .plots.image import image_plot
from .plots.monitoring import monitoring_plot
from .plots.embedding import embedding_plot
from . import datasets
from . import benchmark
from .explainers import other
from .common import approximate_interactions, hclust_ordering
|
Tag new version with bug fixes.
|
Tag new version with bug fixes.
|
Python
|
mit
|
slundberg/shap,slundberg/shap,slundberg/shap,slundberg/shap
|
1f8895c4e2f9189032383771d322afdbfdac5e37
|
pathvalidate/variable/_elasticsearch.py
|
pathvalidate/variable/_elasticsearch.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import, unicode_literals
import re
from ._base import VarNameSanitizer
class ElasticsearchIndexNameSanitizer(VarNameSanitizer):
__RE_INVALID_INDEX_NAME = re.compile("[" + re.escape('\\/*?"<>|,"') + "\s]+")
__RE_INVALID_INDEX_NAME_HEAD = re.compile("^[_]+")
@property
def reserved_keywords(self):
return []
@property
def _invalid_var_name_head_re(self):
return self.__RE_INVALID_INDEX_NAME_HEAD
@property
def _invalid_var_name_re(self):
return self.__RE_INVALID_INDEX_NAME
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import, unicode_literals
import re
from ._base import VarNameSanitizer
class ElasticsearchIndexNameSanitizer(VarNameSanitizer):
__RE_INVALID_INDEX_NAME = re.compile("[" + re.escape('\\/*?"<>|,"') + r"\s]+")
__RE_INVALID_INDEX_NAME_HEAD = re.compile("^[_]+")
@property
def reserved_keywords(self):
return []
@property
def _invalid_var_name_head_re(self):
return self.__RE_INVALID_INDEX_NAME_HEAD
@property
def _invalid_var_name_re(self):
return self.__RE_INVALID_INDEX_NAME
|
Change to avoid "DeprecationWarning: invalid escape sequence"
|
Change to avoid "DeprecationWarning: invalid escape sequence"
|
Python
|
mit
|
thombashi/pathvalidate
|
1958165c7bf3b9fa45972658b980cefe6a742164
|
myhpom/validators.py
|
myhpom/validators.py
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
from django.contrib.auth.models import User
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
from myhpom.models import User
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
This reverts commit 7350c56339acaef416d03b6d7ae0e818ab8db182.
|
Python
|
bsd-3-clause
|
ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM
|
906950ec1bd1f5d0980116d10344f9f1b7d844ed
|
Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py
|
Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ooop import OOOP
import configdb
O = OOOP(**configdb.ooop)
imp_obj = O.GiscedataFacturacioImportacioLinia
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")])
total = len(imp_del_ids)
n = 0
for imp_del_id in imp_del_ids:
try:
imp_obj.unlink([imp_del_id])
n +=1
print "%d/%d" % (n,total)
except Exception, e:
print e
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ooop import OOOP
import configdb
O = OOOP(**configdb.ooop)
imp_obj = O.GiscedataFacturacioImportacioLinia
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")])
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Ja existeix una factura amb el mateix origen")])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")])
total = len(imp_del_ids)
n = 0
for imp_del_id in imp_del_ids:
try:
imp_obj.unlink([imp_del_id])
n +=1
print "%d/%d" % (n,total)
except Exception, e:
print e
|
Kill "Ja existeix una factura amb el mateix.." too
|
Kill "Ja existeix una factura amb el mateix.." too
|
Python
|
agpl-3.0
|
Som-Energia/invoice-janitor
|
cca5b6355a376cb1f51a45a3fac5ca5e4b96f5c7
|
pyflation/configuration.py
|
pyflation/configuration.py
|
"""Configuration file for harness.py
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
"""
import logging
##################################################
# debug logging control
# 0 for off, 1 for on
##################################################
_debug = 1
#This is the default log level which can be overridden in run_config.
# The logging level changes how much is saved to logging files.
# Choose from logging.DEBUG, .INFO, .WARN, .ERROR, .CRITICAL in decreasing order of verbosity
LOGLEVEL = logging.INFO
# Directory structure
# Change the names of various directories
#Change to using the base run directory with bin, pyflation, scripts immediately below.
CODEDIRNAME = "."
RUNDIRNAME = "runs"
RESULTSDIRNAME = "results"
LOGDIRNAME = "applogs"
QSUBSCRIPTSDIRNAME = "qsubscripts"
QSUBLOGSDIRNAME = "qsublogs"
RUNCONFIGTEMPLATE = "run_config.template"
#Name of provenance file which records the code revisions and results files added
provenancefilename = "provenance.log"
# Compression type to be used with PyTables:
# PyTables stores results in HDF5 files. The compression it uses can be
# selected here. For maximum compatibility with other HDF5 utilities use "zlib".
# For maximum efficiency in both storage space and recall time use "blosc".
hdf5complib = "blosc"
hdf5complevel = 2
|
"""Configuration file for harness.py
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
The main configuration options are for logging. By changing _debug to 1 (default
is 0) much more debugging information will be added to the log files.
The overall logging level can also be set using the LOGLEVEL variable. This
level can be overridden using command line options to the scripts.
"""
import logging
##################################################
# debug logging control
# 0 for off, 1 for on
##################################################
_debug = 1
#This is the default log level which can be overridden in run_config.
# The logging level changes how much is saved to logging files.
# Choose from logging.DEBUG, .INFO, .WARN, .ERROR, .CRITICAL in decreasing order of verbosity
LOGLEVEL = logging.INFO
# Directory structure
# Change the names of various directories
#Change to using the base run directory with bin, pyflation, scripts immediately below.
CODEDIRNAME = "."
RUNDIRNAME = "runs"
RESULTSDIRNAME = "results"
LOGDIRNAME = "applogs"
QSUBSCRIPTSDIRNAME = "qsubscripts"
QSUBLOGSDIRNAME = "qsublogs"
RUNCONFIGTEMPLATE = "run_config.template"
#Name of provenance file which records the code revisions and results files added
provenancefilename = "provenance.log"
# Compression type to be used with PyTables:
# PyTables stores results in HDF5 files. The compression it uses can be
# selected here. For maximum compatibility with other HDF5 utilities use "zlib".
# For maximum efficiency in both storage space and recall time use "blosc".
hdf5complib = "blosc"
hdf5complevel = 2
|
Add some explanation of logging options.
|
Add some explanation of logging options.
|
Python
|
bsd-3-clause
|
ihuston/pyflation,ihuston/pyflation
|
1f112cb553b0170eb948cfb53883913dc2f3b0b3
|
index.py
|
index.py
|
from gevent import monkey
monkey.patch_all()
import time
from threading import Thread
import settings
import requests
from flask import Flask
from flask.ext.socketio import SocketIO, emit
app = Flask(__name__)
app.config.from_object('settings')
app.debug = True
if not app.debug:
import logging
file_handler = logging.FileHandler('production.log')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
socketio = SocketIO(app)
thread = None
class StorjAPI:
    """Thin client wrapper for the Metadisk node's REST API."""

    @staticmethod
    def getNodeStatus():
        # Proxy the node's /api/status endpoint and decode the JSON body.
        # NOTE(review): assumes NODE_URL has no trailing slash -- confirm
        # against the settings module.
        r = requests.get(app.config['NODE_URL'] + '/api/status')
        return r.json()


def status_thread():
    # Background poller: pushes the node status to every /metadisk
    # socket.io client once every 5 seconds, forever.
    while True:
        time.sleep(5)
        socketio.emit('status', StorjAPI.getNodeStatus(), namespace='/metadisk')
@socketio.on('status')
def node_status():
socketio.emit('status', StorjAPI.getNodeStatus())
@socketio.on('connect', namespace='/metadisk')
def metadisk_connect():
global thread
if thread is None:
thread = Thread(target=status_thread)
thread.start()
print('Client has connected.')
@socketio.on('disconnect', namespace='/metadisk')
def metadisk_disconnect():
print('Client has disconnected.')
if __name__ == '__main__':
socketio.run(app)
|
from gevent import monkey
monkey.patch_all()
import time
from threading import Thread
import settings
import requests
from flask import Flask
from flask.ext.socketio import SocketIO, emit
app = Flask(__name__)
app.config.from_object('settings')
app.debug = True
if not app.debug:
import logging
file_handler = logging.FileHandler('production.log')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
socketio = SocketIO(app)
thread = None
class StorjAPI:
@staticmethod
def getNodeStatus():
r = requests.get(app.config['NODE_URL'] + '/api/status')
return r.json()
def status_thread():
while True:
time.sleep(5)
socketio.emit('status', StorjAPI.getNodeStatus(), namespace='/metadisk')
@socketio.on('connect', namespace='/metadisk')
def metadisk_connect():
global thread
if thread is None:
thread = Thread(target=status_thread)
thread.start()
print('Client has connected.')
@socketio.on('disconnect', namespace='/metadisk')
def metadisk_disconnect():
print('Client has disconnected.')
if __name__ == '__main__':
socketio.run(app)
|
Remove manual emit on status
|
Remove manual emit on status
|
Python
|
mit
|
Storj/metadisk-websockets
|
2e99893065abef2f751e3fb5f19a59bfee79a756
|
language_model_transcription.py
|
language_model_transcription.py
|
import metasentence
import language_model
import standard_kaldi
import diff_align
import json
import os
import sys
vocab = metasentence.load_vocabulary('PROTO_LANGDIR/graphdir/words.txt')
def lm_transcribe(audio_f, text_f):
    # Align `audio_f` against the transcript in `text_f`: build a language
    # model restricted to the transcript's vocabulary, decode the audio
    # with Kaldi, then diff the decode against the transcript for timings.
    ms = metasentence.MetaSentence(open(text_f).read(), vocab)
    model_dir = language_model.getLanguageModel(ms.get_kaldi_sequence())
    print 'generated model', model_dir
    # NOTE(review): assumes the generated model dir contains
    # graphdir/HCLG.fst -- confirm against language_model.getLanguageModel.
    k = standard_kaldi.Kaldi(os.path.join(model_dir, 'graphdir', 'HCLG.fst'))
    trans = standard_kaldi.transcribe(k, audio_f)
    ret = diff_align.align(trans["words"], ms)
    return ret
if __name__=='__main__':
AUDIO_FILE = sys.argv[1]
TEXT_FILE = sys.argv[2]
OUTPUT_FILE = sys.argv[3]
ret = lm_transcribe(AUDIO_FILE, TEXT_FILE)
json.dump(ret, open(OUTPUT_FILE, 'w'), indent=2)
|
import metasentence
import language_model
import standard_kaldi
import diff_align
import json
import os
import sys
vocab = metasentence.load_vocabulary('PROTO_LANGDIR/graphdir/words.txt')
def lm_transcribe(audio_f, text_f):
ms = metasentence.MetaSentence(open(text_f).read(), vocab)
model_dir = language_model.getLanguageModel(ms.get_kaldi_sequence())
print 'generated model', model_dir
k = standard_kaldi.Kaldi(os.path.join(model_dir, 'graphdir', 'HCLG.fst'))
trans = standard_kaldi.transcribe(k, audio_f)
ret = diff_align.align(trans["words"], ms)
return ret
if __name__=='__main__':
import argparse
parser = argparse.ArgumentParser(
description='Align a transcript to audio by generating a new language model.')
parser.add_argument('audio_file', help='input audio file in any format supported by FFMPEG')
parser.add_argument('text_file', help='input transcript as plain text')
parser.add_argument('output_file', type=argparse.FileType('w'),
help='output json file for aligned transcript')
parser.add_argument('--proto_langdir', default="PROTO_LANGDIR",
help='path to the prototype language directory')
args = parser.parse_args()
ret = lm_transcribe(args.audio_file, args.text_file)
json.dump(ret, args.output_file, indent=2)
|
Use argparse for main python entrypoint args.
|
Use argparse for main python entrypoint args.
Will make it easier to add proto_langdir as a flag argument in a future commit.
|
Python
|
mit
|
lowerquality/gentle,lowerquality/gentle,lowerquality/gentle,lowerquality/gentle
|
de15315b95f70e56d424d54637e3ac0d615ea0f0
|
proto/ho.py
|
proto/ho.py
|
from board import Board, BoardCanvas
b = Board(19, 19)
c = BoardCanvas(b)
|
#!/usr/bin/env python
import platform
import subprocess
import sys
from copy import deepcopy
from board import Board, BoardCanvas
def clear():
    """Wipe the terminal screen ('cls' on Windows, 'clear' elsewhere)."""
    command = 'cls' if platform.system() == 'Windows' else 'clear'
    subprocess.check_call(command, shell=True)
class _Getch:
"""
Gets a single character from standard input. Does not echo to the
screen.
"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
self.impl = _GetchUnix()
def __call__(self):
return self.impl()
class _GetchUnix:
def __call__(self):
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt # NOQA
def __call__(self):
import msvcrt
return msvcrt.getch()
getch = _Getch()
# Board dimensions (a standard 19x19 Go board).
WIDTH = 19
HEIGHT = 19


def trunc_width(v):
    """Clamp a column coordinate into the valid range [1, WIDTH]."""
    return min(max(v, 1), WIDTH)


def trunc_height(v):
    """Clamp a row coordinate into the valid range [1, HEIGHT]."""
    return min(max(v, 1), HEIGHT)


def move_up(x, y):
    """Return the cursor position one row up, clamped to the board."""
    new_y = y - 1
    return trunc_width(x), trunc_height(new_y)


def move_down(x, y):
    """Return the cursor position one row down, clamped to the board."""
    new_y = y + 1
    return trunc_width(x), trunc_height(new_y)


def move_left(x, y):
    """Return the cursor position one column left, clamped to the board."""
    new_x = x - 1
    return trunc_width(new_x), trunc_height(y)


def move_right(x, y):
    """Return the cursor position one column right, clamped to the board."""
    new_x = x + 1
    return trunc_width(new_x), trunc_height(y)


# Movement keys mapped to their handlers (w/r/a/s layout).
KEYS = {
    'w': move_up,
    'r': move_down,
    'a': move_left,
    's': move_right,
}
def main():
    # Entry point for the interactive board prototype: draws the board,
    # reads single unbuffered keypresses, and moves a cursor until the
    # Escape key is pressed.
    board = Board(WIDTH, HEIGHT)
    canvas = BoardCanvas(board)
    cur_x, cur_y = (1, 1)
    while True:
        clear()
        # Print board
        # Draw the cursor on a copy so the marker never mutates the
        # base canvas between frames.
        select_board = deepcopy(canvas)
        select_board.set(cur_x, cur_y, 'X')
        print select_board
        print 'Make your move... '
        # Get char
        c = getch()
        # Escape terminates
        if c == '\x1b':
            break
        # Move cursor
        try:
            cur_x, cur_y = KEYS[c](cur_x, cur_y)
        except KeyError:
            # Ignore keys that are not bound to a movement handler.
            pass
if __name__ == '__main__':
main()
|
Add game loop to prototype
|
Add game loop to prototype
|
Python
|
mit
|
davesque/go.py
|
038b56134017b6b3e4ea44d1b7197bc5168868d3
|
safeopt/__init__.py
|
safeopt/__init__.py
|
"""
The `safeopt` package provides...
Main classes
============
.. autosummary::
SafeOpt
SafeOptSwarm
Utilities
=========
.. autosummary::
sample_gp_function
linearly_spaced_combinations
plot_2d_gp
plot_3d_gp
plot_contour_gp
"""
from __future__ import absolute_import
from .utilities import *
from .gp_opt import *
__all__ = [s for s in dir() if not s.startswith('_')]
|
"""
The `safeopt` package provides...
Main classes
============
These classes provide the main functionality for Safe Bayesian optimization.
.. autosummary::
SafeOpt
SafeOptSwarm
Utilities
=========
The following are utilities to make testing and working with the library more pleasant.
.. autosummary::
sample_gp_function
linearly_spaced_combinations
plot_2d_gp
plot_3d_gp
plot_contour_gp
"""
from __future__ import absolute_import
from .utilities import *
from .gp_opt import *
__all__ = [s for s in dir() if not s.startswith('_')]
|
Add short comment to docs
|
Add short comment to docs
|
Python
|
mit
|
befelix/SafeOpt,befelix/SafeOpt
|
d295575284e712a755d3891806a7e40b65377a69
|
music_essentials/chord.py
|
music_essentials/chord.py
|
class Chord(object):
    """A collection of notes kept in ascending order, rooted at the lowest."""
    # TODO: validation of root_note

    def __init__(self, root_note):
        """Create a chord whose only note (and therefore root) is `root_note`."""
        self.notes = [root_note]

    def root(self):
        """Return the lowest note of the chord."""
        # TODO: tests
        return self.notes[0]

    def add_note(self, new_note):
        """Insert `new_note` so the note list stays sorted ascending."""
        # TODO: tests
        # Below the current root: the new note becomes the new lowest.
        if new_note < self.root():
            self.notes.insert(0, new_note)
            return
        # Otherwise find the first adjacent pair it fits between.
        for pos in range(len(self.notes) - 1):
            if (new_note >= self.notes[pos]) and (new_note < self.notes[pos + 1]):
                self.notes.insert(pos + 1, new_note)
                return
        # Highest note so far: append at the end.
        self.notes.append(new_note)
|
class Chord(object):
    """A collection of notes kept in ascending order, rooted at the lowest."""
    # TODO: validation of root_note

    def __init__(self, root_note):
        """Create a chord whose only note (and therefore root) is `root_note`."""
        self.notes = [root_note]

    def root(self):
        """Return the lowest note of the chord."""
        # TODO: tests
        return self.notes[0]

    def add_note(self, new_note):
        """Insert `new_note` so the note list stays sorted ascending."""
        # TODO: tests
        # Below the current root: the new note becomes the new lowest.
        if new_note < self.root():
            self.notes.insert(0, new_note)
            return
        # Otherwise find the first adjacent pair it fits between.
        for pos in range(len(self.notes) - 1):
            if (new_note >= self.notes[pos]) and (new_note < self.notes[pos + 1]):
                self.notes.insert(pos + 1, new_note)
                return
        # Highest note so far: append at the end.
        self.notes.append(new_note)

    def __str__(self):
        """Render the chord as 'note1+note2+...+noteN'."""
        return '+'.join(str(note) for note in self.notes)
|
Add __str__ method for Chord class.
|
Add __str__ method for Chord class.
Signed-off-by: Charlotte Pierce <[email protected]>
|
Python
|
mit
|
charlottepierce/music_essentials
|
0d50f6663bbc7f366c9db6a9aeef5feb0f4cb5f2
|
src/ExampleNets/readAllFields.py
|
src/ExampleNets/readAllFields.py
|
import glob
import os
import SCIRunPythonAPI; from SCIRunPythonAPI import *
def allFields(path):
names = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
if filename.endswith("fld"):
names.append(os.path.join(dirname, filename))
return names
dir = r"E:\scirun\trunk_ref\SCIRunData"
for file in allFields(dir):
read = addModule("ReadField")
read.Filename = file
show = addModule("ReportFieldInfo")
read.output[0] >> show.input.Input
#executeAll()
|
import glob
import os
import time
import SCIRunPythonAPI; from SCIRunPythonAPI import *
def allFields(path):
names = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
if filename.endswith("fld"):
names.append(os.path.join(dirname, filename))
return names
def printList(list, name):
thefile = open(name, 'w')
for f,v in list:
thefile.write("%s\n\t%s\n" % (f,v))
dir = r"E:\scirun\trunk_ref\SCIRunData"
values = []
files = []
for file in allFields(dir):
read = addModule("ReadField")
read.Filename = file
files.append(file)
show = addModule("ReportFieldInfo")
prnt = addModule("PrintDatatype")
read.output[0] >> show.input.Input
show.output[0] >> prnt.input[0]
executeAll()
time.sleep(1)
values.append(prnt.ReceivedValue)
[removeModule(m.id) for m in modules()]
printList(zip(files, values), r'E:\fieldTypes.txt')
|
Update script to print all field types to a file
|
Update script to print all field types to a file
|
Python
|
mit
|
moritzdannhauer/SCIRunGUIPrototype,jessdtate/SCIRun,jessdtate/SCIRun,jessdtate/SCIRun,jcollfont/SCIRun,jcollfont/SCIRun,ajanson/SCIRun,jessdtate/SCIRun,ajanson/SCIRun,moritzdannhauer/SCIRunGUIPrototype,collint8/SCIRun,moritzdannhauer/SCIRunGUIPrototype,moritzdannhauer/SCIRunGUIPrototype,ajanson/SCIRun,jessdtate/SCIRun,collint8/SCIRun,jcollfont/SCIRun,ajanson/SCIRun,collint8/SCIRun,jcollfont/SCIRun,jcollfont/SCIRun,collint8/SCIRun,ajanson/SCIRun,jessdtate/SCIRun,jessdtate/SCIRun,jcollfont/SCIRun,collint8/SCIRun,collint8/SCIRun,collint8/SCIRun,ajanson/SCIRun,jessdtate/SCIRun,collint8/SCIRun,moritzdannhauer/SCIRunGUIPrototype,moritzdannhauer/SCIRunGUIPrototype,ajanson/SCIRun,moritzdannhauer/SCIRunGUIPrototype,jcollfont/SCIRun
|
b1bd07038b0c6a6d801e686372996b3478c71af9
|
iss/management/commands/upsert_iss_organizations.py
|
iss/management/commands/upsert_iss_organizations.py
|
#!/usr/bin/env python
"""Upserts Organization records with data from Salesforce Accounts.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.salesforce
import iss.utils
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert organizations for accounts modified within n-days')
def handle(self, *args, **options):
upsert_organizations_for_recently_modified_accounts(
options['modified_within'])
def upsert_organizations_for_recently_modified_accounts(since=7):
    """Upsert organizations for SF Accounts modified in last `since` days.

    `since` -- look-back window in days (default 7).
    """
    logger.info('upserting orgs for accounts modified in last {since} days'.
                format(since=since))
    # Fetch the recently touched Salesforce accounts, then mirror each
    # one into a local Organization record (insert or update).
    recently_modified_accounts = (
        iss.salesforce.Account.get_recently_modified_accounts(since=since))
    iss.utils.upsert_organizations_for_accounts(recently_modified_accounts)
|
#!/usr/bin/env python
"""Upserts Organization records with data from Salesforce Accounts.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
import iss.utils
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert organizations for accounts modified within n-days')
parser.add_argument(
'-i', '--include-aashe-in-website',
action='store_true',
help='force AASHE exclude_from_website to be False')
def handle(self, *args, **options):
upsert_organizations_for_recently_modified_accounts(
since=options['modified_within'],
include_aashe_in_website=options['include_aashe_in_website'])
def upsert_organizations_for_recently_modified_accounts(
since=7, include_aashe_in_website=False):
"""Upsert organizations for SF Accounts modified in last `since` days.
When `include_aashe_in_website` is true, set the
`exclude_from_website` flag on the Organization representing AASHE
to False (0, actually). (Added for the Hub project.)
"""
logger.info('upserting orgs for accounts modified in last {since} days'.
format(since=since))
recently_modified_accounts = (
iss.salesforce.Account.get_recently_modified_accounts(since=since))
iss.utils.upsert_organizations_for_accounts(recently_modified_accounts)
if include_aashe_in_website:
aashe = iss.models.Organization.objects.get(org_name="AASHE")
if aashe.exclude_from_website:
aashe.exclude_from_website = 0
aashe.save()
|
Add --include-aashe-in-website flag to org upsert
|
Add --include-aashe-in-website flag to org upsert
|
Python
|
mit
|
AASHE/iss
|
ed45688c062aed44836fe902bb61bf858ed4b4bf
|
sale_require_ref/__openerp__.py
|
sale_require_ref/__openerp__.py
|
# -*- coding: utf-8 -*-
{
'name': 'Sale Order Require Contract on Confirmation',
'version': '1.0',
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'description': """
Sale Order Require Contract on Confirmation
===========================================
""",
'author': 'ADHOC SA',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'sale',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
'name': 'Sale Order Require Contract on Confirmation',
'version': '1.0',
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'description': """
Sale Order Require Contract on Confirmation
===========================================
""",
'author': 'ADHOC SA',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'sale',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': False,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
FIX: make sale_require_ref uninstallable
|
FIX: make sale_require_ref uninstallable
|
Python
|
agpl-3.0
|
ingadhoc/account-analytic,ingadhoc/sale,ingadhoc/account-invoicing,bmya/odoo-addons,ingadhoc/stock,ingadhoc/product,dvitme/odoo-addons,maljac/odoo-addons,adhoc-dev/account-financial-tools,ingadhoc/sale,HBEE/odoo-addons,ClearCorp/account-financial-tools,sysadminmatmoz/ingadhoc,jorsea/odoo-addons,jorsea/odoo-addons,sysadminmatmoz/ingadhoc,jorsea/odoo-addons,dvitme/odoo-addons,ClearCorp/account-financial-tools,maljac/odoo-addons,HBEE/odoo-addons,ingadhoc/product,ingadhoc/account-payment,bmya/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/sale,adhoc-dev/account-financial-tools,ingadhoc/partner,adhoc-dev/odoo-addons,dvitme/odoo-addons,ingadhoc/sale,HBEE/odoo-addons,syci/ingadhoc-odoo-addons,bmya/odoo-addons,syci/ingadhoc-odoo-addons,ingadhoc/odoo-addons,maljac/odoo-addons,adhoc-dev/odoo-addons,ingadhoc/odoo-addons,adhoc-dev/odoo-addons,syci/ingadhoc-odoo-addons,ingadhoc/odoo-addons,ingadhoc/account-financial-tools
|
07f531c7e3bbc0149fad4cfda75d8803cbc48e1d
|
smserver/chatplugin.py
|
smserver/chatplugin.py
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
This module add the class needed for creating custom chat command
:Example:
Here's a simple ChatPlugin which will send a HelloWorld on use
``
ChatHelloWorld(ChatPlugin):
helper = "Display Hello World"
command = "hello"
def __call__(self, serv, message):
serv.send_message("Hello world", to="me")
``
"""
class ChatPlugin(object):
    """
    Inherit from this class to add a command in the chat.

    helper: Text that will be shown when calling the help command
    permission: Permission needed for this command (see ability)
    room: Set to True if the command must be executed inside a room
    command: The command to use to call this function
    """

    helper = ""
    permission = None
    room = False
    command = None

    def can(self, serv):
        """
        Method called each time someone tries to run this command

        :param serv: The StepmaniaController instance
        :type serv: StepmaniaController
        :return: True if authorized, False if not
        :rtype: bool
        """
        # Room-only commands are rejected outside of a room context.
        if self.room and not serv.room:
            return False
        # Permission check is scoped to the connection's current room.
        if self.permission and serv.cannot(self.permission, serv.conn.room):
            return False
        return True

    def __call__(self, serv, message):
        """
        Action to perform when using the command

        :param serv: The StepmaniaController instance
        :param message: The text after the command. (Eg. /command text)
        :type serv: StepmaniaController
        :type message: str
        :return: Nothing
        """
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
This module add the class needed for creating custom chat command
:Example:
Here's a simple ChatPlugin which will send a HelloWorld on use
``
ChatHelloWorld(ChatPlugin):
helper = "Display Hello World"
command = "hello"
def __call__(self, serv, message):
serv.send_message("Hello world", to="me")
``
"""
class ChatPlugin(object):
    """
    Inherit from this class to add a command in the chat.

    helper: Text that will be shown when calling the help command
    permission: Permission needed for this command (see ability)
    room: Set to True if the command must be executed inside a room
    command: The command to use to call this function
    """

    helper = ""
    permission = None
    room = False
    command = None

    def can(self, serv):
        """
        Method called each time someone tries to run this command

        :param serv: The StepmaniaController instance
        :type serv: StepmaniaController
        :return: True if authorized, False if not
        :rtype: bool
        """
        # Room-only commands are rejected outside of a room context.
        if self.room and not serv.room:
            return False
        # Permission check is scoped to the connection's current room.
        if self.permission and serv.cannot(self.permission, serv.conn.room):
            return False
        return True

    def __call__(self, serv, message):
        """
        Action to perform when using the command

        :param serv: The StepmaniaController instance
        :param message: The text after the command. (Eg. /command text)
        :type serv: StepmaniaController
        :type message: str
        :return: Nothing
        """
|
Correct chat plugin example in docstring
|
Correct chat plugin example in docstring
|
Python
|
mit
|
ningirsu/stepmania-server,Nickito12/stepmania-server,ningirsu/stepmania-server,Nickito12/stepmania-server
|
83292a4b6f6bec00b20c623fa6f44e15aa82cd2a
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
from django.test.utils import get_runner
def runtests(*test_args):
    """Run the genericm2m test suite and exit with the failure count."""
    if not test_args:
        # Python 3 needs the fully-qualified app path; Python 2 accepts
        # the bare test label.
        if sys.version_info[0] > 2:
            test_args = ['genericm2m.genericm2m_tests']
        else:
            test_args = ["genericm2m_tests"]
    # Make sure the project root is importable before the runner starts.
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
    TestRunner = get_runner(settings)
    test_runner = TestRunner(verbosity=1, interactive=True)
    failures = test_runner.run_tests(test_args)
    # Non-zero exit status when any test failed (CI-friendly).
    sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
MIDDLEWARE_CLASSES = (),
)
from django.test.utils import get_runner
django.setup()
def runtests(*test_args):
if not test_args:
if sys.version_info[0] > 2:
test_args = ['genericm2m.genericm2m_tests']
else:
test_args = ["genericm2m_tests"]
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Fix "AppRegistryNotReady: Models aren't loaded yet"
|
Fix "AppRegistryNotReady: Models aren't loaded yet"
|
Python
|
mit
|
coleifer/django-generic-m2m,coleifer/django-generic-m2m,coleifer/django-generic-m2m
|
e77042c914b9725da0fef7e56ede12635c1a876b
|
s3s3/api.py
|
s3s3/api.py
|
"""
The API for s3s3.
"""
import tempfile
from boto.s3.connection import S3Connection
def create_connection(connection_args):
    """Create a boto S3Connection from a config dict (minus `bucket_name`).

    `bucket_name` is a config-only key, so it is stripped from a copy of
    the mapping before the rest is passed through as keyword arguments.
    """
    kwargs = dict(connection_args)
    kwargs.pop('bucket_name')
    return S3Connection(**kwargs)
def upload(source_key, dest_keys):
    """
    Copy the contents of one S3 key to one or more destination keys.

    `source_key` The source boto s3 key.
    `dest_keys` A list of destination boto s3 keys.

    Raises Exception when either argument is missing or empty.
    """
    # BUG FIX: the original body referenced an undefined name `dest_key`
    # here, raising NameError on every call. Both arguments are required,
    # so validate them explicitly instead.
    if not source_key or not dest_keys:
        raise Exception(
            'The source_key and dest_keys parameters are required.')
    # Spool the source data to a local temp file once, then fan out to
    # every destination from that single local copy.
    with tempfile.NamedTemporaryFile() as data:
        source_key.get_contents_to_file(data)
        # Ensure buffered bytes are on disk before readers open the file
        # by name.
        data.flush()
        for dest_key in dest_keys:
            dest_key.set_contents_from_filename(data.name)
|
"""
The API for s3s3.
"""
import tempfile
from boto.s3.connection import S3Connection
def create_connection(connection_args):
    # Build a boto S3Connection from a config mapping. `bucket_name` is a
    # config-only key, so strip it from a copy before passing the rest
    # through as S3Connection keyword arguments.
    connection_args = connection_args.copy()
    connection_args.pop('bucket_name')
    return S3Connection(**connection_args)
def upload(source_key, dest_keys):
    """
    Copy one S3 key's contents to each of a list of destination keys.

    `source_key` The source boto s3 key.
    `dest_keys` A list of the destination boto s3 keys.
    """
    # Both parameters are mandatory; fail loudly rather than guessing.
    if not dest_keys or not source_key:
        raise Exception(
            'The source_key and dest_keys parameters are required.')
    # Spool the source once to a local temp file, then fan out from it.
    with tempfile.NamedTemporaryFile() as spooled:
        source_key.get_contents_to_file(spooled)
        for destination in dest_keys:
            destination.set_contents_from_filename(spooled.name)
|
Fix typo. dest_key => dest_keys.
|
Fix typo. dest_key => dest_keys.
modified: s3s3/api.py
|
Python
|
mit
|
lsst-sqre/s3s3,lsst-sqre/s3-glacier
|
ada7e2d2b98664fd6c481c4279677a4292e5bfef
|
openedx/features/idea/api_views.py
|
openedx/features/idea/api_views.py
|
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from openedx.features.idea.models import Idea
class FavoriteAPIView(APIView):
    """
    FavoriteAPIView is used to toggle favorite idea for the user
    """
    authentication_classes = (SessionAuthentication, BasicAuthentication)
    permission_classes = (IsAuthenticated,)

    def post(self, request, idea_id):
        """Toggle the requesting user's favorite flag on an idea.

        Returns 201 with a JSON body when the idea was added to favorites,
        200 when it was removed, and 404 for an unknown `idea_id`. The body
        always carries the updated favorite count.
        """
        response = {'message': 'Idea is added to favorites', 'is_idea_favorite': True}
        toggle_status = status.HTTP_201_CREATED
        user = request.user
        idea = get_object_or_404(Idea, pk=idea_id)
        # NOTE(review): toggle_favorite appears to return truthy when the
        # favorite was added and falsy when removed -- confirm against the
        # Idea model.
        toggle_favorite_status = idea.toggle_favorite(user)
        if not toggle_favorite_status:
            response['is_idea_favorite'] = False
            response['message'] = 'Idea is removed from favorites'
            toggle_status = status.HTTP_200_OK
        response['favorite_count'] = idea.favorites.count()
        return JsonResponse(response, status=toggle_status)
|
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from openedx.features.idea.models import Idea
class FavoriteAPIView(APIView):
"""
FavoriteAPIView is used to toggle favorite idea for the user
"""
authentication_classes = (SessionAuthenticationAllowInactiveUser,)
permission_classes = (IsAuthenticated,)
def post(self, request, idea_id):
response = {'message': 'Idea is added to favorites', 'is_idea_favorite': True}
toggle_status = status.HTTP_201_CREATED
user = request.user
idea = get_object_or_404(Idea, pk=idea_id)
toggle_favorite_status = idea.toggle_favorite(user)
if not toggle_favorite_status:
response['is_idea_favorite'] = False
response['message'] = 'Idea is removed from favorites'
toggle_status = status.HTTP_200_OK
response['favorite_count'] = idea.favorites.count()
return JsonResponse(response, status=toggle_status)
|
Change authentication classes to cater inactive users
|
[LP-1965] Change authentication classes to cater inactive users
|
Python
|
agpl-3.0
|
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
|
d6782066e3ed3f00e3c8dcffe2ffd0b9bad18d17
|
slave/skia_slave_scripts/render_pdfs.py
|
slave/skia_slave_scripts/render_pdfs.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia render_pdfs executable. """
from build_step import BuildStep, BuildStepWarning
import sys
class RenderPdfs(BuildStep):
def _Run(self):
# Skip this step for now, since the new SKPs are causing it to crash.
raise BuildStepWarning('Skipping this step since it is crashing.')
#self.RunFlavoredCmd('render_pdfs', [self._device_dirs.SKPDir()])
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RenderPdfs))
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia render_pdfs executable. """
from build_step import BuildStep
import sys
class RenderPdfs(BuildStep):
def _Run(self):
self.RunFlavoredCmd('render_pdfs', [self._device_dirs.SKPDir()])
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RenderPdfs))
|
Revert "Skip RenderPdfs until the crash is fixed"
|
Revert "Skip RenderPdfs until the crash is fixed"
This reverts commit fd03af0fbcb5f1b3656bcc78d934c560816d6810.
https://codereview.chromium.org/15002002/ fixes the crash.
[email protected]
Review URL: https://codereview.chromium.org/14577010
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9019 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot
|
bf91e50b02ff8ef89e660e3c853cc2f30646f32d
|
bash_runner/tasks.py
|
bash_runner/tasks.py
|
"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/home/ubuntu/hello', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
|
"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/home/ubuntu/hello', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "bash_runner status", "state", "running")
|
Change the status string in riemann
|
Change the status string in riemann
|
Python
|
apache-2.0
|
rantav/cosmo-plugin-bash-runner
|
c568cf4b1be5e38b92f7d3a9131e67ff9eff764e
|
lib/ctf_gameserver/lib/helper.py
|
lib/ctf_gameserver/lib/helper.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
def convert_arg_line_to_args(arg_line):
    """argparse helper for splitting input from config

    Lines whose first non-blank character is '#' are treated as comments
    and dropped; every other line is split on whitespace, so an argument
    and its value may share a line.
    """
    stripped = arg_line.strip()
    if stripped.startswith('#'):
        return []
    return arg_line.split()
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import shlex
def convert_arg_line_to_args(arg_line):
    """argparse helper for splitting input from config

    Uses shlex so quoted values may contain spaces; '#' starts a comment.
    Allows both argument and value on the same line.
    """
    tokens = shlex.split(arg_line, comments=True)
    return tokens
|
Improve config argument splitting to allow quoted spaces
|
Improve config argument splitting to allow quoted spaces
|
Python
|
isc
|
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
|
a234b8dfc45d9e08a452ccc4f275283eb1eb5485
|
dataactbroker/scripts/loadFSRS.py
|
dataactbroker/scripts/loadFSRS.py
|
import logging
import sys
from dataactcore.models.baseInterface import databaseSession
from dataactbroker.fsrs import (
configValid, fetchAndReplaceBatch, GRANT, PROCUREMENT)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
with databaseSession() as sess:
if not configValid():
logger.error("No config for broker/fsrs/[service]/wsdl")
sys.exit(1)
else:
procs = fetchAndReplaceBatch(sess, PROCUREMENT)
grants = fetchAndReplaceBatch(sess, GRANT)
awards = procs + grants
numSubAwards = sum(len(a.subawards) for a in awards)
logger.info("Inserted/Updated %s awards, %s subawards",
len(awards), numSubAwards)
|
import logging
import sys
from dataactcore.interfaces.db import databaseSession
from dataactbroker.fsrs import (
configValid, fetchAndReplaceBatch, GRANT, PROCUREMENT)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
with databaseSession() as sess:
if not configValid():
logger.error("No config for broker/fsrs/[service]/wsdl")
sys.exit(1)
else:
procs = fetchAndReplaceBatch(sess, PROCUREMENT)
grants = fetchAndReplaceBatch(sess, GRANT)
awards = procs + grants
numSubAwards = sum(len(a.subawards) for a in awards)
logger.info("Inserted/Updated %s awards, %s subawards",
len(awards), numSubAwards)
|
Switch to using dbSession in db.py instead of baseInterface.py
|
Switch to using dbSession in db.py instead of baseInterface.py
This is another file that should have been included in PR #272,
where we transitioned all existing non-Flask db access to a
db connection using the new contextmanager. Originally missed
this one because it *is* using a contextmanager, but it's using
one in the deprecated baseInterface.py instead of the newer db.py.
|
Python
|
cc0-1.0
|
fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
0022726e9f2d122ff84eb19ed2807649ab96f931
|
deployment/cfn/utils/constants.py
|
deployment/cfn/utils/constants.py
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
HTTP = 80
HTTPS = 443
POSTGRESQL = 5432
SSH = 22
AMAZON_ACCOUNT_ID = 'amazon'
AMAZON_S3_VPC_ENDPOINT = 'com.amazonaws.us-east-1.s3'
CANONICAL_ACCOUNT_ID = '099720109477'
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large',
'm3.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
HTTP = 80
HTTPS = 443
POSTGRESQL = 5432
SSH = 22
AMAZON_ACCOUNT_ID = 'amazon'
AMAZON_S3_VPC_ENDPOINT = 'com.amazonaws.us-east-1.s3'
CANONICAL_ACCOUNT_ID = '099720109477'
|
Add m3.medium to EC2 instance types
|
Add m3.medium to EC2 instance types
This is the lowest `m3` family instance type with ephemeral storage.
|
Python
|
apache-2.0
|
azavea/raster-foundry,aaronxsu/raster-foundry,kdeloach/raster-foundry,kdeloach/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,aaronxsu/raster-foundry,azavea/raster-foundry,kdeloach/raster-foundry,raster-foundry/raster-foundry,azavea/raster-foundry,raster-foundry/raster-foundry,kdeloach/raster-foundry,kdeloach/raster-foundry,aaronxsu/raster-foundry,raster-foundry/raster-foundry,aaronxsu/raster-foundry
|
4d5d4665f2b46e12618b7762246d84884447e99e
|
redash/cli/organization.py
|
redash/cli/organization.py
|
from flask_script import Manager
from redash import models
manager = Manager(help="Organization management commands.")
@manager.option('domains', help="comma separated list of domains to allow")
def set_google_apps_domains(domains):
organization = models.Organization.select().first()
organization.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = domains.split(',')
organization.save()
print "Updated list of allowed domains to: {}".format(organization.google_apps_domains)
@manager.command
def show_google_apps_domains():
organization = models.Organization.select().first()
print "Current list of Google Apps domains: {}".format(organization.google_apps_domains)
|
from flask_script import Manager
from redash import models
manager = Manager(help="Organization management commands.")
@manager.option('domains', help="comma separated list of domains to allow")
def set_google_apps_domains(domains):
organization = models.Organization.select().first()
organization.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = domains.split(',')
organization.save()
print "Updated list of allowed domains to: {}".format(organization.google_apps_domains)
@manager.command
def show_google_apps_domains():
organization = models.Organization.select().first()
print "Current list of Google Apps domains: {}".format(organization.google_apps_domains)
@manager.command
def list():
"""List all organizations"""
orgs = models.Organization.select()
for i, org in enumerate(orgs):
if i > 0:
print "-" * 20
print "Id: {}\nName: {}\nSlug: {}".format(org.id, org.name, org.slug)
|
Add 'manage.py org list' command
|
Add 'manage.py org list' command
'org list' simply prints out the organizations.
|
Python
|
bsd-2-clause
|
pubnative/redash,pubnative/redash,pubnative/redash,pubnative/redash,pubnative/redash
|
c23cd25247974abc85c66451737f4de8d8b19d1b
|
lib/rapidsms/backends/backend.py
|
lib/rapidsms/backends/backend.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def __init__ (self, router):
self.router = router
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
|
Add a constructor method for Backend
|
Add a constructor method for Backend
|
Python
|
bsd-3-clause
|
dimagi/rapidsms,ehealthafrica-ci/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,catalpainternational/rapidsms,unicefuganda/edtrac,catalpainternational/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,unicefuganda/edtrac,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,unicefuganda/edtrac,peterayeni/rapidsms,caktus/rapidsms,lsgunth/rapidsms,caktus/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,dimagi/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,rapidsms/rapidsms-core-dev,peterayeni/rapidsms,rapidsms/rapidsms-core-dev,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,dimagi/rapidsms-core-dev,caktus/rapidsms,dimagi/rapidsms-core-dev
|
7a37e3afa29410636c75408bc649e70c519e07f1
|
test/user_profile_test.py
|
test/user_profile_test.py
|
import json
from pymessenger.user_profile import UserProfileApi
from test_env import *
upa = UserProfileApi(PAGE_ACCESS_TOKEN, app_secret=APP_SECRET)
def test_fields_blank():
user_profile = upa.get(TEST_USER_ID)
assert user_profile is not None
def test_fields():
fields = ['first_name', 'last_name']
user_profile = upa.get(TEST_USER_ID, fields=fields)
assert user_profile is not None
assert len(user_profile.keys()) == len(fields)
|
import json
import sys, os
sys.path.append(os.path.realpath(os.path.dirname(__file__)+"/.."))
from pymessenger.user_profile import UserProfileApi
TOKEN = os.environ.get('TOKEN')
APP_SECRET = os.environ.get('APP_SECRET')
TEST_USER_ID = os.environ.get('RECIPIENT_ID')
upa = UserProfileApi(TOKEN, app_secret=APP_SECRET)
def test_fields_blank():
user_profile = upa.get(TEST_USER_ID)
assert user_profile is not None
def test_fields():
fields = ['first_name', 'last_name']
user_profile = upa.get(TEST_USER_ID, fields=fields)
assert user_profile is not None
assert len(user_profile.keys()) == len(fields)
|
Fix user profile test to include same environment variables
|
Fix user profile test to include same environment variables
|
Python
|
mit
|
karlinnolabs/pymessenger,Cretezy/pymessenger2,davidchua/pymessenger
|
2fec4b3ffa1619f81088383c9f565b51f6171fd6
|
seaborn/miscplot.py
|
seaborn/miscplot.py
|
from __future__ import division
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
__all__ = ["palplot", "dogplot"]
def palplot(pal, size=1):
"""Plot the values in a color palette as a horizontal array.
Parameters
----------
pal : sequence of matplotlib colors
colors, i.e. as returned by seaborn.color_palette()
size :
scaling factor for size of plot
"""
n = len(pal)
f, ax = plt.subplots(1, 1, figsize=(n * size, size))
ax.imshow(np.arange(n).reshape(1, n),
cmap=mpl.colors.ListedColormap(list(pal)),
interpolation="nearest", aspect="auto")
ax.set_xticks(np.arange(n) - .5)
ax.set_yticks([-.5, .5])
ax.set_xticklabels([])
ax.set_yticklabels([])
def dogplot():
"""Who's a good boy?"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
from io import BytesIO
url = "https://github.com/mwaskom/seaborn-data/raw/master/png/img1.png"
data = BytesIO(urlopen(url).read())
img = plt.imread(data)
f, ax = plt.subplots(figsize=(5, 5), dpi=100)
f.subplots_adjust(0, 0, 1, 1)
ax.imshow(img)
ax.set_axis_off()
|
from __future__ import division
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
__all__ = ["palplot", "dogplot"]
def palplot(pal, size=1):
"""Plot the values in a color palette as a horizontal array.
Parameters
----------
pal : sequence of matplotlib colors
colors, i.e. as returned by seaborn.color_palette()
size :
scaling factor for size of plot
"""
n = len(pal)
f, ax = plt.subplots(1, 1, figsize=(n * size, size))
ax.imshow(np.arange(n).reshape(1, n),
cmap=mpl.colors.ListedColormap(list(pal)),
interpolation="nearest", aspect="auto")
ax.set_xticks(np.arange(n) - .5)
ax.set_yticks([-.5, .5])
ax.set_xticklabels([])
ax.set_yticklabels([])
def dogplot():
"""Who's a good boy?"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
from io import BytesIO
url = "https://github.com/mwaskom/seaborn-data/raw/master/png/img{}.png"
pic = np.random.randint(2, 7)
data = BytesIO(urlopen(url.format(pic)).read())
img = plt.imread(data)
f, ax = plt.subplots(figsize=(5, 5), dpi=100)
f.subplots_adjust(0, 0, 1, 1)
ax.imshow(img)
ax.set_axis_off()
|
Update to reflect new example data
|
Update to reflect new example data
|
Python
|
bsd-3-clause
|
arokem/seaborn,mwaskom/seaborn,anntzer/seaborn,arokem/seaborn,mwaskom/seaborn,anntzer/seaborn
|
694408d7f96c83318bbfc0d88be2a99a116deb90
|
select2/__init__.py
|
select2/__init__.py
|
VERSION = (0, 7)
__version__ = '.'.join(map(str, VERSION))
DATE = "2014-06-17"
|
VERSION = (0, 8)
__version__ = '.'.join(map(str, VERSION))
DATE = "2014-06-17"
|
Fix format syntax with python 2.6 - upgrade version
|
Fix format syntax with python 2.6 - upgrade version
|
Python
|
mit
|
20tab/twentytab-select2,20tab/twentytab-select2,20tab/twentytab-select2
|
a4c5e9a970a297d59000468dde8423fa9db00c0f
|
packs/fixtures/actions/scripts/streamwriter-script.py
|
packs/fixtures/actions/scripts/streamwriter-script.py
|
#!/usr/bin/env python
import argparse
import sys
import ast
from lib.exceptions import CustomException
class StreamWriter(object):
def run(self, stream):
if stream.upper() == 'STDOUT':
sys.stdout.write('STREAM IS STDOUT.')
return stream
if stream.upper() == 'STDERR':
sys.stderr.write('STREAM IS STDERR.')
return stream
raise CustomException('Invalid stream specified.')
def main(args):
stream = args.stream
writer = StreamWriter()
stream = writer.run(stream)
str_arg = args.str_arg
int_arg = args.int_arg
obj_arg = args.obj_arg
if str_arg:
sys.stdout.write(' STR: %s' % str_arg)
if int_arg:
sys.stdout.write(' INT: %d' % int_arg)
if obj_arg:
sys.stdout.write(' OBJ: %s' % obj_arg)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='')
parser.add_argument('--stream', help='Stream.', required=True)
parser.add_argument('--str_arg', help='Some string arg.')
parser.add_argument('--int_arg', help='Some int arg.', type=float)
parser.add_argument('--obj_arg', help='Some dict arg.', type=ast.literal_eval)
args = parser.parse_args()
main(args)
|
#!/usr/bin/env python
import argparse
import sys
import ast
import re
from lib.exceptions import CustomException
class StreamWriter(object):
def run(self, stream):
if stream.upper() == 'STDOUT':
sys.stdout.write('STREAM IS STDOUT.')
return stream
if stream.upper() == 'STDERR':
sys.stderr.write('STREAM IS STDERR.')
return stream
raise CustomException('Invalid stream specified.')
def main(args):
stream = args.stream
writer = StreamWriter()
stream = writer.run(stream)
str_arg = args.str_arg
int_arg = args.int_arg
obj_arg = args.obj_arg
if str_arg:
sys.stdout.write(' STR: %s' % str_arg)
if int_arg:
sys.stdout.write(' INT: %d' % int_arg)
if obj_arg:
# Remove any u'' so it works consistently under Python 2 and 3.x
obj_arg_str = str(obj_arg)
value = re.sub("u'(.*?)'", r"'\1'", obj_arg_str)
sys.stdout.write(' OBJ: %s' % value)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='')
parser.add_argument('--stream', help='Stream.', required=True)
parser.add_argument('--str_arg', help='Some string arg.')
parser.add_argument('--int_arg', help='Some int arg.', type=float)
parser.add_argument('--obj_arg', help='Some dict arg.', type=ast.literal_eval)
args = parser.parse_args()
main(args)
|
Fix streamwriter action so it doesn't include "u" type prefix in the object result.
|
Fix streamwriter action so it doesn't include "u" type prefix in the
object result.
This way it works consistently and correctly under Python 2 and Python
3.
|
Python
|
apache-2.0
|
StackStorm/st2tests,StackStorm/st2tests,StackStorm/st2tests
|
9fba6f871068b0d40b71b9de4f69ac59bc33f567
|
tests/test_CheckButton.py
|
tests/test_CheckButton.py
|
#!/usr/bin/env python
import unittest
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
class CheckButtonTest(unittest.TestCase):
def testForBool(self):
myChkBtn = ProxyCheckButton()
# PyGObject bug, we cannot set bool in the constructor with
# introspection
#self.assertEqual(myChkBtn.props.data_type, 'bool')
# this test doens't work... maybe be a pygtk bug
#self.assertRaises(TypeError, myChkBtn.set_property, 'data-type', str)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
import unittest
import gtk
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
class CheckButtonTest(unittest.TestCase):
def testForBool(self):
myChkBtn = ProxyCheckButton()
assert isinstance(myChkBtn, gtk.CheckButton)
# PyGObject bug, we cannot set bool in the constructor with
# introspection
#self.assertEqual(myChkBtn.props.data_type, 'bool')
# this test doens't work... maybe be a pygtk bug
#self.assertRaises(TypeError, myChkBtn.set_property, 'data-type', str)
if __name__ == '__main__':
unittest.main()
|
Add a silly assert to avoid a pyflakes warning
|
Add a silly assert to avoid a pyflakes warning
|
Python
|
lgpl-2.1
|
stoq/kiwi
|
080cda37b93010232481c8fd6090a3909a086fe4
|
tests/test_mdx_embedly.py
|
tests/test_mdx_embedly.py
|
import markdown
from mdx_embedly import EmbedlyExtension
def test_embedly():
s = "[https://github.com/yymm:embed]"
expected = """
<p>
<a class="embedly-card" href="https://github.com/yymm">embed.ly</a>
<script async src="//cdn.embedly.com/widgets/platform.js"charset="UTF-8"></script>
</p>
""".strip()
html = markdown.markdown(s, extensions=[EmbedlyExtension()])
assert html == expected
def test_gist():
s = "[https://gist.github.com/yymm/726df7f0e4ed48e54a06:embed]"
expected = """
<p>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/gist-embed/2.4/gist-embed.min.js"></script>
<code data-gist-id="726df7f0e4ed48e54a06"></code>
</p>
""".strip()
html = markdown.markdown(s, extensions=[EmbedlyExtension()])
assert html == expected
|
import markdown
from mdx_embedly import EmbedlyExtension
def test_embedly():
s = "[https://github.com/yymm:embed]"
expected = """
<p>
<a class="embedly-card" href="https://github.com/yymm">embed.ly</a>
<script async src="//cdn.embedly.com/widgets/platform.js" charset="UTF-8"></script>
</p>
""".strip()
html = markdown.markdown(s, extensions=[EmbedlyExtension()])
assert html == expected
def test_gist():
s = "[https://gist.github.com/yymm/726df7f0e4ed48e54a06:embed]"
expected = """
<p>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/gist-embed/2.4/gist-embed.min.js"></script>
<code data-gist-id="726df7f0e4ed48e54a06"></code>
</p>
""".strip()
html = markdown.markdown(s, extensions=[EmbedlyExtension()])
assert html == expected
|
Update expected value on test
|
Update expected value on test
|
Python
|
mit
|
yymm/mdx_embedly
|
82572b32e5ddc8dd8eade591708eb2ab45ea2ae3
|
voctocore/lib/clock.py
|
voctocore/lib/clock.py
|
#!/usr/bin/python3
import logging
from gi.repository import Gst, GstNet
__all__ = ['Clock', 'NetTimeProvider']
port = 9998
log = logging.getLogger('Clock')
log.debug("Obtaining System-Clock")
Clock = Gst.SystemClock.obtain()
log.info("Using System-Clock for all Pipelines: %s", Clock)
log.info("Starting NetTimeProvider on Port %u", port)
NetTimeProvider6 = GstNet.NetTimeProvider.new(Clock, '::', port)
|
#!/usr/bin/python3
import logging
from gi.repository import Gst, GstNet
__all__ = ['Clock', 'NetTimeProvider']
port = 9998
log = logging.getLogger('Clock')
log.debug("Obtaining System-Clock")
Clock = Gst.SystemClock.obtain()
log.info("Using System-Clock for all Pipelines: %s", Clock)
log.info("Starting NetTimeProvider on Port %u", port)
NetTimeProvider = GstNet.NetTimeProvider.new(Clock, '::', port)
|
Rename NetTimeProvider6 to NetTimeProvider, to match exports
|
voctocore: Rename NetTimeProvider6 to NetTimeProvider, to match exports
|
Python
|
mit
|
h01ger/voctomix,voc/voctomix,voc/voctomix,h01ger/voctomix
|
0fa23851cbe33ba0d3bddb8367d7089545de6847
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from distutils.core import setup
setup(
name = 'qless-py',
version = '0.10.0',
description = 'Redis-based Queue Management',
long_description = '''
Redis-based queue management, with heartbeating, job tracking,
stats, notifications, and a whole lot more.''',
url = 'http://github.com/seomoz/qless-py',
author = 'Dan Lecocq',
author_email = '[email protected]',
license = "MIT License",
keywords = 'redis, qless, job',
packages = ['qless', 'qless.workers'],
package_dir = {
'qless': 'qless',
'qless.workers': 'qless/workers'},
package_data = {'qless': ['qless-core/*.lua']},
include_package_data = True,
scripts = ['bin/qless-py-worker'],
extras_require = {
'ps': ['setproctitle']
},
install_requires = [
'argparse', 'hiredis', 'redis', 'psutil', 'simplejson'],
classifiers = [
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
]
)
|
#! /usr/bin/env python
from distutils.core import setup
setup(
name = 'qless-py',
version = '0.10.0',
description = 'Redis-based Queue Management',
long_description = '''
Redis-based queue management, with heartbeating, job tracking,
stats, notifications, and a whole lot more.''',
url = 'http://github.com/seomoz/qless-py',
author = 'Dan Lecocq',
author_email = '[email protected]',
license = "MIT License",
keywords = 'redis, qless, job',
packages = ['qless', 'qless.workers'],
package_dir = {
'qless': 'qless',
'qless.workers': 'qless/workers'},
package_data = {'qless': ['qless-core/*.lua']},
include_package_data = True,
scripts = ['bin/qless-py-worker'],
extras_require = {
'ps': ['setproctitle']
},
install_requires = [
'argparse', 'decorator', 'hiredis', 'redis', 'psutil', 'simplejson'],
classifiers = [
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
]
)
|
Fix for "No module named decorator" on fresh environment installs.
|
Fix for "No module named decorator" on fresh environment installs.
Fixes regression from 4b26b5837ced0c2f76495b05b87e63e05f81c2af.
|
Python
|
mit
|
seomoz/qless-py,seomoz/qless-py
|
8629221d23cf4fe8a447b12930fdee4801cd82f9
|
setup.py
|
setup.py
|
#! /usr/bin/env python
"""TODO: Maybe add a docstring containing a long description
This would double as something we could put int the `long_description`
parameter for `setup` and it would squelch some complaints pylint has on
`setup.py`.
"""
from setuptools import setup
setup(name='demandlib',
version='0.1',
author='oemof developing group',
url='http://github.com/oemof/demandlib',
license='GPL3',
author_email='[email protected]',
description='Demandlib of the open energy modelling framework',
packages=['demandlib'],
package_dir={'demandlib': 'demandlib'},
install_requires=['numpy >= 1.7.0',
'pandas >= 0.18.0']
)
|
#! /usr/bin/env python
"""TODO: Maybe add a docstring containing a long description
This would double as something we could put int the `long_description`
parameter for `setup` and it would squelch some complaints pylint has on
`setup.py`.
"""
from setuptools import setup
import os
setup(name='demandlib',
version='0.1',
author='oemof developing group',
url='http://github.com/oemof/demandlib',
license='GPL3',
author_email='[email protected]',
description='Demandlib of the open energy modelling framework',
packages=['demandlib'],
package_dir={'demandlib': 'demandlib'},
package_data = {
'demandlib': [
os.path.join('bdew_data', 'selp_series.csv'),
os.path.join('bdew_data', 'shlp_hour_factors.csv'),
os.path.join('bdew_data', 'shlp_sigmoid_factors.csv'),
os.path.join('bdew_data', 'shlp_weekday_factors.csv')]},
install_requires=['numpy >= 1.7.0',
'pandas >= 0.18.0']
)
|
Fix data availability when installed via pip
|
Fix data availability when installed via pip
|
Python
|
mit
|
oemof/demandlib
|
5476145559e0e47dac47b41dd4bfdb9fd41bfe29
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Setup script for the pyparsing module distribution."""
from distutils.core import setup
from pyparsing import __version__
setup(# Distribution meta-data
name = "pyparsing",
version = __version__,
description = "Python parsing module",
author = "Paul McGuire",
author_email = "[email protected]",
url = "http://pyparsing.wikispaces.com/",
download_url = "http://sourceforge.net/project/showfiles.php?group_id=97203",
license = "MIT License",
py_modules = ["pyparsing"],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
"""Setup script for the pyparsing module distribution."""
from distutils.core import setup
from pyparsing import __version__
setup(# Distribution meta-data
name = "pyparsing",
version = __version__,
description = "Python parsing module",
author = "Paul McGuire",
author_email = "[email protected]",
url = "http://pyparsing.wikispaces.com/",
download_url = "http://sourceforge.net/project/showfiles.php?group_id=97203",
license = "MIT License",
py_modules = ["pyparsing", "pyparsing_py3"],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
Add change to ship both pyparsing and pyparsing_py3 modules.
|
Add change to ship both pyparsing and pyparsing_py3 modules.
|
Python
|
mit
|
5monkeys/pyparsing
|
d7f9b8376f6ca6e4c87a5b7d6fd2bbaf37b3db2f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from ckanext.qa import __version__
setup(
name='ckanext-qa',
version=__version__,
description='Quality Assurance plugin for CKAN',
long_description='',
classifiers=[],
keywords='',
author='Open Knowledge Foundation',
author_email='[email protected]',
url='http://ckan.org/wiki/Extensions',
license='mit',
packages=find_packages(exclude=['tests']),
namespace_packages=['ckanext', 'ckanext.qa'],
include_package_data=True,
zip_safe=False,
install_requires=[
'celery==2.4.5',
'kombu==1.5.1',
'kombu-sqlalchemy==1.1.0',
'SQLAlchemy>=0.6.6',
'requests==0.6.4',
],
tests_require=[
'nose',
'mock',
],
entry_points='''
[paste.paster_command]
qa=ckanext.qa.commands:QACommand
[ckan.plugins]
qa=ckanext.qa.plugin:QAPlugin
[ckan.celery_task]
tasks=ckanext.qa.celery_import:task_imports
''',
)
|
from setuptools import setup, find_packages
from ckanext.qa import __version__
setup(
name='ckanext-qa',
version=__version__,
description='Quality Assurance plugin for CKAN',
long_description='',
classifiers=[],
keywords='',
author='Open Knowledge Foundation',
author_email='[email protected]',
url='http://ckan.org/wiki/Extensions',
license='mit',
packages=find_packages(exclude=['tests']),
namespace_packages=['ckanext', 'ckanext.qa'],
include_package_data=True,
zip_safe=False,
install_requires=[
'celery==2.4.2',
'kombu==2.1.3',
'kombu-sqlalchemy==1.1.0',
'SQLAlchemy>=0.6.6',
'requests==0.6.4',
],
tests_require=[
'nose',
'mock',
],
entry_points='''
[paste.paster_command]
qa=ckanext.qa.commands:QACommand
[ckan.plugins]
qa=ckanext.qa.plugin:QAPlugin
[ckan.celery_task]
tasks=ckanext.qa.celery_import:task_imports
''',
)
|
Change celery and kombu requirements to match ckanext-datastorer
|
Change celery and kombu requirements to match ckanext-datastorer
|
Python
|
mit
|
ckan/ckanext-qa,ckan/ckanext-qa,ckan/ckanext-qa
|
be6ddfe1aa9a5d812bb3805316c8c85053c676ec
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='wagtail-draftail',
version='0.1a',
description='Draft.js editor for Wagtail, built upon Draftail and draftjs_exporter',
author='Springload',
author_email='[email protected]',
url='https://github.com/springload/wagtaildraftail',
packages=find_packages(),
install_requires=[
'draftjs-exporter>=0.6.2',
]
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(
name='wagtail-draftail',
version='0.1a',
description='Draft.js editor for Wagtail, built upon Draftail and draftjs_exporter',
author='Springload',
author_email='[email protected]',
url='https://github.com/springload/wagtaildraftail',
packages=find_packages(),
install_requires=[
'draftjs-exporter==0.6.2',
]
)
|
Make draftjs-exporter dep strict version check
|
Make draftjs-exporter dep strict version check
|
Python
|
mit
|
gasman/wagtaildraftail,springload/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,gasman/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail
|
6757b7dc89d70b3c2307c489146ed7dfb7e6cce1
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='[email protected]',
url='https://github.com/VonStruddle/PyHunter',
download_url='https://github.com/VonStruddle/PyHunter/archive/0.1.tar.gz',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[],
)
|
from setuptools import setup
setup(
name='pyhunter',
packages=['pyhunter'],
version='0.2',
description='An (unofficial) Python wrapper for the Hunter.io API',
author='Quentin Durantay',
author_email='[email protected]',
url='https://github.com/VonStruddle/PyHunter',
install_requires=['requests'],
keywords=['hunter', 'hunter.io', 'lead generation', 'lead enrichment'],
classifiers=[],
)
|
Remove GitHub to download library
|
Remove GitHub to download library
|
Python
|
mit
|
VonStruddle/PyHunter
|
6505a37ac7aebfb0ac3a4c76b924b9f80d524a23
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="DependencyWatcher-Parser",
version="1.0",
url="http://github.com/DependencyWatcher/parser/",
author="Michael Spector",
license="Apache 2.0",
author_email="[email protected]",
long_description=__doc__,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
"lxml"
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="DependencyWatcher-Parser",
version="1.0",
url="http://github.com/DependencyWatcher/parser/",
author="Michael Spector",
license="Apache 2.0",
author_email="[email protected]",
long_description=__doc__,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
"lxml",
"pyparsing"
]
)
|
Add pyparsing module to dependencies
|
Add pyparsing module to dependencies
|
Python
|
apache-2.0
|
DependencyWatcher/parser,DependencyWatcher/parser
|
92f4f496d755187c9d48d2a34262402ba9295732
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='mutual-debt-simplifier',
version='1.0',
description='Tool to perform mutual debt simplification',
url='https://github.com/ric2b/Mutual_Debt_Simplification',
license='MIT',
author='',
author_email='',
packages=find_packages(exclude='*.tests'),
install_requires=['docopt==0.6.2', 'graphviz==0.8'],
extras_require={
'test': ['unittest'],
},
entry_points={
'console_scripts': [
'simplify-debts=mutual_debt.main:main',
],
}
)
|
from setuptools import setup, find_packages
setup(
name='mutual-debt-simplifier',
version='1.0',
description='Tool to perform mutual debt simplification',
url='https://github.com/ric2b/Mutual_Debt_Simplification',
license='MIT',
author='',
author_email='',
packages=find_packages(exclude=['*.tests']),
install_requires=['docopt==0.6.2', 'graphviz==0.8'],
extras_require={
'test': ['unittest'],
},
entry_points={
'console_scripts': [
'simplify-debts=mutual_debt.main:main',
],
}
)
|
Fix import errors realted to missing 'mutual_debt' module
|
Fix import errors realted to missing 'mutual_debt' module
|
Python
|
mit
|
ric2b/Mutual_Debt_Simplification
|
729c1b3f798c7dafa3b8d528aa9bdf123f3068b1
|
setup.py
|
setup.py
|
#coding:utf-8
import sys
from distutils.core import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = ['tests/']
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='fantasy_data',
version='1.1.2',
description='FantasyData Python',
author='Fantasy Football Calculator',
author_email='[email protected]',
url='https://fantasyfootballcalculator.com/fantasydata-python',
packages=['fantasy_data'],
keywords=['fantasy', 'sports', 'football', 'nba'],
install_requires=[
"requests",
"six",
],
tests_require=['pytest'],
cmdclass = {'test': PyTest},
download_url='https://github.com/ffcalculator/fantasydata-python/archive/v1.1.1.tar.gz'
)
|
#coding:utf-8
import sys
from distutils.core import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = ['tests/']
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='fantasy_data',
version='1.1.3',
description='FantasyData Python',
author='Fantasy Football Calculator',
author_email='[email protected]',
url='https://fantasyfootballcalculator.com/fantasydata-python',
packages=['fantasy_data'],
keywords=['fantasy', 'sports', 'football', 'nba'],
install_requires=[
"requests",
"six",
],
tests_require=['pytest'],
cmdclass = {'test': PyTest},
download_url='https://github.com/ffcalculator/fantasydata-python/archive/v1.1.3.tar.gz'
)
|
Fix the download URL to match the version number.
|
Fix the download URL to match the version number.
|
Python
|
mit
|
ffcalculator/fantasydata-python
|
e12d086e5ad4692cd6be16eed0649ebc89e6c640
|
setup.py
|
setup.py
|
# coding: utf-8
"""
Plotting and analysis tools for the ARTIS 3D supernova radiative transfer code.
"""
import os
from setuptools import find_packages, setup
setup(name="artistools",
version=0.1,
author="Luke Shingles",
author_email="[email protected]",
packages=find_packages(),
url="https://www.github.com/lukeshingles/artis-tools/",
license="MIT",
description="Plotting and analysis tools for the ARTIS 3D supernova radiative transfer code.",
long_description=open(os.path.join(os.path.dirname(__file__), "README.md")).read(),
install_requires=open(os.path.join(os.path.dirname(__file__), "requirements.txt")).read(),
python_requires='>==3.6',
# test_suite='tests',
setup_requires=['pytest-runner', 'pytest-cov'],
tests_require=['pytest', 'pytest-runner', 'pytest-cov'],)
|
# coding: utf-8
"""
Plotting and analysis tools for the ARTIS 3D supernova radiative transfer code.
"""
import datetime
import os
from setuptools import find_packages, setup
print(datetime.datetime.now().isoformat())
setup(name="artistools",
version=datetime.datetime.now().isoformat(),
author="Luke Shingles",
author_email="[email protected]",
packages=find_packages(),
url="https://www.github.com/lukeshingles/artis-tools/",
license="MIT",
description="Plotting and analysis tools for the ARTIS 3D supernova radiative transfer code.",
long_description=open(os.path.join(os.path.dirname(__file__), "README.md")).read(),
install_requires=open(os.path.join(os.path.dirname(__file__), "requirements.txt")).read(),
python_requires='>==3.6',
# test_suite='tests',
setup_requires=['pytest-runner', 'pytest-cov'],
tests_require=['pytest', 'pytest-runner', 'pytest-cov'],)
|
Use current date and time as version number
|
Use current date and time as version number
|
Python
|
mit
|
lukeshingles/artistools,lukeshingles/artistools
|
119a1de0f495ee84d8354b75c05659f3d5a8b367
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""django-c5filemanager setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2010 Daniele Tricoli <[email protected]>
Read LICENSE for more informations.
"""
import os
from setuptools import setup, find_packages
from c5filemanager import get_version
def read(filename):
"""Small tool function to read README."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
download_page = 'http://downloads.mornie.org/django-c5filemanager/'
setup(
name='django-c5filemanager',
version=get_version(),
description='Django connector for Core Five Filemanager.',
long_description=read('README'),
author='Daniele Tricoli',
author_email='[email protected]',
url='http://mornie.org/projects/django-c5filemanager/',
download_url='%sdjango-c5filemanager-%s.tar.gz' % (download_page,
get_version()),
packages=find_packages(),
package_data = {
'c5filemanager': [
'locale/*/LC_MESSAGES/*',
],
},
install_requires = [
'simplejson>=2.1.0',
'PIL',
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
)
|
# -*- coding: utf-8 -*-
"""django-c5filemanager setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2010 Daniele Tricoli <[email protected]>
Read LICENSE for more informations.
"""
import os
from setuptools import setup, find_packages
from c5filemanager import get_version
def read(filename):
"""Small tool function to read README."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 4 - Beta
Environment :: Web Environment
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Operating System :: OS Independent
Programming Language :: Python
Framework :: Django
'''.strip().splitlines()
setup(
name='django-c5filemanager',
version=get_version(),
license = 'BSD',
description='Django connector for Core Five Filemanager.',
long_description=read('README'),
author='Daniele Tricoli',
author_email='[email protected]',
url='http://mornie.org/projects/django-c5filemanager/',
packages=find_packages(),
package_data = {
'c5filemanager': [
'locale/*/LC_MESSAGES/*',
],
},
install_requires = [
'simplejson>=2.1.0',
'PIL',
],
classifiers = classifiers,
)
|
Use PyPI for host sdist; updated classifiers
|
Use PyPI for host sdist; updated classifiers
|
Python
|
bsd-3-clause
|
eriol/django-c5filemanager
|
3a308c37856bafd8ecbc64a2e425c8199dcf2e68
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='LinkFinder',
packages=find_packages(),
version='1.0',
description="A python script that finds endpoints in JavaScript files.",
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
install_requires=['argparse', 'jsbeautifier'],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='LinkFinder',
packages=find_packages(),
version='1.0',
description="A python script that finds endpoints in JavaScript files.",
long_description=open('README.md').read(),
author='Gerben Javado',
url='https://github.com/GerbenJavado/LinkFinder',
py_modules=['linkfinder'],
install_requires=['argparse', 'jsbeautifier'],
)
|
Allow use as vendor library
|
Allow use as vendor library
With this little change this tool can be used as a dependency
|
Python
|
mit
|
GerbenJavado/LinkFinder,GerbenJavado/LinkFinder
|
4626d74436dc547dca0faea67a95ac2121f348e4
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.2.4',
author='Daniel Halperin',
author_email='[email protected]',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.md').read(),
setup_requires=["requests"],
# see https://stackoverflow.com/questions/18578439
install_requires=["pip >= 1.5.6", "pyOpenSSL >= 0.14", "ndg-httpsclient",
"pyasn1", "requests", "requests_toolbelt",
"json-table-schema<0.2",
"messytables", "unicodecsv"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.2.5',
author='Daniel Halperin',
author_email='[email protected]',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.md').read(),
setup_requires=["requests"],
# see https://stackoverflow.com/questions/18578439
install_requires=["pip >= 1.5.6", "pyOpenSSL >= 0.14", "ndg-httpsclient",
"pyasn1", "requests", "requests_toolbelt",
"messytables", "unicodecsv"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
Undo workaround for broken json-table-schema release (via messytables)
|
Undo workaround for broken json-table-schema release (via messytables)
|
Python
|
bsd-3-clause
|
uwescience/myria-python,uwescience/myria-python
|
57fdb0bc4c3751ef89ec6fddd1ccb85a5f4ad56b
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import sys
import os.path
from setuptools import Command, find_packages, setup
HERE = os.path.abspath(os.path.dirname(__file__))
README_PATH = os.path.join(HERE, 'README.rst')
try:
README = open(README_PATH).read()
except IOError:
README = ''
setup(
name='rollbar-udp-agent',
version='0.0.13',
description='Rollbar server-side UDP agent',
long_description=README,
author='Luis Rascão',
author_email='[email protected]',
url='http://github.com/lrascao/rollbar-udp-agent',
entry_points={
"console_scripts": [
"rollbar-udp-agent=rollbar_udp_agent:main"
],
},
packages=['rollbar_udp_agent'],
data_files=[('etc', ['rollbar-udp-agent.conf']),
('etc/init.d', ['rollbar-udp-agent'])],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware",
"Topic :: Software Development",
"Topic :: Software Development :: Bug Tracking",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Quality Assurance",
],
install_requires=[
'requests'
],
)
|
# -*- coding: utf-8 -*-
import sys
import os.path
from setuptools import Command, find_packages, setup
HERE = os.path.abspath(os.path.dirname(__file__))
README_PATH = os.path.join(HERE, 'README.rst')
try:
README = open(README_PATH).read()
except IOError:
README = ''
setup(
name='rollbar-udp-agent',
version='0.0.13',
description='Rollbar server-side UDP agent',
long_description=README,
author='Luis Rascão',
author_email='[email protected]',
url='http://github.com/lrascao/rollbar-udp-agent',
entry_points={
"console_scripts": [
"rollbar-udp-agent=rollbar_udp_agent:main"
],
},
packages=['rollbar_udp_agent'],
data_files=[('/etc', ['rollbar-udp-agent.conf']),
('/etc/init.d', ['rollbar-udp-agent'])],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware",
"Topic :: Software Development",
"Topic :: Software Development :: Bug Tracking",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Quality Assurance",
],
install_requires=[
'requests'
],
)
|
Make data_files paths absolute again
|
Make data_files paths absolute again
|
Python
|
mit
|
lrascao/rollbar-udp-agent,lrascao/rollbar-udp-agent
|
1ec5fb8f7c85464da24f0a1db553bcd0cce7fe39
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import cplcom
setup(
name='CPLCom',
version=cplcom.__version__,
packages=find_packages(),
package_data={'cplcom': ['../media/*']},
install_requires=['moa', 'kivy'],
author='Matthew Einhorn',
author_email='[email protected]',
license='MIT',
description=(
'Project for common widgets used with Moa.')
)
|
from setuptools import setup, find_packages
import cplcom
setup(
name='CPLCom',
version=cplcom.__version__,
packages=find_packages(),
package_data={'cplcom': ['../media/*', '*.kv']},
install_requires=['moa', 'kivy'],
author='Matthew Einhorn',
author_email='[email protected]',
license='MIT',
description=(
'Project for common widgets used with Moa.')
)
|
Include kv files in package.
|
Include kv files in package.
|
Python
|
mit
|
matham/cplcom
|
e4de6c2f10605fda5537d838d232e4d10e680a28
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1.4',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
install_requires=['lxml'],
author='Chris Wacek',
author_email='[email protected]',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
],
entry_points = {
'console_scripts': [
'cobertura-clover-transform=cobertura_clover_transform.converter:main'
]
}
)
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1.4.post1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
install_requires=['lxml'],
author='Chris Wacek',
author_email='[email protected]',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
],
entry_points = {
'console_scripts': [
'cobertura-clover-transform=cobertura_clover_transform.converter:main'
]
}
)
|
Bump version for trove classifiers
|
Bump version for trove classifiers
|
Python
|
mit
|
cwacek/cobertura-clover-transform
|
f6ba4c52be9fd4be9eb7d04c15ee441d86304f0c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="cdispyutils",
version="0.1.0",
description="General utilities",
license="Apache",
install_requires=[
"six==1.10.0",
"requests==2.13.0",
],
packages=find_packages(),
)
|
from setuptools import setup, find_packages
setup(
name="cdispyutils",
version="0.1.0",
description="General utilities",
license="Apache",
install_requires=[
"six==1.11.0",
"requests==2.13.0",
],
packages=find_packages(),
)
|
Update six version to match
|
Update six version to match
|
Python
|
apache-2.0
|
uc-cdis/cdis-python-utils
|
eb15038cd582e087225985947e4b98ffbc86d812
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('git_externals/__init__.py') as fp:
exec(fp.read())
classifiers = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
]
setup(
name='git-externals',
version=__version__,
description='utility to manage svn externals',
long_description='',
packages=['git_externals'],
install_requires=['click>=6.0'],
entry_points={
'console_scripts': [
'git-externals = git_externals.git_externals:cli',
'gittify-cleanup = git_externals.cleanup_repo:main',
'svn-externals-info = git_externals.process_externals:main',
'gittify = git_externals.gittify:main',
'gittify-gen = git_externals.makefiles:cli',
],
},
author=__author__,
author_email=__email__,
license='MIT',
classifiers=classifiers,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('git_externals/__init__.py') as fp:
exec(fp.read())
classifiers = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
]
setup(
name='git-externals',
version=__version__,
description='utility to manage svn externals',
long_description='',
packages=['git_externals'],
install_requires=['click',
'pathlib'],
entry_points={
'console_scripts': [
'git-externals = git_externals.git_externals:cli',
'gittify-cleanup = git_externals.cleanup_repo:main',
'svn-externals-info = git_externals.process_externals:main',
'gittify = git_externals.gittify:main',
'gittify-gen = git_externals.makefiles:cli',
],
},
author=__author__,
author_email=__email__,
license='MIT',
classifiers=classifiers,
)
|
Remove version requirement, add pathlib
|
Remove version requirement, add pathlib
|
Python
|
mit
|
develersrl/git-externals,develersrl/git-externals,develersrl/git-externals
|
af1d3b67bb6428a298e5028b7c86624d2f7f00c8
|
setup.py
|
setup.py
|
"""
Copyright (c) 2010-2013, Anthony Garcia <[email protected]>
Distributed under the ISC License (see LICENSE)
"""
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
"""
Copyright (c) 2010-2013, Anthony Garcia <[email protected]>
Distributed under the ISC License (see LICENSE)
"""
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
long_description = "Please see the `README <https://github.com/Lagg/steamodd/blob/master/README.md>`_ for a full description.",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
Add rst long description for pypi
|
Add rst long description for pypi
|
Python
|
isc
|
miedzinski/steamodd,Lagg/steamodd
|
c7a0f94eced9c2ac6f8a27c10a7a93da4b97f7d7
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='django-markwhat',
version=".".join(map(str, __import__('django_markwhat').__version__)),
packages=['django_markwhat', 'django_markwhat.templatetags'],
url='http://pypi.python.org/pypi/django-markwhat',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
install_requires=['Django', ],
description="""A collection of template filters that implement
common markup languages.""",
long_description=open('README.rst').read(),
keywords=[
'django',
'markdown',
'markup',
'textile',
'rst',
'reStructuredText',
'docutils',
'commonmark',
'web'
],
platforms='OS Independent',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
from setuptools import setup
setup(
name='django-markwhat',
version=".".join(map(str, __import__('django_markwhat').__version__)),
packages=['django_markwhat', 'django_markwhat.templatetags'],
url='http://pypi.python.org/pypi/django-markwhat',
license=open('LICENSE').read(),
author='Alireza Savand',
author_email='[email protected]',
install_requires=['Django', ],
description="A collection of template filters that implement " + \
"common markup languages.",
long_description=open('README.rst').read(),
keywords=[
'django',
'markdown',
'markup',
'textile',
'rst',
'reStructuredText',
'docutils',
'commonmark',
'web'
],
platforms='OS Independent',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
Fix summary - pypi doesn't allow multiple lines
|
Fix summary - pypi doesn't allow multiple lines
|
Python
|
bsd-3-clause
|
Alir3z4/django-markwhat
|
12daa2e0f2cc8772fd6ff2ee3abf6054e36e1f18
|
settings.py
|
settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#Main Settings
blog_name = "Pomash"
blog_author = "JmPotato"
blog_url = "http://xxx.oo/"
twitter_card = False
twitter_username = "@Jm_Potato"
analytics = ""
enable_comment = True
disqus_name = "pomash"
theme = "default"
post_per_page = 3
#Please Change it!!!!!!
cookie_secret = "PLEASE BE SURE TO CHANGE ME!!!!!"
#Admin Settings
login_username = "admin"
#login_password = "admin"
#Development Settings
DeBug = True
#Dropbox
app_token = ''
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#Main Settings
blog_name = "Pomash"
blog_author = "JmPotato"
blog_url = "http://xxx.oo/"
twitter_card = False
twitter_username = "@Jm_Potato"
analytics = ""
enable_comment = True
disqus_name = "pomash"
theme = "default"
post_per_page = 3
#Please Change it!!!!!!
cookie_secret = "PLEASE BE SURE TO CHANGE ME!!!!!"
#Admin Settings
login_username = "admin"
#login_password = "admin"
#Development Settings
DeBug = False
#Dropbox
app_token = ''
|
Set debug mode off as default
|
Set debug mode off as default
|
Python
|
mit
|
JmPotato/Pomash,JmPotato/Pomash
|
d815c8de309239e3c6f28e54793c9973ca9acc39
|
twilio/values.py
|
twilio/values.py
|
unset = object()
def of(d):
return {k: v for k, v in d.iteritems() if v != unset}
|
from six import iteritems
unset = object()
def of(d):
return {k: v for k, v in iteritems(d) if v != unset}
|
Replace iteritems with six helper
|
Replace iteritems with six helper
|
Python
|
mit
|
twilio/twilio-python,tysonholub/twilio-python
|
b93722df95d3907782cdff034df360b79d1fd093
|
python/paddle/v2/dataset/common.py
|
python/paddle/v2/dataset/common.py
|
import hashlib
import os
import shutil
import urllib2
__all__ = ['DATA_HOME', 'download']
DATA_HOME = os.path.expanduser('~/.cache/paddle_data_set')
if not os.path.exists(DATA_HOME):
os.makedirs(DATA_HOME)
def download(url, md5):
filename = os.path.split(url)[-1]
assert DATA_HOME is not None
filepath = os.path.join(DATA_HOME, md5)
if not os.path.exists(filepath):
os.makedirs(filepath)
__full_file__ = os.path.join(filepath, filename)
def __file_ok__():
if not os.path.exists(__full_file__):
return False
md5_hash = hashlib.md5()
with open(__full_file__, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
md5_hash.update(chunk)
return md5_hash.hexdigest() == md5
while not __file_ok__():
response = urllib2.urlopen(url)
with open(__full_file__, mode='wb') as of:
shutil.copyfileobj(fsrc=response, fdst=of)
return __full_file__
|
import hashlib
import os
import shutil
import urllib2
__all__ = ['DATA_HOME', 'download']
DATA_HOME = os.path.expanduser('~/.cache/paddle/dataset')
if not os.path.exists(DATA_HOME):
os.makedirs(DATA_HOME)
def download(url, md5):
filename = os.path.split(url)[-1]
assert DATA_HOME is not None
filepath = os.path.join(DATA_HOME, md5)
if not os.path.exists(filepath):
os.makedirs(filepath)
__full_file__ = os.path.join(filepath, filename)
def __file_ok__():
if not os.path.exists(__full_file__):
return False
md5_hash = hashlib.md5()
with open(__full_file__, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
md5_hash.update(chunk)
return md5_hash.hexdigest() == md5
while not __file_ok__():
response = urllib2.urlopen(url)
with open(__full_file__, mode='wb') as of:
shutil.copyfileobj(fsrc=response, fdst=of)
return __full_file__
|
Set data cache home directory to ~/.cache/paddle/dataset
|
Set data cache home directory to ~/.cache/paddle/dataset
|
Python
|
apache-2.0
|
yu239/Paddle,reyoung/Paddle,livc/Paddle,chengduoZH/Paddle,emailweixu/Paddle,jacquesqiao/Paddle,Canpio/Paddle,putcn/Paddle,yu239/Paddle,putcn/Paddle,gangliao/Paddle,lispc/Paddle,gangliao/Paddle,chengduoZH/Paddle,gangliao/Paddle,PaddlePaddle/Paddle,pengli09/Paddle,lcy-seso/Paddle,gangliao/Paddle,pengli09/Paddle,cxysteven/Paddle,Canpio/Paddle,baidu/Paddle,yu239/Paddle,luotao1/Paddle,lcy-seso/Paddle,wen-bo-yang/Paddle,lcy-seso/Paddle,Canpio/Paddle,emailweixu/Paddle,PaddlePaddle/Paddle,emailweixu/Paddle,cxysteven/Paddle,pkuyym/Paddle,pkuyym/Paddle,QiJune/Paddle,putcn/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,cxysteven/Paddle,reyoung/Paddle,wen-bo-yang/Paddle,wen-bo-yang/Paddle,luotao1/Paddle,chengduoZH/Paddle,pkuyym/Paddle,wen-bo-yang/Paddle,QiJune/Paddle,luotao1/Paddle,tensor-tang/Paddle,emailweixu/Paddle,luotao1/Paddle,pengli09/Paddle,baidu/Paddle,luotao1/Paddle,Canpio/Paddle,QiJune/Paddle,yu239/Paddle,pkuyym/Paddle,gangliao/Paddle,putcn/Paddle,gangliao/Paddle,lispc/Paddle,lispc/Paddle,baidu/Paddle,cxysteven/Paddle,jacquesqiao/Paddle,baidu/Paddle,pengli09/Paddle,luotao1/Paddle,lispc/Paddle,hedaoyuan/Paddle,tensor-tang/Paddle,luotao1/Paddle,lispc/Paddle,livc/Paddle,lcy-seso/Paddle,wen-bo-yang/Paddle,cxysteven/Paddle,emailweixu/Paddle,wen-bo-yang/Paddle,chengduoZH/Paddle,jacquesqiao/Paddle,PaddlePaddle/Paddle,jacquesqiao/Paddle,cxysteven/Paddle,hedaoyuan/Paddle,livc/Paddle,pengli09/Paddle,tensor-tang/Paddle,PaddlePaddle/Paddle,putcn/Paddle,hedaoyuan/Paddle,tensor-tang/Paddle,lispc/Paddle,tensor-tang/Paddle,pkuyym/Paddle,QiJune/Paddle,cxysteven/Paddle,hedaoyuan/Paddle,yu239/Paddle,livc/Paddle,reyoung/Paddle,gangliao/Paddle,Canpio/Paddle,yu239/Paddle,pengli09/Paddle,yu239/Paddle,jacquesqiao/Paddle,yu239/Paddle,hedaoyuan/Paddle,reyoung/Paddle,reyoung/Paddle,Canpio/Paddle,livc/Paddle,hedaoyuan/Paddle,chengduoZH/Paddle,lcy-seso/Paddle,pkuyym/Paddle,Canpio/Paddle,emailweixu/Paddle,livc/Paddle,jacquesqiao/Paddle,hedaoyuan/Paddle,QiJune/Paddle,lcy-seso/Paddle,pengli09/Paddl
e,livc/Paddle,baidu/Paddle,putcn/Paddle,emailweixu/Paddle,hedaoyuan/Paddle,pengli09/Paddle,QiJune/Paddle,Canpio/Paddle,wen-bo-yang/Paddle,lispc/Paddle,lispc/Paddle,reyoung/Paddle,PaddlePaddle/Paddle
|
2989c7074853266fd134a10df4afdcb700499203
|
analyticsdataserver/urls.py
|
analyticsdataserver/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
Update string arg to url() to callable
|
Update string arg to url() to callable
|
Python
|
agpl-3.0
|
Stanford-Online/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api
|
ef516fb03db9bdaa0f0bea97526a65c319b8e43c
|
tohu/v3/utils.py
|
tohu/v3/utils.py
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
Allow passing format option to helper function
|
Allow passing format option to helper function
|
Python
|
mit
|
maxalbert/tohu
|
f12beb5d2fbdc72c12f473c5cac04716f4893666
|
test/viz/test_volcano.py
|
test/viz/test_volcano.py
|
from sequana.viz import Volcano
def test1():
import numpy as np
fc = np.random.randn(1000)
pvalue = np.random.randn(1000)
v = Volcano(fc, -np.log10(pvalue**2), pvalue_threshold=3)
v.plot()
v.plot(logy=True)
|
from sequana.viz import Volcano
import pandas as pd
def test1():
import numpy as np
fc = np.random.randn(1000)
pvalue = np.random.randn(1000)
df = pd.DataFrame({"log2FoldChange": fc, "padj": pvalue ** 2})
v = Volcano(data=df, pvalue_threshold=3)
v.plot()
v.plot(logy=True)
|
Update test for volcano plot
|
Update test for volcano plot
|
Python
|
bsd-3-clause
|
sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana
|
3d1969ebf187ed0f0ee52e84e951f65b108ce4cf
|
l10n_br_coa_simple/hooks.py
|
l10n_br_coa_simple/hooks.py
|
# Copyright (C) 2020 - Gabriel Cardoso de Faria <[email protected]>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, tools, SUPERUSER_ID
def post_init_hook(cr, registry):
    """Post-install hook: link fiscal taxes and optionally load the demo COA.

    Runs with superuser rights right after this module is installed.
    """
    env = api.Environment(cr, SUPERUSER_ID, {})
    coa_simple_tmpl = env.ref(
        'l10n_br_coa_simple.l10n_br_coa_simple_chart_template')
    # Only wire fiscal taxes when l10n_br_account is actually installed;
    # the import is kept local so that dependency stays optional.
    if env['ir.module.module'].search_count([
            ('name', '=', 'l10n_br_account'),
            ('state', '=', 'installed'),
    ]):
        from odoo.addons.l10n_br_account.hooks import load_fiscal_taxes
        # Relate fiscal taxes to account taxes.
        load_fiscal_taxes(env, coa_simple_tmpl)
    # Load COA to Demo Company
    if not tools.config.get('without_demo'):
        # Switch the current user's company so the chart template is
        # loaded for the demo company instead of the default one.
        env.user.company_id = env.ref(
            'l10n_br_fiscal.empresa_simples_nacional')
        coa_simple_tmpl.try_loading_for_current_company()
|
# Copyright (C) 2020 - Gabriel Cardoso de Faria <[email protected]>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, tools, SUPERUSER_ID
def post_init_hook(cr, registry):
    """Post-install hook: link fiscal taxes and optionally load the demo COA.

    Runs with superuser rights right after this module is installed.
    """
    env = api.Environment(cr, SUPERUSER_ID, {})
    coa_simple_tmpl = env.ref(
        'l10n_br_coa_simple.l10n_br_coa_simple_chart_template')
    # Only wire fiscal taxes when l10n_br_account is actually installed;
    # the import is kept local so that dependency stays optional.
    if env['ir.module.module'].search_count([
            ('name', '=', 'l10n_br_account'),
            ('state', '=', 'installed'),
    ]):
        from odoo.addons.l10n_br_account.hooks import load_fiscal_taxes
        # Relate fiscal taxes to account taxes.
        load_fiscal_taxes(env, coa_simple_tmpl)
    # Load COA to Demo Company
    if not tools.config.get('without_demo'):
        # Temporarily point the admin user at the demo company so the
        # chart template is loaded there, then restore the main company.
        user_admin = env.ref('base.user_admin')
        user_admin.company_id = env.ref(
            'l10n_br_base.empresa_simples_nacional')
        coa_simple_tmpl.sudo(
            user=user_admin.id).try_loading_for_current_company()
        user_admin.company_id = env.ref('base.main_company')
|
Use admin user to create COA
|
[FIX] l10n_br_coa_simple: Use admin user to create COA
|
Python
|
agpl-3.0
|
akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil
|
f43f71cb016bc71ea32e80c2fd86f05b6af38468
|
snoop/ipython.py
|
snoop/ipython.py
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
    """IPython magics exposing snoop tracing as a ``%%snoop`` cell magic."""

    @cell_magic
    def snoop(self, _line, cell):
        """Execute the cell under a snoop tracer scoped to this cell.

        Only variables whose names appear in the cell source are
        whitelisted for display, and only the compiled cell's code
        object is traced.
        """
        # Compile through IPython's caching compiler so tracebacks map
        # back to the cell source.
        filename = self.shell.compile.cache(cell)
        code = self.shell.compile(cell, filename, 'exec')
        tracer = snoop()
        tracer.variable_whitelist = set()
        # Every Name node in the cell becomes a traced variable.
        for node in ast.walk(ast.parse(cell)):
            if isinstance(node, ast.Name):
                tracer.variable_whitelist.add(node.id)
        # Restrict tracing to the cell's own code object.
        tracer.target_codes.add(code)
        with tracer:
            self.shell.ex(code)
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
    """IPython magics exposing snoop tracing as a ``%%snoop`` cell magic."""

    @cell_magic
    def snoop(self, _line, cell):
        """Execute the cell under a snoop tracer scoped to this cell.

        Only non-module variables whose names appear in the cell source
        are whitelisted for display, and only the compiled cell's code
        object is traced.
        """
        # Compile through IPython's caching compiler so tracebacks map
        # back to the cell source.
        filename = self.shell.compile.cache(cell)
        code = self.shell.compile(cell, filename, 'exec')
        tracer = snoop()
        tracer.variable_whitelist = set()
        for node in ast.walk(ast.parse(cell)):
            if isinstance(node, ast.Name):
                name = node.id
                # type(ast) is the module type: skip names that are bound
                # to modules in the user's namespace.
                if isinstance(
                    self.shell.user_global_ns.get(name),
                    type(ast),
                ):
                    # hide modules
                    continue
                tracer.variable_whitelist.add(name)
        # Restrict tracing to the cell's own code object.
        tracer.target_codes.add(code)
        with tracer:
            self.shell.ex(code)
|
Hide modules from variables traced by %%snoop
|
Hide modules from variables traced by %%snoop
|
Python
|
mit
|
alexmojaki/snoop,alexmojaki/snoop
|
e63a914457fc10d895eb776a164939da3ddd9464
|
waftools/gogobject.py
|
waftools/gogobject.py
|
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
    # Runs go-gobject-gen, writing its outputs next to the first target.
    run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
    """Task-gen hook: turn a ``.go.in`` file into generated .go/.c/.h files.

    Resolves the go-gobject-gen binary produced by this build and records
    it both as the generator command (GGG) and as a task dependency.
    """
    tg = self.bld.get_tgen_by_name('go-gobject-gen')
    ggg = tg.link_task.outputs[0]
    if not self.env.GGG:
        self.env.GGG = ggg.abspath()
    # foo.go.in -> foo.go plus the generated C source/header pair.
    go_out = node.change_ext('')
    c_out = go_out.change_ext('.gen.c')
    h_out = go_out.change_ext('.gen.h')
    task = self.create_task('gogobject', node, [go_out, c_out, h_out])
    # Re-run generation whenever the generator binary itself changes.
    task.dep_nodes = [ggg]
    return task
|
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
    # Runs go-gobject-gen, writing its outputs next to the first target.
    run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
    """Task-gen hook: turn a ``.go.in`` file into generated .go/.c/.h files.

    Resolves the go-gobject-gen binary produced by this build and records
    it both as the generator command (GGG) and as a task dependency.
    """
    tg = self.bld.get_tgen_by_name('go-gobject-gen')
    ggg = tg.link_task.outputs[0]
    if not self.env.GGG:
        self.env.GGG = ggg.abspath()
    # foo.go.in -> foo.go plus the generated C source/header pair.
    go_out = node.change_ext('')
    c_out = go_out.change_ext('.gen.c')
    h_out = go_out.change_ext('.gen.h')
    task = self.create_task('gogobject', node, [go_out, c_out, h_out])
    # Re-run generation when the generator binary or the per-directory
    # config.json changes.
    # NOTE(review): find_node returns None when config.json is absent —
    # assumes every .go.in directory ships one; confirm.
    task.dep_nodes = [ggg, node.parent.find_node('config.json')]
    return task
|
Use config.json as a go-gobject-gen dependency as well.
|
Use config.json as a go-gobject-gen dependency as well.
|
Python
|
mit
|
nsf/gogobject,nsf/gogobject,nsf/gogobject,nsf/gogobject
|
8d0fe30fadc4834348c0f697b097f4753c7f9d84
|
floppyforms/tests/fields.py
|
floppyforms/tests/fields.py
|
from .base import FloppyFormsTestCase
import floppyforms as forms
class IntegerFieldTests(FloppyFormsTestCase):
    """Tests for floppyforms.IntegerField cleaning behaviour."""

    def test_parse_int(self):
        """clean('15') must return the int 15, not a string."""
        int_field = forms.IntegerField()
        result = int_field.clean('15')
        # assertTrue(15, result) always passed: 15 is truthy and `result`
        # was silently treated as the msg argument. assertEqual actually
        # compares the parsed value.
        self.assertEqual(15, result)
        self.assertIsInstance(result, int)
|
from .base import FloppyFormsTestCase
import floppyforms as forms
class IntegerFieldTests(FloppyFormsTestCase):
    """Tests for floppyforms.IntegerField cleaning behaviour."""

    def test_parse_int(self):
        """clean('15') must return the int 15, not a string."""
        int_field = forms.IntegerField()
        result = int_field.clean('15')
        self.assertEqual(15, result)
        self.assertIsInstance(result, int)
|
Fix test so it is testing something
|
Fix test so it is testing something
|
Python
|
bsd-3-clause
|
gregmuellegger/django-floppyforms,gregmuellegger/django-floppyforms,jonashaag/django-floppyforms,gregmuellegger/django-floppyforms,jonashaag/django-floppyforms
|
ef048131d586812c2d73edd6297dfae4305b6074
|
website/exceptions.py
|
website/exceptions.py
|
class OSFError(Exception):
    """Root of the exception hierarchy for the OSF application."""
class NodeError(OSFError):
    """Raised when an action cannot be performed on a Node model."""
    pass
class NodeStateError(NodeError):
    """Raised when the Node's state is not suitable for the requested action.
    Example: Node.remove_node() is called, but the node has non-deleted children.
    """
    pass
class SanctionTokenError(NodeError):
    """Base class for errors arising from the use of a sanction token."""
    pass
class InvalidSanctionRejectionToken(SanctionTokenError):
    """Raised if an embargo disapproval token is not found."""
    # Short/long variants shown to the end user when the token lookup fails.
    message_short = "Invalid Token"
    message_long = "This embargo disapproval link is invalid. Are you logged into the correct account?"
class InvalidSanctionApprovalToken(SanctionTokenError):
    """Raised if an embargo approval token is not found."""
    message_short = "Invalid Token"
    # This class handles *approval* tokens; the user-facing message must
    # not claim the "disapproval" link is bad (copy-paste from the
    # rejection-token error).
    message_long = "This embargo approval link is invalid. Are you logged into the correct account?"
|
class OSFError(Exception):
"""Base class for exceptions raised by the Osf application"""
pass
class NodeError(OSFError):
"""Raised when an action cannot be performed on a Node model"""
pass
class NodeStateError(NodeError):
"""Raised when the Node's state is not suitable for the requested action
Example: Node.remove_node() is called, but the node has non-deleted children
"""
pass
class SanctionTokenError(NodeError):
"""Base class for errors arising from the user of a sanction token."""
pass
class InvalidSanctionRejectionToken(SanctionTokenError):
    """Raised if a Sanction subclass disapproval token submitted is invalid
    or associated with another admin authorizer.
    """
    # Short/long variants shown to the end user when the token is rejected.
    message_short = "Invalid Token"
    message_long = "This disapproval link is invalid. Are you logged into the correct account?"
class InvalidSanctionApprovalToken(SanctionTokenError):
    """Raised if a Sanction subclass approval token submitted is invalid
    or associated with another admin authorizer.
    """
    message_short = "Invalid Token"
    # This class handles *approval* tokens; the user-facing message must
    # not claim the "disapproval" link is bad (copy-paste from the
    # rejection-token error).
    message_long = "This approval link is invalid. Are you logged into the correct account?"
|
Update Sanction exception error message and docstrings
|
Update Sanction exception error message and docstrings
|
Python
|
apache-2.0
|
alexschiller/osf.io,amyshi188/osf.io,felliott/osf.io,cwisecarver/osf.io,ckc6cz/osf.io,TomBaxter/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,RomanZWang/osf.io,doublebits/osf.io,adlius/osf.io,ticklemepierce/osf.io,danielneis/osf.io,samchrisinger/osf.io,baylee-d/osf.io,sloria/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,erinspace/osf.io,hmoco/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,icereval/osf.io,mluo613/osf.io,Nesiehr/osf.io,mattclark/osf.io,felliott/osf.io,brandonPurvis/osf.io,hmoco/osf.io,amyshi188/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,mluke93/osf.io,Nesiehr/osf.io,njantrania/osf.io,DanielSBrown/osf.io,chennan47/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,DanielSBrown/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,doublebits/osf.io,KAsante95/osf.io,chrisseto/osf.io,petermalcolm/osf.io,cosenal/osf.io,leb2dg/osf.io,rdhyee/osf.io,aaxelb/osf.io,pattisdr/osf.io,jmcarp/osf.io,mfraezz/osf.io,zachjanicki/osf.io,emetsger/osf.io,cwisecarver/osf.io,cslzchen/osf.io,mattclark/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,saradbowman/osf.io,wearpants/osf.io,crcresearch/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,ZobairAlijan/osf.io,chrisseto/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,acshi/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,cosenal/osf.io,erinspace/osf.io,njantrania/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,Ghalko/osf.io,hmoco/osf.io,aaxelb/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,wearpants/osf.io,Ghalko/osf.io,caseyrollins/osf.io,abought/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,amyshi188/osf.io,icereval/osf.io,jnayak1/osf.io,wearpants/osf.io,ticklemepierce/osf.io,lyndsysimon/osf.io,felliott/osf.io,caneruguz/osf.io,rdhyee/osf.io,abought/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,arp
itar/osf.io,kch8qx/osf.io,abought/osf.io,samanehsan/osf.io,sbt9uc/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,saradbowman/osf.io,adlius/osf.io,arpitar/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,cosenal/osf.io,mluo613/osf.io,brandonPurvis/osf.io,emetsger/osf.io,adlius/osf.io,MerlinZhang/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,jnayak1/osf.io,GageGaskins/osf.io,leb2dg/osf.io,sloria/osf.io,zamattiac/osf.io,binoculars/osf.io,danielneis/osf.io,jnayak1/osf.io,mfraezz/osf.io,aaxelb/osf.io,jmcarp/osf.io,KAsante95/osf.io,leb2dg/osf.io,felliott/osf.io,leb2dg/osf.io,samchrisinger/osf.io,kwierman/osf.io,monikagrabowska/osf.io,binoculars/osf.io,baylee-d/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,mattclark/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,danielneis/osf.io,TomHeatwole/osf.io,jolene-esposito/osf.io,baylee-d/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,samanehsan/osf.io,SSJohns/osf.io,alexschiller/osf.io,laurenrevere/osf.io,cslzchen/osf.io,doublebits/osf.io,mfraezz/osf.io,alexschiller/osf.io,zamattiac/osf.io,GageGaskins/osf.io,abought/osf.io,billyhunt/osf.io,cwisecarver/osf.io,binoculars/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,njantrania/osf.io,TomHeatwole/osf.io,sloria/osf.io,billyhunt/osf.io,arpitar/osf.io,chennan47/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,petermalcolm/osf.io,caseyrollins/osf.io,ckc6cz/osf.io,asanfilippo7/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,jolene-esposito/osf.io,jolene-esposito/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,jmcarp/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,cslzchen/osf.io,cslzchen/osf.io,kch8qx/osf.io,danielneis/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,zamattiac/osf.io,laurenrevere/osf.io,mluke93/osf.io,crcresearch/osf.io,SSJohns/osf.io,chrisseto/osf.io,acshi/osf.io,mluo613/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,Johnetordoff/osf.io,ckc6cz/osf.io,MerlinZhang/osf.
io,chrisseto/osf.io,RomanZWang/osf.io,acshi/osf.io,arpitar/osf.io,icereval/osf.io,lyndsysimon/osf.io,mluo613/osf.io,adlius/osf.io,monikagrabowska/osf.io,kwierman/osf.io,pattisdr/osf.io,SSJohns/osf.io,caseyrygt/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,doublebits/osf.io,acshi/osf.io,haoyuchen1992/osf.io,acshi/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,alexschiller/osf.io,sbt9uc/osf.io,aaxelb/osf.io,samanehsan/osf.io,mluo613/osf.io,zachjanicki/osf.io,KAsante95/osf.io,lyndsysimon/osf.io,Ghalko/osf.io,mluke93/osf.io,ckc6cz/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,kwierman/osf.io,caseyrygt/osf.io,lyndsysimon/osf.io,hmoco/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,emetsger/osf.io,njantrania/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,billyhunt/osf.io,Nesiehr/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,wearpants/osf.io,cosenal/osf.io,petermalcolm/osf.io
|
26e90f715f5fbd5adeae1703a48ebef2352c93a2
|
salt/utils/parser.py
|
salt/utils/parser.py
|
# -*- coding: utf-8 -*-
"""
salt.utils.parser
~~~~~~~~~~~~~~~~~
:copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio ([email protected])`
:license: Apache 2.0, see LICENSE for more details.
"""
import sys
import optparse
from salt import version
class OptionParser(optparse.OptionParser):
    """optparse.OptionParser preconfigured with Salt's version string and
    a --versions-report option."""

    def __init__(self, *args, **kwargs):
        kwargs.setdefault("version", version.__version__)
        # optparse expands "%prog" to the program name; "%%prog" is an
        # escaped percent and would display the literal text "%prog".
        kwargs.setdefault('usage', '%prog')
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def parse_args(self, args=None, values=None):
        """Parse arguments, printing the versions report when requested."""
        options, args = optparse.OptionParser.parse_args(self, args, values)
        if options.versions_report:
            self.print_versions_report()
        return options, args

    def _add_version_option(self):
        # Keep the stock --version and add the dependency report flag.
        optparse.OptionParser._add_version_option(self)
        self.add_option(
            '--versions-report', action='store_true',
            help="show program's dependencies version number and exit"
        )

    def print_versions_report(self, file=sys.stdout):
        """Print the dependency version report and exit."""
        # NOTE: Python 2 print-chevron syntax, kept as-is — this file
        # targets Python 2.
        print >> file, '\n'.join(version.versions_report())
        self.exit()
|
# -*- coding: utf-8 -*-
"""
salt.utils.parser
~~~~~~~~~~~~~~~~~
:copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio ([email protected])`
:license: Apache 2.0, see LICENSE for more details.
"""
import sys
import optparse
from salt import version
class OptionParser(optparse.OptionParser):
    """optparse.OptionParser preconfigured with Salt's version string and
    a --versions-report option."""

    def __init__(self, *args, **kwargs):
        kwargs.setdefault("version", version.__version__)
        # "%prog" is expanded by optparse to the program name.
        kwargs.setdefault('usage', '%prog')
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def parse_args(self, args=None, values=None):
        """Parse arguments, printing the versions report when requested."""
        options, args = optparse.OptionParser.parse_args(self, args, values)
        if options.versions_report:
            self.print_versions_report()
        return options, args

    def _add_version_option(self):
        # Keep the stock --version and add the dependency report flag.
        optparse.OptionParser._add_version_option(self)
        self.add_option(
            '--versions-report', action='store_true',
            help="show program's dependencies version number and exit"
        )

    def print_versions_report(self, file=sys.stdout):
        """Print the dependency version report and exit."""
        # NOTE: Python 2 print-chevron syntax — this file targets Python 2.
        print >> file, '\n'.join(version.versions_report())
        self.exit()
|
Remove extra `%` from the `cli` tools.
|
Remove extra `%` from the `cli` tools.
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
04807105282211ff4ad79e8b4e9b13442e083c86
|
planex_globals.py
|
planex_globals.py
|
import os.path

# Root directory under which all build artifacts are created.
BUILD_ROOT_DIR = "planex-build-root"

# Standard RPM build tree layout rooted at BUILD_ROOT_DIR.
# A comprehension is clearer than map() with a lambda here.
[SPECS_DIR, SOURCES_DIR, SRPMS_DIR, RPMS_DIR, BUILD_DIR] = [
    os.path.join(BUILD_ROOT_DIR, subdir)
    for subdir in ('SPECS', 'SOURCES', 'SRPMS', 'RPMS', 'BUILD')]

# Glob matching every spec file in the build tree.
SPECS_GLOB = os.path.join(SPECS_DIR, "*.spec")
|
import os.path

# Root directory under which all build artifacts are created.
BUILD_ROOT_DIR = "planex-build-root"

# Standard RPM build tree layout rooted at BUILD_ROOT_DIR.
SPECS_DIR = os.path.join(BUILD_ROOT_DIR, 'SPECS')
SOURCES_DIR = os.path.join(BUILD_ROOT_DIR, 'SOURCES')
SRPMS_DIR = os.path.join(BUILD_ROOT_DIR, 'SRPMS')
RPMS_DIR = os.path.join(BUILD_ROOT_DIR, 'RPMS')
BUILD_DIR = os.path.join(BUILD_ROOT_DIR, 'BUILD')

# Glob matching every spec file in the build tree.
SPECS_GLOB = os.path.join(SPECS_DIR, "*.spec")
|
Replace deprecated 'map' builtin with list comprehension
|
globals: Replace deprecated 'map' builtin with list comprehension
Signed-off-by: Euan Harris <[email protected]>
|
Python
|
lgpl-2.1
|
djs55/planex,jonludlam/planex,djs55/planex,simonjbeaumont/planex,euanh/planex-cleanhistory,euanh/planex-cleanhistory,simonjbeaumont/planex,djs55/planex,jonludlam/planex,jonludlam/planex,simonjbeaumont/planex,euanh/planex-cleanhistory
|
13f6eb8d1229c44b8746a9b1c9f8ca804e76bced
|
mrburns/settings/server.py
|
mrburns/settings/server.py
|
import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
STATIC_URL = os.getenv('STATIC_URL', STATIC_URL)
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
'glow.cdn.mozilla.net',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
|
import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
STATIC_URL = os.getenv('STATIC_URL', STATIC_URL)
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
'glow.cdn.mozilla.net',
'glow-origin.cdn.mozilla.net',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
|
Add CDN origin domain to allowed hosts.
|
Add CDN origin domain to allowed hosts.
|
Python
|
mpl-2.0
|
almossawi/mrburns,mozilla/mrburns,mozilla/mrburns,almossawi/mrburns,mozilla/mrburns,almossawi/mrburns,almossawi/mrburns
|
42a4d5959524875fd39c190f6119eb06a97eabf2
|
build/setenv.py
|
build/setenv.py
|
import os,sys

#General vars
CURDIR=os.path.dirname(os.path.abspath(__file__))
TOPDIR=os.path.dirname(CURDIR)
#Use os.path.join instead of hard-coded '\\' so paths stay portable.
DOWNLOAD_DIR=os.path.join(TOPDIR,'downloads')

#Default vars
PY_VER='Python27'
BIN_DIR=os.path.join(TOPDIR,'bin')
PY_DIR=os.path.join(BIN_DIR,PY_VER) #Don't mess with PYTHONHOME

############################################################
#Check environment settings in case they've been overridden
env=os.environ
CURDIR=env.get('CURDIR',CURDIR)
TOPDIR=env.get('TOPDIR',os.path.dirname(CURDIR))
DOWNLOAD_DIR=env.get('DOWNLOAD_DIR',DOWNLOAD_DIR)
PY_VER=env.get('PY_VER',PY_VER)
BIN_DIR=env.get('BIN_DIR',BIN_DIR)
PY_DIR=env.get('PY_DIR',PY_DIR)

#Hide from autocomplete IDEs
del os
del sys
del env
|
import os,sys

#General vars
# Directory containing this file.
CURDIR=os.path.dirname(os.path.abspath(__file__))
# NOTE(review): two dirname() calls assume this file now sits two levels
# below the project root — confirm against the repository layout.
TOPDIR=os.path.dirname(os.path.dirname(CURDIR))
DOWNLOAD_DIR=os.path.join(TOPDIR,'downloads')

#Default vars
PY_VER='Python27'
BIN_DIR=os.path.join(TOPDIR,'bin')
PY_DIR=os.path.join(BIN_DIR,PY_VER) #Don't mess with PYTHONHOME

############################################################
#Check environment settings in case they've been overridden
env=os.environ
CURDIR=env.get('CURDIR',CURDIR)
TOPDIR=env.get('TOPDIR',os.path.dirname(os.path.dirname(CURDIR)))
DOWNLOAD_DIR=env.get('DOWNLOAD_DIR',DOWNLOAD_DIR)
PY_VER=env.get('PY_VER',PY_VER)
BIN_DIR=env.get('BIN_DIR',BIN_DIR)
PY_DIR=env.get('PY_DIR',PY_DIR)

#Hide from autocomplete IDEs
del os
del sys
del env
|
Change path following import of build folder
|
Change path following import of build folder
|
Python
|
mit
|
lpinner/metageta
|
18baa0b6f963692e6de8ee8425a36f75a21f83d0
|
falafel/tests/content/test_content_manager.py
|
falafel/tests/content/test_content_manager.py
|
import os
import pytest
import tempfile
import shutil
from falafel.content.manager import ContentManager
@pytest.fixture
def cm():
tmp_dir = tempfile.mkdtemp()
yield ContentManager(tmp_dir, "falafel.tests.plugins")
shutil.rmtree(tmp_dir)
def test_create_manager(cm):
assert len(list(cm.get_all())) == 0
def test_save(cm):
doc = {
"resolution": "butts",
"rule_id": "tina_loves_butts|OH_YEAH"
}
cm.save(doc, default=True)
doc["path"] = os.path.join(cm.content_prefix, "tina_loves_butts/OH_YEAH")
assert next(cm.get("tina_loves_butts|OH_YEAH")) == doc
assert next(cm.get("tina_loves_butts")) == doc
def test_error_keys(cm):
cm.save({"rule_id": "tina_loves_butts|OH_YEAH"}, default=True)
cm.save({"rule_id": "tina_loves_butts|OH_NO"}, default=True)
assert set(cm.error_keys()) == {"tina_loves_butts|OH_YEAH", "tina_loves_butts|OH_NO"}
|
import os
import pytest
import tempfile
import shutil
from falafel.content.manager import ContentManager
@pytest.fixture
def cm():
tmp_dir = tempfile.mkdtemp()
yield ContentManager(tmp_dir, ["falafel.tests.plugins"])
shutil.rmtree(tmp_dir)
def test_create_manager(cm):
assert len(list(cm.get_all())) == 0
def test_save(cm):
doc = {
"resolution": "butts",
"rule_id": "tina_loves_butts|OH_YEAH"
}
cm.save(doc, default=True)
doc["path"] = os.path.join(cm.content_prefix, "tina_loves_butts/OH_YEAH")
assert next(cm.get("tina_loves_butts|OH_YEAH")) == doc
assert next(cm.get("tina_loves_butts")) == doc
def test_error_keys(cm):
cm.save({"rule_id": "tina_loves_butts|OH_YEAH"}, default=True)
cm.save({"rule_id": "tina_loves_butts|OH_NO"}, default=True)
assert set(cm.error_keys()) == {"tina_loves_butts|OH_YEAH", "tina_loves_butts|OH_NO"}
|
Integrate content server with new settings module
|
Integrate content server with new settings module
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
cdc4f79210b69f131926374aff61be72ab573c46
|
scripts/import_queued_submissions.py
|
scripts/import_queued_submissions.py
|
#!/usr/bin/env python
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from acoustid.script import run_script
from acoustid.data.submission import import_queued_submissions
def main(script, opts, args):
    """Import all queued submissions inside a single transaction."""
    conn = script.engine.connect()
    with conn.begin():
        import_queued_submissions(conn)

run_script(main)
|
#!/usr/bin/env python
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from acoustid.script import run_script
from acoustid.data.submission import import_queued_submissions
def main(script, opts, args):
    """Import queued submissions (at most 100 per run) in one transaction."""
    conn = script.engine.connect()
    with conn.begin():
        import_queued_submissions(conn, limit=100)

run_script(main)
|
Raise the import batch size to 100
|
Raise the import batch size to 100
|
Python
|
mit
|
lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server
|
bebb1d9bc44300e7c65fba90d6c2eb76243ea372
|
scripts/cm2lut.py
|
scripts/cm2lut.py
|
#!/usr/bin/env python
"""
Script used to create lut lists used by mayavi from matplotlib colormaps.
This requires matlplotlib to be installed and should not be ran by the
user, but only once in a while to synchronize with MPL developpement.
"""
# Authors: Frederic Petit <[email protected]>,
# Gael Varoquaux <[email protected]>
# Copyright (c) 2007, Enthought, Inc.
# License: BSD Style.
from matplotlib.cm import datad, get_cmap
import numpy as np
from enthought.mayavi.core import lut as destination_module
import os
target_dir = os.path.dirname(destination_module.__file__)
values = np.linspace(0., 1., 256)
lut_dic = {}
for name in datad.keys():
if name.endswith('_r'):
continue
lut_dic[name] = get_cmap(name)(values.copy())
out_name = os.path.join(target_dir, 'pylab_luts.npz')
np.savez(out_name, **lut_dic)
|
#!/usr/bin/env python
"""
Script used to create lut lists used by mayavi from matplotlib colormaps.
This requires matlplotlib to be installed and should not be ran by the
user, but only once in a while to synchronize with MPL developpement.
"""
# Authors: Frederic Petit <[email protected]>,
# Gael Varoquaux <[email protected]>
# Copyright (c) 2007-2009, Enthought, Inc.
# License: BSD Style.
import os
import numpy as np
from matplotlib.cm import datad, get_cmap
from enthought.mayavi.core import lut as destination_module
from enthought.persistence import state_pickler
target_dir = os.path.dirname(destination_module.__file__)
values = np.linspace(0., 1., 256)
lut_dic = {}
for name in datad.keys():
if name.endswith('_r'):
continue
lut_dic[name] = get_cmap(name)(values.copy())
out_name = os.path.join(target_dir, 'pylab_luts.pkl')
state_pickler.dump(lut_dic, out_name)
|
Add a modified lut-data-generating script to use pickle, rather than npz
|
ENH: Add a modified lut-data-generating script to use pickle, rather than npz
|
Python
|
bsd-3-clause
|
dmsurti/mayavi,liulion/mayavi,alexandreleroux/mayavi,alexandreleroux/mayavi,dmsurti/mayavi,liulion/mayavi
|
fece0019a54534b56960a30785bb70edb5d205bf
|
example_base/forms.py
|
example_base/forms.py
|
# -*- encoding: utf-8 -*-
from base.form_utils import RequiredFieldForm
from .models import Document
from base.form_utils import FileDropInput
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
|
# -*- encoding: utf-8 -*-
from django import forms
from base.form_utils import RequiredFieldForm, FileDropInput
from .models import Document
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
# Not required RequiredFieldForm uses FileDropInput for FileField
# widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic ModelForm
class BasicDocumentModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic Form
class NonModelForm(forms.Form):
file = forms.FileField(widget=FileDropInput)
description = forms.CharField(max_length=200)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
|
Add examples of ways to use FileDropInput
|
Add examples of ways to use FileDropInput
|
Python
|
apache-2.0
|
pkimber/base,pkimber/base,pkimber/base,pkimber/base
|
1e17e868ff332003da959a397b8846c9386b35e8
|
API_to_backend.py
|
API_to_backend.py
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
# Handle to the currently running backend process, if any.
_handler = None


def start_backend():
    """(Re)start the backend worker process.

    Terminates any previously started backend process before spawning a
    fresh one wired to the module-level command/response queues.
    """
    global _handler
    # `if handler:` on a name assigned later in the same function raises
    # UnboundLocalError; track the process in a module-level variable.
    if _handler is not None and _handler.is_alive():
        # multiprocessing.Process has no stop(); terminate() is the API.
        _handler.terminate()
        _handler.join()
    _handler = Process(target=backend.start, args=(command_queue, response_queue))
    _handler.start()
def get_for(url, queue, timeout):
    """Pull the response matching `url` from `queue`.

    Responses for other URLs are put back on the queue for their own
    consumers; the remaining time budget is carried into the next
    attempt. The queue's timeout exception propagates when the budget
    runs out.
    """
    deadline = time.time() + timeout
    while True:
        remaining = deadline - time.time()
        result = queue.get(timeout=remaining)
        if result["url"] == url:
            return result["body"]
        # Not ours: requeue it and keep waiting with what's left.
        queue.put(result)
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
    """Spawn the backend loop in a child process, wired to the module queues."""
    handler = Process(target=backend.start, args=(command_queue, response_queue))
    handler.start()
def get_for(url, queue, timeout):
    """Pull the response matching `url` from `queue`.

    Responses for other URLs are put back on the queue for their own
    consumers, and the remaining time budget is carried into a recursive
    retry. The queue's timeout exception propagates when the budget
    runs out.
    """
    beginning = time.time()
    result = queue.get(timeout=timeout)
    if result["url"] == url:
        return result["body"]
    else:
        # Not ours: requeue it and retry with whatever time is left.
        queue.put(result)
        return get_for(url, queue, timeout - (time.time()-beginning))
|
Revert "Quit Backend If Running"
|
Revert "Quit Backend If Running"
This reverts commit a00432191e2575aba0f20ffb1a96a323699ae4fc.
|
Python
|
mit
|
IAPark/PITherm
|
7c18cbf6dced0435537fb4067dfa878ae9ccc6af
|
accounts/models.py
|
accounts/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
    """Per-user profile data, linked one-to-one to Django's User."""
    user = models.OneToOneField(User)

    def __str__(self):
        # Prefer the human-readable full name; fall back to the login name.
        return self.user.get_full_name() or self.user.username
|
from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
class Profile(models.Model):
    """Per-user profile data, linked one-to-one to Django's User."""
    user = models.OneToOneField(User)

    def __str__(self):
        # Prefer the human-readable full name; fall back to the login name.
        return self.user.get_full_name() or self.user.username
def create_user_profile(sender, instance, created, **kwargs):
    """post_save handler: create a Profile for each newly created User."""
    # Only on the first save; later saves must not create duplicates.
    if created:
        Profile.objects.create(user=instance)

post_save.connect(create_user_profile, sender=User)
|
Create profile if user is created
|
Create profile if user is created
|
Python
|
mit
|
lockhawksp/beethoven,lockhawksp/beethoven
|
6034265dfdfb2a7e1e4881076cc0f011ff0e639d
|
netbox/extras/migrations/0022_custom_links.py
|
netbox/extras/migrations/0022_custom_links.py
|
# Generated by Django 2.2 on 2019-04-15 19:28
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('extras', '0021_add_color_comments_changelog_to_tag'),
]
operations = [
migrations.CreateModel(
name='CustomLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, unique=True)),
('text', models.CharField(max_length=200)),
('url', models.CharField(max_length=200)),
('weight', models.PositiveSmallIntegerField(default=100)),
('group_name', models.CharField(blank=True, max_length=50)),
('button_class', models.CharField(default='default', max_length=30)),
('new_window', models.BooleanField()),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'ordering': ['group_name', 'weight', 'name'],
},
),
]
|
# Generated by Django 2.2 on 2019-04-15 19:28
from django.db import migrations, models
import django.db.models.deletion
import extras.models
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('extras', '0021_add_color_comments_changelog_to_tag'),
]
operations = [
migrations.CreateModel(
name='CustomLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, unique=True)),
('text', models.CharField(max_length=200)),
('url', models.CharField(max_length=200)),
('weight', models.PositiveSmallIntegerField(default=100)),
('group_name', models.CharField(blank=True, max_length=50)),
('button_class', models.CharField(default='default', max_length=30)),
('new_window', models.BooleanField()),
('content_type', models.ForeignKey(limit_choices_to=extras.models.get_custom_link_models, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'ordering': ['group_name', 'weight', 'name'],
},
),
]
|
Add limit_choices_to to CustomLink.content_type field
|
Add limit_choices_to to CustomLink.content_type field
|
Python
|
apache-2.0
|
lampwins/netbox,lampwins/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,lampwins/netbox,lampwins/netbox,digitalocean/netbox
|
32f38eb01c3a203ae35d70b485fcee7b13f1acde
|
tests/help_generation_test.py
|
tests/help_generation_test.py
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that we can generate help for PKB."""
import os
import unittest
from perfkitbenchmarker import flags
# Import pkb to add all flag definitions to flags.FLAGS.
from perfkitbenchmarker import pkb # NOQA
class HelpTest(unittest.TestCase):
    """Help text generation should not raise."""

    def testHelp(self):
        # Test that help generation finishes without errors
        flags.FLAGS.GetHelp()
class HelpXMLTest(unittest.TestCase):
    """XML help generation should not raise; output is discarded."""

    def testHelpXML(self):
        with open(os.devnull, 'w') as out:
            flags.FLAGS.WriteHelpInXMLFormat(outfile=out)
if __name__ == '__main__':
unittest.main()
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that we can generate help for PKB."""
import os
import unittest
from perfkitbenchmarker import flags
# Import pkb to add all flag definitions to flags.FLAGS.
from perfkitbenchmarker import pkb # NOQA
class HelpTest(unittest.TestCase):
def testHelp(self):
# Test that help generation finishes without errors
if hasattr(flags.FLAGS, 'get_help'):
flags.FLAGS.get_help()
else:
flags.FLAGS.GetHelp()
class HelpXMLTest(unittest.TestCase):
def testHelpXML(self):
with open(os.devnull, 'w') as out:
flags.FLAGS.WriteHelpInXMLFormat(outfile=out)
if __name__ == '__main__':
unittest.main()
|
Call FLAGS.get_help if it's available.
|
Call FLAGS.get_help if it's available.
|
Python
|
apache-2.0
|
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker
|
d8c15667e76ce6d0dfa96a16312e75b83c63479b
|
tests/test_response.py
|
tests/test_response.py
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
import pytest
patch = pytest.importorskip('unittest.mock.patch')
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
Disable test for py27 (mock not available)
|
Disable test for py27 (mock not available)
|
Python
|
apache-2.0
|
treethought/flask-assistant
|
4b578fb7683054727444f1ed5c2a7d9732a3d8e9
|
ci_scripts/installPandoc.py
|
ci_scripts/installPandoc.py
|
import os
from subprocess import call, check_output
import sys
from shutil import copy2
# Cached once at import; 'win32' selects the MSI installer branch below.
platform = sys.platform
def checkAndInstall():
    """Install pandoc 2.1.3 unless a pandoc binary is already on the PATH.

    Detection: running `pandoc -v` raises OSError when the binary is absent.
    Downloads happen inside ../downloads; the cwd is restored afterwards.
    """
    try:
        check_output('pandoc -v'.split())
    except OSError:
        cudir = os.path.abspath(os.curdir)
        os.chdir(os.path.abspath(os.path.join(os.path.pardir, 'downloads')))
        def getFile():
            # Fetches the release asset; pandocFile and source are read from
            # the enclosing scope (assigned below, before each getFile() call).
            from requests import get
            with open(pandocFile, "wb") as file:
                response = get(source)
                file.write(response.content)
        if platform == 'win32':
            pandocFile = 'pandoc-2.1.3-windows.msi'
            source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
            getFile()
            call('msiexec.exe /i "{}" /norestart'.format(pandocFile))
        else:
            pandocFile = 'pandoc-2.1.3-linux.tar.gz'
            source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
            getFile()
            call("tar -xvzf {}".format(pandocFile).split())
            # Assumes write access to /usr/local/bin (CI as root) -- TODO confirm.
            copy2('./pandoc-2.1.3/bin/pandoc', '/usr/local/bin')
            copy2('./pandoc-2.1.3/bin/pandoc-citeproc', '/usr/local/bin')
        os.chdir(cudir)
if __name__ == '__main__':
    checkAndInstall()
|
# NOTE(review): os is no longer used after the chdir removal below --
# candidate for cleanup.
import os
from subprocess import call, check_output
import sys
from shutil import copy2
# Cached once at import; 'win32' selects the MSI installer branch below.
platform = sys.platform
def checkAndInstall():
    """Install pandoc 2.1.3 unless a pandoc binary is already on the PATH.

    Detection: running `pandoc -v` raises OSError when the binary is absent.
    Downloads and extraction now happen in the current working directory.
    """
    try:
        check_output('pandoc -v'.split())
    except OSError:
        def getFile():
            # Fetches the release asset; pandocFile and source are read from
            # the enclosing scope (assigned below, before each getFile() call).
            from requests import get
            with open(pandocFile, "wb") as file:
                response = get(source)
                file.write(response.content)
        if platform == 'win32':
            pandocFile = 'pandoc-2.1.3-windows.msi'
            source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
            getFile()
            call('msiexec.exe /i "{}" /norestart'.format(pandocFile))
        else:
            pandocFile = 'pandoc-2.1.3-linux.tar.gz'
            source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
            getFile()
            call("tar -xvzf {}".format(pandocFile).split())
            # Assumes write access to /usr/local/bin (CI as root) -- TODO confirm.
            copy2('./pandoc-2.1.3/bin/pandoc', '/usr/local/bin')
            copy2('./pandoc-2.1.3/bin/pandoc-citeproc', '/usr/local/bin')
if __name__ == '__main__':
    checkAndInstall()
|
Fix build wheels with Pandoc 2.
|
Fix build wheels with Pandoc 2.
|
Python
|
bsd-3-clause
|
jr-garcia/AssimpCy,jr-garcia/AssimpCy
|
b4c88bcedaf6cab078426675ddbaf0e963b4a821
|
apps/reactions/serializers.py
|
apps/reactions/serializers.py
|
from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
from rest_framework.fields import HyperlinkedIdentityField
class ReactionAuthorSerializer(serializers.ModelSerializer):
    """Expose a reaction author's name and a 90x90 avatar thumbnail."""
    # Centre-cropped thumbnail rendered from the related userprofile picture.
    picture = SorlImageField('userprofile.picture', '90x90', crop='center')
    class Meta:
        model = User
        fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
    """Serialize a single Reaction; created and author are read-only."""
    # Read-only fields.
    created = serializers.Field()
    # Custom fields.
    # TODO: Enable embedded models in Ember Data and re-enable this.
    # author = ReactionAuthorSerializer()
    author = serializers.Field() # Needed to make the author field read-only.
    # TODO: This isn't working with the pattern: api/blogs/<slug>/reactions/<pk>
    # Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
    # url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
    class Meta:
        model = Reaction
        fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
    """List variant; currently identical field set to the detail serializer."""
    class Meta:
        model = Reaction
        fields = ('created', 'author', 'reaction', 'id')
|
from apps.bluebottle_utils.serializers import SorlImageField
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Reaction
class ReactionAuthorSerializer(serializers.ModelSerializer):
    """Expose a reaction author's name and a 90x90 avatar thumbnail."""
    # Centre-cropped thumbnail rendered from the related userprofile picture.
    picture = SorlImageField('userprofile.picture', '90x90', crop='center')
    class Meta:
        model = User
        fields = ('first_name', 'last_name', 'picture')
class ReactionDetailSerializer(serializers.ModelSerializer):
    """Serialize a single Reaction; author is rendered as a user id."""
    # Read-only fields.
    created = serializers.Field()
    # Custom fields.
    # TODO: Enable embedded models in Ember Data and re-enable this.
    # author = ReactionAuthorSerializer()
    # Renders the author's primary key (user id) instead of a username.
    author = serializers.PrimaryKeyRelatedField(read_only=True)
    # TODO: This isn't working with the pattern: api/blogs/<slug>/reactions/<pk>
    # Delete or fix this ... we don't really need it so removing it is ok but it's nice to have.
    # url = HyperlinkedIdentityField(view_name='reactions:reaction-detail')
    class Meta:
        model = Reaction
        fields = ('created', 'author', 'reaction', 'id')
class ReactionListSerializer(ReactionDetailSerializer):
    """List variant; currently identical field set to the detail serializer."""
    class Meta:
        model = Reaction
        fields = ('created', 'author', 'reaction', 'id')
|
Make reaction api display user id instead of username.
|
Make reaction api display user id instead of username.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
afa94ea297c6042f4444c0ce833c9b1ee02373c1
|
stowaway.py
|
stowaway.py
|
import time
import socket
import datetime
from ipaddress import ip_address
import zmq
import yaml
import quick2wire.i2c as i2c
from database import Writer
from database import Temperature, Base
# Publisher script: samples a temperature sensor over I2C and fans each
# reading out to external subscribers and an in-process database writer.
if __name__ == '__main__':
    context = zmq.Context()
    publisher = context.socket(zmq.PUB)   # external TCP subscribers
    database = context.socket(zmq.PUB)    # in-process database writer
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input, and the config file handle is never closed.
    server = yaml.load(open('config.yaml'))['server']
    host = server['host']
    try:
        # If 'host' is not already an IP literal, resolve it via DNS.
        ip_address(host)
    except ValueError:
        host = socket.gethostbyname(host)
    publisher.bind('tcp://{}:{}'.format(host, server['port']))
    database.bind('inproc://dbwrite')
    writer = Writer(context)
    writer.start()
    while True:
        with i2c.I2CMaster() as bus:
            # Read 6 bytes from the I2C device at address 8.
            data = bus.transaction(i2c.reading(8, 6))
            now = datetime.datetime.utcnow()
            # Last two bytes: signed little-endian hundredths of a degree.
            temp = data[0][-2:]
            temp = int.from_bytes(temp, byteorder='little', signed=True) / 100.
            print(now, temp)
            publisher.send_pyobj(('TEMP', now, temp))
            database.send_pyobj(('TEMP', now, temp))
        time.sleep(0.05)  # ~20 Hz sampling cadence
|
import time
import socket
import datetime
from ipaddress import ip_address
import zmq
import yaml
import quick2wire.i2c as i2c
from database import Writer
from database import Temperature, Base
# Publisher script: samples a temperature sensor over I2C and fans each
# reading out to external subscribers and an in-process database writer.
if __name__ == '__main__':
    context = zmq.Context()
    publisher = context.socket(zmq.PUB)   # external TCP subscribers
    database = context.socket(zmq.PUB)    # in-process database writer
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input, and the config file handle is never closed.
    server = yaml.load(open('config.yaml'))['server']
    host = server['host']
    try:
        # If 'host' is not already an IP literal, resolve it via DNS.
        ip_address(host)
    except ValueError:
        host = socket.gethostbyname(host)
    publisher.bind('tcp://{}:{}'.format(host, server['port']))
    database.bind('inproc://dbwrite')
    writer = Writer(context)
    writer.start()
    while True:
        with i2c.I2CMaster() as bus:
            # Read 6 bytes from the I2C device at address 8.
            data = bus.transaction(i2c.reading(8, 6))
            now = datetime.datetime.utcnow()
            # Last two bytes: signed little-endian hundredths of a degree.
            temp = data[0][-2:]
            temp = int.from_bytes(temp, byteorder='little', signed=True) / 100.
            print(now, temp)
            # External subscribers receive a POSIX float timestamp; the db
            # writer still receives the datetime object.
            # NOTE(review): 'now' is naive (utcnow), so .timestamp() assumes
            # the *local* timezone -- wrong epoch unless the host runs UTC;
            # confirm intent.
            publisher.send_pyobj(('TEMP', now.timestamp(), temp))
            database.send_pyobj(('TEMP', now, temp))
        time.sleep(0.05)  # ~20 Hz sampling cadence
|
Send timestamp to the outside world
|
Send timestamp to the outside world
|
Python
|
bsd-3-clause
|
CojoCompany/stowaway
|
34369635a22bf05abbabe47e708a2ed80db258e5
|
MeetingMinutes.py
|
MeetingMinutes.py
|
import sublime, sublime_plugin
from .mistune import markdown
class CreateMinuteCommand(sublime_plugin.TextCommand):
    """Render the whole view's Markdown to an HTML document and print it."""
    def run(self, edit):
        # Grab the entire buffer contents.
        whole_buffer = sublime.Region(0, self.view.size())
        md_source = self.view.substr(whole_buffer)
        # Raises UnicodeEncodeError early if the buffer is not valid UTF-8;
        # the encoded result itself is intentionally discarded.
        md_source.encode(encoding='UTF-8',errors='strict')
        body = markdown(md_source)
        html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + body + '</body></html>'
        print(html_source)
|
import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
class CreateMinuteCommand(sublime_plugin.TextCommand):
    """Render the current view's Markdown to an HTML file next to the source."""
    def run(self, edit):
        # Grab the entire buffer contents.
        region = sublime.Region(0, self.view.size())
        md_source = self.view.substr(region)
        # Raises UnicodeEncodeError early if the buffer is not valid UTF-8.
        md_source.encode(encoding='UTF-8', errors='strict')
        html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>'
        file_name = self.view.file_name()
        # splitext()[0] keeps only the stem; the extension component was an
        # unused local before.
        html_file = os.path.splitext(file_name)[0] + ".html"
        with open(html_file, 'w+') as file_:
            file_.write(html_source)
        print(file_name)
        print(html_file)
|
Save the created html in a HTML file.
|
Save the created html in a HTML file.
|
Python
|
mit
|
Txarli/sublimetext-meeting-minutes,Txarli/sublimetext-meeting-minutes
|
bf069a93484bff41c7cb46975fc8c41a7280723a
|
pastas/version.py
|
pastas/version.py
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
# The trailing 'b' marks this as a beta pre-release of 0.9.4.
__version__ = '0.9.4b'
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
# Final 0.9.4 release (beta suffix dropped).
__version__ = '0.9.4'
|
Prepare to update Master branch to v 0.9.4
|
Prepare to update Master branch to v 0.9.4
|
Python
|
mit
|
pastas/pasta,pastas/pastas,gwtsa/gwtsa
|
b8273181c1f8806ebea3504d11d1feda36ae27ee
|
src/dependenpy/__main__.py
|
src/dependenpy/__main__.py
|
# -*- coding: utf-8 -*-
"""
Entrypoint module, in case you use `python -mdependenpy`.
Why does this file exist, and why __main__? For more info, read:
- https://www.python.org/dev/peps/pep-0338/
- https://docs.python.org/2/using/cmdline.html#cmdoption-m
- https://docs.python.org/3/using/cmdline.html#cmdoption-m
"""
import sys
from dependenpy.cli import main
if __name__ == "__main__":
    # Bug fix: sys.argv[0] is the program name, not an argument; passing the
    # full list made the CLI treat it as its first positional argument.
    main(sys.argv[1:])
|
# -*- coding: utf-8 -*-
"""
Entrypoint module, in case you use `python -mdependenpy`.
Why does this file exist, and why __main__? For more info, read:
- https://www.python.org/dev/peps/pep-0338/
- https://docs.python.org/2/using/cmdline.html#cmdoption-m
- https://docs.python.org/3/using/cmdline.html#cmdoption-m
"""
import sys
from dependenpy.cli import main
if __name__ == "__main__":
    # Drop argv[0] (program name); main() expects only the real arguments.
    main(sys.argv[1:])
|
Fix passed args (no more program name)
|
Fix passed args (no more program name)
|
Python
|
isc
|
Pawamoy/dependenpy,Pawamoy/dependenpy
|
c674ca2920e4a6774761312669f351554e3955eb
|
src/metabotnik/templatetags/utilz.py
|
src/metabotnik/templatetags/utilz.py
|
from django import template
from metabotnik.models import project_status_choices
register = template.Library()
@register.filter
def nicestatus(dbval):
    """Convert a stored project-status value into its user-friendly label.

    Falls back to the raw db value when it is not a known choice.
    """
    for val, stat in project_status_choices:
        if dbval == val:
            return stat
    return dbval
@register.simple_tag
def json_to_overlay(datadict):
    """Build a '\\n,'-joined list of JS overlay descriptors.

    One "{id: 'overlay<pk>', px:<x>, py:<y>}" entry is emitted per image whose
    metadata contains a 'LINK' key; all other images are skipped.
    """
    # The width/height floats and the enumerate() index computed here before
    # were never used, so they have been removed.
    buf = []
    for obj in datadict.get('images', []):
        if 'LINK' in obj.get('metadata', {}):
            entry = (obj['pk'], obj['x'], obj['y'])
            buf.append(u"{id: 'overlay%s', px:%s, py:%s}" % entry)
    return u'\n,'.join(buf)
|
from django import template
from metabotnik.models import project_status_choices
register = template.Library()
@register.filter
def nicestatus(dbval):
    """Convert a stored project-status value into its user-friendly label.

    Falls back to the raw db value when it is not a known choice.
    """
    for val, stat in project_status_choices:
        if dbval == val:
            return stat
    return dbval
@register.simple_tag
def json_to_overlay(datadict):
    """Build a '\\n,'-joined list of JS overlay descriptors.

    One "{id: 'overlay<pk>', px:<x>, py:<y>}" entry is emitted per image whose
    metadata contains a 'LINK' key; all other images are skipped. The +10/+100
    offsets nudge the overlay marker relative to the image origin.
    """
    # The width/height floats and the enumerate() index computed here before
    # were never used, so they have been removed.
    buf = []
    for obj in datadict.get('images', []):
        if 'LINK' in obj.get('metadata', {}):
            entry = (obj['pk'], obj['x'] + 10, obj['y'] + 100)
            buf.append(u"{id: 'overlay%s', px:%s, py:%s}" % entry)
    return u'\n,'.join(buf)
|
Move eye down a bit
|
Move eye down a bit
|
Python
|
mit
|
epoz/metabotnik,epoz/metabotnik,epoz/metabotnik
|
99dfe6fc1c8688b9d1e01218830738f0190c92f1
|
wmtmetatdata/__init__.py
|
wmtmetatdata/__init__.py
|
"""Metadata and tools for WMT components."""
import os
top_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
metadata_dir = os.path.join(top_dir, 'metadata')
|
Add variables for top and metadata directories
|
Add variables for top and metadata directories
|
Python
|
mit
|
csdms/wmt-metadata
|
|
446bb5103ec54680931cb0af43ded22e59bf11e5
|
Recorders.py
|
Recorders.py
|
from Measurement import Measurement
class Recorder(object):
    """Base class for measurement recorders."""
    def __init__(self, recorderType):
        # Free-form tag describing the concrete recorder kind.
        self.recorderType = recorderType
    def record(self, measure: Measurement):
        """Record one measurement; subclasses override. Base is a no-op."""
        pass
class PrintRecorder(Recorder):
    """Recorder that formats each measurement and prints it to stdout."""
    def __init__(self, config):
        # NOTE(review): 'file' looks copy-pasted from FileRecorder; probably
        # meant 'print' -- confirm before changing, callers may rely on it.
        Recorder.__init__(self, 'file')
        self.format = config['format']
    def record(self, measure: Measurement):
        """Format the measurement and print one line."""
        line = self.format.format(
            device_id=measure.device_id,
            celsius=measure.get_celsius(),
            fahrenheit=measure.get_fahrenheit(),
            timestamp=measure.timestamp)
        print(line, end='\n')
class FileRecorder(Recorder):
    """Recorder that writes each measurement to a per-device log file."""
    def __init__(self, config):
        Recorder.__init__(self, 'file')
        self.format = config['format']
        self.container = config['container']  # target directory prefix
        self.extension = config['extension']  # e.g. '.log'
    def record(self, measure: Measurement):
        """Format the measurement and write it to <container><device><ext>."""
        log_entry = self.format.format(
            device_id=measure.device_id,
            celsius=measure.get_celsius(),
            fahrenheit=measure.get_fahrenheit(),
            timestamp=measure.timestamp)
        file_path = self.container + measure.device_id.split('/')[-1] + self.extension
        # Bug fix: the handle was opened without ever being closed, leaking a
        # file descriptor per call and risking unflushed data.
        with open(file_path, 'w') as f:
            f.writelines([log_entry])
|
from Measurement import Measurement
class Recorder(object):
    """Base class for measurement recorders."""
    def __init__(self, recorderType):
        # Free-form tag describing the concrete recorder kind.
        self.recorderType = recorderType
    def record(self, measure: Measurement):
        """Record one measurement; subclasses override. Base is a no-op."""
        pass
class PrintRecorder(Recorder):
    """Recorder that formats each measurement and prints it to stdout."""
    def __init__(self, config):
        # NOTE(review): 'file' looks copy-pasted from FileRecorder; probably
        # meant 'print' -- confirm before changing, callers may rely on it.
        Recorder.__init__(self, 'file')
        self.format = config['format']
    def record(self, measure: Measurement):
        """Format the measurement and print one line."""
        line = self.format.format(
            device_id=measure.device_id,
            celsius=measure.get_celsius(),
            fahrenheit=measure.get_fahrenheit(),
            timestamp=measure.timestamp)
        print(line, end='\n')
class FileRecorder(Recorder):
    """Recorder that writes each measurement to a per-device log file."""
    def __init__(self, config):
        Recorder.__init__(self, 'file')
        self.format = config['format']
        self.container = config['container']  # target directory prefix
        self.extension = config['extension']  # e.g. '.log'
    def record(self, measure: Measurement):
        """Format the measurement and write it to <container><device><ext>."""
        log_entry = self.format.format(
            device_id=measure.device_id,
            celsius=measure.get_celsius(),
            fahrenheit=measure.get_fahrenheit(),
            timestamp=measure.timestamp)
        file_path = self.container + measure.device_id.split('/')[-1] + self.extension
        # 'w+' creates the file if missing (the original commit's intent);
        # bug fix: use a context manager so the handle is always closed.
        with open(file_path, 'w+') as f:
            f.writelines([log_entry])
|
Create recorder file if not exist
|
Create recorder file if not exist
|
Python
|
mit
|
hectortosa/py-temperature-recorder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.