commit
stringlengths
40
40
subject
stringlengths
4
1.73k
repos
stringlengths
5
127k
old_file
stringlengths
2
751
new_file
stringlengths
2
751
new_contents
stringlengths
1
8.98k
old_contents
stringlengths
0
6.59k
license
stringclasses
13 values
lang
stringclasses
23 values
3ab0e590479fabb024937e52eab02e2311033448
Implement a function to map chord segments to STFT blocks.
bzamecnik/tfr,bzamecnik/tfr
time_intervals.py
time_intervals.py
import pandas as pd import numpy as np import collections def block_labels(df_blocks, df_labels): ''' Given fixed-size overlapping blocks and variable-sized non-overlapping labels select most suitable label for each block. This can be useful eg. to assign chord labels to audio blocks. All times are measured in samples and represented by integers. Inputs: - df_blocks: pandas DataFrame with columns start, end (in samples) - df_labels: pandas DataFrame with columns start, label Outputs: - df_blocks: pandas DataFrame with columns start, end, label In case multiple labels span a single block the label with most coverage is selected. ''' def merge_events(df_blocks, df_labels): df_events = pd.merge( pd.concat([df_blocks[['start']], df_blocks[['end']].rename(columns={'end': 'start'})]).drop_duplicates(), df_labels, how='outer') df_events.sort('start', inplace=True) df_events.fillna(method='pad', inplace=True) df_events['duration'] = abs(df_events['start'].diff(-1)) df_events.set_index('start', inplace=True) return df_events.dropna() df_events = merge_events(df_blocks, df_labels) def label_for_block(start, end): labels = df_events['label'].ix[start:end] unique_labels = set(labels) if len(unique_labels) > 1: durations = df_events['duration'].ix[start:end] cnt = collections.Counter() for l, d in zip(labels, durations): cnt[l] += d return cnt.most_common(1)[0][0] else: return labels.iloc[0] def add_labels(df_blocks): block_labels = (label_for_block(start, end) for (i, start, end) in df_blocks.itertuples()) df_block_labels = pd.DataFrame(block_labels, columns=['label']) return df_blocks.join(df_block_labels) return add_labels(df_blocks) def test(): block_size = 10 hop_size = 5 sample_count = 90 block_count = (sample_count - block_size) / hop_size block_starts = hop_size * np.arange(block_count + 1).astype(np.int32) block_ends = block_starts + block_size blocks = list(zip(block_starts, block_ends)) df_blocks = pd.DataFrame(blocks, columns=['start', 'end']) # label segment start times (the last element is the end of the last segment) label_times = [0, 25, 38, 50, 60, 64, 68, 80, 81, 84, 89] labels = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'H', 'N'] df_labels = pd.DataFrame({'start': label_times, 'label': labels}, columns=['start', 'label']) df_labelled_blocks = block_labels(df_blocks, df_labels) expected_labels = ['A','A','A','A','B','B','B','C','C','D','D','D','G','G','G','H','H'] actual_labels = list(df_labelled_blocks['label']) for s, e, a in zip(block_starts, expected_labels, actual_labels): print(s, e, a, '*' if e != a else '') assert actual_labels == expected_labels
mit
Python
fbaca2f2a0ceaa77606d9c24846a1a1b045dc460
remove deleted files from manifest
hassoon3/odoo,jesramirez/odoo,cysnake4713/odoo,OpenUpgrade-dev/OpenUpgrade,nitinitprof/odoo,jusdng/odoo,markeTIC/OCB,shingonoide/odoo,ShineFan/odoo,abstract-open-solutions/OCB,CubicERP/odoo,nagyistoce/odoo-dev-odoo,naousse/odoo,SerpentCS/odoo,feroda/odoo,colinnewell/odoo,lightcn/odoo,poljeff/odoo,syci/OCB,cpyou/odoo,factorlibre/OCB,hbrunn/OpenUpgrade,apanju/GMIO_Odoo,pedrobaeza/OpenUpgrade,guerrerocarlos/odoo,dalegregory/odoo,hanicker/odoo,osvalr/odoo,havt/odoo,kirca/OpenUpgrade,cdrooom/odoo,addition-it-solutions/project-all,arthru/OpenUpgrade,FlorianLudwig/odoo,VielSoft/odoo,kifcaliph/odoo,joshuajan/odoo,naousse/odoo,vrenaville/ngo-addons-backport,javierTerry/odoo,bguillot/OpenUpgrade,odoo-turkiye/odoo,frouty/odoogoeen,hopeall/odoo,Codefans-fan/odoo,cdrooom/odoo,cysnake4713/odoo,rschnapka/odoo,naousse/odoo,zchking/odoo,tvibliani/odoo,Elico-Corp/odoo_OCB,sysadminmatmoz/OCB,poljeff/odoo,Noviat/odoo,hip-odoo/odoo,joshuajan/odoo,tarzan0820/odoo,hmen89/odoo,rgeleta/odoo,sve-odoo/odoo,tangyiyong/odoo,Daniel-CA/odoo,OpenPymeMx/OCB,Bachaco-ve/odoo,frouty/odoo_oph,jusdng/odoo,bakhtout/odoo-educ,apocalypsebg/odoo,papouso/odoo,MarcosCommunity/odoo,Nowheresly/odoo,slevenhagen/odoo-npg,waytai/odoo,hoatle/odoo,jiachenning/odoo,PongPi/isl-odoo,shivam1111/odoo,brijeshkesariya/odoo,storm-computers/odoo,nuuuboo/odoo,florian-dacosta/OpenUpgrade,rubencabrera/odoo,takis/odoo,cloud9UG/odoo,n0m4dz/odoo,realsaiko/odoo,BT-rmartin/odoo,jfpla/odoo,patmcb/odoo,charbeljc/OCB,ThinkOpen-Solutions/odoo,ramadhane/odoo,stonegithubs/odoo,synconics/odoo,odootr/odoo,hubsaysnuaa/odoo,VielSoft/odoo,xzYue/odoo,charbeljc/OCB,bobisme/odoo,ehirt/odoo,lsinfo/odoo,ovnicraft/odoo,gdgellatly/OCB1,jaxkodex/odoo,gsmartway/odoo,ovnicraft/odoo,vnsofthe/odoo,aviciimaxwell/odoo,prospwro/odoo,ShineFan/odoo,credativUK/OCB,brijeshkesariya/odoo,ovnicraft/odoo,damdam-s/OpenUpgrade,spadae22/odoo,nagyistoce/odoo-dev-odoo,naousse/odoo,odootr/odoo,cedk/odoo,x111ong/odoo,nuuuboo/odoo,nhomar/odoo,factorlibre/OCB,markeTIC/OCB,addition-it-solutions/project-all,numerigraphe/odoo,papouso/odoo,apocalypsebg/odoo,CatsAndDogsbvba/odoo,dsfsdgsbngfggb/odoo,fgesora/odoo,cdrooom/odoo,highco-groupe/odoo,naousse/odoo,OpenUpgrade-dev/OpenUpgrade,guewen/OpenUpgrade,dsfsdgsbngfggb/odoo,savoirfairelinux/odoo,virgree/odoo,gvb/odoo,gvb/odoo,bwrsandman/OpenUpgrade,datenbetrieb/odoo,chiragjogi/odoo,CatsAndDogsbvba/odoo,Antiun/odoo,grap/OCB,mlaitinen/odoo,pedrobaeza/odoo,CatsAndDogsbvba/odoo,microcom/odoo,deKupini/erp,fossoult/odoo,oasiswork/odoo,aviciimaxwell/odoo,credativUK/OCB,srsman/odoo,christophlsa/odoo,sadleader/odoo,kybriainfotech/iSocioCRM,oasiswork/odoo,lightcn/odoo,odoo-turkiye/odoo,florian-dacosta/OpenUpgrade,dariemp/odoo,NeovaHealth/odoo,FlorianLudwig/odoo,osvalr/odoo,ovnicraft/odoo,gavin-feng/odoo,funkring/fdoo,diagramsoftware/odoo,pedrobaeza/odoo,csrocha/OpenUpgrade,Endika/OpenUpgrade,hubsaysnuaa/odoo,ccomb/OpenUpgrade,FlorianLudwig/odoo,patmcb/odoo,ApuliaSoftware/odoo,odoousers2014/odoo,KontorConsulting/odoo,rowemoore/odoo,jfpla/odoo,dalegregory/odoo,bkirui/odoo,Adel-Magebinary/odoo,shingonoide/odoo,VielSoft/odoo,kittiu/odoo,Bachaco-ve/odoo,jesramirez/odoo,cedk/odoo,Noviat/odoo,VielSoft/odoo,pplatek/odoo,hanicker/odoo,addition-it-solutions/project-all,dfang/odoo,pedrobaeza/odoo,Daniel-CA/odoo,jaxkodex/odoo,Codefans-fan/odoo,tangyiyong/odoo,jiachenning/odoo,aviciimaxwell/odoo,slevenhagen/odoo-npg,fgesora/odoo,naousse/odoo,apanju/odoo,oihane/odoo,pplatek/odoo,abstract-open-solutions/OCB,eino-makitalo/odoo,mkieszek/odoo,NeovaHealth/odoo,abstract-open-solutions/OCB,mkieszek/odoo,guewen/OpenUpgrade,demon-ru/iml-crm,joshuajan/odoo,shaufi/odoo,klunwebale/odoo,damdam-s/OpenUpgrade,VitalPet/odoo,agrista/odoo-saas,patmcb/odoo,rgeleta/odoo,jiachenning/odoo,gdgellatly/OCB1,Antiun/odoo,optima-ict/odoo,sysadminmatmoz/OCB,Eric-Zhong/odoo,OpenUpgrade/OpenUpgrade,hopeall/odoo,feroda/odoo,Endika/OpenUpgrade,makinacorpus/odoo,fevxie/odoo,datenbetrieb/odoo,joariasl/odoo,dgzurita/odoo,rschnapka/odoo,sysadminmatmoz/OCB,apocalypsebg/odoo,frouty/odoogoeen,joariasl/odoo,QianBIG/odoo,gorjuce/odoo,dfang/odoo,mlaitinen/odoo,Ichag/odoo,bobisme/odoo,jeasoft/odoo,sergio-incaser/odoo,ehirt/odoo,minhtuancn/odoo,nhomar/odoo-mirror,fuselock/odoo,slevenhagen/odoo,takis/odoo,gavin-feng/odoo,lombritz/odoo,tinkhaven-organization/odoo,fevxie/odoo,guerrerocarlos/odoo,thanhacun/odoo,bobisme/odoo,FlorianLudwig/odoo,csrocha/OpenUpgrade,savoirfairelinux/odoo,gvb/odoo,idncom/odoo,dgzurita/odoo,lsinfo/odoo,Danisan/odoo-1,spadae22/odoo,virgree/odoo,ccomb/OpenUpgrade,camptocamp/ngo-addons-backport,0k/odoo,Ernesto99/odoo,tvibliani/odoo,rowemoore/odoo,odooindia/odoo,rubencabrera/odoo,Elico-Corp/odoo_OCB,kirca/OpenUpgrade,Bachaco-ve/odoo,podemos-info/odoo,cysnake4713/odoo,nuncjo/odoo,pedrobaeza/odoo,mvaled/OpenUpgrade,Danisan/odoo-1,vnsofthe/odoo,prospwro/odoo,hip-odoo/odoo,shaufi/odoo,nhomar/odoo,thanhacun/odoo,takis/odoo,AuyaJackie/odoo,lgscofield/odoo,mmbtba/odoo,papouso/odoo,SerpentCS/odoo,jfpla/odoo,leorochael/odoo,BT-fgarbely/odoo,OSSESAC/odoopubarquiluz,kirca/OpenUpgrade,Antiun/odoo,jiangzhixiao/odoo,rahuldhote/odoo,BT-fgarbely/odoo,NeovaHealth/odoo,ChanduERP/odoo,nitinitprof/odoo,simongoffin/website_version,fdvarela/odoo8,CubicERP/odoo,inspyration/odoo,apanju/odoo,rahuldhote/odoo,laslabs/odoo,minhtuancn/odoo,nhomar/odoo,fevxie/odoo,mszewczy/odoo,OpenUpgrade-dev/OpenUpgrade,RafaelTorrealba/odoo,JGarcia-Panach/odoo,tvtsoft/odoo8,blaggacao/OpenUpgrade,Endika/OpenUpgrade,dariemp/odoo,Endika/OpenUpgrade,ojengwa/odoo,inspyration/odoo,fevxie/odoo,gsmartway/odoo,SerpentCS/odoo,credativUK/OCB,rubencabrera/odoo,frouty/odoo_oph,damdam-s/OpenUpgrade,collex100/odoo,optima-ict/odoo,storm-computers/odoo,sysadminmatmoz/OCB,kifcaliph/odoo,sebalix/OpenUpgrade,Maspear/odoo,tinkerthaler/odoo,rschnapka/odoo,numerigraphe/odoo,ApuliaSoftware/odoo,jiangzhixiao/odoo,rowemoore/odoo,prospwro/odoo,OpenUpgrade/OpenUpgrade,tvibliani/odoo,collex100/odoo,credativUK/OCB,slevenhagen/odoo,VielSoft/odoo,tvibliani/odoo,salaria/odoo,pplatek/odoo,AuyaJackie/odoo,dkubiak789/odoo,jusdng/odoo,alhashash/odoo,pedrobaeza/odoo,codekaki/odoo,bkirui/odoo,bkirui/odoo,AuyaJackie/odoo,stonegithubs/odoo,fuhongliang/odoo,kirca/OpenUpgrade,0k/OpenUpgrade,ecosoft-odoo/odoo,hoatle/odoo,fdvarela/odoo8,hifly/OpenUpgrade,realsaiko/odoo,javierTerry/odoo,blaggacao/OpenUpgrade,wangjun/odoo,elmerdpadilla/iv,dalegregory/odoo,ujjwalwahi/odoo,jfpla/odoo,avoinsystems/odoo,OpenPymeMx/OCB,makinacorpus/odoo,draugiskisprendimai/odoo,vnsofthe/odoo,BT-rmartin/odoo,thanhacun/odoo,hubsaysnuaa/odoo,christophlsa/odoo,jiangzhixiao/odoo,jeasoft/odoo,tangyiyong/odoo,vrenaville/ngo-addons-backport,hoatle/odoo,frouty/odoogoeen,addition-it-solutions/project-all,bealdav/OpenUpgrade,lgscofield/odoo,jpshort/odoo,srimai/odoo,ChanduERP/odoo,n0m4dz/odoo,fuhongliang/odoo,luiseduardohdbackup/odoo,aviciimaxwell/odoo,datenbetrieb/odoo,mustafat/odoo-1,Gitlab11/odoo,mszewczy/odoo,apocalypsebg/odoo,draugiskisprendimai/odoo,Nowheresly/odoo,glovebx/odoo,savoirfairelinux/OpenUpgrade,JCA-Developpement/Odoo,CubicERP/odoo,salaria/odoo,microcom/odoo,hassoon3/odoo,colinnewell/odoo,savoirfairelinux/odoo,gorjuce/odoo,guewen/OpenUpgrade,Ichag/odoo,fossoult/odoo,srimai/odoo,florian-dacosta/OpenUpgrade,apanju/GMIO_Odoo,doomsterinc/odoo,factorlibre/OCB,havt/odoo,florian-dacosta/OpenUpgrade,podemos-info/odoo,luistorresm/odoo,Ernesto99/odoo,havt/odoo,rubencabrera/odoo,makinacorpus/odoo,ojengwa/odoo,dalegregory/odoo,rowemoore/odoo,realsaiko/odoo,highco-groupe/odoo,damdam-s/OpenUpgrade,odooindia/odoo,tvtsoft/odoo8,stonegithubs/odoo,Endika/odoo,QianBIG/odoo,rgeleta/odoo,papouso/odoo,ccomb/OpenUpgrade,lgscofield/odoo,fuhongliang/odoo,slevenhagen/odoo-npg,grap/OCB,lgscofield/odoo,gdgellatly/OCB1,rschnapka/odoo,grap/OCB,xujb/odoo,hmen89/odoo,eino-makitalo/odoo,ubic135/odoo-design,joariasl/odoo,Danisan/odoo-1,cloud9UG/odoo,bakhtout/odoo-educ,cedk/odoo,bguillot/OpenUpgrade,Daniel-CA/odoo,cloud9UG/odoo,bkirui/odoo,OpusVL/odoo,gdgellatly/OCB1,mvaled/OpenUpgrade,odoousers2014/odoo,ChanduERP/odoo,ubic135/odoo-design,cpyou/odoo,Danisan/odoo-1,florian-dacosta/OpenUpgrade,Endika/odoo,hbrunn/OpenUpgrade,alqfahad/odoo,blaggacao/OpenUpgrade,fossoult/odoo,bguillot/OpenUpgrade,0k/odoo,mszewczy/odoo,dllsf/odootest,damdam-s/OpenUpgrade,provaleks/o8,OpenPymeMx/OCB,ccomb/OpenUpgrade,grap/OpenUpgrade,CubicERP/odoo,poljeff/odoo,ingadhoc/odoo,christophlsa/odoo,oihane/odoo,MarcosCommunity/odoo,nuuuboo/odoo,datenbetrieb/odoo,janocat/odoo,JGarcia-Panach/odoo,VitalPet/odoo,jesramirez/odoo,MarcosCommunity/odoo,BT-ojossen/odoo,hoatle/odoo,SerpentCS/odoo,minhtuancn/odoo,bwrsandman/OpenUpgrade,waytai/odoo,camptocamp/ngo-addons-backport,Nowheresly/odoo,matrixise/odoo,markeTIC/OCB,ccomb/OpenUpgrade,shaufi/odoo,x111ong/odoo,SerpentCS/odoo,sinbazhou/odoo,florentx/OpenUpgrade,nagyistoce/odoo-dev-odoo,OSSESAC/odoopubarquiluz,sadleader/odoo,thanhacun/odoo,joariasl/odoo,incaser/odoo-odoo,thanhacun/odoo,ovnicraft/odoo,frouty/odoo_oph,dllsf/odootest,funkring/fdoo,Grirrane/odoo,nuncjo/odoo,hubsaysnuaa/odoo,incaser/odoo-odoo,ihsanudin/odoo,luistorresm/odoo,avoinsystems/odoo,factorlibre/OCB,bguillot/OpenUpgrade,leorochael/odoo,papouso/odoo,RafaelTorrealba/odoo,dezynetechnologies/odoo,Endika/odoo,rubencabrera/odoo,lsinfo/odoo,savoirfairelinux/OpenUpgrade,agrista/odoo-saas,prospwro/odoo,hip-odoo/odoo,SAM-IT-SA/odoo,dkubiak789/odoo,tvtsoft/odoo8,spadae22/odoo,ramadhane/odoo,Grirrane/odoo,leorochael/odoo,sebalix/OpenUpgrade,luiseduardohdbackup/odoo,factorlibre/OCB,andreparames/odoo,Nick-OpusVL/odoo,laslabs/odoo,rgeleta/odoo,joariasl/odoo,omprakasha/odoo,slevenhagen/odoo,mlaitinen/odoo,gorjuce/odoo,tvtsoft/odoo8,sv-dev1/odoo,ihsanudin/odoo,dezynetechnologies/odoo,goliveirab/odoo,vnsofthe/odoo,Noviat/odoo,hifly/OpenUpgrade,kybriainfotech/iSocioCRM,abdellatifkarroum/odoo,pplatek/odoo,prospwro/odoo,takis/odoo,BT-astauder/odoo,alexcuellar/odoo,gavin-feng/odoo,Gitlab11/odoo,feroda/odoo,highco-groupe/odoo,srimai/odoo,erkrishna9/odoo,x111ong/odoo,slevenhagen/odoo-npg,alhashash/odoo,aviciimaxwell/odoo,jeasoft/odoo,xzYue/odoo,xujb/odoo,frouty/odoogoeen,microcom/odoo,JGarcia-Panach/odoo,leorochael/odoo,srimai/odoo,nhomar/odoo-mirror,stonegithubs/odoo,odootr/odoo,eino-makitalo/odoo,mmbtba/odoo,erkrishna9/odoo,sergio-incaser/odoo,lsinfo/odoo,jpshort/odoo,ujjwalwahi/odoo,mkieszek/odoo,tvtsoft/odoo8,Drooids/odoo,sysadminmatmoz/OCB,lombritz/odoo,ApuliaSoftware/odoo,BT-rmartin/odoo,frouty/odoo_oph,srsman/odoo,bkirui/odoo,ccomb/OpenUpgrade,wangjun/odoo,codekaki/odoo,omprakasha/odoo,jpshort/odoo,matrixise/odoo,funkring/fdoo,erkrishna9/odoo,hopeall/odoo,PongPi/isl-odoo,brijeshkesariya/odoo,Maspear/odoo,arthru/OpenUpgrade,nhomar/odoo-mirror,tinkerthaler/odoo,feroda/odoo,BT-fgarbely/odoo,grap/OpenUpgrade,hbrunn/OpenUpgrade,agrista/odoo-saas,OpusVL/odoo,dariemp/odoo,BT-ojossen/odoo,synconics/odoo,mlaitinen/odoo,leoliujie/odoo,ChanduERP/odoo,stephen144/odoo,savoirfairelinux/OpenUpgrade,nhomar/odoo,salaria/odoo,provaleks/o8,patmcb/odoo,JGarcia-Panach/odoo,bealdav/OpenUpgrade,leoliujie/odoo,gorjuce/odoo,glovebx/odoo,syci/OCB,CopeX/odoo,hifly/OpenUpgrade,cloud9UG/odoo,inspyration/odoo,hassoon3/odoo,alexcuellar/odoo,bealdav/OpenUpgrade,fjbatresv/odoo,cdrooom/odoo,sinbazhou/odoo,acshan/odoo,diagramsoftware/odoo,rdeheele/odoo,Endika/odoo,BT-astauder/odoo,apanju/GMIO_Odoo,nagyistoce/odoo-dev-odoo,jfpla/odoo,damdam-s/OpenUpgrade,fjbatresv/odoo,spadae22/odoo,Eric-Zhong/odoo,Maspear/odoo,zchking/odoo,sve-odoo/odoo,ramitalat/odoo,avoinsystems/odoo,rdeheele/odoo,fgesora/odoo,joshuajan/odoo,fdvarela/odoo8,alhashash/odoo,diagramsoftware/odoo,Endika/OpenUpgrade,Ichag/odoo,goliveirab/odoo,hip-odoo/odoo,odoo-turkiye/odoo,salaria/odoo,bakhtout/odoo-educ,bealdav/OpenUpgrade,camptocamp/ngo-addons-backport,luistorresm/odoo,apanju/odoo,x111ong/odoo,nhomar/odoo-mirror,havt/odoo,osvalr/odoo,collex100/odoo,fossoult/odoo,OSSESAC/odoopubarquiluz,tangyiyong/odoo,zchking/odoo,OpenPymeMx/OCB,fjbatresv/odoo,klunwebale/odoo,Daniel-CA/odoo,mmbtba/odoo,xujb/odoo,alexteodor/odoo,odoo-turkiye/odoo,fuhongliang/odoo,massot/odoo,windedge/odoo,bakhtout/odoo-educ,sergio-incaser/odoo,xujb/odoo,jiangzhixiao/odoo,JonathanStein/odoo,ClearCorp-dev/odoo,lombritz/odoo,alqfahad/odoo,tinkhaven-organization/odoo,oliverhr/odoo,nuuuboo/odoo,slevenhagen/odoo,havt/odoo,abdellatifkarroum/odoo,juanalfonsopr/odoo,tarzan0820/odoo,codekaki/odoo,PongPi/isl-odoo,dezynetechnologies/odoo,chiragjogi/odoo,alexteodor/odoo,havt/odoo,idncom/odoo,bplancher/odoo,csrocha/OpenUpgrade,lightcn/odoo,juanalfonsopr/odoo,Eric-Zhong/odoo,OpenUpgrade-dev/OpenUpgrade,jiachenning/odoo,Daniel-CA/odoo,jeasoft/odoo,abstract-open-solutions/OCB,Maspear/odoo,OpenUpgrade/OpenUpgrade,janocat/odoo,fdvarela/odoo8,hopeall/odoo,florentx/OpenUpgrade,klunwebale/odoo,camptocamp/ngo-addons-backport,hip-odoo/odoo,VitalPet/odoo,csrocha/OpenUpgrade,abdellatifkarroum/odoo,charbeljc/OCB,dgzurita/odoo,highco-groupe/odoo,fuselock/odoo,rdeheele/odoo,markeTIC/OCB,kittiu/odoo,odootr/odoo,apanju/GMIO_Odoo,jusdng/odoo,mvaled/OpenUpgrade,n0m4dz/odoo,storm-computers/odoo,alexcuellar/odoo,TRESCLOUD/odoopub,ygol/odoo,CopeX/odoo,tangyiyong/odoo,fgesora/odoo,ujjwalwahi/odoo,luiseduardohdbackup/odoo,OpenUpgrade/OpenUpgrade,tinkhaven-organization/odoo,christophlsa/odoo,shaufi10/odoo,jolevq/odoopub,Kilhog/odoo,eino-makitalo/odoo,ehirt/odoo,jiangzhixiao/odoo,Adel-Magebinary/odoo,glovebx/odoo,lombritz/odoo,abstract-open-solutions/OCB,srsman/odoo,gorjuce/odoo,andreparames/odoo,podemos-info/odoo,JonathanStein/odoo,prospwro/odoo,erkrishna9/odoo,fossoult/odoo,GauravSahu/odoo,bakhtout/odoo-educ,ujjwalwahi/odoo,fjbatresv/odoo,leoliujie/odoo,wangjun/odoo,jpshort/odoo,0k/OpenUpgrade,srsman/odoo,fuselock/odoo,colinnewell/odoo,simongoffin/website_version,Endika/odoo,deKupini/erp,jeasoft/odoo,jeasoft/odoo,xzYue/odoo,fjbatresv/odoo,mustafat/odoo-1,bguillot/OpenUpgrade,bealdav/OpenUpgrade,osvalr/odoo,fossoult/odoo,florentx/OpenUpgrade,dfang/odoo,frouty/odoogoeen,dkubiak789/odoo,tangyiyong/odoo,apanju/GMIO_Odoo,ShineFan/odoo,Danisan/odoo-1,kybriainfotech/iSocioCRM,slevenhagen/odoo-npg,lightcn/odoo,xzYue/odoo,0k/OpenUpgrade,shivam1111/odoo,Drooids/odoo,dgzurita/odoo,BT-ojossen/odoo,mlaitinen/odoo,ramitalat/odoo,JGarcia-Panach/odoo,NeovaHealth/odoo,sve-odoo/odoo,markeTIC/OCB,steedos/odoo,ojengwa/odoo,abdellatifkarroum/odoo,addition-it-solutions/project-all,shaufi/odoo,QianBIG/odoo,CopeX/odoo,KontorConsulting/odoo,odoo-turkiye/odoo,rowemoore/odoo,Adel-Magebinary/odoo,florian-dacosta/OpenUpgrade,odootr/odoo,abdellatifkarroum/odoo,shivam1111/odoo,srimai/odoo,Maspear/odoo,omprakasha/odoo,alqfahad/odoo,hubsaysnuaa/odoo,elmerdpadilla/iv,ingadhoc/odoo,salaria/odoo,lsinfo/odoo,kirca/OpenUpgrade,ThinkOpen-Solutions/odoo,cedk/odoo,fuselock/odoo,salaria/odoo,microcom/odoo,bwrsandman/OpenUpgrade,shingonoide/odoo,Endika/OpenUpgrade,frouty/odoogoeen,syci/OCB,fevxie/odoo,oihane/odoo,nhomar/odoo-mirror,tinkerthaler/odoo,shivam1111/odoo,oliverhr/odoo,Ichag/odoo,GauravSahu/odoo,colinnewell/odoo,nitinitprof/odoo,SAM-IT-SA/odoo,sadleader/odoo,dsfsdgsbngfggb/odoo,jiangzhixiao/odoo,Grirrane/odoo,srsman/odoo,sve-odoo/odoo,elmerdpadilla/iv,abdellatifkarroum/odoo,gsmartway/odoo,kybriainfotech/iSocioCRM,bobisme/odoo,Endika/odoo,bplancher/odoo,Adel-Magebinary/odoo,idncom/odoo,ihsanudin/odoo,apanju/odoo,sysadminmatmoz/OCB,slevenhagen/odoo-npg,stephen144/odoo,vrenaville/ngo-addons-backport,fevxie/odoo,jaxkodex/odoo,nexiles/odoo,OSSESAC/odoopubarquiluz,NeovaHealth/odoo,leoliujie/odoo,oihane/odoo,tarzan0820/odoo,hoatle/odoo,Maspear/odoo,avoinsystems/odoo,ujjwalwahi/odoo,rgeleta/odoo,waytai/odoo,nuuuboo/odoo,poljeff/odoo,Kilhog/odoo,lgscofield/odoo,numerigraphe/odoo,Noviat/odoo,osvalr/odoo,jpshort/odoo,dariemp/odoo,Elico-Corp/odoo_OCB,bguillot/OpenUpgrade,CopeX/odoo,x111ong/odoo,tarzan0820/odoo,cysnake4713/odoo,dezynetechnologies/odoo,hubsaysnuaa/odoo,abstract-open-solutions/OCB,JCA-Developpement/Odoo,ThinkOpen-Solutions/odoo,QianBIG/odoo,SerpentCS/odoo,florentx/OpenUpgrade,VielSoft/odoo,dllsf/odootest,stonegithubs/odoo,ClearCorp-dev/odoo,leorochael/odoo,virgree/odoo,incaser/odoo-odoo,ehirt/odoo,dalegregory/odoo,grap/OCB,lgscofield/odoo,nhomar/odoo,hanicker/odoo,savoirfairelinux/odoo,Noviat/odoo,codekaki/odoo,pedrobaeza/OpenUpgrade,Bachaco-ve/odoo,BT-rmartin/odoo,PongPi/isl-odoo,simongoffin/website_version,factorlibre/OCB,hbrunn/OpenUpgrade,takis/odoo,GauravSahu/odoo,mustafat/odoo-1,sinbazhou/odoo,hoatle/odoo,acshan/odoo,hopeall/odoo,AuyaJackie/odoo,mmbtba/odoo,fossoult/odoo,javierTerry/odoo,florentx/OpenUpgrade,vnsofthe/odoo,OpenPymeMx/OCB,Drooids/odoo,wangjun/odoo,gdgellatly/OCB1,camptocamp/ngo-addons-backport,csrocha/OpenUpgrade,Eric-Zhong/odoo,hifly/OpenUpgrade,rgeleta/odoo,oihane/odoo,joariasl/odoo,rahuldhote/odoo,mustafat/odoo-1,addition-it-solutions/project-all,oliverhr/odoo,SAM-IT-SA/odoo,SAM-IT-SA/odoo,optima-ict/odoo,apocalypsebg/odoo,klunwebale/odoo,MarcosCommunity/odoo,havt/odoo,cloud9UG/odoo,slevenhagen/odoo,Codefans-fan/odoo,SAM-IT-SA/odoo,tvibliani/odoo,idncom/odoo,abdellatifkarroum/odoo,alexcuellar/odoo,sergio-incaser/odoo,nagyistoce/odoo-dev-odoo,agrista/odoo-saas,sebalix/OpenUpgrade,0k/OpenUpgrade,numerigraphe/odoo,fgesora/odoo,QianBIG/odoo,leoliujie/odoo,ramadhane/odoo,waytai/odoo,cpyou/odoo,collex100/odoo,kittiu/odoo,alexteodor/odoo,Gitlab11/odoo,hubsaysnuaa/odoo,Bachaco-ve/odoo,florentx/OpenUpgrade,oasiswork/odoo,RafaelTorrealba/odoo,kybriainfotech/iSocioCRM,draugiskisprendimai/odoo,laslabs/odoo,simongoffin/website_version,grap/OCB,SerpentCS/odoo,apanju/odoo,ihsanudin/odoo,takis/odoo,BT-fgarbely/odoo,synconics/odoo,tinkerthaler/odoo,lightcn/odoo,ojengwa/odoo,fjbatresv/odoo,markeTIC/OCB,FlorianLudwig/odoo,jaxkodex/odoo,wangjun/odoo,mszewczy/odoo,charbeljc/OCB,leoliujie/odoo,ramadhane/odoo,windedge/odoo,goliveirab/odoo,blaggacao/OpenUpgrade,optima-ict/odoo,Noviat/odoo,pedrobaeza/OpenUpgrade,credativUK/OCB,charbeljc/OCB,sebalix/OpenUpgrade,Antiun/odoo,OpusVL/odoo,codekaki/odoo,lombritz/odoo,dariemp/odoo,fevxie/odoo,odoousers2014/odoo,tinkhaven-organization/odoo,oihane/odoo,grap/OpenUpgrade,stephen144/odoo,bobisme/odoo,BT-rmartin/odoo,fuhongliang/odoo,mlaitinen/odoo,joariasl/odoo,odoousers2014/odoo,abenzbiria/clients_odoo,ShineFan/odoo,ClearCorp-dev/odoo,ChanduERP/odoo,Gitlab11/odoo,gavin-feng/odoo,mvaled/OpenUpgrade,kittiu/odoo,Eric-Zhong/odoo,ecosoft-odoo/odoo,datenbetrieb/odoo,laslabs/odoo,goliveirab/odoo,Elico-Corp/odoo_OCB,leoliujie/odoo,abenzbiria/clients_odoo,hifly/OpenUpgrade,datenbetrieb/odoo,charbeljc/OCB,guerrerocarlos/odoo,ramitalat/odoo,Nick-OpusVL/odoo,windedge/odoo,BT-rmartin/odoo,tinkerthaler/odoo,cedk/odoo,erkrishna9/odoo,CatsAndDogsbvba/odoo,odoousers2014/odoo,JCA-Developpement/Odoo,collex100/odoo,ojengwa/odoo,odooindia/odoo,blaggacao/OpenUpgrade,jaxkodex/odoo,savoirfairelinux/odoo,rubencabrera/odoo,Antiun/odoo,Adel-Magebinary/odoo,PongPi/isl-odoo,apanju/GMIO_Odoo,Ernesto99/odoo,virgree/odoo,synconics/odoo,nitinitprof/odoo,draugiskisprendimai/odoo,draugiskisprendimai/odoo,makinacorpus/odoo,Gitlab11/odoo,KontorConsulting/odoo,x111ong/odoo,slevenhagen/odoo,colinnewell/odoo,bwrsandman/OpenUpgrade,osvalr/odoo,bkirui/odoo,BT-ojossen/odoo,ingadhoc/odoo,ovnicraft/odoo,ecosoft-odoo/odoo,juanalfonsopr/odoo,cpyou/odoo,ehirt/odoo,dllsf/odootest,makinacorpus/odoo,jaxkodex/odoo,sv-dev1/odoo,rubencabrera/odoo,Bachaco-ve/odoo,jolevq/odoopub,Kilhog/odoo,papouso/odoo,JonathanStein/odoo,nexiles/odoo,xujb/odoo,PongPi/isl-odoo,lightcn/odoo,0k/OpenUpgrade,Adel-Magebinary/odoo,Drooids/odoo,optima-ict/odoo,apocalypsebg/odoo,waytai/odoo,kybriainfotech/iSocioCRM,demon-ru/iml-crm,hbrunn/OpenUpgrade,spadae22/odoo,podemos-info/odoo,luistorresm/odoo,osvalr/odoo,leorochael/odoo,storm-computers/odoo,dkubiak789/odoo,savoirfairelinux/odoo,ChanduERP/odoo,eino-makitalo/odoo,diagramsoftware/odoo,avoinsystems/odoo,shingonoide/odoo,OpenPymeMx/OCB,SAM-IT-SA/odoo,synconics/odoo,aviciimaxwell/odoo,Antiun/odoo,TRESCLOUD/odoopub,tangyiyong/odoo,Drooids/odoo,windedge/odoo,rahuldhote/odoo,massot/odoo,datenbetrieb/odoo,bguillot/OpenUpgrade,Kilhog/odoo,patmcb/odoo,alexcuellar/odoo,alexcuellar/odoo,nexiles/odoo,fuselock/odoo,jfpla/odoo,KontorConsulting/odoo,thanhacun/odoo,fuselock/odoo,vnsofthe/odoo,laslabs/odoo,0k/odoo,hifly/OpenUpgrade,highco-groupe/odoo,inspyration/odoo,luiseduardohdbackup/odoo,JCA-Developpement/Odoo,ccomb/OpenUpgrade,ShineFan/odoo,rowemoore/odoo,Danisan/odoo-1,leorochael/odoo,windedge/odoo,demon-ru/iml-crm,ihsanudin/odoo,lsinfo/odoo,javierTerry/odoo,Grirrane/odoo,cpyou/odoo,oasiswork/odoo,fuhongliang/odoo,mustafat/odoo-1,cedk/odoo,Daniel-CA/odoo,avoinsystems/odoo,dfang/odoo,papouso/odoo,stephen144/odoo,jolevq/odoopub,oasiswork/odoo,incaser/odoo-odoo,abenzbiria/clients_odoo,arthru/OpenUpgrade,bplancher/odoo,pedrobaeza/OpenUpgrade,hanicker/odoo,gsmartway/odoo,shaufi10/odoo,syci/OCB,JonathanStein/odoo,nexiles/odoo,rschnapka/odoo,blaggacao/OpenUpgrade,synconics/odoo,NL66278/OCB,simongoffin/website_version,mmbtba/odoo,jpshort/odoo,hassoon3/odoo,oliverhr/odoo,javierTerry/odoo,massot/odoo,OpenPymeMx/OCB,deKupini/erp,rschnapka/odoo,ygol/odoo,tinkhaven-organization/odoo,doomsterinc/odoo,CatsAndDogsbvba/odoo,grap/OpenUpgrade,Ernesto99/odoo,ygol/odoo,BT-rmartin/odoo,dfang/odoo,collex100/odoo,ramitalat/odoo,provaleks/o8,diagramsoftware/odoo,Drooids/odoo,0k/odoo,joshuajan/odoo,poljeff/odoo,fgesora/odoo,GauravSahu/odoo,pedrobaeza/OpenUpgrade,ygol/odoo,fuselock/odoo,Gitlab11/odoo,brijeshkesariya/odoo,0k/OpenUpgrade,andreparames/odoo,Elico-Corp/odoo_OCB,RafaelTorrealba/odoo,guerrerocarlos/odoo,klunwebale/odoo,Codefans-fan/odoo,zchking/odoo,arthru/OpenUpgrade,tinkhaven-organization/odoo,JonathanStein/odoo,zchking/odoo,diagramsoftware/odoo,frouty/odoogoeen,tarzan0820/odoo,ubic135/odoo-design,prospwro/odoo,Drooids/odoo,ramadhane/odoo,sve-odoo/odoo,Ernesto99/odoo,alexteodor/odoo,salaria/odoo,syci/OCB,frouty/odoogoeen,CubicERP/odoo,OSSESAC/odoopubarquiluz,Nowheresly/odoo,credativUK/OCB,fgesora/odoo,abenzbiria/clients_odoo,chiragjogi/odoo,Kilhog/odoo,gavin-feng/odoo,apanju/odoo,shaufi/odoo,lgscofield/odoo,makinacorpus/odoo,bplancher/odoo,tinkhaven-organization/odoo,gorjuce/odoo,numerigraphe/odoo,nexiles/odoo,ApuliaSoftware/odoo,alhashash/odoo,glovebx/odoo,ChanduERP/odoo,bwrsandman/OpenUpgrade,odooindia/odoo,Nick-OpusVL/odoo,dgzurita/odoo,xujb/odoo,podemos-info/odoo,nuncjo/odoo,Eric-Zhong/odoo,vrenaville/ngo-addons-backport,ehirt/odoo,pplatek/odoo,andreparames/odoo,Elico-Corp/odoo_OCB,lightcn/odoo,ThinkOpen-Solutions/odoo,alqfahad/odoo,ClearCorp-dev/odoo,ihsanudin/odoo,stonegithubs/odoo,blaggacao/OpenUpgrade,christophlsa/odoo,kittiu/odoo,kirca/OpenUpgrade,xzYue/odoo,MarcosCommunity/odoo,pplatek/odoo,jiangzhixiao/odoo,frouty/odoo_oph,MarcosCommunity/odoo,xzYue/odoo,slevenhagen/odoo,optima-ict/odoo,alqfahad/odoo,sv-dev1/odoo,microcom/odoo,rschnapka/odoo,lombritz/odoo,BT-ojossen/odoo,vrenaville/ngo-addons-backport,VitalPet/odoo,VitalPet/odoo,OpenUpgrade/OpenUpgrade,janocat/odoo,sergio-incaser/odoo,massot/odoo,jusdng/odoo,odootr/odoo,shivam1111/odoo,pedrobaeza/odoo,ramitalat/odoo,kifcaliph/odoo,draugiskisprendimai/odoo,stephen144/odoo,hopeall/odoo,gavin-feng/odoo,sebalix/OpenUpgrade,sv-dev1/odoo,shaufi10/odoo,ecosoft-odoo/odoo,brijeshkesariya/odoo,odootr/odoo,TRESCLOUD/odoopub,codekaki/odoo,dariemp/odoo,doomsterinc/odoo,idncom/odoo,mszewczy/odoo,CopeX/odoo,andreparames/odoo,nexiles/odoo,nitinitprof/odoo,syci/OCB,provaleks/o8,KontorConsulting/odoo,demon-ru/iml-crm,JGarcia-Panach/odoo,sadleader/odoo,OpenUpgrade/OpenUpgrade,elmerdpadilla/iv,ramadhane/odoo,camptocamp/ngo-addons-backport,bakhtout/odoo-educ,JGarcia-Panach/odoo,nhomar/odoo,cedk/odoo,ingadhoc/odoo,hmen89/odoo,CatsAndDogsbvba/odoo,nuuuboo/odoo,brijeshkesariya/odoo,Eric-Zhong/odoo,colinnewell/odoo,lombritz/odoo,waytai/odoo,nexiles/odoo,shingonoide/odoo,codekaki/odoo,acshan/odoo,0k/odoo,hbrunn/OpenUpgrade,klunwebale/odoo,ShineFan/odoo,grap/OCB,kifcaliph/odoo,Nowheresly/odoo,Kilhog/odoo,markeTIC/OCB,SAM-IT-SA/odoo,kittiu/odoo,provaleks/o8,nagyistoce/odoo-dev-odoo,CubicERP/odoo,FlorianLudwig/odoo,kifcaliph/odoo,Endika/odoo,steedos/odoo,NL66278/OCB,CubicERP/odoo,jesramirez/odoo,apanju/GMIO_Odoo,mlaitinen/odoo,MarcosCommunity/odoo,Nick-OpusVL/odoo,bkirui/odoo,Maspear/odoo,numerigraphe/odoo,Grirrane/odoo,chiragjogi/odoo,Ernesto99/odoo,KontorConsulting/odoo,goliveirab/odoo,damdam-s/OpenUpgrade,hassoon3/odoo,mustafat/odoo-1,luistorresm/odoo,RafaelTorrealba/odoo,apanju/odoo,rahuldhote/odoo,Danisan/odoo-1,shivam1111/odoo,BT-fgarbely/odoo,NL66278/OCB,dgzurita/odoo,dsfsdgsbngfggb/odoo,PongPi/isl-odoo,pplatek/odoo,virgree/odoo,OpenPymeMx/OCB,ThinkOpen-Solutions/odoo,dkubiak789/odoo,rdeheele/odoo,storm-computers/odoo,matrixise/odoo,Ernesto99/odoo,sv-dev1/odoo,ShineFan/odoo,steedos/odoo,luiseduardohdbackup/odoo,steedos/odoo,jiachenning/odoo,hassoon3/odoo,oihane/odoo,acshan/odoo,deKupini/erp,minhtuancn/odoo,BT-astauder/odoo,shaufi/odoo,steedos/odoo,incaser/odoo-odoo,dkubiak789/odoo,vrenaville/ngo-addons-backport,Antiun/odoo,dsfsdgsbngfggb/odoo,Codefans-fan/odoo,vnsofthe/odoo,omprakasha/odoo,abenzbiria/clients_odoo,minhtuancn/odoo,omprakasha/odoo,doomsterinc/odoo,KontorConsulting/odoo,dalegregory/odoo,fjbatresv/odoo,luistorresm/odoo,sinbazhou/odoo,gdgellatly/OCB1,realsaiko/odoo,andreparames/odoo,laslabs/odoo,hoatle/odoo,sinbazhou/odoo,srimai/odoo,ecosoft-odoo/odoo,gvb/odoo,credativUK/OCB,csrocha/OpenUpgrade,ujjwalwahi/odoo,charbeljc/OCB,pedrobaeza/OpenUpgrade,kirca/OpenUpgrade,poljeff/odoo,jpshort/odoo,eino-makitalo/odoo,doomsterinc/odoo,srimai/odoo,Noviat/odoo,srsman/odoo,jeasoft/odoo,grap/OpenUpgrade,omprakasha/odoo,mustafat/odoo-1,demon-ru/iml-crm,vrenaville/ngo-addons-backport,virgree/odoo,spadae22/odoo,deKupini/erp,Adel-Magebinary/odoo,feroda/odoo,AuyaJackie/odoo,VielSoft/odoo,windedge/odoo,nitinitprof/odoo,TRESCLOUD/odoopub,shaufi10/odoo,Endika/OpenUpgrade,realsaiko/odoo,javierTerry/odoo,savoirfairelinux/OpenUpgrade,synconics/odoo,GauravSahu/odoo,dariemp/odoo,goliveirab/odoo,christophlsa/odoo,lsinfo/odoo,hanicker/odoo,GauravSahu/odoo,juanalfonsopr/odoo,janocat/odoo,tvibliani/odoo,cloud9UG/odoo,fuhongliang/odoo,tarzan0820/odoo,guewen/OpenUpgrade,ingadhoc/odoo,ApuliaSoftware/odoo,doomsterinc/odoo,juanalfonsopr/odoo,sv-dev1/odoo,nuuuboo/odoo,credativUK/OCB,ojengwa/odoo,feroda/odoo,funkring/fdoo,guerrerocarlos/odoo,hmen89/odoo,diagramsoftware/odoo,dezynetechnologies/odoo,ramadhane/odoo,guerrerocarlos/odoo,mvaled/OpenUpgrade,gvb/odoo,ClearCorp-dev/odoo,VitalPet/odoo,avoinsystems/odoo,glovebx/odoo,ApuliaSoftware/odoo,sadleader/odoo,xujb/odoo,kybriainfotech/iSocioCRM,nuncjo/odoo,oasiswork/odoo,ygol/odoo,idncom/odoo,feroda/odoo,Ichag/odoo,doomsterinc/odoo,wangjun/odoo,klunwebale/odoo,OSSESAC/odoopubarquiluz,gsmartway/odoo,janocat/odoo,guewen/OpenUpgrade,BT-ojossen/odoo,shaufi10/odoo,minhtuancn/odoo,QianBIG/odoo,mmbtba/odoo,ygol/odoo,jiachenning/odoo,nitinitprof/odoo,brijeshkesariya/odoo,takis/odoo,tarzan0820/odoo,dsfsdgsbngfggb/odoo,mvaled/OpenUpgrade,makinacorpus/odoo,shaufi10/odoo,jeasoft/odoo,bwrsandman/OpenUpgrade,rschnapka/odoo,hopeall/odoo,dsfsdgsbngfggb/odoo,microcom/odoo,BT-astauder/odoo,sv-dev1/odoo,sebalix/OpenUpgrade,omprakasha/odoo,bobisme/odoo,gsmartway/odoo,VitalPet/odoo,ecosoft-odoo/odoo,OpenUpgrade-dev/OpenUpgrade,ingadhoc/odoo,camptocamp/ngo-addons-backport,bplancher/odoo,cloud9UG/odoo,RafaelTorrealba/odoo,janocat/odoo,n0m4dz/odoo,luistorresm/odoo,tinkerthaler/odoo,Nick-OpusVL/odoo,steedos/odoo,grap/OpenUpgrade,glovebx/odoo,ovnicraft/odoo,Nowheresly/odoo,CatsAndDogsbvba/odoo,gavin-feng/odoo,arthru/OpenUpgrade,jfpla/odoo,waytai/odoo,Bachaco-ve/odoo,OpusVL/odoo,chiragjogi/odoo,chiragjogi/odoo,gdgellatly/OCB1,sinbazhou/odoo,ThinkOpen-Solutions/odoo,stephen144/odoo,NL66278/OCB,virgree/odoo,javierTerry/odoo,ecosoft-odoo/odoo,fdvarela/odoo8,GauravSahu/odoo,agrista/odoo-saas,acshan/odoo,bakhtout/odoo-educ,bplancher/odoo,nuncjo/odoo,BT-fgarbely/odoo,gsmartway/odoo,funkring/fdoo,ujjwalwahi/odoo,hip-odoo/odoo,ApuliaSoftware/odoo,shingonoide/odoo,janocat/odoo,provaleks/o8,odoo-turkiye/odoo,shingonoide/odoo,alqfahad/odoo,christophlsa/odoo,oliverhr/odoo,nuncjo/odoo,dllsf/odootest,zchking/odoo,Nowheresly/odoo,sinbazhou/odoo,AuyaJackie/odoo,alhashash/odoo,minhtuancn/odoo,odoousers2014/odoo,jusdng/odoo,codekaki/odoo,alexteodor/odoo,AuyaJackie/odoo,factorlibre/OCB,dgzurita/odoo,guewen/OpenUpgrade,matrixise/odoo,srsman/odoo,hmen89/odoo,JCA-Developpement/Odoo,alhashash/odoo,eino-makitalo/odoo,NeovaHealth/odoo,CopeX/odoo,apocalypsebg/odoo,oliverhr/odoo,juanalfonsopr/odoo,slevenhagen/odoo-npg,numerigraphe/odoo,rahuldhote/odoo,luiseduardohdbackup/odoo,BT-astauder/odoo,Nick-OpusVL/odoo,mkieszek/odoo,ihsanudin/odoo,n0m4dz/odoo,idncom/odoo,ubic135/odoo-design,zchking/odoo,JonathanStein/odoo,jesramirez/odoo,tinkerthaler/odoo,n0m4dz/odoo,sysadminmatmoz/OCB,Ichag/odoo,FlorianLudwig/odoo,dalegregory/odoo,massot/odoo,tvtsoft/odoo8,sergio-incaser/odoo,mvaled/OpenUpgrade,ramitalat/odoo,Codefans-fan/odoo,podemos-info/odoo,ojengwa/odoo,windedge/odoo,thanhacun/odoo,pedrobaeza/OpenUpgrade,bobisme/odoo,Kilhog/odoo,acshan/odoo,wangjun/odoo,rowemoore/odoo,mszewczy/odoo,grap/OpenUpgrade,bealdav/OpenUpgrade,funkring/fdoo,gdgellatly/OCB1,shaufi10/odoo,xzYue/odoo,alqfahad/odoo,bwrsandman/OpenUpgrade,jolevq/odoopub,rdeheele/odoo,dezynetechnologies/odoo,savoirfairelinux/OpenUpgrade,aviciimaxwell/odoo,hanicker/odoo,OpenUpgrade/OpenUpgrade,TRESCLOUD/odoopub,collex100/odoo,ehirt/odoo,jusdng/odoo,sebalix/OpenUpgrade,RafaelTorrealba/odoo,Gitlab11/odoo,Grirrane/odoo,gvb/odoo,abstract-open-solutions/OCB,mszewczy/odoo,jolevq/odoopub,acshan/odoo,n0m4dz/odoo,arthru/OpenUpgrade,camptocamp/ngo-addons-backport,OpenUpgrade-dev/OpenUpgrade,hanicker/odoo,shaufi/odoo,Daniel-CA/odoo,storm-computers/odoo,nuncjo/odoo,ingadhoc/odoo,vrenaville/ngo-addons-backport,ygol/odoo,podemos-info/odoo,matrixise/odoo,guerrerocarlos/odoo,JonathanStein/odoo,mkieszek/odoo,poljeff/odoo,stonegithubs/odoo,chiragjogi/odoo,shivam1111/odoo,savoirfairelinux/OpenUpgrade,gorjuce/odoo,odooindia/odoo,incaser/odoo-odoo,joshuajan/odoo,alexcuellar/odoo,juanalfonsopr/odoo,MarcosCommunity/odoo,BT-ojossen/odoo,kittiu/odoo,steedos/odoo,patmcb/odoo,VitalPet/odoo,Codefans-fan/odoo,x111ong/odoo,tvibliani/odoo,glovebx/odoo,cysnake4713/odoo,gvb/odoo,rgeleta/odoo,incaser/odoo-odoo,BT-fgarbely/odoo,Nick-OpusVL/odoo,Ichag/odoo,elmerdpadilla/iv,spadae22/odoo,rahuldhote/odoo,goliveirab/odoo,NL66278/OCB,dfang/odoo,andreparames/odoo,funkring/fdoo,grap/OCB,guewen/OpenUpgrade,oliverhr/odoo,mkieszek/odoo,ubic135/odoo-design,hifly/OpenUpgrade,luiseduardohdbackup/odoo,frouty/odoo_oph,ThinkOpen-Solutions/odoo,CopeX/odoo,naousse/odoo,jaxkodex/odoo,csrocha/OpenUpgrade,NeovaHealth/odoo,colinnewell/odoo,grap/OCB,provaleks/o8,oasiswork/odoo,mmbtba/odoo,dezynetechnologies/odoo,draugiskisprendimai/odoo,nagyistoce/odoo-dev-odoo,patmcb/odoo,odoo-turkiye/odoo,dkubiak789/odoo
addons/l10n_lu/__openerp__.py
addons/l10n_lu/__openerp__.py
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2011 Thamini S.à.R.L (<http://www.thamini.com>) # Copyright (C) 2011 ADN Consultants S.à.R.L (<http://www.adn-luxembourg.com>) # Copyright (C) 2012-today OpenERP SA (<http://openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Luxembourg - Accounting', 'version': '1.0', 'category': 'Localization/Account Charts', 'description': """ This is the base module to manage the accounting chart for Luxembourg. ====================================================================== * the Luxembourg Official Chart of Accounts (law of June 2009 + 2011 chart and Taxes), * the Tax Code Chart for Luxembourg * the main taxes used in Luxembourg * default fiscal position for local, intracom, extracom """, 'author': 'OpenERP SA & ADN', 'website': 'http://www.openerp.com http://www.adn-luxembourg.com', 'depends': ['account', 'base_vat', 'base_iban'], 'init_xml': [], 'update_xml': [ # basic accounting data 'account.account.type-2011.csv', 'account.account.template-2011.csv', 'account.tax.code.template-2011.csv', 'account.chart.template-2011.csv', 'account.tax.template-2011.csv', # Change BRE: adds fiscal position 'account.fiscal.position.template-2011.csv', 'account.fiscal.position.tax.template-2011.csv', # configuration wizard, views, reports... 'l10n_lu_wizard.xml', 'l10n_lu_view.xml', 'wizard/print_vat_view.xml' ], 'test': ['test/l10n_lu_report.yml'], 'demo_xml': [], 'installable': True, 'auto_install': False, 'certificate': '0078164766621', 'images': ['images/config_chart_l10n_lu.jpeg','images/l10n_lu_chart.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2011 Thamini S.à.R.L (<http://www.thamini.com>) # Copyright (C) 2011 ADN Consultants S.à.R.L (<http://www.adn-luxembourg.com>) # Copyright (C) 2012-today OpenERP SA (<http://openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Luxembourg - Accounting', 'version': '1.0', 'category': 'Localization/Account Charts', 'description': """ This is the base module to manage the accounting chart for Luxembourg. ====================================================================== * the Luxembourg Official Chart of Accounts (law of June 2009 + 2011 chart and Taxes), * the Tax Code Chart for Luxembourg * the main taxes used in Luxembourg * default fiscal position for local, intracom, extracom """, 'author': 'OpenERP SA & ADN', 'website': 'http://www.openerp.com http://www.adn-luxembourg.com', 'depends': ['account', 'base_vat', 'base_iban'], 'init_xml': [], 'update_xml': [ # basic accounting data 'account.account.type-2011.csv', 'account.account.template-2011.csv', 'account.tax.code.template-2011.csv', 'account.chart.template-2011.csv', 'account.tax.template-2011.csv', # Change BRE: adds fiscal position 'account.fiscal.position.template-2011.csv', 'account.fiscal.position.tax.template-2011.csv', # configuration wizard, views, reports... 'l10n_lu_wizard.xml', 'account.tax.template.csv', 'l10n_lu_view.xml', 'wizard/print_vat_view.xml' ], 'test': ['test/l10n_lu_report.yml'], 'demo_xml': [], 'installable': True, 'auto_install': False, 'certificate': '0078164766621', 'images': ['images/config_chart_l10n_lu.jpeg','images/l10n_lu_chart.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Python
c0ee3bb87a26a57bc7dc1bd4e1aaf6136f94bc17
Add missing filters.py file in organizations
ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org
ain7/organizations/filters.py
ain7/organizations/filters.py
# -*- coding: utf-8 """ ain7/organizations/filters.py """ # # Copyright © 2007-2015 AIn7 Devel Team # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # # import django_filters from ain7.organizations.models import Organization class OrganizationFilter(django_filters.FilterSet): class Meta: model = Organization fields = { 'name': ['icontains'], 'activity_field': ['icontains'], }
lgpl-2.1
Python
f56181aaf6df758abb988d10c757c6eba72d5025
write beginning of method for storing probabilities in a hash
hollabaq86/haikuna-matata,hollabaq86/haikuna-matata,hollabaq86/haikuna-matata
parser.py
parser.py
import re probabilityHash = {[], ""} #[word1, word2], count def parseIntoProbabilityHash(text): stripPunctuation = re.sub(ur"[^\w\d'\s]+",' ',text) wordsInText = stripPunctuation.split() n = 0 for word in wordsInText: probabilityHash[wordsInText[n]] = 1 return probabilityHash
mit
Python
5e54e5ebf9add6d8bd879d963803ee57fd591f4b
Write new Preparation tests
osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
whats_fresh/whats_fresh_api/tests/views/entry/test_new_preparation.py
whats_fresh/whats_fresh_api/tests/views/entry/test_new_preparation.py
from django.test import TestCase from django.core.urlresolvers import reverse from whats_fresh_api.models import * from django.contrib.gis.db import models import json class NewPreparationTestCase(TestCase): """ Test that the New Preparation page works as expected. Things tested: URLs reverse correctly The outputted page has the correct form fields POSTing "correct" data will result in the creation of a new object with the specified details POSTing data with all fields missing (hitting "save" without entering data) returns the same field with notations of missing fields """ def test_url_endpoint(self): url = reverse('new-preparation') self.assertEqual(url, '/entry/preparations/new') def test_form_fields(self): """ Tests to see if the form contains all of the right fields """ response = self.client.get(reverse('new-preparation')) fields = {'name': 'input', 'description': 'input', 'additional_info': 'select'} form = response.context['preparation_form'] for field in fields: # for the Edit tests, you should be able to access # form[field].value self.assertIn(fields[field], str(form[field])) def test_successful_preparation_creation_minimal(self): """ POST a proper "new preparation" command to the server, and see if the new preparation appears in the database. All optional fields are null. """ Preparation.objects.all().delete() # Data that we'll post to the server to get the new preparation created new_preparation = { 'name': 'Fried', 'description': '', 'additional_info': ''} response = self.client.post(reverse('new-preparation'), new_preparation) preparation = Preparation.objects.all()[0] for field in new_preparation: self.assertEqual( getattr(preparation, field), new_preparation[field]) def test_successful_preparation_creation_maximal(self): """ POST a proper "new preparation" command to the server, and see if the new preparation appears in the database. All optional fields are used. """ Preparation.objects.all().delete() # Data that we'll post to the server to get the new preparation created new_preparation = { 'name': 'Fried', 'description': 'Test Description', 'additional_info': 'Fried food is good'} response = self.client.post(reverse('new-preparation'), new_preparation) preparation = Preparation.objects.all()[0] for field in new_preparation: self.assertEqual( getattr(preparation, field), new_preparation[field]) def test_no_data_error(self): """ POST a "new preparation" command to the server missing all of the required fields, and test to see what the error comes back as. """ # Create a list of all objects before sending bad POST data all_preparations = Preparation.objects.all() response = self.client.post(reverse('new-preparation')) required_fields = ['name'] for field_name in required_fields: self.assertIn(field_name, response.context['preparation_form'].errors) # Test that we didn't add any new objects self.assertEqual( list(Preparation.objects.all()), list(all_preparations))
apache-2.0
Python
4d92b111eecd3ce938676edee36b288c42484905
test scraper for UKÄ
jplusplus/statscraper
statscraper/scrapers/uka_scraper.py
statscraper/scrapers/uka_scraper.py
# encoding: utf-8 u""" A scraper to fetch Swedish university application statistics from the Swedish Higher Education Authority (Universitetskanslerämbetet, UKÄ), at http://statistik.uka.se """ from statscraper import BaseScraper, Dataset, Dimension, Result, Collection import requests from bs4 import BeautifulSoup class UKA(BaseScraper): def _fetch_itemslist(self, item): """ We only offer regional application stats. Other collections are differently structured. """ if item.is_root: yield Collection("regional", label="New students by area and school.") else: yield Dataset("county", label="New students by county, school and semester.") def _fetch_dimensions(self, dataset): """ Declaring available dimensions like this is not mandatory, but nice, especially if they differ from dataset to dataset. If you are using a built in datatype, you can specify the dialect you are expecting, to have values normalized. This scraper will look for Swedish month names (e.g. 'Januari'), but return them according to the Statscraper standard ('january'). """ yield Dimension(u"school") yield Dimension(u"semester") yield Dimension(u"year", datatype="year") yield Dimension(u"semester", datatype="academic_term", dialect="swedish") def _fetch_data(self, dataset, query=None): url = "http://statistik.uka.se/4.5d85793915901d205f935d0f.12.5d85793915901d205f965eab.portlet?action=resultat&view=resultTable&frageTyp=3&frageNr=240&tid=%s&grupp1=%s&grupp2=%s" terms = [6] counties = [{ 'id': "10", 'municipalities': ["80"] }, ] for t in terms: for c in counties: for m in c["municipalities"]: html = requests.get(url % (t, c, m["id"])).text soup = BeautifulSoup(html, 'html.parser') table = soup.find("table") row = table.find_all("tr")[5:] cells = row.find_all("td") print cells[0].text, print cells[2].text """ yield Result(value.text.encode("utf-8"), { "date": date, "month": month, "year": years[i], }) """
mit
Python
d2762f81a9f8ed405ca5fc9d567004af182d137b
add importer for delimited data
Arabidopsis-Information-Portal/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,SuLab/jbrowse,GMOD/jbrowse,limeng12/jbrowse,Arabidopsis-Information-Portal/jbrowse,limeng12/jbrowse,GreggHelt2/apollo-test,nathandunn/jbrowse,GreggHelt2/apollo-test,erasche/jbrowse,GreggHelt2/apollo-test,SuLab/jbrowse,nathandunn/jbrowse,Arabidopsis-Information-Portal/jbrowse,nathandunn/jbrowse,limeng12/jbrowse,erasche/jbrowse,GMOD/jbrowse,GreggHelt2/apollo-test,igemsoftware/Shenzhen_BGIC_0101_2013,nathandunn/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,erasche/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,limeng12/jbrowse,SuLab/jbrowse,nathandunn/jbrowse,erasche/jbrowse,Arabidopsis-Information-Portal/jbrowse,limeng12/jbrowse,Arabidopsis-Information-Portal/jbrowse,GMOD/jbrowse,GreggHelt2/apollo-test,GMOD/jbrowse,erasche/jbrowse,Arabidopsis-Information-Portal/jbrowse,GreggHelt2/apollo-test,igemsoftware/Shenzhen_BGIC_0101_2013,erasche/jbrowse,limeng12/jbrowse,SuLab/jbrowse,erasche/jbrowse,GreggHelt2/apollo-test,igemsoftware/Shenzhen_BGIC_0101_2013,igemsoftware/Shenzhen_BGIC_0101_2013,limeng12/jbrowse,GMOD/jbrowse,erasche/jbrowse,SuLab/jbrowse,SuLab/jbrowse,SuLab/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,limeng12/jbrowse,Arabidopsis-Information-Portal/jbrowse
python/delim_import.py
python/delim_import.py
from json_generator import JsonGenerator, writeTrackEntry def delimImport(file, skipLines, colNames, dataDir, trackLabel, key = None, delim = "\t", chunkBytes = 200000, compress = True, config = {'style': {'className': 'feature2'}} ): fh = open(file, 'r') data = [line.split(delim) for line in fh.readlines()] fh.close() startIndex = colNames.index("Start") endIndex = colNames.index("End") chromIndex = colNames.index("Chrom") for item in data: item[startIndex] = int(item[startIndex]) item[endIndex] = int(item[endIndex]) def nclCmp(a, b): if a[startIndex] == b[startIndex]: return b[endIndex] - a[endIndex] return a[startIndex] - b[startIndex] data.sort(nclCmp) curRef = None jsongen = None for item in data: if item[chromIndex] != curRef: if jsongen is not None: jsongen.generateTrack() curRef = item[chromIndex] classMeta = [{'attributes': colNames, 'proto': {'Chrom': item[chromIndex]} } ] jsongen = JsonGenerator(dataDir, trackLabel, item[chromIndex], chunkBytes, compress, classMeta, key) jsongen.addSorted([0] + item) if (jsongen is not None) and (jsongen.hasFeatures): jsongen.generateTrack() #attrs = ArrayRepr config['urlTemplate'] = jsongen.urlTemplate writeTrackEntry(dataDir, 'FeatureTrack', trackLabel, key if key is not None else trackLabel, config)
lgpl-2.1
Python
51a5c7626b634687be57c3e6ed05ea07f6468ad0
add analyzer test
Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide
timeside/tests/api/test_analyzer.py
timeside/tests/api/test_analyzer.py
# -*- coding: utf-8 -*- import timeside from sys import stdout import os.path import numpy class TestAnalyzer: graphers = timeside.core.processors(timeside.api.IGrapher) decoders = timeside.core.processors(timeside.api.IDecoder) encoders= timeside.core.processors(timeside.api.IEncoder) analyzers = timeside.core.processors(timeside.api.IAnalyzer) def __init__(self, path): self.source = os.path.join(os.path.dirname(__file__), path) print "Processing %s" % self.source self.decoder = timeside.decoder.FileDecoder(self.source) print 'format: ', self.decoder.format() self.pipe = self.decoder self.analyzers_sub_pipe = [] def process(self): for analyzer in self.analyzers: sub_pipe = analyzer() self.analyzers_sub_pipe.append(sub_pipe) self.pipe = self.pipe | sub_pipe self.pipe.run() def results(self): analyzers = [] for analyzer in self.analyzers_sub_pipe: value = analyzer.result() analyzers.append({'name':analyzer.name(), 'id':analyzer.id(), 'unit':analyzer.unit(), 'value':str(value)}) print analyzers test = TestAnalyzer('../samples/guitar.wav') #test = TestAnalyzer('/mnt/data4/Music1/Cellar_playlist_tmp/JanoB/VirulentAcidMix.wav') test.process() test.results()
agpl-3.0
Python
b634e5966c48299eda8cc9a3dcd4e8f769df6812
Create 5kyu_tree_to_list.py
Orange9000/Codewars,Orange9000/Codewars
Solutions/5kyu/5kyu_tree_to_list.py
Solutions/5kyu/5kyu_tree_to_list.py
class Node: def __init__(self, data, child_nodes=None): self.data = data self.child_nodes = child_nodes def tree_to_list(tr): call = to_list(tr, 0, []) return call def to_list(tr, depth, res): res.append([tr.data, depth]) if tr.child_nodes: for i in tr.child_nodes: to_list(i, depth+1, res) return [i[0] for i in sorted(res, key = lambda x: x[1])]
mit
Python
f1cb1cb0cdcf7ef3d5d0e286bfbd9d9664239098
Create 6kyu_alphabetized.py
Orange9000/Codewars,Orange9000/Codewars
Solutions/6kyu/6kyu_alphabetized.py
Solutions/6kyu/6kyu_alphabetized.py
def alphabetized(s): return ''.join(s for s in sorted(s, key=lambda s: s.lower()) if s.isalpha())
mit
Python
0f55bd7e100dca1ef94dfe2f47b0f46774197e3f
Create cbus.py
drmcinnes/python-C-BUS
cbus.py
cbus.py
#!/usr/bin/python3 #console command for lighting control of c-bus network #add command line switches for changing the default ip and port #add option for immediate return i.e. dont wait for return codes #cbus on 6, cbus off 7, cbus ramp 7m 100 #parse command line, convert time to closest value # Copyright 2014 Darren McInnes codemonkey[at}archer.com(dot]au # # Permission to use, copy, modify, distribute this # software and its documentation for any purpose is hereby granted # without fee, provided that the above copyright notice appear in # all copies and that both that the copyright notice and this # permission notice and warranty disclaimer appear in supporting # documentation, and that the name of the author not be used in # advertising or publicity pertaining to distribution of the # software without specific, written prior permission. # The author disclaims all warranties with regard to this # software, including all implied warranties of merchantability # and fitness due to it being crap. In no event shall the author # be liable for any special, indirect or consequential damages or # any damages whatsoever resulting from loss of use, data or profits, # whether in an action of contract, negligence, arising out of or in # connection with the use or performance of this software. import os import sys #handles command line arguments import socket import time import argparse parser = argparse.ArgumentParser() parser.add_argument("command", choices=["off", "on", "ramp"], help="off/on/ramp") parser.add_argument("group", type=int, choices=range(0,254), help="group between 0 and 254") parser.add_argument("-a", "--address", default="192.168.0.105", help="network address of c-gate server") parser.add_argument("-p", "--port",type=int, default="20023", help="command port number") parser.add_argument("-n", "--net", type=int, default="254", help="c-bus network number") parser.add_argument("-l", "--lighting", type=int, default="56", help="c-bus application number") parser.add_argument("-r", "--ramp", type=int, default="0", help="ramp speed 0s to 17m") #parser.add_argument("-p", "--level", type=int, default="100", help="level") args = parser.parse_args() #print (args.echo) #print (args.gr if args.command=="ramp": x = socket.socket(socket.AF_INET, socket.SOCK_STREAM) x.connect((args.address, args.port)) data = x.recv(4096) x.sendall(bytes(args.command+' '+str(args.net)+'/'+str(args.lighting)+'/'+str(args.group)+'\n','UTF-8')) # time.sleep(.1) data = x.recv(4096) x.close() else: x = socket.socket(socket.AF_INET, socket.SOCK_STREAM) x.connect((args.address, args.port)) data = x.recv(4096) x.sendall(bytes(args.command+' '+str(args.net)+'/'+str(args.lighting)+'/'+str(args.group)+'\n','UTF-8')) # time.sleep(.1) data = x.recv(4096) x.close() print(data)
mit
Python
2eddc73e2d7b78fbfac521eb1e6014ca26421510
Add forgotten migration
Cartocite/osmada
osmdata/migrations/0012_auto_20170829_1539.py
osmdata/migrations/0012_auto_20170829_1539.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.4 on 2017-08-29 15:39 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('osmdata', '0011_auto_20170824_1521'), ] operations = [ migrations.AlterField( model_name='osmelement', name='bounds', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='osmdata.Bounds'), ), ]
agpl-3.0
Python
d00243d9500118400f7e08409d9564b15b2b4148
Add trivial CLI example
AlienVault-Labs/OTX-Python-SDK
examples/cliExample.py
examples/cliExample.py
# Very Simple CLI example from OTXv2 import OTXv2 import IndicatorTypes import argparse # Your API key API_KEY = '' OTX_SERVER = 'https://otx.alienvault.com/' otx = OTXv2(API_KEY, server=OTX_SERVER) parser = argparse.ArgumentParser(description='Description of your program') parser.add_argument('-i', '--ip', help='IP eg; 4.4.4.4', required=False) parser.add_argument( '-d', '--domain', help='Domain eg; alienvault.com', required=False) parser.add_argument('-ho', '--hostname', help='Hostname eg; www.alienvault.com', required=False) parser.add_argument( '-u', '--url', help='URL eg; http://www.alienvault.com', required=False) parser.add_argument( '-m', '--md5', help='MD5 Hash of a file eg; 7b42b35832855ab4ff37ae9b8fa9e571', required=False) parser.add_argument( '-p', '--pulse', help='Search pulses for a string eg; Dridex', required=False) parser.add_argument('-s', '--subscribed', help='Get pulses you are subscribed to', required=False, action='store_true') args = vars(parser.parse_args()) if args["ip"]: print (str(otx.get_indicator_details_full(IndicatorTypes.IPv4, args["ip"]))) if args["domain"]: print (str(otx.get_indicator_details_full(IndicatorTypes.DOMAIN, args["domain"]))) if args["hostname"]: print (str(otx.get_indicator_details_full(IndicatorTypes.HOSTNAME, args["hostname"]))) if args["url"]: print (str(otx.get_indicator_details_full(IndicatorTypes.URL, args["url"]))) if args["md5"]: print (str(otx.get_indicator_details_full(IndicatorTypes.FILE_HASH_MD5, args["md5"]))) if args["pulse"]: result = otx.search_pulses(args["pulse"]) print (str(result.get('results'))) if args["subscribed"]: print (str(otx.getall(max_page=3, limit=5)))
apache-2.0
Python
ecc8a93ddda784102311ebfd4c3c93624f356778
Add migration to add strip_html sql function
Connexions/cnx-archive,Connexions/cnx-archive
cnxarchive/sql/migrations/20160723123620_add_sql_function_strip_html.py
cnxarchive/sql/migrations/20160723123620_add_sql_function_strip_html.py
# -*- coding: utf-8 -*- def up(cursor): cursor.execute("""\ CREATE OR REPLACE FUNCTION strip_html(html_text TEXT) RETURNS text AS $$ import re return re.sub('<[^>]*?>', '', html_text, re.MULTILINE) $$ LANGUAGE plpythonu IMMUTABLE; """) def down(cursor): cursor.execute("DROP FUNCTION IF EXISTS strip_html(TEXT)")
agpl-3.0
Python
0f5b15a1f909c79b40a3f2655d00bc7852d41847
add missing migration
geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx
conversion_service/conversion_job/migrations/0003_auto_20151120_1528.py
conversion_service/conversion_job/migrations/0003_auto_20151120_1528.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('conversion_job', '0002_auto_20151119_1332'), ] operations = [ migrations.AlterField( model_name='conversionjob', name='status', field=models.CharField(max_length=20, verbose_name='job status', default='new', choices=[('error', 'error'), ('new', 'new'), ('queued', 'queued'), ('started', 'started'), ('done', 'done')]), ), migrations.AlterField( model_name='gisformat', name='progress', field=models.CharField(max_length=20, verbose_name='progress', default='new', choices=[('error', 'error'), ('new', 'new'), ('received', 'received'), ('started', 'started'), ('successful', 'successful')]), ), ]
mit
Python
ed45aa20bc54714c6eb355417520c3d90a6b47fc
Add init.py
qqbuby/readthedocs-docker
init.py
init.py
#!/usr/bin/env python import os import sys import django os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'readthedocs.settings.dev') sys.path.append(os.getcwd()) django.setup() from django.contrib.auth.models import User admin = User.objects.create_user('admin', '', 'admin') admin.is_superuser = True admin.is_staff = True admin.save() test = User.objects.create_user('test', '', 'test') test.is_staff = True test.save()
mit
Python
67d86229279e979d8ef5ac54e5ed8ca85c32ff2e
add another sample script (multiple.py).
maximumG/exscript,knipknap/exscript,maximumG/exscript,knipknap/exscript
demos/multiple.py
demos/multiple.py
#!/usr/bin/env python from Exscript import Host from Exscript.util.interact import read_login from Exscript.util.template import eval_file from Exscript.util.start import start def one(conn): conn.open() conn.authenticate() conn.autoinit() conn.execute('show ip int brie') def two(conn): eval_file(conn, 'mytemplate.exscript', interface = 'POS1/0') account = read_login() # Start on one host. host1 = Host('localhost') host1.set('myvariable', 'foobar') start(account, host1, one) # Start on another. host2 = Host('otherhost1') host3 = Host('otherhost2') start(account, [host1, host2], two)
mit
Python
3704654e704c0595e933f4ab2832e945816afde8
Add setup.py file
schmidt4brains/Examples,schmidt4brains/Examples,AquaticInformatics/Examples,schmidt4brains/Examples,schmidt4brains/Examples,AquaticInformatics/Examples,schmidt4brains/Examples,AquaticInformatics/Examples,AquaticInformatics/Examples,AquaticInformatics/Examples
TimeSeries/PublicApis/Python/setup.py
TimeSeries/PublicApis/Python/setup.py
from setuptools import setup setup( name="aquarius-timeseries-client", py_modules=["timeseries_client"], version="0.1", description="Python client for Aquarius TimeSeries API", long_description=open("README.md").read(), long_description_content_type="text/markdown", url="https://github.com/AquaticInformatics/Examples", install_requires=( "requests", "pyrfc3339" ) )
apache-2.0
Python
42e1447db973cce539353912eada05b26870bae6
Add serial test connection.
salkinium/bachelor,salkinium/bachelor,salkinium/bachelor
experiment_control/test_serial_connection.py
experiment_control/test_serial_connection.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import os, sys, time import logging sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'tinyos', 'support', 'sdk', 'python')) from tinyos.message import * from tinyos.message.Message import * from tinyos.message.SerialPacket import * from tinyos.packet.Serial import Serial from messages import * class Connection(object): def __init__(self, device=None): super(Connection, self).__init__() self.logger = logging.getLogger('Connection.({})'.format(device)) self.logger.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') # console logging ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) ch.setFormatter(formatter) self.logger.addHandler(ch) self.mif = MoteIF.MoteIF() self.device = device self.tos_source = self.mif.addSource("serial@" + device) self.mif.addListener(self, SerialMessage.SerialMessage) self.mif.addListener(self, RadioMessage.RadioMessage) self.mif.addListener(self, SensorMessage.SensorMessage) self.logger.info("listening") self.temperature = 0 self.humidity = 0 def receive(self, src, msg): if msg.get_amType() == SensorMessage.AM_TYPE: m = SensorMessage.SensorMessage(msg.dataGet()) self.temperature = m.get_temperature()*0.01 - 40.1 linear_humidity = -2.0468 + 0.0367 * m.get_humidity() + (-1.5955e-6 * m.get_humidity())**2 self.humidity = (self.temperature - 25) * (0.01 + 0.00008 * m.get_humidity()) + linear_humidity self.logger.debug("SensorMessage: NodeId={}, Temp={:.1f}C, Hum={:.1f}%" \ .format(m.get_nodeid(), self.temperature, self.humidity)) elif msg.get_amType() == SerialMessage.AM_TYPE: m = SerialMessage.SerialMessage(msg.dataGet()) self.logger.info("SerialMessage: {}".format(str(m))) elif msg.get_amType() == RadioMessage.AM_TYPE: m = RadioMessage.RadioMessage(msg.dataGet()) self.logger.info("RadioMessage: {}".format(str(m))) else: self.logger.warn("Unknown Message: {}".format(str(msg))) def transmit(self, addr, msg): self.logger.info("Transmitting: addr={} {}".format(addr, msg)) self.mif.sendMsg(self.tos_source, addr, msg.get_amType(), 0, msg) if __name__ == "__main__": sender = Connection("/dev/ttyUSB3:telos") receiver = Connection("/dev/ttyUSB1:telos") data = [0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9, 0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9, 0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,7,8,9] # data = [0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88, # 0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88, # 0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88,0x88] rawData = chr(0)*20 + "".join(map(chr, data)) tx = SerialMessage.SerialMessage() tx.set_header_channel(26) tx.set_header_type(SerialMessage.SerialMessage.get_amType()) tx.set_header_power(3) tx.set_header_len(len(data)) tx.set_header_nodeid(0) tx.set_data(data) print tx.data time.sleep(1) sender.transmit(1, tx) while(1): pass
bsd-2-clause
Python
da22d8dffadbb4713e715aca7918942f445090c9
embed video form and model fields
hellhovnd/django-embed-video,mpachas/django-embed-video,mpachas/django-embed-video,hellhovnd/django-embed-video,yetty/django-embed-video,jazzband/django-embed-video,yetty/django-embed-video,jazzband/django-embed-video
embed_video/fields.py
embed_video/fields.py
from django.db import models from django import forms from django.utils.translation import ugettext_lazy as _ from .base import detect_backend __all__ = ('EmbedVideoField', 'EmbedVideoFormField') class EmbedVideoField(models.URLField): def formfield(self, **kwargs): defaults = {'form_class': EmbedVideoFormField} defaults.update(kwargs) return super(EmbedVideoField, self).formfield(**defaults) class EmbedVideoFormField(forms.URLField): def validate(self, url): super(EmbedVideoFormField, self).validate(url) try: detect_backend(url) except: raise forms.ValidationError(_(u'URL could not be recognized.')) return url
mit
Python
b81028067cf65b2ee3a155d081e7983a1de70d5f
Add mistakenly omitted migrations
recklessromeo/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,RickMohr/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,maurizi/otm-core,RickMohr/otm-core,RickMohr/otm-core
opentreemap/treemap/migrations/0005_auto_20150729_1046.py
opentreemap/treemap/migrations/0005_auto_20150729_1046.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('treemap', '0004_auto_20150720_1523'), ] operations = [ migrations.AlterField( model_name='fieldpermission', name='permission_level', field=models.IntegerField(default=0, choices=[(0, 'Invisible'), (1, 'Read Only'), (2, 'Pending Write Access'), (3, 'Full Write Access')]), ), migrations.AlterField( model_name='role', name='default_permission', field=models.IntegerField(default=0, choices=[(0, 'Invisible'), (1, 'Read Only'), (2, 'Pending Write Access'), (3, 'Full Write Access')]), ), ]
agpl-3.0
Python
1fa74f6a6a5faeb9579c889df32e4bfe8d6908df
Add migration
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
fat/migrations/0059_event_extra_sponsored.py
fat/migrations/0059_event_extra_sponsored.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2016-08-08 10:16 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('fat', '0058_auto_20160808_1007'), ] operations = [ migrations.AddField( model_name='event', name='extra_sponsored', field=models.TextField(blank=True), ), ]
bsd-3-clause
Python
62c70b301ffc1e178c3bd54bd81291876b3883ea
Add simple linear interpolation filling.
lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment
analysis/03-fill-dropouts-linear.py
analysis/03-fill-dropouts-linear.py
#!/usr/bin/env python from __future__ import division import climate import lmj.cubes import lmj.cubes.fill import numpy as np import pandas as pd logging = climate.get_logger('fill') def fill(dfs, window): '''Complete missing marker data using linear interpolation. This method alters the given `dfs` in-place. Parameters ---------- dfs : list of pd.DataFrame Frames of source data. The frames will be stacked into a single large frame to use during SVT. This stacked frame will then be split and returned. window : int Model windows of this many consecutive frames. ''' df = lmj.cubes.fill.stack(dfs, window) centers = lmj.cubes.fill.center(df) pos, _, _ = lmj.cubes.fill.window(df, window, interpolate=True) lmj.cubes.fill.update(df, pos, window) lmj.cubes.fill.restore(df, centers) lmj.cubes.fill.unstack(df, dfs) def main(args): lmj.cubes.fill.main(args, lambda ts: fill([t.df for t in ts], args.window)) if __name__ == '__main__': climate.call(main)
mit
Python
7942254131bcf005d5a5f1bb33ca7d1ffff1b311
Create keyAllCtrls.py
aaronfang/personal_scripts
af_scripts/blendshapes/keyAllCtrls.py
af_scripts/blendshapes/keyAllCtrls.py
import maya.cmds as cmds import maya.mel as mel cmds.select(cmds.ls('*:*.faceCtrl', o=1)) mel.eval('doSetKeyframeArgList 6 { "4","0","0","0","1","0","0","animationList","0","1","0" };')
mit
Python
f51c4abc95fda5504e7c7a5ad87355698798ddd1
create temporary streaming solution
benbroce3/PiCamServer,benbroce3/PiCamServer,benbroce3/PiCamServer,benbroce3/PiCamServer
temp_vidstream.py
temp_vidstream.py
import picamera with picamera.PiCamera() as camera: camera.resolution = (640, 480) camera.start_recording('vidstream.mp4') camera.wait_recording(60) camera.stop_recording()
mit
Python
89d27dd0a28f84c99930c0f1dad496e525f62272
migrate to namespace table
EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,nylas/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,rmasters/inbox,nylas/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,closeio/nylas,rmasters/inbox,gale320/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,rmasters/inbox,rmasters/inbox,closeio/nylas,ErinCall/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine
migrations/versions/28c0d6c2f887_add_namespaces.py
migrations/versions/28c0d6c2f887_add_namespaces.py
"""Add namespaces Revision ID: 28c0d6c2f887 Revises: 4323056c0b78 Create Date: 2013-10-14 22:18:29.705865 """ # revision identifiers, used by Alembic. revision = '28c0d6c2f887' down_revision = '4323056c0b78' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql def upgrade(): ### commands auto generated by Alembic - please adjust! ### # op.create_table('namespaces', # sa.Column('id', sa.Integer(), nullable=False), # sa.Column('user_id', sa.Integer(), nullable=False), # sa.PrimaryKeyConstraint('id') # ) op.alter_column(u'foldermeta', u'user_id', new_column_name='namespace_id', existing_type=mysql.INTEGER(display_width=11)) op.alter_column(u'foldermeta', 'folder_name', existing_type=mysql.VARCHAR(length=255), nullable=False) op.alter_column(u'foldermeta', 'msg_uid', existing_type=mysql.INTEGER(display_width=11), nullable=False) op.alter_column(u'messagemeta', u'user_id', new_column_name='namespace_id', existing_type=mysql.INTEGER(display_width=11)) op.alter_column(u'rawmessage', u'user_id', new_column_name='namespace_id', existing_type=mysql.INTEGER(display_width=11)) op.alter_column(u'uidvalidity', u'user_id', new_column_name='namespace_id', existing_type=mysql.INTEGER(display_width=11)) op.add_column(u'users', sa.Column('root_namespace', sa.Integer(), nullable=False)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column(u'users', 'root_namespace') op.add_column(u'uidvalidity', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False)) op.drop_column(u'uidvalidity', 'namespace_id') op.add_column(u'rawmessage', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False)) op.drop_column(u'rawmessage', 'namespace_id') op.add_column(u'messagemeta', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False)) op.drop_column(u'messagemeta', 'namespace_id') op.alter_column(u'foldermeta', 'msg_uid', existing_type=mysql.INTEGER(display_width=11), nullable=True) op.alter_column(u'foldermeta', 'folder_name', existing_type=mysql.VARCHAR(length=255), nullable=True) op.add_column(u'foldermeta', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False)) op.drop_column(u'foldermeta', 'namespace_id') op.drop_table('namespaces') ### end Alembic commands ### # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
agpl-3.0
Python
55f2325354724cfe8b90324038daf2c1acaa916a
Add unit tests for OpenStack config defaults
SUSE/teuthology,dmick/teuthology,ceph/teuthology,SUSE/teuthology,dmick/teuthology,robbat2/teuthology,ktdreyer/teuthology,dmick/teuthology,caibo2014/teuthology,robbat2/teuthology,ktdreyer/teuthology,dreamhost/teuthology,SUSE/teuthology,caibo2014/teuthology,ceph/teuthology,dreamhost/teuthology
teuthology/openstack/test/test_config.py
teuthology/openstack/test/test_config.py
from teuthology.config import config class TestOpenStack(object): def setup(self): self.openstack_config = config['openstack'] def test_config_clone(self): assert 'clone' in self.openstack_config def test_config_user_data(self): os_type = 'rhel' os_version = '7.0' template_path = self.openstack_config['user-data'].format( os_type=os_type, os_version=os_version) assert os_type in template_path assert os_version in template_path def test_config_ip(self): assert 'ip' in self.openstack_config def test_config_machine(self): assert 'machine' in self.openstack_config machine_config = self.openstack_config['machine'] assert 'disk' in machine_config assert 'ram' in machine_config assert 'cpus' in machine_config def test_config_volumes(self): assert 'volumes' in self.openstack_config volumes_config = self.openstack_config['volumes'] assert 'count' in volumes_config assert 'size' in volumes_config
mit
Python
526d58fb917a4e098018f733b4c0b254417140b4
Add @log_route decorator
lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper
keeper/logutils.py
keeper/logutils.py
"""Logging helpers and utilities. """ __all__ = ['log_route'] from functools import wraps from timeit import default_timer as timer import uuid from flask import request, make_response import structlog def log_route(): """Route decorator to initialize a thread-local logger for a route. """ def decorator(f): @wraps(f) def decorated_function(*args, **kwargs): # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( status=response.status_code, response_time=end_time - start_time) return response return decorated_function return decorator
mit
Python
3f3115a0a9c7407820b3b10c06dcfa4f92ac6e57
Add owned book scaffold
mdzhang/goodreads-api-client-python
goodreads_api_client/resources/owned_book.py
goodreads_api_client/resources/owned_book.py
# -*- coding: utf-8 -*- """Module containing owned book resource class.""" from goodreads_api_client.exceptions import OauthEndpointNotImplemented from goodreads_api_client.resources.base import Resource class OwnedBook(Resource): def create(self): raise OauthEndpointNotImplemented('owned_book.compare') def destroy(self): raise OauthEndpointNotImplemented('owned_book.destroy') def list(self): raise OauthEndpointNotImplemented('owned_book.list') def show(self): raise OauthEndpointNotImplemented('owned_book.show') def update(self): raise OauthEndpointNotImplemented('owned_book.update')
mit
Python
5d99b7c2dfbfbb776716f2258d560bab2602531f
Create main.py
Otend/backlog
main.py
main.py
# -*- coding: utf-8 -*- #Backlog Manager #programmed by Ian Hitterdal (otend) #licensed under MIT license import work import random def addWork(medium): #input: valid medium string #user input: work title string #output: none #user output: none, really global workDict global mediumList if medium not in mediumList: print("Invalid medium, otend did something wrong") else: inName = input("What is the name of the work? ") workDict[medium].append(work.Work(inName)) def pickMedium(): #input: none #user input: integer to choose a medium from the list #output: valid medium string global mediumList print("Which medium would you like to use?") n = 1 for med in mediumList: print(n,". ",med) n = n+1 choice = int(input("Enter a number. ")) return mediumList[choice-1] def chooseWork(medium): #input: valid medium string #user input: affirmation of viewing #output: none #user output: work chosen global workDict valList = [] for item in workDict[medium]: if item.wasViewed == False: valList.append(item) if len(valList) == 0: print("No works.") else: a = random.choice(workDict[medium]) print("You should watch/play/whatever...") print(a.name,"\n") b = input("Did you watch it? y/n") if(b == "y"): a.wasViewed = True def listWork(medium): #Input: string that is in the medium list #output: none #user output: all entries present in the list for that medium. global workDict print("Here are the works registered for {}.",medium) for i in workDict[medium]: print(i) def watDo(): #input: none #user input: choice of task #output: none #user output: tasks available, other outputs dependent on validity of choice #valid: goodbye or none #invalid: error message print("What do you want to do?") print("1. Add a work.") print("2. Have a work chosen.") print("3. List works.") print("4. Quit.") choice = input("Enter a number.") if choice not in ["1","2","3","4"]: print("You have entered an invalid choice. Please try again.") watDo() elif choice == "4": print("Goodbye.") else: a = pickMedium() if(choice == "1"): addWork(a) watDo() elif(choice == "2"): chooseWork(a) watDo() else: listWork(a) watDo() mediumList = ["film", "game", "show", "comic", "book", "album"] workDict = dict() for n in mediumList: workDict[n] = list() print("Welcome to Backlog Manager 0.1 Pre-Alpha!") watDo()
bsd-2-clause
Python
f75d321b200217514cde901cc15cc2b798e3dcfe
Add new hipchat module
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
bumblebee/modules/hipchat.py
bumblebee/modules/hipchat.py
"""Displays the unread messages count for an HipChat user Requires the following library: * requests Parameters: * hipchat.token: HipChat user access token, the token needs to have the 'View Messages' scope. * hipchat.interval: Refresh interval in minutes (defaults to 5) """ import time import functools import bumblebee.input import bumblebee.output import bumblebee.engine try: import requests except ImportError: pass HIPCHAT_API_URL = "https://www.hipchat.com/v2/readstate?expand=items.unreadCount" class Module(bumblebee.engine.Module): def __init__(self, engine, config): super(Module, self).__init__(engine, config, bumblebee.output.Widget(full_text=self.output) ) self._count = 0 self._interval = int(self.parameter("interval", "5")) self._nextcheck = 0 self._requests = requests.Session() self._requests.headers.update({"Authorization":"Bearer {}".format(self.parameter("token", ""))}) immediate_update = functools.partial(self.update, immediate=True) engine.input.register_callback(self, button=bumblebee.input.RIGHT_MOUSE, cmd=immediate_update) def output(self, _): return str(self._count) def update(self, _, immediate=False): if immediate or self._nextcheck < int(time.time()): self._nextcheck = int(time.time()) + self._interval * 60 try: self._count = 0 items = self._requests.get(HIPCHAT_API_URL).json().get('items') self._count = sum([item.get('unreadCount').get('count') for item in items]) except Exception: self._count = "n/a" # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
mit
Python
786ed1d37ae5285bce1178d401d487233d4bd5b1
Add greater/less than tests
openstack/openstack-ansible-plugins,os-cloud/openstack-ansible-plugins,os-cloud/openstack-ansible-plugins,openstack/openstack-ansible-plugins
test/osa_tests.py
test/osa_tests.py
#!/usr/bin/env python # Copyright 2016, Rackspace US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Extra tests for jinja2 templates in Ansible.""" def greater_than(value, reference_value): """Return true if value > reference_value.""" return value > reference_value def less_than(value, reference_value): """Return true if value < reference_value.""" return value < reference_value class TestModule: """Main test class from Ansible.""" def tests(self): """Add these tests to the list of tests available to Ansible.""" return { 'greater_than': greater_than, 'less_than': less_than, }
apache-2.0
Python
0a3488915938de418ab0675f4cc051769b470927
Fix tab switching test on reference builds.
patrickm/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,jaruba/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,jaruba/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,dednal/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,Jonekee/chromium.src,patrickm/chromium.src,jaruba/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,Just-D/chromium-1,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,dednal/chromium.src,dednal/chromium.src,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,ltilve/chromium,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,anirudhSK/chromium,ondra-novak/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,patrickm/chromium.src,markYoungH/chromium.src,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,ondra-novak/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,patrickm/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,jaruba/chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,jaruba/chromium.src,ltilve/chromium,dednal/chromium.src,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,chuan9/chromium-crosswalk
tools/perf/measurements/tab_switching.py
tools/perf/measurements/tab_switching.py
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """The tab switching measurement. This measurement opens pages in different tabs. After all the tabs have opened, it cycles through each tab in sequence, and records a histogram of the time between when a tab was first requested to be shown, and when it was painted. """ from metrics import histogram_util from telemetry.core import util from telemetry.page import page_measurement from telemetry.page import page_runner # TODO: Revisit this test once multitab support is finalized. class TabSwitching(page_measurement.PageMeasurement): def CustomizeBrowserOptions(self, options): options.AppendExtraBrowserArg('--enable-stats-collection-bindings') options.AppendExtraBrowserArg('--dom-automation') options.AppendExtraBrowserArg('--reduce-security-for-dom-automation-tests') def CanRunForPage(self, page): return not page.page_set.pages.index(page) def DidNavigateToPage(self, page, tab): for i in xrange(1, len(page.page_set.pages)): t = tab.browser.tabs.New() page_state = page_runner.PageState() page_state.PreparePage(page.page_set.pages[i], t) page_state.ImplicitPageNavigation(page.page_set.pages[i], t) def MeasurePage(self, _, tab, results): """Although this is called MeasurePage, we're actually using this function to cycle through each tab that was opened via DidNavigateToPage and thenrecord a single histogram for the tab switching metric. """ histogram_name = 'MPArch.RWH_TabSwitchPaintDuration' histogram_type = histogram_util.BROWSER_HISTOGRAM first_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) prev_histogram = first_histogram for i in xrange(len(tab.browser.tabs)): t = tab.browser.tabs[i] t.Activate() def _IsDone(): cur_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) diff_histogram = histogram_util.SubtractHistogram( cur_histogram, prev_histogram) return diff_histogram util.WaitFor(_IsDone, 30) prev_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) last_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) diff_histogram = histogram_util.SubtractHistogram(last_histogram, first_histogram) results.AddSummary(histogram_name, '', diff_histogram, data_type='unimportant-histogram')
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """The tab switching measurement. This measurement opens pages in different tabs. After all the tabs have opened, it cycles through each tab in sequence, and records a histogram of the time between when a tab was first requested to be shown, and when it was painted. """ from metrics import histogram_util from telemetry.core import util from telemetry.page import page_measurement from telemetry.page import page_runner # TODO: Revisit this test once multitab support is finalized. class TabSwitching(page_measurement.PageMeasurement): def CustomizeBrowserOptions(self, options): options.AppendExtraBrowserArg('--enable-stats-collection-bindings') options.AppendExtraBrowserArg('--dom-automation') def CanRunForPage(self, page): return not page.page_set.pages.index(page) def DidNavigateToPage(self, page, tab): for i in xrange(1, len(page.page_set.pages)): t = tab.browser.tabs.New() page_state = page_runner.PageState() page_state.PreparePage(page.page_set.pages[i], t) page_state.ImplicitPageNavigation(page.page_set.pages[i], t) def MeasurePage(self, _, tab, results): """Although this is called MeasurePage, we're actually using this function to cycle through each tab that was opened via DidNavigateToPage and thenrecord a single histogram for the tab switching metric. """ histogram_name = 'MPArch.RWH_TabSwitchPaintDuration' histogram_type = histogram_util.BROWSER_HISTOGRAM first_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) prev_histogram = first_histogram for i in xrange(len(tab.browser.tabs)): t = tab.browser.tabs[i] t.Activate() def _IsDone(): cur_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) diff_histogram = histogram_util.SubtractHistogram( cur_histogram, prev_histogram) return diff_histogram util.WaitFor(_IsDone, 30) prev_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) last_histogram = histogram_util.GetHistogramFromDomAutomation( histogram_type, histogram_name, tab) diff_histogram = histogram_util.SubtractHistogram(last_histogram, first_histogram) results.AddSummary(histogram_name, '', diff_histogram, data_type='unimportant-histogram')
bsd-3-clause
Python
01d9134067852a1f9dfecf75f730f9fba14434e0
Add test_gradient_checker.py
tensor-tang/Paddle,pengli09/Paddle,hedaoyuan/Paddle,reyoung/Paddle,luotao1/Paddle,putcn/Paddle,lispc/Paddle,QiJune/Paddle,baidu/Paddle,jacquesqiao/Paddle,reyoung/Paddle,Canpio/Paddle,lcy-seso/Paddle,hedaoyuan/Paddle,baidu/Paddle,pengli09/Paddle,chengduoZH/Paddle,PaddlePaddle/Paddle,pkuyym/Paddle,reyoung/Paddle,PaddlePaddle/Paddle,pkuyym/Paddle,yu239/Paddle,jacquesqiao/Paddle,pengli09/Paddle,reyoung/Paddle,lispc/Paddle,PaddlePaddle/Paddle,Canpio/Paddle,lcy-seso/Paddle,luotao1/Paddle,luotao1/Paddle,chengduoZH/Paddle,putcn/Paddle,pkuyym/Paddle,putcn/Paddle,pengli09/Paddle,yu239/Paddle,yu239/Paddle,hedaoyuan/Paddle,PaddlePaddle/Paddle,hedaoyuan/Paddle,luotao1/Paddle,pengli09/Paddle,jacquesqiao/Paddle,hedaoyuan/Paddle,Canpio/Paddle,PaddlePaddle/Paddle,yu239/Paddle,lispc/Paddle,jacquesqiao/Paddle,pengli09/Paddle,lcy-seso/Paddle,Canpio/Paddle,pkuyym/Paddle,lispc/Paddle,lcy-seso/Paddle,baidu/Paddle,yu239/Paddle,chengduoZH/Paddle,Canpio/Paddle,tensor-tang/Paddle,Canpio/Paddle,hedaoyuan/Paddle,baidu/Paddle,QiJune/Paddle,tensor-tang/Paddle,jacquesqiao/Paddle,PaddlePaddle/Paddle,lcy-seso/Paddle,pengli09/Paddle,QiJune/Paddle,hedaoyuan/Paddle,lispc/Paddle,chengduoZH/Paddle,baidu/Paddle,lispc/Paddle,reyoung/Paddle,jacquesqiao/Paddle,PaddlePaddle/Paddle,Canpio/Paddle,Canpio/Paddle,yu239/Paddle,QiJune/Paddle,QiJune/Paddle,hedaoyuan/Paddle,pengli09/Paddle,reyoung/Paddle,putcn/Paddle,QiJune/Paddle,chengduoZH/Paddle,luotao1/Paddle,tensor-tang/Paddle,luotao1/Paddle,pkuyym/Paddle,luotao1/Paddle,pkuyym/Paddle,tensor-tang/Paddle,putcn/Paddle,lispc/Paddle,putcn/Paddle,lispc/Paddle,lcy-seso/Paddle,yu239/Paddle,yu239/Paddle
python/paddle/v2/framework/tests/test_gradient_checker.py
python/paddle/v2/framework/tests/test_gradient_checker.py
import unittest import numpy from paddle.v2.framework.op import Operator from gradient_checker import GradientChecker from gradient_checker import get_numeric_gradient class GetNumericGradientTest(unittest.TestCase): def test_add_op(self): add_op = Operator('add_two', X="X", Y="Y", Out="Z") x = numpy.random.random((10, 1)).astype("float32") y = numpy.random.random((10, 1)).astype("float32") arr = get_numeric_gradient(add_op, {'X': x, "Y": y}, 'Z', 'X') self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4) def test_softmax_op(self): def stable_softmax(x): """Compute the softmax of vector x in a numerically stable way.""" shiftx = x - numpy.max(x) exps = numpy.exp(shiftx) return exps / numpy.sum(exps) def label_softmax_grad(Y, dY): dX = Y * 0.0 for i in range(Y.shape[0]): d = numpy.dot(Y[i, :], dY[i, :]) dX[i, :] = Y[i, :] * (dY[i, :] - d) return dX softmax_op = Operator("softmax", X="X", Y="Y") X = numpy.random.random((2, 2)).astype("float32") Y = numpy.apply_along_axis(stable_softmax, 1, X) dY = numpy.ones(Y.shape) dX = label_softmax_grad(Y, dY) arr = get_numeric_gradient(softmax_op, {"X": X}, 'Y', 'X') numpy.testing.assert_almost_equal(arr, dX, decimal=1e-2) if __name__ == '__main__': unittest.main()
apache-2.0
Python
9779fc585d8d8d87580a47139742eb25bc52facd
Add new decorators module, move deprecated from utils over here
stoq/kiwi
kiwi/decorators.py
kiwi/decorators.py
# # Kiwi: a Framework and Enhanced Widgets for Python # # Copyright (C) 2005 Async Open Source # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 # USA # # Author(s): Johan Dahlin <[email protected]> # import gobject from kiwi import _warn class deprecated(object): def __init__(self, new): self._new = new def __call__(self, func): def wrapper(*args, **kwargs): _warn("%s is deprecated, use %s instead" % (func.__name__, self._new)) return func(*args, **kwargs) return wrapper class delayed(object): def __init__(self, delay): self._delay = delay self._timeout_id = -1 def __call__(self, func): def real_call(args, kwargs): func(*args, **kwargs) self._timeout_id = -1 return False def wrapper(*args, **kwargs): # Only one call at a time if self._timeout_id != -1: return self._timeout_id = gobject.timeout_add(self._delay, real_call, args, kwargs) return wrapper
lgpl-2.1
Python
9258451157de31f3ece7e18fcb8ae43c433239f4
add example to post files to Portals File System
exosite-garage/exosite_api_usage_examples
portals_api/upload_files_to_portals_file_system.py
portals_api/upload_files_to_portals_file_system.py
# Example that uploads a file to the Portals File System using Portals API # Access Level- Portals Domain Administrator # Note: Uses Python 'Requests' module for calling API # APIs: # - http://docs.exosite.com/portals/#update-file-content import requests import getpass directory = "images" #default directory name domain = "" #example: example.exosite.com user_email = "" #example: [email protected] - assume administrator access to Portals Domain Solution if domain == "": domain = raw_input('Enter Portals Domain (e.g. "example.exosite.com": ') if user_email == "": user_email = raw_input('Enter Your Email Address: ') user_password = getpass.getpass() #ask for password each time at prompt # Files to upload files = {"MyLogo.png":open("./MyLogo.png", "rb"), "MyOtherLogo.jpg":open("./MyOtherLogo.jpg", "rb") } url = "https://"+domain+"/api/portals/v1/fs/"+directory print 'Uploading files to ' + domain r = requests.post(url, files=files, auth=(user_email, user_password)) print("Status: ", r.status_code) r = requests.get(url) if r.status_code == 200: folder = r.json() for directory, filepath in folder.iteritems(): for filename, filetype in filepath.iteritems(): print("/".join([url,directory,filename]))
bsd-2-clause
Python
425d8ef0f439e9580c85e0dc04e5fe0c93cffddf
add 16
ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler
p016.py
p016.py
# 2**15 = 32768 and the sum of its digits is 3+2+7+6+8=26 # what is the sum of the digits of the number 2**1000? def f(n): return sum([ int(c) for c in str(2**n)]) print f(1000)
bsd-3-clause
Python
2b73467ccfbf6e29047223f1c1e3250916b6ffdb
add 23
ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler
p023.py
p023.py
from itertools import combinations_with_replacement def divisors(n): r = set() for i in range(1, n / 2): if n % i == 0: r.add(i) r.add(n / i) r.discard(n) return r abundant = filter(lambda n: sum(divisors(n)) > n, range(2, 29000)) u = set(range(1, 29000)) for i in combinations_with_replacement(abundant, 2): u.discard(sum(i)) print sum(u)
bsd-3-clause
Python
351f2779549add63963d4103fbe1b058dde59d85
Add stupid test to make Jenkins happy.
otmaneJai/Zipline,enigmampc/catalyst,davidastephens/zipline,nborggren/zipline,Scapogo/zipline,semio/zipline,keir-rex/zipline,jimgoo/zipline-fork,jordancheah/zipline,dmitriz/zipline,chrjxj/zipline,erikness/AlephOne,YuepengGuo/zipline,zhoulingjun/zipline,StratsOn/zipline,euri10/zipline,wilsonkichoi/zipline,joequant/zipline,humdings/zipline,CarterBain/AlephNull,DVegaCapital/zipline,StratsOn/zipline,dkushner/zipline,zhoulingjun/zipline,enigmampc/catalyst,aajtodd/zipline,wilsonkichoi/zipline,MonoCloud/zipline,AlirezaShahabi/zipline,sketchytechky/zipline,dkushner/zipline,quantopian/zipline,stkubr/zipline,wubr2000/zipline,chrjxj/zipline,ronalcc/zipline,michaeljohnbennett/zipline,iamkingmaker/zipline,stkubr/zipline,umuzungu/zipline,aajtodd/zipline,cmorgan/zipline,iamkingmaker/zipline,magne-max/zipline-ja,otmaneJai/Zipline,davidastephens/zipline,grundgruen/zipline,bartosh/zipline,keir-rex/zipline,bartosh/zipline,kmather73/zipline,alphaBenj/zipline,morrisonwudi/zipline,kmather73/zipline,AlirezaShahabi/zipline,dhruvparamhans/zipline,jimgoo/zipline-fork,erikness/AlephOne,YuepengGuo/zipline,joequant/zipline,florentchandelier/zipline,dhruvparamhans/zipline,semio/zipline,CarterBain/AlephNull,morrisonwudi/zipline,gwulfs/zipline,alphaBenj/zipline,ChinaQuants/zipline,DVegaCapital/zipline,michaeljohnbennett/zipline,euri10/zipline,umuzungu/zipline,humdings/zipline,CDSFinance/zipline,ronalcc/zipline,dmitriz/zipline,quantopian/zipline,nborggren/zipline,wubr2000/zipline,magne-max/zipline-ja,grundgruen/zipline,cmorgan/zipline,Scapogo/zipline,mattcaldwell/zipline,florentchandelier/zipline,mattcaldwell/zipline,ChinaQuants/zipline,jordancheah/zipline,MonoCloud/zipline,CDSFinance/zipline,sketchytechky/zipline,gwulfs/zipline
zipline/test/test_sanity.py
zipline/test/test_sanity.py
from unittest2 import TestCase class TestEnviroment(TestCase): def test_universe(self): # first order logic is working today. Yay! self.assertTrue(True != False)
apache-2.0
Python
67f5e754a5f90903e09a6a876d858d002c513f8a
Add initial draft of posterior models
lintusj1/elfi,lintusj1/elfi,elfi-dev/elfi,HIIT/elfi,elfi-dev/elfi
abcpy/posteriors.py
abcpy/posteriors.py
import scipy as sp from .utils import stochastic_optimization class BolfiPosterior(): def __init__(self, model, threshold, priors=None): self.threshold = threshold self.model = model self.priors = [None] * model.n_var self.ML, ML_val = stochastic_optimization(self._neg_unnormalized_loglikelihood_density, self.model.bounds, 10000) print("ML parameters: %s" % (self.ML)) self.MAP, MAP_val = stochastic_optimization(self._neg_unnormalized_logposterior_density, self.model.bounds, 10000) print("MAP parameters: %s" % (self.MAP)) def _unnormalized_loglikelihood_density(self, x): mean, var, std = self.model.evaluate(x) return sp.stats.norm.logcdf(self.threshold, mean, std) def _unnormalized_likelihood_density(self, x): return np.exp(self._unnormalized_loglikelihood_density(x)) def _neg_unnormalized_loglikelihood_density(self, x): return -1 * self._unnormalized_loglikelihood_density(x) def _unnormalized_logposterior_density(self, x): return self._unnormalized_loglikelihood_density(x) + self._logprior_density(x) def _unnormalized_posterior_density(self, x): return np.exp(self._unnormalized_logposterior_density(x)) def _neg_unnormalized_logposterior_density(self, x): return -1 * self._unnormalized_logposterior_density(x) def _logprior_density(self, x): logprior_density = 0.0 for xv, prior in zip(x, self.priors): if prior is not None: logprior_density += prior.getLogProbDensity(xv) return logprior_density def _prior_density(self, x): return np.exp(self._logprior_density(x)) def _neg_logprior_density(self, x): return -1 * self._logprior_density(x) def sample(self): return tuple([[v] for v in self.MAP])
bsd-3-clause
Python
8131bb276a467d7df00f7452616869d20d312eb7
add api_view test
MySmile/mysmile,MySmile/mysmile
apps/api/tests/tests_view.py
apps/api/tests/tests_view.py
import datetime from django.test import TestCase from django.test.client import Client from apps.pages.models import Page, Page_translation class MySmileApiTestCase(TestCase): def setUp(self): some_page = Page.objects.create(id=1, slug='index', color='#FDA132', photo='images/photo.png', sortorder=1, status=Page.STATUS_PUBLISHED, ptype=Page.PTYPE_API, updated_at=datetime.datetime.now(), created_at=datetime.datetime.now()) Page_translation.objects.create(id=1, page=some_page, lang='en', menu='Main', col_central='lorem ipsum', col_bottom_1='lorem ipsum', col_bottom_2='lorem ipsum', col_bottom_3='lorem ipsum', meta_title='Welcome!', meta_description='This is mane page!', meta_keywords='Python3, Django', photo_alt='', photo_description = '', updated_at=datetime.datetime.now(), created_at=datetime.datetime.now()) self._client = Client() def test_content_short(self): response = self._client.get('/api/content') self.assertEqual(response.status_code, 200) def test_content_slug(self): response = self._client.get('/api/content?slug=index') self.assertEqual(response.status_code, 200) def test_content_slug_lang(self): response = self._client.get('/api/content?slug=index&lang=en') self.assertEqual(response.status_code, 200) def test_language(self): response = self._client.get('/api/language') self.assertEqual(response.status_code, 200) def test_contact(self): response = self._client.get('/api/contact') self.assertEqual(response.status_code, 200)
bsd-3-clause
Python
6104fdc57931151f6cf3c8cd517f5efee17fe826
Update repost_stock_for_deleted_bins_for_merging_items.py
indictranstech/erpnext,indictranstech/erpnext,geekroot/erpnext,geekroot/erpnext,Aptitudetech/ERPNext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,geekroot/erpnext,njmube/erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,njmube/erpnext,njmube/erpnext,njmube/erpnext
erpnext/patches/v7_1/repost_stock_for_deleted_bins_for_merging_items.py
erpnext/patches/v7_1/repost_stock_for_deleted_bins_for_merging_items.py
from __future__ import unicode_literals import frappe from erpnext.stock.stock_balance import repost_stock def execute(): frappe.reload_doc('manufacturing', 'doctype', 'production_order_item') frappe.reload_doc('manufacturing', 'doctype', 'production_order') modified_items = frappe.db.sql_list(""" select name from `tabItem` where is_stock_item=1 and modified >= '2016-10-31' """) if not modified_items: return item_warehouses_with_transactions = [] transactions = ("Sales Order Item", "Material Request Item", "Purchase Order Item", "Stock Ledger Entry", "Packed Item") for doctype in transactions: item_warehouses_with_transactions += list(frappe.db.sql(""" select distinct item_code, warehouse from `tab{0}` where docstatus=1 and item_code in ({1})""" .format(doctype, ', '.join(['%s']*len(modified_items))), tuple(modified_items))) item_warehouses_with_transactions += list(frappe.db.sql(""" select distinct production_item, fg_warehouse from `tabProduction Order` where docstatus=1 and production_item in ({0})""" .format(', '.join(['%s']*len(modified_items))), tuple(modified_items))) item_warehouses_with_transactions += list(frappe.db.sql(""" select distinct pr_item.item_code, pr.source_warehouse from `tabProduction Order` pr, `tabProduction Order Item` pr_item where pr_item.parent and pr.name and pr.docstatus=1 and pr_item.item_code in ({0})""" .format(', '.join(['%s']*len(modified_items))), tuple(modified_items))) item_warehouses_with_bin = list(frappe.db.sql("select distinct item_code, warehouse from `tabBin`")) item_warehouses_with_missing_bin = list( set(item_warehouses_with_transactions) - set(item_warehouses_with_bin)) for item_code, warehouse in item_warehouses_with_missing_bin: repost_stock(item_code, warehouse)
from __future__ import unicode_literals import frappe from erpnext.stock.stock_balance import repost_stock def execute(): frappe.reload_doc('manufacturing', 'doctype', 'production_order_item') modified_items = frappe.db.sql_list(""" select name from `tabItem` where is_stock_item=1 and modified >= '2016-10-31' """) if not modified_items: return item_warehouses_with_transactions = [] transactions = ("Sales Order Item", "Material Request Item", "Purchase Order Item", "Stock Ledger Entry", "Packed Item") for doctype in transactions: item_warehouses_with_transactions += list(frappe.db.sql(""" select distinct item_code, warehouse from `tab{0}` where docstatus=1 and item_code in ({1})""" .format(doctype, ', '.join(['%s']*len(modified_items))), tuple(modified_items))) item_warehouses_with_transactions += list(frappe.db.sql(""" select distinct production_item, fg_warehouse from `tabProduction Order` where docstatus=1 and production_item in ({0})""" .format(', '.join(['%s']*len(modified_items))), tuple(modified_items))) item_warehouses_with_transactions += list(frappe.db.sql(""" select distinct pr_item.item_code, pr.source_warehouse from `tabProduction Order` pr, `tabProduction Order Item` pr_item where pr_item.parent and pr.name and pr.docstatus=1 and pr_item.item_code in ({0})""" .format(', '.join(['%s']*len(modified_items))), tuple(modified_items))) item_warehouses_with_bin = list(frappe.db.sql("select distinct item_code, warehouse from `tabBin`")) item_warehouses_with_missing_bin = list( set(item_warehouses_with_transactions) - set(item_warehouses_with_bin)) for item_code, warehouse in item_warehouses_with_missing_bin: repost_stock(item_code, warehouse)
agpl-3.0
Python
142ec5bdca99d11236f2d479cf4dafbc7e8962a3
test of the nis module
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
Lib/test/test_nis.py
Lib/test/test_nis.py
import nis verbose = 0 if __name__ == '__main__': verbose = 1 maps = nis.maps() for nismap in maps: if verbose: print nismap mapping = nis.cat(nismap) for k, v in mapping.items(): if verbose: print ' ', k, v if not k: continue if nis.match(k, nismap) <> v: print "NIS match failed for key `%s' in map `%s'" % (k, nismap)
mit
Python
a35a6b715670e985c0bd711a4cb55df2a267e018
Create downloader.py
EscapeLife/web_crawler
3.下载缓存/downloader.py
3.下载缓存/downloader.py
import urlparse import urllib2 import random import time from datetime import datetime, timedelta import socket DEFAULT_AGENT = 'wswp' DEFAULT_DELAY = 5 DEFAULT_RETRIES = 1 DEFAULT_TIMEOUT = 60 class Downloader: def __init__(self, delay=DEFAULT_DELAY, user_agent=DEFAULT_AGENT, proxies=None, num_retries=DEFAULT_RETRIES, timeout=DEFAULT_TIMEOUT, opener=None, cache=None): socket.setdefaulttimeout(timeout) self.throttle = Throttle(delay) self.user_agent = user_agent self.proxies = proxies self.num_retries = num_retries self.opener = opener self.cache = cache def __call__(self, url): result = None if self.cache: try: result = self.cache[url] except KeyError: # url is not available in cache pass else: if self.num_retries > 0 and 500 <= result['code'] < 600: # server error so ignore result from cache and re-download result = None if result is None: # result was not loaded from cache so still need to download self.throttle.wait(url) proxy = random.choice(self.proxies) if self.proxies else None headers = {'User-agent': self.user_agent} result = self.download(url, headers, proxy=proxy, num_retries=self.num_retries) if self.cache: # save result to cache self.cache[url] = result return result['html'] def download(self, url, headers, proxy, num_retries, data=None): print 'Downloading:', url request = urllib2.Request(url, data, headers or {}) opener = self.opener or urllib2.build_opener() if proxy: proxy_params = {urlparse.urlparse(url).scheme: proxy} opener.add_handler(urllib2.ProxyHandler(proxy_params)) try: response = opener.open(request) html = response.read() code = response.code except Exception as e: print 'Download error:', str(e) html = '' if hasattr(e, 'code'): code = e.code if num_retries > 0 and 500 <= code < 600: # retry 5XX HTTP errors return self._get(url, headers, proxy, num_retries-1, data) else: code = None return {'html': html, 'code': code} class Throttle: """Throttle downloading by sleeping between requests to same domain """ def __init__(self, delay): # amount of delay between downloads for each domain self.delay = delay # timestamp of when a domain was last accessed self.domains = {} def wait(self, url): """Delay if have accessed this domain recently """ domain = urlparse.urlsplit(url).netloc last_accessed = self.domains.get(domain) if self.delay > 0 and last_accessed is not None: sleep_secs = self.delay - (datetime.now() - last_accessed).seconds if sleep_secs > 0: time.sleep(sleep_secs) self.domains[domain] = datetime.now()
mit
Python
6bf4f7491bdfe8a5afd5eb8cdb4a8fcb2af78b36
Add commands/findCognateClassesCrossingMeanings.py
lingdb/CoBL-public,lingdb/CoBL-public,lingdb/CoBL-public,lingdb/CoBL-public
ielex/lexicon/management/commands/findCognateClassesCrossingMeanings.py
ielex/lexicon/management/commands/findCognateClassesCrossingMeanings.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals, print_function from collections import defaultdict from django.core.management import BaseCommand from ielex.lexicon.models import CognateJudgement, Lexeme class Command(BaseCommand): help = "Compiles a list of cognate classes,"\ "\nwhere each cognate class belongs to more than one meaning." def handle(self, *args, **options): lexemeMeaningMap = dict(Lexeme.objects.values_list('id', 'meaning_id')) cogLexTuples = CognateJudgement.objects.values_list( 'cognate_class_id', 'lexeme_id') cogMeaningMap = defaultdict(set) for cogId, lexId in cogLexTuples: cogMeaningMap[cogId].add(lexemeMeaningMap[lexId]) for cogId, mIdSet in cogMeaningMap.iteritems(): if len(mIdSet) > 1: print("Cognate class %s has multiple meanings: %s." % (cogId, mIdSet))
bsd-2-clause
Python
b7dd7f75f655f4fbcb34d8f9ec260a6f18e8f617
Add utility to create administrative users.
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
backend/scripts/adminuser.py
backend/scripts/adminuser.py
#!/usr/bin/env python import rethinkdb as r from optparse import OptionParser import sys def create_group(conn): group = {} group['name'] = "Admin Group" group['description'] = "Administration Group for Materials Commons" group['id'] = 'admin' group['owner'] = '[email protected]' group['users'] = [] group['birthtime'] = r.now() group['mtime'] = r.now() r.table('usergroups').insert(group).run(conn) admin_group = r.table('usergroups').get('admin')\ .run(conn, time_format='raw') return admin_group def add_user(user, group, conn): for u in group['users']: if u == user: return group['users'].append(user) r.table('usergroups').get('admin').update(group).run(conn) if __name__ == "__main__": parser = OptionParser() parser.add_option("-P", "--port", type="int", dest="port", help="rethinkdb port") parser.add_option("-u", "--user", type="string", dest="user", help="user to add to admin group") (options, args) = parser.parse_args() if options.port is None: print "You must specify the rethinkdb port" sys.exit(1) if options.user is None: print "You must specify a user to add" sys.exit(1) conn = r.connect('localhost', options.port, db='materialscommons') admin_group = r.table('usergroups').get('admin')\ .run(conn, time_format='raw') if admin_group is None: admin_group = create_group(conn) add_user(options.user, admin_group, conn)
mit
Python
a1c4eb2183e3d3920e992b0753392d987b518bcf
add unit-test for tablegenerator.util.split_string_at_suffix
ultimate-pa/benchexec,martin-neuhaeusser/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,martin-neuhaeusser/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,IljaZakharov/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,IljaZakharov/benchexec,dbeyer/benchexec
benchexec/tablegenerator/test_util.py
benchexec/tablegenerator/test_util.py
# BenchExec is a framework for reliable benchmarking. # This file is part of BenchExec. # # Copyright (C) 2007-2016 Dirk Beyer # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # prepare for Python 3 from __future__ import absolute_import, division, print_function, unicode_literals import sys import unittest sys.dont_write_bytecode = True # prevent creation of .pyc files from benchexec.tablegenerator import util class TestUnit(unittest.TestCase): @classmethod def setUpClass(cls): cls.longMessage = True cls.maxDiff = None def assertEqualNumberAndUnit(self, value, number, unit): self.assertEqual(util.split_number_and_unit(value), (number, unit)) self.assertEqual(util.split_string_at_suffix(value, False), (number, unit)) def assertEqualTextAndNumber(self, value, text, number): self.assertEqual(util.split_string_at_suffix(value, True), (text, number)) def test_split_number_and_unit(self): self.assertEqualNumberAndUnit("", "", "") self.assertEqualNumberAndUnit("1", "1", "") self.assertEqualNumberAndUnit("1s", "1", "s") self.assertEqualNumberAndUnit("111s", "111", "s") self.assertEqualNumberAndUnit("s1", "s1", "") self.assertEqualNumberAndUnit("s111", "s111", "") self.assertEqualNumberAndUnit("-1s", "-1", "s") self.assertEqualNumberAndUnit("1abc", "1", "abc") self.assertEqualNumberAndUnit("abc", "", "abc") self.assertEqualNumberAndUnit("abc1abc", "abc1", "abc") self.assertEqualNumberAndUnit("abc1abc1abc", "abc1abc1", "abc") def test_split_string_at_suffix(self): self.assertEqualTextAndNumber("", "", "") self.assertEqualTextAndNumber("1", "", "1") self.assertEqualTextAndNumber("1s", "1s", "") self.assertEqualTextAndNumber("111s", "111s", "") self.assertEqualTextAndNumber("s1", "s", "1") self.assertEqualTextAndNumber("s111", "s", "111") self.assertEqualTextAndNumber("-1s", "-1s", "") self.assertEqualTextAndNumber("abc1", "abc", "1") self.assertEqualTextAndNumber("abc", "abc", "") self.assertEqualTextAndNumber("abc1abc", "abc1abc", "") self.assertEqualTextAndNumber("abc1abc1", "abc1abc", "1")
apache-2.0
Python
8d32947304d72a13ed8e27d41d35028a904072e9
Add libpq package
trigger-happy/conan-packages
libpq/conanfile.py
libpq/conanfile.py
from conans import ConanFile, AutoToolsBuildEnvironment, tools import os class LibpqConn(ConanFile): name = "libpq" version = "9.6.3" license = "PostgreSQL license https://www.postgresql.org/about/licence/" url = "https://github.com/trigger-happy/conan-packages" description = "C library for interfacing with postgresql" settings = "os", "compiler", "build_type", "arch" options = {"shared": [True, False]} default_options = "shared=False" generators = "cmake" def source(self): pkgLink = 'https://ftp.postgresql.org/pub/source/v{pkgver}/postgresql-{pkgver}.tar.bz2'.format(pkgver=self.version) self.run("curl -JOL " + pkgLink) self.run("tar xf postgresql-{pkgver}.tar.bz2".format(pkgver=self.version)) self.run("mkdir deploy") def build(self): env_build = AutoToolsBuildEnvironment(self) install_prefix=os.getcwd() with tools.chdir("postgresql-{pkgver}".format(pkgver=self.version)): with tools.environment_append(env_build.vars): self.run("./configure --with-openssl --without-readline --prefix={0}".format(install_prefix)) with tools.chdir("src/interfaces/libpq"): self.run("make install") def package(self): with tools.chdir("deploy"): self.copy("lib/*", dst="lib", keep_path=False) self.copy("include/*", dst=".", keep_path=True) def package_info(self): self.cpp_info.libs = ["pq"]
mit
Python
e59c03f0bad78c9cb1db86f2fb0ac29009c8474e
add rll
mengzhuo/my-leetcode-solution
reverse-linked-list.py
reverse-linked-list.py
# https://leetcode.com/problems/reverse-linked-list/ # Definition for singly-linked list. class ListNode: def __init__(self, x): self.val = x self.next = None class Solution: # @param {ListNode} head # @return {ListNode} def reverseList(self, head): last, current = None, head while current: next = current.next current.next = last last = current current = next return last
mit
Python
0c17398f68597eae175ad6a37945cf37e95e1809
Reset invalid default quotas for CloudServiceProjectLink [WAL-814]
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
nodeconductor/structure/migrations/0050_reset_cloud_spl_quota_limits.py
nodeconductor/structure/migrations/0050_reset_cloud_spl_quota_limits.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.contrib.contenttypes import models as ct_models from django.db import migrations, models from nodeconductor.quotas.models import Quota from nodeconductor.structure.models import CloudServiceProjectLink def reset_cloud_spl_quota_limits(apps, schema_editor): old_limits = { 'vcpu': 100, 'ram': 256000, 'storage': 5120000, } for model in CloudServiceProjectLink.get_all_models(): content_type = ct_models.ContentType.objects.get_for_model(model) for quota, limit in old_limits.items(): Quota.objects.filter(content_type=content_type, name=quota, limit=limit).update(limit=-1) class Migration(migrations.Migration): dependencies = [ ('structure', '0049_extend_abbreviation'), ] operations = [ migrations.RunPython(reset_cloud_spl_quota_limits), ]
mit
Python
63ae0b619ea50b1e234abc139becaeb84c703302
add player class
Mellcap/MellPlayer
MellPlayer/player.py
MellPlayer/player.py
#!/usr/bin/env python # -*- coding: utf-8 -*- ''' Netease Music Player Created on 2017-02-20 @author: Mellcap ''' class Player(object): def __init__(self): pass def start(self): pass def pause(self): pass def start_or_pause(self): pass def switch_song(self, action='next'): ''' action: next/prev ''' pass def switch_playlist(self, action='next'): ''' action: next/prev ''' pass
mit
Python
602db58ff01ef7ea2718d713a5b2026377023b8d
Create context_processors.py
20tab/twentytab_project,20tab/twentytab_project,20tab/twentytab_project
commons/context_processors.py
commons/context_processors.py
from os import environ from {{ project_name }} import __version__ import uuid def metainfo(request): metainfo = { 'uuid': unicode(uuid.uuid4()), 'version': __version__, 'static_version': "?v={}".format(uuid), 'branch': environ['BRANCH'] } return metainfo
mit
Python
4152b6a10610aa364e901f062a8611b94f65b3de
Create e.py
xsthunder/a,xsthunder/acm,xsthunder/a,xsthunder/acm,xsthunder/acm,xsthunder/a,xsthunder/a,xsthunder/acm,xsthunder/a
at/abc126/e.py
at/abc126/e.py
# 并查集 read = input n, m = map(int, read().split()) f = [-1 for i in range(n + 1)] # 1 ~ n def find(x): if f[x]<0: return x else : f[x] = find(f[x]) return f[x] for i in range(m): x,y,z = map(int, read().split()) if abs(x) < abs(y): #合并到x上,保证x是大集合 x,y = y,x fx = find(x) fy = find(y) if fx == fy:continue # 已经在一个集合,不操作 f[fx] = f[fx] - 1 f[fy] = fx # print(x,y,fx,fy,f) ans = 0 for i in range(1, n+1): # 1~n if f[i] < 0: ans += 1 print(ans)
mit
Python
2057ebd9bae44b232b133ca0c0f76e11d4ca3b5f
Add missing file
sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary
conary/server/wsgi_adapter.py
conary/server/wsgi_adapter.py
# # Copyright (c) rPath, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import webob import sys def modpython_to_webob(mpreq, handler): # This could be written as a mod_python -> WSGI gateway, but this is much # more compact. from mod_python import apache mpreq.add_common_vars() environ = dict(mpreq.subprocess_env.items()) environ['wsgi.version'] = (1, 0) if environ.get('HTTPS', '').lower() == 'on': environ['wsgi.url_scheme'] = 'https' else: environ['wsgi.url_scheme'] = 'http' environ['wsgi.input'] = mpreq environ['wsgi.errors'] = sys.stderr environ['wsgi.multithread'] = False environ['wsgi.multiprocess'] = True environ['wsgi.run_once'] = False request = webob.Request(environ) response = handler(request) mpreq.status = response.status_int for key, value in response.headerlist: if key.lower() == 'content-length': mpreq.set_content_length(int(value)) elif key.lower() == 'content-type': mpreq.content_type = value else: mpreq.headers_out.add(key, value) for chunk in response.app_iter: mpreq.write(chunk) return apache.OK
apache-2.0
Python
38cec6e7806e55d957e9810d1bb861054ae4842b
add useful methods
marianosimone/interviewed,marianosimone/interviewed
useful_methods.py
useful_methods.py
# encoding utf-8 def bisect_right(data, target, lo, hi): """ Given a sorted array, returns the insertion position of target If the value is already present, the insertion post is to the right of all of them >>> bisect_right([1,1,2,3,4,5], 1, 0, 6) 2 >>> bisect_right([1,1,2,3,4,5], 0, 0, 6) 0 >>> bisect_right([1,1,2,3,4,5], 6, 0, 6) 6 """ while lo < hi: mid = (lo+hi)/2 if data[mid] > target: hi = mid else: lo = mid+1 return lo def bisect_left(data, target, lo, hi): """ Given a sorted array, returns the insertion position of target If the value is already present, the insertion post is to the left of all of them >>> bisect_left([1,1,2,3,4,5], 1, 0, 6) 0 >>> bisect_left([1,1,2,3,4,5], 6, 0, 6) 6 >>> bisect_left([1,1,2,3,4,5], 0, 0, 6) 0 """ while lo < hi: mid = (lo+hi)/2 if data[mid] < target: lo = mid+1 else: hi = mid return lo def permutations_generator(head, tail=[]): """ >>> [p for p in permutations_generator([1, 2, 3])] [[1, 2, 3], [1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 1, 2], [3, 2, 1]] """ if not head: yield tail else: for i in xrange(len(head)): for p in permutations_generator(head[:i] + head[i+1:], tail+[head[i]]): yield p def permutations(data): """ >>> [p for p in permutations([1, 2, 3])] [[3, 2, 1], [2, 3, 1], [3, 1, 2], [1, 3, 2], [2, 1, 3], [1, 2, 3]] """ stack = [(data, [])] rv = [] while stack: head, tail = stack.pop() if not head: rv.append(tail) else: for i in xrange(len(head)-1, -1, -1): stack.append((head[:i] + head[i+1:], [head[i]]+tail)) return rv class BinaryIndexedTree: def __init__(self, length): self._data = [0 for i in xrange(length+1)] def value(self, pos): rv = self._data[pos] if (pos > 0): z = pos - (pos & -pos) pos -= 1 while (pos != z): rv -= self._data[pos] pos -= (pos & -pos) return rv def add(self, pos, count): while pos <= len(self._data): self._data[pos] += count pos += (pos & -pos) def accum(self, pos): rv = 0 while (pos > 0): rv += self._data[pos] pos -= (pos & -pos) return rv def powerset(s): """Computes all of the sublists of s""" rv = [[]] for num in s: rv += [x+[num] for x in rv] return rv def lis(arr): """ Return the Longest Increasing Subsequence of arr, in O(N^2) >>> lis([2, 1, 3, 4, -5, 3, 2, 4, 5]) [-5, 2, 4, 5] """ elements = [(0, 1)] global_max = (0, 1) for i in xrange(1, len(arr)): max_before = (i, 1) for j in xrange(i-1, -1, -1): if arr[i] > arr[j] and elements[j][1]+1 > max_before[1]: max_before = (j, elements[j][1]+1) elements.append(max_before) if max_before[1] > global_max[1]: global_max = (i, max_before[1]) last = len(arr) current = global_max sequence = [] while last != current[0]: last = current[0] sequence.append(arr[current[0]]) current = elements[current[0]] return sequence[::-1]
unlicense
Python
aef33a2c8f34d164bba18741a3cf6e5b71a60a99
Add stub file for extract_csv.py
illumenati/duwamish-sensor,tipsqueal/duwamish-sensor
extract_csv.py
extract_csv.py
def extract_csv(filename): # TODO: connect to sqlite database and extract a csv of the rows. pass if __name__ == '__main__': extract_csv('data.csv')
mit
Python
f99eb9a2397f571f045f6a5f663a42878e94b3ea
Create Euler_003.py
kingmak/Project_Euler_Solutions
Euler_003.py
Euler_003.py
# x, num = 2, 600851475143 while num != x: if num % x == 0: num = num / x; x = 2 else: x += 1 print x
unlicense
Python
411ef30db7431e9df1af02cd68a6ae0b9d874af0
add a first draft for the test of canal metrics
rfdougherty/dipy,samuelstjean/dipy,sinkpoint/dipy,beni55/dipy,mdesco/dipy,jyeatman/dipy,matthieudumont/dipy,Messaoud-Boudjada/dipy,nilgoyyou/dipy,JohnGriffiths/dipy,JohnGriffiths/dipy,beni55/dipy,villalonreina/dipy,demianw/dipy,samuelstjean/dipy,jyeatman/dipy,sinkpoint/dipy,oesteban/dipy,oesteban/dipy,FrancoisRheaultUS/dipy,StongeEtienne/dipy,rfdougherty/dipy,villalonreina/dipy,mdesco/dipy,matthieudumont/dipy,FrancoisRheaultUS/dipy,samuelstjean/dipy,Messaoud-Boudjada/dipy,StongeEtienne/dipy,demianw/dipy,nilgoyyou/dipy
dipy/reconst/tests/test_canal_metrics.py
dipy/reconst/tests/test_canal_metrics.py
import numpy as np from dipy.reconst.dsi import DiffusionSpectrumModel from dipy.data import get_data from dipy.core.gradients import gradient_table from numpy.testing import (assert_almost_equal, run_module_suite) from dipy.reconst.canal import ShoreModel, SHOREmatrix from dipy.sims.voxel import MultiTensor, all_tensor_evecs, multi_tensor_odf, single_tensor_odf, multi_tensor_rtop, multi_tensor_msd, multi_tensor_pdf from dipy.data import fetch_isbi2013_2shell, read_isbi2013_2shell from dipy.data import fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi from dipy.data import get_sphere def test_canal_metrics(): fetch_taiwan_ntu_dsi() img, gtab = read_taiwan_ntu_dsi() # fetch_isbi2013_2shell() # img, gtab = read_isbi2013_2shell() mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angl = [(0, 0), (60, 0)] S, sticks = MultiTensor(gtab, mevals, S0=100, angles=angl, fractions=[50, 50], snr=None) S = S / S[0, None].astype(np.float) asm = ShoreModel(gtab) asmfit = asm.fit(S) radialOrder = 8 zeta = 800 lambdaN = 1e-12 lambdaL = 1e-12 Cshore = asmfit.l2estimation(radialOrder=radialOrder, zeta=zeta, lambdaN=lambdaN, lambdaL=lambdaL) Cmat = SHOREmatrix(radialOrder, zeta, gtab) S_reconst = np.dot(Cmat, Cshore) nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 4) mevecs2 = np.zeros((2, 3, 3)) angl = np.array(angl) for i in range(2): mevecs2[i] = all_tensor_evecs(sticks[i]).T sphere = get_sphere('symmetric724') v = sphere.vertices radius = 10e-3 pdf_shore = asmfit.pdf_iso(v * radius) pdf_mt = multi_tensor_pdf( v * radius, [.5, .5], mevals=mevals, mevecs=mevecs2) nmse_pdf = np.sqrt(np.sum((pdf_mt - pdf_shore) ** 2)) / (pdf_mt.sum()) assert_almost_equal(nmse_pdf, 0.0, 2) rtop_shore_signal = asmfit.rtop_signal() rtop_shore_pdf = asmfit.rtop_pdf() assert_almost_equal(rtop_shore_signal, rtop_shore_pdf, 9) #rtop_mt = multi_tensor_rtop([.5, .5], mevals=mevals) #err_rtop = np.abs(rtop_mt - rtop_shore_pdf) / rtop_mt #assert_almost_equal(err_rtop, 0.0, 1) msd_mt = multi_tensor_msd([.5, .5], mevals=mevals) msd_shore = asmfit.msd() err_msd = np.abs(msd_mt - msd_shore) / msd_mt assert_almost_equal(err_msd, 0, 1) if __name__ == '__main__': run_module_suite()
bsd-3-clause
Python
1072b8e28e75cf41a35302c9febd1ec22473e966
Add code/analyse_chain_growth.py
pdebuyl/cg_md_polymerization,pdebuyl/cg_md_polymerization
code/analyse_chain_growth.py
code/analyse_chain_growth.py
#!/usr/bin/env python import sys import os import os.path import argparse parser = argparse.ArgumentParser() parser.add_argument('dirs', type=str, nargs='+', help='directories containing simulation files') parser.add_argument('--rate', type=float, default=0.1) parser.add_argument('--sites', type=int, default=1) parser.add_argument('-N', type=int, default=10000) args = parser.parse_args() import numpy as np from scipy.optimize import leastsq from io import StringIO import matplotlib.pyplot as plt NNEIGH=3.5 # Open lammps log file to extract thermodynamic observables def from_log(logfile,i0,i1): return np.loadtxt(StringIO(u''.join(logfile[i0+1:i1])), unpack=True) fitfunc = lambda p, t: 1*(1.-np.exp(-t*p[0]-p[1])) errfunc = lambda p, t, y: fitfunc(p, t) - y p_data = [] for d in args.dirs: logfile = open(os.path.join(os.getcwd(), d, 'log.lammps')).readlines() start_indices = [(i,l) for (i,l) in enumerate(logfile) if l.startswith('Time ')] stop_indices = [(i,l) for (i,l) in enumerate(logfile) if l.startswith('Loop time')] time, e_tot, temp, e_kin, e_vdw, e_bond, e_pot, press, rho, n_bonds, n_bonds_max, bonds = from_log(logfile, start_indices[-1][0], stop_indices[-1][0]) time -= time[0] plt.plot(time, n_bonds) nmax = min(int(1./(args.rate*args.fraction)), len(time)) nmax = len(time) p, success = leastsq(errfunc, [args.rate*NNEIGH*args.fraction, 0./args.rate], args=(time[:nmax], n_bonds[:nmax])) p_data.append(p) print p plt.plot(time, 1*(1.-np.exp(-time*args.rate*NNEIGH*args.fraction))) p_data = np.array(p_data) print p_data.mean(axis=0) plt.plot(time, fitfunc(p_data.mean(axis=0), time), 'k--') plt.show()
bsd-3-clause
Python
bd15388aa877f32ebc613511ad909b311ed3bcf0
Add tests
ChristinaZografou/sympy,AunShiLord/sympy,Mitchkoens/sympy,saurabhjn76/sympy,VaibhavAgarwalVA/sympy,jbbskinny/sympy,shipci/sympy,Curious72/sympy,abloomston/sympy,meghana1995/sympy,jaimahajan1997/sympy,VaibhavAgarwalVA/sympy,Davidjohnwilson/sympy,mcdaniel67/sympy,beni55/sympy,MechCoder/sympy,shikil/sympy,saurabhjn76/sympy,madan96/sympy,kumarkrishna/sympy,lindsayad/sympy,Vishluck/sympy,sahmed95/sympy,farhaanbukhsh/sympy,hargup/sympy,asm666/sympy,sahilshekhawat/sympy,ga7g08/sympy,vipulroxx/sympy,Designist/sympy,meghana1995/sympy,yukoba/sympy,MechCoder/sympy,hargup/sympy,cccfran/sympy,sunny94/temp,postvakje/sympy,cswiercz/sympy,drufat/sympy,bukzor/sympy,cccfran/sympy,wyom/sympy,Designist/sympy,shikil/sympy,sahmed95/sympy,vipulroxx/sympy,Titan-C/sympy,sahilshekhawat/sympy,maniteja123/sympy,hargup/sympy,Gadal/sympy,sampadsaha5/sympy,chaffra/sympy,postvakje/sympy,kaushik94/sympy,rahuldan/sympy,emon10005/sympy,maniteja123/sympy,Arafatk/sympy,pandeyadarsh/sympy,ChristinaZografou/sympy,kumarkrishna/sympy,atsao72/sympy,debugger22/sympy,drufat/sympy,wyom/sympy,Shaswat27/sympy,garvitr/sympy,grevutiu-gabriel/sympy,asm666/sympy,beni55/sympy,souravsingh/sympy,kaichogami/sympy,garvitr/sympy,abhiii5459/sympy,lindsayad/sympy,Vishluck/sympy,jaimahajan1997/sympy,rahuldan/sympy,oliverlee/sympy,Davidjohnwilson/sympy,skidzo/sympy,Gadal/sympy,Shaswat27/sympy,MechCoder/sympy,jamesblunt/sympy,pbrady/sympy,kevalds51/sympy,oliverlee/sympy,mafiya69/sympy,liangjiaxing/sympy,diofant/diofant,mafiya69/sympy,yukoba/sympy,maniteja123/sympy,Shaswat27/sympy,abloomston/sympy,pandeyadarsh/sympy,skirpichev/omg,moble/sympy,saurabhjn76/sympy,Sumith1896/sympy,pandeyadarsh/sympy,asm666/sympy,atsao72/sympy,drufat/sympy,toolforger/sympy,mcdaniel67/sympy,sahmed95/sympy,MridulS/sympy,Curious72/sympy,atreyv/sympy,cswiercz/sympy,wanglongqi/sympy,chaffra/sympy,Mitchkoens/sympy,farhaanbukhsh/sympy,Titan-C/sympy,moble/sympy,shipci/sympy,ChristinaZografou/sympy,souravsingh/sympy,chaffra/sympy,Arafatk/sympy,jerli/sympy,grevutiu-gabriel/sympy,Sumith1896/sympy,ga7g08/sympy,sunny94/temp,meghana1995/sympy,yukoba/sympy,cswiercz/sympy,liangjiaxing/sympy,oliverlee/sympy,kevalds51/sympy,liangjiaxing/sympy,yashsharan/sympy,jamesblunt/sympy,kevalds51/sympy,iamutkarshtiwari/sympy,yashsharan/sympy,jerli/sympy,AkademieOlympia/sympy,mafiya69/sympy,VaibhavAgarwalVA/sympy,shipci/sympy,skidzo/sympy,Mitchkoens/sympy,kaushik94/sympy,Vishluck/sympy,debugger22/sympy,AkademieOlympia/sympy,atreyv/sympy,jamesblunt/sympy,postvakje/sympy,MridulS/sympy,kaichogami/sympy,emon10005/sympy,atreyv/sympy,AunShiLord/sympy,madan96/sympy,kaushik94/sympy,dqnykamp/sympy,farhaanbukhsh/sympy,aktech/sympy,sampadsaha5/sympy,grevutiu-gabriel/sympy,atsao72/sympy,Titan-C/sympy,emon10005/sympy,lindsayad/sympy,beni55/sympy,moble/sympy,Curious72/sympy,abhiii5459/sympy,souravsingh/sympy,Gadal/sympy,ahhda/sympy,jbbskinny/sympy,Designist/sympy,iamutkarshtiwari/sympy,debugger22/sympy,wyom/sympy,aktech/sympy,pbrady/sympy,dqnykamp/sympy,ahhda/sympy,Davidjohnwilson/sympy,wanglongqi/sympy,garvitr/sympy,jaimahajan1997/sympy,jerli/sympy,madan96/sympy,jbbskinny/sympy,bukzor/sympy,toolforger/sympy,abhiii5459/sympy,abloomston/sympy,vipulroxx/sympy,pbrady/sympy,yashsharan/sympy,kumarkrishna/sympy,sahilshekhawat/sympy,AkademieOlympia/sympy,iamutkarshtiwari/sympy,cccfran/sympy,AunShiLord/sympy,toolforger/sympy,skidzo/sympy,Arafatk/sympy,aktech/sympy,kaichogami/sympy,rahuldan/sympy,sunny94/temp,sampadsaha5/sympy,Sumith1896/sympy,bukzor/sympy,MridulS/sympy,dqnykamp/sympy,mcdaniel67/sympy,wanglongqi/sympy,shikil/sympy,ga7g08/sympy,ahhda/sympy
sympy/concrete/tests/test_dispersion.py
sympy/concrete/tests/test_dispersion.py
from sympy.core import Symbol, S, oo from sympy.concrete.dispersion import * def test_dispersion(): x = Symbol("x") fp = S(0).as_poly(x) assert sorted(dispersionset(fp)) == [0] fp = S(2).as_poly(x) assert sorted(dispersionset(fp)) == [0] fp = (x + 1).as_poly(x) assert sorted(dispersionset(fp)) == [0] assert dispersion(fp) == 0 fp = (x*(x + 3)).as_poly(x) assert sorted(dispersionset(fp)) == [0, 3] assert dispersion(fp) == 3 fp = ((x - 3)*(x + 3)).as_poly(x) assert sorted(dispersionset(fp)) == [0, 6] assert dispersion(fp) == 6 fp = ((x + 1)*(x + 2)).as_poly(x) assert sorted(dispersionset(fp)) == [0, 1] assert dispersion(fp) == 1 fp = (x**4 - 3*x**2 + 1).as_poly(x) gp = fp.shift(-3) assert sorted(dispersionset(fp, gp)) == [2, 3, 4] assert dispersion(fp, gp) == 4 assert sorted(dispersionset(gp, fp)) == [] assert dispersion(gp, fp) == -oo a = Symbol("a") fp = (x*(3*x**2+a)*(x-2536)*(x**3+a)).as_poly(x) gp = fp.as_expr().subs(x, x-345).as_poly(x) assert sorted(dispersionset(fp, gp)) == [345, 2881]
bsd-3-clause
Python
6ed3b62efe24aa8aeaedd314bb4e472628713bac
Create deft_opportunist.py
GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus,GrognardsFromHell/TemplePlus
tpdatasrc/tpgamefiles/scr/tpModifiers/deft_opportunist.py
tpdatasrc/tpgamefiles/scr/tpModifiers/deft_opportunist.py
#Deft Opportunist: Complete Adventurer, p. 106 from templeplus.pymod import PythonModifier from toee import * import tpdp print "Registering Deft Opportunist" def DOAOO(attachee, args, evt_obj): if attachee.has_feat("Deft Opportunist") != 0: #Check if it's an AOO, if so add 4 to the Attack Roll if evt_obj.attack_packet.get_flags() & D20CAF_ATTACK_OF_OPPORTUNITY: evt_obj.bonus_list.add(4, 0, "Target Deft Opportunist bonus") return 0 eDO = PythonModifier("Deft Opportunist Feat", 2) eDO.MapToFeat("Deft Opportunist") eDO.AddHook(ET_OnToHitBonus2, EK_NONE, DOAOO, ())
mit
Python
52f8daf63644fde1efd1c132d6b02ac6670ef0a4
Add migrations merge
pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web
temba/channels/migrations/0038_merge.py
temba/channels/migrations/0038_merge.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('channels', '0037_auto_20160905_1537'), ('channels', '0033_auto_20160623_1438'), ] operations = [ ]
agpl-3.0
Python
1b538aba890c8a81fc7bf66f2c35519608fbd6be
Create drivers.py
ariegg/webiopi-drivers,ariegg/webiopi-drivers
chips/analog/mock/drivers.py
chips/analog/mock/drivers.py
# This code has to be added to the corresponding __init__.py DRIVERS["analogmock"] = ["ANALOG", "PUUM"]
apache-2.0
Python
528de5a29d7beb743e5e80775a349f931e71262f
add test that triggers previous error
oesteban/preprocessing-workflow,poldracklab/fmriprep,poldracklab/fmriprep,oesteban/fmriprep,shoshber/fmriprep,oesteban/preprocessing-workflow,poldracklab/preprocessing-workflow,shoshber/fmriprep,oesteban/fmriprep,poldracklab/fmriprep,chrisfilo/fmriprep,poldracklab/preprocessing-workflow,oesteban/fmriprep
test/workflows/test_base.py
test/workflows/test_base.py
import json import fmriprep.workflows.base as base import re import unittest import mock class TestBase(unittest.TestCase): def test_fmri_preprocess_single(self): ''' Tests that it runs without errors ''' # NOT a test for correctness # SET UP INPUTS test_settings = { 'output_dir': '.', 'work_dir': '.' } # SET UP EXPECTATIONS # RUN base.fmri_preprocess_single(settings=test_settings) # ASSERT
bsd-3-clause
Python
0e02a9de3599e726b5a4dffd17f92a0cd0d2aaee
add import script for Wyre
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations
polling_stations/apps/data_collection/management/commands/import_wyre.py
polling_stations/apps/data_collection/management/commands/import_wyre.py
from data_collection.management.commands import BaseXpressWebLookupCsvImporter class Command(BaseXpressWebLookupCsvImporter): council_id = 'E07000128' addresses_name = 'WyrePropertyPostCodePollingStationWebLookup-2017-03-08 2.CSV' stations_name = 'WyrePropertyPostCodePollingStationWebLookup-2017-03-08 2.CSV' elections = ['local.lancashire.2017-05-04']
bsd-3-clause
Python
b14fb988321076f4cf17cebec7635fd209e08465
Create video.py
sonus89/FIPER,sonus89/FIPER,sonus89/FIPER
client/video.py
client/video.py
# Capture video with OpenCV import numpy as np import cv2 import time cap = cv2.VideoCapture('serenity.mp4') while(cap.isOpened()): ret, frame = cap.read() # time.sleep(.25) cv2.rectangle(frame,(384,0),(510,128),(0,255,0),3) cv2.imshow('frame',frame) if cv2.waitKey(5) & 0xFF == ord('q'): break cap.release()
mit
Python
18a356c9fa49f32627481f312b03aa34ff711456
Revert "Define the tests as grpc_cc_test to automatically test against all po…"
Vizerai/grpc,muxi/grpc,pszemus/grpc,nicolasnoble/grpc,kpayson64/grpc,murgatroid99/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,stanley-cheung/grpc,pszemus/grpc,ctiller/grpc,kpayson64/grpc,jtattermusch/grpc,stanley-cheung/grpc,firebase/grpc,donnadionne/grpc,donnadionne/grpc,sreecha/grpc,ejona86/grpc,murgatroid99/grpc,chrisdunelm/grpc,stanley-cheung/grpc,muxi/grpc,sreecha/grpc,muxi/grpc,muxi/grpc,sreecha/grpc,jboeuf/grpc,thinkerou/grpc,kpayson64/grpc,Vizerai/grpc,kpayson64/grpc,muxi/grpc,chrisdunelm/grpc,vjpai/grpc,jboeuf/grpc,mehrdada/grpc,muxi/grpc,firebase/grpc,chrisdunelm/grpc,vjpai/grpc,jboeuf/grpc,chrisdunelm/grpc,jtattermusch/grpc,firebase/grpc,murgatroid99/grpc,kpayson64/grpc,ejona86/grpc,grpc/grpc,dgquintas/grpc,ejona86/grpc,ncteisen/grpc,sreecha/grpc,kpayson64/grpc,nicolasnoble/grpc,pszemus/grpc,vjpai/grpc,ctiller/grpc,ctiller/grpc,ncteisen/grpc,jboeuf/grpc,ejona86/grpc,grpc/grpc,stanley-cheung/grpc,ctiller/grpc,vjpai/grpc,simonkuang/grpc,chrisdunelm/grpc,thinkerou/grpc,firebase/grpc,sreecha/grpc,carl-mastrangelo/grpc,ejona86/grpc,ejona86/grpc,thinkerou/grpc,mehrdada/grpc,sreecha/grpc,ncteisen/grpc,carl-mastrangelo/grpc,simonkuang/grpc,ctiller/grpc,murgatroid99/grpc,jtattermusch/grpc,ncteisen/grpc,jboeuf/grpc,vjpai/grpc,simonkuang/grpc,firebase/grpc,firebase/grpc,chrisdunelm/grpc,nicolasnoble/grpc,donnadionne/grpc,donnadionne/grpc,thinkerou/grpc,chrisdunelm/grpc,thinkerou/grpc,firebase/grpc,carl-mastrangelo/grpc,jtattermusch/grpc,simonkuang/grpc,Vizerai/grpc,grpc/grpc,nicolasnoble/grpc,Vizerai/grpc,thinkerou/grpc,kpayson64/grpc,dgquintas/grpc,kpayson64/grpc,ctiller/grpc,carl-mastrangelo/grpc,jboeuf/grpc,donnadionne/grpc,Vizerai/grpc,mehrdada/grpc,kpayson64/grpc,murgatroid99/grpc,donnadionne/grpc,nicolasnoble/grpc,sreecha/grpc,jboeuf/grpc,muxi/grpc,nicolasnoble/grpc,stanley-cheung/grpc,dgquintas/grpc,ejona86/grpc,muxi/grpc,pszemus/grpc,grpc/grpc,jtattermusch/grpc,thinkerou/grpc,nicolasnoble/grpc,Vizerai/grpc,chrisdunelm/grpc,muxi/grpc,stanley-cheung/grpc,firebase/grpc,jboeuf/grpc,firebase/grpc,murgatroid99/grpc,murgatroid99/grpc,dgquintas/grpc,dgquintas/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,kpayson64/grpc,dgquintas/grpc,simonkuang/grpc,murgatroid99/grpc,Vizerai/grpc,stanley-cheung/grpc,nicolasnoble/grpc,mehrdada/grpc,firebase/grpc,kpayson64/grpc,carl-mastrangelo/grpc,grpc/grpc,dgquintas/grpc,dgquintas/grpc,donnadionne/grpc,mehrdada/grpc,pszemus/grpc,Vizerai/grpc,pszemus/grpc,dgquintas/grpc,ncteisen/grpc,vjpai/grpc,grpc/grpc,vjpai/grpc,mehrdada/grpc,sreecha/grpc,sreecha/grpc,ncteisen/grpc,muxi/grpc,jtattermusch/grpc,ctiller/grpc,sreecha/grpc,vjpai/grpc,jtattermusch/grpc,ncteisen/grpc,ctiller/grpc,ejona86/grpc,ejona86/grpc,nicolasnoble/grpc,mehrdada/grpc,ejona86/grpc,dgquintas/grpc,jtattermusch/grpc,jboeuf/grpc,nicolasnoble/grpc,ejona86/grpc,firebase/grpc,carl-mastrangelo/grpc,chrisdunelm/grpc,ctiller/grpc,firebase/grpc,stanley-cheung/grpc,grpc/grpc,thinkerou/grpc,thinkerou/grpc,grpc/grpc,ncteisen/grpc,dgquintas/grpc,simonkuang/grpc,pszemus/grpc,mehrdada/grpc,ctiller/grpc,chrisdunelm/grpc,thinkerou/grpc,stanley-cheung/grpc,murgatroid99/grpc,nicolasnoble/grpc,simonkuang/grpc,simonkuang/grpc,stanley-cheung/grpc,carl-mastrangelo/grpc,pszemus/grpc,Vizerai/grpc,donnadionne/grpc,pszemus/grpc,vjpai/grpc,sreecha/grpc,nicolasnoble/grpc,grpc/grpc,jtattermusch/grpc,jtattermusch/grpc,vjpai/grpc,pszemus/grpc,muxi/grpc,Vizerai/grpc,jtattermusch/grpc,donnadionne/grpc,grpc/grpc,pszemus/grpc,grpc/grpc,ncteisen/grpc,ncteisen/grpc,ctiller/grpc,donnadionne/grpc,thinkerou/grpc,grpc/grpc,pszemus/grpc,muxi/grpc,mehrdada/grpc,stanley-cheung/grpc,jtattermusch/grpc,vjpai/grpc,ctiller/grpc,thinkerou/grpc,stanley-cheung/grpc,donnadionne/grpc,Vizerai/grpc,chrisdunelm/grpc,mehrdada/grpc,sreecha/grpc,jboeuf/grpc,ncteisen/grpc,donnadionne/grpc,ejona86/grpc,ncteisen/grpc,murgatroid99/grpc,mehrdada/grpc,simonkuang/grpc,mehrdada/grpc,jboeuf/grpc,jboeuf/grpc,vjpai/grpc,carl-mastrangelo/grpc
test/core/bad_client/generate_tests.bzl
test/core/bad_client/generate_tests.bzl
#!/usr/bin/env python2.7 # Copyright 2015 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Generates the appropriate build.json data for all the bad_client tests.""" def test_options(): return struct() # maps test names to options BAD_CLIENT_TESTS = { 'badreq': test_options(), 'connection_prefix': test_options(), 'headers': test_options(), 'initial_settings_frame': test_options(), 'head_of_line_blocking': test_options(), 'large_metadata': test_options(), 'server_registered_method': test_options(), 'simple_request': test_options(), 'window_overflow': test_options(), 'unknown_frame': test_options(), } def grpc_bad_client_tests(): native.cc_library( name = 'bad_client_test', srcs = ['bad_client.cc'], hdrs = ['bad_client.h'], deps = ['//test/core/util:grpc_test_util', '//:grpc', '//:gpr', '//test/core/end2end:cq_verifier'] ) for t, topt in BAD_CLIENT_TESTS.items(): native.cc_test( name = '%s_bad_client_test' % t, srcs = ['tests/%s.cc' % t], deps = [':bad_client_test'], )
#!/usr/bin/env python2.7 # Copyright 2015 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Generates the appropriate build.json data for all the bad_client tests.""" load("//bazel:grpc_build_system.bzl", "grpc_cc_test", "grpc_cc_library") def test_options(): return struct() # maps test names to options BAD_CLIENT_TESTS = { 'badreq': test_options(), 'connection_prefix': test_options(), 'headers': test_options(), 'initial_settings_frame': test_options(), 'head_of_line_blocking': test_options(), 'large_metadata': test_options(), 'server_registered_method': test_options(), 'simple_request': test_options(), 'window_overflow': test_options(), 'unknown_frame': test_options(), } def grpc_bad_client_tests(): grpc_cc_library( name = 'bad_client_test', srcs = ['bad_client.cc'], hdrs = ['bad_client.h'], deps = ['//test/core/util:grpc_test_util', '//:grpc', '//:gpr', '//test/core/end2end:cq_verifier'] ) for t, topt in BAD_CLIENT_TESTS.items(): grpc_cc_test( name = '%s_bad_client_test' % t, srcs = ['tests/%s.cc' % t], deps = [':bad_client_test'], )
apache-2.0
Python
59ac83e45116a97cfbdd7522f967337e73d51766
add cargo deny test
firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker
tests/integration_tests/build/test_dependencies.py
tests/integration_tests/build/test_dependencies.py
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 """Enforces controls over dependencies.""" import os import framework.utils as utils def test_licenses(): """Ensure license compatibility for Firecracker. For a list of currently allowed licenses checkout deny.toml in the root directory. @type: build """ toml_file = os.path.normpath( os.path.join( os.path.dirname(os.path.realpath(__file__)), '../../../Cargo.toml') ) utils.run_cmd('cargo deny --manifest-path {} check licenses'. format(toml_file))
apache-2.0
Python
c3f01d8b365e6d367b1a565e5ce59cf04eb1bac3
fix build
missionpinball/mpf-monitor
get_version.py
get_version.py
"""Return the short version string.""" from mpfmonitor._version import __short_version__ print("{}.x".format(__short_version__))
mit
Python
15d3692aee84432b6b7f8306505b3f59649fd6f9
Remove mimetype from the module_files table
Connexions/cnx-archive,Connexions/cnx-archive
cnxarchive/sql/migrations/20160128111115_mimetype_removal_from_module_files.py
cnxarchive/sql/migrations/20160128111115_mimetype_removal_from_module_files.py
# -*- coding: utf-8 -*- """\ - Move the mimetype value from ``module_files`` to ``files``. - Remove the ``mimetype`` column from the ``module_files`` table. """ from __future__ import print_function import sys def up(cursor): # Move the mimetype value from ``module_files`` to ``files``. cursor.execute("UPDATE files AS f SET media_type = mf.mimetype " "FROM module_files AS mf " "WHERE mf.fileid = f.fileid") # Warn about missing mimetype. cursor.execute("SELECT fileid, sha1 " "FROM files AS f " "WHERE f.fileid NOT IN (SELECT fileid FROM module_files)") rows = '\n'.join(['{}, {}'.format(fid, sha1) for fid, sha1 in cursor.fetchall()]) print("These files (fileid, sha1) do not have a corresponding " "module_files entry:\n{}\n".format(rows), file=sys.stderr) # Remove the ``mimetype`` column from the ``module_files`` table. cursor.execute("ALTER TABLE module_files DROP COLUMN mimetype") def down(cursor): # Add a ``mimetype`` column to the ``module_files`` table. cursor.execute("ALTER TABLE module_files ADD COLUMN mimetype TEXT") # Move the mimetype value from ``files`` to ``module_files``. print("Rollback cannot accurately replace mimetype values that " "were in the ``modules_files`` table.", file=sys.stderr) cursor.execute("UPDATE module_files AS mf SET mimetype = f.media_type " "FROM files AS f " "WHERE f.fileid = mf.fileid")
agpl-3.0
Python
67b5cd3f00ca57c4251dab65c5a6e15ab2be8a42
Create result.py
JeckLabs/aiorucaptcha
aiorucaptcha/result.py
aiorucaptcha/result.py
class ResultObject: def __init__(self, code, task_id): self.code = code self.task_id = task_id def __str__(self): return self.code
apache-2.0
Python
4a7a15359763cbd6956bd30bde7cd68b05b2b4a2
test _compare_and_pop_smallest
stephtzhang/algorithms
tests/test_huffman_codes.py
tests/test_huffman_codes.py
import sys import os sys.path.append(os.path.abspath(os.path.dirname(__file__) + '../..')) import unittest from huffman_codes import huffman_codes, Node, Queue, _compare_and_pop_smallest, \ _traverse_children_and_assign_codes class TestHuffmanCodes(unittest.TestCase): def test_compare_and_pop_smallest__first_q_smaller(self): q_1 = Queue() q_1.enqueue((None, 1)) q_2 = Queue() q_2.enqueue((None, 2)) output = _compare_and_pop_smallest(q_1, q_2) self.assertEqual(output[1], 1) def test_compare_and_pop_smallest__second_q_smaller(self): q_1 = Queue() q_1.enqueue((None, 1)) q_2 = Queue() q_2.enqueue((None, 2)) output = _compare_and_pop_smallest(q_2, q_1) self.assertEqual(output[1], 1) def test_compare_and_pop_smallest__first_q_empty(self): q_1 = Queue() q_2 = Queue() q_2.enqueue((None, 2)) output = _compare_and_pop_smallest(q_2, q_1) self.assertEqual(output[1], 2) def test_compare_and_pop_smallest__second_q_empty(self): q_1 = Queue() q_1.enqueue((None, 1)) q_2 = Queue() output = _compare_and_pop_smallest(q_2, q_1) self.assertEqual(output[1], 1) def test_traverse_children_and_assign_codes(self): pass def test_huffman_codes(self): pass
mit
Python
43d3158e536b7cae3f427f655b08aa8b4c24fe96
Add an iter_entry_points style test
mozilla/spicedham,mozilla/spicedham
tests/test_spicedham_api.py
tests/test_spicedham_api.py
from unittest import TestCase from spicedham import Spicedham from mock import Mock, patch class TestSpicedHamAPI(TestCase): @patch('spicedham.Spicedham._classifier_plugins') def test_classify(self, mock_plugins): sh = Spicedham() plugin0 = Mock() plugin0.classify.return_value = .5 plugin1 = Mock() plugin1.classify.return_value = .75 plugin2 = Mock() plugin2.classify.return_value = None mock_plugins.__iter__.return_value = [plugin0, plugin1, plugin2] # Test when some plugins return numbers and some return None value = sh.classify(['classifying', 'data']) self.assertEqual(value, 0.625) # Test when all plugins return one plugin0.classify.return_value = None plugin1.classify.return_value = None value = sh.classify(['classifying', 'data']) self.assertEqual(value, 0) @patch('spicedham.iter_entry_points') @patch('spicedham.Spicedham.backend') def test_load_backend(self, mock_backend, mock_iter_entry_points): sh = Spicedham() mock_backend = None mock_django_orm = Mock() mock_iter_entry_points = Mock() # mock_plugin_class = Mock() # mock_plugin_object = Mock() # mock_plugin_class.return_value = mock_plugin_object # mock_django_orm.load.return_value = mock_plugin_class() #h= mock_django_orm.name = 'djangoorm' # mock_sqlalchemy_orm = Mock() #mock_sqlalchemy_orm.name = 'sqlalchemy' #mock_plugin_class.return_value = mock_plugin_object #mock_sqlalchemy_orm.load.return_value = mock_plugin_class() #mock_iter_entry_points.__iter__.return_value = [mock_django_orm, # mock_sqlalchemy_orm] # Test the first run with the django_orm plugin ret = sh._load_backend() print 'mm', mock_iter_entry_points.mock_calls self.assertEqual(ret, mock_plugin_object) # Test the second run with the django_orm plugin ret = sh._load_backend() self.assertEqual(ret, mock_plugin_class.return_value) # rest the backend for the next test mock_backend = None mock_iter_entry_points.return_value = [mock_django_orm, mock_sqlalchemy_orm] # Test the first run with the sqlalchemy plugin ret = sh._load_backend() self.assertEqual(ret, mock_plugin_class.return_value) # Test the second run with the sqlalchemy plugin ret = sh._load_backend() self.assertEqual(ret, mock_plugin_class.return_value) @patch('spicedham.Spicedham._load_backend') @patch('spicedham.iter_entry_points') def test_load_plugins(self, mock_iter_entry_points, mock_load_backend): #plugin0 = Mock() plugin0Object = Mock() #plugin0Class = Mock(return_value=plugin0Object) #plugin0.load = Mock(return_value=plugin0Class) #plugin1 = Mock() plugin1Object = Mock() #plugin1Class = Mock(return_value=plugin1Object) #plugin1.load = Mock(return_value=plugin1Class) #plugin2 = Mock() plugin2Object = Mock() #plugin2Class = Mock(return_value=plugin2Object) #plugin2.load = Mock(return_value=plugin2Class) input_plugins = [plugin0Object, plugin1Object, plugin2Object] expected_plugins = [plugin0Object.load, plugin1Object.load, plugin2Object.load] mock_iter_entry_points.return_value = input_plugins self.assertEqual(spicedham._plugins, None) # now load the plugins load_plugins() mock_iter_entry_points.assert_called_with(group='spicedham.classifiers', name=None) self.assertEqual(spicedham._plugins, expected_plugins) # now load the plugins again, they should not change mock_iter_entry_points.called = False load_plugins() self.assertEqual(mock_iter_entry_points.called, False) self.assertEqual(spicedham._plugins, input_plugins)
mpl-2.0
Python
ba49a66b401bc32e57abede6adc5a0f933e8834a
Add tests for view helpers
IRI-Research/django-cas-ng,11h42/django-cas-ng,wrygiel/django-cas-ng,nitmir/django-cas-ng,bgroff/django-cas-ng,forcityplatform/django-cas-ng,pbaehr/django-cas-ng,11h42/django-cas-ng,mingchen/django-cas-ng
tests/test_views_helpers.py
tests/test_views_helpers.py
from django.test import RequestFactory from django_cas_ng.views import ( _service_url, _redirect_url, _login_url, _logout_url, ) # # _service_url tests # def test_service_url_helper(): factory = RequestFactory() request = factory.get('/login/') actual = _service_url(request) expected = 'http://testserver/login/' assert actual == expected def test_service_url_helper_as_https(): factory = RequestFactory() request = factory.get('/login/', secure=True) actual = _service_url(request) expected = 'https://testserver/login/' assert actual == expected def test_service_url_helper_with_redirect(): factory = RequestFactory() request = factory.get('/login/', secure=True) actual = _service_url(request, redirect_to='https://testserver/landing-page/') expected = 'https://testserver/login/?next=https%3A%2F%2Ftestserver%2Flanding-page%2F' assert actual == expected # # _redirect_url tests # def test_redirect_url_with_url_as_get_parameter(): factory = RequestFactory() request = factory.get('/login/', data={'next': '/landing-page/'}, secure=True) actual = _redirect_url(request) expected = '/landing-page/' assert actual == expected def test_redirect_url_falls_back_to_cas_redirect_url_setting(settings): settings.CAS_IGNORE_REFERER = True settings.CAS_REDIRECT_URL = '/landing-page/' factory = RequestFactory() request = factory.get('/login/', secure=True) actual = _redirect_url(request) expected = '/landing-page/' assert actual == expected def test_params_redirect_url_preceeds_settings_redirect_url(settings): settings.CAS_IGNORE_REFERER = True settings.CAS_REDIRECT_URL = '/landing-page/' factory = RequestFactory() request = factory.get('/login/', data={'next': '/override/'}, secure=True) actual = _redirect_url(request) expected = '/override/' assert actual == expected def test_redirect_url_falls_back_to_http_referrer(settings): settings.CAS_IGNORE_REFERER = False settings.CAS_REDIRECT_URL = '/wrong-landing-page/' factory = RequestFactory() request = factory.get('/login/', secure=True, HTTP_REFERER='/landing-page/') actual = _redirect_url(request) expected = '/landing-page/' assert actual == expected def test_redirect_url_strips_domain_prefix(settings): settings.CAS_IGNORE_REFERER = True settings.CAS_REDIRECT_URL = 'https://testserver/landing-page/' factory = RequestFactory() request = factory.get('/login/', secure=True) actual = _redirect_url(request) expected = '/landing-page/' assert actual == expected # # _login_url tests # def test_login_url_helper(settings): settings.CAS_RENEW = False settings.CAS_EXTRA_LOGIN_PARAMS = False settings.CAS_SERVER_URL = 'http://www.example.com/cas/' actual = _login_url('http://testserver/') expected = 'http://www.example.com/cas/login?service=http%3A%2F%2Ftestserver%2F' assert actual == expected def test_login_url_helper_with_extra_params(settings): settings.CAS_RENEW = False settings.CAS_EXTRA_LOGIN_PARAMS = {'test': '1234'} settings.CAS_SERVER_URL = 'http://www.example.com/cas/' actual = _login_url('http://testserver/') # since the dictionary of parameters is unordered, we dont know which # parameter will be first, so just check that both are in the url. assert 'service=http%3A%2F%2Ftestserver%2F' in actual assert 'test=1234' in actual def test_login_url_helper_with_renew(settings): settings.CAS_RENEW = True settings.CAS_EXTRA_LOGIN_PARAMS = None settings.CAS_SERVER_URL = 'http://www.example.com/cas/' actual = _login_url('http://testserver/') # since the dictionary of parameters is unordered, we dont know which # parameter will be first, so just check that both are in the url. assert 'service=http%3A%2F%2Ftestserver%2F' in actual assert 'renew=true' in actual # # _login_url tests # def test_logout_url_helper(settings): settings.CAS_SERVER_URL = 'https://www.example.com/cas/' factory = RequestFactory() request = factory.get('/logout/') actual = _logout_url(request) expected = 'https://www.example.com/cas/logout' assert actual == expected def test_logout_url_helper_with_redirect(settings): settings.CAS_SERVER_URL = 'https://www.example.com/cas/' factory = RequestFactory() request = factory.get('/logout/') actual = _logout_url(request, next_page='/landing-page/') expected = 'https://www.example.com/cas/logout?url=http%3A%2F%2Ftestserver%2Flanding-page%2F' assert actual == expected
mit
Python
3f84a3cb50e18ce9df96a9173d0be180633aad0d
Add polynomial learning example
SamuelWarner/Python-ML
Examples/polynomial_approximation.py
Examples/polynomial_approximation.py
""" Example of neural network learning a polynomial equation. Test polynomial is f(x) = (6x^2 + 3x) ÷ (3x) Training is run on x values from 1.0 to 100.0 """ from mazex import MazeX import numpy as np import random import math import matplotlib.pyplot as plt # Create list to store how close networks guesses are graph_data = [] # Create Neural Network net = MazeX([1, 20, 4, 1], ["relu", "relu", 'lin'], learning_constant=0.00001) # test how close the network is to the correct answer given x = 12 and log the result for the graph def check(run): guess = net.forward(np.array([[12.0]])) print(f"run {run} OFF BY: {25 - guess[0][0]}") graph_data.append(25 - guess[0][0]) # run a bunch of training steps on random values to help network learn the polynomial for i in range(100): t = random.uniform(1.0, 100.0) ans = ((6 * math.pow(t, 2)) + (3 * t)) / (3 * t) Y = np.full((1, 1), ans) X = np.full((1, 1), t) net.train(X, Y) check(i) # plot the training data for visual feedback of learning progress. Saves graph to same directory as script plt.plot(graph_data) plt.ylabel('Error') plt.xlabel("training run") plt.title('Error over time') plt.savefig(f'Polynomial_approximation.png')
mit
Python
abe40e3c82ef1f351275a59b2e537f43530caa0c
Clean up db script (remove articles older than two days).
hw3jung/Gucci,hw3jung/Gucci
app/cleanup_stories.py
app/cleanup_stories.py
from pymongo import MongoClient from fetch_stories import get_mongo_client, close_mongo_client from bson import ObjectId from datetime import datetime, timedelta def remove_old_stories(): client = get_mongo_client() db = client.get_default_database() article_collection = db['articles'] two_days_ago = datetime.utcnow() - timedelta(days=2) two_days_ago = ObjectId.from_datetime(two_days_ago) query = { '_id' : { '$lt' : two_days_ago} } article_collection.remove(query) close_mongo_client(client) def main(): remove_old_stories() if __name__ == '__main__': main()
mit
Python
ba590d28810409fa57783e6d29a651790f865e5c
create base api exceptions module
apipanda/openssl,apipanda/openssl,apipanda/openssl,apipanda/openssl
apps/api/exceptions.py
apps/api/exceptions.py
import json from tastypie.exceptions import TastypieError from tastypie.http import HttpResponse class CustomBadRequest(TastypieError): """ This exception is used to interrupt the flow of processing to immediately return a custom HttpResponse. """ def __init__(self, success=False, code="", message=""): self._response = { "error": { "success": success or False, "code": code or "not_provided", "message": message or "No error message was provided."}} @property def response(self): return HttpResponse( json.dumps(self._response), content_type='application/json')
mit
Python
b1a5764956e0f569b4955dbf43e5656873c903f6
Create new package. (#7649)
LLNL/spack,mfherbst/spack,EmreAtes/spack,EmreAtes/spack,LLNL/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,tmerrick1/spack,krafczyk/spack,tmerrick1/spack,LLNL/spack,LLNL/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,EmreAtes/spack,tmerrick1/spack,mfherbst/spack
var/spack/repos/builtin/packages/soapdenovo-trans/package.py
var/spack/repos/builtin/packages/soapdenovo-trans/package.py
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class SoapdenovoTrans(MakefilePackage): """SOAPdenovo-Trans is a de novo transcriptome assembler basing on the SOAPdenovo framework, adapt to alternative splicing and different expression level among transcripts.""" homepage = "http://soap.genomics.org.cn/SOAPdenovo-Trans.html" url = "https://github.com/aquaskyline/SOAPdenovo-Trans/archive/1.0.4.tar.gz" version('1.0.4', 'a3b00b0f743b96141c4d5f1b49f2918c') build_directory = 'src' def edit(self, spec, prefix): with working_dir(self.build_directory): makefile = FileFilter('Makefile') makefile.filter('CFLAGS= -O3 -fomit-frame-pointer -static', 'CFLAGS= -O3 -fomit-frame-pointer') def build(self, spec, prefix): with working_dir(self.build_directory): make() make('127mer=1', parallel=False) def install(self, spec, prefix): install_tree('.', prefix.bin)
lgpl-2.1
Python
402004b1a0612e5b4eeb703f3787dd1b7f3def30
make auto migration
DrMartiner/django-yandex-kassa,VladimirFilonov/django-yandex-kassa,VladimirFilonov/django-yandex-kassa
yandex_kassa/migrations/0004_auto_20151209_0940.py
yandex_kassa/migrations/0004_auto_20151209_0940.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('yandex_kassa', '0003_auto_20151116_1530'), ] operations = [ migrations.AlterModelOptions( name='payment', options={'ordering': ('-created',), 'verbose_name': '\u043f\u043b\u0430\u0442\u0435\u0436', 'verbose_name_plural': '\u041f\u043b\u0430\u0442\u0435\u0436\u0438'}, ), migrations.AlterField( model_name='payment', name='scid', field=models.PositiveIntegerField(default=528277, verbose_name=b'\xd0\x9d\xd0\xbe\xd0\xbc\xd0\xb5\xd1\x80 \xd0\xb2\xd0\xb8\xd1\x82\xd1\x80\xd0\xb8\xd0\xbd\xd1\x8b'), ), migrations.AlterField( model_name='payment', name='shop_id', field=models.PositiveIntegerField(default=104674, verbose_name=b'ID \xd0\xbc\xd0\xb0\xd0\xb3\xd0\xb0\xd0\xb7\xd0\xb8\xd0\xbd\xd0\xb0'), ), ]
mit
Python
7ec4133b11ba91541e9ec9895e39a2c402c63087
define the AVB loss separately
gdikov/vae-playground
avb/models/avb_loss.py
avb/models/avb_loss.py
import keras.backend as ker from keras.layers import Layer from keras.losses import categorical_crossentropy class AVBLossLayer(Layer): def __init__(self, **kwargs): self.is_placeholder = True super(AVBLossLayer, self).__init__(**kwargs) @staticmethod def avb_loss(discrim_output_posterior, discrim_output_posterior_prior, data_log_probs): # 1/m * sum_{i=1}^m log p(x_i|z), where z = encoder(x_i, epsilon_i) reconstruction_log_likelihood = ker.mean(ker.sum(data_log_probs, axis=1)) # The decoder tries to maximise the reconstruction data log-likelihood decoder_loss = -reconstruction_log_likelihood # The encoder tries to minimize the discriminator output encoder_loss = ker.mean(discrim_output_posterior) # The dicriminator loss is the GAN loss with input from the prior and posterior distributions discriminator_loss = ker.mean(categorical_crossentropy(y_true=ker.ones_like(discrim_output_posterior), y_pred=discrim_output_posterior) + categorical_crossentropy(y_true=ker.zeros_like(discrim_output_posterior_prior), y_pred=discrim_output_posterior_prior)) return ker.mean(encoder_loss + decoder_loss + discriminator_loss) def call(self, inputs, **kwargs): discrim_output_posterior, discrim_output_prior, decoder_output_log_probs = inputs loss = self.avb_loss(discrim_output_posterior, discrim_output_prior, decoder_output_log_probs) self.add_loss(loss, inputs=inputs) # unused output return inputs[0]
mit
Python
fd54c28be8d9ffd7e5711035bf5b5e1b7fe332cc
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/ab2e190c2bfe60b3b738c125ca9db1a2785cdcaa.
tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "ab2e190c2bfe60b3b738c125ca9db1a2785cdcaa" TFRT_SHA256 = "b097063dd10c010e827e58cc8e5a0e4008d99bcba1dcb20259c8ef890620b9b5" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], )
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "285e48bc47db23a479637fd1e2767b9a35dc2c9b" TFRT_SHA256 = "6f0067d0cb7bb407caeef060603b6e33f1231cddf1ce4ce2ebce027dc418764f" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], )
apache-2.0
Python
c9c00a6a5ab267ab56dd147e6542cae6566061d8
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/dc109b725d8f36f8c7db7847f0c95a819c43f9e9.
tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "dc109b725d8f36f8c7db7847f0c95a819c43f9e9" TFRT_SHA256 = "e6a6359ecd731f7208f32402fac9bf874b26855497c0252fcddc44e5133320df" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)), # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "4bcf968d66a6bb2899b9d99917b916f6ec04c327" TFRT_SHA256 = "9bd2cc2e7003f73f767e138ae4776b43d15ca286f0f85ad374ec5f8aaeab1aa4" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)), # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
apache-2.0
Python
d81a2b0328c86165b09c2d41aa2a4684c75388cd
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/78537f15f4873bbed59258bed4442225303f462a.
tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "78537f15f4873bbed59258bed4442225303f462a" TFRT_SHA256 = "87526ed2a287d7809b2cadf82f9db94994b0019635d431f2fc9c3db2bd4a31cc" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)), # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "a2f5e07760d2a888370d0686546b757ee9628494" TFRT_SHA256 = "70653b94faa603befef83457482c8a1151fa529b3215124e18a0f97592d5ad05" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)), # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
apache-2.0
Python
8c1b20941c1216bb56fa55fe881962d2ea883366
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/c68238f982305e3618a2b5347e1e0a5663898c90.
gautam1858/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "c68238f982305e3618a2b5347e1e0a5663898c90" TFRT_SHA256 = "b28ed95058c101a9d3203ddbaa271044de984f6b49c5609124e1cb4ae0b3e165" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "377c20166e8e1b5124493c1433b1df34ca62cf3f" TFRT_SHA256 = "f0c3c03e7d9ca2e10c3256f28bf9c0aa0aa26d9aa4da539c00532ee5217ba7ba" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
apache-2.0
Python
5a8fde172f0fc7aff841e8059927ff126712b321
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/feffe7beb261f6dfe9af083e8f46dfea293ded54.
tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "feffe7beb261f6dfe9af083e8f46dfea293ded54" TFRT_SHA256 = "830492c8a9884e5ca84b15a4da953491f74b2ffbd45656352d58b624e881b9b7" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "509cf2f10beb666002ece6a7b968fe2c7c0c1e4b" TFRT_SHA256 = "14b22d39d3eebcf255e4dd8ee8630b4da3ecc786f5053adf9c94a2e42362ee0c" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], # A patch file can be provided for atomic commits to both TF and TFRT. # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'. patch_file = None, )
apache-2.0
Python
e42862ce7bde45e90bec0980f3c35c5cef5c65b6
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/47a1de40f17e70f901238edfe99dc510a5db797a.
Intel-Corporation/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,paolodedios/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,yongtang/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,sarvex/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,karllessard/tensorflow,yongtang/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,sarvex/tensorflow,karllessard/tensorflow,sarvex/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,karllessard/tensorflow
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "47a1de40f17e70f901238edfe99dc510a5db797a" TFRT_SHA256 = "87631491c3fdd34b4d00b6999274468b89a98f23113aeafa15b53c3a7517fc36" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], )
"""Provides the repository macro to import TFRT.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(): """Imports TFRT.""" # Attention: tools parse and update these lines. TFRT_COMMIT = "033f079420053002701271e4173bdcaf21bd1b73" TFRT_SHA256 = "15c1c5a3617b91322d4ef96ce884676d27164cf94211f83bc1fcec50ab96aad4" tf_http_archive( name = "tf_runtime", sha256 = TFRT_SHA256, strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT), urls = [ "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT), ], )
apache-2.0
Python
5ab54cf353cece6a8754a1869d8f342ba0a8b351
Add a script to find and compare F* execution times
project-everest/vale,project-everest/vale,project-everest/vale,project-everest/vale
tools/scripts/collect-fstar-times.py
tools/scripts/collect-fstar-times.py
#!/usr/bin/python import argparse import os import glob import re import time import sys import fnmatch import pickle from prettytable import PrettyTable # Install via: easy_install PrettyTable def find_fstar_output_files(directory): matches = [] extensions = ["vfsti", "vfst"] # Based on: https://stackoverflow.com/a/2186565 for root, dirnames, filenames in os.walk(directory): for ext in extensions: for filename in fnmatch.filter(filenames, '*.' + ext): matches.append(os.path.join(root, filename)) return matches def parse_fstar_output(filename): time = 0 found = False with open(filename, "r") as f: for line in f.readlines(): result = re.search("Verified.*\((\d+) milliseconds\)", line) if result: time += int(result.group(1)) found = True if found: return time else: return None def collect_times_dir(d): files = find_fstar_output_files(d) times = {} for f in files: times[f] = parse_fstar_output(f) return times def collect_times(directories): times = {} for d in directories: times.update(collect_times_dir(d)) return times def display_times(times): tab = PrettyTable(["Filename", "Time", "Full Path"]) tab.align["Filename"] = "l" tab.align["Time"] = "r" tab.align["FullPath"] = "l" total_time = 0 for f in sorted(times.keys()): filename = os.path.basename(f) tab.add_row([filename, times[f], f]) if not times[f] is None: total_time += times[f] tab.add_row(["", "", ""]) tab.add_row(["Total", total_time, ""]) print(tab) def store_times(times, label): pickle_file = "times." + label + ".pickle" if not os.path.isfile(pickle_file): with open(pickle_file, "wb") as pickler: pickle.dump(times, pickler) else: print "WARNING: Found existing pickled file %s. No data written. Consider moving or deleting it." % pickle_file def load_times(filename): with open(filename, "rb") as pickler: return pickle.load(pickler) def compute_diff(times1, times2): diffs = {} for f,t in times1.items(): if f in times2 and not t is None: diffs[f] = t - times2[f] return diffs def display_diffs(times, diffs): tab = PrettyTable(["Filename", "t1 time", "delta", "delta \%","Full Path"]) tab.align["Filename"] = "l" tab.align["t1 time"] = "r" tab.align["delta"] = "r" tab.align["delta \%"] = "r" tab.align["FullPath"] = "l" tab.sortby = "delta" total_time = 0 total_delta = 0 for f in sorted(times.keys()): filename = os.path.basename(f) delta = "n/a" delta_percent = "n/a" if f in diffs: delta = diffs[f] delta_percent = "%0.1f" % (delta / float(times[f])) tab.add_row([filename, times[f], delta, delta_percent, f]) if not times[f] is None: total_time += times[f] total_delta += delta tab.add_row(["", "", "", "", ""]) #tab.add_row(["Total", total_time, total_delta, total_delta / float(total_time), ""]) print(tab) def main(): parser = argparse.ArgumentParser(description= 'Collect and summarize F* verification times') parser.add_argument('--dir', action='append', required=False, help='Collect all results in this folder and its subfolders') parser.add_argument('--label', action='store', required=False, help='Label for file containing the results') parser.add_argument('--t1', action='store', required=False, help='File of times to compare to t2') parser.add_argument('--t2', action='store', required=False, help='File of times to compare to t1') args = parser.parse_args() if (not args.dir is None) and (not args.label is None): times = collect_times(args.dir) display_times(times) store_times(times, args.label) sys.exit(0) if (not args.t1 is None) and (not args.t2 is None): times1 = load_times(args.t1) times2 = load_times(args.t2) diffs = compute_diff(times1, times2) display_diffs(times1, diffs) sys.exit(0) print("Invalid or insufficient arguments supplied. Try running with -h") if (__name__=="__main__"): main()
apache-2.0
Python
a6cc742a7272d1138031e26c61fd10617e6b0ac1
Initialize transpositionTest
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
books/CrackingCodesWithPython/Chapter09/transpositionTest.py
books/CrackingCodesWithPython/Chapter09/transpositionTest.py
# Transposition Cipher Test # https://www.nostarch.com/crackingcodes/ (BSD Licensed) import random, sys, transpositionEncrypt, transpositionDecrypt def main(): random.seed(42) # Set the random "seed" to a static value. for i in range(20): # Run 20 tests. # Generate random messages to test. # The message will have a random length: message = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' * random.randint(4, 40) # Convert the message string to a list to shuffle it: message = list(message) random.shuffle(message) message = ''.join(message) # Convert the list back to a string. print('Test #%s: %s..."' % (i + 1, message[:50])) # Check all possible keys for each message: for key in range(1, int(len(message)/2)): encrypted = transpositionEncrypt.encryptMessage(key, message) decrypted = transpositionDecrypt.decryptMessage(key, encrypted) # If the decryption doesn't match the original message, display # an error message and quit: if message != decrypted: print('Mismatch with key %s and message %s.' % (key, message)) print('Decrypted as: ' + decrypted) sys.exit() print('Transposition cipher test passed.') # If transpositionTest.py is run (instead of imported as a module) call # the main() function: if __name__ == '__main__': main()
mit
Python
1c7daf0bd9801885d7740620b3e81faa03ce49d4
add sign/verify json tests
matrix-org/matrix-python-sdk
test/crypto/olm_device_test.py
test/crypto/olm_device_test.py
from copy import deepcopy from matrix_client.client import MatrixClient from matrix_client.crypto.olm_device import OlmDevice HOSTNAME = 'http://example.com' class TestOlmDevice: cli = MatrixClient(HOSTNAME) user_id = '@user:matrix.org' device_id = 'QBUAZIFURK' device = OlmDevice(cli.api, user_id, device_id) signing_key = device.olm_account.identity_keys['ed25519'] def test_sign_json(self): example_payload = { "name": "example.org", "unsigned": { "age_ts": 922834800000 } } saved_payload = deepcopy(example_payload) signed_payload = self.device.sign_json(example_payload) signature = signed_payload.pop('signatures') # We should not have modified the payload besides the signatures key assert example_payload == saved_payload key_id = 'ed25519:' + self.device_id assert signature[self.user_id][key_id] def test_verify_json(self): example_payload = { "test": "test", "unsigned": { "age_ts": 922834800000 }, "signatures": { "@user:matrix.org": { "ed25519:QBUAZIFURK": ("WI7TgwqTp4YVn1dFWmDu7xrJvEikEzAbmoqyM5JY5t0P" "6fVaiMFAirmwb13GzIyYDLR+nQfoksNBcrp7xSaMCA") } } } saved_payload = deepcopy(example_payload) signing_key = "WQF5z9b4DV1DANI5HUMJfhTIDvJs1jkoGTLY6AQdjF0" assert self.device.verify_json(example_payload, signing_key, self.user_id, self.device_id) # We should not have modified the payload assert example_payload == saved_payload # Try to verify an object that has been tampered with example_payload['test'] = 'test1' assert not self.device.verify_json(example_payload, signing_key, self.user_id, self.device_id) # Try to verify invalid payloads example_payload['signatures'].pop(self.user_id) assert not self.device.verify_json(example_payload, signing_key, self.user_id, self.device_id) example_payload.pop('signatures') assert not self.device.verify_json(example_payload, signing_key, self.user_id, self.device_id) def test_sign_verify(self): example_payload = { "name": "example.org", } signed_payload = self.device.sign_json(example_payload) assert self.device.verify_json(signed_payload, self.signing_key, self.user_id, self.device_id)
apache-2.0
Python
c4ffd77a56e09f3b418e6d13e8339fe693fffbdb
add fasd_cleanup script
ratheesh/dot-files,ratheesh/dot-files
misc/fasd_clean.py
misc/fasd_clean.py
#/usr/bin/env python # Copyright (C) 2015 Ratheesh S<[email protected]> # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. import os db_file = "/home/ratheesh/.fasd" purged_items = 0 try: f = open(db_file, "r+") except IOError: print 'ERROR: No File found: %s' % db_file exit(1) d = f.readlines() f.close() try: f = open(db_file, "w+") except IOError: print 'ERROR: No File found: %s' % db_file exit(1) print "Cleaning fasd database ..." for i in d: path, sep, misc = i.partition('|') if os.path.exists(path): f.write(i) else: print 'Removing %s' % path purged_items += 1 # increment purged items f.close() if purged_items == 0: print "fasd database is clean!" else: print "---------------------------------------" print "No. of Purged Items: %d" % purged_items # End of File
apache-2.0
Python
2a45679c02e74ce7a63e259b1475d4190086084e
Add errors to zombase
mozaiques/zombase,ouihelp/yesaide
zombase/errors.py
zombase/errors.py
# -*- coding: utf-8 -*- class ZombaseRuntimeError(Exception): pass
mit
Python
629c9e330e6114680f22af125252d95fb6989201
update migrations for link manager
veselosky/webquills,veselosky/webquills,veselosky/webquills
webquills/linkmgr/migrations/0002_alter_linkcategory_site.py
webquills/linkmgr/migrations/0002_alter_linkcategory_site.py
# Generated by Django 3.2 on 2021-06-07 11:11 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('wqsites', '0001_initial'), ('linkmgr', '0001_initial'), ] operations = [ migrations.AlterField( model_name='linkcategory', name='site', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='link_lists', to='wqsites.site', verbose_name='site'), ), ]
apache-2.0
Python
24e2ddfd49aa2c05879460baeb67ed6cc75ffa87
fix benchmark script
rabernat/pyqg,jamesp/pyqg,crocha700/pyqg,pyqg/pyqg
benchmark/benchmark.py
benchmark/benchmark.py
import pyqg import time import cProfile import pstats import numpy as np tmax = 8000*1000 dtfac = 64 * 8000. mynx = [32, 64, 128, 256, 512, 1024, 2048] mynth = [1,2,4,8,16,32] res = np.zeros((len(mynx), 5)) print 'nx, threads, timesteps, time' for j, nx in enumerate(mynx): dt = dtfac / nx #for i, (use_fftw, nth) in enumerate([(False, 1), (True, 1), # (True, 2), (True, 4), (True, 8)]): for i, nth in enumerate(mynth): m = pyqg.QGModel(nx=nx, tmax=tmax, dt=dt, ntd=nth, # no output twrite=np.inf, # no time average taveint=np.inf,) tic = time.time() m.run() toc = time.time() tottime = toc-tic #res[j,i] = tottime #print 'nx=%3d, fftw=%g, threads=%g: %g' % (nx, use_fftw, nth, tottime) print '%3d, %3d, %8d, %10.4f' % (nx, nth, m.tc, tottime) # # profiling # prof = cProfile.Profile() # prof.run('m.run()') # p = pstats.Stats(prof) # p.sort_stats('cum').print_stats(0.3)
from pyqg import qg_model, model import time import cProfile import pstats import numpy as np tmax = 104000000 dtfac = (64 * 8000.) mynx = [32, 64, 128, 256] res = np.zeros((len(mynx), 5)) for j, nx in enumerate(mynx): dt = dtfac / nx for i, (use_fftw, nth) in enumerate([(False, 1), (True, 1), (True, 2), (True, 4), (True, 8)]): m = qg_model.QGModel(nx=64, tmax=tmax, dt=dt, use_fftw=use_fftw, ntd=nth) tic = time.time() m.run() toc = time.time() tottime = toc-tic res[j,i] = tottime print 'nx=%3d, fftw=%g, threads=%g: %g' % (nx, use_fftw, nth, tottime) # # profiling # prof = cProfile.Profile() # prof.run('m.run()') # p = pstats.Stats(prof) # p.sort_stats('cum').print_stats(0.3)
mit
Python
078727dcaba9f7861f84ab7ef61e653f28253226
add script
grinich/mdmvendorsign,tjmoney0201/mdmvendorsign
mdm_vendor_sign.py
mdm_vendor_sign.py
# This is based loosely on Softthinker's java code found here # http://www.softhinker.com/in-the-news/iosmdmvendorcsrsigning # fuck java import argparse from plistlib import writePlistToString import os import subprocess from base64 import b64encode import sys import urllib2 def p(s): sys.stdout.write(s) sys.stdout.flush() def mdm_vendor_sign(): """ This utility will create a properly encoded certifiate signing request that you can upload to identity.apple.com/pushcert """ parser = argparse.ArgumentParser(description=mdm_vendor_sign.__doc__) parser.add_argument('--key', help='Private key', required=True) parser.add_argument('--csr', help='Certificate signing request', required=True) parser.add_argument('--mdm', help='MDM vendor certificate', required=True) parser.add_argument('--out', help='Output filename', required=False) cli_args = vars(parser.parse_args()) # Verify CSR # openssl req -text -noout -verify -in CSR.csr p('Verifying %s ... ' % cli_args['csr']) csr_file = open(cli_args['csr']).read() args = ['openssl', 'req', '-noout', '-verify' ] command = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) output, error = command.communicate(input = csr_file) if output.rstrip().split('\n')[0] == 'verify OK': p('OK\n') else: p('FAILED\n') return # Verify private key # openssl rsa -in privateKey.key -check p('Verifying %s ... ' % cli_args['key']) key_file = open(cli_args['key']).read() args = ['openssl', 'rsa', '-check', '-noout' ] command = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) output, error = command.communicate(input = key_file) if output.rstrip().split('\n')[0] == 'RSA key ok': p('OK\n') else: p('FAILED\n\n') print """If you don't have the plain private key already, you need to extract it from the pkcs12 file... First convert to PEM openssl pkcs12 -in filename.p12 -nocerts -out key.pem Then export the certificate file from the pfx file openssl pkcs12 -in filename.pfx -clcerts -nokeys -out cert.pem Lastly Remove the passphrase from the private key openssl rsa -in key.pem -out the_private_key.key """ return # Verify MDM vendor certificate # openssl x509 -noout -in mdm.cer -inform DER p('Verifying %s ... ' % cli_args['mdm']) mdm_cert_file = open(cli_args['mdm']).read() args = ['openssl', 'x509', '-noout', '-inform', 'DER' ] command = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) output, error = command.communicate(input = mdm_cert_file) if len(output) == 0: p('OK\n') else: p('FAILED\n') return # Convert CSR to DER format # openssl req -inform pem -outform der -in customer.csr -out customer.der p('Converting %s to DER format... ' % cli_args['csr']) args = ['openssl', 'req', '-inform', 'pem', '-outform', 'der' ] command = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) output, error = command.communicate(input = csr_file) if error: p('FAILED\n') return p('OK\n') csr_der = output csr_b64 = b64encode(csr_der) # Sign the CSR with the private key # openssl sha1 -sign private_key.key -out signed_output.rsa data_to_sign.txt p('Signing CSR with private key... ') args = ['openssl', 'sha1', '-sign', cli_args['key'] ] command = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) output, error = command.communicate(input = csr_der) if error: p('FAILED\n') return p('OK\n') signature_bytes = output signature = b64encode(signature_bytes) def cer_to_pem(cer_data): # openssl x509 -inform der -in mdm.cer -out mdm.pem # -in and -out flags are handled by STDIN and STDOUT args = ['openssl', 'x509', '-inform', 'der' ] command = subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) output, error = command.communicate(input = cer_data) if error: p('Error converting from cer to pem: %s' % error) return output # TODO : Probably should verify these too p('Downloading WWDR intermediate certificate...') intermediate_cer = urllib2.urlopen('https://developer.apple.com/certificationauthority/AppleWWDRCA.cer').read() p(' converting to pem...') intermediate_pem = cer_to_pem(intermediate_cer) p('OK\n') p('Downloading Apple Root Certificate...') root_cer = urllib2.urlopen('http://www.apple.com/appleca/AppleIncRootCertificate.cer').read() p(' converting to pem...') root_pem = cer_to_pem(root_cer) p('OK\n') mdm_pem = cer_to_pem(mdm_cert_file) p('Finishing...') plist_dict = dict( PushCertRequestCSR = csr_b64, PushCertCertificateChain = mdm_pem + intermediate_pem + root_pem, PushCertSignature = signature ) plist_xml = writePlistToString(plist_dict) plist_b64 = b64encode(plist_xml) output_filename = cli_args['out'] if cli_args['out'] else 'plist_encoded' write_path = os.path.join(os.getcwd(), output_filename) output = open(write_path, 'wb') output.write(plist_b64) output.close() p('DONE\n\nGo upload file \'%s\' to identity.apple.com/pushcert !\n' % output_filename) if __name__=="__main__": mdm_vendor_sign()
mit
Python
2347ee253f04fa87b28206b0ec00fd2a3fffb49f
Create hello_market_maker.py
MKTSTK/Runover
hello_market_maker.py
hello_market_maker.py
class hello_market_maker(): def __init__(self, anchor_price, tick_increment, max_pos): self.anchor_price = anchor_price self.tick_increment = tick_increment self.position = 0 self.upper_bound = anchor_price + ((max_pos + 1) * tick_increment) self.lower_bound = anchor_price - ((max_pos + 1) * tick_increment) self.max_pos = max_pos self.mkt = inside_market(anchor_price - tick_increment, anchor_price + tick_increment) def on_bid_fill(self): # modify current bid and ask down 1 tick_increment #self.mkt.shift(-self.tick_increment) self.position += 1 price = self.mkt.bid.price if self.position < self.max_pos: self.mkt.shift(-self.tick_increment) else: self.mkt.exit(BID, self.tick_increment) return "BID_FILL @ ", price def on_ask_fill(self): # modify current bid and ask up 1 tick_increment #self.mkt.shift(-self.tick_increment) self.position -= 1 price = self.mkt.ask.price if self.position > -self.max_pos: self.mkt.shift(self.tick_increment) else: self.mkt.exit(ASK, self.tick_increment) return "ASK_FILL @ ", price def evaluate(self, trade_price): fill, price = self.mkt.evaluate(trade_price) self.adjust_bounds(trade_price) if fill == BID: self.on_bid_fill() elif fill == ASK: self.on_ask_fill() else: filler = 0 return fill, price def adjust_bounds(self, trade_price): if trade_price > self.upper_bound: self.mkt.shift(self.tick_increment) self.upper_bound += self.tick_increment self.lower_bound += self.tick_increment print "ADJUSTING UP" elif trade_price < self.lower_bound: self.mkt.shift(-self.tick_increment) self.upper_bound -= self.tick_increment self.lower_bound -= self.tick_increment print "ADJUSTING DOWN"
bsd-3-clause
Python
819a47ce69164aa48f3b68e9ab997f6ee90e2292
Add a index stats tool
andyfoundi/mongodb-tools,safanaj/mongodb-tools,jwilder/mongodb-tools,safanaj/mongodb-tools,jwilder/mongodb-tools,andyfoundi/mongodb-tools,yaowenqiang/mongodb-tools,yaowenqiang/mongodb-tools,svdata/mongodb-tools,svdata/mongodb-tools
index-stats.py
index-stats.py
""" This script prints some basic collection stats about the size of the collections and their indexes. """ from prettytable import PrettyTable import psutil from pymongo import Connection from pymongo import ReadPreference connection = Connection(read_preference=ReadPreference.SECONDARY) def compute_signature(index): signature = index["ns"] for key in index["key"]: signature += "%s_%s" % (key, index["key"][key]) return signature def get_collection_stats(database, collection): print "Checking DB: %s" % collection.full_name return database.command("collstats", collection.name) # From http://www.5dollarwhitebox.org/drupal/node/84 def convert_bytes(bytes): bytes = float(bytes) if bytes >= 1099511627776: terabytes = bytes / 1099511627776 size = '%.2fT' % terabytes elif bytes >= 1073741824: gigabytes = bytes / 1073741824 size = '%.2fG' % gigabytes elif bytes >= 1048576: megabytes = bytes / 1048576 size = '%.2fM' % megabytes elif bytes >= 1024: kilobytes = bytes / 1024 size = '%.2fK' % kilobytes else: size = '%.2fb' % bytes return size summary_stats = { "count" : 0, "size" : 0, "indexSize" : 0 } all_stats = [] all_db_stats = {} for db in connection.database_names(): # FIXME: Add an option to include oplog stats. if db == "local": continue database = connection[db] all_db_stats[database.name] = [] for collection_name in database.collection_names(): stats = get_collection_stats(database, database[collection_name]) all_stats.append(stats) all_db_stats[database.name].append(stats) summary_stats["count"] += stats["count"] summary_stats["size"] += stats["size"] summary_stats["indexSize"] += stats.get("totalIndexSize", 0) x = PrettyTable(["Collection", "Index","% Size", "Index Size"]) x.set_field_align("Collection", "l") x.set_field_align("Index", "l") x.set_field_align("% Size", "r") x.set_field_align("Index Size", "r") x.set_padding_width(1) print index_size_mapping = {} for db in all_db_stats: db_stats = all_db_stats[db] count = 0 for stat in db_stats: count += stat["count"] for index in stat["indexSizes"]: index_size = stat["indexSizes"].get(index, 0) row = [stat["ns"], index, "%0.1f%%" % ((index_size / float(stat["totalIndexSize"])) * 100), convert_bytes(index_size)] index_size_mapping[index_size] = row x.add_row(row) print "Index Overview" x.printt(sortby="Collection") print print "Top 5 Largest Indexes" x = PrettyTable(["Collection", "Index","% Size", "Index Size"]) x.set_field_align("Collection", "l") x.set_field_align("Index", "l") x.set_field_align("% Size", "r") x.set_field_align("Index Size", "r") x.set_padding_width(1) top_five_indexes = sorted(index_size_mapping.keys(), reverse=True)[0:5] for size in top_five_indexes: x.add_row(index_size_mapping.get(size)) x.printt() print print "Total Documents:", summary_stats["count"] print "Total Data Size:", convert_bytes(summary_stats["size"]) print "Total Index Size:", convert_bytes(summary_stats["indexSize"]) ram_headroom = psutil.phymem_usage()[0] - summary_stats["indexSize"] print "RAM Headroom:", convert_bytes(ram_headroom) print "RAM Used: %s (%s%%)" % (convert_bytes(psutil.phymem_usage()[1]), psutil.phymem_usage()[3]) print "Available RAM Headroom:", convert_bytes((100 - psutil.phymem_usage()[3]) / 100 * ram_headroom)
mit
Python
372f4a988411e48a0c50cdc74fb2a7f4e5abf052
Add a server identity test
Kitware/tangelo,Kitware/tangelo,Kitware/tangelo
tests/server-identity.py
tests/server-identity.py
import nose import requests import fixture @nose.with_setup(fixture.start_tangelo, fixture.stop_tangelo) def test_server_identity(): response = requests.get(fixture.url("/")) assert response.headers["server"] == "Tangelo"
apache-2.0
Python
19db4647257617992e9b195828baf39907cc5db1
Add tests for exit codes
amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint
tests/test_exit_codes.py
tests/test_exit_codes.py
"""Check that the CLI returns the appropriate exit code.""" import subprocess def test_exit_code_demo(): """Ensure that linting the demo returns an exit code of 1.""" try: subprocess.check_output("proselint --demo", shell=True) except subprocess.CalledProcessError as grepexc: assert(grepexc.returncode == 1) def test_exit_code_version(): """Ensure that getting the version returns an exit code of 0.""" try: subprocess.check_output("proselint --version", shell=True) except subprocess.CalledProcessError: assert(False)
bsd-3-clause
Python
787298889fd85dffb597dee6571dead42227c7d6
add test to validate generated stub constants.pyi
mehcode/python-xmlsec,mehcode/python-xmlsec
tests/test_type_stubs.py
tests/test_type_stubs.py
"""Test type stubs for correctness where possible.""" import os import sys import pytest import xmlsec black = pytest.importorskip('black') if sys.version_info >= (3, 4): from pathlib import Path else: from _pytest.pathlib import Path constants_stub_header = """ import sys from typing import NamedTuple if sys.version_info >= (3, 8): from typing import Final, Literal else: from typing_extensions import Final, Literal class __KeyData(NamedTuple): # __KeyData type href: str name: str class __Transform(NamedTuple): # __Transform type href: str name: str usage: int """ def gen_constants_stub(): """ Generate contents of the file:`xmlsec/constants.pyi`. Simply load all constants at runtime, generate appropriate type hint for each constant type. """ def process_constant(name): """Generate line in stub file for constant name.""" obj = getattr(xmlsec.constants, name) return '{name}: Final = {obj!r}'.format(name=name, obj=obj) names = list(sorted(name for name in dir(xmlsec.constants) if not name.startswith('__'))) lines = [process_constant(name) for name in names] return constants_stub_header + os.linesep.join(lines) def test_xmlsec_constants_stub(request): """ Generate the stub file for :mod:`xmlsec.constants` from existing code. Compare it against the existing stub :file:`xmlsec/constants.pyi`. """ rootdir = Path(str(request.config.rootdir)) stub = rootdir / 'src' / 'xmlsec' / 'constants.pyi' mode = black.FileMode(target_versions=[black.TargetVersion.PY38], line_length=130, is_pyi=True, string_normalization=False) formatted = black.format_file_contents(gen_constants_stub(), fast=False, mode=mode) assert formatted == stub.read_text()
mit
Python
f1e35886822a7ff7e7f19ef4f1db90c870e8d45d
Add file for remove nonterminal tests
PatrikValkovic/grammpy
tests/NonterminalRemoveTest.py
tests/NonterminalRemoveTest.py
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 23.06.2017 16:39 :Licence GNUv3 Part of grammpy """ from unittest import TestCase, main from grammpy import Grammar from grammpy import Nonterminal class TempClass(Nonterminal): pass class Second(Nonterminal): pass class Third(Nonterminal): pass class NonterminalRemoveTest(TestCase): def test_removeOne(self): gr = Grammar() gr.add_term([0, 'asdf', TempClass]) self.assertEqual(gr.terms_count(), 3) gr.remove_term(0) self.assertEqual(gr.terms_count(), 2) self.assertTrue(gr.have_term('asdf')) self.assertTrue(gr.have_term(TempClass)) self.assertFalse(gr.have_term(0)) def test_removeClass(self): gr = Grammar() gr.add_term([0, 'asdf', TempClass]) self.assertEqual(gr.terms_count(), 3) gr.remove_term(TempClass) self.assertEqual(gr.terms_count(), 2) self.assertTrue(gr.have_term('asdf')) self.assertTrue(gr.have_term(0)) self.assertFalse(gr.have_term(TempClass)) def test_removeTwo(self): gr = Grammar() gr.add_term([0, 'asdf', TempClass]) self.assertEqual(gr.terms_count(), 3) gr.remove_term(0) gr.remove_term('asdf') self.assertEqual(gr.terms_count(), 1) self.assertTrue(gr.have_term(TempClass)) self.assertFalse(gr.have_term('asdf')) self.assertFalse(gr.have_term(0)) def test_removeTwoInArray(self): gr = Grammar() gr.add_term([0, 'asdf', TempClass]) self.assertEqual(gr.terms_count(), 3) gr.remove_term([0, 'asdf']) self.assertEqual(gr.terms_count(), 1) self.assertTrue(gr.have_term(TempClass)) self.assertFalse(gr.have_term('asdf')) self.assertFalse(gr.have_term(0)) def test_removeTwoInTuple(self): gr = Grammar() gr.add_term([0, 'asdf', TempClass]) self.assertEqual(gr.terms_count(), 3) gr.remove_term((0, 'asdf')) self.assertEqual(gr.terms_count(), 1) self.assertTrue(gr.have_term(TempClass)) self.assertFalse(gr.have_term('asdf')) self.assertFalse(gr.have_term(0)) def test_removeAllWithoutParam(self): gr = Grammar() gr.add_term([0, 'asdf', TempClass]) self.assertEqual(gr.terms_count(), 3) gr.remove_term() self.assertEqual(gr.terms_count(), 0) self.assertFalse(gr.have_term(TempClass)) self.assertFalse(gr.have_term('asdf')) self.assertFalse(gr.have_term(0)) def test_removeEmptyGrammar(self): gr = Grammar() self.assertEqual(gr.terms_count(), 0) gr.remove_term() self.assertEqual(gr.terms_count(), 0) if __name__ == '__main__': main()
mit
Python