Dataset schema (nine fields per record, appearing in this order in each row below):

  commit        string, 40 chars (commit SHA)
  subject       string, 4 to 1.73k chars (commit message subject)
  repos         string, 5 to 127k chars (comma-separated list of repositories)
  old_file      string, 2 to 751 chars (file path before the commit)
  new_file      string, 2 to 751 chars (file path after the commit)
  new_contents  string, 1 to 8.98k chars (file contents after the commit)
  old_contents  string, 0 to 6.59k chars (file contents before the commit)
  license       string, 13 distinct values
  lang          string, 23 distinct values
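Each record stores the complete file contents before and after the commit rather than a patch, so the change itself can be recovered by diffing the two contents fields. Below is a minimal sketch using only the Python standard library; the row literal is a hypothetical, heavily abridged stand-in assembled from the first record (real rows carry entire files):

    import difflib

    # Hypothetical row following the schema above; the contents fields are
    # truncated stand-ins, not the full file snapshots a real record holds.
    row = {
        "commit": "79b258dedda23c9130dda6d4d674f077ac52683e",
        "subject": "Add select field for food type",
        "old_file": "app/main/forms.py",
        "new_file": "app/main/forms.py",
        "old_contents": "serving = StringField('What is being offered?', default='Pizza!')\n",
        "new_contents": "serving = SelectField('What is being offered?', choices=food_types)\n",
    }

    # Rebuild a unified diff for the commit from the before/after snapshots.
    diff = difflib.unified_diff(
        row["old_contents"].splitlines(keepends=True),
        row["new_contents"].splitlines(keepends=True),
        fromfile=row["old_file"],
        tofile=row["new_file"],
    )
    print("".join(diff))

The same loop applies unchanged to every record, since old_file and new_file name the same path whenever a commit edits a file in place.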
79b258dedda23c9130dda6d4d674f077ac52683e
Add select field for food type
foodsnag/foodsnag-web,foodsnag/foodsnag-web,foodsnag/foodsnag-web
app/main/forms.py
app/main/forms.py
new_contents:
import datetime
from dateutil.parser import parse
from flask.ext.wtf import Form
from wtforms import StringField, TextAreaField, BooleanField, SelectField,\
    DateTimeField, SubmitField
from wtforms.validators import Required, Length, Email
from wtforms import ValidationError
from ..models import User, Event, Location
from autocomplete.forms import AutocompleteField


def get_loc_by_id(id):
    loc = Location.query.filter_by(id=id).first()
    return loc


class EditProfileForm(Form):
    text_updates = BooleanField('Send notifications through text')
    phone = StringField('Phone Number (To recieve event notifications)')
    location = AutocompleteField('School',
        url='auth.autocomplete',
        get_label='name',
        getter=get_loc_by_id,
        validators=[Required()]
    )
    submit = SubmitField('Submit')

    def validate_phone(self, field):
        if field.data != '' and User.query.filter_by(phone=num).first():
            raise ValidationError('That number is already in use.')


class MakeEventForm(Form):
    name = StringField('What is the event?', validators=[Required()])
    food_types = [("Fruit","Fruit"), ("Lemonade","Lemonade"),
                  ("Breakfast","Breakfast"), ("Meat","Meat"),
                  ("Sausage","Sausage"), ("Hot dogs","Hot dogs"),
                  ("Burgers","Burgers"), ("Candy","Candy"),
                  ("Ice cream","Ice cream"), ("Drinks","Drinks"),
                  ("Soup","Soup"), ("Alcohol","Alcohol"),
                  ("Pizza","Pizza"), ("Chicken","Chicken"),
                  ("Fish","Fish"), ("Cake","Cake"),
                  ("BBQ","BBQ"), ("Formal dinner","Formal dinner"),
                  ("Smoothie","Smoothie"), ("Coffee","Coffee"),
                  ("Tea","Tea")]
    serving = SelectField('What is being offered?', choices=food_types)
    place = StringField('Where is this happening (Building/room)?',
                        validators=[Required()])
    now = datetime.datetime.now()#.strftime('%m-%d %H:%M')
    time = DateTimeField('When is this happening?', default=now,
                         format='%m/%d %I:%M%p')
    body = StringField('Anything else we should know?')
    submit = SubmitField('Submit')

    def validate_time(self, field):
        pass
        #if field.data < datetime.datetime.now():
        #    raise ValidationError('Time must be in the future')


class SchoolSearchForm(Form):
    location = AutocompleteField(
        url='main.autocomplete',
        placeholder='Your school...',
        get_label='name',
        getter=get_loc_by_id,
        validators=[Required()]
    )
old_contents:
import datetime
from dateutil.parser import parse
from flask.ext.wtf import Form
from wtforms import StringField, TextAreaField, BooleanField, SelectField,\
    DateTimeField, SubmitField
from wtforms.validators import Required, Length, Email
from wtforms import ValidationError
from ..models import User, Event, Location
from autocomplete.forms import AutocompleteField


def get_loc_by_id(id):
    loc = Location.query.filter_by(id=id).first()
    return loc


class EditProfileForm(Form):
    text_updates = BooleanField('Send notifications through text')
    phone = StringField('Phone Number (To recieve event notifications)')
    location = AutocompleteField('School',
        url='auth.autocomplete',
        get_label='name',
        getter=get_loc_by_id,
        validators=[Required()]
    )
    submit = SubmitField('Submit')

    def validate_phone(self, field):
        if field.data != '' and User.query.filter_by(phone=num).first():
            raise ValidationError('That number is already in use.')


class MakeEventForm(Form):
    name = StringField('What is the event?', validators=[Required()])
    serving = StringField('What is being offered?', default='Pizza!')
    place = StringField('Where is this happening (Building/room)?',
                        validators=[Required()])
    now = datetime.datetime.now()#.strftime('%m-%d %H:%M')
    time = DateTimeField('When is this happening?', default=now,
                         format='%m/%d %I:%M%p')
    body = StringField('Anything else we should know?')
    submit = SubmitField('Submit')

    def validate_time(self, field):
        pass
        #if field.data < datetime.datetime.now():
        #    raise ValidationError('Time must be in the future')


class SchoolSearchForm(Form):
    location = AutocompleteField(
        url='main.autocomplete',
        placeholder='Your school...',
        get_label='name',
        getter=get_loc_by_id,
        validators=[Required()]
    )
mit
Python
d973f95e1807cb27ac61febaa7ffbcf849252ffa
adjust labeling of demo_inset.py
leejjoon/pywcsgrid2
examples/demo_inset.py
examples/demo_inset.py
new_contents:
import matplotlib.pyplot as plt
from astropy.io import fits as pyfits
from mpl_toolkits.axes_grid1.axes_grid import ImageGrid
import pywcsgrid2
#from pywcsgrid2.axes_wcs import AxesWcs, GridHelperWcs
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes, mark_inset
import mpl_toolkits.axisartist as axisartist
import matplotlib.patheffects as patheffects

if 1:
    f = pyfits.open("pspc_skyview.fits")
    d = f[0].data
    h = f[0].header

    fig = plt.figure(1)

    grid = ImageGrid(fig, (1, 1, 1), nrows_ncols = (1, 1),
                     cbar_mode="single", cbar_pad="2%",
                     cbar_location="right",
                     axes_class=(pywcsgrid2.Axes, dict(header=h)))

    main_axes = grid[0]
    main_axes.locator_params(nbins=5)

    cb_axes = grid.cbar_axes[0] # colorbar axes

    im = main_axes.imshow(d, origin="lower", cmap=plt.cm.gray_r,
                          vmin=4.e-05, vmax=0.00018,
                          interpolation="nearest")

    cb_axes.colorbar(im)
    cb_axes.axis["right"].toggle(ticklabels=False)

    axins = zoomed_inset_axes(main_axes, zoom=3, loc=1,
                              axes_class=pywcsgrid2.Axes,
                              axes_kwargs=dict(wcs=h))

    im2 = axins.imshow(d, origin="lower", interpolation="nearest",
                       vmin=9.e-05, vmax=18.e-05,
                       cmap=plt.cm.gray_r)

    axins.set_xlim(120, 160)
    axins.set_ylim(120, 160)

    axins.set_ticklabel_type("delta", center_pixel="center")
    axins.axis[:].invert_ticklabel_direction()

    mark_inset(main_axes, axins, loc1=2, loc2=4, fc="none", ec="0.5")

    plt.show()
old_contents:
import matplotlib.pyplot as plt
from astropy.io import fits as pyfits
from mpl_toolkits.axes_grid1.axes_grid import ImageGrid
import pywcsgrid2
#from pywcsgrid2.axes_wcs import AxesWcs, GridHelperWcs
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes, mark_inset
import mpl_toolkits.axisartist as axisartist
import matplotlib.patheffects as patheffects

if 1:
    f = pyfits.open("pspc_skyview.fits")
    d = f[0].data
    h = f[0].header

    fig = plt.figure(1)

    grid = ImageGrid(fig, (1, 1, 1), nrows_ncols = (1, 1),
                     cbar_mode="single", cbar_pad="2%",
                     cbar_location="right",
                     axes_class=(pywcsgrid2.Axes, dict(header=h)))

    main_axes = grid[0]
    main_axes.locator_params(nbins=5)

    cb_axes = grid.cbar_axes[0] # colorbar axes

    im = main_axes.imshow(d, origin="lower", cmap=plt.cm.gray_r,
                          vmin=4.e-05, vmax=0.00018,
                          interpolation="nearest")

    cb_axes.colorbar(im)
    cb_axes.axis["right"].toggle(ticklabels=False)

    axins = zoomed_inset_axes(main_axes, zoom=3, loc=1,
                              axes_class=pywcsgrid2.Axes,
                              axes_kwargs=dict(wcs=h))

    im2 = axins.imshow(d, origin="lower", interpolation="nearest",
                       vmin=9.e-05, vmax=18.e-05,
                       cmap=plt.cm.gray_r)

    axins.set_xlim(120, 160)
    axins.set_ylim(120, 160)

    axins.set_ticklabel_type("delta")
    axins.axis[:].invert_ticklabel_direction()

    mark_inset(main_axes, axins, loc1=2, loc2=4, fc="none", ec="0.5")

    plt.show()
mit
Python
060b124888953d302d8611c459f978ef9201ae96
update g95 version pattern
jschueller/numpy,SiccarPoint/numpy,NextThought/pypy-numpy,drasmuss/numpy,charris/numpy,drasmuss/numpy,BabeNovelty/numpy,dato-code/numpy,behzadnouri/numpy,pbrod/numpy,Anwesh43/numpy,jorisvandenbossche/numpy,Eric89GXL/numpy,ogrisel/numpy,mhvk/numpy,numpy/numpy,ahaldane/numpy,madphysicist/numpy,b-carter/numpy,stefanv/numpy,CMartelLML/numpy,ChristopherHogan/numpy,rhythmsosad/numpy,CMartelLML/numpy,stefanv/numpy,ogrisel/numpy,ChristopherHogan/numpy,jonathanunderwood/numpy,dwf/numpy,dimasad/numpy,felipebetancur/numpy,MaPePeR/numpy,skwbc/numpy,hainm/numpy,Eric89GXL/numpy,jorisvandenbossche/numpy,ewmoore/numpy,WarrenWeckesser/numpy,sigma-random/numpy,BabeNovelty/numpy,jorisvandenbossche/numpy,rherault-insa/numpy,bringingheavendown/numpy,tynn/numpy,utke1/numpy,grlee77/numpy,endolith/numpy,andsor/numpy,kiwifb/numpy,kirillzhuravlev/numpy,endolith/numpy,trankmichael/numpy,tdsmith/numpy,charris/numpy,kirillzhuravlev/numpy,Eric89GXL/numpy,rmcgibbo/numpy,numpy/numpy-refactor,rudimeier/numpy,MaPePeR/numpy,embray/numpy,brandon-rhodes/numpy,GaZ3ll3/numpy,mathdd/numpy,trankmichael/numpy,simongibbons/numpy,MichaelAquilina/numpy,sonnyhu/numpy,NextThought/pypy-numpy,BMJHayward/numpy,GrimDerp/numpy,embray/numpy,yiakwy/numpy,astrofrog/numpy,b-carter/numpy,ChristopherHogan/numpy,andsor/numpy,ViralLeadership/numpy,gmcastil/numpy,musically-ut/numpy,mathdd/numpy,empeeu/numpy,joferkington/numpy,mortada/numpy,tdsmith/numpy,Eric89GXL/numpy,mindw/numpy,rajathkumarmp/numpy,pelson/numpy,skymanaditya1/numpy,tdsmith/numpy,sigma-random/numpy,seberg/numpy,argriffing/numpy,rajathkumarmp/numpy,pdebuyl/numpy,SiccarPoint/numpy,mathdd/numpy,mingwpy/numpy,grlee77/numpy,KaelChen/numpy,anntzer/numpy,KaelChen/numpy,charris/numpy,nbeaver/numpy,jorisvandenbossche/numpy,ogrisel/numpy,chatcannon/numpy,tacaswell/numpy,naritta/numpy,MSeifert04/numpy,NextThought/pypy-numpy,ESSS/numpy,ssanderson/numpy,nguyentu1602/numpy,nbeaver/numpy,CMartelLML/numpy,brandon-rhodes/numpy,yiakwy/numpy,cjermain/numpy,ogrisel/numpy,matthew-brett/numpy,WillieMaddox/numpy,madphysicist/numpy,dwillmer/numpy,stefanv/numpy,mwiebe/numpy,Dapid/numpy,SiccarPoint/numpy,Anwesh43/numpy,sigma-random/numpy,cjermain/numpy,SunghanKim/numpy,mingwpy/numpy,BMJHayward/numpy,mindw/numpy,Linkid/numpy,Srisai85/numpy,empeeu/numpy,rudimeier/numpy,MaPePeR/numpy,groutr/numpy,immerrr/numpy,pdebuyl/numpy,ekalosak/numpy,cjermain/numpy,dch312/numpy,WarrenWeckesser/numpy,b-carter/numpy,sinhrks/numpy,simongibbons/numpy,jankoslavic/numpy,MSeifert04/numpy,endolith/numpy,ajdawson/numpy,groutr/numpy,njase/numpy,seberg/numpy,dwillmer/numpy,pizzathief/numpy,MaPePeR/numpy,felipebetancur/numpy,ewmoore/numpy,WarrenWeckesser/numpy,mattip/numpy,cowlicks/numpy,andsor/numpy,pizzathief/numpy,bmorris3/numpy,jakirkham/numpy,chiffa/numpy,MichaelAquilina/numpy,dwillmer/numpy,githubmlai/numpy,Yusa95/numpy,maniteja123/numpy,solarjoe/numpy,bertrand-l/numpy,rmcgibbo/numpy,naritta/numpy,shoyer/numpy,sonnyhu/numpy,ChanderG/numpy,bringingheavendown/numpy,WillieMaddox/numpy,CMartelLML/numpy,jakirkham/numpy,GrimDerp/numpy,Dapid/numpy,joferkington/numpy,musically-ut/numpy,anntzer/numpy,shoyer/numpy,brandon-rhodes/numpy,pyparallel/numpy,cowlicks/numpy,ssanderson/numpy,pizzathief/numpy,chatcannon/numpy,grlee77/numpy,groutr/numpy,dwf/numpy,yiakwy/numpy,anntzer/numpy,maniteja123/numpy,mhvk/numpy,tynn/numpy,dato-code/numpy,jonathanunderwood/numpy,seberg/numpy,matthew-brett/numpy,mindw/numpy,BMJHayward/numpy,mortada/numpy,pbrod/numpy,dch312/numpy,nguyentu1602/numpy,ESSS/numpy,Anwesh43/numpy,gmcastil/numpy,felipebetancur/numpy,shoyer/numpy,endolith/numpy,sigma-random/numpy,rmcgibbo/numpy,bringingheavendown/numpy,shoyer/numpy,sinhrks/numpy,seberg/numpy,pbrod/numpy,matthew-brett/numpy,ContinuumIO/numpy,jorisvandenbossche/numpy,rajathkumarmp/numpy,kiwifb/numpy,trankmichael/numpy,has2k1/numpy,cowlicks/numpy,dato-code/numpy,simongibbons/numpy,musically-ut/numpy,embray/numpy,larsmans/numpy,skymanaditya1/numpy,utke1/numpy,SunghanKim/numpy,bmorris3/numpy,mortada/numpy,pelson/numpy,astrofrog/numpy,astrofrog/numpy,jschueller/numpy,mortada/numpy,ogrisel/numpy,sinhrks/numpy,moreati/numpy,madphysicist/numpy,nbeaver/numpy,njase/numpy,argriffing/numpy,chiffa/numpy,abalkin/numpy,stuarteberg/numpy,larsmans/numpy,mwiebe/numpy,nguyentu1602/numpy,mhvk/numpy,pdebuyl/numpy,bertrand-l/numpy,rgommers/numpy,joferkington/numpy,pelson/numpy,chatcannon/numpy,SunghanKim/numpy,pdebuyl/numpy,gmcastil/numpy,Srisai85/numpy,stuarteberg/numpy,abalkin/numpy,AustereCuriosity/numpy,joferkington/numpy,numpy/numpy,BabeNovelty/numpy,ddasilva/numpy,felipebetancur/numpy,trankmichael/numpy,empeeu/numpy,pizzathief/numpy,ekalosak/numpy,GaZ3ll3/numpy,tdsmith/numpy,mhvk/numpy,charris/numpy,ChristopherHogan/numpy,ahaldane/numpy,pelson/numpy,jschueller/numpy,simongibbons/numpy,sinhrks/numpy,behzadnouri/numpy,dwf/numpy,andsor/numpy,gfyoung/numpy,ajdawson/numpy,solarjoe/numpy,has2k1/numpy,matthew-brett/numpy,hainm/numpy,sonnyhu/numpy,numpy/numpy,naritta/numpy,numpy/numpy,has2k1/numpy,solarjoe/numpy,leifdenby/numpy,Yusa95/numpy,madphysicist/numpy,ajdawson/numpy,numpy/numpy-refactor,brandon-rhodes/numpy,ahaldane/numpy,Yusa95/numpy,ViralLeadership/numpy,jakirkham/numpy,mathdd/numpy,githubmlai/numpy,ContinuumIO/numpy,rhythmsosad/numpy,jakirkham/numpy,kiwifb/numpy,sonnyhu/numpy,immerrr/numpy,rherault-insa/numpy,tynn/numpy,stefanv/numpy,jschueller/numpy,kirillzhuravlev/numpy,Srisai85/numpy,BabeNovelty/numpy,jonathanunderwood/numpy,dwf/numpy,rgommers/numpy,jankoslavic/numpy,jakirkham/numpy,MSeifert04/numpy,stefanv/numpy,abalkin/numpy,behzadnouri/numpy,WarrenWeckesser/numpy,gfyoung/numpy,immerrr/numpy,githubmlai/numpy,KaelChen/numpy,hainm/numpy,matthew-brett/numpy,ajdawson/numpy,ViralLeadership/numpy,skwbc/numpy,naritta/numpy,numpy/numpy-refactor,dch312/numpy,rajathkumarmp/numpy,hainm/numpy,argriffing/numpy,pizzathief/numpy,dato-code/numpy,anntzer/numpy,grlee77/numpy,empeeu/numpy,rhythmsosad/numpy,madphysicist/numpy,tacaswell/numpy,utke1/numpy,GaZ3ll3/numpy,Srisai85/numpy,tacaswell/numpy,bmorris3/numpy,drasmuss/numpy,stuarteberg/numpy,pelson/numpy,GrimDerp/numpy,rmcgibbo/numpy,ESSS/numpy,ssanderson/numpy,embray/numpy,stuarteberg/numpy,rgommers/numpy,NextThought/pypy-numpy,SiccarPoint/numpy,MSeifert04/numpy,Linkid/numpy,dwillmer/numpy,dimasad/numpy,njase/numpy,skymanaditya1/numpy,MSeifert04/numpy,Anwesh43/numpy,musically-ut/numpy,MichaelAquilina/numpy,mindw/numpy,rhythmsosad/numpy,GrimDerp/numpy,mhvk/numpy,Linkid/numpy,BMJHayward/numpy,mattip/numpy,pyparallel/numpy,WillieMaddox/numpy,leifdenby/numpy,dimasad/numpy,SunghanKim/numpy,ahaldane/numpy,ChanderG/numpy,astrofrog/numpy,kirillzhuravlev/numpy,GaZ3ll3/numpy,skymanaditya1/numpy,ekalosak/numpy,ekalosak/numpy,maniteja123/numpy,mingwpy/numpy,nguyentu1602/numpy,numpy/numpy-refactor,mwiebe/numpy,mattip/numpy,ahaldane/numpy,jankoslavic/numpy,skwbc/numpy,embray/numpy,AustereCuriosity/numpy,pbrod/numpy,cjermain/numpy,gfyoung/numpy,moreati/numpy,simongibbons/numpy,MichaelAquilina/numpy,cowlicks/numpy,ewmoore/numpy,mingwpy/numpy,ChanderG/numpy,ContinuumIO/numpy,larsmans/numpy,Dapid/numpy,grlee77/numpy,WarrenWeckesser/numpy,KaelChen/numpy,pyparallel/numpy,moreati/numpy,rudimeier/numpy,ddasilva/numpy,astrofrog/numpy,rgommers/numpy,has2k1/numpy,pbrod/numpy,dimasad/numpy,ChanderG/numpy,leifdenby/numpy,immerrr/numpy,ddasilva/numpy,numpy/numpy-refactor,larsmans/numpy,AustereCuriosity/numpy,ewmoore/numpy,bertrand-l/numpy,shoyer/numpy,jankoslavic/numpy,dwf/numpy,dch312/numpy,Linkid/numpy,rherault-insa/numpy,yiakwy/numpy,mattip/numpy,chiffa/numpy,Yusa95/numpy,ewmoore/numpy,bmorris3/numpy,rudimeier/numpy
numpy/distutils/fcompiler/g95.py
numpy/distutils/fcompiler/g95.py
new_contents:
# http://g95.sourceforge.net/
import os
import sys

from numpy.distutils.cpuinfo import cpu
from numpy.distutils.fcompiler import FCompiler

class G95FCompiler(FCompiler):
    compiler_type = 'g95'
#    version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95!\) (?P<version>.*)\).*'
    # $ g95 --version
    # G95 (GCC 4.0.3 (g95!) May 22 2006)
    version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95 (?P<version>.*)!\) (?P<date>.*)\).*'
    # $ g95 --version
    # G95 (GCC 4.0.3 (g95 0.90!) Aug 22 2006)

    executables = {
        'version_cmd'  : ["g95", "--version"],
        'compiler_f77' : ["g95", "-ffixed-form"],
        'compiler_fix' : ["g95", "-ffixed-form"],
        'compiler_f90' : ["g95"],
        'linker_so'    : ["g95","-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    pic_flags = ['-fpic']
    module_dir_switch = '-fmod='
    module_include_switch = '-I'

    def get_flags(self):
        return ['-fno-second-underscore']
    def get_flags_opt(self):
        return ['-O']
    def get_flags_debug(self):
        return ['-g']

if __name__ == '__main__':
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils.fcompiler import new_fcompiler
    #compiler = new_fcompiler(compiler='g95')
    compiler = G95FCompiler()
    compiler.customize()
    print compiler.get_version()
old_contents:
# http://g95.sourceforge.net/
import os
import sys

from numpy.distutils.cpuinfo import cpu
from numpy.distutils.fcompiler import FCompiler

class G95FCompiler(FCompiler):
    compiler_type = 'g95'
    version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95!\) (?P<version>.*)\).*'
    # $ g95 --version
    # G95 (GCC 4.0.3 (g95!) May 22 2006)

    executables = {
        'version_cmd'  : ["g95", "--version"],
        'compiler_f77' : ["g95", "-ffixed-form"],
        'compiler_fix' : ["g95", "-ffixed-form"],
        'compiler_f90' : ["g95"],
        'linker_so'    : ["g95","-shared"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }
    pic_flags = ['-fpic']
    module_dir_switch = '-fmod='
    module_include_switch = '-I'

    def get_flags(self):
        return ['-fno-second-underscore']
    def get_flags_opt(self):
        return ['-O']
    def get_flags_debug(self):
        return ['-g']

if __name__ == '__main__':
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils.fcompiler import new_fcompiler
    #compiler = new_fcompiler(compiler='g95')
    compiler = G95FCompiler()
    compiler.customize()
    print compiler.get_version()
bsd-3-clause
Python
7e8e5ceb765189974bcaee86e15c26b94ac05f3a
Update modulation.py
Koheron/lase
examples/modulation.py
examples/modulation.py
new_contents:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import initExample

import os
from lase.core import KClient

# Driver to use
from lase.drivers import Oscillo

# Modules to import
import numpy as np
import matplotlib.pyplot as plt
import time

# Load the oscillo instrument
host = os.getenv('HOST','192.168.1.100')
password = os.getenv('PASSWORD','changeme')
ssh = ZynqSSH(host, password)
ssh.unzip_app()
ssh.install_instrument('oscillo')

# Connect to the instrument
client = KClient(host)
driver = Oscillo(client)

# Enable laser
driver.start_laser()

# Set laser current
current = 30  # mA
driver.set_laser_current(current)

# Modulation on DAC
amp_mod = 0.2
freq_mod = 1e6
driver.dac[1, :] = amp_mod*np.sin(2 * np.pi * freq_mod * driver.sampling.t)
driver.set_dac()

# Signal on ADC
driver.get_adc()
signal = driver.adc[0, :]

# Plot
plt.plot(driver.sampling.t, signal)
plt.show()

# Plot
psd_signal = np.abs(np.fft.fft(signal)) ** 2
plt.semilogy(1e-6 * np.fft.fftshift(driver.sampling.f_fft),
             np.fft.fftshift(psd_signal))
plt.xlabel('Frequency (MHz)')
plt.show()

# Disable laser
driver.stop_laser()
driver.close()
old_contents:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import initExample

import os
from lase.core import KClient

# Driver to use
from lase.drivers import Oscillo

# Modules to import
import numpy as np
import matplotlib.pyplot as plt
import time

# Connect to Lase
host = os.getenv('HOST','192.168.1.100')
client = KClient(host)
driver = Oscillo(client)  # Replace with appropriate driver

# Enable laser
driver.start_laser()

# Set laser current
current = 30  # mA
driver.set_laser_current(current)

# Modulation on DAC
amp_mod = 0.2
freq_mod = 1e6
driver.dac[1, :] = amp_mod*np.sin(2 * np.pi * freq_mod * driver.sampling.t)
driver.set_dac()

# Signal on ADC
driver.get_adc()
signal = driver.adc[0, :]

# Plot
plt.plot(driver.sampling.t, signal)
plt.show()

# Plot
psd_signal = np.abs(np.fft.fft(signal)) ** 2
plt.semilogy(1e-6 * np.fft.fftshift(driver.sampling.f_fft),
             np.fft.fftshift(psd_signal))
plt.xlabel('Frequency (MHz)')
plt.show()

# Disable laser
driver.stop_laser()
driver.close()
mit
Python
6dbcc892f8b659a22a33fce3836cb082b64dd817
load order
sl2017/campos
campos_event/__openerp__.py
campos_event/__openerp__.py
new_contents:
# -*- coding: utf-8 -*-
##############################################################################
#
#    This file is part of CampOS Event,
#    an Odoo module.
#
#    Copyright (c) 2015 Stein & Gabelgaard ApS
#    http://www.steingabelgaard.dk
#    Hans Henrik Gaelgaard
#
#    CampOS Event is free software:
#    you can redistribute it and/or modify it under the terms of the GNU
#    Affero General Public License as published by the Free Software
#    Foundation,either version 3 of the License, or (at your option) any
#    later version.
#
#    CampOS Event is distributed
#    in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
#    even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
#    PURPOSE. See the GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with CampOS Event.
#    If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': "CampOS Event",

    'summary': """
        Scout Camp Management Solution""",

    # 'description': put the module description in README.rst

    'author': "Hans Henrik Gabelgaard",
    'website': "http://www.steingabelgaard.dk",

    # Categories can be used to filter modules in modules listing
    # Check http://goo.gl/0TfwzD for the full list
    'category': 'Uncategorized',
    'version': '0.1',
    'license': 'AGPL-3',

    # any module necessary for this one to work correctly
    'depends': [
        'base',
        'mail',
        'event',
        'website',
        'portal',
    ],

    # always loaded
    'data': [
        'security/campos_event_security.xml',
        'security/ir.model.access.csv',
        'security/ir.rule.csv',
        'data/campos.municipality.csv',
        'data/campos.scout.org.csv',
        'views/templates.xml',
        'views/participant_view.xml',
        'views/committee_view.xml',
        'views/municipality_view.xml',
        "views/scout_org_view.xml",
        "views/res_partner_view.xml",
        "views/job_view.xml",
        "views/job_template.xml",
        "views/mail_templates.xml",
        "views/portal_menu.xml",
        "views/res_users_view.xml",
        'views/campos_menu.xml',
    ],
    # only loaded in demonstration mode
    'demo': [
        'demo.xml',
    ],
}
old_contents:
# -*- coding: utf-8 -*-
##############################################################################
#
#    This file is part of CampOS Event,
#    an Odoo module.
#
#    Copyright (c) 2015 Stein & Gabelgaard ApS
#    http://www.steingabelgaard.dk
#    Hans Henrik Gaelgaard
#
#    CampOS Event is free software:
#    you can redistribute it and/or modify it under the terms of the GNU
#    Affero General Public License as published by the Free Software
#    Foundation,either version 3 of the License, or (at your option) any
#    later version.
#
#    CampOS Event is distributed
#    in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
#    even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
#    PURPOSE. See the GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with CampOS Event.
#    If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': "CampOS Event",

    'summary': """
        Scout Camp Management Solution""",

    # 'description': put the module description in README.rst

    'author': "Hans Henrik Gabelgaard",
    'website': "http://www.steingabelgaard.dk",

    # Categories can be used to filter modules in modules listing
    # Check http://goo.gl/0TfwzD for the full list
    'category': 'Uncategorized',
    'version': '0.1',
    'license': 'AGPL-3',

    # any module necessary for this one to work correctly
    'depends': [
        'base',
        'mail',
        'event',
        'website',
        'portal',
    ],

    # always loaded
    'data': [
        'security/campos_event_security.xml',
        'security/ir.model.access.csv',
        'security/ir.rule.csv',
        'data/campos.municipality.csv',
        'data/campos.scout.org.csv',
        'views/campos_menu.xml',
        'views/templates.xml',
        'views/participant_view.xml',
        'views/committee_view.xml',
        'views/municipality_view.xml',
        "views/scout_org_view.xml",
        "views/res_partner_view.xml",
        "views/job_view.xml",
        "views/job_template.xml",
        "views/mail_templates.xml",
        "views/portal_menu.xml",
        "views/res_users_view.xml",
    ],
    # only loaded in demonstration mode
    'demo': [
        'demo.xml',
    ],
}
agpl-3.0
Python
00c07a76185aff4873abe3eb814b847b6d02f58f
fix date
jerodg/hackerrank-python
02_BasicDataTypes/04_find-second-maximum-number-in-a-list.py
02_BasicDataTypes/04_find-second-maximum-number-in-a-list.py
new_contents:
#!/usr/bin/python3.6
"""Jerod Gawne, 2017-09-28

Find the Second Largest Number
https://www.hackerrank.com/challenges/find-second-maximum-number-in-a-list/

Editorial:
- There are many ways to solve this problem.

This can be solved by maintaining two variables max and second_max.
Iterate through the list and find the maximum and store it.
Iterate again and find the next maximum value by having an
if condition that checks if it's not equal to first maximum.

Create a counter from the given array.
Extract the keys, sort them and print the second last element.

Transform the list to a set and then list again,
removing all the duplicates.
Then sort the list and print the second last element.
"""


def main():
    """
    Main/Tests
    """
    input()
    print(sorted(set(map(int, input().split())))[-2])


if __name__ == '__main__':
    try:
        main()
    except Exception:
        import sys
        import traceback
        print(traceback.print_exception(*sys.exc_info()))
old_contents:
#!/usr/bin/python3.6
"""Jerod Gawne, 2017-09-12

Find the Second Largest Number
https://www.hackerrank.com/challenges/find-second-maximum-number-in-a-list/

Editorial:
- There are many ways to solve this problem.

This can be solved by maintaining two variables max and second_max.
Iterate through the list and find the maximum and store it.
Iterate again and find the next maximum value by having an
if condition that checks if it's not equal to first maximum.

Create a counter from the given array.
Extract the keys, sort them and print the second last element.

Transform the list to a set and then list again,
removing all the duplicates.
Then sort the list and print the second last element.
"""


def main():
    """
    Main/Tests
    """
    input()
    print(sorted(set(map(int, input().split())))[-2])


if __name__ == '__main__':
    try:
        main()
    except Exception:
        import sys
        import traceback
        print(traceback.print_exception(*sys.exc_info()))
mit
Python
0c305e8bd8624af2d2ce1ca292bb26095d0e1dcc
Fix __eq__ method on ExecuteContainer to compare only proper classes
jboss-container-images/concreate,jboss-container-images/concreate,jboss-container-images/concreate
cekit/descriptor/execute.py
cekit/descriptor/execute.py
new_contents:
import yaml

import cekit
from cekit.descriptor import Descriptor


execute_schemas = [yaml.safe_load("""
map:
  name: {type: str}
  script: {type: str}
  user: {type: text}""")]

container_schemas = [yaml.safe_load("""
seq:
  - {type: any}""")]


class Execute(Descriptor):
    def __init__(self, descriptor, module_name):
        self.schemas = execute_schemas
        super(Execute, self).__init__(descriptor)

        descriptor['directory'] = module_name
        if 'user' not in descriptor:
            descriptor['user'] = cekit.DEFAULT_USER
        descriptor['module_name'] = module_name
        if 'name' not in descriptor:
            descriptor['name'] = "%s/%s" % (module_name, descriptor['script'])


class ExecuteContainer(Descriptor):
    """Container holding Execute classes. I't responsible for correct
    Execute Class merging and ordering"""
    def __init__(self, descriptor, module_name):
        self.schemas = container_schemas
        super(ExecuteContainer, self).__init__(descriptor)
        self.name = module_name

        if not descriptor:
            descriptor = [{'name': 'noop'}]

        self._descriptor = [Execute(x, module_name) for x in descriptor]

    def _get_real_executes(self):
        return [x for x in self._descriptor if x['name'] != 'noop']

    def __len__(self):
        return len(self._get_real_executes())

    def __iter__(self):
        return iter(self._get_real_executes())

    def merge(self, descriptor):
        """To merge modules in correct order we need to insert new executes
        before the last module. This the raeson why noop execut exists"""
        prev_module = self._descriptor[-1]['module_name']
        pos = 0
        for executes in self._descriptor:
            if executes['module_name'] == prev_module:
                continue
            pos += 1

        for executes in reversed(list(descriptor)):
            if executes not in self._descriptor:
                self._descriptor.insert(pos, executes)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for i, execute in enumerate(self._descriptor):
            if execute != other[i]:
                return False
        return True
old_contents:
import yaml

import cekit
from cekit.descriptor import Descriptor


execute_schemas = [yaml.safe_load("""
map:
  name: {type: str}
  script: {type: str}
  user: {type: text}""")]

container_schemas = [yaml.safe_load("""
seq:
  - {type: any}""")]


class Execute(Descriptor):
    def __init__(self, descriptor, module_name):
        self.schemas = execute_schemas
        super(Execute, self).__init__(descriptor)

        descriptor['directory'] = module_name
        if 'user' not in descriptor:
            descriptor['user'] = cekit.DEFAULT_USER
        descriptor['module_name'] = module_name
        if 'name' not in descriptor:
            descriptor['name'] = "%s/%s" % (module_name, descriptor['script'])


class ExecuteContainer(Descriptor):
    """Container holding Execute classes. I't responsible for correct
    Execute Class merging and ordering"""
    def __init__(self, descriptor, module_name):
        self.schemas = container_schemas
        super(ExecuteContainer, self).__init__(descriptor)
        self.name = module_name

        if not descriptor:
            descriptor = [{'name': 'noop'}]

        self._descriptor = [Execute(x, module_name) for x in descriptor]

    def _get_real_executes(self):
        return [x for x in self._descriptor if x['name'] != 'noop']

    def __len__(self):
        return len(self._get_real_executes())

    def __iter__(self):
        return iter(self._get_real_executes())

    def merge(self, descriptor):
        """To merge modules in correct order we need to insert new executes
        before the last module. This the raeson why noop execut exists"""
        prev_module = self._descriptor[-1]['module_name']
        pos = 0
        for executes in self._descriptor:
            if executes['module_name'] == prev_module:
                continue
            pos += 1

        for executes in reversed(list(descriptor)):
            if executes not in self._descriptor:
                self._descriptor.insert(pos, executes)

    def __eq__(self, other):
        for i, execute in enumerate(self._descriptor):
            if execute != other[i]:
                return False
        return True
mit
Python
b0d88fccf51f240800fda462bddb9d934931c078
Update tests.py
py-in-the-sky/challenges,py-in-the-sky/challenges,py-in-the-sky/challenges
intermediate_words_search_python/tests.py
intermediate_words_search_python/tests.py
new_contents:
# imports from solution
from main import solution, LOCAL_DICTIONARY
from utils import show_path
from time import time


TEST_CASES = (
    # start word, target word, minimal path length
    ( 'cat', 'dog', 4 ),
    ( 'cat', 'mistrial', 9 ),
    ( 'strong', 'weak', 7 ),
    ( 'hot', 'cold', 4 ),
    ( 'up', 'down', 5 ),
    ( 'left', 'right', 7 ),
    ( 'light', 'heavy', 10 ),
    ( 'computer', 'virus', 12 ),
    ( 'strike', 'freeze', 6 ),
    ( 'fan', 'for', 3 ),
    ( 'duck', 'dusty', 4 ),
    ( 'rue', 'be', 3 ),
    ( 'rue', 'defuse', 5 ),
    ( 'rue', 'bend', 5 ),
    ( 'zoologist', 'zoology', None )  # no path; these two words are disjoint
)


def tests2():
    t0 = time()
    opts = { 'search_method': 'A*', 'dictionary_filename': LOCAL_DICTIONARY }

    for start_word,target_word,path_len in TEST_CASES:
        path = solution(start_word, target_word, opts)
        assert (len(path) if path else None) == path_len

    return 'tests pass in {} seconds!'.format(time() - t0)


def tests():
    for search_method in ('BFS', 'A*'):
        opts = { 'search_method': search_method }

        assert solution('cat', 'dog', opts) == ('cat', 'cot', 'dot', 'dog')
        assert solution('cat', 'dot', opts) == ('cat', 'cot', 'dot')
        assert solution('cat', 'cot', opts) == ('cat', 'cot')
        assert solution('cat', 'cat', opts) == ('cat', )
        assert solution('fan', 'for', opts) == ('fan', 'fin', 'fir', 'for')
        assert solution('place', 'places', opts) == ('place', 'places')
        assert solution('duck', 'dusty', opts) == ('duck', 'dusk', 'dust', 'dusty')
        assert solution('duck', 'ducked', opts) is None
        assert solution('rue', 'be', opts) == ('rue', 'run', 'runt', 'bunt', 'bent', 'beet', 'bee', 'be')
        assert solution('rue', 'defuse', opts) == ('rue', 'ruse', 'reuse', 'refuse', 'defuse')

        not_a_word_1 = 'NotAWord'
        assert solution('rue', not_a_word_1, opts) is None

        not_a_word_2 = 'plar'
        assert solution(not_a_word_2, 'play', opts) == (not_a_word_2, 'play')

        not_a_word_3 = 'blah'
        assert solution(not_a_word_3, 'defuse', opts) is None

    return 'tests pass!'


if __name__ == '__main__':
    # print tests()
    print tests2()
old_contents:
# imports from solution
from main import solution
from utils import show_path


def tests():
    for search_method in ('BFS', 'A*'):
        opts = { 'search_method': search_method }

        assert solution('cat', 'dog', opts) == ('cat', 'cot', 'dot', 'dog')
        assert solution('cat', 'dot', opts) == ('cat', 'cot', 'dot')
        assert solution('cat', 'cot', opts) == ('cat', 'cot')
        assert solution('cat', 'cat', opts) == ('cat', )
        assert solution('fan', 'for', opts) == ('fan', 'fin', 'fir', 'for')
        assert solution('place', 'places', opts) == ('place', 'places')
        assert solution('duck', 'dusty', opts) == ('duck', 'dusk', 'dust', 'dusty')
        assert solution('duck', 'ducked', opts) is None
        assert solution('rue', 'be', opts) == ('rue', 'run', 'runt', 'bunt', 'bent', 'beet', 'bee', 'be')
        assert solution('rue', 'defuse', opts) == ('rue', 'ruse', 'reuse', 'refuse', 'defuse')

        not_a_word_1 = 'NotAWord'
        assert solution('rue', not_a_word_1, opts) is None

        not_a_word_2 = 'plar'
        assert solution(not_a_word_2, 'play', opts) == (not_a_word_2, 'play')

        not_a_word_3 = 'blah'
        assert solution(not_a_word_3, 'defuse', opts) is None

    return 'tests pass!'


if __name__ == '__main__':
    print tests()
mit
Python
32e83559e00b7d5a363585d599cd087af854c445
Support custom initializer in links.CRF1d
keisuke-umezawa/chainer,hvy/chainer,okuta/chainer,niboshi/chainer,niboshi/chainer,okuta/chainer,chainer/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,okuta/chainer,pfnet/chainer,tkerola/chainer,wkentaro/chainer,chainer/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,hvy/chainer,chainer/chainer,hvy/chainer,keisuke-umezawa/chainer,niboshi/chainer,wkentaro/chainer,wkentaro/chainer,wkentaro/chainer
chainer/links/loss/crf1d.py
chainer/links/loss/crf1d.py
new_contents:
from chainer.functions.loss import crf1d
from chainer import link
from chainer import variable


class CRF1d(link.Link):

    """Linear-chain conditional random field loss layer.

    This link wraps the :func:`~chainer.functions.crf1d` function.
    It holds a transition cost matrix as a parameter.

    Args:
        n_label (int): Number of labels.

    .. seealso:: :func:`~chainer.functions.crf1d` for more detail.

    Attributes:
        cost (~chainer.Variable): Transition cost parameter.
    """

    def __init__(self, n_label, initialW=0):
        super(CRF1d, self).__init__()
        with self.init_scope():
            self.cost = variable.Parameter(initializer=initialW,
                                           shape=(n_label, n_label))

    def forward(self, xs, ys, reduce='mean'):
        return crf1d.crf1d(self.cost, xs, ys, reduce)

    def argmax(self, xs):
        """Computes a state that maximizes a joint probability.

        Args:
            xs (list of Variable): Input vector for each label.

        Returns:
            tuple: A tuple of :class:`~chainer.Variable` representing each
                log-likelihood and a list representing the argmax path.

        .. seealso:: See :func:`~chainer.frunctions.crf1d_argmax` for more
            detail.

        """
        return crf1d.argmax_crf1d(self.cost, xs)
old_contents:
from chainer.functions.loss import crf1d
from chainer import link
from chainer import variable


class CRF1d(link.Link):

    """Linear-chain conditional random field loss layer.

    This link wraps the :func:`~chainer.functions.crf1d` function.
    It holds a transition cost matrix as a parameter.

    Args:
        n_label (int): Number of labels.

    .. seealso:: :func:`~chainer.functions.crf1d` for more detail.

    Attributes:
        cost (~chainer.Variable): Transition cost parameter.
    """

    def __init__(self, n_label):
        super(CRF1d, self).__init__()
        with self.init_scope():
            self.cost = variable.Parameter(0, (n_label, n_label))

    def forward(self, xs, ys, reduce='mean'):
        return crf1d.crf1d(self.cost, xs, ys, reduce)

    def argmax(self, xs):
        """Computes a state that maximizes a joint probability.

        Args:
            xs (list of Variable): Input vector for each label.

        Returns:
            tuple: A tuple of :class:`~chainer.Variable` representing each
                log-likelihood and a list representing the argmax path.

        .. seealso:: See :func:`~chainer.frunctions.crf1d_argmax` for more
            detail.

        """
        return crf1d.argmax_crf1d(self.cost, xs)
mit
Python
8214d516b3feba92ab3ad3b1f2fa1cf253e83012
Remove use of deprecated `scan_plugins` method
chfw/pyexcel,chfw/pyexcel
pyexcel/internal/__init__.py
pyexcel/internal/__init__.py
""" pyexcel.internal ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pyexcel internals that subjected to change :copyright: (c) 2015-2017 by Onni Software Ltd. :license: New BSD License """ from lml.loader import scan_plugins from pyexcel.internal.plugins import PARSER, RENDERER # noqa from pyexcel.internal.source_plugin import SOURCE # noqa from pyexcel.internal.generators import SheetStream, BookStream # noqa BLACK_LIST = [ "pyexcel_io", "pyexcel_webio", "pyexcel_xlsx", "pyexcel_xls", "pyexcel_ods3", "pyexcel_ods", "pyexcel_odsr", "pyexcel_xlsxw", ] WHITE_LIST = [ "pyexcel.plugins.parsers", "pyexcel.plugins.renderers", "pyexcel.plugins.sources", ] scan_plugins_regex("^pyexcel_.+$", "pyexcel", BLACK_LIST, WHITE_LIST)
""" pyexcel.internal ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pyexcel internals that subjected to change :copyright: (c) 2015-2017 by Onni Software Ltd. :license: New BSD License """ from lml.loader import scan_plugins from pyexcel.internal.plugins import PARSER, RENDERER # noqa from pyexcel.internal.source_plugin import SOURCE # noqa from pyexcel.internal.generators import SheetStream, BookStream # noqa BLACK_LIST = [ "pyexcel_io", "pyexcel_webio", "pyexcel_xlsx", "pyexcel_xls", "pyexcel_ods3", "pyexcel_ods", "pyexcel_odsr", "pyexcel_xlsxw", ] WHITE_LIST = [ "pyexcel.plugins.parsers", "pyexcel.plugins.renderers", "pyexcel.plugins.sources", ] scan_plugins("pyexcel_", "pyexcel", BLACK_LIST, WHITE_LIST)
bsd-3-clause
Python
708e105713d7fd480b4b45d3ef31a46e35e63a8e
annotate calculate_tx_fee.
PeerAssets/pypeerassets
pypeerassets/transactions.py
pypeerassets/transactions.py
new_contents:
'''transaction assembly/dissasembly'''

from time import time
from math import ceil
from btcpy.structs.address import Address
from btcpy.structs.transaction import TxOut, TxIn, Sequence, Locktime, MutableTransaction
from btcpy.structs.script import StackData, ScriptSig, NulldataScript, ScriptSig, ScriptPubKey
from btcpy.structs.script import P2pkhScript, MultisigScript, P2shScript
from .networks import query


def calculate_tx_fee(tx_size: int) -> float:
    '''return tx fee from tx size in bytes'''

    min_fee = 0.01  # minimum

    return ceil(tx_size / 1000) * min_fee


def nulldata_script(data: bytes):
    '''create nulldata (OP_return) script'''

    stack = StackData.from_bytes(data)
    return NulldataScript(stack)


def p2pkh_script(address: str):
    '''create pay-to-key-hash (P2PKH) script'''

    addr = Address.from_string(address)
    return P2pkhScript(addr)


def tx_output(value: float, seq: int, script: ScriptSig):
    '''create TxOut object'''

    return TxOut(int(value * 1000000), seq, script)


def make_raw_transaction(inputs: list, outputs: list, locktime=Locktime(0),
                         timestamp: int=int(time()), version=1):
    '''create raw transaction'''

    return MutableTransaction(version, timestamp, inputs, outputs, locktime)


def find_parent_outputs(provider, utxo: TxIn):
    '''due to design of the btcpy library, TxIn object must be converted to
    TxOut object before signing'''

    index = utxo.txout  # utxo index
    return TxOut.from_json(provider.getrawtransaction(utxo.txid)['vout'][index])
old_contents:
'''transaction assembly/dissasembly'''

from time import time
from math import ceil
from btcpy.structs.address import Address
from btcpy.structs.transaction import TxOut, TxIn, Sequence, Locktime, MutableTransaction
from btcpy.structs.script import StackData, ScriptSig, NulldataScript, ScriptSig, ScriptPubKey
from btcpy.structs.script import P2pkhScript, MultisigScript, P2shScript
from .networks import query


def calculate_tx_fee(tx_size: int):
    '''return tx fee from tx size in bytes'''

    min_fee = 0.01  # minimum

    return ceil(tx_size / 1000) * min_fee


def nulldata_script(data: bytes):
    '''create nulldata (OP_return) script'''

    stack = StackData.from_bytes(data)
    return NulldataScript(stack)


def p2pkh_script(address: str):
    '''create pay-to-key-hash (P2PKH) script'''

    addr = Address.from_string(address)
    return P2pkhScript(addr)


def tx_output(value: float, seq: int, script: ScriptSig):
    '''create TxOut object'''

    return TxOut(int(value * 1000000), seq, script)


def make_raw_transaction(inputs: list, outputs: list, locktime=Locktime(0),
                         timestamp: int=int(time()), version=1):
    '''create raw transaction'''

    return MutableTransaction(version, timestamp, inputs, outputs, locktime)


def find_parent_outputs(provider, utxo: TxIn):
    '''due to design of the btcpy library, TxIn object must be converted to
    TxOut object before signing'''

    index = utxo.txout  # utxo index
    return TxOut.from_json(provider.getrawtransaction(utxo.txid)['vout'][index])
bsd-3-clause
Python
56e4c14ea6e2266bb8fa6f25ef1c0a3b2123f5ad
fix py3k
squeaky-pl/pystacia,squeaky-pl/pystacia
pystacia/image/_impl/blur.py
pystacia/image/_impl/blur.py
new_contents:
# coding: utf-8

# pystacia/image/_impl/blur.py
# Copyright (C) 2011-2012 by Paweł Piotr Przeradowski

# This module is part of Pystacia and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from future_builtins import zip


def _make_radius_strength_bias(c_name, names, order=None):
    def function(image, *args):
        kwargs = dict(zip(names, args))

        if kwargs['strength'] is None:
            kwargs['strength'] = kwargs['radius']

        if 'bias' in kwargs and kwargs['bias'] is None:
            kwargs['bias'] = 0

        order_ = order or names
        values = [kwargs[k] for k in order_]

        c_call(image, c_name, *values)

    return function


blur = _make_radius_strength_bias('blur', ['radius', 'strength'])

gaussian_blur = _make_radius_strength_bias(
    'gaussian_blur', ['radius', 'strength', 'bias'])

motion_blur = _make_radius_strength_bias(
    'motion_blur', ['radius', 'angle', 'strength', 'bias'],
    ['radius', 'strength', 'angle', 'bias'])

adaptive_blur = _make_radius_strength_bias(
    'adaptive_blur', ['radius', 'strength', 'bias'])

sharpen = _make_radius_strength_bias(
    'sharpen', ['radius', 'strength', 'bias'])

adaptive_sharpen = _make_radius_strength_bias(
    'adaptive_sharpen', ['radius', 'strength', 'bias'])

detect_edges = _make_radius_strength_bias('edge', ['radius', 'strength'])


#TODO: moving center here
def radial_blur(image, angle):
    """Performs radial blur.

    :param angle: Blur angle in degrees
    :type angle: ``float``

    Radial blurs image within given angle.

    This method can be chained.
    """
    c_call(image, 'radial_blur', angle)


def denoise(image):
    """Attempt to remove noise preserving edges.

    Applies a digital filter that improves the quality of a noisy image.

    This method can be chained.
    """
    c_call(image, 'enhance')


def despeckle(image):
    """Attempt to remove speckle preserving edges.

    Resulting image almost solid color areas are smoothed preserving edges.

    This method can be chained.
    """
    c_call(image, 'despeckle')


emboss = _make_radius_strength_bias('emboss', ['radius', 'strength'])

from pystacia.api.func import c_call
old_contents:
# coding: utf-8

# pystacia/image/_impl/blur.py
# Copyright (C) 2011-2012 by Paweł Piotr Przeradowski

# This module is part of Pystacia and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


def _make_radius_strength_bias(c_name, names, order=None):
    def function(image, *args):
        kwargs = dict(zip(names, args))

        if kwargs['strength'] is None:
            kwargs['strength'] = kwargs['radius']

        if 'bias' in kwargs and kwargs['bias'] is None:
            kwargs['bias'] = 0

        order_ = order or names
        values = [kwargs[k] for k in order_]

        c_call(image, c_name, *values)

    return function


blur = _make_radius_strength_bias('blur', ['radius', 'strength'])

gaussian_blur = _make_radius_strength_bias(
    'gaussian_blur', ['radius', 'strength', 'bias'])

motion_blur = _make_radius_strength_bias(
    'motion_blur', ['radius', 'angle', 'strength', 'bias'],
    ['radius', 'strength', 'angle', 'bias'])

adaptive_blur = _make_radius_strength_bias(
    'adaptive_blur', ['radius', 'strength', 'bias'])

sharpen = _make_radius_strength_bias(
    'sharpen', ['radius', 'strength', 'bias'])

adaptive_sharpen = _make_radius_strength_bias(
    'adaptive_sharpen', ['radius', 'strength', 'bias'])

detect_edges = _make_radius_strength_bias('edge', ['radius', 'strength'])


#TODO: moving center here
def radial_blur(image, angle):
    """Performs radial blur.

    :param angle: Blur angle in degrees
    :type angle: ``float``

    Radial blurs image within given angle.

    This method can be chained.
    """
    c_call(image, 'radial_blur', angle)


def denoise(image):
    """Attempt to remove noise preserving edges.

    Applies a digital filter that improves the quality of a noisy image.

    This method can be chained.
    """
    c_call(image, 'enhance')


def despeckle(image):
    """Attempt to remove speckle preserving edges.

    Resulting image almost solid color areas are smoothed preserving edges.

    This method can be chained.
    """
    c_call(image, 'despeckle')


emboss = _make_radius_strength_bias('emboss', ['radius', 'strength'])

from pystacia.api.func import c_call
mit
Python
27273335422781dcee950ee081ed13e53816e6d6
Bump version
thombashi/pytablewriter
pytablewriter/__version__.py
pytablewriter/__version__.py
__author__ = "Tsuyoshi Hombashi" __copyright__ = f"Copyright 2016, {__author__}" __license__ = "MIT License" __version__ = "0.63.0" __maintainer__ = __author__ __email__ = "[email protected]"
__author__ = "Tsuyoshi Hombashi" __copyright__ = f"Copyright 2016, {__author__}" __license__ = "MIT License" __version__ = "0.62.0" __maintainer__ = __author__ __email__ = "[email protected]"
mit
Python
349f975e257192458b1944753d4f609869b9e8d6
use yield_fixture to support pytest on py2.7
fireeye/flare-wmi,fireeye/flare-wmi,fireeye/flare-wmi
python-cim/tests/fixtures.py
python-cim/tests/fixtures.py
new_contents:
import os

import pytest

import cim
import cim.objects


@pytest.fixture
def repopath():
    """
    Returns:
        str: path to the repos/win7/deleted-instance repository
    """
    cd = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(cd, 'repos', 'win7', 'deleted-instance')


@pytest.fixture
def repo():
    """
    Returns:
        cim.CIM: repos/win7/deleted-instance repository
    """
    return cim.CIM(cim.CIM_TYPE_WIN7, repopath())


@pytest.yield_fixture
def root():
    r = repo()
    with cim.objects.Namespace(r, cim.objects.ROOT_NAMESPACE_NAME) as ns:
        yield ns
old_contents:
import os

import pytest

import cim
import cim.objects


@pytest.fixture
def repopath():
    """
    Returns:
        str: path to the repos/win7/deleted-instance repository
    """
    cd = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(cd, 'repos', 'win7', 'deleted-instance')


@pytest.fixture
def repo():
    """
    Returns:
        cim.CIM: repos/win7/deleted-instance repository
    """
    return cim.CIM(cim.CIM_TYPE_WIN7, repopath())


@pytest.fixture
def root():
    r = repo()
    with cim.objects.Namespace(r, cim.objects.ROOT_NAMESPACE_NAME) as ns:
        yield ns
apache-2.0
Python
fe4c66b2e50035ab2701923d6a2cd0cb82e63780
Fix call mkl gemm in mkldnn.py (#7007)
Laurawly/tvm-1,dmlc/tvm,dmlc/tvm,Laurawly/tvm-1,Laurawly/tvm-1,Laurawly/tvm-1,Laurawly/tvm-1,dmlc/tvm,Laurawly/tvm-1,dmlc/tvm,Laurawly/tvm-1,dmlc/tvm,Laurawly/tvm-1,dmlc/tvm,Laurawly/tvm-1,dmlc/tvm,dmlc/tvm,dmlc/tvm,Laurawly/tvm-1
python/tvm/contrib/mkldnn.py
python/tvm/contrib/mkldnn.py
new_contents:
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""External function interface to BLAS libraries."""
import tvm
from tvm import te


def matmul(lhs, rhs, transa=False, transb=False, **kwargs):
    """Create an extern op that compute matrix mult of A and rhs with CrhsLAS
    This function serves as an example on how to call external libraries.

    Parameters
    ----------
    lhs: Tensor
        The left matrix operand
    rhs: Tensor
        The right matrix operand
    transa: bool
        Whether transpose lhs
    transb: bool
        Whether transpose rhs

    Returns
    -------
    C: Tensor
        The result tensor.
    """
    n = lhs.shape[1] if transa else lhs.shape[0]
    m = rhs.shape[0] if transb else rhs.shape[1]
    return te.extern(
        (n, m),
        [lhs, rhs],
        lambda ins, outs: tvm.tir.call_packed(
            "tvm.contrib.mkldnn.matmul", ins[0], ins[1], outs[0], transa, transb
        ),
        name="C",
        **kwargs,
    )
old_contents:
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""External function interface to BLAS libraries."""
import tvm
from tvm import te


def matmul(lhs, rhs, transa=False, transb=False, **kwargs):
    """Create an extern op that compute matrix mult of A and rhs with CrhsLAS
    This function serves as an example on how to call external libraries.

    Parameters
    ----------
    lhs: Tensor
        The left matrix operand
    rhs: Tensor
        The right matrix operand
    transa: bool
        Whether transpose lhs
    transb: bool
        Whether transpose rhs

    Returns
    -------
    C: Tensor
        The result tensor.
    """
    n = lhs.shape[1] if transa else lhs.shape[0]
    m = rhs.shape[0] if transb else rhs.shape[1]
    return te.extern(
        (n, m),
        [lhs, rhs],
        lambda ins, outs: tvm.tir.call_packed(
            "tvm.contrib.mkl.matmul", ins[0], ins[1], outs[0], transa, transb
        ),
        name="C",
        **kwargs,
    )
apache-2.0
Python
a2097bf7c3103ec6e0482bb34f3b33c753ea7889
Enable QtNetwork test for PySide
davvid/qtpy,davvid/qtpy,spyder-ide/qtpy,goanpeca/qtpy,goanpeca/qtpy
qtpy/tests/test_qtnetwork.py
qtpy/tests/test_qtnetwork.py
new_contents:
from __future__ import absolute_import

import pytest
from qtpy import PYSIDE, PYSIDE2, QtNetwork


def test_qtnetwork():
    """Test the qtpy.QtNetwork namespace"""
    assert QtNetwork.QAbstractNetworkCache is not None
    assert QtNetwork.QNetworkCacheMetaData is not None
    if not PYSIDE and not PYSIDE2:
        assert QtNetwork.QHttpMultiPart is not None
        assert QtNetwork.QHttpPart is not None
    assert QtNetwork.QNetworkAccessManager is not None
    assert QtNetwork.QNetworkCookie is not None
    assert QtNetwork.QNetworkCookieJar is not None
    assert QtNetwork.QNetworkDiskCache is not None
    assert QtNetwork.QNetworkReply is not None
    assert QtNetwork.QNetworkRequest is not None
    assert QtNetwork.QNetworkConfigurationManager is not None
    assert QtNetwork.QNetworkConfiguration is not None
    assert QtNetwork.QNetworkSession is not None
    assert QtNetwork.QAuthenticator is not None
    assert QtNetwork.QHostAddress is not None
    assert QtNetwork.QHostInfo is not None
    assert QtNetwork.QNetworkAddressEntry is not None
    assert QtNetwork.QNetworkInterface is not None
    assert QtNetwork.QNetworkProxy is not None
    assert QtNetwork.QNetworkProxyFactory is not None
    assert QtNetwork.QNetworkProxyQuery is not None
    assert QtNetwork.QAbstractSocket is not None
    assert QtNetwork.QLocalServer is not None
    assert QtNetwork.QLocalSocket is not None
    assert QtNetwork.QTcpServer is not None
    assert QtNetwork.QTcpSocket is not None
    assert QtNetwork.QUdpSocket is not None
    if not PYSIDE:
        assert QtNetwork.QSslCertificate is not None
        assert QtNetwork.QSslCipher is not None
        assert QtNetwork.QSslConfiguration is not None
        assert QtNetwork.QSslError is not None
        assert QtNetwork.QSslKey is not None
        assert QtNetwork.QSslSocket is not None
old_contents:
from __future__ import absolute_import

import pytest
from qtpy import PYSIDE, PYSIDE2, QtNetwork


@pytest.mark.skipif(PYSIDE2 or PYSIDE, reason="It fails on PySide/PySide2")
def test_qtnetwork():
    """Test the qtpy.QtNetwork namespace"""
    assert QtNetwork.QAbstractNetworkCache is not None
    assert QtNetwork.QNetworkCacheMetaData is not None
    assert QtNetwork.QHttpMultiPart is not None
    assert QtNetwork.QHttpPart is not None
    assert QtNetwork.QNetworkAccessManager is not None
    assert QtNetwork.QNetworkCookie is not None
    assert QtNetwork.QNetworkCookieJar is not None
    assert QtNetwork.QNetworkDiskCache is not None
    assert QtNetwork.QNetworkReply is not None
    assert QtNetwork.QNetworkRequest is not None
    assert QtNetwork.QNetworkConfigurationManager is not None
    assert QtNetwork.QNetworkConfiguration is not None
    assert QtNetwork.QNetworkSession is not None
    assert QtNetwork.QAuthenticator is not None
    assert QtNetwork.QHostAddress is not None
    assert QtNetwork.QHostInfo is not None
    assert QtNetwork.QNetworkAddressEntry is not None
    assert QtNetwork.QNetworkInterface is not None
    assert QtNetwork.QNetworkProxy is not None
    assert QtNetwork.QNetworkProxyFactory is not None
    assert QtNetwork.QNetworkProxyQuery is not None
    assert QtNetwork.QAbstractSocket is not None
    assert QtNetwork.QLocalServer is not None
    assert QtNetwork.QLocalSocket is not None
    assert QtNetwork.QTcpServer is not None
    assert QtNetwork.QTcpSocket is not None
    assert QtNetwork.QUdpSocket is not None
    assert QtNetwork.QSslCertificate is not None
    assert QtNetwork.QSslCipher is not None
    assert QtNetwork.QSslConfiguration is not None
    assert QtNetwork.QSslError is not None
    assert QtNetwork.QSslKey is not None
    assert QtNetwork.QSslSocket is not None
mit
Python
cd323386b61cd280fcf3e599ae6a02b889f81a40
Remove support for old style api urls
getsentry/raven-python,johansteffner/raven-python,getsentry/raven-python,johansteffner/raven-python,johansteffner/raven-python,getsentry/raven-python
raven/contrib/django/urls.py
raven/contrib/django/urls.py
""" raven.contrib.django.urls ~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import try: from django.conf.urls import url except ImportError: # for Django version less than 1.4 from django.conf.urls.defaults import url # NOQA import raven.contrib.django.views urlpatterns = ( url(r'^api/(?P<project_id>[\w_-]+)/store/$', raven.contrib.django.views.report, name='raven-report'), url(r'^report/', raven.contrib.django.views.report), )
""" raven.contrib.django.urls ~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import try: from django.conf.urls import url except ImportError: # for Django version less than 1.4 from django.conf.urls.defaults import url # NOQA import raven.contrib.django.views urlpatterns = ( url(r'^api/(?:(?P<project_id>[\w_-]+)/)?store/$', raven.contrib.django.views.report, name='raven-report'), url(r'^report/', raven.contrib.django.views.report), )
bsd-3-clause
Python
80c0f29d7b81a68d14e261ce1c062d37a0e6d4f7
Increase ARM process startup time baseline
firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker,firecracker-microvm/firecracker
tests/integration_tests/performance/test_process_startup_time.py
tests/integration_tests/performance/test_process_startup_time.py
new_contents:
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Test that the process startup time up to socket bind is within spec."""

import json
import os
import platform
import time

import host_tools.logging as log_tools

MAX_STARTUP_TIME_CPU_US = {'x86_64': 5500, 'aarch64': 2800}
"""
The maximum acceptable startup time in CPU us.
"""
# TODO: Keep a `current` startup time in S3 and validate we don't regress


def test_startup_time(test_microvm_with_api):
    """Check the startup time for jailer and Firecracker up to socket bind."""
    microvm = test_microvm_with_api
    microvm.spawn()

    microvm.basic_config(vcpu_count=2, mem_size_mib=1024)

    # Configure metrics.
    metrics_fifo_path = os.path.join(microvm.path, 'metrics_fifo')
    metrics_fifo = log_tools.Fifo(metrics_fifo_path)

    response = microvm.metrics.put(
        metrics_path=microvm.create_jailed_resource(metrics_fifo.path)
    )
    assert microvm.api_session.is_status_no_content(response.status_code)

    microvm.start()
    time.sleep(0.4)

    # The metrics fifo should be at index 1.
    # Since metrics are flushed at InstanceStart, the first line will suffice.
    lines = metrics_fifo.sequential_reader(1)
    metrics = json.loads(lines[0])
    startup_time_us = metrics['api_server']['process_startup_time_us']
    cpu_startup_time_us = metrics['api_server']['process_startup_time_cpu_us']

    print('Process startup time is: {} us ({} CPU us)'
          .format(startup_time_us, cpu_startup_time_us))

    assert cpu_startup_time_us > 0
    assert cpu_startup_time_us <= MAX_STARTUP_TIME_CPU_US[platform.machine()]
old_contents:
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Test that the process startup time up to socket bind is within spec."""

import json
import os
import platform
import time

import host_tools.logging as log_tools

MAX_STARTUP_TIME_CPU_US = {'x86_64': 5500, 'aarch64': 2600}
"""
The maximum acceptable startup time in CPU us.
"""
# TODO: Keep a `current` startup time in S3 and validate we don't regress


def test_startup_time(test_microvm_with_api):
    """Check the startup time for jailer and Firecracker up to socket bind."""
    microvm = test_microvm_with_api
    microvm.spawn()

    microvm.basic_config(vcpu_count=2, mem_size_mib=1024)

    # Configure metrics.
    metrics_fifo_path = os.path.join(microvm.path, 'metrics_fifo')
    metrics_fifo = log_tools.Fifo(metrics_fifo_path)

    response = microvm.metrics.put(
        metrics_path=microvm.create_jailed_resource(metrics_fifo.path)
    )
    assert microvm.api_session.is_status_no_content(response.status_code)

    microvm.start()
    time.sleep(0.4)

    # The metrics fifo should be at index 1.
    # Since metrics are flushed at InstanceStart, the first line will suffice.
    lines = metrics_fifo.sequential_reader(1)
    metrics = json.loads(lines[0])
    startup_time_us = metrics['api_server']['process_startup_time_us']
    cpu_startup_time_us = metrics['api_server']['process_startup_time_cpu_us']

    print('Process startup time is: {} us ({} CPU us)'
          .format(startup_time_us, cpu_startup_time_us))

    assert cpu_startup_time_us > 0
    assert cpu_startup_time_us <= MAX_STARTUP_TIME_CPU_US[platform.machine()]
apache-2.0
Python
0a80fa2e610637a6c657f99c8eac5d99e33b5844
Use /usr/bin/env in #!
chisophugis/clang-compdb-in-object-file,chisophugis/clang-compdb-in-object-file
lines2jsonarray.py
lines2jsonarray.py
#!/usr/bin/env python from __future__ import print_function import sys print('[') for i, line in enumerate(sys.stdin): if i != 0: print(',') print(line) print(']')
#!/usr/bin/python from __future__ import print_function import sys print('[') for i, line in enumerate(sys.stdin): if i != 0: print(',') print(line) print(']')
mit
Python
14884a18b42dab22e5893c619164049a475888cc
fix traceback on bank accounts creation
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
addons/account_bank_statement_import/wizard/setup_wizards.py
addons/account_bank_statement_import/wizard/setup_wizards.py
# -*- coding: utf-8 -*- from odoo import models, fields, api class SetupBarBankConfigWizard(models.TransientModel): _inherit = 'account.setup.bank.manual.config' def validate(self): """ Default the bank statement source of new bank journals as 'file_import' """ super(SetupBarBankConfigWizard, self).validate() if (self.create_or_link_option == 'new' or self.linked_journal_id.bank_statements_source == 'undefined') \ and self.env['account.journal']._get_bank_statements_available_import_formats(): self.linked_journal_id.bank_statements_source = 'file_import'
# -*- coding: utf-8 -*- from odoo import models, fields, api class SetupBarBankConfigWizard(models.TransientModel): _inherit = 'account.setup.bank.manual.config' def validate(self): """ Default the bank statement source of new bank journals as 'file_import' """ super(SetupBarBankConfigWizard, self).validate() if self.create_or_link_option == 'new' or self.linked_journal_id.bank_statements_source == 'undefined' \ and self.env['account.journal']._get_bank_statements_available_import_formats(): self.linked_journal_id.bank_statements_source = 'file_import'
agpl-3.0
Python
8fbdced7a4c8ea61116e8c978e420c30b8a1f1dc
update for urls.py
cdelguercio/slothauth,cdelguercio/slothauth
slothauth/urls.py
slothauth/urls.py
from django.conf.urls import include, url from rest_framework.routers import DefaultRouter from .views import change_email, login, logout, password_reset, profile, signup, passwordless_signup,\ passwordless_login, AccountViewSet, AuthViewSet from . import settings router = DefaultRouter() router.register(r'accounts', AccountViewSet) router.register(r'accounts/auth', AuthViewSet) urlpatterns = [ url(r'^api/' + settings.API_VERSION + '/', include(router.urls)), # TODO makes sense to have a settings.API_BASE_URL rather than a settings.API_VERSION? url(r'^signup/?', signup, name='signup'), url(r'^login/?', login, name='login'), url(r'^password_reset/?', password_reset, name='password_reset'), url(r'^change_email/?', change_email, name='change_email'), url(r'^profile/?', profile, name='profile'), url(r'^logout/?', logout, name='logout'), url(r'^passwordless_signup/?', passwordless_signup, name='passwordless_signup'), url(r'^passwordless_login/?', passwordless_login, name='passwordless_login'), #(r'^password-reset-done/$', 'django.contrib.auth.views.password_reset_complete'), #(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm', # {'post_reset_redirect' : '/password-reset-done/'}), ] # TODO create setting for turning on and off debug urls
from django.conf.urls import include, url from rest_framework.routers import DefaultRouter from .views import change_email, login, logout, password_reset, profile, signup, passwordless_signup,\ passwordless_login, AccountViewSet, AuthViewSet from . import settings router = DefaultRouter() router.register(r'accounts', AccountViewSet) router.register(r'accounts/auth', AuthViewSet) urlpatterns = [ url(r'^api/' + settings.API_VERSION + '/', include(router.urls)), url(r'^signup/?', signup, name='signup'), url(r'^login/?', login, name='login'), url(r'^password_reset/?', password_reset, name='password_reset'), url(r'^change_email/?', change_email, name='change_email'), url(r'^profile/?', profile, name='profile'), url(r'^logout/?', logout, name='logout'), url(r'^passwordless_signup/?', passwordless_signup, name='passwordless_signup'), url(r'^passwordless_login/?', passwordless_login, name='passwordless_login'), #(r'^password-reset-done/$', 'django.contrib.auth.views.password_reset_complete'), #(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm', # {'post_reset_redirect' : '/password-reset-done/'}), ]
apache-2.0
Python
ac765968a9a83685c28244200958164d2a0fc81e
fix typo
nyaruka/smartmin,nyaruka/smartmin,nyaruka/smartmin,caktus/smartmin,caktus/smartmin,caktus/smartmin,caktus/smartmin
smartmin/email.py
smartmin/email.py
from django.conf import settings from django.template import Context from django.utils.module_loading import import_string def link_components(request, user=None): protocol = 'https' if request.is_secure() else 'http' hostname = getattr(settings, 'HOSTNAME', request.get_host()) return {"protocol": protocol, "hostname": hostname} def build_email_context(request=None, user=None): context = Context({'user': user}) processors = [] collect = [] collect.extend(getattr(settings, "EMAIL_CONTEXT_PROCESSORS", ('smartmin.email.link_components',))) for path in collect: func = import_string(path) processors.append(func) for processor in processors: context.update(processor(request, user)) return context
from django.conf import settings from django.template import Context from django.utils.module_loading import import_string def link_components(request, user=None): protocol = 'https' if request.is_secure() else 'http' hostname = getattr(settings, 'HOSTNAME', request.get_host()) return {"protocol": protocol, "hostname": hostname} def build_email_context(request=None, user=None): context = Context({'user': user}) processors = [] collect = [] collect.extend(getattr(settings, "EMAIL_CONTEXT_PROCESSORS", ('smartmin.emaile.link_components',))) for path in collect: func = import_string(path) processors.append(func) for processor in processors: context.update(processor(request, user)) return context
bsd-3-clause
Python
091735fce650d6326e73ca6fb224a77ae68bb601
Add data written to message
Frostman/SalaryZenAggregator,Frostman/SalaryZenAggregator_Old
salaryzenaggr/manager.py
salaryzenaggr/manager.py
# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from datetime import datetime from stevedore import extension from salaryzenaggr.formatters import json_formatter _fetchers = extension.ExtensionManager(namespace='salaryzenaggr.fetchers', invoke_on_load=True) def _get_fetchers(banks, currencies): for ext in _fetchers.extensions: fetcher = ext.obj if (any([bank in fetcher.get_supported_banks() for bank in banks]) and any([curr in fetcher.get_supported_currencies() for curr in currencies])): yield fetcher def aggregate_rates(banks, currencies, from_date, result_file, debug): res = {} for fetcher in _get_fetchers(banks, currencies): fetcher.fetch_data(res, currencies, from_date) formatter = json_formatter.JsonPrettyFormatter if debug else json_formatter.JsonFormatter output = formatter().format_data(res) if debug: print output print "New data aggregated at %s UTC" % datetime.utcnow() if result_file: result_file.write(output) result_file.close() print "Data successfully written to %s" % result_file
# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from datetime import datetime from stevedore import extension from salaryzenaggr.formatters import json_formatter _fetchers = extension.ExtensionManager(namespace='salaryzenaggr.fetchers', invoke_on_load=True) def _get_fetchers(banks, currencies): for ext in _fetchers.extensions: fetcher = ext.obj if (any([bank in fetcher.get_supported_banks() for bank in banks]) and any([curr in fetcher.get_supported_currencies() for curr in currencies])): yield fetcher def aggregate_rates(banks, currencies, from_date, result_file, debug): res = {} for fetcher in _get_fetchers(banks, currencies): fetcher.fetch_data(res, currencies, from_date) formatter = json_formatter.JsonPrettyFormatter if debug else json_formatter.JsonFormatter output = formatter().format_data(res) if debug: print output print "New data aggregated at %s UTC" % datetime.utcnow() if result_file: result_file.write(output) result_file.close()
apache-2.0
Python
1dd8ce20632d8a6b857a06136b89ac11b27c8f07
Update documentation
bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject
letters/models.py
letters/models.py
from django.db import models from common.models import Citation class Letter(models.Model): """A letter from one of Pliny's books of personal correspondence Attributes: book (PositiveSmallIntegerField): book number. manuscript_correspondent_name (CharField): override if manuscript correspondent as written differs from database name letter (PositiveSmallIntegerField): letter number date (PositiveSmallIntegerField): Year of letter if known. citations (ManyToManyField): Citations related to the letter """ book = models.PositiveSmallIntegerField() manuscript_correspondent_name = models.CharField(blank=True, max_length=255) letter = models.PositiveSmallIntegerField() topics = models.ManyToManyField('Topic', blank=True) date = models.PositiveSmallIntegerField(blank=True, null=True) citations = models.ManyToManyField(Citation, blank=True) class Meta: unique_together = ('book', 'letter') ordering = ['book', 'letter'] def __str__(self): return "%s.%s" % (self.book, self.letter) class Topic(models.Model): """A topic for one of Pliny's letters""" name = models.CharField(max_length=255) def __str__(self): return self.name
from django.db import models from common.models import Citation class Letter(models.Model): """A letter from one of Pliny's books of personal correspondence""" book = models.PositiveSmallIntegerField() manuscript_correspondent_name = models.CharField(blank=True, max_length=255) letter = models.PositiveSmallIntegerField() topics = models.ManyToManyField('Topic', blank=True) date = models.PositiveSmallIntegerField(blank=True, null=True) citations = models.ManyToManyField(Citation, blank=True) class Meta: unique_together = ('book', 'letter') ordering = ['book', 'letter'] def __str__(self): return "%s.%s" % (self.book, self.letter) class Topic(models.Model): """A topic for one of Pliny's letters""" name = models.CharField(max_length=255) def __str__(self): return self.name
mit
Python
c1be270c96ef07faa7ceecf4117890ce06af65a8
Update setup.py
IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense,IntelRealSense/librealsense
wrappers/python/setup.py
wrappers/python/setup.py
from setuptools import setup, find_packages from setuptools.dist import Distribution # _version.py should be generated by running find_librs_version.py and copied to pyrealsense2 folder from pyrealsense2._version import __version__ import os import io package_name = "pyrealsense2" package_data = {} print("version = ", __version__) def load_readme(): with io.open('README.rst', encoding="utf-8") as f: return f.read() if os.name == 'posix': package_data[package_name] = ['*.so'] else: package_data[package_name] = ['*.pyd', '*.dll'] # This creates a list which is empty but returns a length of 1. # Should make the wheel a binary distribution and platlib compliant. class EmptyListWithLength(list): def __len__(self): return 1 setup( name=package_name, version=__version__, author='Intel(R) RealSense(TM)', author_email='[email protected]', url='https://github.com/IntelRealSense/librealsense', scripts=['examples/align-depth2color.py', 'examples/export_ply_example.py', 'examples/opencv_viewer_example.py', 'examples/python-rs400-advanced-mode-example.py', 'examples/python-tutorial-1-depth.py' ], license='Apache License, Version 2.0', description='Python Wrapper for Intel Realsense SDK 2.0.', long_description=load_readme(), install_requires=[], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Education', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: Apache Software License', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: Unix', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Multimedia :: Video', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Human Machine Interfaces', 'Topic :: Scientific/Engineering :: Image Recognition', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Application Frameworks' ], packages=find_packages(exclude=['third_party', 'docs', 'examples']), include_package_data=True, ext_modules=EmptyListWithLength(), package_data=package_data )
from setuptools import setup, find_packages from setuptools.dist import Distribution # _version.py should be generated by running find_librs_version.py and copied to pyrealsense2 folder from pyrealsense2._version import __version__ import os import io package_name = "pyrealsense2" package_data = {} print("version = ", __version__) def load_readme(): with io.open('README.rst', encoding="utf-8") as f: return f.read() if os.name == 'posix': package_data[package_name] = ['*.so'] else: package_data[package_name] = ['*.pyd', '*.dll'] # This creates a list which is empty but returns a length of 1. # Should make the wheel a binary distribution and platlib compliant. class EmptyListWithLength(list): def __len__(self): return 1 setup( name=package_name, version=__version__, author='Intel(R) RealSense(TM)', author_email='[email protected]', url='https://github.com/IntelRealSense/librealsense', scripts=['examples/align-depth2color.py', 'examples/export_ply_example.py', 'examples/opencv_viewer_example.py', 'examples/python-rs400-advanced-mode-example.py', 'examples/python-tutorial-1-depth.py' ], license='Apache License, Version 2.0', description='Python Wrapper for Intel Realsense SDK 2.0.', long_description=load_readme(), install_requires=[], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Education', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: Apache Software License', 'Operating System :: MacOS', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: Unix', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Multimedia :: Video', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Human Machine Interfaces', 'Topic :: Scientific/Engineering :: Image Recognition', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Application Frameworks' ], packages=find_packages(exclude=['third_party', 'docs', 'examples']), include_package_data=True, ext_modules=EmptyListWithLength(), package_data=package_data )
apache-2.0
Python
60ccc393fc428d609e772b838c5a4c4002e3ec49
Add sendError method
tsnik/SkyNet
snp/SNProtocol.py
snp/SNProtocol.py
from twisted.internet import defer from twisted.protocols.basic import NetstringReceiver import json class SNError(Exception): def __init__(self, *args, **kwargs): Exception.__init__(self, args, kwargs) self.code = args[1] self.request = args[2] class SNProtocol(NetstringReceiver): id_counter = 0 def stringReceived(self, string): packet = json.loads(string) if "reqid" in packet: if len(packet["reqid"]) > 2: type = packet["reqid"][:2] reqid = packet["reqid"][2:] if type == "RQ": self.factory.service.hadleRequest(packet, reqid, self) elif type == "RE": if reqid in self.requests: self.factory.requests[reqid].callback(packet) self.factory.requests.pop(reqid) def sendRequest(self, request): reqid = str(self.id_counter) request["reqid"] = "RQ{0}".format(reqid) self._sendPacket(request) d = self.createDeferred(reqid) self.id_counter += 1 return d def sendResponse(self, request, reqid): request["reqid"] = "RE{0}".format(str(reqid)) self._sendPacket(request) def sendError(self, code, request): r = {"Error": code, "Request": request} self._sendPacket(r) def _sendPacket(self, request): json_str = json.dumps(request) self.sendString(json_str) def connectionMade(self): self.factory.service.connectionMade(self) def createDeferred(self, reqid): d = defer.Deferred() d.addCallback(self.errorChecker) self.factory.service.requests[reqid] = d return d def errorChecker(self, packet): if "Error" in packet: raise SNError("", int(packet["Error"]), packet["Request"]) return packet
from twisted.internet import defer from twisted.protocols.basic import NetstringReceiver import json class SNError(Exception): def __init__(self, *args, **kwargs): Exception.__init__(self, args, kwargs) self.code = args[1] self.request = args[2] class SNProtocol(NetstringReceiver): id_counter = 0 def stringReceived(self, string): packet = json.loads(string) if "reqid" in packet: if len(packet["reqid"]) > 2: type = packet["reqid"][:2] reqid = packet["reqid"][2:] if type == "RQ": self.factory.service.hadleRequest(packet, reqid, self) elif type == "RE": if reqid in self.requests: self.factory.requests[reqid].callback(packet) self.factory.requests.pop(reqid) def sendRequest(self, request): reqid = str(self.id_counter) request["reqid"] = "RQ{0}".format(reqid) self._sendPacket(request) d = self.createDeferred(reqid) self.id_counter += 1 return d def sendResponse(self, request, reqid): request["reqid"] = "RE{0}".format(str(reqid)) self._sendPacket(request) def _sendPacket(self, request): json_str = json.dumps(request) self.sendString(json_str) def connectionMade(self): self.factory.service.connectionMade(self) def createDeferred(self, reqid): d = defer.Deferred() d.addCallback(self.errorChecker) self.factory.service.requests[reqid] = d return d def errorChecker(self, packet): if "Error" in packet: raise SNError("", int(packet["Error"]), packet["Request"]) return packet
mit
Python
b223865ded88b5467c1088abbf628048e39e564c
Test str_cat() schema; use fixtures for exception tests
ContinuumIO/blaze,ContinuumIO/blaze
blaze/expr/tests/test_strings.py
blaze/expr/tests/test_strings.py
import pytest from datashape import dshape from blaze import symbol dshapes = ['var * {name: string}', 'var * {name: ?string}', 'var * string', 'var * ?string', 'string'] lhsrhs_ds = ['var * {name: string, comment: string[25]}', 'var * {name: string[10], comment: string}', 'var * {name: string, comment: string}', 'var * {name: ?string, comment: string}', 'var * {name: string, comment: ?string}'] @pytest.fixture(scope='module') def strcat_sym(): ''' blaze symbol used to test exceptions raised by str_cat() ''' ds = dshape('3 * {name: string, comment: string, num: int32}') s = symbol('s', dshape=ds) return s @pytest.mark.parametrize('ds', dshapes) def test_like(ds): t = symbol('t', ds) expr = getattr(t, 'name', t).like('Alice*') assert expr.pattern == 'Alice*' assert expr.schema.measure == dshape( '%sbool' % ('?' if '?' in ds else '') ).measure @pytest.mark.parametrize('ds', dshapes) def test_str_upper_schema(ds): t = symbol('t', ds) expr_upper = getattr(t, 'name', t).str_upper() expr_lower = getattr(t, 'name', t).str_upper() assert (expr_upper.schema.measure == expr_lower.schema.measure == dshape('%sstring' % ('?' if '?' in ds else '')).measure) @pytest.mark.parametrize('ds', lhsrhs_ds) def test_str_schema(ds): t = symbol('t', ds) expr = t.name.str_cat(t.comment) assert (expr.schema.measure == dshape('%sstring' % ('?' if '?' in ds else '')).measure) def test_str_cat_exception_non_string_sep(strcat_sym): with pytest.raises(TypeError): strcat_sym.name.str_cat(strcat_sym.comment, sep=123) def test_str_cat_exception_non_string_col_to_cat(strcat_sym): with pytest.raises(TypeError): strcat_sym.name.str_cat(strcat_sym.num)
import pytest from datashape import dshape from blaze import symbol dshapes = ['var * {name: string}', 'var * {name: ?string}', 'var * string', 'var * ?string', 'string'] @pytest.mark.parametrize('ds', dshapes) def test_like(ds): t = symbol('t', ds) expr = getattr(t, 'name', t).like('Alice*') assert expr.pattern == 'Alice*' assert expr.schema.measure == dshape( '%sbool' % ('?' if '?' in ds else '') ).measure @pytest.mark.parametrize('ds', dshapes) def test_str_upper_schema(ds): t = symbol('t', ds) expr_upper = getattr(t, 'name', t).str_upper() expr_lower = getattr(t, 'name', t).str_upper() assert (expr_upper.schema.measure == expr_lower.schema.measure == dshape('%sstring' % ('?' if '?' in ds else '')).measure) class TestStrCatExceptions(): ds = dshape('3 * {name: string[10], comment: string[25], num: int32}') s = symbol('s', dshape=ds) def test_str_cat_exception_non_string_sep(self): with pytest.raises(TypeError): self.s.name.str_cat(self.s.comment, sep=123) def test_str_cat_exception_non_string_col_to_cat(self): with pytest.raises(TypeError): self.s.name.str_cat(self.s.num)
bsd-3-clause
Python
09fffb062b45e4715c092c0899a6d4f89cf0b4e1
Fix toolbarbox test
azjps/bokeh,Karel-van-de-Plassche/bokeh,rs2/bokeh,dennisobrien/bokeh,ericmjl/bokeh,schoolie/bokeh,aavanian/bokeh,dennisobrien/bokeh,mindriot101/bokeh,Karel-van-de-Plassche/bokeh,philippjfr/bokeh,jakirkham/bokeh,rs2/bokeh,stonebig/bokeh,phobson/bokeh,draperjames/bokeh,philippjfr/bokeh,timsnyder/bokeh,azjps/bokeh,DuCorey/bokeh,philippjfr/bokeh,stonebig/bokeh,jakirkham/bokeh,DuCorey/bokeh,bokeh/bokeh,ericmjl/bokeh,aiguofer/bokeh,bokeh/bokeh,aiguofer/bokeh,dennisobrien/bokeh,jakirkham/bokeh,stonebig/bokeh,jakirkham/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,DuCorey/bokeh,percyfal/bokeh,philippjfr/bokeh,bokeh/bokeh,mindriot101/bokeh,rs2/bokeh,percyfal/bokeh,draperjames/bokeh,phobson/bokeh,jakirkham/bokeh,ericmjl/bokeh,timsnyder/bokeh,dennisobrien/bokeh,dennisobrien/bokeh,draperjames/bokeh,rs2/bokeh,DuCorey/bokeh,ptitjano/bokeh,schoolie/bokeh,percyfal/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,aiguofer/bokeh,draperjames/bokeh,stonebig/bokeh,phobson/bokeh,ptitjano/bokeh,rs2/bokeh,schoolie/bokeh,timsnyder/bokeh,schoolie/bokeh,azjps/bokeh,bokeh/bokeh,mindriot101/bokeh,ptitjano/bokeh,ericmjl/bokeh,azjps/bokeh,draperjames/bokeh,timsnyder/bokeh,ericmjl/bokeh,aavanian/bokeh,DuCorey/bokeh,mindriot101/bokeh,aavanian/bokeh,percyfal/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,philippjfr/bokeh,ptitjano/bokeh,aiguofer/bokeh,bokeh/bokeh,timsnyder/bokeh,phobson/bokeh,azjps/bokeh,phobson/bokeh,aiguofer/bokeh,aavanian/bokeh
bokeh/models/tests/test_tools.py
bokeh/models/tests/test_tools.py
from __future__ import absolute_import from bokeh.models.layouts import Box from bokeh.models.tools import Toolbar, ToolbarBox # TODO (bev) validate entire list of props def test_Toolbar(): tb = Toolbar() assert tb.active_drag == 'auto' assert tb.active_scroll == 'auto' assert tb.active_tap == 'auto' # # ToolbarBox # def test_toolbar_box_is_instance_of_box(): tb_box = ToolbarBox() assert isinstance(tb_box, Box) def test_toolbar_box_properties(): tb_box = ToolbarBox() assert tb_box.logo == "normal" assert tb_box.toolbar_location == "right" assert tb_box.tools == [] assert tb_box.merge_tools is True
from __future__ import absolute_import from bokeh.models.layouts import Box from bokeh.models.tools import Toolbar, ToolbarBox # TODO (bev) validate entire list of props def test_Toolbar(): tb = Toolbar() assert tb.active_drag == 'auto' assert tb.active_scroll == 'auto' assert tb.active_tap == 'auto' # # ToolbarBox # def test_toolbar_box_is_instance_of_box(): tb_box = ToolbarBox() assert isinstance(tb_box, Box) def test_toolbar_box_properties(): tb_box = ToolbarBox() assert tb_box.logo == "normal" assert tb_box.toolbar_location == "right" assert tb_box.tools is None assert tb_box.merge_tools is True
bsd-3-clause
Python
9fbab30f5d32d96460c7c8188f21a94aa050e0cb
add baomihua.com
lcplj123/video-dl,lcplj123/video-get
extractors/baomihua.py
extractors/baomihua.py
#!/usr/bin/env python3 import re import sys import json sys.path.append('..') from define import * from utils import * from extractor import BasicExtractor import urllib.parse class BaoMiHuaExtractor(BasicExtractor): ''' ku6下载器 ''' def __init__(self,c): super(BaoMiHuaExtractor,self).__init__(c, BAOMIHUA) def download(self): print('baomihua:start downloading ...') retry = 3 while retry >=0 : self.page = get_html(self.c.url) if self.page: break retry -= 1 if not self.page: print('error: request video info error,check url. %s' % (self.c.url,)) sys.exit(0) self.i.vid = self.getVid() if not self.i.vid: print('error: not find vid! exit...') sys.exit(0) url = r'http://play.baomihua.com/getvideourl.aspx?flvid=%s' % (self.i.vid,) html = get_html(url) info = '&%s&' % (urllib.parse.unquote_plus(html),) self.i.title = self.getTitle(info = info) self.i.desc = self.getDesc(info = info) self.i.tags = self.getTags(info = info) self.i.m3u8 = self.query_m3u8(info = info) self.i.fsize = self.getFsize(info = info) self.i.fname = self.getFname() self.flvlist = self.query_real(info = info) self.i.views = self.getViews() self.i.uptime = self.getUptime(info = info) self.i.category = self.getCategory(info = info) self.i.duration = self.getDuration(info = info) ret = checkCondition(self.i,self.c) if ret == C_PASS: if not realDownload(self.flvlist,self.tmppath): sys.exit(0) #下载成功,合并视频,并删除临时文件 if not mergeVideos(self.flvlist, self.tmppath, self.i.path, self.i.fname): sys.exit(0) self.jsonToFile() else: print('tips: video do not math conditions. code = %d' % (ret,)) sys.exit(0) def query_m3u8(self,*args,**kwargs): m3u8 = '' info = kwargs['info'] r = re.search(r'&hlshost=(.*?)&',info) if r: m3u8 = r.groups()[0] return m3u8 def query_real(self,*args,**kwargs): urls = [] info = kwargs['info'] host = '' stream_name = '' stream_type = '' r = re.search('&host=(.*?)&',info) if r: host = r.groups()[0] r2 = re.search('&stream_name=(.*?)&',info) if r2: stream_name = r2.groups()[0] r3 = re.search('&videofiletype=(.*?)&',info) if r3: stream_type = r3.groups()[0] url = r'http://%s/pomoho_video/%s.%s' % (host,stream_name,stream_type) return [url] def getVid(self,*args,**kwargs): vid = '' r = re.search(r'var\s+flvid\s*=\s*(\d+)',self.page) if r: vid = r.groups()[0] else: r2 = re.search(r'flvid=(\d+)',self.page) if r2: vid = r2.groups()[0] return vid def getFsize(self,*args,**kwargs): size = 1024*1024 info = kwargs['info'] r = re.search(r'&videofilesize=(\d+)&',info) if r: size = r.groups()[0] return int(size) def getTitle(self,*args,**kwargs): title = '' info = kwargs['info'] r = re.search(r'&title=(.*?)&',info) if r: title = r.groups()[0] return title def getDesc(self,*args,**kwargs): desc = self.i.title r = re.search(r'\<meta\s+content=\"(.*?)\"\s+name=\"description\"',self.page) if r: desc = r.groups()[0] return desc def getTags(self,*args,**kwargs): tag = '' r = re.search(r'\<meta\s+content=\"(.*?)\"\s+name=\"keywords\"',self.page) if r: tag = r.groups()[0] return tag.split(',') def getViews(self,*args,**kwargs): views = 1 r = re.search(r'var\s+appId\s*=\s*(\d+)\s*;',self.page) appid = '0' if r: appid = r.groups()[0] url = r'http://action.interface.baomihua.com/AppInfoApi.asmx/GetAppInfo?appid=%s' %(appid,) data = get_html(url) r = re.search(r'appPlayCount:\s*[\'\"](\d+)[\'\"]',data) if r: views = r.groups()[0] return int(views) def getCategory(self,*args,**kwargs): cat = '未知' return cat def getDuration(self,*args,**kwargs): duration = 0 info = kwargs['info'] r = re.search(r'&totaltime=(\d+)&',info) if r: duration = r.groups()[0] return int(duration) def getUptime(self,*args,**kwargs): return '20150813' def download(c): d = BaoMiHuaExtractor(c) return d.download()
#!/usr/bin/env python3
mit
Python
edb1c61a7ded49b63e272bd409fcbf6468173948
remove comment
bit-bots/bitbots_behaviour
bitbots_head_behavior/src/bitbots_head_behavior/head_node.py
bitbots_head_behavior/src/bitbots_head_behavior/head_node.py
#!/usr/bin/env python3 """ This is the ROS-Node which contains the head behavior, starts the appropriate DSD, initializes the HeadBlackboard and subscribes to head_behavior specific ROS-Topics. """ import os import rospy from bitbots_blackboard.blackboard import HeadBlackboard from dynamic_stack_decider.dsd import DSD from humanoid_league_msgs.msg import HeadMode as HeadModeMsg, PoseWithCertainty, PoseWithCertaintyArray from bitbots_msgs.msg import JointCommand from sensor_msgs.msg import JointState from std_msgs.msg import Header from geometry_msgs.msg import PoseWithCovarianceStamped from moveit_ros_planning_interface._moveit_roscpp_initializer import roscpp_init, roscpp_shutdown from bitbots_ros_patches.rate import Rate def run(dsd): """ Main run-loop :returns: Never """ rate = Rate(60) while not rospy.is_shutdown(): dsd.update() rate.sleep() # Also stop cpp node roscpp_shutdown() def init(): """ Initialize new components needed for head_behavior: blackboard, dsd, rostopic subscriber """ rospy.init_node('head_behavior') # This is a general purpose initialization function provided by moved # It is used to correctly initialize roscpp which is used in the collision checker module roscpp_init('collision_checker', []) blackboard = HeadBlackboard() rospy.Subscriber('head_mode', HeadModeMsg, blackboard.head_capsule.head_mode_callback, queue_size=1) rospy.Subscriber("ball_position_relative_filtered", PoseWithCovarianceStamped, blackboard.world_model.ball_filtered_callback) rospy.Subscriber('joint_states', JointState, blackboard.head_capsule.joint_state_callback) blackboard.head_capsule.position_publisher = rospy.Publisher("head_motor_goals", JointCommand, queue_size=10) blackboard.head_capsule.visual_compass_record_trigger = rospy.Publisher(blackboard.config['visual_compass_trigger_topic'], Header, queue_size=5) dirname = os.path.dirname(os.path.realpath(__file__)) dsd = DSD(blackboard, 'debug/dsd/head_behavior') dsd.register_actions(os.path.join(dirname, 'actions')) dsd.register_decisions(os.path.join(dirname, 'decisions')) dsd.load_behavior(os.path.join(dirname, 'head_behavior.dsd')) rospy.logdebug("Head Behavior completely loaded") return dsd if __name__ == '__main__': run(init())
#!/usr/bin/env python3 """ This is the ROS-Node which contains the head behavior, starts the appropriate DSD, initializes the HeadBlackboard and subscribes to head_behavior specific ROS-Topics. """ import os import rospy from bitbots_blackboard.blackboard import HeadBlackboard from dynamic_stack_decider.dsd import DSD from humanoid_league_msgs.msg import HeadMode as HeadModeMsg, PoseWithCertainty, PoseWithCertaintyArray from bitbots_msgs.msg import JointCommand from sensor_msgs.msg import JointState from std_msgs.msg import Header from geometry_msgs.msg import PoseWithCovarianceStamped from moveit_ros_planning_interface._moveit_roscpp_initializer import roscpp_init, roscpp_shutdown from bitbots_ros_patches.rate import Rate def run(dsd): """ Main run-loop :returns: Never """ rate = Rate(60) while not rospy.is_shutdown(): dsd.update() rate.sleep() # Also stop cpp node roscpp_shutdown() def init(): """ Initialize new components needed for head_behavior: blackboard, dsd, rostopic subscriber """ rospy.init_node('head_behavior') # This is a general purpose initialization function provided by moved # It is used to correctly initialize roscpp which is used in the collision checker module roscpp_init('collision_checker', []) blackboard = HeadBlackboard() rospy.Subscriber('head_mode', HeadModeMsg, blackboard.head_capsule.head_mode_callback, queue_size=1) # rospy.Subscriber("balls_relative", PoseWithCertaintyArray, blackboard.world_model.balls_callback) rospy.Subscriber("ball_position_relative_filtered", PoseWithCovarianceStamped, blackboard.world_model.ball_filtered_callback) rospy.Subscriber('joint_states', JointState, blackboard.head_capsule.joint_state_callback) blackboard.head_capsule.position_publisher = rospy.Publisher("head_motor_goals", JointCommand, queue_size=10) blackboard.head_capsule.visual_compass_record_trigger = rospy.Publisher(blackboard.config['visual_compass_trigger_topic'], Header, queue_size=5) dirname = os.path.dirname(os.path.realpath(__file__)) dsd = DSD(blackboard, 'debug/dsd/head_behavior') dsd.register_actions(os.path.join(dirname, 'actions')) dsd.register_decisions(os.path.join(dirname, 'decisions')) dsd.load_behavior(os.path.join(dirname, 'head_behavior.dsd')) rospy.logdebug("Head Behavior completely loaded") return dsd if __name__ == '__main__': run(init())
bsd-3-clause
Python
0fceb297dc4855cd5617daaf9821fb3a332c19ed
Fix descriptions
roderickm/MediaCrush,MediaCrush/MediaCrush,MediaCrush/MediaCrush,roderickm/MediaCrush,nerdzeu/NERDZCrush,roderickm/MediaCrush,nerdzeu/NERDZCrush,nerdzeu/NERDZCrush
mediacrush/slimdown.py
mediacrush/slimdown.py
from functools import partial from markdown import Markdown, odict from markdown.blockprocessors import build_block_parser from markdown.preprocessors import build_preprocessors from markdown.inlinepatterns import build_inlinepatterns from markdown.treeprocessors import build_treeprocessors slimdown = Markdown(safe_mode="escape") # Remove some block parsers block = build_block_parser(slimdown) del block.blockprocessors["hashheader"] del block.blockprocessors["setextheader"] del block.blockprocessors["olist"] del block.blockprocessors["ulist"] slimdown.parser = block # Delete most inline patterns inline = build_inlinepatterns(slimdown) del inline["backtick"] del inline["reference"] del inline["image_link"] del inline["image_reference"] del inline["short_reference"] del inline["autolink"] del inline["automail"] del inline["entity"] slimdown.inlinePatterns = inline # Monkey-patch unicode fix slimdown._convert = slimdown.convert def slimdown_convert(text): text = text.decode('utf-8') return slimdown._convert(text) slimdown.convert = slimdown_convert
from functools import partial from markdown import Markdown, odict from markdown.blockprocessors import build_block_parser from markdown.preprocessors import build_preprocessors from markdown.inlinepatterns import build_inlinepatterns from markdown.treeprocessors import build_treeprocessors slimdown = Markdown(safe_mode="escape") # Remove some block parsers block = build_block_parser(slimdown) del block.blockprocessors["hashheader"] del block.blockprocessors["setextheader"] del block.blockprocessors["olist"] del block.blockprocessors["ulist"] slimdown.parser = block # Delete most inline patterns inline = build_inlinepatterns(slimdown) del inline["backtick"] del inline["reference"] del inline["image_link"] del inline["image_reference"] del inline["short_reference"] del inline["autolink"] del inline["automail"] del inline["entity"] slimdown.inlinePatterns = inline
mit
Python
a23a1050501563889c2806a514fe2994a2ebe3a8
Add python3 support in example
johnwlockwood/stream_tap,johnwlockwood/karl_data,johnwlockwood/stream_tap,johnwlockwood/iter_karld_tools
example/consume_many_csv_files.py
example/consume_many_csv_files.py
from __future__ import print_function from itertools import chain try: from itertools import imap except ImportError: # if python 3 imap = map import karld from karld.path import i_walk_csv_paths def main(): """ Consume many csv files as if one. """ import pathlib input_dir = pathlib.Path('test_data/things_kinds') # # Use a generator expression # iterables = (karld.io.i_get_csv_data(data_path) # for data_path in i_walk_csv_paths(str(input_dir))) # # or a generator map. iterables = imap(karld.io.i_get_csv_data, i_walk_csv_paths(str(input_dir))) items = chain.from_iterable(iterables) for item in items: print(item[0], item[1]) if __name__ == "__main__": main()
from __future__ import print_function from itertools import chain from itertools import imap import karld from karld.path import i_walk_csv_paths def main(): """ Consume many csv files as if one. """ import pathlib input_dir = pathlib.Path('test_data/things_kinds') # # Use a generator expression # iterables = (karld.io.i_get_csv_data(data_path) # for data_path in i_walk_csv_paths(str(input_dir))) # # or a generator map. iterables = imap(karld.io.i_get_csv_data, i_walk_csv_paths(str(input_dir))) items = chain.from_iterable(iterables) for item in items: print(item[0], item[1]) if __name__ == "__main__": main()
apache-2.0
Python
d27c34c65198280e324c37acda7f33ece07c2c92
make text field usage consistnt
ChristosChristofidis/bokeh,percyfal/bokeh,quasiben/bokeh,birdsarah/bokeh,tacaswell/bokeh,schoolie/bokeh,percyfal/bokeh,muku42/bokeh,ahmadia/bokeh,phobson/bokeh,ericmjl/bokeh,timsnyder/bokeh,ericmjl/bokeh,abele/bokeh,laurent-george/bokeh,Karel-van-de-Plassche/bokeh,KasperPRasmussen/bokeh,stuart-knock/bokeh,ChristosChristofidis/bokeh,roxyboy/bokeh,laurent-george/bokeh,Karel-van-de-Plassche/bokeh,daodaoliang/bokeh,roxyboy/bokeh,rhiever/bokeh,ericdill/bokeh,josherick/bokeh,srinathv/bokeh,mutirri/bokeh,akloster/bokeh,xguse/bokeh,mindriot101/bokeh,awanke/bokeh,schoolie/bokeh,draperjames/bokeh,ChinaQuants/bokeh,jplourenco/bokeh,matbra/bokeh,bokeh/bokeh,caseyclements/bokeh,maxalbert/bokeh,josherick/bokeh,timothydmorton/bokeh,phobson/bokeh,khkaminska/bokeh,msarahan/bokeh,CrazyGuo/bokeh,mutirri/bokeh,rothnic/bokeh,philippjfr/bokeh,justacec/bokeh,stonebig/bokeh,azjps/bokeh,maxalbert/bokeh,justacec/bokeh,khkaminska/bokeh,msarahan/bokeh,CrazyGuo/bokeh,bsipocz/bokeh,PythonCharmers/bokeh,azjps/bokeh,stonebig/bokeh,timsnyder/bokeh,PythonCharmers/bokeh,alan-unravel/bokeh,justacec/bokeh,stuart-knock/bokeh,deeplook/bokeh,mindriot101/bokeh,roxyboy/bokeh,abele/bokeh,quasiben/bokeh,matbra/bokeh,Karel-van-de-Plassche/bokeh,bsipocz/bokeh,birdsarah/bokeh,mutirri/bokeh,evidation-health/bokeh,bokeh/bokeh,bokeh/bokeh,satishgoda/bokeh,gpfreitas/bokeh,canavandl/bokeh,rs2/bokeh,philippjfr/bokeh,schoolie/bokeh,paultcochrane/bokeh,DuCorey/bokeh,alan-unravel/bokeh,lukebarnard1/bokeh,caseyclements/bokeh,rs2/bokeh,gpfreitas/bokeh,ChristosChristofidis/bokeh,aavanian/bokeh,philippjfr/bokeh,ahmadia/bokeh,ahmadia/bokeh,eteq/bokeh,ericmjl/bokeh,jakirkham/bokeh,aavanian/bokeh,phobson/bokeh,paultcochrane/bokeh,awanke/bokeh,canavandl/bokeh,philippjfr/bokeh,xguse/bokeh,htygithub/bokeh,rs2/bokeh,eteq/bokeh,muku42/bokeh,satishgoda/bokeh,DuCorey/bokeh,dennisobrien/bokeh,quasiben/bokeh,roxyboy/bokeh,rhiever/bokeh,aavanian/bokeh,evidation-health/bokeh,saifrahmed/bokeh,mutirri/bokeh,DuCorey/bokeh,lukebarnard1/bokeh,srinathv/bokeh,Karel-van-de-Plassche/bokeh,khkaminska/bokeh,josherick/bokeh,ericdill/bokeh,akloster/bokeh,rothnic/bokeh,srinathv/bokeh,akloster/bokeh,lukebarnard1/bokeh,draperjames/bokeh,dennisobrien/bokeh,eteq/bokeh,ptitjano/bokeh,maxalbert/bokeh,stuart-knock/bokeh,evidation-health/bokeh,clairetang6/bokeh,saifrahmed/bokeh,ericdill/bokeh,deeplook/bokeh,htygithub/bokeh,aiguofer/bokeh,DuCorey/bokeh,maxalbert/bokeh,lukebarnard1/bokeh,jakirkham/bokeh,ChinaQuants/bokeh,CrazyGuo/bokeh
examples/plotting/server/markers.py
examples/plotting/server/markers.py
# The plot server must be running # Go to http://localhost:5006/bokeh to view this plot from numpy.random import random from bokeh.plotting import * def mscatter(p, x, y, typestr): p.scatter(x, y, marker=typestr, line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12) def mtext(p, x, y, textstr): p.text(x, y, text=[textstr], text_color="#449944", text_align="center", text_font_size="10pt") output_server("markers") p = figure(title="markers.py example") N = 10 mscatter(p, random(N)+2, random(N)+1, "circle") mscatter(p, random(N)+4, random(N)+1, "square") mscatter(p, random(N)+6, random(N)+1, "triangle") mscatter(p, random(N)+8, random(N)+1, "asterisk") mscatter(p, random(N)+2, random(N)+4, "circle_x") mscatter(p, random(N)+4, random(N)+4, "square_x") mscatter(p, random(N)+6, random(N)+4, "inverted_triangle") mscatter(p, random(N)+8, random(N)+4, "x") mscatter(p, random(N)+2, random(N)+7, "circle_cross") mscatter(p, random(N)+4, random(N)+7, "square_cross") mscatter(p, random(N)+6, random(N)+7, "diamond") mscatter(p, random(N)+8, random(N)+7, "cross") mtext(p, [2.5], [0.5], "circle / o") mtext(p, [4.5], [0.5], "square") mtext(p, [6.5], [0.5], "triangle") mtext(p, [8.5], [0.5], "asterisk / *") mtext(p, [2.5], [3.5], "circle_x / ox") mtext(p, [4.5], [3.5], "square_x") mtext(p, [6.5], [3.5], "inverted_triangle") mtext(p, [8.5], [3.5], "x") mtext(p, [2.5], [6.5], "circle_cross / o+") mtext(p, [4.5], [6.5], "square_cross") mtext(p, [6.5], [6.5], "diamond") mtext(p, [8.5], [6.5], "cross / +") show(p) # open a browser
# The plot server must be running # Go to http://localhost:5006/bokeh to view this plot from numpy.random import random from bokeh.plotting import * def mscatter(p, x, y, typestr): p.scatter(x, y, marker=typestr, line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12) def mtext(p, x, y, textstr): p.text(x, y, text=textstr, text_color="#449944", text_align="center", text_font_size="10pt") output_server("markers") p = figure(title="markers.py example") N = 10 mscatter(p, random(N)+2, random(N)+1, "circle") mscatter(p, random(N)+4, random(N)+1, "square") mscatter(p, random(N)+6, random(N)+1, "triangle") mscatter(p, random(N)+8, random(N)+1, "asterisk") mscatter(p, random(N)+2, random(N)+4, "circle_x") mscatter(p, random(N)+4, random(N)+4, "square_x") mscatter(p, random(N)+6, random(N)+4, "inverted_triangle") mscatter(p, random(N)+8, random(N)+4, "x") mscatter(p, random(N)+2, random(N)+7, "circle_cross") mscatter(p, random(N)+4, random(N)+7, "square_cross") mscatter(p, random(N)+6, random(N)+7, "diamond") mscatter(p, random(N)+8, random(N)+7, "cross") mtext(p, [2.5], [0.5], "circle / o") mtext(p, [4.5], [0.5], "square") mtext(p, [6.5], [0.5], "triangle") mtext(p, [8.5], [0.5], "asterisk / *") mtext(p, [2.5], [3.5], "circle_x / ox") mtext(p, [4.5], [3.5], "square_x") mtext(p, [6.5], [3.5], "inverted_triangle") mtext(p, [8.5], [3.5], "x") mtext(p, [2.5], [6.5], "circle_cross / o+") mtext(p, [4.5], [6.5], "square_cross") mtext(p, [6.5], [6.5], "diamond") mtext(p, [8.5], [6.5], "cross / +") show(p) # open a browser
bsd-3-clause
Python
0051b5a5e287057cab06452d4f178e4c04cbd0c5
Put the win_osinfo classes in a helper function
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
salt/utils/win_osinfo.py
salt/utils/win_osinfo.py
# -*- coding: utf-8 -*- ''' Get Version information from Windows ''' # http://stackoverflow.com/questions/32300004/python-ctypes-getting-0-with-getversionex-function from __future__ import absolute_import # Import Third Party Libs import ctypes try: from ctypes.wintypes import BYTE, WORD, DWORD, WCHAR HAS_WIN32 = True except (ImportError, ValueError): HAS_WIN32 = False if HAS_WIN32: kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) # Although utils are often directly imported, it is also possible to use the # loader. def __virtual__(): ''' Only load if Win32 Libraries are installed ''' if not HAS_WIN32: return False, 'This utility requires pywin32' return 'win_osinfo' def os_version_info_ex(): ''' Helper function to return the OSVersionInfo class Returns: class: The OsVersionInfo class ''' class OSVersionInfo(ctypes.Structure): _fields_ = (('dwOSVersionInfoSize', DWORD), ('dwMajorVersion', DWORD), ('dwMinorVersion', DWORD), ('dwBuildNumber', DWORD), ('dwPlatformId', DWORD), ('szCSDVersion', WCHAR * 128)) def __init__(self, *args, **kwds): super(OSVersionInfo, self).__init__(*args, **kwds) self.dwOSVersionInfoSize = ctypes.sizeof(self) kernel32.GetVersionExW(ctypes.byref(self)) class OSVersionInfoEx(OSVersionInfo): _fields_ = (('wServicePackMajor', WORD), ('wServicePackMinor', WORD), ('wSuiteMask', WORD), ('wProductType', BYTE), ('wReserved', BYTE)) return OSVersionInfoEx() def get_os_version_info(): info = os_version_info_ex() ret = {'MajorVersion': info.dwMajorVersion, 'MinorVersion': info.dwMinorVersion, 'BuildNumber': info.dwBuildNumber, 'PlatformID': info.dwPlatformId, 'ServicePackMajor': info.wServicePackMajor, 'ServicePackMinor': info.wServicePackMinor, 'SuiteMask': info.wSuiteMask, 'ProductType': info.wProductType} return ret
# -*- coding: utf-8 -*- ''' Get Version information from Windows ''' # http://stackoverflow.com/questions/32300004/python-ctypes-getting-0-with-getversionex-function from __future__ import absolute_import # Import Third Party Libs import ctypes try: from ctypes.wintypes import BYTE, WORD, DWORD, WCHAR HAS_WIN32 = True except (ImportError, ValueError): HAS_WIN32 = False if HAS_WIN32: kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) # Although utils are often directly imported, it is also possible to use the # loader. def __virtual__(): ''' Only load if Win32 Libraries are installed ''' if not HAS_WIN32: return False, 'This utility requires pywin32' return 'win_osinfo' if HAS_WIN32: class OSVERSIONINFO(ctypes.Structure): _fields_ = (('dwOSVersionInfoSize', DWORD), ('dwMajorVersion', DWORD), ('dwMinorVersion', DWORD), ('dwBuildNumber', DWORD), ('dwPlatformId', DWORD), ('szCSDVersion', WCHAR * 128)) def __init__(self, *args, **kwds): super(OSVERSIONINFO, self).__init__(*args, **kwds) self.dwOSVersionInfoSize = ctypes.sizeof(self) kernel32.GetVersionExW(ctypes.byref(self)) class OSVERSIONINFOEX(OSVERSIONINFO): _fields_ = (('wServicePackMajor', WORD), ('wServicePackMinor', WORD), ('wSuiteMask', WORD), ('wProductType', BYTE), ('wReserved', BYTE)) def errcheck_bool(result, func, args): if not result: raise ctypes.WinError(ctypes.get_last_error()) return args kernel32.GetVersionExW.errcheck = errcheck_bool kernel32.GetVersionExW.argtypes = (ctypes.POINTER(OSVERSIONINFO),) def get_os_version_info(): info = OSVERSIONINFOEX() ret = {'MajorVersion': info.dwMajorVersion, 'MinorVersion': info.dwMinorVersion, 'BuildNumber': info.dwBuildNumber, 'PlatformID': info.dwPlatformId, 'ServicePackMajor': info.wServicePackMajor, 'ServicePackMinor': info.wServicePackMinor, 'SuiteMask': info.wSuiteMask, 'ProductType': info.wProductType} return ret
apache-2.0
Python
1cc1df8c00a7a956b0a1207f99928f731714541a
add main path reminder in TaskLog
CaptainDesAstres/Blender-Render-Manager,CaptainDesAstres/Simple-Blender-Render-Manager
TaskList/TaskLog/TaskLog.py
TaskList/TaskLog/TaskLog.py
#!/usr/bin/python3.4 # -*-coding:Utf-8 -* '''module to manage task running log''' import xml.etree.ElementTree as xmlMod from TaskList.TaskLog.GroupLog import * from Preferences.PresetList.Preset.Preset import * from Preferences.PresetList.Preset.Metapreset import * class TaskLog: '''class to manage task running log''' def __init__(self, xml = None, pref = None, task = None): '''initialize task log object''' if xml is None: self.defaultInit(pref, task) else: self.fromXml(xml) def defaultInit(self, preferences, task): '''initialize Task log object by generating from the task settings''' self.presetName = task.preset if self.presetName == '[default]': self.presetName = preferences.presets.default self.preset = preferences.presets.getPreset(self.presetName).copy() fileName = task.path.split('/').pop() fileName = fileName[0:fileName.rfind('.blend')] self.path = preferences.output.getMainPath(fileName, task.scene, self.presetName) if type(self.preset) is Preset: self.groups = [GroupLog(groupName = '[main]', preferences = preferences, task = task)] else: self.groups = [] for g in self.preset.groups.keys(): group = preferences.presets.renderlayers.groups[g] if group.isUsefull(task.info.scenes[task.scene]): self.groups.append(GroupLog(groupName = g, preferences = preferences, task = task)) default = GroupLog(groupName = '[default]', preferences = preferences, task = task) if len(default.renderlayers) > 0: self.groups.append(default) def fromXml(self, xml): '''initialize Task log object with saved log''' node = xml.find('preset') if node is None: node = xml.find('metapreset') self.presetName = node.get('alias') self.preset = Metapreset(xml = node) else: self.presetName = node.get('alias') self.preset = Preset(xml = node) self.groups = [] for node in xml.findall('group'): self.groups.append(GroupLog(xml = node)) def toXml(self): '''export task log into xml syntaxed string''' xml = '<log>\n' xml += self.preset.toXml(self.presetName) for g in self.groups: xml += g.toXml() xml += '</log>' return xml def print(self): '''A method to print task log''' def getGroup(self, g): '''a method to get a group by his name''' for group in self.groups: if g == group.name: return group
#!/usr/bin/python3.4 # -*-coding:Utf-8 -* '''module to manage task running log''' import xml.etree.ElementTree as xmlMod from TaskList.TaskLog.GroupLog import * from Preferences.PresetList.Preset.Preset import * from Preferences.PresetList.Preset.Metapreset import * class TaskLog: '''class to manage task running log''' def __init__(self, xml = None, pref = None, task = None): '''initialize task log object''' if xml is None: self.defaultInit(pref, task) else: self.fromXml(xml) def defaultInit(self, preferences, task): '''initialize Task log object by generating from the task settings''' self.presetName = task.preset if self.presetName == '[default]': self.presetName = preferences.presets.default self.preset = preferences.presets.getPreset(self.presetName).copy() if type(self.preset) is Preset: self.groups = [GroupLog(groupName = '[main]', preferences = preferences, task = task)] else: self.groups = [] for g in self.preset.groups.keys(): group = preferences.presets.renderlayers.groups[g] if group.isUsefull(task.info.scenes[task.scene]): self.groups.append(GroupLog(groupName = g, preferences = preferences, task = task)) default = GroupLog(groupName = '[default]', preferences = preferences, task = task) if len(default.renderlayers) > 0: self.groups.append(default) def fromXml(self, xml): '''initialize Task log object with saved log''' node = xml.find('preset') if node is None: node = xml.find('metapreset') self.presetName = node.get('alias') self.preset = Metapreset(xml = node) else: self.presetName = node.get('alias') self.preset = Preset(xml = node) self.groups = [] for node in xml.findall('group'): self.groups.append(GroupLog(xml = node)) def toXml(self): '''export task log into xml syntaxed string''' xml = '<log>\n' xml += self.preset.toXml(self.presetName) for g in self.groups: xml += g.toXml() xml += '</log>' return xml def print(self): '''A method to print task log''' def getGroup(self, g): '''a method to get a group by his name''' for group in self.groups: if g == group.name: return group
mit
Python
32ab2353d7a7f64300445688b0bd583fbe1a13fb
Improve keystone.conf [endpoint_policy] documentation
mahak/keystone,mahak/keystone,rajalokan/keystone,ilay09/keystone,openstack/keystone,ilay09/keystone,openstack/keystone,openstack/keystone,rajalokan/keystone,mahak/keystone,rajalokan/keystone,cernops/keystone,ilay09/keystone,cernops/keystone
keystone/conf/endpoint_policy.py
keystone/conf/endpoint_policy.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from keystone.conf import utils enabled = cfg.BoolOpt( 'enabled', default=True, deprecated_for_removal=True, deprecated_reason=utils.fmt(""" The option to enable the OS-ENDPOINT-POLICY API extension has been deprecated in the M release and will be removed in the O release. The OS-ENDPOINT-POLICY API extension will be enabled by default. """), help=utils.fmt(""" Enable endpoint-policy functionality, which allows policies to be associated with either specific endpoints, or endpoints of a given service type. """)) driver = cfg.StrOpt( 'driver', default='sql', help=utils.fmt(""" Entry point for the endpoint policy driver in the `keystone.endpoint_policy` namespace. Only a `sql` driver is provided by keystone, so there is no reason to set this unless you are providing a custom entry point. """)) GROUP_NAME = __name__.split('.')[-1] ALL_OPTS = [ enabled, driver, ] def register_opts(conf): conf.register_opts(ALL_OPTS, group=GROUP_NAME) def list_opts(): return {GROUP_NAME: ALL_OPTS}
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from keystone.conf import utils enabled = cfg.BoolOpt( 'enabled', default=True, deprecated_for_removal=True, deprecated_reason=utils.fmt(""" The option to enable the OS-ENDPOINT-POLICY extension has been deprecated in the M release and will be removed in the O release. The OS-ENDPOINT-POLICY extension will be enabled by default. """), help=utils.fmt(""" Enable endpoint_policy functionality. """)) driver = cfg.StrOpt( 'driver', default='sql', help=utils.fmt(""" Entrypoint for the endpoint policy backend driver in the keystone.endpoint_policy namespace. """)) GROUP_NAME = __name__.split('.')[-1] ALL_OPTS = [ enabled, driver, ] def register_opts(conf): conf.register_opts(ALL_OPTS, group=GROUP_NAME) def list_opts(): return {GROUP_NAME: ALL_OPTS}
apache-2.0
Python
29384b927b620b7e943343409f62511451bb3059
Fix problem with Hopfield energy function for Python 2.7
stczhc/neupy,stczhc/neupy,itdxer/neupy,itdxer/neupy,stczhc/neupy,stczhc/neupy,itdxer/neupy,itdxer/neupy
neupy/algorithms/memory/utils.py
neupy/algorithms/memory/utils.py
from numpy import where, inner
from numpy.core.umath_tests import inner1d


__all__ = ('sign2bin', 'bin2sign', 'hopfield_energy')


def sign2bin(matrix):
    return where(matrix == 1, 1, 0)


def bin2sign(matrix):
    return where(matrix == 0, -1, 1)


def hopfield_energy(weight, input_data, output_data):
    return -0.5 * inner1d(input_data.dot(weight), output_data)
from numpy import where


__all__ = ('sign2bin', 'bin2sign', 'hopfield_energy')


def sign2bin(matrix):
    return where(matrix == 1, 1, 0)


def bin2sign(matrix):
    return where(matrix == 0, -1, 1)


def hopfield_energy(weight, input_data, output_data):
    energy_output = -0.5 * input_data.dot(weight).dot(output_data.T)
    return energy_output.item(0)
mit
Python
bf28376f252fd474d594e5039d0b2f2bb1afc26a
Add proper warnings on use of the backwards compatibility shim.
ipython/ipython,ipython/ipython
IPython/frontend.py
IPython/frontend.py
""" Shim to maintain backwards compatibility with old frontend imports. We have moved all contents of the old `frontend` subpackage into top-level subpackages (`html`, `qt` and `terminal`). This will let code that was making `from IPython.frontend...` calls continue working, though a warning will be printed. """ #----------------------------------------------------------------------------- # Copyright (c) 2013, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from __future__ import print_function import sys import types #----------------------------------------------------------------------------- # Class declarations #----------------------------------------------------------------------------- class ShimModule(types.ModuleType): def __getattribute__(self, key): m = ("*** WARNING*** : The top-level `frontend` module has been deprecated.\n" "Please import %s directly from the `IPython` level." % key) # FIXME: I don't understand why, but if the print statement below is # redirected to stderr, this shim module stops working. It seems the # Python import machinery has problem with redirected prints happening # during the import process. If we can't figure out a solution, we may # need to leave it to print to default stdout. print(m) # FIXME: this seems to work fine, but we should replace it with an # __import__ call instead of using exec/eval. exec 'from IPython import %s' % key return eval(key) # Unconditionally insert the shim into sys.modules so that further import calls # trigger the custom attribute access above sys.modules['IPython.frontend'] = ShimModule('frontend')
import sys
import types

class ShimModule(types.ModuleType):

    def __getattribute__(self, key):
        exec 'from IPython import %s' % key
        return eval(key)

sys.modules['IPython.frontend'] = ShimModule('frontend')
bsd-3-clause
Python
7f800ee03790d28a25f0f5989c8f6a15401af172
remove print statement
mmohrhard/crash,Liongold/crash,Liongold/crash,mmohrhard/crash,Liongold/crash,mmohrhard/crash
django/crashreport/symbols/views.py
django/crashreport/symbols/views.py
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#

from django.shortcuts import render
from django import forms
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseServerError
from django.views.decorators.csrf import csrf_exempt

from .handler import SymbolsUploadHandler

import os

class UploadSymbolsForm(forms.Form):
    symbols = forms.FileField()
    comment = forms.CharField()

def handle_uploaded_file(f):
    # TODO: moggi: get the symbols localtion from the configuration
    file_path = os.path.join('/tmp/symbols_upload', f.name)
    with open(file_path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)
    return file_path

# TODO: this needs to be limited to logged in users
@csrf_exempt
def upload_symbols(request):
    if request.method != 'POST':
        return HttpResponseNotAllowed('Only POST here')

    form = UploadSymbolsForm(request.POST, request.FILES)
    if not form.is_valid():
        return HttpResponseNotAllowed('Invalid data')

    path = handle_uploaded_file(request.FILES['symbols'])
    upload = SymbolsUploadHandler()
    upload.process(form.cleaned_data, path)
    # TODO: moggi: maybe report the zipfile.BadZipfile exception
    return HttpResponse("Success")

# vim:set shiftwidth=4 softtabstop=4 expandtab: */
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#

from django.shortcuts import render
from django import forms
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseServerError
from django.views.decorators.csrf import csrf_exempt

from .handler import SymbolsUploadHandler

import os

class UploadSymbolsForm(forms.Form):
    symbols = forms.FileField()
    comment = forms.CharField()

def handle_uploaded_file(f):
    # TODO: moggi: get the symbols localtion from the configuration
    file_path = os.path.join('/tmp/symbols_upload', f.name)
    print(file_path)
    with open(file_path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)
    return file_path

# TODO: this needs to be limited to logged in users
@csrf_exempt
def upload_symbols(request):
    if request.method != 'POST':
        return HttpResponseNotAllowed('Only POST here')

    form = UploadSymbolsForm(request.POST, request.FILES)
    print(form.fields)
    if not form.is_valid():
        return HttpResponseNotAllowed('Invalid data')

    path = handle_uploaded_file(request.FILES['symbols'])
    upload = SymbolsUploadHandler()
    upload.process(form.cleaned_data, path)
    # TODO: moggi: maybe report the zipfile.BadZipfile exception
    return HttpResponse("Success")

# vim:set shiftwidth=4 softtabstop=4 expandtab: */
mpl-2.0
Python
f782633c857932803283dd9c26621e69f6ccb44e
fix version
acysos/odoo-addons,acysos/odoo-addons,acysos/odoo-addons
l10n_es_aeat_sii/__manifest__.py
l10n_es_aeat_sii/__manifest__.py
# -*- coding: utf-8 -*-
# Copyright 2017 Ignacio Ibeas <[email protected]>
# (c) 2017 Diagram Software S.L.
# Copyright (c) 2017-TODAY MINORISA <[email protected]>
# (c) 2017 Studio73 - Pablo Fuentes <[email protected]>
# (c) 2017 Studio73 - Jordi Tolsà <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
    "name": "Suministro Inmediato de Información en el IVA",
    "version": "12.0.1.3.2",
    "category": "Accounting & Finance",
    "website": "https://www.acysos.com",
    "author": "Acysos S.L.",
    "license": "AGPL-3",
    "application": False,
    "installable": True,
    "external_dependencies": {
        "python": ["zeep", "requests"],
    },
    "depends": [
        "account",
        "account_invoice_refund_link",
        "l10n_es_aeat",
        "queue_job",
        "account_payment_partner",
    ],
    "data": [
        "data/ir_config_parameter.xml",
        "data/aeat_sii_mapping_registration_keys_data.xml",
        "data/aeat_sii_map_data.xml",
        "data/aeat_sii_map_data_1_1.xml",
        "data/aeat_sii_mapping_payment_keys_data.xml",
        "data/account_fiscal_position_data.xml",
        "views/res_company_view.xml",
        "views/account_invoice_view.xml",
        "views/aeat_sii_view.xml",
        "views/aeat_sii_result_view.xml",
        "views/aeat_check_sii_result_view.xml",
        "wizard/aeat_sii_password_view.xml",
        "views/aeat_sii_mapping_registration_keys_view.xml",
        "views/aeat_sii_map_view.xml",
        "security/ir.model.access.csv",
        "security/aeat_sii.xml",
        "views/product_view.xml",
        "views/account_view.xml",
        "views/account_payment_mode_view.xml",
    ],
    'images': ['static/description/banner.jpg'],
    "post_init_hook": "post_init_sii_hook",
}
# -*- coding: utf-8 -*-
# Copyright 2017 Ignacio Ibeas <[email protected]>
# (c) 2017 Diagram Software S.L.
# Copyright (c) 2017-TODAY MINORISA <[email protected]>
# (c) 2017 Studio73 - Pablo Fuentes <[email protected]>
# (c) 2017 Studio73 - Jordi Tolsà <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
    "name": "Suministro Inmediato de Información en el IVA",
    "version": "11.0.1.3.2",
    "category": "Accounting & Finance",
    "website": "https://www.acysos.com",
    "author": "Acysos S.L.",
    "license": "AGPL-3",
    "application": False,
    "installable": True,
    "external_dependencies": {
        "python": ["zeep", "requests"],
    },
    "depends": [
        "account",
        "account_invoice_refund_link",
        "l10n_es_aeat",
        "queue_job",
        "account_payment_partner",
    ],
    "data": [
        "data/ir_config_parameter.xml",
        "data/aeat_sii_mapping_registration_keys_data.xml",
        "data/aeat_sii_map_data.xml",
        "data/aeat_sii_map_data_1_1.xml",
        "data/aeat_sii_mapping_payment_keys_data.xml",
        "data/account_fiscal_position_data.xml",
        "views/res_company_view.xml",
        "views/account_invoice_view.xml",
        "views/aeat_sii_view.xml",
        "views/aeat_sii_result_view.xml",
        "views/aeat_check_sii_result_view.xml",
        "wizard/aeat_sii_password_view.xml",
        "views/aeat_sii_mapping_registration_keys_view.xml",
        "views/aeat_sii_map_view.xml",
        "security/ir.model.access.csv",
        "security/aeat_sii.xml",
        "views/product_view.xml",
        "views/account_view.xml",
        "views/account_payment_mode_view.xml",
    ],
    'images': ['static/description/banner.jpg'],
    "post_init_hook": "post_init_sii_hook",
}
agpl-3.0
Python
7d7b12d176a0315057c54eff794fbe7117c7f6da
Include the Python version in the support tracking data.
bkochendorfer/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,1tush/reviewboard,reviewboard/reviewboard,reviewboard/reviewboard,KnowNo/reviewboard,KnowNo/reviewboard,1tush/reviewboard,brennie/reviewboard,custode/reviewboard,1tush/reviewboard,davidt/reviewboard,custode/reviewboard,1tush/reviewboard,beol/reviewboard,beol/reviewboard,chipx86/reviewboard,brennie/reviewboard,reviewboard/reviewboard,davidt/reviewboard,chipx86/reviewboard,1tush/reviewboard,KnowNo/reviewboard,custode/reviewboard,chipx86/reviewboard,davidt/reviewboard,davidt/reviewboard,custode/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,brennie/reviewboard,KnowNo/reviewboard,sgallagher/reviewboard,bkochendorfer/reviewboard,sgallagher/reviewboard,bkochendorfer/reviewboard,1tush/reviewboard,sgallagher/reviewboard,beol/reviewboard,chipx86/reviewboard,brennie/reviewboard,1tush/reviewboard,1tush/reviewboard,1tush/reviewboard
reviewboard/admin/support.py
reviewboard/admin/support.py
from __future__ import unicode_literals

import base64
import sys
import time
from datetime import datetime
from hashlib import sha1

from django.conf import settings
from django.contrib.auth.models import User
from djblets.siteconfig.models import SiteConfiguration

from reviewboard import get_package_version


def get_install_key():
    """Returns the installation key for this server."""
    return sha1(settings.SECRET_KEY).hexdigest()


def serialize_support_data(request=None, force_is_admin=False):
    """Serializes support data into a base64-encoded string."""
    siteconfig = SiteConfiguration.objects.get_current()

    is_admin = (force_is_admin or
                (request is not None and request.user.is_staff))

    return base64.b64encode('\t'.join([
        get_install_key(),
        '%d' % is_admin,
        siteconfig.site.domain,
        siteconfig.get('site_admin_name'),
        siteconfig.get('site_admin_email'),
        get_package_version(),
        '%d' % User.objects.filter(is_active=True).count(),
        '%d' % int(time.mktime(datetime.now().timetuple())),
        siteconfig.get('company'),
        '%s.%s.%s' % sys.version_info[:3],
    ]))


def get_default_support_url(request=None, force_is_admin=False):
    """Returns the URL for the default Review Board support page."""
    siteconfig = SiteConfiguration.objects.get_current()

    if siteconfig.get('send_support_usage_stats'):
        support_data = serialize_support_data(request, force_is_admin)
    else:
        support_data = ''

    return settings.DEFAULT_SUPPORT_URL % {
        'support_data': support_data,
    }


def get_register_support_url(request=None, force_is_admin=False):
    """Returns the URL for registering the Review Board support page."""
    siteconfig = SiteConfiguration.objects.get_current()

    if siteconfig.get('send_support_usage_stats'):
        support_data = serialize_support_data(request, force_is_admin)
    else:
        support_data = ''

    return settings.REGISTER_SUPPORT_URL % {
        'support_data': support_data,
    }


def get_support_url(request):
    """Returns the URL for the configured support page."""
    siteconfig = SiteConfiguration.objects.get_current()

    return (siteconfig.get('support_url') or
            get_default_support_url(request))
from __future__ import unicode_literals

import base64
import time
from datetime import datetime
from hashlib import sha1

from django.conf import settings
from django.contrib.auth.models import User
from djblets.siteconfig.models import SiteConfiguration

from reviewboard import get_package_version


def get_install_key():
    """Returns the installation key for this server."""
    return sha1(settings.SECRET_KEY).hexdigest()


def serialize_support_data(request=None, force_is_admin=False):
    """Serializes support data into a base64-encoded string."""
    siteconfig = SiteConfiguration.objects.get_current()

    is_admin = (force_is_admin or
                (request is not None and request.user.is_staff))

    return base64.b64encode('\t'.join([
        get_install_key(),
        '%d' % is_admin,
        siteconfig.site.domain,
        siteconfig.get('site_admin_name'),
        siteconfig.get('site_admin_email'),
        get_package_version(),
        '%d' % User.objects.filter(is_active=True).count(),
        '%d' % int(time.mktime(datetime.now().timetuple())),
        siteconfig.get('company'),
    ]))


def get_default_support_url(request=None, force_is_admin=False):
    """Returns the URL for the default Review Board support page."""
    siteconfig = SiteConfiguration.objects.get_current()

    if siteconfig.get('send_support_usage_stats'):
        support_data = serialize_support_data(request, force_is_admin)
    else:
        support_data = ''

    return settings.DEFAULT_SUPPORT_URL % {
        'support_data': support_data,
    }


def get_register_support_url(request=None, force_is_admin=False):
    """Returns the URL for registering the Review Board support page."""
    siteconfig = SiteConfiguration.objects.get_current()

    if siteconfig.get('send_support_usage_stats'):
        support_data = serialize_support_data(request, force_is_admin)
    else:
        support_data = ''

    return settings.REGISTER_SUPPORT_URL % {
        'support_data': support_data,
    }


def get_support_url(request):
    """Returns the URL for the configured support page."""
    siteconfig = SiteConfiguration.objects.get_current()

    return (siteconfig.get('support_url') or
            get_default_support_url(request))
mit
Python
64d5847f058dec81a288482665a1d8208f0f4e17
Fix security issue in singularity + misc cleanups (#8657)
iulian787/spack,LLNL/spack,mfherbst/spack,krafczyk/spack,LLNL/spack,mfherbst/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,iulian787/spack,mfherbst/spack,iulian787/spack,krafczyk/spack,iulian787/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,iulian787/spack
var/spack/repos/builtin/packages/singularity/package.py
var/spack/repos/builtin/packages/singularity/package.py
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Singularity(AutotoolsPackage):
    """Singularity is a container platform focused on supporting 'Mobility of
    Compute'"""

    homepage = "https://www.sylabs.io/singularity/"
    url = "https://github.com/singularityware/singularity/releases/download/2.5.2/singularity-2.5.2.tar.gz"

    # Versions before 2.5.2 suffer from a serious security problem.
    # https://nvd.nist.gov/vuln/detail/CVE-2018-12021
    version('2.5.2', '2edc1a8ac9a4d7d26fba6244f1c5fd95')
    version('develop', git='https://github.com/singularityware/singularity.git', branch='master')

    depends_on('libarchive', when='@2.5.2:')
    # these are only needed if we're grabbing the unreleased tree
    depends_on('m4', type='build', when='@develop')
    depends_on('autoconf', type='build', when='@develop')
    depends_on('automake', type='build', when='@develop')
    depends_on('libtool', type='build', when='@develop')

    # When installing as root, the copy has to run before chmod runs
    def install(self, spec, prefix):
        make('install', parallel=False)
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Singularity(AutotoolsPackage):
    """Singularity is a container platform focused on supporting 'Mobility of
    Compute'"""

    homepage = "http://singularity.lbl.gov/"
    url = "https://github.com/singularityware/singularity/archive/2.4.tar.gz"

    version('2.4.5', '9afa903ee019448104b4f40be77a46e7')
    version('2.4', 'd357ce68ef2f8149edd84155731531465dbe74148c37719f87f168fc39384377')
    version('2.3.1', '292ff7fe3db09c854b8accf42f763f62')
    version('develop', git='https://github.com/singularityware/singularity.git', branch='master')

    depends_on('m4', type='build')
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
lgpl-2.1
Python
346e6c45b3c2a49168eac9fd1b703bd5c05503b1
fix sql: referral start/creation date taken from test confirmation
moodpulse/l2,moodpulse/l2,moodpulse/l2,moodpulse/l2,moodpulse/l2
rmis_integration/sql_func.py
rmis_integration/sql_func.py
from django.db import connection

from laboratory.settings import TIME_ZONE


def get_confirm_direction(d_s, d_e, limit):
    with connection.cursor() as cursor:
        cursor.execute(
            """WITH
            t_all_direction AS (
                SELECT DISTINCT ON (napravleniye_id) napravleniye_id
                FROM public.directions_issledovaniya
                WHERE time_confirmation AT TIME ZONE %(tz)s <= %(d_end)s),
            t_not_confirm_direction AS (
                SELECT DISTINCT ON (napravleniye_id) napravleniye_id
                FROM public.directions_issledovaniya
                WHERE napravleniye_id IN (SELECT napravleniye_id FROM t_all_direction)
                  AND time_confirmation IS NULL),
            t_only_confirm_direction AS (
                SELECT napravleniye_id FROM t_all_direction
                WHERE napravleniye_id NOT IN (SELECT napravleniye_id FROM t_not_confirm_direction)),
            t_istochnik_f_rmis_auto_send AS (
                SELECT id FROM directions_istochnikifinansirovaniya
                WHERE rmis_auto_send = false)
            SELECT id FROM directions_napravleniya
            WHERE id IN (SELECT napravleniye_id FROM t_only_confirm_direction)
              AND data_sozdaniya AT TIME ZONE %(tz)s >= %(d_start)s
              AND rmis_number != ANY(ARRAY['NONERMIS', '', NULL])
              AND result_rmis_send = false
              AND NOT (imported_from_rmis = True and imported_directions_rmis_send = False)
              AND NOT (istochnik_f_id IN (SELECT id FROM t_istochnik_f_rmis_auto_send) and force_rmis_send = False)
            ORDER BY data_sozdaniya LIMIT %(limit)s
            """,
            params={'d_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'limit': limit},
        )
        row = cursor.fetchall()
    return row
from django.db import connection

from laboratory.settings import TIME_ZONE


def get_confirm_direction(d_s, d_e, limit):
    with connection.cursor() as cursor:
        cursor.execute(
            """WITH
            t_all_direction AS (
                SELECT DISTINCT ON (napravleniye_id) napravleniye_id
                FROM public.directions_issledovaniya
                WHERE time_confirmation AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s),
            t_not_confirm_direction AS (
                SELECT DISTINCT ON (napravleniye_id) napravleniye_id
                FROM public.directions_issledovaniya
                WHERE napravleniye_id IN (SELECT napravleniye_id FROM t_all_direction)
                  AND time_confirmation IS NULL),
            t_only_confirm_direction AS (
                SELECT napravleniye_id FROM t_all_direction
                WHERE napravleniye_id NOT IN (SELECT napravleniye_id FROM t_not_confirm_direction)),
            t_istochnik_f_rmis_auto_send AS (
                SELECT id FROM directions_istochnikifinansirovaniya
                WHERE rmis_auto_send = false)
            SELECT id FROM directions_napravleniya
            WHERE id IN (SELECT napravleniye_id FROM t_only_confirm_direction)
              AND rmis_number != ANY(ARRAY['NONERMIS', '', NULL])
              AND result_rmis_send = false
              AND NOT (imported_from_rmis = True and imported_directions_rmis_send = False)
              AND NOT (istochnik_f_id IN (SELECT id FROM t_istochnik_f_rmis_auto_send) and force_rmis_send = False)
            ORDER BY data_sozdaniya LIMIT %(limit)s
            """,
            params={'d_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'limit': limit},
        )
        row = cursor.fetchall()
    return row
mit
Python
77ffc94a0439dd7309df0630cddba7daf60bc5ee
Add imported requests to Audit context.
hyperNURb/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,vladan-m/ggrc-core,j0gurt/ggrc-core,vladan-m/ggrc-core,j0gurt/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core
src/ggrc/converters/requests.py
src/ggrc/converters/requests.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

from .base import *

from ggrc.models import Audit, Request
from .base_row import *
from collections import OrderedDict


class RequestRowConverter(BaseRowConverter):
    model_class = Request

    def find_by_slug(self, slug):
        return self.model_class.query.filter_by(slug=slug).first()

    def setup_object(self):
        self.obj = self.setup_object_by_slug(self.attrs)
        if self.obj.id is not None:
            self.add_warning('slug', "Request already exists and will be updated")

    def reify(self):
        self.handle('objective_id', ObjectiveHandler)
        self.handle('request_type', RequestTypeColumnHandler, is_required=True)
        self.handle('status', StatusColumnHandler, valid_states=Request.VALID_STATES, default_value='Draft')
        self.handle_date('requested_on', is_required=True)
        self.handle_date('due_on', is_required=True)
        self.handle_text_or_html('description')
        self.handle_text_or_html('test')
        self.handle_text_or_html('notes')
        self.handle_raw_attr('auditor_contact')  # default to audit lead
        self.handle(
            'assignee', AssigneeHandler, is_required=True,
            person_must_exist=True)

    def save_object(self, db_session, **options):
        audit = options.get('audit')
        if audit:
            self.obj.audit = audit
            self.obj.context = audit.context
        db_session.add(self.obj)


class RequestsConverter(BaseConverter):

    metadata_map = OrderedDict([
        ('Type', 'type'),
        ('Program Code', 'slug')
    ])

    object_map = OrderedDict([
        ('Request Type', 'request_type'),
        ('Request Description', 'description'),
        ('Objective Code', 'objective_id'),
        ('Notes', 'notes'),
        ('Test', 'test'),
        ('Assignee', 'assignee'),
        ('Audit Contact', 'auditor_contact'),
        ('Requested On', 'requested_on'),
        ('Due On', 'due_on'),
        ('Status', 'status'),
    ])

    row_converter = RequestRowConverter

    # Overwrite validate functions since they assume a program rather than a directive
    def validate_code(self, attrs):
        if not attrs.get('slug'):
            self.errors.append('Missing Program Code heading')
        elif attrs['slug'] != self.program().slug:
            self.errors.append('Program Code must be {}'.format(self.program().slug))

    def validate_metadata(self, attrs):
        self.validate_metadata_type(attrs, "Requests")
        self.validate_code(attrs)

    def program(self):
        return self.options['program']

    def do_export_metadata(self):
        yield self.metadata_map.keys()
        yield ['Requests', self.program().slug]
        yield []
        yield []
        yield self.object_map.keys()
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

from .base import *

from ggrc.models import Request
from .base_row import *
from collections import OrderedDict


class RequestRowConverter(BaseRowConverter):
    model_class = Request

    def find_by_slug(self, slug):
        return self.model_class.query.filter_by(slug=slug).first()

    def setup_object(self):
        self.obj = self.setup_object_by_slug(self.attrs)
        if self.obj.id is not None:
            self.add_warning('slug', "Request already exists and will be updated")

    def reify(self):
        self.handle('objective_id', ObjectiveHandler)
        self.handle('request_type', RequestTypeColumnHandler, is_required=True)
        self.handle('status', StatusColumnHandler, valid_states=Request.VALID_STATES, default_value='Draft')
        self.handle_date('requested_on', is_required=True)
        self.handle_date('due_on', is_required=True)
        self.handle_text_or_html('description')
        self.handle_text_or_html('test')
        self.handle_text_or_html('notes')
        self.handle_raw_attr('auditor_contact')  # default to audit lead
        self.handle(
            'assignee', AssigneeHandler, is_required=True,
            person_must_exist=True)

    def save_object(self, db_session, **options):
        if options.get('audit'):
            self.obj.audit_id = options.get('audit').id
        db_session.add(self.obj)


class RequestsConverter(BaseConverter):

    metadata_map = OrderedDict([
        ('Type', 'type'),
        ('Program Code', 'slug')
    ])

    object_map = OrderedDict([
        ('Request Type', 'request_type'),
        ('Request Description', 'description'),
        ('Objective Code', 'objective_id'),
        ('Notes', 'notes'),
        ('Test', 'test'),
        ('Assignee', 'assignee'),
        ('Audit Contact', 'auditor_contact'),
        ('Requested On', 'requested_on'),
        ('Due On', 'due_on'),
        ('Status', 'status'),
    ])

    row_converter = RequestRowConverter

    # Overwrite validate functions since they assume a program rather than a directive
    def validate_code(self, attrs):
        if not attrs.get('slug'):
            self.errors.append('Missing Program Code heading')
        elif attrs['slug'] != self.program().slug:
            self.errors.append('Program Code must be {}'.format(self.program().slug))

    def validate_metadata(self, attrs):
        self.validate_metadata_type(attrs, "Requests")
        self.validate_code(attrs)

    def program(self):
        return self.options['program']

    def do_export_metadata(self):
        yield self.metadata_map.keys()
        yield ['Requests', self.program().slug]
        yield []
        yield []
        yield self.object_map.keys()
apache-2.0
Python
3b950782e0f4140715084ebace2820db4f61c600
Fix reindent-rst.py: it works on binary files
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
Tools/scripts/reindent-rst.py
Tools/scripts/reindent-rst.py
#!/usr/bin/env python

# Make a reST file compliant to our pre-commit hook.
# Currently just remove trailing whitespace.

import sys, re, shutil

ws_re = re.compile(br'\s+(\r?\n)$')

def main(argv=sys.argv):
    rv = 0
    for filename in argv[1:]:
        try:
            with open(filename, 'rb') as f:
                lines = f.readlines()
            new_lines = [ws_re.sub(br'\1', line) for line in lines]
            if new_lines != lines:
                print('Fixing %s...' % filename)
                shutil.copyfile(filename, filename + '.bak')
                with open(filename, 'wb') as f:
                    f.writelines(new_lines)
        except Exception as err:
            print('Cannot fix %s: %s' % (filename, err))
            rv = 1
    return rv

if __name__ == '__main__':
    sys.exit(main())
#!/usr/bin/env python

# Make a reST file compliant to our pre-commit hook.
# Currently just remove trailing whitespace.

import sys, re, shutil

ws_re = re.compile(r'\s+(\r?\n)$')

def main(argv=sys.argv):
    rv = 0
    for filename in argv[1:]:
        try:
            with open(filename, 'rb') as f:
                lines = f.readlines()
            new_lines = [ws_re.sub(r'\1', line) for line in lines]
            if new_lines != lines:
                print('Fixing %s...' % filename)
                shutil.copyfile(filename, filename + '.bak')
                with open(filename, 'wb') as f:
                    f.writelines(new_lines)
        except Exception as err:
            print('Cannot fix %s: %s' % (filename, err))
            rv = 1
    return rv

if __name__ == '__main__':
    sys.exit(main())
mit
Python
5221630769f53853fea490e04301c2a19711894c
Fix #296. Generate name according to profile's sex. Thanks @Dutcho for the report
HAYASAKA-Ryosuke/faker,trtd/faker,joke2k/faker,danhuss/faker,meganlkm/faker,joke2k/faker
faker/providers/profile/__init__.py
faker/providers/profile/__init__.py
# coding=utf-8

from .. import BaseProvider
import itertools


class Provider(BaseProvider):
    """
    This provider is a collection of functions to generate personal profiles
    and identities.
    """

    def simple_profile(self):
        """
        Generates a basic profile with personal informations
        """
        sex = self.random_element(["F", "M"])
        if sex == 'F':
            name = self.generator.name_female()
        elif sex == 'M':
            name = self.generator.name_male()
        return {
            "username": self.generator.user_name(),
            "name": name,
            "sex": sex,
            "address": self.generator.address(),
            "mail": self.generator.free_email(),
            #"password":self.generator.password()
            "birthdate": self.generator.date(),
        }

    def profile(self, fields=None):
        """
        Generates a complete profile.
        If "fields" is not empty, only the fields in the list will be returned
        """
        if fields is None:
            fields = []
        d = {
            "job": self.generator.job(),
            "company": self.generator.company(),
            "ssn": self.generator.ssn(),
            "residence": self.generator.address(),
            "current_location": (self.generator.latitude(), self.generator.longitude()),
            "blood_group": "".join(self.random_element(list(itertools.product(["A", "B", "AB", "0"], ["+", "-"])))),
            "website": [self.generator.url() for i in range(1, self.random_int(2, 5))]
        }
        d = dict(d, **self.generator.simple_profile())
        #field selection
        if len(fields) > 0:
            d = dict((k, v) for (k, v) in d.items() if k in fields)
        return d
# coding=utf-8

from .. import BaseProvider
import itertools


class Provider(BaseProvider):
    """
    This provider is a collection of functions to generate personal profiles
    and identities.
    """

    def simple_profile(self):
        """
        Generates a basic profile with personal informations
        """
        return {
            "username": self.generator.user_name(),
            "name": self.generator.name(),
            "sex": self.random_element(["M", "F"]),
            "address": self.generator.address(),
            "mail": self.generator.free_email(),
            #"password":self.generator.password()
            "birthdate": self.generator.date(),
        }

    def profile(self, fields=None):
        """
        Generates a complete profile.
        If "fields" is not empty, only the fields in the list will be returned
        """
        if fields is None:
            fields = []
        d = {
            "job": self.generator.job(),
            "company": self.generator.company(),
            "ssn": self.generator.ssn(),
            "residence": self.generator.address(),
            "current_location": (self.generator.latitude(), self.generator.longitude()),
            "blood_group": "".join(self.random_element(list(itertools.product(["A", "B", "AB", "0"], ["+", "-"])))),
            "website": [self.generator.url() for i in range(1, self.random_int(2, 5))]
        }
        d = dict(d, **self.generator.simple_profile())
        #field selection
        if len(fields) > 0:
            d = dict((k, v) for (k, v) in d.items() if k in fields)
        return d
mit
Python
981a74b116081f3ce1d97262c3c88104a953cdf4
Use numpy's float supporting range
Mause/statistical_atlas_of_au
saau/sections/misc/header.py
saau/sections/misc/header.py
import matplotlib.pyplot as plt
from operator import itemgetter

from lxml.etree import fromstring, XMLSyntaxError
import numpy as np


def parse_lines(lines):
    for line in lines:
        try:
            xml_line = fromstring(line.encode('utf-8'))
        except XMLSyntaxError:
            attrs = []
        else:
            attrs = [thing.tag for thing in xml_line.getiterator()]
            line = list(xml_line.getiterator())[-1].text
        yield line, attrs


def render_header_to(ax, sy, lines, sx=0.5):
    calc = lambda q: q / 20
    y_points = map(calc, np.arange(sy, 0, -1))

    parsed = list(parse_lines(lines))
    lines = map(itemgetter(0), parsed)
    line_attrs = map(itemgetter(1), parsed)

    lines = [
        ax.figure.text(sx, y, text, ha='center')
        for y, text in zip(y_points, lines)
    ]

    for idx, attrs in enumerate(line_attrs):
        if 'b' in attrs:
            lines[idx].set_weight('extra bold')
        if 'i' in attrs:
            lines[idx].set_style('italic')

    return ax


class Header:
    __init__ = lambda self, _, a: None
    has_required_data = lambda _: True

    def build_image(self):
        ax = plt.axes()
        render_header_to(ax)
        plt.show()
        return ax
import matplotlib.pyplot as plt
from operator import gt, lt, itemgetter

from lxml.etree import fromstring, XMLSyntaxError


def frange(start, stop, step):
    cur = start
    op = gt if start > stop else lt
    while op(cur, stop):
        yield cur
        cur += step


def parse_lines(lines):
    for line in lines:
        try:
            xml_line = fromstring(line.encode('utf-8'))
        except XMLSyntaxError:
            attrs = []
        else:
            attrs = [thing.tag for thing in xml_line.getiterator()]
            line = list(xml_line.getiterator())[-1].text
        yield line, attrs


def render_header_to(ax, sy, lines, sx=0.5):
    calc = lambda q: q / 20
    y_points = map(calc, frange(sy, 0, -1))

    parsed = list(parse_lines(lines))
    lines = map(itemgetter(0), parsed)
    line_attrs = map(itemgetter(1), parsed)

    lines = [
        ax.figure.text(sx, y, text, ha='center')
        for y, text in zip(y_points, lines)
    ]

    for idx, attrs in enumerate(line_attrs):
        if 'b' in attrs:
            lines[idx].set_weight('extra bold')
        if 'i' in attrs:
            lines[idx].set_style('italic')

    return ax


class Header:
    __init__ = lambda self, _, a: None
    has_required_data = lambda _: True

    def build_image(self):
        ax = plt.axes()
        render_header_to(ax)
        plt.show()
        return ax
mit
Python
4e2fa3a249b9027f9e50f0f957163cad3bdc28bf
Fix typo
cpsaltis/pythogram-core
src/gramcore/features/points.py
src/gramcore/features/points.py
"""Functions for extracting interest points. These are applied to numpy.arrays representing images. """ import numpy from skimage import feature def harris(parameters): """Harris interest point operator. It wraps `skimage.feature.harris`. The `threshold`, `eps` and `gaussian_deviation` options are not supported. This function returns an array of 0s and 1s. Harris points are marked with 1s. This way the result can be easily transformed to an image. It works on RGB and greyscale images. The wrapped function returns a set of point coordinates in a list. For some reason it is not possible to do something like: >>> points = feature.harris(data, min_distance=5) >>> data[points] = 1 so a for loop is used. .. note:: The coordinates returned are not directly on the corner, but a pixel inside the object (TODO: is this expected?). :param parameters['data'][0]: input array :type parameters['data'][0]: numpy.array :param parameters['min_distance']: minimum number of pixels separating interest points and image boundary, defaults to 10 :type parameters['min_distance']: float :return: numpy.array, it contains 1s where points were found, otherwise 0 """ data = parameters['data'][0] min_distance = parameters.get('min_distance', 10) points = feature.harris(data, min_distance=min_distance) result = numpy.zeros((data.shape[0], data.shape[1]), dtype='uint8') for point in points: result[point[0], point[1]] = 1 return result
"""Functions for extracting interest points. These are applied to numpy.arrays representing images. """ import numpy from skimage import feature def harris(parameters): """Harris interest point operator. It wraps `skimage.feature.harris`. The `threshold`, `eps` and `gaussian_deviation` options are not supported. This function returns an array of 0s and 1s. Harris points are marked with 1s. This way the result can be easily transformed to an image. It works on RGB and greyscale images. The wrapped function returns a set of point coordinates in a list. For some reason it is not possible to do something like: >>> points = feature.harris(data, min_distance=5) >>> data[points] = 1 Instead a for loop is used. :param parameters['data'][0]: input array :type parameters['data'][0]: numpy.array :param parameters['min_distance']: minimum number of pixels separating interest points and image boundary, defaults to 10 :type parameters['min_distance']: float :return: numpy.array, it contains 1s where points were found, otherwise 0 """ data = parameters['data'][0] min_distance = parameters.get('min_distance', 10) points = feature.harris(data, min_distance=pmin_distance) result = numpy.zeros((data.shape[0], data.shape[1]), dtype='uint8') for point in points: result[point[0], point[1]] = 1 return result
mit
Python
f4bbb244716f9471b520f53ebffaf34a31503cd1
Remove unused imports (besides they are Py 2.x only)
CoolProp/CoolProp,CoolProp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,CoolProp/CoolProp,CoolProp/CoolProp,CoolProp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,CoolProp/CoolProp,CoolProp/CoolProp,henningjp/CoolProp,CoolProp/CoolProp
Web/scripts/CPWeb/__init__.py
Web/scripts/CPWeb/__init__.py
""" CPWeb - A collection of commonly used routines to produce CoolProp's online documentation ===== """ from __future__ import division, absolute_import, print_function def get_version(): return 5.0 if __name__ == "__main__": print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version())) print()
""" CPWeb - A collection of commonly used routines to produce CoolProp's online documentation ===== """ from __future__ import division, absolute_import, print_function import codecs import csv import cStringIO def get_version(): return 5.0 if __name__ == "__main__": print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version())) print()
mit
Python
3b77fbb82d2ba098f00f7221070f9610d0d90809
add unknown person
tomviner/dojo-adventure-game
game.py
game.py
import random

from adventurelib import Item, Bag, when, start

import rooms
import characters
from sys import exit

people = '123456'
rooms = 'abcdef'

# murder configuration
# who was where
# who is the murderer

# current configuration
# who was where
# player location

murder_config_people = list(people)
random.shuffle(murder_config_people)

murder_location = random.choice(rooms)
murderer = people[rooms.find(murder_location)]

current_config_people = list(people)
random.shuffle(current_config_people)

current_location = random.choice(rooms)


@when('where am i')
def my_room():
    print("I am in: ", current_location)


@when('go to ROOM')
@when('go to the ROOM')
def to_room(room):
    if room in rooms:
        print("I am now in %s" % room)
        global current_location
        current_location = room
    else:
        print("I can't find the %s" % room)


@when('it was M')
def accuse(m):
    if m == murderer:
        print("Yes, %s is the murderer!" % m)
        exit
    else:
        if m in people:
            print("%s said: 'How could you!'" % m)
        else:
            print("No one has ever heard of '%s'!" % m)


start()
import random

from adventurelib import Item, Bag, when, start

import rooms
import characters
from sys import exit

people = '123456'
rooms = 'abcdef'

# murder configuration
# who was where
# who is the murderer

# current configuration
# who was where
# player location

murder_config_people = list(people)
random.shuffle(murder_config_people)

murder_location = random.choice(rooms)
murderer = people[rooms.find(murder_location)]

current_config_people = list(people)
random.shuffle(current_config_people)

current_location = random.choice(rooms)


@when('where am i')
def my_room():
    print("I am in: " , current_location)


@when('go to ROOM')
@when('go to the ROOM')
def to_room(room):
    if room in rooms:
        print("I am now in %s" % room)
        global current_location
        current_location = room
    else:
        print("I can't find the %s" % room)


@when('it was M')
def accuse(m):
    if m == murderer:
        print("Yes, %s is the murderer!" % m)
        exit
    else:
        print("%s said: 'How could you!'" % m)


start()
mit
Python
144a35d639ccd3a60f100793df00fd62aa81766b
document no trust algo
tomviner/network-rock-paper-scissors
game.py
game.py
""" Play with trust: for player in game: if current player: send move else: listen for move receive move decide winner Play trusting no one: Swap hashes: for player in game: if current player: send hasher(move + salt) else: listen for hash receive hash Swap salts: for player in game: if current player: send move + salt else: listen for move + salt receive move + salt verify hasher(move + salt) == hash decide winner """
""" For player in game: if current player: send move else: listen for move receive move decide winner """
bsd-2-clause
Python
866e0ec72163debd9f46b1ecb8e4d07b040694b4
Fix absolute import
testedminds/sand,testedminds/sand
sand/cytoscape/themes/ops.py
sand/cytoscape/themes/ops.py
from . import colors as c
from . import label_positions as p

settings = {
    # node style
    'NODE_TRANSPARENCY': 255,
    'NODE_SIZE': 25,
    'NODE_BORDER_WIDTH': 4,
    'NODE_BORDER_PAINT': c.BRIGHT_GREEN,
    'NODE_FILL_COLOR': c.DARK_GREEN,
    'NODE_SELECTED_PAINT': c.BRIGHT_YELLOW,

    # node label style
    'NODE_LABEL_COLOR': c.BRIGHT_GRAY,
    'NODE_LABEL_FONT_SIZE': 16,
    'NODE_LABEL_POSITION': p.LOWER_RIGHT,

    # edge style
    'EDGE_TRANSPARENCY': 255,
    'EDGE_WIDTH': 2.5,
    'EDGE_LINE_TYPE': 'SOLID',
    'EDGE_STROKE_SELECTED_PAINT': c.BRIGHT_YELLOW,
    'EDGE_STROKE_UNSELECTED_PAINT': c.BRIGHT_GRAY,
    'EDGE_TARGET_ARROW_UNSELECTED_PAINT': c.BRIGHT_GRAY,
    'EDGE_TARGET_ARROW_SHAPE': 'DELTA',

    # network style
    'NETWORK_BACKGROUND_PAINT': c.DARK_GRAY
}
import sand.cytoscape.themes.colors as c
import sand.cytoscape.themes.label_positions as p

settings = {
    # node style
    'NODE_TRANSPARENCY': 255,
    'NODE_SIZE': 25,
    'NODE_BORDER_WIDTH': 4,
    'NODE_BORDER_PAINT': c.BRIGHT_GREEN,
    'NODE_FILL_COLOR': c.DARK_GREEN,
    'NODE_SELECTED_PAINT': c.BRIGHT_YELLOW,

    # node label style
    'NODE_LABEL_COLOR': c.BRIGHT_GRAY,
    'NODE_LABEL_FONT_SIZE': 16,
    'NODE_LABEL_POSITION': p.LOWER_RIGHT,

    # edge style
    'EDGE_TRANSPARENCY': 255,
    'EDGE_WIDTH': 2.5,
    'EDGE_LINE_TYPE': 'SOLID',
    'EDGE_STROKE_SELECTED_PAINT': c.BRIGHT_YELLOW,
    'EDGE_STROKE_UNSELECTED_PAINT': c.BRIGHT_GRAY,
    'EDGE_TARGET_ARROW_UNSELECTED_PAINT': c.BRIGHT_GRAY,
    'EDGE_TARGET_ARROW_SHAPE': 'DELTA',

    # network style
    'NETWORK_BACKGROUND_PAINT': c.DARK_GRAY
}
apache-2.0
Python
75635315598ccbcad887bf77f7cdc99772157033
Add construct_data function to construct data for the API
meetmangukiya/gistey,meetmangukiya/gistey
gist.py
gist.py
import os
import sys

from parser import parser

args = parser.parse_args()


def process_files(args):
    """
    :param args: The arguments parsed by argparse
    :returns: A dict containing file_names as keys and a dict containing
              a key `content` as the value

    Example return:
    {
        "file_name": {
            "content": {
                # file contents
            }
        }
    }
    """
    files = [os.path.abspath(file) for file in args.files]
    file_contents = {}

    for file in files:
        try:
            f = open(file)
            file_contents[os.path.split(file)[1]] = f.read()
            f.close()
        except FileNotFoundError:
            print('File "{}"\n\tdoes not exist'.format(file))
            should_create = input('Create the gist without this file [Y/n]: ') or 'Y'
            if not should_create == 'Y':
                sys.exit("gist: exiting ...")

    return file_contents


def create_gist(data):
    """
    :param data: The JSON data to be posted to the API
    :returns: request object of the POST request made to create the gist
    """
    end_point = 'https://api.github.com/gists'
    rq = requests.post(end_point, json=data)
    return rq


def construct_data(args):
    """
    :param args: The arguments parsed by argparse
    :returns: `data` dict to be passed to crete the POST request
    """
    data = {
        "public": args.secret,
        "description": args.description,
        "files": process_files(args)
    }
    return data
import os
import sys

from parser import parser

args = parser.parse_args()


def process_files(args):
    """
    :param args: The arguments parsed by argparse
    :returns: A dict containing file_names as keys and a dict containing
              a key `content` as the value

    Example return:
    {
        "file_name": {
            "content": {
                # file contents
            }
        }
    }
    """
    files = [os.path.abspath(file) for file in args.files]
    file_contents = {}

    for file in files:
        try:
            f = open(file)
            file_contents[os.path.split(file)[1]] = f.read()
            f.close()
        except FileNotFoundError:
            print('File "{}"\n\tdoes not exist'.format(file))
            should_create = input('Create the gist without this file [Y/n]: ') or 'Y'
            if not should_create == 'Y':
                sys.exit("gist: exiting ...")

    return file_contents


def create_gist(data):
    """
    :param data: The JSON data to be posted to the API
    :returns: request object of the POST request made to create the gist
    """
    end_point = 'https://api.github.com/gists'
    rq = requests.post(end_point, json=data)
    return rq
mit
Python
37ab58016e69993b5ab1d63c99d9afcf54bd95af
Implement more TGT Neutral Epics
Meerkov/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,liujimj/fireplace,oftc-ftw/fireplace,smallnamespace/fireplace,Ragowit/fireplace,Meerkov/fireplace,liujimj/fireplace,amw2104/fireplace,beheh/fireplace,amw2104/fireplace,NightKev/fireplace,smallnamespace/fireplace,jleclanche/fireplace
fireplace/cards/tgt/neutral_epic.py
fireplace/cards/tgt/neutral_epic.py
from ..utils import *


##
# Minions

# Twilight Guardian
class AT_017:
    play = HOLDING_DRAGON & Buff(SELF, "AT_017e")


# Sideshow Spelleater
class AT_098:
    play = Summon(CONTROLLER, Copy(ENEMY_HERO_POWER))


# Kodorider
class AT_099:
    inspire = Summon(CONTROLLER, "AT_099t")


# Master of Ceremonies
class AT_117:
    play = Find(FRIENDLY_MINIONS + SPELLPOWER) & Buff(SELF, "AT_117e")


# Frost Giant
class AT_120:
    cost = lambda self, i: i - self.controller.times_hero_power_used_this_game


# Crowd Favorite
class AT_121:
    events = Play(CONTROLLER, BATTLECRY).on(Buff(SELF, "AT_121e"))
from ..utils import *


##
# Minions

# Kodorider
class AT_099:
    inspire = Summon(CONTROLLER, "AT_099t")
agpl-3.0
Python
dfe1213ba9de5e5e5aaf9690a2cf5e3b295869fa
Remove Python 3 incompatible print statement
RMKD/networkx,jakevdp/networkx,JamesClough/networkx,jni/networkx,harlowja/networkx,debsankha/networkx,andnovar/networkx,tmilicic/networkx,ghdk/networkx,goulu/networkx,RMKD/networkx,dmoliveira/networkx,beni55/networkx,chrisnatali/networkx,wasade/networkx,nathania/networkx,jakevdp/networkx,blublud/networkx,kernc/networkx,farhaanbukhsh/networkx,Sixshaman/networkx,ghdk/networkx,dhimmel/networkx,sharifulgeo/networkx,kernc/networkx,ionanrozenfeld/networkx,bzero/networkx,aureooms/networkx,nathania/networkx,ghdk/networkx,OrkoHunter/networkx,bzero/networkx,harlowja/networkx,dhimmel/networkx,cmtm/networkx,jfinkels/networkx,SanketDG/networkx,dhimmel/networkx,jni/networkx,dmoliveira/networkx,aureooms/networkx,NvanAdrichem/networkx,aureooms/networkx,debsankha/networkx,yashu-seth/networkx,farhaanbukhsh/networkx,jakevdp/networkx,blublud/networkx,ionanrozenfeld/networkx,blublud/networkx,sharifulgeo/networkx,chrisnatali/networkx,jni/networkx,dmoliveira/networkx,kernc/networkx,harlowja/networkx,ionanrozenfeld/networkx,bzero/networkx,jcurbelo/networkx,ltiao/networkx,farhaanbukhsh/networkx,sharifulgeo/networkx,RMKD/networkx,debsankha/networkx,chrisnatali/networkx,michaelpacer/networkx,nathania/networkx
examples/graph/degree_sequence.py
examples/graph/degree_sequence.py
#!/usr/bin/env python
"""
Random graph from given degree sequence.
"""
__author__ = """Aric Hagberg ([email protected])"""
__date__ = "$Date: 2004-11-03 08:11:09 -0700 (Wed, 03 Nov 2004) $"
__credits__ = """"""
__revision__ = "$Revision: 503 $"
#    Copyright (C) 2004 by
#    Aric Hagberg <[email protected]>
#    Dan Schult <[email protected]>
#    Pieter Swart <[email protected]>
#    All rights reserved.
#    BSD license.

from networkx import *

z=[5,3,3,3,3,2,2,2,1,1,1]

print(is_valid_degree_sequence(z))

print("Configuration model")
G=configuration_model(z)  # configuration model
degree_sequence=list(degree(G).values()) # degree sequence
print("Degree sequence %s" % degree_sequence)
print("Degree histogram")
hist={}
for d in degree_sequence:
    if d in hist:
        hist[d]+=1
    else:
        hist[d]=1
print("degree #nodes")
for d in hist:
    print('%d %d' % (d,hist[d]))
#!/usr/bin/env python
"""
Random graph from given degree sequence.
"""
__author__ = """Aric Hagberg ([email protected])"""
__date__ = "$Date: 2004-11-03 08:11:09 -0700 (Wed, 03 Nov 2004) $"
__credits__ = """"""
__revision__ = "$Revision: 503 $"
#    Copyright (C) 2004 by
#    Aric Hagberg <[email protected]>
#    Dan Schult <[email protected]>
#    Pieter Swart <[email protected]>
#    All rights reserved.
#    BSD license.

from networkx import *

z=[5,3,3,3,3,2,2,2,1,1,1]

print is_valid_degree_sequence(z)

print("Configuration model")
G=configuration_model(z)  # configuration model
degree_sequence=list(degree(G).values()) # degree sequence
print("Degree sequence %s" % degree_sequence)
print("Degree histogram")
hist={}
for d in degree_sequence:
    if d in hist:
        hist[d]+=1
    else:
        hist[d]=1
print("degree #nodes")
for d in hist:
    print('%d %d' % (d,hist[d]))
bsd-3-clause
Python
72e30b3b881418d40dd0446842176fc5c4468802
Add name url converter
mikeboers/Flask-Roots,mikeboers/Flask-Roots
flask_roots/routing.py
flask_roots/routing.py
from werkzeug.routing import BaseConverter


class RegexConverter(BaseConverter):

    def __init__(self, url_map, *items):
        super(RegexConverter, self).__init__(url_map)
        self.regex = items[0]


def strip_accents(s):
    s = unicode(s)
    return ''.join((c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn'))


def _urlify_name(name):
    """Converts a name or title into something we can put into a URI.

    This is designed to only be for one way usage (ie. we can't use the
    urlified names to figure out what photo or photoset we are talking
    about).

    """
    return re.sub(r'\W+', '-', name).strip('-') or 'Untitled'


def urlify_name(name):
    return _urlify_name(strip_accents(name).encode('ascii', 'ignore'))


class NameConverter(BaseConverter):

    def to_python(self, value):
        return value

    def to_url(self, value):
        if not isinstance(value, str) and hasattr(value, 'name'):
            value = value.name
        return urlify_name(str(value)).lower()


def setup_routing(app):
    app.url_map.converters['re'] = RegexConverter
    app.url_map.converters['name'] = NameConverter
from werkzeug.routing import BaseConverter


class RegexConverter(BaseConverter):

    def __init__(self, url_map, *items):
        super(RegexConverter, self).__init__(url_map)
        self.regex = items[0]


def setup_routing(app):
    app.url_map.converters['re'] = RegexConverter
bsd-3-clause
Python
42463351a598d45f2738c894e00d0eceec308f9c
Add docstring
wholebiome/aegea,wholebiome/aegea,kislyuk/aegea,wholebiome/aegea,kislyuk/aegea,kislyuk/aegea
aegea/billing.py
aegea/billing.py
""" View detailed billing reports. Detailed billing reports can be configured at https://console.aws.amazon.com/billing/home#/preferences. """ from __future__ import absolute_import, division, print_function, unicode_literals import os, sys, json, zipfile, csv, io from io import BytesIO, TextIOWrapper from datetime import datetime import boto3, requests from botocore.exceptions import ClientError from . import register_parser from .util.printing import format_table, page_output, get_field, get_cell, tabulate from .util.aws import ARN def filter_line_items(args): def filter_fn(item): if args.min_cost and float(item["Cost"]) < args.min_cost: return False return True return filter_fn def billing(args): s3 = boto3.resource("s3") iam = boto3.resource("iam") account_id = ARN(iam.CurrentUser().user.arn).account_id args.detailed_billing_reports_bucket = args.detailed_billing_reports_bucket.format(account_id=account_id) now = datetime.utcnow() report = "{account_id}-aws-billing-detailed-line-items-with-resources-and-tags-{year}-{month}.csv.zip" report = report.format(account_id=account_id, year=args.year or now.year, month="%02d" % (args.month or now.month)) try: billing_object = s3.Bucket(args.detailed_billing_reports_bucket).Object(report) billing_object_body = billing_object.get()["Body"] except ClientError as e: console_url = "https://console.aws.amazon.com/billing/home#/preferences" msg = "Can't get detailed billing report {} from bucket {} in account {}: {}. Go to {} to set up detailed billing." sys.exit(msg.format(report, args.detailed_billing_reports_bucket, account_id, e, console_url)) zbuf = BytesIO(billing_object_body.read()) with zipfile.ZipFile(zbuf) as zfile: with TextIOWrapper(zfile.open(report.rstrip(".zip"))) as fh: reader = csv.DictReader(fh) page_output(tabulate(filter(filter_line_items(args), reader), args)) parser = register_parser(billing, help='List contents of AWS detailed billing reports', description=__doc__) parser.add_argument("--columns", nargs="+") #parser.add_argument("--sort-by") parser.add_argument("--year", type=int, help="Year to get billing reports for. Defaults to current year") parser.add_argument("--month", type=int, help="Month (numeral) to get billing reports for. Defaults to current month") parser.add_argument("--detailed-billing-reports-bucket", help="Name of S3 bucket to retrieve detailed billing reports from") parser.add_argument("--min-cost", type=float, help="Omit billing line items below this cost")
""" View detailed billing reports. Detailed billing reports can be configured at https://console.aws.amazon.com/billing/home#/preferences. """ from __future__ import absolute_import, division, print_function, unicode_literals import os, sys, json, zipfile, csv, io from io import BytesIO, TextIOWrapper from datetime import datetime import boto3, requests from botocore.exceptions import ClientError from . import register_parser from .util.printing import format_table, page_output, get_field, get_cell, tabulate from .util.aws import ARN def filter_line_items(args): def filter_fn(item): if args.min_cost and float(item["Cost"]) < args.min_cost: return False return True return filter_fn def billing(args): s3 = boto3.resource("s3") iam = boto3.resource("iam") account_id = ARN(iam.CurrentUser().user.arn).account_id args.detailed_billing_reports_bucket = args.detailed_billing_reports_bucket.format(account_id=account_id) now = datetime.utcnow() report = "{account_id}-aws-billing-detailed-line-items-with-resources-and-tags-{year}-{month}.csv.zip" report = report.format(account_id=account_id, year=args.year or now.year, month="%02d" % (args.month or now.month)) try: billing_object = s3.Bucket(args.detailed_billing_reports_bucket).Object(report) billing_object_body = billing_object.get()["Body"] except ClientError as e: console_url = "https://console.aws.amazon.com/billing/home#/preferences" msg = "Can't get detailed billing report {} from bucket {} in account {}: {}. Go to {} to set up detailed billing." sys.exit(msg.format(report, args.detailed_billing_reports_bucket, account_id, e, console_url)) zbuf = BytesIO(billing_object_body.read()) with zipfile.ZipFile(zbuf) as zfile: with TextIOWrapper(zfile.open(report.rstrip(".zip"))) as fh: reader = csv.DictReader(fh) page_output(tabulate(filter(filter_line_items(args), reader), args)) parser = register_parser(billing, help='List contents of AWS detailed billing reports') parser.add_argument("--columns", nargs="+") #parser.add_argument("--sort-by") parser.add_argument("--year", type=int, help="Year to get billing reports for. Defaults to current year") parser.add_argument("--month", type=int, help="Month (numeral) to get billing reports for. Defaults to current month") parser.add_argument("--detailed-billing-reports-bucket", help="Name of S3 bucket to retrieve detailed billing reports from") parser.add_argument("--min-cost", type=float, help="Omit billing line items below this cost")
apache-2.0
Python
803fead9cbfa9d2a950e9fa16f42e905f6a942d7
add module imports
jml/flocker,Azulinho/flocker,runcom/flocker,agonzalezro/flocker,hackday-profilers/flocker,w4ngyi/flocker,1d4Nf6/flocker,runcom/flocker,1d4Nf6/flocker,AndyHuu/flocker,hackday-profilers/flocker,mbrukman/flocker,moypray/flocker,Azulinho/flocker,jml/flocker,achanda/flocker,mbrukman/flocker,achanda/flocker,wallnerryan/flocker-profiles,hackday-profilers/flocker,achanda/flocker,1d4Nf6/flocker,LaynePeng/flocker,lukemarsden/flocker,Azulinho/flocker,moypray/flocker,agonzalezro/flocker,runcom/flocker,lukemarsden/flocker,lukemarsden/flocker,jml/flocker,agonzalezro/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,LaynePeng/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,AndyHuu/flocker,w4ngyi/flocker,mbrukman/flocker,adamtheturtle/flocker,LaynePeng/flocker,moypray/flocker,AndyHuu/flocker,w4ngyi/flocker
flocker/ca/__init__.py
flocker/ca/__init__.py
# Copyright ClusterHQ Inc. See LICENSE file for details.

"""
A minimal certificate authority.
"""

__all__ = [
    "RootCredential", "ControlCredential", "NodeCredential", "UserCredential",
    "ComparableKeyPair", "PathError", "CertificateAlreadyExistsError",
    "KeyAlreadyExistsError", "EXPIRY_20_YEARS",
    "AUTHORITY_CERTIFICATE_FILENAME", "AUTHORITY_KEY_FILENAME",
    "CONTROL_CERTIFICATE_FILENAME", "CONTROL_KEY_FILENAME"
]

from ._ca import (
    RootCredential, ControlCredential, NodeCredential, UserCredential,
    ComparableKeyPair, PathError, CertificateAlreadyExistsError,
    KeyAlreadyExistsError, EXPIRY_20_YEARS,
    AUTHORITY_CERTIFICATE_FILENAME, AUTHORITY_KEY_FILENAME,
    CONTROL_CERTIFICATE_FILENAME, CONTROL_KEY_FILENAME
)
# Copyright ClusterHQ Inc. See LICENSE file for details.

"""
A minimal certificate authority.
"""

__all__ = [
    "RootCredential", "ControlCredential", "NodeCredential",
    "ComparableKeyPair", "PathError", "CertificateAlreadyExistsError",
    "KeyAlreadyExistsError", "EXPIRY_20_YEARS",
    "AUTHORITY_CERTIFICATE_FILENAME", "AUTHORITY_KEY_FILENAME",
    "CONTROL_CERTIFICATE_FILENAME", "CONTROL_KEY_FILENAME"
]

from ._ca import (
    RootCredential, ControlCredential, NodeCredential,
    ComparableKeyPair, PathError, CertificateAlreadyExistsError,
    KeyAlreadyExistsError, EXPIRY_20_YEARS,
    AUTHORITY_CERTIFICATE_FILENAME, AUTHORITY_KEY_FILENAME,
    CONTROL_CERTIFICATE_FILENAME, CONTROL_KEY_FILENAME
)
apache-2.0
Python
b45db0476212891dd23934d775bc3082cbcaabdf
Fix KLD
mufid/berkilau,mufid/berkilau
ws/CSUIBotClass2014/MCL/kldmcl.py
ws/CSUIBotClass2014/MCL/kldmcl.py
# @obj: implement the standard MCL alg.; table 8.2 on the book Prob. Robotics by S. Thrun
# @author: vektor dewanto
import numpy as np
import CSUIBotClass2014.action_model.model_uas as act_model
import CSUIBotClass2014.perception_model.beam_range_finder_model as obs_model
from scipy import stats
import math

def normalize_weight(X):
    # Normalize all weights, so that they sum up to one
    total_w = sum([xw[1] for xw in X])
    X = [(xw[0], xw[1]/total_w) for xw in X]
    return X

def resample(X_bar):
    '''
    draw i with probability proportional to w_t^i
    '''
    X_bar = normalize_weight(X_bar)
    X = []
    while len(X) < len(X_bar):
        candidate_idx = np.random.random_integers(low=0, high= len(X_bar)-1)
        candidate_w = X_bar[candidate_idx][1]
        sampled = np.random.binomial(n=1, p=candidate_w)# a Bernoulli dist.
        if sampled==1:
            return X_bar[candidate_idx]
    return X

def run(X_past, u, z, m):
    '''
    \param X: is a list of tuples (x, w)
    \param u: the control/action
    \param z: the observation
    \param m: the given map
    '''
    epsilon = 0.05
    delta = 0.01
    Xt = []
    b = [[0]*20]*20
    M = 0
    Mx = 0
    Mxmin = 20
    k = 0
    n_particle = len(X_past)# fixed #particle for ever :(
    while True:
        xt1 = resample(X_past)
        print "menunggu pagi"
        print len(X_past)
        print xt1
        xmt = act_model.sample_motion_model(u, xt1[0], m)
        w = 1-obs_model.beam_range_finder_model(z, xmt, m)
        Xt.append((xmt, w))
        idx = int(math.floor(xmt['x']))
        idy = int(math.floor(xmt['y']))
        if(b[idy][idx]==0):
            k += 1
            b[idy][idx] = 1
            if(k>1):
                var1 = 2.0/(9*(k-1))
                Mx = ((k-1)/2.0*epsilon* (1 - var1 + math.sqrt(var1)*stats.norm.ppf(1-delta))**3)
        M+=1
        if not ((M<Mx) or (M<Mxmin)):
            return Xt
    return Xt
# @obj: implement the standard MCL alg.; table 8.2 on the book Prob. Robotics by S. Thrun
# @author: vektor dewanto
import numpy as np
import CSUIBotClass2014.action_model.model_uas as act_model
import CSUIBotClass2014.perception_model.beam_range_finder_model as obs_model

def normalize_weight(X):
    # Normalize all weights, so that they sum up to one
    total_w = sum([xw[1] for xw in X])
    X = [(xw[0], xw[1]/total_w) for xw in X]
    return X

def resample(X_bar):
    '''
    draw i with probability proportional to w_t^i
    '''
    X_bar = normalize_weight(X_bar)
    X = []
    while len(X) < len(X_bar):
        candidate_idx = np.random.random_integers(low=0, high= len(X_bar)-1)
        candidate_w = X_bar[candidate_idx][1]
        sampled = np.random.binomial(n=1, p=candidate_w)# a Bernoulli dist.
        if sampled==1:
            X.append(X_bar[candidate_idx])
    return X

def run(X_past, u, z, m):
    '''
    \param X: is a list of tuples (x, w)
    \param u: the control/action
    \param z: the observation
    \param m: the given map
    '''
    X_bar = []
    X = []
    n_particle = len(X_past)# fixed #particle for ever :(
    for i in range(n_particle):
        x = act_model.sample_motion_model(u, X_past[i][0], m)
        w = 1-obs_model.beam_range_finder_model(z, x, m)
        X_bar.append((x, w))
    X = resample(X_bar)
    return X
mit
Python
bf476a199492c7966b6a3886da284867622a8b04
Update populate_vm_metrics.py
GoogleCloudPlatform/gcsfuse,GoogleCloudPlatform/gcsfuse,GoogleCloudPlatform/gcsfuse
perfmetrics/scripts/populate_vm_metrics.py
perfmetrics/scripts/populate_vm_metrics.py
"""Executes vm_metrics.py by passing appropriate arguments. To run the script: >> python3 populate_vm_metrics.py <start_time> <end_time> """ import socket import sys import time import os from vm_metrics import vm_metrics INSTANCE = socket.gethostname() metric_data_name = ['start_time_sec', 'cpu_utilization_peak','cpu_utilization_mean', 'network_bandwidth_peak', 'network_bandwidth_mean', 'gcs/ops_latency', 'gcs/read_bytes_count', 'gcs/ops_error_count'] if __name__ == '__main__': argv = sys.argv if len(argv) != 3: raise TypeError('Incorrect number of arguments.\n' 'Usage: ' 'python3 populate_vm_metrics.py <start_time> <end_time>') print('Waiting for 250 seconds for metrics to be updated on VM...') # It takes up to 240 seconds for sampled data to be visible on the VM metrics graph # So, waiting for 250 seconds to ensure the returned metrics are not empty time.sleep(250) vm_metrics_obj = vm_metrics.VmMetrics() start_time_sec = int(argv[1]) end_time_sec = int(argv[2]) period = end_time_sec - start_time_sec print(f'Getting VM metrics for ML model') vm_metrics_obj.fetch_metrics_and_write_to_google_sheet(start_time_sec, end_time_sec, INSTANCE, period, 'read', 'ml_metrics')
"""Executes vm_metrics.py by passing appropriate arguments. To run the script: >> python3 populate_vm_metrics.py <start_time> <end_time> """ import socket import sys import time import os from vm_metrics import vm_metrics INSTANCE = socket.gethostname() metric_data_name = ['start_time_sec', 'cpu_utilization_peak','cpu_utilization_mean', 'network_bandwidth_peak', 'network_bandwidth_mean', 'gcs/ops_latency', 'gcs/read_bytes_count', 'gcs/ops_error_count'] if __name__ == '__main__': argv = sys.argv if len(argv) != 3: raise TypeError('Incorrect number of arguments.\n' 'Usage: ' 'python3 populate_vm_metrics.py <start_time> <end_time>') print('Waiting for 250 seconds for metrics to be updated on VM...') # It takes up to 240 seconds for sampled data to be visible on the VM metrics graph # So, waiting for 250 seconds to ensure the returned metrics are not empty time.sleep(250) vm_metrics_obj = vm_metrics.VmMetrics() start_time_sec = int(argv[1]) end_time_sec = int(argv[2]) period = end_time_sec - start_time_sec print(f'Getting VM metrics for ML model') vm_metrics_obj.fetch_metrics_and_write_to_google_sheet(start_time_sec, end_time_sec, INSTANCE, period, 'read', 'ml_metrics!')
apache-2.0
Python
c18972be7609b3de061ec41977ad73efccd5213c
Fix HTTP Basic authentication decorator
lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django
agir/lib/http.py
agir/lib/http.py
import base64
from functools import wraps
from hashlib import sha1

from django.http import HttpResponse
from django.utils.crypto import constant_time_compare

EMPTY_HASH = sha1().digest()


class HttpResponseUnauthorized(HttpResponse):
    status_code = 401

    def __init__(self, content=b'', realm="api", *args, **kwargs):
        super().__init__(content, *args, **kwargs)
        self['WWW-Authenticate'] = f'Basic realm="{realm}", charset="UTF-8"'


def check_basic_auth(request, identities):
    auth = request.META.get('HTTP_AUTHORIZATION', '').split()

    if len(auth) != 2 or auth[0].lower() != 'basic':
        return HttpResponseUnauthorized()

    try:
        user, password = base64.b64decode(auth[1]).split(b':')
    except:
        return HttpResponseUnauthorized()

    h = sha1()
    h.update(password)
    digest = h.digest()

    user_exists = user in identities
    identical_password = constant_time_compare(digest, identities.get(user, EMPTY_HASH))

    if not user_exists or not identical_password:
        return HttpResponseUnauthorized()

    return None


def with_http_basic_auth(identities):
    hashed_identities = {}
    for user, password in identities.items():
        h = sha1()
        h.update(password.encode('utf8'))
        hashed_identities[user.encode('utf8')] = h.digest()

    def decorator(view):
        if isinstance(view, type):
            wrapped_dispatch = type.dispatch

            @wraps(wrapped_dispatch)
            def wrapper(self, request, *args, **kwargs):
                return check_basic_auth(request, hashed_identities) or wrapped_dispatch(self, request, *args, **kwargs)

            view.dispatch = wrapper
            return view

        @wraps(view)
        def wrapper(request, *args, **kwargs):
            return check_basic_auth(request, hashed_identities) or view(request, *args, **kwargs)

        return wrapper

    return decorator
import base64
from functools import wraps
from hashlib import sha1

from django.http import HttpResponse
from django.utils.crypto import constant_time_compare

EMPTY_HASH = sha1().digest()


class HttpResponseUnauthorized(HttpResponse):
    status_code = 401

    def __init__(self, content=b'', realm="api", *args, **kwargs):
        super().__init__(content, *args, **kwargs)
        self['WWW-Authenticate'] = f'Basic realm="{realm}"'


def check_basic_auth(request, identities):
    auth = request.META.get('HTTP_AUTHORIZATION', '').split()

    if len(auth) != 2 or auth[0].lower() != 'basic':
        return HttpResponseUnauthorized()

    try:
        user, password = base64.b64decode(auth[1]).decode().split(':')
    except:
        return HttpResponseUnauthorized()

    h = sha1()
    h.update(password)
    digest = h.digest()

    user_exists = user in identities
    identical_password = constant_time_compare(digest, identities.get(user, EMPTY_HASH))

    if not user_exists or not identical_password:
        return HttpResponseUnauthorized()


def with_http_basic_auth(identities):
    hashed_identities = {}
    for user, password in identities.items():
        h = sha1()
        h.update(password.encode('utf8'))
        hashed_identities[user] = h.digest()

    def decorator(view):
        if isinstance(view, type):
            wrapped_dispatch = type.dispatch

            @wraps(wrapped_dispatch)
            def wrapper(self, request, *args, **kwargs):
                check_basic_auth(request, hashed_identities)
                return wrapped_dispatch(self, request, *args, **kwargs)

            view.dispatch = wrapper
            return view

        @wraps(view)
        def wrapper(request, *args, **kwargs):
            check_basic_auth(request, hashed_identities)
            return view(request, *args, **kwargs)

        return wrapper

    return decorator
agpl-3.0
Python
3e8d113a6fa32c7c9163d3334e484993c29080ba
remove split test
tlevine/vlermv
vlermv/test/test_s3.py
vlermv/test/test_s3.py
import json

import pytest

from .._s3 import S3Vlermv

class FakeBucket:
    def __init__(self, name, **db):
        self.db = db
        self.name = name
    def list(self):
        for key in self.db:
            yield self.new_key(key)
    def new_key(self, key):
        return FakeKey(self.db, key)
    def get_key(self, key):
        if key in self.db:
            return FakeKey(self.db, key)
    def delete_key(self, key):
        del(self.db[key])

class FakeKey:
    def __init__(self, db, key):
        self.db = db
        self.key = key
    def get_contents_as_string(self):
        return self.db[self.key]
    def get_contents_to_filename(self, filename):
        with open(filename, 'wb') as fp:
            fp.write(self.db[self.key])
    def set_contents_from_string(self, payload, **kwargs):
        self.db[self.key] = payload
    def set_contents_from_filename(self, filename, **kwargs):
        with open(filename, 'rb') as fp:
            self.db[self.key] = fp.read()

CONTRACT = {
    'bids': [],
    'contract': 'http://search.worldbank.org/wcontractawards/procdetails/OP00032101',
    'method.selection': 'QCBS ? Quality andCost-Based Selection',
    'price': 'INR 1,96,53,750',
    'project': None
}
PAYLOAD = json.dumps(CONTRACT).encode('utf-8')

def test_read():
    d = S3Vlermv('contracts', serializer = json,
                 bucket = FakeBucket('aoeu', OP00032101 = PAYLOAD))
    assert d['OP00032101'] == CONTRACT

def test_write():
    fakebucket = FakeBucket('aoeu')
    d = S3Vlermv('contracts', bucket = fakebucket, serializer = json)
    assert fakebucket.db == {}
    d['OP00032101'] = CONTRACT
    assert fakebucket.db == {'OP00032101': PAYLOAD}

def test_delete():
    fakebucket = FakeBucket('aoeu')
    d = S3Vlermv('contracts', bucket = fakebucket, serializer = json)
    d['OP00032101'] = CONTRACT
    del(d['OP00032101'])
    assert len(fakebucket.db) == 0
import json

import pytest

from .._s3 import S3Vlermv, split

class FakeBucket:
    def __init__(self, name, **db):
        self.db = db
        self.name = name
    def list(self):
        for key in self.db:
            yield self.new_key(key)
    def new_key(self, key):
        return FakeKey(self.db, key)
    def get_key(self, key):
        if key in self.db:
            return FakeKey(self.db, key)
    def delete_key(self, key):
        del(self.db[key])

class FakeKey:
    def __init__(self, db, key):
        self.db = db
        self.key = key
    def get_contents_as_string(self):
        return self.db[self.key]
    def get_contents_to_filename(self, filename):
        with open(filename, 'wb') as fp:
            fp.write(self.db[self.key])
    def set_contents_from_string(self, payload, **kwargs):
        self.db[self.key] = payload
    def set_contents_from_filename(self, filename, **kwargs):
        with open(filename, 'rb') as fp:
            self.db[self.key] = fp.read()

CONTRACT = {
    'bids': [],
    'contract': 'http://search.worldbank.org/wcontractawards/procdetails/OP00032101',
    'method.selection': 'QCBS ? Quality andCost-Based Selection',
    'price': 'INR 1,96,53,750',
    'project': None
}
PAYLOAD = json.dumps(CONTRACT).encode('utf-8')

def test_read():
    d = S3Vlermv('contracts', serializer = json,
                 bucket = FakeBucket('aoeu', OP00032101 = PAYLOAD))
    assert d['OP00032101'] == CONTRACT

def test_write():
    fakebucket = FakeBucket('aoeu')
    d = S3Vlermv('contracts', bucket = fakebucket, serializer = json)
    assert fakebucket.db == {}
    d['OP00032101'] = CONTRACT
    assert fakebucket.db == {'OP00032101': PAYLOAD}

def test_split():
    assert split('a/bb/cc') == ('a', 'bb', 'cc')
    assert split('one') == ('one',)

def test_delete():
    fakebucket = FakeBucket('aoeu')
    d = S3Vlermv('contracts', bucket = fakebucket, serializer = json)
    d['OP00032101'] = CONTRACT
    del(d['OP00032101'])
    assert len(fakebucket.db) == 0
agpl-3.0
Python
d4ffe068638aa1394c1a34eaa43859edb47c0473
Update hodograph_inset example for plot the colormap by height.
ShawnMurd/MetPy,ahaberlie/MetPy,dopplershift/MetPy,Unidata/MetPy,Unidata/MetPy,ahaberlie/MetPy,dopplershift/MetPy
examples/plots/Hodograph_Inset.py
examples/plots/Hodograph_Inset.py
# Copyright (c) 2016 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Hodograph Inset
===============

Layout a Skew-T plot with a hodograph inset into the plot.
"""

import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import numpy as np
import pandas as pd

import metpy.calc as mpcalc
from metpy.cbook import get_test_data
from metpy.plots import add_metpy_logo, Hodograph, SkewT
from metpy.units import units

###########################################
# Upper air data can be obtained using the siphon package, but for this example we will use
# some of MetPy's sample data.

col_names = ['pressure', 'height', 'temperature', 'dewpoint', 'direction', 'speed']

df = pd.read_fwf(get_test_data('may4_sounding.txt', as_file_obj=False),
                 skiprows=5, usecols=[0, 1, 2, 3, 6, 7], names=col_names)

df['u_wind'], df['v_wind'] = mpcalc.wind_components(df['speed'],
                                                    np.deg2rad(df['direction']))

# Drop any rows with all NaN values for T, Td, winds
df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed',
                       'u_wind', 'v_wind'), how='all').reset_index(drop=True)

###########################################
# We will pull the data out of the example dataset into individual variables and
# assign units.

hght = df['height'].values * units.hPa
p = df['pressure'].values * units.hPa
T = df['temperature'].values * units.degC
Td = df['dewpoint'].values * units.degC
wind_speed = df['speed'].values * units.knots
wind_dir = df['direction'].values * units.degrees
u, v = mpcalc.wind_components(wind_speed, wind_dir)

###########################################
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
add_metpy_logo(fig, 115, 100)

# Grid for plots
skew = SkewT(fig, rotation=45)

# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)

# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()

# Good bounds for aspect ratio
skew.ax.set_xlim(-50, 60)

# Create a hodograph
ax_hod = inset_axes(skew.ax, '40%', '40%', loc=1)
h = Hodograph(ax_hod, component_range=80.)
h.add_grid(increment=20)
h.plot_colormapped(u, v, hght)

# Show the plot
plt.show()
# Copyright (c) 2016 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Hodograph Inset
===============

Layout a Skew-T plot with a hodograph inset into the plot.
"""

import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import numpy as np
import pandas as pd

import metpy.calc as mpcalc
from metpy.cbook import get_test_data
from metpy.plots import add_metpy_logo, Hodograph, SkewT
from metpy.units import units

###########################################
# Upper air data can be obtained using the siphon package, but for this example we will use
# some of MetPy's sample data.

col_names = ['pressure', 'height', 'temperature', 'dewpoint', 'direction', 'speed']

df = pd.read_fwf(get_test_data('may4_sounding.txt', as_file_obj=False),
                 skiprows=5, usecols=[0, 1, 2, 3, 6, 7], names=col_names)

df['u_wind'], df['v_wind'] = mpcalc.wind_components(df['speed'],
                                                    np.deg2rad(df['direction']))

# Drop any rows with all NaN values for T, Td, winds
df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed',
                       'u_wind', 'v_wind'), how='all').reset_index(drop=True)

###########################################
# We will pull the data out of the example dataset into individual variables and
# assign units.

p = df['pressure'].values * units.hPa
T = df['temperature'].values * units.degC
Td = df['dewpoint'].values * units.degC
wind_speed = df['speed'].values * units.knots
wind_dir = df['direction'].values * units.degrees
u, v = mpcalc.wind_components(wind_speed, wind_dir)

###########################################
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
add_metpy_logo(fig, 115, 100)

# Grid for plots
skew = SkewT(fig, rotation=45)

# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)

# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()

# Good bounds for aspect ratio
skew.ax.set_xlim(-50, 60)

# Create a hodograph
ax_hod = inset_axes(skew.ax, '40%', '40%', loc=1)
h = Hodograph(ax_hod, component_range=80.)
h.add_grid(increment=20)
h.plot_colormapped(u, v, np.hypot(u, v))

# Show the plot
plt.show()
bsd-3-clause
Python
3b0865bbfcee18afb842cc9f50f8c83c0d70f221
Add the other v ;-).
saifrahmed/bokeh,azjps/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,josherick/bokeh,rs2/bokeh,ChinaQuants/bokeh,schoolie/bokeh,schoolie/bokeh,daodaoliang/bokeh,timsnyder/bokeh,phobson/bokeh,bokeh/bokeh,ChinaQuants/bokeh,xguse/bokeh,bokeh/bokeh,timsnyder/bokeh,quasiben/bokeh,justacec/bokeh,ptitjano/bokeh,CrazyGuo/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,matbra/bokeh,stonebig/bokeh,matbra/bokeh,ptitjano/bokeh,schoolie/bokeh,carlvlewis/bokeh,stonebig/bokeh,draperjames/bokeh,dennisobrien/bokeh,jakirkham/bokeh,philippjfr/bokeh,timsnyder/bokeh,ptitjano/bokeh,CrazyGuo/bokeh,bokeh/bokeh,ChinaQuants/bokeh,justacec/bokeh,percyfal/bokeh,philippjfr/bokeh,azjps/bokeh,ericdill/bokeh,aavanian/bokeh,phobson/bokeh,percyfal/bokeh,josherick/bokeh,gpfreitas/bokeh,dennisobrien/bokeh,srinathv/bokeh,xguse/bokeh,aavanian/bokeh,roxyboy/bokeh,azjps/bokeh,josherick/bokeh,CrazyGuo/bokeh,rothnic/bokeh,ericmjl/bokeh,dennisobrien/bokeh,bokeh/bokeh,jplourenco/bokeh,matbra/bokeh,gpfreitas/bokeh,rothnic/bokeh,aavanian/bokeh,clairetang6/bokeh,jakirkham/bokeh,evidation-health/bokeh,aavanian/bokeh,Karel-van-de-Plassche/bokeh,clairetang6/bokeh,Karel-van-de-Plassche/bokeh,carlvlewis/bokeh,matbra/bokeh,roxyboy/bokeh,jplourenco/bokeh,jakirkham/bokeh,phobson/bokeh,stonebig/bokeh,muku42/bokeh,KasperPRasmussen/bokeh,ptitjano/bokeh,carlvlewis/bokeh,evidation-health/bokeh,DuCorey/bokeh,paultcochrane/bokeh,DuCorey/bokeh,saifrahmed/bokeh,DuCorey/bokeh,daodaoliang/bokeh,percyfal/bokeh,muku42/bokeh,josherick/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,jakirkham/bokeh,ChinaQuants/bokeh,mindriot101/bokeh,clairetang6/bokeh,ericmjl/bokeh,aavanian/bokeh,rothnic/bokeh,azjps/bokeh,saifrahmed/bokeh,deeplook/bokeh,tacaswell/bokeh,DuCorey/bokeh,maxalbert/bokeh,htygithub/bokeh,timsnyder/bokeh,evidation-health/bokeh,tacaswell/bokeh,paultcochrane/bokeh,evidation-health/bokeh,gpfreitas/bokeh,deeplook/bokeh,draperjames/bokeh,ericmjl/bokeh,maxalbert/bokeh,dennisobrien/bokeh,philippjfr/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,khkaminska/bokeh,aiguofer/bokeh,srinathv/bokeh,khkaminska/bokeh,carlvlewis/bokeh,timsnyder/bokeh,mindriot101/bokeh,rothnic/bokeh,deeplook/bokeh,phobson/bokeh,caseyclements/bokeh,quasiben/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,daodaoliang/bokeh,ericdill/bokeh,jplourenco/bokeh,dennisobrien/bokeh,daodaoliang/bokeh,Karel-van-de-Plassche/bokeh,aiguofer/bokeh,msarahan/bokeh,ericdill/bokeh,mindriot101/bokeh,philippjfr/bokeh,rs2/bokeh,srinathv/bokeh,paultcochrane/bokeh,jakirkham/bokeh,justacec/bokeh,xguse/bokeh,saifrahmed/bokeh,ericdill/bokeh,roxyboy/bokeh,muku42/bokeh,mindriot101/bokeh,deeplook/bokeh,caseyclements/bokeh,stonebig/bokeh,schoolie/bokeh,percyfal/bokeh,percyfal/bokeh,khkaminska/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,maxalbert/bokeh,ericmjl/bokeh,gpfreitas/bokeh,maxalbert/bokeh,phobson/bokeh,philippjfr/bokeh,xguse/bokeh,htygithub/bokeh,justacec/bokeh,jplourenco/bokeh,rs2/bokeh,CrazyGuo/bokeh,srinathv/bokeh,clairetang6/bokeh,tacaswell/bokeh,msarahan/bokeh,roxyboy/bokeh,aiguofer/bokeh,rs2/bokeh,azjps/bokeh,draperjames/bokeh,khkaminska/bokeh,quasiben/bokeh,draperjames/bokeh,tacaswell/bokeh,msarahan/bokeh,msarahan/bokeh,paultcochrane/bokeh,caseyclements/bokeh,schoolie/bokeh,muku42/bokeh,bokeh/bokeh,ptitjano/bokeh,ericmjl/bokeh,htygithub/bokeh,caseyclements/bokeh
sphinx/fabfile.py
sphinx/fabfile.py
from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project

import sys
sys.path.append("source")
import conf

env.roledefs = {
    'web': ['bokeh.pydata.org']
}
env.user = "bokeh"

@roles('web')
def deploy(v=None):
    if v is None:
        v = conf.version
    elif v == "latest":
        raise RuntimeError("You can not pass 'latest' as fab argument. Use "
                           "fab latest:x.x.x instead.")

    # make a backup of the old directory
    run("rm -rf /www/bokeh/en/%s.bak" % v)
    run("mkdir -p /www/bokeh/en/%s" % v)
    run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))

    rsync_project(
        local_dir="_build/html/",
        remote_dir="/www/bokeh/en/%s" % v,
        delete=True
    )

    # set permissions
    run("chmod -R g+w /www/bokeh/en/%s" % v)

@roles('web')
def latest(v=None):
    if v is None:
        raise RuntimeError("You need to specify a version number: fab latest:x.x.x")

    if exists("/www/bokeh/en/%s" % v):
        # switch the current symlink to new docs
        run("rm /www/bokeh/en/latest")
        run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
    else:
        raise RuntimeError("We did not detect a %s docs version, please use "
                           "fab deploy:%s first." % (v, v))
from fabric.api import run, env, roles
from fabric.contrib.files import exists
from fabric.contrib.project import rsync_project

import sys
sys.path.append("source")
import conf

env.roledefs = {
    'web': ['bokeh.pydata.org']
}
env.user = "bokeh"

@roles('web')
def deploy(v=None):
    if v is None:
        v = conf.version
    elif v == "latest":
        raise RuntimeError("You can not pass 'latest' as fab argument. Use "
                           "fab latest:x.x.x instead.")

    # make a backup of the old directory
    run("rm -rf /www/bokeh/en/%s.bak" % v)
    run("mkdir -p /www/bokeh/en/%s" % v)
    run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))

    rsync_project(
        local_dir="_build/html/",
        remote_dir="/www/bokeh/en/%s" % v,
        delete=True
    )

    # set permissions
    run("chmod -R g+w /www/bokeh/en/%s" % v)

@roles('web')
def latest(v=None):
    if v is None:
        raise RuntimeError("You need to specify a version number: fab latest:x.x.x")

    if exists("/www/bokeh/en/%s" % v):
        # switch the current symlink to new docs
        run("rm /www/bokeh/en/latest")
        run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
    else:
        raise RuntimeError("We did not detect a %s docs version, please use "
                           "fab deploy:%s first." % v)
bsd-3-clause
Python
b3ddba27c92f36ee9534903b43ff632daa148585
Fix public body search index by indexing jurisdiction name
CodeforHawaii/froide,ryankanno/froide,okfse/froide,stefanw/froide,CodeforHawaii/froide,catcosmo/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,fin/froide,okfse/froide,LilithWittmann/froide,catcosmo/froide,fin/froide,okfse/froide,okfse/froide,fin/froide,ryankanno/froide,LilithWittmann/froide,catcosmo/froide,stefanw/froide,stefanw/froide,LilithWittmann/froide,okfse/froide,ryankanno/froide,stefanw/froide,fin/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,catcosmo/froide,ryankanno/froide,CodeforHawaii/froide,catcosmo/froide
froide/publicbody/search_indexes.py
froide/publicbody/search_indexes.py
from django.conf import settings

from haystack import indexes
from haystack import site

from publicbody.models import PublicBody
from helper.searchindex import QueuedRealTimeSearchIndex

PUBLIC_BODY_BOOSTS = getattr(settings, "FROIDE_PUBLIC_BODY_BOOSTS", {})


class PublicBodyIndex(QueuedRealTimeSearchIndex):
    text = indexes.EdgeNgramField(document=True, use_template=True)
    name = indexes.CharField(model_attr='name', boost=1.5)
    jurisdiction = indexes.CharField(model_attr='jurisdiction__name', default='')
    topic_auto = indexes.EdgeNgramField(model_attr='topic_name')
    topic_slug = indexes.CharField(model_attr='topic__slug')
    name_auto = indexes.EdgeNgramField(model_attr='name')
    url = indexes.CharField(model_attr='get_absolute_url')

    def index_queryset(self):
        """Used when the entire index for model is updated."""
        return PublicBody.objects.get_for_search_index()

    def prepare(self, obj):
        data = super(PublicBodyIndex, self).prepare(obj)
        if obj.classification in PUBLIC_BODY_BOOSTS:
            data['boost'] = PUBLIC_BODY_BOOSTS[obj.classification]
            print "Boosting %s at %f" % (obj, data['boost'])
        return data


site.register(PublicBody, PublicBodyIndex)
from django.conf import settings

from haystack import indexes
from haystack import site

from publicbody.models import PublicBody
from helper.searchindex import QueuedRealTimeSearchIndex

PUBLIC_BODY_BOOSTS = getattr(settings, "FROIDE_PUBLIC_BODY_BOOSTS", {})


class PublicBodyIndex(QueuedRealTimeSearchIndex):
    text = indexes.EdgeNgramField(document=True, use_template=True)
    name = indexes.CharField(model_attr='name', boost=1.5)
    jurisdiction = indexes.CharField(model_attr='jurisdiction', default='')
    topic_auto = indexes.EdgeNgramField(model_attr='topic_name')
    topic_slug = indexes.CharField(model_attr='topic__slug')
    name_auto = indexes.EdgeNgramField(model_attr='name')
    url = indexes.CharField(model_attr='get_absolute_url')

    def index_queryset(self):
        """Used when the entire index for model is updated."""
        return PublicBody.objects.get_for_search_index()

    def prepare(self, obj):
        data = super(PublicBodyIndex, self).prepare(obj)
        if obj.classification in PUBLIC_BODY_BOOSTS:
            data['boost'] = PUBLIC_BODY_BOOSTS[obj.classification]
            print "Boosting %s at %f" % (obj, data['boost'])
        return data


site.register(PublicBody, PublicBodyIndex)
mit
Python
3da17a2f61daecc34772ead7e6caffa9da49bf48
Add default values and shebang
hashbangstudio/Python-Minecraft-Examples
06-setPositionFromArgs.py
06-setPositionFromArgs.py
#!/usr/bin/env python
# We have to import the minecraft api module to do anything in the minecraft world
from mcpi.minecraft import *
import sys

# this means that the file can be imported without executing anything in this code block
if __name__ == "__main__":

    """ First thing you do is create a connection to minecraft
        This is like dialling a phone.
        It sets up a communication line between your script and the minecraft world
    """
    # Create a connection to Minecraft
    # Any communication with the world must use this object
    mc = Minecraft.create()

    # Get the current tile/block that the player is located at in the world
    playerPosition = mc.player.getTilePos()

    # create the output message as a string
    message = " you are at (" +str(playerPosition.x)+","+str(playerPosition.y)+","+str(playerPosition.z)+")"
    # print to the python interpreter standard output (terminal or IDLE probably)
    print(message)
    # send message to the minecraft chat
    mc.postToChat(message)

    #Set Default values
    newXposn = 0
    newZposn = 0

    numOfArgs = len(sys.argv)
    if numOfArgs == 3:
        newXposn = int(sys.argv[1])
        newZposn = int(sys.argv[2])
    else:
        print("incorrect number of arguments")
        sys.exit()

    newYposn = mc.getHeight(newXposn, newZposn)
    mc.player.setTilePos(newXposn, newYposn, newZposn)

    # Get the current tile/block that the player is located at in the world
    playerPosition = mc.player.getTilePos()
    message = " you are now at (" +str(playerPosition.x)+","+str(playerPosition.y)+","+str(playerPosition.z)+")"
    print(message)
    mc.postToChat(message)
# We have to import the minecraft api module to do anything in the minecraft world
from mcpi.minecraft import *
import sys

# this means that the file can be imported without executing anything in this code block
if __name__ == "__main__":

    """ First thing you do is create a connection to minecraft
        This is like dialling a phone.
        It sets up a communication line between your script and the minecraft world
    """
    # Create a connection to Minecraft
    # Any communication with the world must use this object
    mc = Minecraft.create()

    # Get the current tile/block that the player is located at in the world
    playerPosition = mc.player.getTilePos()

    # create the output message as a string
    message = " you are at (" +str(playerPosition.x)+","+str(playerPosition.y)+","+str(playerPosition.z)+")"
    # print to the python interpreter standard output (terminal or IDLE probably)
    print(message)
    # send message to the minecraft chat
    mc.postToChat(message)

    numOfArgs = len(sys.argv)
    if numOfArgs == 3:
        newXposn = int(sys.argv[1])
        newZposn = int(sys.argv[2])
    else:
        print("incorrect number of arguments")
        sys.exit()

    newYposn = mc.getHeight(newXposn, newZposn)
    mc.player.setTilePos(newXposn, newYposn, newZposn)

    # Get the current tile/block that the player is located at in the world
    playerPosition = mc.player.getTilePos()
    message = " you are now at (" +str(playerPosition.x)+","+str(playerPosition.y)+","+str(playerPosition.z)+")"
    print(message)
    mc.postToChat(message)
bsd-3-clause
Python
de381a56e87a21da1e82146da01bb546c5094ec4
Print the traceback as well for debugging purposes.
eltoncarr/tubular,eltoncarr/tubular
scripts/asgard-deploy.py
scripts/asgard-deploy.py
#!/usr/bin/env python
import sys
import logging
import traceback
import click
from os import path

# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )

from tubular import asgard

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
    try:
        asgard.deploy(ami_id)
    except Exception, e:
        traceback.print_exc()
        click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
        sys.exit(1)

    sys.exit(0)

if __name__ == "__main__":
    deploy()
#!/usr/bin/env python
import sys
import logging
import click
from os import path

# Add top-level module path to sys.path before importing tubular code.
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )

from tubular import asgard

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
    try:
        asgard.deploy(ami_id)
    except Exception, e:
        click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
        sys.exit(1)

    sys.exit(0)

if __name__ == "__main__":
    deploy()
agpl-3.0
Python
0a81356e0f8011f0764a8c28719d1371e5860656
Make sure create_privatekml mgmt command produces unique names less than 100 chars; fail gracefully if not
Ecotrust/madrona_addons,Ecotrust/madrona_addons
lingcod/layers/management/commands/create_privatekml.py
lingcod/layers/management/commands/create_privatekml.py
from django.core.management.base import BaseCommand, AppCommand
from django.conf import settings
from optparse import make_option
import os
import glob
from lingcod.layers.models import PrivateKml
from django.contrib.auth.models import User, Group

class Command(BaseCommand):
    help = "Populates the PrivateKml table from the PRIVATE_KML_ROOT contents .. a good starting point"
    args = '[optional group name to share all KMLs with]'

    # Validation is called explicitly each time the server is reloaded.
    requires_model_validation = False

    def handle(self, groupname=None, *args, **options):
        for pkml in PrivateKml.objects.all():
            pkml.delete()

        if groupname:
            g = Group.objects.get(name=groupname)

        if not os.path.exists(settings.PRIVATE_KML_ROOT):
            raise Exception("Please create or set up a PRIVATE_KML_ROOT directory (currently set to %s" % settings.PRIVATE_KML_ROOT)

        for d in os.listdir(settings.PRIVATE_KML_ROOT):
            path = os.path.join(settings.PRIVATE_KML_ROOT,d)
            kmls = glob.glob(os.path.join(path,'*.km*'))
            if len(kmls) == 0:
                print "No KML/KMZ found in %s" % path
                continue
            for kml in kmls:
                basename = os.path.basename(kml).split('.')[0]
                privatekml_name = d+'_'+basename
                try:
                    pkml = PrivateKml.objects.create(name=privatekml_name[:99],base_kml=kml)
                    if groupname:
                        pkml.sharing_groups.add(g)
                    print "Created %s from %s" % (pkml,kml)
                except:
                    print "couldn't create privatekml from %s" % s
from django.core.management.base import BaseCommand, AppCommand
from django.conf import settings
from optparse import make_option
import os
import glob
from lingcod.layers.models import PrivateKml
from django.contrib.auth.models import User, Group

class Command(BaseCommand):
    help = "Populates the PrivateKml table from the PRIVATE_KML_ROOT contents .. a good starting point"
    args = '[optional group name to share all KMLs with]'

    # Validation is called explicitly each time the server is reloaded.
    requires_model_validation = False

    def handle(self, groupname=None, *args, **options):
        for pkml in PrivateKml.objects.all():
            pkml.delete()

        if groupname:
            g = Group.objects.get(name=groupname)

        if not os.path.exists(settings.PRIVATE_KML_ROOT):
            raise Exception("Please create or set up a PRIVATE_KML_ROOT directory (currently set to %s" % settings.PRIVATE_KML_ROOT)

        for d in os.listdir(settings.PRIVATE_KML_ROOT):
            path = os.path.join(settings.PRIVATE_KML_ROOT,d)
            kmls = glob.glob(os.path.join(path,'*.km*'))
            if len(kmls) == 0:
                print "No KML/KMZ found in %s" % path
                continue
            for kml in kmls:
                basename = os.path.basename(kml).split('.')[0]
                pkml = PrivateKml.objects.create(name=d+"_"+basename,base_kml=kml)
                if groupname:
                    pkml.sharing_groups.add(g)
                print "Created %s from %s" % (pkml,kml)
bsd-3-clause
Python
9a1921fb27b7073d9c79f6727766eb516478f403
Bump version 0.6.0 (git sync solution)
aldryn/aldryn-client,aldryn/aldryn-client
cmscloud_client/__init__.py
cmscloud_client/__init__.py
# -*- coding: utf-8 -*-
__version__ = '0.6.0'
# -*- coding: utf-8 -*-
__version__ = '0.5.4'
bsd-3-clause
Python
9926cbb1919b96999d479f5a8d67e17ce71a1091
Improve the get_nick a tiny amount
Motoko11/MotoBot
motobot/irc_message.py
motobot/irc_message.py
class IRCMessage:
    """ Class to store and parse an IRC Message. """

    def __init__(self, msg):
        """ Parse a raw IRC message to IRCMessage. """
        self.sender = None
        self.nick = None
        self.command = None
        self.params = []

        self.__parse_msg(msg)

    def __parse_msg(self, msg):
        if msg[0] == ':':
            self.sender, msg = msg[1:].split(' ', 1)
            self.nick = get_nick(self.sender)

        if ' :' in msg:
            msg, trailing = msg.split(' :', 1)
            self.params = msg.split(' ')
            self.params.append(trailing)
        else:
            self.params = msg.split(' ')

        self.command = self.params.pop(0)

    def __repr__(self):
        """ Print the IRCMessage all nice 'n' pretty. """
        return "Sender: {};\nCommand: {};\nParams: {};\n".format(
            self.sender, self.command, self.params)


def action(message):
    """ Make the message an action. """
    return '\u0001ACTION {}\u0001'.format(message)


def get_nick(host):
    """ Get the user's nick from a host. """
    return host.split('!', 1)[0]
class IRCMessage:
    """ Class to store and parse an IRC Message. """

    def __init__(self, msg):
        """ Parse a raw IRC message to IRCMessage. """
        self.sender = None
        self.nick = None
        self.command = None
        self.params = []

        self.__parse_msg(msg)

    def __parse_msg(self, msg):
        if msg[0] == ':':
            self.sender, msg = msg[1:].split(' ', 1)
            self.nick = get_nick(self.sender)

        if ' :' in msg:
            msg, trailing = msg.split(' :', 1)
            self.params = msg.split(' ')
            self.params.append(trailing)
        else:
            self.params = msg.split(' ')

        self.command = self.params.pop(0)

    def __repr__(self):
        """ Print the IRCMessage all nice 'n' pretty. """
        return "Sender: {};\nCommand: {};\nParams: {};\n".format(
            self.sender, self.command, self.params)


def action(message):
    """ Make the message an action. """
    return '\u0001ACTION {}\u0001'.format(message)


def get_nick(host):
    """ Get the user's nick from a host. """
    return host.split('!')[0]
mit
Python
119ce47d9e876c345c2bc44751ccf04f0b226259
Remove lie_system package dependency
MD-Studio/MDStudio,MD-Studio/MDStudio,MD-Studio/MDStudio,MD-Studio/MDStudio,MD-Studio/MDStudio
components/lie_structures/setup.py
components/lie_structures/setup.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# package: lie_docking
# file: setup.py
#
# Part of ‘lie_docking’, a package providing molecular docking functionality
# for the LIEStudio package.
#
# Copyright © 2016 Marc van Dijk, VU University Amsterdam, the Netherlands
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup, find_packages

distribution_name = 'lie_structures'

setup(
    name=distribution_name,
    version=0.1,
    description='LIEStudio structure database module',
    author='Marc van Dijk, VU University, Amsterdam, The Netherlands',
    author_email='[email protected]',
    url='https://github.com/NLeSC/LIEStudio',
    license='Apache Software License 2.0',
    keywords='LIEStudio structures database',
    platforms=['Any'],
    packages=find_packages(),
    package_data={'': ['*.json']},
    py_modules=[distribution_name],
    install_requires=['openbabel'],
    include_package_data=True,
    zip_safe=True,
    entry_points={
        'autobahn.twisted.wamplet': [
            'wamp_services = lie_structures.wamp_services:make'
        ],
    },
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Topic :: Scientific/Engineering :: Chemistry',
        'Operating System :: OS Independent',
        'Intended Audience :: Science/Research',
    ],
)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# package: lie_docking
# file: setup.py
#
# Part of ‘lie_docking’, a package providing molecular docking functionality
# for the LIEStudio package.
#
# Copyright © 2016 Marc van Dijk, VU University Amsterdam, the Netherlands
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup, find_packages

distribution_name = 'lie_structures'

setup(
    name=distribution_name,
    version=0.1,
    description='LIEStudio structure database module',
    author='Marc van Dijk, VU University, Amsterdam, The Netherlands',
    author_email='[email protected]',
    url='https://github.com/NLeSC/LIEStudio',
    license='Apache Software License 2.0',
    keywords='LIEStudio structures database',
    platforms=['Any'],
    packages=find_packages(),
    package_data={'': ['*.json']},
    py_modules=[distribution_name],
    install_requires=['lie_system', 'openbabel'],
    include_package_data=True,
    zip_safe=True,
    entry_points={
        'autobahn.twisted.wamplet': [
            'wamp_services = lie_structures.wamp_services:make'
        ],
    },
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Topic :: Scientific/Engineering :: Chemistry',
        'Operating System :: OS Independent',
        'Intended Audience :: Science/Research',
    ],
)
apache-2.0
Python
eb48fba5b3334437a752681df200c2bbefb0bc18
change font to be purple
allen7wang/musicbox,caitinggui/musicbox,kigawas/musicbox,Catofes/musicbox,kigawas/musicbox,AnyOfYou/musicbox,darknessomi/musicbox
NEMbox/osdlyrics.py
NEMbox/osdlyrics.py
from PyQt4 import QtGui, QtCore, QtDBus
import sys
import os
from multiprocessing import Process


class Lyrics(QtGui.QWidget):
    def __init__(self):
        super(Lyrics, self).__init__()
        self.initUI()

    def initUI(self):
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
        self.resize(900, 150)
        self.text = u"OSD Lyrics for Musicbox"
        self.setWindowTitle("Lyrics")
        self.show()

    @QtCore.pyqtSlot(str)
    def refresh_lyrics(self, text):
        self.text = text
        self.repaint()

    def paintEvent(self, event):
        qp = QtGui.QPainter()
        qp.begin(self)
        self.drawText(event, qp)
        qp.end()

    def drawText(self, event, qp):
        qp.setPen(QtGui.QColor(128, 0, 128))
        qp.setFont(QtGui.QFont('Decorative', 16))
        qp.drawText(event.rect(), QtCore.Qt.AlignCenter, self.text)


def show_lyrics():
    app = QtGui.QApplication(sys.argv)
    # lyrics_receiver = LyricsReceiver()
    lyrics = Lyrics()
    QtDBus.QDBusConnection.sessionBus().registerService('org.musicbox.Bus')
    QtDBus.QDBusConnection.sessionBus().registerObject('/', lyrics, QtDBus.QDBusConnection.ExportAllSlots)
    sys.exit(app.exec_())


def show_lyrics_new_process():
    p = Process(target=show_lyrics)
    p.start()
    # p.join()
from PyQt4 import QtGui, QtCore, QtDBus
import sys
import os
from multiprocessing import Process


class Lyrics(QtGui.QWidget):
    def __init__(self):
        super(Lyrics, self).__init__()
        self.initUI()

    def initUI(self):
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)
        self.resize(900, 150)
        self.text = u"OSD Lyrics for Musicbox"
        self.setWindowTitle("Lyrics")
        self.show()

    @QtCore.pyqtSlot(str)
    def refresh_lyrics(self, text):
        self.text = text
        self.repaint()

    def paintEvent(self, event):
        qp = QtGui.QPainter()
        qp.begin(self)
        self.drawText(event, qp)
        qp.end()

    def drawText(self, event, qp):
        qp.setPen(QtGui.QColor(0, 0, 0))
        qp.setFont(QtGui.QFont('Decorative', 16))
        qp.drawText(event.rect(), QtCore.Qt.AlignCenter, self.text)


def show_lyrics():
    app = QtGui.QApplication(sys.argv)
    # lyrics_receiver = LyricsReceiver()
    lyrics = Lyrics()
    QtDBus.QDBusConnection.sessionBus().registerService('org.musicbox.Bus')
    QtDBus.QDBusConnection.sessionBus().registerObject('/', lyrics, QtDBus.QDBusConnection.ExportAllSlots)
    sys.exit(app.exec_())


def show_lyrics_new_process():
    p = Process(target=show_lyrics)
    p.start()
    # p.join()
mit
Python
c61d4c6df77fe505074c81eebaec938c6716d9ab
Create columns before querying them.
vguzmanp/dataset,reubano/dataset,askebos/dataset,pudo/dataset,okfn/sqlaload,stefanw/dataset,twds/dataset,saimn/dataset
sqlaload/query.py
sqlaload/query.py
import logging
from itertools import count

from sqlalchemy.sql import expression, and_

from sqlaload.schema import _ensure_columns

log = logging.getLogger(__name__)

def resultiter(rp):
    """ SQLAlchemy ResultProxies are not iterable to get a list of dictionaries.
    This is to wrap them. """
    keys = rp.keys()
    while True:
        row = rp.fetchone()
        if row is None:
            break
        yield dict(zip(keys, row))

def find_one(engine, table, **kw):
    res = list(find(engine, table, _limit=1, **kw))
    if not len(res):
        return None
    return res[0]

def find(engine, table, _limit=None, _step=5000, _offset=0,
         order_by=None, **kw):
    _ensure_columns(engine, table, kw)
    if order_by is None:
        order_by = [table.c.id.asc()]
    qargs = []
    try:
        for col, val in kw.items():
            qargs.append(table.c[col]==val)
    except KeyError:
        return

    for i in count():
        qoffset = _offset + (_step * i)
        qlimit = _step
        if _limit is not None:
            qlimit = min(_limit-(_step*i), _step)
        if qlimit <= 0:
            break
        q = table.select(whereclause=and_(*qargs), limit=qlimit,
                         offset=qoffset, order_by=order_by)
        rows = list(resultiter(engine.execute(q)))
        if not len(rows):
            return
        for row in rows:
            yield row

def distinct(engine, table, *columns):
    columns = [table.c[c] for c in columns]
    q = expression.select(columns, distinct=True)
    return list(resultiter(engine.execute(q)))

def all(engine, table):
    return find(engine, table)
import logging
from itertools import count

from sqlalchemy.sql import expression, and_

log = logging.getLogger(__name__)

def resultiter(rp):
    """ SQLAlchemy ResultProxies are not iterable to get a list of dictionaries.
    This is to wrap them. """
    keys = rp.keys()
    while True:
        row = rp.fetchone()
        if row is None:
            break
        yield dict(zip(keys, row))

def find_one(engine, table, **kw):
    res = list(find(engine, table, _limit=1, **kw))
    if not len(res):
        return None
    return res[0]

def find(engine, table, _limit=None, _step=5000, _offset=0,
         order_by=None, **kw):
    if order_by is None:
        order_by = [table.c.id.asc()]
    qargs = []
    try:
        for col, val in kw.items():
            qargs.append(table.c[col]==val)
    except KeyError:
        return

    for i in count():
        qoffset = _offset + (_step * i)
        qlimit = _step
        if _limit is not None:
            qlimit = min(_limit-(_step*i), _step)
        if qlimit <= 0:
            break
        q = table.select(whereclause=and_(*qargs), limit=qlimit,
                         offset=qoffset, order_by=order_by)
        rows = list(resultiter(engine.execute(q)))
        if not len(rows):
            return
        for row in rows:
            yield row

def distinct(engine, table, *columns):
    columns = [table.c[c] for c in columns]
    q = expression.select(columns, distinct=True)
    return list(resultiter(engine.execute(q)))

def all(engine, table):
    return find(engine, table)
mit
Python
0c4e6ff26d716bf20a1a7c36a4e3e363a1101c2a
add forced/default to plexpy.library.stream
pannal/Subliminal.bundle,pannal/Subliminal.bundle,pannal/Subliminal.bundle
Contents/Libraries/Shared/plex/objects/library/stream.py
Contents/Libraries/Shared/plex/objects/library/stream.py
from plex.objects.core.base import Descriptor, Property


class Stream(Descriptor):
    id = Property(type=int)
    index = Property(type=int)

    stream_type = Property('streamType', type=int)
    selected = Property(type=bool)
    forced = Property(type=bool)
    default = Property(type=bool)

    title = Property
    duration = Property(type=int)

    codec = Property
    codec_id = Property('codecID')

    bit_depth = Property('bitDepth', type=int)
    chroma_subsampling = Property('chromaSubsampling')
    color_space = Property('colorSpace')

    width = Property(type=int)
    height = Property(type=int)

    bitrate = Property(type=int)
    bitrate_mode = Property('bitrateMode')

    channels = Property(type=int)
    sampling_rate = Property('samplingRate', type=int)

    frame_rate = Property('frameRate')
    profile = Property
    scan_type = Property('scanType')

    language = Property('language')
    language_code = Property('languageCode')

    bvop = Property(type=int)
    gmc = Property(type=int)
    level = Property(type=int)
    qpel = Property(type=int)

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Stream'):
            _, obj = Stream.construct(client, genre, child=True)

            items.append(obj)

        return [], items
from plex.objects.core.base import Descriptor, Property


class Stream(Descriptor):
    id = Property(type=int)
    index = Property(type=int)

    stream_type = Property('streamType', type=int)
    selected = Property(type=bool)

    title = Property
    duration = Property(type=int)

    codec = Property
    codec_id = Property('codecID')

    bit_depth = Property('bitDepth', type=int)
    chroma_subsampling = Property('chromaSubsampling')
    color_space = Property('colorSpace')

    width = Property(type=int)
    height = Property(type=int)

    bitrate = Property(type=int)
    bitrate_mode = Property('bitrateMode')

    channels = Property(type=int)
    sampling_rate = Property('samplingRate', type=int)

    frame_rate = Property('frameRate')
    profile = Property
    scan_type = Property('scanType')

    language = Property('language')
    language_code = Property('languageCode')

    bvop = Property(type=int)
    gmc = Property(type=int)
    level = Property(type=int)
    qpel = Property(type=int)

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Stream'):
            _, obj = Stream.construct(client, genre, child=True)

            items.append(obj)

        return [], items
mit
Python
e92a612ba231eebb8dbe7ac42d24ac002a89fbe1
add docstring
mhbu50/frappe,saurabh6790/frappe,frappe/frappe,saurabh6790/frappe,yashodhank/frappe,almeidapaulopt/frappe,saurabh6790/frappe,yashodhank/frappe,yashodhank/frappe,saurabh6790/frappe,frappe/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,StrellaGroup/frappe,mhbu50/frappe,adityahase/frappe,mhbu50/frappe,frappe/frappe,yashodhank/frappe,adityahase/frappe,adityahase/frappe,StrellaGroup/frappe,mhbu50/frappe,adityahase/frappe
frappe/utils/logger.py
frappe/utils/logger.py
# imports - compatibility imports
from __future__ import unicode_literals

# imports - standard imports
import logging
import os
from logging.handlers import RotatingFileHandler

# imports - third party imports
from six import text_type

# imports - module imports
import frappe

default_log_level = logging.DEBUG
site = getattr(frappe.local, 'site', None)


def get_logger(module, with_more_info=False, _site=None):
    """Application Logger for your given module

    Args:
        module (str): Name of your logger and consequently your log file.
        with_more_info (bool, optional): Will log the form dict using the SiteContextFilter. Defaults to False.
        _site (str, optional): If set, validates the current site context with the passed value.
            The `frappe.web` logger uses this to determine that the application is logging
            information related to the logger called. Defaults to None.

    Returns:
        <class 'logging.Logger'>: Returns a Python logger object with Site and Bench level logging capabilities.
    """
    global site

    def allow_site():
        allow = False
        if site:
            allow = True
        if _site:
            allow = site == _site
        return allow

    if module in frappe.loggers:
        return frappe.loggers[module]

    if not module:
        module = "frappe"
        with_more_info = True

    logfile = module + '.log'
    site = getattr(frappe.local, 'site', None)
    LOG_FILENAME = os.path.join('..', 'logs', logfile)

    logger = logging.getLogger(module)
    logger.setLevel(frappe.log_level or default_log_level)
    logger.propagate = False

    formatter = logging.Formatter('%(asctime)s %(levelname)s %(name)s %(message)s')
    handler = RotatingFileHandler(LOG_FILENAME, maxBytes=100_000, backupCount=20)
    logger.addHandler(handler)

    if allow_site():
        SITELOG_FILENAME = os.path.join(site, 'logs', logfile)
        site_handler = RotatingFileHandler(SITELOG_FILENAME, maxBytes=100_000, backupCount=20)
        site_handler.setFormatter(formatter)
        logger.addHandler(site_handler)

    if with_more_info:
        handler.addFilter(SiteContextFilter())

    handler.setFormatter(formatter)
    frappe.loggers[module] = logger

    return logger


class SiteContextFilter(logging.Filter):
    """This is a filter which injects request information (if available) into the log."""
    def filter(self, record):
        if "Form Dict" not in text_type(record.msg):
            record.msg = text_type(record.msg) + "\nSite: {0}\nForm Dict: {1}".format(site, getattr(frappe.local, 'form_dict', None))
        return True


def set_log_level(level):
    '''Use this method to set log level to something other than the default DEBUG'''
    frappe.log_level = getattr(logging, (level or '').upper(), None) or default_log_level
    frappe.loggers = {}
# imports - compatibility imports
from __future__ import unicode_literals

# imports - standard imports
import logging
import os
from logging.handlers import RotatingFileHandler

# imports - third party imports
from six import text_type

# imports - module imports
import frappe

default_log_level = logging.DEBUG
site = getattr(frappe.local, 'site', None)


def get_logger(module, with_more_info=False, _site=None):
    global site

    if module in frappe.loggers:
        return frappe.loggers[module]

    if not module:
        module = "frappe"
        with_more_info = True

    logfile = module + '.log'
    site = getattr(frappe.local, 'site', None)
    LOG_FILENAME = os.path.join('..', 'logs', logfile)

    logger = logging.getLogger(module)
    logger.setLevel(frappe.log_level or default_log_level)
    logger.propagate = False

    formatter = logging.Formatter('%(asctime)s %(levelname)s %(name)s %(message)s')
    handler = RotatingFileHandler(LOG_FILENAME, maxBytes=100_000, backupCount=20)
    logger.addHandler(handler)

    # if site == _site:
    SITELOG_FILENAME = os.path.join(site, 'logs', logfile)
    site_handler = RotatingFileHandler(SITELOG_FILENAME, maxBytes=100_000, backupCount=20)
    site_handler.setFormatter(formatter)
    logger.addHandler(site_handler)

    if with_more_info:
        handler.addFilter(SiteContextFilter())

    handler.setFormatter(formatter)
    frappe.loggers[module] = logger

    return logger


class SiteContextFilter(logging.Filter):
    """This is a filter which injects request information (if available) into the log."""
    def filter(self, record):
        if "Form Dict" not in text_type(record.msg):
            record.msg = text_type(record.msg) + "\nSite: {0}\nForm Dict: {1}".format(site, getattr(frappe.local, 'form_dict', None))
        return True


def set_log_level(level):
    '''Use this method to set log level to something other than the default DEBUG'''
    frappe.log_level = getattr(logging, (level or '').upper(), None) or default_log_level
    frappe.loggers = {}
mit
Python
24fbe55a3517e50f4d158bbb7b8857f8f10dc148
Use argparse to parse julia-py arguments
JuliaLang/pyjulia,JuliaPy/pyjulia,JuliaPy/pyjulia
src/julia/julia_py.py
src/julia/julia_py.py
""" Launch Julia through PyJulia. """ from __future__ import print_function, absolute_import import argparse import os import sys from .api import LibJulia from .core import enable_debug from .tools import julia_py_executable def julia_py(julia, pyjulia_debug, jl_args): if pyjulia_debug: enable_debug() os.environ["_PYJULIA_JULIA_PY"] = julia_py_executable() os.environ["_PYJULIA_PATCH_JL"] = os.path.join( os.path.dirname(os.path.realpath(__file__)), "patch.jl" ) api = LibJulia.load(julia=julia) api.init_julia(jl_args) code = 1 if api.jl_eval_string(b"""Base.include(Main, ENV["_PYJULIA_PATCH_JL"])"""): if api.jl_eval_string(b"Base.invokelatest(Base._start)"): code = 0 api.jl_atexit_hook(code) sys.exit(code) class CustomFormatter( argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter ): pass def parse_args(args, **kwargs): options = dict( prog="julia-py", usage="%(prog)s [--julia JULIA] [--pyjulia-debug] [<julia arguments>...]", formatter_class=CustomFormatter, description=__doc__, ) options.update(kwargs) parser = argparse.ArgumentParser(**options) parser.add_argument( "--julia", default="julia", help=""" Julia `executable` used by PyJulia. """, ) parser.add_argument( "--pyjulia-debug", action="store_true", help=""" Print PyJulia's debugging messages to standard error. """, ) ns, jl_args = parser.parse_known_args(args) ns.jl_args = jl_args return ns def main(args=None, **kwargs): julia_py(**vars(parse_args(args, **kwargs))) if __name__ == "__main__": main()
from __future__ import print_function, absolute_import

from argparse import Namespace
import os
import sys

from .api import LibJulia
from .tools import julia_py_executable


def parse_args(args):
    ns = Namespace(julia="julia")
    jl_args = list(args)
    if len(jl_args) >= 2 and jl_args[0] == "--julia":
        ns.julia = jl_args[1]
        jl_args = jl_args[2:]
    elif len(jl_args) >= 1 and jl_args[0].startswith("--julia="):
        ns.julia = jl_args[0][len("--julia=") :]
        jl_args = jl_args[1:]
    return ns, jl_args


def main(args=None):
    if args is None:
        args = sys.argv[1:]
    ns, jl_args = parse_args(args)

    os.environ["_PYJULIA_JULIA_PY"] = julia_py_executable()
    os.environ["_PYJULIA_PATCH_JL"] = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "patch.jl"
    )

    api = LibJulia.load(julia=ns.julia)
    api.init_julia(jl_args)

    code = 1
    if api.jl_eval_string(b"""Base.include(Main, ENV["_PYJULIA_PATCH_JL"])"""):
        if api.jl_eval_string(b"Base.invokelatest(Base._start)"):
            code = 0
    api.jl_atexit_hook(code)
    sys.exit(code)


if __name__ == "__main__":
    main()
mit
Python
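The rewrite above leans on argparse's parse_known_args(), which collects the flags it knows and hands everything else back untouched; a small standalone sketch of that split:

    import argparse

    parser = argparse.ArgumentParser(prog="julia-py")
    parser.add_argument("--julia", default="julia")
    # Unknown arguments are returned rather than rejected, so they can be
    # forwarded verbatim to the julia executable.
    ns, jl_args = parser.parse_known_args(["--julia", "julia-1.3", "-e", "1+1"])
    print(ns.julia)   # -> julia-1.3
    print(jl_args)    # -> ['-e', '1+1']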
59e7fc5c924ebf8af66e0aeef990da55e84d3f9e
update to 3.30.1
DeadSix27/python_cross_compile_script
packages/dependencies/sqlite3.py
packages/dependencies/sqlite3.py
{
    'repo_type' : 'archive',
    'custom_cflag' : '-O2', # make sure we build it without -ffast-math
    'download_locations' : [
        {
            'url' : 'https://www.sqlite.org/2019/sqlite-autoconf-3300100.tar.gz',
            'hashes' : [ { 'type' : 'sha256', 'sum' : '8c5a50db089bd2a1b08dbc5b00d2027602ca7ff238ba7658fabca454d4298e60' } ],
        },
        {
            'url' : 'https://fossies.org/linux/misc/sqlite-autoconf-3300100.tar.gz',
            'hashes' : [ { 'type' : 'sha256', 'sum' : '8c5a50db089bd2a1b08dbc5b00d2027602ca7ff238ba7658fabca454d4298e60' } ],
        },
    ],
    'cflag_addition' : '-fexceptions -DSQLITE_ENABLE_COLUMN_METADATA=1 -DSQLITE_USE_MALLOC_H=1 -DSQLITE_USE_MSIZE=1 -DSQLITE_DISABLE_DIRSYNC=1 -DSQLITE_ENABLE_RTREE=1 -fno-strict-aliasing',
    'configure_options': '--host={target_host} --prefix={target_prefix} --disable-shared --enable-static --enable-threadsafe --disable-editline --enable-readline --enable-json1 --enable-fts5 --enable-session',
    'depends_on': ( 'zlib', ),
    'update_check' : { 'url' : 'https://www.sqlite.org/index.html', 'type' : 'httpregex', 'regex' : r'<a href="releaselog/.*\.html">Version (?P<version_num>[\d.]+)<\/a>' },
    '_info' : { 'version' : '3.30.1', 'fancy_name' : 'libsqlite3' },
}
{
    'repo_type' : 'archive',
    'custom_cflag' : '-O2', # make sure we build it without -ffast-math
    'download_locations' : [
        {
            'url' : 'https://www.sqlite.org/2019/sqlite-autoconf-3300000.tar.gz',
            'hashes' : [ { 'type' : 'sha256', 'sum' : 'e0a8cf4c7a87455e55e10413d16f358ca121ccec687fe1301eac95e2d340fc58' } ],
        },
        {
            'url' : 'https://fossies.org/linux/misc/sqlite-autoconf-3300000.tar.gz',
            'hashes' : [ { 'type' : 'sha256', 'sum' : 'e0a8cf4c7a87455e55e10413d16f358ca121ccec687fe1301eac95e2d340fc58' } ],
        },
    ],
    'cflag_addition' : '-fexceptions -DSQLITE_ENABLE_COLUMN_METADATA=1 -DSQLITE_USE_MALLOC_H=1 -DSQLITE_USE_MSIZE=1 -DSQLITE_DISABLE_DIRSYNC=1 -DSQLITE_ENABLE_RTREE=1 -fno-strict-aliasing',
    'configure_options': '--host={target_host} --prefix={target_prefix} --disable-shared --enable-static --enable-threadsafe --disable-editline --enable-readline --enable-json1 --enable-fts5 --enable-session',
    'depends_on': ( 'zlib', ),
    'update_check' : { 'url' : 'https://www.sqlite.org/index.html', 'type' : 'httpregex', 'regex' : r'<a href="releaselog/.*\.html">Version (?P<version_num>[\d.]+)<\/a>' },
    '_info' : { 'version' : '3.30.0', 'fancy_name' : 'libsqlite3' },
}
mpl-2.0
Python
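Each download location above pairs a URL with a sha256 digest; verifying such a digest before unpacking takes a few lines with hashlib (the local file name is an assumption):

    import hashlib

    expected = '8c5a50db089bd2a1b08dbc5b00d2027602ca7ff238ba7658fabca454d4298e60'
    with open('sqlite-autoconf-3300100.tar.gz', 'rb') as tarball:  # assumed local path
        digest = hashlib.sha256(tarball.read()).hexdigest()
    assert digest == expected, 'checksum mismatch'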
6ad4796030aab2f6dbf8389b4030007d0fcf8761
Update to test for mount setup
Guokr1991/POCS,joshwalawender/POCS,AstroHuntsman/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,Guokr1991/POCS,Guokr1991/POCS,Guokr1991/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,joshwalawender/POCS,AstroHuntsman/POCS,fmin2958/POCS,joshwalawender/POCS,fmin2958/POCS,fmin2958/POCS
panoptes/test/mount/test_ioptron.py
panoptes/test/mount/test_ioptron.py
from nose.tools import raises

import panoptes
from panoptes.mount.ioptron import Mount


class TestIOptron():

    @raises(AssertionError)
    def test_no_config_no_commands(self):
        """ Mount needs a config """
        mount = Mount()

    @raises(AssertionError)
    def test_config_bad_commands(self):
        """ Passes in a default config but blank commands, which should error """
        mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})

    def test_config_auto_commands(self):
        """ Passes in config like above, but no commands, so they should read from defaults """
        mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
from nose.tools import raises

import panoptes
from panoptes.mount.ioptron import Mount


class TestIOptron():

    @raises(AssertionError)
    def test_no_config_no_commands(self):
        """ Mount needs a config """
        mount = Mount()

    @raises(AssertionError)
    def test_config_no_commands(self):
        """ """
        mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands=dict())
mit
Python
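The tests above rely on nose's @raises decorator, which passes a test only when the named exception is actually raised; a self-contained sketch of the semantics:

    from nose.tools import raises

    @raises(ZeroDivisionError)
    def test_divide_by_zero():
        1 / 0  # the test passes because this raises ZeroDivisionError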
4f0e0d4d92301dea408925d99001913e76a15ee1
Update filterscan.py
galkan/flashlight
lib/filterscan.py
lib/filterscan.py
try:
    import os
    import subprocess
    from lib.core.core import Core
    from lib.filter.filter import Filter
except ImportError, err:
    from lib.core.core import Core
    Core.print_error(err)


class FilterScan(Filter):

    def __init__(self, args):
        self.__args = args
        Filter.__init__(self, [self.__args.pcap], self.__args, "filter")

    def __run_cmd(self, cmd, file_name, result_set, logger):
        output_file = "{0}{1}_{2}.txt".format(self._output_dir, file_name, os.path.basename(self.__args.pcap))
        result_file = open(output_file, "w")
        logger._logging("Filter: {0} parsing".format(file_name))
        proc = subprocess.Popen([cmd], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,)
        if isinstance(result_set, (list, tuple)):
            [ result_set.append(line) for line in iter(proc.stdout.readline, '') if line not in result_set ]
        else:
            for line in iter(proc.stdout.readline, ''):
                try:
                    result_set[line.rstrip()] += 1
                except:
                    result_set[line.rstrip()] = 1
        if isinstance(result_set, (list, tuple)):
            if len(result_set) > 10:
                result_file.write("".join(result_set[1:10]))
            else:
                result_file.write("".join(result_set))
        else:
            for counter, value in enumerate(sorted(result_set, key=result_set.get, reverse=True)):
                if counter == 10:
                    break
                else:
                    result_file.write("{0} {1}\n".format(result_set[value], value))

    def _run(self, logger):
        logger._logging("START: Filter pcap file")
        for file_name, tshark_cmd in self._filter_commands.iteritems():
            result_set = {} if file_name.startswith("top10") else []
            self.__run_cmd(tshark_cmd, file_name, result_set, logger)
        logger._logging("STOP: Filter pcap file")
        logger._logging("Finished Filtering. Results saved in {0} folder".format(self._output_dir))
try:
    import subprocess
    from lib.core.core import Core
    from lib.filter.filter import Filter
except ImportError, err:
    from lib.core.core import Core
    Core.print_error(err)


class FilterScan(Filter):

    def __init__(self, args):
        Filter.__init__(self, [args.pcap], args, "filter")
        print self._output_dir

    def __run_cmd(self, cmd, file_name, result_set):
        output_file = "{0}{1}.txt".format(self._output_dir, file_name)
        result_file = open(output_file, "w")
        proc = subprocess.Popen([cmd], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,)
        if isinstance(result_set, (list, tuple)):
            for line in iter(proc.stdout.readline, ''):
                if line not in result_set:
                    result_set.append(line)
        else:
            for line in iter(proc.stdout.readline, ''):
                try:
                    result_set[line.rstrip()] += 1
                except:
                    result_set[line.rstrip()] = 1
        if isinstance(result_set, (list, tuple)):
            if len(result_set) > 10:
                result_file.write("".join(result_set[1:10]))
            else:
                result_file.write("".join(result_set))
        else:
            for counter, value in enumerate(sorted(result_set, key=result_set.get, reverse=True)):
                if counter == 10:
                    break
                else:
                    print result_set[value], value

    def _run(self, logger):
        for file_name, tshark_cmd in self._filter_commands.iteritems():
            result_set = None
            if file_name.startswith("top10"):
                result_set = {}
            else:
                result_set = []
            self.__run_cmd(tshark_cmd,file_name, result_set)
mit
Python
c52d056091acf49624450cc2d1e01cbf0900a08f
Add a profiling option
mouton5000/DiscreteEventApplicationEditor
main.py
main.py
#!/usr/bin/env python

import sys

from PyQt4.QtGui import QApplication as QApp

from gui.EditorWindow import MainWindow


def main():
    import grammar.grammars
    grammar.grammars.compileGrammars()
    app = QApp(sys.argv)
    ex = MainWindow()
    sys.exit(app.exec_())


if __name__ == '__main__':
    profile = False
    if profile:
        import cProfile
        cProfile.run('main()')
    else:
        main()
#!/usr/bin/env python

import sys

from PyQt4.QtGui import QApplication as QApp

from gui.EditorWindow import MainWindow


def main():
    import grammar.grammars
    grammar.grammars.compileGrammars()
    app = QApp(sys.argv)
    ex = MainWindow()
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
mit
Python
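The profile flag above funnels the entry point through cProfile; the same pattern works for any callable, and pstats can sort the report afterwards — a sketch:

    import cProfile
    import pstats

    def work():
        sum(i * i for i in range(10 ** 5))

    cProfile.run('work()', 'profile.out')   # dump raw timing stats to a file
    pstats.Stats('profile.out').sort_stats('cumulative').print_stats(5)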
d951b11e9991c021e631299f0e22da8eb4c7d850
comment out post-checkout undo demonstration
lancekindle/gitTA,lancekindle/gitTA
main.py
main.py
# this is the main file that get called
import os
import sys
import gitTA as git
import colorama
from colorama import Fore, Back
# add color output to terminal: we want anything printed to be VERY visible to user
colorama.init()  # called so that windows colors work

''' modify this file! When git runs certain commands, it will run THIS main.py
which will trigger the functions you've decorated here with gitta.listen('event-name')
your methods can listen for the following events:
pre-push, pre-commit,  # pre-x methods can be aborted by raising an exception
post-commit, post-checkout, post-merge
'''
# pre-* events can be aborted by raising an exception ???


@git.listen('pre-push')
def prepush(*args, **kwargs):
    print(Fore.GREEN)  # set so that ALL next prints will be green
    print(args, kwargs)


@git.listen('pre-commit')
def precommit(*args, **kwargs):
    print(Fore.GREEN)
    print(args, kwargs)


@git.listen('post-commit')
def postcommit(*args, **kwargs):
    print(Fore.GREEN)
    print(args, kwargs)


@git.listen('post-checkout')
def postcheckout(*args, **kwargs):
    print(Fore.GREEN)  # set so that ALL next prints will be green
    print(args, kwargs)
    # branches = git.Branch()
    # branches.undo_checkout(*args, **kwargs)


@git.listen('post-merge')
def postmerge(*args, **kwargs):
    print(args, kwargs)


if __name__ == '__main__':
    git.trigger(45, event='post-checkout')  # example of what might get passed to postcheckout

# the garbled message that appears before (45, ) is the Fore.GREEN. On normal terminals this garbled output will NOT appear
# ['.gitta/py/main.py', 'pre-push', 'origin', 'https://github.com/lancekindle/test.git']
# ['.gitta/py/main.py', 'pre-commit']
# ['.gitta/py/main.py', 'post-commit']
# this is the main file that get called
import os
import sys
import gitTA as git
import colorama
from colorama import Fore, Back
# add color output to terminal: we want anything printed to be VERY visible to user
colorama.init()  # called so that windows colors work

''' modify this file! When git runs certain commands, it will run THIS main.py
which will trigger the functions you've decorated here with gitta.listen('event-name')
your methods can listen for the following events:
pre-push, pre-commit,  # pre-x methods can be aborted by raising an exception
post-commit, post-checkout, post-merge
'''
# pre-* events can be aborted by raising an exception ???


@git.listen('pre-push')
def prepush(*args, **kwargs):
    print(Fore.GREEN)  # set so that ALL next prints will be green
    print(args, kwargs)


@git.listen('pre-commit')
def precommit(*args, **kwargs):
    print(Fore.GREEN)
    print(args, kwargs)


@git.listen('post-commit')
def postcommit(*args, **kwargs):
    print(Fore.GREEN)
    print(args, kwargs)


@git.listen('post-checkout')
def postcheckout(*args, **kwargs):
    print(Fore.GREEN)  # set so that ALL next prints will be green
    print(args, kwargs)
    branches = git.Branch()
    branches.undo_checkout(*args, **kwargs)


@git.listen('post-merge')
def postmerge(*args, **kwargs):
    print(args, kwargs)


if __name__ == '__main__':
    git.trigger(45, event='post-checkout')  # example of what might get passed to postcheckout

# the garbled message that appears before (45, ) is the Fore.GREEN. On normal terminals this garbled output will NOT appear
# ['.gitta/py/main.py', 'pre-push', 'origin', 'https://github.com/lancekindle/test.git']
# ['.gitta/py/main.py', 'pre-commit']
# ['.gitta/py/main.py', 'post-commit']
mit
Python
1624504bd966eaf47698938e387a58dd14738a76
add warnings about deprecation of compiler specific template tags
paera/django-static-precompiler,liumengjun/django-static-precompiler,liumengjun/django-static-precompiler,liumengjun/django-static-precompiler,liumengjun/django-static-precompiler,jaheba/django-static-precompiler,liumengjun/django-static-precompiler,jaheba/django-static-precompiler,paera/django-static-precompiler,jaheba/django-static-precompiler,paera/django-static-precompiler,paera/django-static-precompiler,jaheba/django-static-precompiler
static_precompiler/templatetags/compile_static.py
static_precompiler/templatetags/compile_static.py
import six
import warnings

from django.template import Library
from django.templatetags.static import static

from static_precompiler.settings import PREPEND_STATIC_URL, USE_CACHE, CACHE_TIMEOUT
from static_precompiler.utils import compile_static, get_compiler_by_name, get_cache_key, get_hexdigest, get_cache
from static_precompiler.templatetags.base import container_tag

register = Library()


@register.simple_tag(name="compile")
def compile_tag(source_path, compiler=None):
    if compiler:
        compiled = compiler.compile(source_path)
    else:
        compiled = compile_static(source_path)
    if PREPEND_STATIC_URL:
        compiled = static(compiled)
    return compiled


@container_tag(register)
def inlinecompile(nodelist, context, compiler):
    source = nodelist.render(context)
    if isinstance(compiler, six.string_types):
        compiler = get_compiler_by_name(compiler)
    if USE_CACHE:
        cache_key = get_cache_key("{0}.{1}".format(
            compiler.__class__.__name__,
            get_hexdigest(source)
        ))
        cache = get_cache()
        cached = cache.get(cache_key, None)
        if cached is not None:
            return cached
        output = compiler.compile_source(source)
        cache.set(cache_key, output, CACHE_TIMEOUT)
        return output
    return compiler.compile_source(source)


def _warn(old, new):
    warnings.warn(
        "{%% %s %%} tag has been deprecated, use {%% %s %%} "
        "from `compile_static` template tag library instead." % (old, new),
        UserWarning,
    )


def register_compiler_tags(register, compiler):

    @register.simple_tag(name=compiler.name)
    def tag(source_path):
        _warn(compiler.name, 'compile')
        return compile_tag(source_path, compiler)

    @container_tag(register, name="inline" + compiler.name)
    def inline_tag(nodelist, context):
        _warn('inline%s' % compiler.name, 'inlinecompile "%s"' % compiler.name)
        return inlinecompile(nodelist, context, compiler)
import six

from django.template import Library
from django.templatetags.static import static

from static_precompiler.settings import PREPEND_STATIC_URL, USE_CACHE, CACHE_TIMEOUT
from static_precompiler.utils import compile_static, get_compiler_by_name, get_cache_key, get_hexdigest, get_cache
from static_precompiler.templatetags.base import container_tag

register = Library()


@register.simple_tag(name="compile")
def compile_tag(source_path, compiler=None):
    if compiler:
        compiled = compiler.compile(source_path)
    else:
        compiled = compile_static(source_path)
    if PREPEND_STATIC_URL:
        compiled = static(compiled)
    return compiled


@container_tag(register)
def inlinecompile(nodelist, context, compiler):
    source = nodelist.render(context)
    if isinstance(compiler, six.string_types):
        compiler = get_compiler_by_name(compiler)
    if USE_CACHE:
        cache_key = get_cache_key("{0}.{1}".format(
            compiler.__class__.__name__,
            get_hexdigest(source)
        ))
        cache = get_cache()
        cached = cache.get(cache_key, None)
        if cached is not None:
            return cached
        output = compiler.compile_source(source)
        cache.set(cache_key, output, CACHE_TIMEOUT)
        return output
    return compiler.compile_source(source)


def register_compiler_tags(register, compiler):

    @register.simple_tag(name=compiler.name)
    def tag(source_path):
        return compile_tag(source_path, compiler)

    @container_tag(register, name="inline" + compiler.name)
    def inline_tag(nodelist, context):
        return inlinecompile(nodelist, context, compiler)
mit
Python
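The _warn() helper above emits a plain UserWarning; adding stacklevel (not used in this commit) is the usual refinement so the warning points at the caller of the deprecated tag — a sketch:

    import warnings

    def _warn(old, new):
        warnings.warn(
            "{%% %s %%} tag has been deprecated, use {%% %s %%} instead." % (old, new),
            UserWarning,
            stacklevel=2,  # attribute the warning to the caller, not to _warn itself
        )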
437643d0f0680470b52ce893555df5dac17bdca1
use selenium for loading js content
Shadi-A/ExpediaCrawler
main.py
main.py
import time

from bs4 import BeautifulSoup
from selenium import webdriver

browser = webdriver.Firefox()
ffResults = browser.get("https://www.expedia.com/Flights-Search?trip=roundtrip&leg1=from:Hamburg,%20Germany%20(HAM-All%20Airports),to:Amman,%20Jordan%20(AMM-Queen%20Alia%20Intl.),departure:03/08/2017TANYT&leg2=from:Amman,%20Jordan%20(AMM-Queen%20Alia%20Intl.),to:Hamburg,%20Germany%20(HAM-All%20Airports),departure:03/24/2017TANYT&passengers=adults:2,children:0,seniors:0,infantinlap:Y&mode=search")
time.sleep(15)
full_content = browser.execute_script("return document.getElementsByTagName('html')[0].innerHTML")
browser.quit()

soup = BeautifulSoup(full_content, "lxml" )
print(soup.find_all('span', class_='dollars'))

#for dollar in dollars_copy:
#    print(dollar.text)
#print(dollars)
#print(result)
import urllib.request

result=urllib.request.urlopen("https://www.expedia.de/Flights-Search?trip=roundtrip&leg1=from:Hamburg,%20Deutschland%20(HAM-Alle%20Flugh%C3%A4fen),to:Amman,%20Jordanien%20(AMM-Queen%20Alia%20Intl.),departure:08.03.2017TANYT&leg2=from:Amman,%20Jordanien%20(AMM-Queen%20Alia%20Intl.),to:Hamburg,%20Deutschland%20(HAM-Alle%20Flugh%C3%A4fen),departure:26.03.2017TANYT&passengers=children:0,adults:2,seniors:0,infantinlap:Y&mode=search").read()
print(result)
mit
Python
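The new scraper sleeps a fixed 15 seconds while the JavaScript renders; Selenium's explicit waits are the usual, more robust alternative — a sketch reusing the browser created above (the CSS selector is an assumption based on the span lookup in that code):

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    # Block until a price element appears (up to 30 s) instead of sleeping.
    WebDriverWait(browser, 30).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, "span.dollars"))  # assumed selector
    )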
24d4fee92c1c2ff4bac1fe09d9b436748234a48c
Add argument for execution of defective server.
Solucionamos/dummybmc
main.py
main.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8

"""
Main script.
Executes the XML Server implementation with an HTTP connection and default parameters.
"""

import sys
import argparse

from server import xml_server, defective_servers
from connection import http_connection

parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=int, default=8080, help="server's HTTP port")
parser.add_argument('--sensordata', type=str, default='server/sensor_data.csv', help="sensor data file")
parser.add_argument('--randomloss', action='store_true')

if __name__ == '__main__':
    args = parser.parse_args()
    if args.randomloss:
        server = defective_servers.RandomLossXMLServer(args.sensordata)
    else:
        server = xml_server.XMLServer(args.sensordata)
    connection = http_connection.HttpConnection(server, port=args.port)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8

"""
Main script.
Executes the XML Server implementation with an HTTP connection and default parameters.
"""

import sys
import argparse

from server import xml_server
from connection import http_connection

parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=int, default=8080, help="server's HTTP port")
parser.add_argument('--sensordata', type=str, default='server/sensor_data.csv', help="sensor data file")

if __name__ == '__main__':
    args = parser.parse_args()
    server = xml_server.XMLServer(args.sensordata)
    connection = http_connection.HttpConnection(server, port=args.port)
apache-2.0
Python
52c2205804d8dc38447bca1ccbf5599e00cd1d7b
Rename user_id config key to admin_user_id
alvarogzp/telegram-bot,alvarogzp/telegram-bot
main.py
main.py
#!/usr/bin/env python3

import requests

CONFIG_DIR = "config"


class Bot:
    def __init__(self):
        self.config = Config(CONFIG_DIR)
        self.api = TelegramBotApi(self.config.get_auth_token())

    def run(self):
        self.api.send_message(self.config.get_admin_user_id(), "test")


class TelegramBotApi:
    def __init__(self, auth_token):
        self.base_url = "https://api.telegram.org/bot" + auth_token + "/"

    def send_message(self, chat_id, text):
        self.__send_request("sendMessage", chat_id=chat_id, text=text)

    def __send_request(self, command, **params):
        requests.get(self.base_url + command, params=params)


class Config:
    def __init__(self, config_dir):
        self.config_dir = config_dir + "/"

    def get_auth_token(self):
        return self.__get_config_value("auth_token")

    def get_admin_user_id(self):
        return self.__get_config_value("admin_user_id")

    def __get_config_value(self, config_key):
        return open(self.config_dir + config_key).read().strip()


if __name__ == "__main__":
    Bot().run()
#!/usr/bin/env python3

import requests

CONFIG_DIR = "config"


class Bot:
    def __init__(self):
        self.config = Config(CONFIG_DIR)
        self.api = TelegramBotApi(self.config.get_auth_token())

    def run(self):
        self.api.send_message(self.config.get_user_id(), "test")


class TelegramBotApi:
    def __init__(self, auth_token):
        self.base_url = "https://api.telegram.org/bot" + auth_token + "/"

    def send_message(self, chat_id, text):
        self.__send_request("sendMessage", chat_id=chat_id, text=text)

    def __send_request(self, command, **params):
        requests.get(self.base_url + command, params=params)


class Config:
    def __init__(self, config_dir):
        self.config_dir = config_dir + "/"

    def get_auth_token(self):
        return self.__get_config_value("auth_token")

    def get_user_id(self):
        return self.__get_config_value("user_id")

    def __get_config_value(self, config_key):
        return open(self.config_dir + config_key).read().strip()


if __name__ == "__main__":
    Bot().run()
agpl-3.0
Python
8cbe375b478764f05e67b3d5600ca51bbd5b5c48
enable 'inline_defnode_calls' optimisation for benchmarks (even though they don't benefit currently)
andreasvc/cython,mrGeen/cython,fabianrost84/cython,slonik-az/cython,ChristopherHogan/cython,mcanthony/cython,c-blake/cython,scoder/cython,da-woods/cython,mrGeen/cython,madjar/cython,madjar/cython,scoder/cython,encukou/cython,da-woods/cython,mrGeen/cython,JelleZijlstra/cython,madjar/cython,fabianrost84/cython,hhsprings/cython,JelleZijlstra/cython,fperez/cython,ABcDexter/cython,dahebolangkuan/cython,fabianrost84/cython,ABcDexter/cython,dahebolangkuan/cython,c-blake/cython,c-blake/cython,ABcDexter/cython,roxyboy/cython,acrispin/cython,cython/cython,cython/cython,hhsprings/cython,hickford/cython,andreasvc/cython,hickford/cython,c-blake/cython,JelleZijlstra/cython,acrispin/cython,roxyboy/cython,fperez/cython,da-woods/cython,da-woods/cython,acrispin/cython,marscher/cython,hhsprings/cython,ChristopherHogan/cython,encukou/cython,mcanthony/cython,fabianrost84/cython,achernet/cython,encukou/cython,ABcDexter/cython,mrGeen/cython,mcanthony/cython,marscher/cython,roxyboy/cython,andreasvc/cython,hhsprings/cython,marscher/cython,fperez/cython,hickford/cython,encukou/cython,slonik-az/cython,madjar/cython,cython/cython,marscher/cython,madjar/cython,hhsprings/cython,mrGeen/cython,mcanthony/cython,ChristopherHogan/cython,achernet/cython,slonik-az/cython,JelleZijlstra/cython,encukou/cython,fabianrost84/cython,dahebolangkuan/cython,mcanthony/cython,dahebolangkuan/cython,fperez/cython,andreasvc/cython,achernet/cython,roxyboy/cython,roxyboy/cython,hickford/cython,acrispin/cython,andreasvc/cython,scoder/cython,c-blake/cython,slonik-az/cython,fperez/cython,scoder/cython,slonik-az/cython,ABcDexter/cython,dahebolangkuan/cython,hickford/cython,achernet/cython,cython/cython,acrispin/cython,JelleZijlstra/cython,achernet/cython,marscher/cython
Demos/benchmarks/setup.py
Demos/benchmarks/setup.py
from distutils.core import setup
from Cython.Build import cythonize

directives = {
    'optimize.inline_defnode_calls': True
}

setup(
    name = 'benchmarks',
    ext_modules = cythonize("*.py", language_level=3, annotate=True,
                            compiler_directives=directives),
)
from distutils.core import setup
from Cython.Build import cythonize

setup(
    name = 'benchmarks',
    ext_modules = cythonize("*.py", language_level=3, annotate=True),
)
apache-2.0
Python
18d59a1d23cc9021fa388028ab723822e031dc07
Add health check
waprin/continuous-deployment-demo,waprin/continuous-deployment-demo,waprin/continuous-deployment-demo,waprin/continuous-deployment-demo
main.py
main.py
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')

from flask import Flask
app = Flask(__name__)

from api_key import key


@app.route('/_ah/health')
def health_check():
    return 'ok', 200


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key)
    request = urllib2.Request(host)
    try:
        response = urllib2.urlopen(request)
    except urllib2.HTTPError, error:
        contents = error.read()
        print ('Received error from Books API {}'.format(contents))
        return str(contents)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author


if __name__ == '__main__':
    app.run(debug=True)
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.

import urllib2
import json

from google.appengine.ext import vendor
vendor.add('lib')

from flask import Flask
app = Flask(__name__)

from api_key import key


@app.route('/get_author/<title>')
def get_author(title):
    host = 'https://www.googleapis.com/books/v1/volume?q={}&key={}&country=US'.format(title, key)
    request = urllib2.Request(host)
    try:
        response = urllib2.urlopen(request)
    except urllib2.HTTPError, error:
        contents = error.read()
        print ('Received error from Books API {}'.format(contents))
        return str(contents)
    html = response.read()
    author = json.loads(html)['items'][0]['volumeInfo']['authors'][0]
    return author


if __name__ == '__main__':
    app.run(debug=True)
apache-2.0
Python
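The new /_ah/health handler is trivial to exercise with Flask's built-in test client, without deploying to App Engine — a sketch (the import path for the app object is an assumption):

    from main import app  # assumed module name for the Flask app above

    client = app.test_client()
    response = client.get('/_ah/health')
    assert response.status_code == 200
    assert response.data == 'ok'   # Python 2 string, matching the handler above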
6ddc63dcb1005ccf6d09f2577faf99566bafced7
fix Log.add_group() use in live_plot.py example
vlukes/sfepy,rc/sfepy,vlukes/sfepy,BubuLK/sfepy,sfepy/sfepy,sfepy/sfepy,sfepy/sfepy,vlukes/sfepy,BubuLK/sfepy,rc/sfepy,BubuLK/sfepy,rc/sfepy
examples/miscellaneous/live_plot.py
examples/miscellaneous/live_plot.py
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
sys.path.append( '.' )

import numpy as nm

from sfepy.base.base import output, pause
from sfepy.base.log import Log

def main():
    cwd = os.path.split(os.path.join(os.getcwd(), __file__))[0]

    log = Log((['sin(x)', 'cos(x)'], ['exp(x)']),
              yscales=['linear', 'log'],
              xlabels=['angle', None], ylabels=[None, 'a function'],
              log_filename=os.path.join(cwd, 'live_plot.log'))

    log2 = Log([['x^3']],
               yscales=['linear'],
               xlabels=['x'], ylabels=['a cubic function'],
               aggregate=50, sleep=0.5,
               log_filename=os.path.join(cwd, 'live_plot2.log'))

    added = 0
    for x in nm.linspace(0, 4.0 * nm.pi, 200):
        output('x: ', x)

        if x < (2.0 * nm.pi):
            log(nm.sin(x), nm.cos(x), nm.exp(x), x = [x, None])
        else:
            if added:
                log(nm.sin(x), nm.cos(x), nm.exp(x), x**2, x=[x, None, x])
            else:
                log.plot_vlines(color='r', linewidth=2)
                log.add_group(['x^2'], yscale='linear', xlabel='new x', ylabel='square',
                              formats=['%+g'])
            added += 1

        if (added == 20) or (added == 50):
            log.plot_vlines([2], color='g', linewidth=2)

        log2(x*x*x, x=[x])

    print(log)
    print(log2)

    pause()

    log(finished=True)
    log2(finished=True)

if __name__ == '__main__':
    main()
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
sys.path.append( '.' )

import numpy as nm

from sfepy.base.base import output, pause
from sfepy.base.log import Log

def main():
    cwd = os.path.split(os.path.join(os.getcwd(), __file__))[0]

    log = Log((['sin(x)', 'cos(x)'], ['exp(x)']),
              yscales=['linear', 'log'],
              xlabels=['angle', None], ylabels=[None, 'a function'],
              log_filename=os.path.join(cwd, 'live_plot.log'))

    log2 = Log([['x^3']],
               yscales=['linear'],
               xlabels=['x'], ylabels=['a cubic function'],
               aggregate=50, sleep=0.5,
               log_filename=os.path.join(cwd, 'live_plot2.log'))

    added = 0
    for x in nm.linspace(0, 4.0 * nm.pi, 200):
        output('x: ', x)

        if x < (2.0 * nm.pi):
            log(nm.sin(x), nm.cos(x), nm.exp(x), x = [x, None])
        else:
            if added:
                log(nm.sin(x), nm.cos(x), nm.exp(x), x**2, x=[x, None, x])
            else:
                log.plot_vlines(color='r', linewidth=2)
                log.add_group(['x^2'], 'linear', 'new x', 'square',
                              formats=['%+g'])
            added += 1

        if (added == 20) or (added == 50):
            log.plot_vlines([2], color='g', linewidth=2)

        log2(x*x*x, x=[x])

    print(log)
    print(log2)

    pause()

    log(finished=True)
    log2(finished=True)

if __name__ == '__main__':
    main()
bsd-3-clause
Python
884e17eb92e35ab5a9f4d6bc94f11f49977711a3
Use render() so that we can pass in the request context and thus link to static files correctly (reviewed by @smn).
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
go/apps/jsbox/views.py
go/apps/jsbox/views.py
import requests
from urlparse import urlparse, urlunparse

from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt

from go.conversation.base import ConversationViews
from go.apps.jsbox.forms import JsboxForm, JsboxAppConfigFormset
from go.apps.jsbox.log import LogManager
from go.base.utils import conversation_or_404


class JsboxConversationViews(ConversationViews):
    conversation_type = u'jsbox'
    conversation_display_name = u'Javascript App'
    conversation_initiator = None
    edit_conversation_forms = (
        ('jsbox', JsboxForm),
        ('jsbox_app_config', JsboxAppConfigFormset),
    )


@login_required
@csrf_exempt
def cross_domain_xhr(request):
    url = request.POST.get('url', None)
    parse_result = urlparse(url)
    if parse_result.username:
        auth = (parse_result.username, parse_result.password)
        url = urlunparse(
            (parse_result.scheme,
             ('%s:%s' % (parse_result.hostname, parse_result.port)
              if parse_result.port else parse_result.hostname),
             parse_result.path,
             parse_result.params,
             parse_result.query,
             parse_result.fragment))
    else:
        auth = None
        url = url
    r = requests.get(url, auth=auth)
    return HttpResponse(r.text, status=r.status_code)


@login_required
def jsbox_logs(request, conversation_key):
    campaign_key = request.user_api.user_account_key
    conversation = conversation_or_404(request.user_api, conversation_key)
    log_manager = LogManager(request.user_api.api.redis)
    logs = log_manager.get_logs(campaign_key, conversation_key)
    logs = list(reversed(logs))
    return render(request, "jsbox/jsbox_logs.html", {
        "conversation": conversation,
        "logs": logs,
    })
import requests
from urlparse import urlparse, urlunparse

from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt

from go.conversation.base import ConversationViews
from go.apps.jsbox.forms import JsboxForm, JsboxAppConfigFormset
from go.apps.jsbox.log import LogManager
from go.base.utils import conversation_or_404


class JsboxConversationViews(ConversationViews):
    conversation_type = u'jsbox'
    conversation_display_name = u'Javascript App'
    conversation_initiator = None
    edit_conversation_forms = (
        ('jsbox', JsboxForm),
        ('jsbox_app_config', JsboxAppConfigFormset),
    )


@login_required
@csrf_exempt
def cross_domain_xhr(request):
    url = request.POST.get('url', None)
    parse_result = urlparse(url)
    if parse_result.username:
        auth = (parse_result.username, parse_result.password)
        url = urlunparse(
            (parse_result.scheme,
             ('%s:%s' % (parse_result.hostname, parse_result.port)
              if parse_result.port else parse_result.hostname),
             parse_result.path,
             parse_result.params,
             parse_result.query,
             parse_result.fragment))
    else:
        auth = None
        url = url
    r = requests.get(url, auth=auth)
    return HttpResponse(r.text, status=r.status_code)


@login_required
def jsbox_logs(request, conversation_key):
    campaign_key = request.user_api.user_account_key
    conversation = conversation_or_404(request.user_api, conversation_key)
    log_manager = LogManager(request.user_api.api.redis)
    logs = log_manager.get_logs(campaign_key, conversation_key)
    logs = list(reversed(logs))
    return render_to_response("jsbox/jsbox_logs.html", {
        "conversation": conversation,
        "logs": logs,
    })
bsd-3-clause
Python
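On the Django versions this code targets, render() is shorthand for the older render_to_response() plus a RequestContext — which is what runs context processors and lets static-file URLs resolve. Roughly:

    from django.shortcuts import render_to_response
    from django.template import RequestContext

    def jsbox_logs_legacy(request, template_name, ctx):
        # Roughly what render(request, template_name, ctx) expands to here.
        return render_to_response(template_name, ctx,
                                  context_instance=RequestContext(request))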
8d2167bc3bc37f68e225ddcd86bc4114d90be87e
Update version number
spywhere/LocalPackages
local_packages.py
local_packages.py
import sublime
from .event_handler import EventHandler
from .settings import Settings

package_control_installed = False
LOCAL_PACKAGES_VERSION = "0.1.2"
evaluating = False
retry_times = 3


def plugin_loaded():
    Settings.reset()
    Settings.startup()
    EventHandler().register_handler(
        evaluate_install,
        EventHandler().ON_LOAD
    )
    print("[Local Packages] v%s" % (LOCAL_PACKAGES_VERSION))
    check_package_control()


def check_package_control():
    try:
        __import__("Package Control").package_control
        global package_control_installed
        package_control_installed = True
    except:
        global retry_times
        if retry_times > 0:
            retry_times -= 1
            sublime.set_timeout(check_package_control, 3000)
        else:
            sublime.error_message(
                "Package Control is not found.\n\n" +
                "Local Packages will now disabled"
            )
        return
    evaluate_install()


def evaluate_install(view=None):
    global evaluating
    if evaluating:
        return
    print("[Local Packages] Evaluating missing packages")
    from .package_evaluator import PackageEvaluatorThread
    evaluating = True
    PackageEvaluatorThread(
        window=sublime.active_window(),
        callback=on_installed
    ).start()


def on_installed(failed_packages=[]):
    global evaluating
    evaluating = False
    if len(failed_packages) > 0:
        msg = "Local Packages failed to install %s missing packages...\n" % (
            len(failed_packages)
        )
        limit = 10
        for package in failed_packages:
            limit -= 1
            if limit < 0:
                break
            msg += " - %s\n" % (package)
        if limit < 0:
            msg += "and more..."
        sublime.error_message(msg)
    else:
        print("[Local Packages] Dependencies already installed")
import sublime
from .event_handler import EventHandler
from .settings import Settings

package_control_installed = False
LOCAL_PACKAGES_VERSION = "0.1.1"
evaluating = False
retry_times = 3


def plugin_loaded():
    Settings.reset()
    Settings.startup()
    EventHandler().register_handler(
        evaluate_install,
        EventHandler().ON_LOAD
    )
    print("[Local Packages] v%s" % (LOCAL_PACKAGES_VERSION))
    check_package_control()


def check_package_control():
    try:
        __import__("Package Control").package_control
        global package_control_installed
        package_control_installed = True
    except:
        global retry_times
        if retry_times > 0:
            retry_times -= 1
            sublime.set_timeout(check_package_control, 3000)
        else:
            sublime.error_message(
                "Package Control is not found.\n\n" +
                "Local Packages will now disabled"
            )
        return
    evaluate_install()


def evaluate_install(view=None):
    global evaluating
    if evaluating:
        return
    print("[Local Packages] Evaluating missing packages")
    from .package_evaluator import PackageEvaluatorThread
    evaluating = True
    PackageEvaluatorThread(
        window=sublime.active_window(),
        callback=on_installed
    ).start()


def on_installed(failed_packages=[]):
    global evaluating
    evaluating = False
    if len(failed_packages) > 0:
        msg = "Local Packages failed to install %s missing packages...\n" % (
            len(failed_packages)
        )
        limit = 10
        for package in failed_packages:
            limit -= 1
            if limit < 0:
                break
            msg += " - %s\n" % (package)
        if limit < 0:
            msg += "and more..."
        sublime.error_message(msg)
    else:
        print("[Local Packages] Dependencies already installed")
mit
Python
4ca8889396595f9da99becbb88fb7e38ab0ed560
Raise exception if connection does not succeed and customize error message
anapaulagomes/reviews-assigner
hunter/reviewsapi.py
hunter/reviewsapi.py
import requests
import os

from .endpoints import *


class UnauthorizedToken(Exception):
    pass


class ReviewsAPI:
    def __init__(self):
        token = os.environ.get('UDACITY_AUTH_TOKEN')
        self.headers = {'Authorization': token, 'Content-Length': '0'}

    def certifications(self):
        try:
            raw_response = requests.get(CERTIFICATIONS_URL, headers=self.headers)
            response = raw_response.json()
            raw_response.raise_for_status()
            certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
            return certifications_list
        except requests.exceptions.HTTPError:
            raise UnauthorizedToken('Maybe it\'s time to change you token!')

    def request_reviews(self, certifications_list):
        projects = self.__projects(certifications_list)
        return requests.post(SUBMISSION_REQUESTS, json=projects, headers=self.headers)

    # TODO Add support to multi language
    def __projects(self, certifications_list):
        projects_list = []
        for certification in certifications_list:
            projects_list.append({'project_id': certification, 'language': 'pt-br'})
        return {'projects': projects_list}
import requests
import os

from .endpoints import *


class UnauthorizedToken(Exception):
    pass


class ReviewsAPI:
    def __init__(self):
        token = os.environ.get('UDACITY_AUTH_TOKEN')
        self.headers = {'Authorization': token, 'Content-Length': '0'}

    def certifications(self):
        try:
            raw_response = requests.get(CERTIFICATIONS_URL, headers=self.headers)
            response = raw_response.json()
            certifications_list = [item['project_id'] for item in response if item['status'] == 'certified']
            return certifications_list
        except requests.exceptions.HTTPError:
            raise UnauthorizedToken

    def request_reviews(self, certifications_list):
        projects = self.__projects(certifications_list)
        return requests.post(SUBMISSION_REQUESTS, json=projects, headers=self.headers)

    # TODO Add support to multi language
    def __projects(self, certifications_list):
        projects_list = []
        for certification in certifications_list:
            projects_list.append({'project_id': certification, 'language': 'pt-br'})
        return {'projects': projects_list}
mit
Python
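raise_for_status() converts 4xx/5xx responses into requests.exceptions.HTTPError, which certifications() re-raises as the friendlier UnauthorizedToken; a caller-side sketch (assumes the UDACITY_AUTH_TOKEN environment variable is set):

    api = ReviewsAPI()
    try:
        certifications = api.certifications()
    except UnauthorizedToken as error:
        print(error)  # the custom "time to change your token" message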
792e46bcd01d2718215a3cb324b8deca5e4e1a7e
bump 1.3.10 release (#160)
F5Networks/f5-icontrol-rest-python,F5Networks/f5-icontrol-rest-python
icontrol/__init__.py
icontrol/__init__.py
__version__ = "1.3.10"
__version__ = "1.3.9"
apache-2.0
Python
9d20717b39154252109153a6c5936922d28c6511
mark unicode context values as safe
danxshap/django-mailviews,danxshap/django-mailviews
mailviews/utils.py
mailviews/utils.py
import textwrap

from collections import namedtuple

from django.utils.safestring import mark_safe


Docstring = namedtuple('Docstring', ('summary', 'body'))


def split_docstring(value):
    """
    Splits the docstring of the given value into it's summary and body.

    :returns: a 2-tuple of the format ``(summary, body)``
    """
    docstring = getattr(value, '__doc__', '') or ''
    docstring = textwrap.dedent(docstring)
    if not docstring:
        return None

    pieces = docstring.strip().split('\n\n', 1)
    try:
        body = pieces[1]
    except IndexError:
        body = None
    return Docstring(pieces[0], body)


def unimplemented(*args, **kwargs):
    raise NotImplementedError


def unescape(context):
    """
    Accepts a context object, returning a new context with autoescape off.

    Useful for rendering plain-text templates without having to wrap the entire
    template in an `{% autoescape off %}` tag.
    """
    for key in context:
        if type(context[key]) in [str, unicode]:
            context[key] = mark_safe(context[key])
    return context
import textwrap

from collections import namedtuple

from django.utils.safestring import mark_safe


Docstring = namedtuple('Docstring', ('summary', 'body'))


def split_docstring(value):
    """
    Splits the docstring of the given value into it's summary and body.

    :returns: a 2-tuple of the format ``(summary, body)``
    """
    docstring = getattr(value, '__doc__', '') or ''
    docstring = textwrap.dedent(docstring)
    if not docstring:
        return None

    pieces = docstring.strip().split('\n\n', 1)
    try:
        body = pieces[1]
    except IndexError:
        body = None
    return Docstring(pieces[0], body)


def unimplemented(*args, **kwargs):
    raise NotImplementedError


def unescape(context):
    """
    Accepts a context object, returning a new context with autoescape off.

    Useful for rendering plain-text templates without having to wrap the entire
    template in an `{% autoescape off %}` tag.
    """
    for key in context:
        if type(context[key]) is str:
            context[key] = mark_safe(context[key])
    return context
apache-2.0
Python
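The fix widens the type check from str to both Python 2 string types; an isinstance() check is the more idiomatic spelling, since it also accepts subclasses — a sketch (Python 2 only):

    from django.utils.safestring import mark_safe

    def unescape(context):
        for key in context:
            if isinstance(context[key], basestring):  # covers str and unicode
                context[key] = mark_safe(context[key])
        return context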
bdca4889442e7d84f8c4e68ecdbee676d46ff264
Fix data provider example file.
Lothiraldan/pytf
examples/test_with_data_provider.py
examples/test_with_data_provider.py
from pytf.dataprovider import DataProvider, call


@DataProvider(max_5=call(max=5), max_10=call(max=10), max_15=call(max=15))
class TestCase(object):

    def __init__(self, max):
        self.max = max

    @DataProvider(n_3=call(n=3), n_7=call(n=7), n_12=call(n=12), n_20=call(n=20))
    def test_test(self, n):
        assert n < self.max
from pytf.dataprovider import DataProvider

try:
    from unittest.mock import call
except ImportError:
    from mock import call


@DataProvider([call(max=5), call(max=10), call(max=15)])
class TestCase(object):

    def __init__(self, max):
        self.max = max

    @DataProvider([call(n=3), call(n=7), call(n=12), call(n=20)])
    def test_test(self, n):
        assert n < self.max
mit
Python
97e2e80b43ba3639e5af9deb6485c28da1a5e7af
change path
duthchao/kaggle-Otto,tks0123456789/kaggle-Otto
make_submission.py
make_submission.py
""" Ensemble by columnwise weighted sum. The weights are determined by scipy.optimize.minimize using validation set predictions. LB Private: 0.40076 LB Public: 0.39773 """ import numpy as np import pandas as pd import sklearn.preprocessing as pp path = './' # Neural Networks pred = [np.load(path + 'pred_TRI_kmax_' + str(k_max) + '.npy') for k_max in [4,5]] pred.append(np.load(path + 'pred_Sparse_RI.npy')) pred_NN = (pred[0] + pred[1] + pred[2]) / 3 # XGBoost pred_XGB = (np.load(path + 'pred_RI.npy') + np.load(path + 'pred_CF.npy')) / 2 # Ensemble weights w = np.array([1.,0.95657896,0.52392701,0.75156431,1.,0.77871818,0.81764163,0.9541003,0.82863579]) pr005 = pp.normalize(pred_NN * w + pred_XGB * (1 - w), norm = 'l1') pred005 = pd.read_csv(path + 'sampleSubmission.csv', index_col = 0) pred005.iloc[:,:] = pr005 pred005.to_csv(path + 'pred005.csv', float_format='%.8f')
""" Ensemble by columnwise weighted sum. The weights are determined by scipy.optimize.minimize using validation set predictions. LB Private: 0.40076 LB Public: 0.39773 """ import numpy as np import pandas as pd import sklearn.preprocessing as pp path = '~/' # Neural Networks pred = [np.load(path + 'pred_TRI_kmax_' + str(k_max) + '.npy') for k_max in [4,5]] pred.append(np.load(path + 'pred_Sparse_RI.npy')) pred_NN = (pred[0] + pred[1] + pred[2]) / 3 # XGBoost pred_XGB = (np.load(path + 'pred_RI.npy') + np.load(path + 'pred_CF.npy')) / 2 # Ensemble weights w = np.array([1.,0.95657896,0.52392701,0.75156431,1.,0.77871818,0.81764163,0.9541003,0.82863579]) pr005 = pp.normalize(pred_NN * w + pred_XGB * (1 - w), norm = 'l1') pred005 = pd.read_csv(path + 'sampleSubmission.csv', index_col = 0) pred005.iloc[:,:] = pr005 pred005.to_csv(path + 'pred005.csv', float_format='%.8f')
mit
Python
542ddc0d0bd96c8ff8635f649344f468d7d497d0
bump version to 0.2.3
braintree/mallory,braintree/mallory
mallory/version.py
mallory/version.py
Version = "0.2.3"
Version = "0.2.2"
mit
Python
ff9444ea838bb7ed3efae125d343cee2cec994a9
Improve the level of comments in mysite/base/depends.py
moijes12/oh-mainline,sudheesh001/oh-mainline,Changaco/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,willingc/oh-mainline,openhatch/oh-mainline,heeraj123/oh-mainline,onceuponatimeforever/oh-mainline,SnappleCap/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,campbe13/openhatch,mzdaniel/oh-mainline,moijes12/oh-mainline,moijes12/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,campbe13/openhatch,ojengwa/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,SnappleCap/oh-mainline,ojengwa/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,ehashman/oh-mainline,waseem18/oh-mainline,Changaco/oh-mainline,vipul-sharma20/oh-mainline,openhatch/oh-mainline,vipul-sharma20/oh-mainline,SnappleCap/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,openhatch/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,eeshangarg/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,vipul-sharma20/oh-mainline,SnappleCap/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,waseem18/oh-mainline,moijes12/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,sudheesh001/oh-mainline,nirmeshk/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,ehashman/oh-mainline
mysite/base/depends.py
mysite/base/depends.py
# -*- coding: utf-8 -*-
# This file is part of OpenHatch.
# Copyright (C) 2011 Asheesh Laroia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

### This file exists to wrap some dependencies for other parts of the code.
###
### In general, core parts of the OpenHatch site are forbidden from importing
### some hard-to-install modules, like lxml. Those files import from here
### instead so that if the import fails, the site doesn't crash.
###
### This is so that new contributors can run the OpenHatch site without
### installing these hard-to-install dependencies.

# Used within this file
import os
import logging

# Wrap lxml and the modules that are part of it
try:
    import lxml
    import lxml.etree
    import lxml.html
except:
    class nothing(object):
        pass
    lxml = nothing()
    lxml.etree = None
    lxml.html = None

if lxml.html is None:
    logging.warning("Some parts of the OpenHatch site may fail because the lxml"
                    " library is not installed. Look in README.mkd for"
                    " information about lxml.")

# Provide a helper to check if svnadmin is available. If not,
# we can skip running code (and tests) that require it.
def svnadmin_available():
    # FIXME: This should move to a variable controlled
    # by settings.py.
    SVNADMIN_PATH = '/usr/bin/svnadmin'
    return os.path.exists(SVNADMIN_PATH)

### Here we try to import "Image", from the Python Imaging Library.
### If we fail, Image is None.
Image = None
try:
    import Image
except:
    try:
        from PIL import Image
    except ImportError:
        ### Okay, for a good time, let's hack sys.modules.
        ### This permits Django to think ImageFields might
        ### possibly work.
        import sys
        sys.modules['Image'] = sys.modules['sys']

# Wrap launchpadbugs. We wrap it because it imports libxml2,
# which qualifies as hard-to-install.
try:
    import launchpadbugs
    import launchpadbugs.connector
    import launchpadbugs.basebuglistfilter
    import launchpadbugs.text_bug
    import launchpadbugs.lphelper
except ImportError:
    # usually because python-libxml2 is missing
    launchpadbugs = None
    logging.warning("launchpadbugs did not import. Install python-libxml2.")
import os

try:
    import lxml
    import lxml.etree
    import lxml.html
except:
    class nothing(object):
        pass
    lxml = nothing()
    lxml.etree = None
    lxml.html = None

import logging

if lxml.html is None:
    logging.warning("Some parts of the OpenHatch site may fail because the lxml"
                    " library is not installed. Look in README.mkd for"
                    " information about lxml.")

def svnadmin_available():
    # FIXME: This should move to a variable controlled
    # by settings.py.
    SVNADMIN_PATH = '/usr/bin/svnadmin'
    return os.path.exists(SVNADMIN_PATH)

### Here we try to import "Image", from the Python Imaging Library.
### If we fail, Image is None.
Image = None
try:
    import Image
except:
    try:
        from PIL import Image
    except ImportError:
        ### Okay, for a good time, let's hack sys.modules.
        ### This permits Django to think ImageFields might
        ### possibly work.
        import sys
        sys.modules['Image'] = sys.modules['sys']

try:
    import launchpadbugs
    import launchpadbugs.connector
    import launchpadbugs.basebuglistfilter
    import launchpadbugs.text_bug
    import launchpadbugs.lphelper
except ImportError:
    # usually because python2libxml2 is missing
    launchpadbugs = None
    logging.warning("launchpadbugs did not import. Install python-libxml2.")
agpl-3.0
Python
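The module above applies one optional-import pattern throughout: try the fragile import, fall back to a sentinel, and warn so callers can feature-test instead of crashing. Reduced to its simplest form:

    import logging

    try:
        import lxml.html as lxml_html
    except ImportError:
        lxml_html = None  # callers check this before using lxml features
        logging.warning("lxml is not installed; related features are disabled.")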
bf8b29e7d05a7b476198109f1dccfd42da38f73b
Update pack.py: copy directory to destination instead of compressing
AwesomeTickets/StaticPageServer,AwesomeTickets/StaticPageServer
pack.py
pack.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

' Generate static webpage files '

import os
import sys
import shutil

usage_prompt = 'Usage: python3 pack.py <destination_path> [-H <hostname>]'
protocal = "http"
hostname = ''
host_path = os.path.join('scripts', 'host.js')
site_dir = 'site'

if (len(sys.argv) < 2):
    print(usage_prompt)
    sys.exit(0)
else:
    des_path = sys.argv[1] + site_dir

for i, arg in enumerate(sys.argv[2:]):
    if (arg == '-H' and i + 3 < len(sys.argv)):
        hostname = protocal + '://' + sys.argv[i + 3]

if hostname != '':
    print("Hostname changed to '%s'" % hostname)
    host_file = open(host_path, 'w')
    host_file.write("var hostname = '%s'" % hostname)
    host_file.close()

print("Gulp building...")
os.system("gulp clean --silent")
os.system("gulp build --silent")

print("Copying files to '%s'..." % des_path)
shutil.rmtree(des_path, ignore_errors=True)
shutil.copytree(site_dir, des_path)

print("Done.")
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

' Generate static webpage files '

import os
import sys

usage_prompt = '''Usage: python3 pack.py
       python3 pack.py -H <hostname>
       python3 pack.py { ? | -h | --help }'''
protocal = "http"
hostname = 'localhost'
filename_host = os.path.join('scripts', 'host.js')
dir_site = 'site'
filename_pkg = dir_site + '.tar.gz'

for i, arg in enumerate(sys.argv[1:]):
    if (arg == '?' or arg == '-h' or arg == '--help'):
        print(usage_prompt)
        sys.exit(0)
    elif (arg == '-H' and i + 2 < len(sys.argv)):
        hostname = sys.argv[i + 2]

hostname = protocal + '://' + hostname
print("Hostname set to '%s'" % hostname)

host_file = open(filename_host, 'w')
host_file.write("var hostname = '%s'" % hostname)
host_file.close()

print("Gulp building...")
os.system("gulp clean --silent")
os.system("gulp build --silent")

print("Compressing...")
os.system("tar -zcf %s %s" % (filename_pkg, dir_site))

print("Files saved to '%s'" % filename_pkg)
mit
Python
346e296872e1ca011eb5e469505de1c15c86732f
Clarify the comment about setting the PYTHON variable for the Doc Makefile.
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
Doc/tools/sphinx-build.py
Doc/tools/sphinx-build.py
# -*- coding: utf-8 -*-
"""
    Sphinx - Python documentation toolchain
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: 2007 by Georg Brandl.
    :license: Python license.
"""

import sys

if __name__ == '__main__':

    if sys.version_info[:3] < (2, 5, 0):
        print >>sys.stderr, """\
Error: Sphinx needs to be executed with Python 2.5 or newer
(If you run this from the Makefile, you can set the PYTHON variable
to the path of an alternative interpreter executable, e.g.,
``make html PYTHON=python2.5``).
"""
        sys.exit(1)

    from sphinx import main
    sys.exit(main(sys.argv))
# -*- coding: utf-8 -*-
"""
    Sphinx - Python documentation toolchain
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: 2007 by Georg Brandl.
    :license: Python license.
"""

import sys

if __name__ == '__main__':

    if sys.version_info[:3] < (2, 5, 0):
        print >>sys.stderr, """\
Error: Sphinx needs to be executed with Python 2.5 or newer.
(If you run this from the Makefile, you can set the PYTHON variable
to the path of an alternative interpreter executable.)
"""
        sys.exit(1)

    from sphinx import main
    sys.exit(main(sys.argv))
mit
Python
8070b119c11ad18e2c1979afef21503a255dd8d8
Check the number of matches for each query
kalgynirae/rockuefort
rockuefort.py
rockuefort.py
#!/usr/bin/python3
"""
Usage: rockuefort copy <file> <destination>
       rockuefort symlink <file> <destination>
       rockuefort list <file>
"""
from collections import OrderedDict
import subprocess
import sys

from docopt import docopt

def log(*args, **kwargs):
    print("rockuefort:", *args, file=sys.stderr, **kwargs)

if __name__ == '__main__':
    args = docopt(__doc__)

    # Load and evaluate queries
    files = OrderedDict()
    queries = []
    with open(args['<file>']) as f:
        for line in f:
            try:
                c, query = line.strip().split(':', 1)
                c = int(c)
            except ValueError:
                c = 1
                query = line.strip()
            queries.append((c, query))
    for c, query in queries:
        r = subprocess.check_output(['quodlibet', '--print-query', query])
        matched_files = [mf.decode() for mf in r.splitlines() if mf]
        nm = len(matched_files)
        if nm != c:
            log("Matched {} (expected {}): {}".format(nm, c, query))
            for file in matched_files:
                log("  match: {}".format(file))
        for file in matched_files:
            files.setdefault(file, []).append(query)

    # Check for multiply-matched files
    for file, queries in files.items():
        if len(queries) > 1:
            log("Matched by multiple: {}".format(file))
            for q in queries:
                log("  query: {}".format(q))

    # Perform the requested action
    if args['copy']:
        log("Copying to {}".format(args['<destination>']))
        ...
    elif args['symlink']:
        log("Symlinking to {}".format(args['<destination>']))
        ...
    else:  # args['list']
        for file in files:
            print(file)
#!/usr/bin/python3
"""
Usage: rockuefort copy <file> <destination>
       rockuefort symlink <file> <destination>
       rockuefort list <file>
"""
from collections import OrderedDict
import subprocess
import sys

from docopt import docopt

def log(*args, **kwargs):
    print("rockuefort:", *args, file=sys.stderr, **kwargs)

if __name__ == '__main__':
    args = docopt(__doc__)

    # Load and evaluate queries
    files = OrderedDict()
    with open(args['<file>']) as f:
        queries = [line.strip() for line in f]
    for query in queries:
        r = subprocess.check_output(['quodlibet', '--print-query', query])
        matched_files = [mf.decode() for mf in r.splitlines() if mf]
        for file in matched_files:
            files.setdefault(file, []).append(query)
        if not matched_files:
            log("No match: {}".format(query))

    # Check for multiply-matched files
    for file, queries in files.items():
        if len(queries) > 1:
            log("Matched multiple: {}".format(file))
            for q in queries:
                log("  query: {}".format(q))

    # Perform the requested action
    if args['copy']:
        log("Copying to {}".format(args['<destination>']))
        ...
    elif args['symlink']:
        log("Symlinking to {}".format(args['<destination>']))
        ...
    else:  # args['list']
        for file in files:
            print(file)
mit
Python
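Each playlist line may now carry an expected match count before a colon, falling back to 1 when the prefix is missing; the parsing above, isolated as a runnable sketch:

    for line in ["2:artist = Beatles", "title = Yesterday"]:
        try:
            c, query = line.strip().split(':', 1)
            c = int(c)
        except ValueError:
            c, query = 1, line.strip()
        print(c, query)   # -> 2 artist = Beatles / 1 title = Yesterday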