repo_name (string, 5–100 chars) | path (string, 4–375 chars) | copies (991 distinct values) | size (string, 4–7 chars) | content (string, 666 chars–1M) | license (15 distinct values)
---|---|---|---|---|---|
quantumlib/OpenFermion | src/openfermion/transforms/repconversions/operator_tapering_test.py | 1 | 1640 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""tests for operator_tapering.py"""
import unittest
from openfermion.ops.operators import FermionOperator, BosonOperator
from openfermion.transforms.repconversions.operator_tapering import (
freeze_orbitals, prune_unused_indices)
class FreezeOrbitalsTest(unittest.TestCase):
def test_freeze_orbitals_nonvanishing(self):
op = FermionOperator(((1, 1), (1, 0), (0, 1), (2, 0)))
op_frozen = freeze_orbitals(op, [1])
expected = FermionOperator(((0, 1), (1, 0)), -1)
self.assertEqual(op_frozen, expected)
def test_freeze_orbitals_vanishing(self):
op = FermionOperator(((1, 1), (2, 0)))
op_frozen = freeze_orbitals(op, [], [2])
self.assertEqual(len(op_frozen.terms), 0)
class PruneUnusedIndicesTest(unittest.TestCase):
def test_prune(self):
for LadderOp in (FermionOperator, BosonOperator):
op = LadderOp(((1, 1), (8, 1), (3, 0)), 0.5)
op = prune_unused_indices(op)
expected = LadderOp(((0, 1), (2, 1), (1, 0)), 0.5)
self.assertTrue(expected == op) | apache-2.0 |
gooftroop/Zeus | contrib/tornado/test/tcpserver_test.py | 105 | 1278 | import socket
from tornado import gen
from tornado.iostream import IOStream
from tornado.log import app_log
from tornado.stack_context import NullContext
from tornado.tcpserver import TCPServer
from tornado.testing import AsyncTestCase, ExpectLog, bind_unused_port, gen_test
class TCPServerTest(AsyncTestCase):
@gen_test
def test_handle_stream_coroutine_logging(self):
# handle_stream may be a coroutine and any exception in its
# Future will be logged.
class TestServer(TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
yield gen.moment
stream.close()
1/0
server = client = None
try:
sock, port = bind_unused_port()
with NullContext():
server = TestServer()
server.add_socket(sock)
client = IOStream(socket.socket())
with ExpectLog(app_log, "Exception in callback"):
yield client.connect(('localhost', port))
yield client.read_until_close()
yield gen.moment
finally:
if server is not None:
server.stop()
if client is not None:
client.close()
| mit |
yqm/sl4a | python/src/Lib/commands.py | 58 | 2540 | """Execute shell commands via os.popen() and return status, output.
Interface summary:
import commands
outtext = commands.getoutput(cmd)
(exitstatus, outtext) = commands.getstatusoutput(cmd)
outtext = commands.getstatus(file) # returns output of "ls -ld file"
A trailing newline is removed from the output string.
Encapsulates the basic operation:
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
[Note: it would be nice to add functions to interpret the exit status.]
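
Illustrative example (a sketch; exact output depends on the shell, and this
module only works on Unix):

    >>> import commands
    >>> commands.getstatusoutput('echo hello')
    (0, 'hello')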
"""
__all__ = ["getstatusoutput","getoutput","getstatus"]
# Module 'commands'
#
# Various tools for executing commands and looking at their output and status.
#
# NB This only works (and is only relevant) for UNIX.
# Get 'ls -l' status for an object into a string
#
def getstatus(file):
"""Return output of "ls -ld <file>" in a string."""
import warnings
warnings.warn("commands.getstatus() is deprecated", DeprecationWarning)
return getoutput('ls -ld' + mkarg(file))
# Get the output from a shell command into a string.
# The exit status is ignored; a trailing newline is stripped.
# Assume the command will work with '{ ... ; } 2>&1' around it.
#
def getoutput(cmd):
"""Return output (stdout or stderr) of executing cmd in a shell."""
return getstatusoutput(cmd)[1]
# Ditto but preserving the exit status.
# Returns a pair (sts, output)
#
def getstatusoutput(cmd):
"""Return (status, output) of executing cmd in a shell."""
import os
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
if sts is None: sts = 0
if text[-1:] == '\n': text = text[:-1]
return sts, text
# Make command argument from directory and pathname (prefix space, add quotes).
#
def mk2arg(head, x):
from warnings import warnpy3k
warnpy3k("In 3.x, mk2arg has been removed.")
import os
return mkarg(os.path.join(head, x))
# Make a shell command argument from a string.
# Return a string beginning with a space followed by a shell-quoted
# version of the argument.
# Two strategies: enclose in single quotes if it contains none;
# otherwise, enclose in double quotes and prefix quotable characters
# with backslash.
#
def mkarg(x):
from warnings import warnpy3k
warnpy3k("in 3.x, mkarg has been removed.")
if '\'' not in x:
return ' \'' + x + '\''
s = ' "'
for c in x:
if c in '\\$"`':
s = s + '\\'
s = s + c
s = s + '"'
return s
| apache-2.0 |
scipsycho/mlpack | linear_regression.py | 1 | 8215 | import numpy as np
class linear_regression:
def __init__(self,batch_size=0,epochs=100,learning_rate=0.001,tolerance=0.00001,show_progress=True):
"""
        The function initializes the class
Parameters
----------
        batch_size: int
            It defines the number of data sets the algorithm takes at once to optimise the parameters.
            It should be a factor of the number of examples given to the algorithm in the fit function.
            Default Value is 0, which means it will compute all the data sets together.
epochs: int
It is the maximum number of times the algorithm is going to compute the whole data set available
for training.
Default Value is 100
learning_rate: float
It is the learning rate of the machine learning algorithm.
Default Value is 0.001
tolerance: float
            It defines the minimum improvement that the algorithm will tolerate, i.e. if the parameters show a change
            less than the value of tolerance, it assumes that the algorithm is optimised to the maximum.
Default Value is 0.00001
show_progress: Boolean
It controls whether the object will show the progress as output or not.
Default Value: True
Returns
-------
Nothing
"""
#Batch Size
self.batch=batch_size
#Maximum number of iterations that the object will perfom
self.epochs=epochs
#Learning Rate of the linear regression algo
self.l_rate=learning_rate
        #Bool Value of whether to show progress or not
self.show_progress=show_progress
#Maximum change in parameters or weights that can be assumed negligible
self.tol=tolerance
def fit(self,X,Y):
"""
The function fits the training data set to the algorithm.
Detailed Description
--------------------
        The function takes the input and the actual output of the data set and optimises the parameters accordingly.
Parameters
----------
X: numpy.ndarray
            It is the input data set. The number of columns defines the number of dimensions in the input data.
            The number of rows defines the number of data sets available for training.
If there is only one dimension, it can also be a linear numpy.ndarray.
Y: numpy.ndarray
It is the proposed output corresponding to the input given in any row of the input data set X.
            The number of rows defines the number of data sets available for training.
It can also be a linear numpy.ndarray.
Returns
-------
Nothing
Notes
-----
        X.shape[0] must be equal to Y.shape[0], which is also the number of data sets available for training.
"""
#Number of Training Examples
self.note=X.shape[0]
#If Batch value is zero, it is assumed the whole dataset is the batch
        if self.batch == 0:
self.batch=self.note
        #Changing Vector To Matrix
        if len(X.shape) == 1:
X=X.reshape([X.shape[0],1])
        #Number of Dimensions plus one bias introduced
self.nod=X.shape[1]+1
#Training data initialized
self.train_i=np.ones([self.note,self.nod])
#Leaving Bias values as 1
self.train_i[:,1:]=X
#Training data output stored and changing Vector To Matrix
        if len(Y.shape) == 1:
Y=Y.reshape([Y.shape[0],1])
self.train_o=Y
#Parameters or weights randomly generated
self.parameters=np.random.random([self.nod,1])
#Starting Gradient Descent
self.__start_gradient_descent__()
def __GradDescent__(self,initial,final):
"""
        The function optimises the parameters according to a specific subset of the data set available
Parameters
----------
initial: int
            It is the initial index of the block of the data set being used.
final: int
            It is the final index of the block of the data set being used.
Returns
-------
Nothing
Notes
-----
initial should always be less than or equal to final. Also, final should always be less than the
        number of data sets available
"""
#Difference between expected and actual values
diff=(self.train_i[initial:final].dot(self.parameters)-self.train_o[initial:final])
#Multiplying with respected values to get differentiation
product=diff*self.train_i[initial:final]
        #Adding column-wise to get differentiation w.r.t. parameters
delta=(product.sum(axis=0))*self.l_rate/(final-initial+1)
#Changing the Value Of parameters
self.parameters=self.parameters-delta.reshape([delta.shape[0],1])
def __start_gradient_descent__(self):
"""
This function optimises the parameters for the whole data set.
Detailed Description
--------------------
        This function uses the batch size, the number of epochs and the tolerance to find the optimised values of the parameters
according to the need of the user. The function also shows progress in terms of the epochs covered. This does
not take into account the tolerance value.
Parameters
----------
None
Returns
-------
None
"""
        #Number of times the whole set of parameters is optimized in one epoch
times=int(self.note/self.batch)
#Value used to show percentage
percent=1
        #Loss Curve is initialized every time this function is called
self.loss_curve=[]
        #Gradient Descent Started
for i in range(self.epochs):
#Initial Parameters Stored
self.initial_parameters=self.parameters
for j in range(times):
initial=j*self.batch
final=(j+1)*self.batch
self.__GradDescent__(initial,final)
#One Iteration of Gradient Descent Complete
#Finding and adding loss to the loss curve
diff=(self.train_i.dot(self.parameters)-self.train_o)
loss=(np.abs(diff)).sum()
self.loss_curve.append(loss)
#Checking for tolerance
if (np.abs(self.initial_parameters-self.parameters)).sum()/self.note < self.tol:
                print('Optimised to the maximum')
break
#For showing percentage
if self.show_progress and(i*100/self.epochs >= percent):
print('|',end='')
percent+=1
        #Completing the Percentage if the loop is broken in between
while percent<=101 and self.show_progress:
print('|',end='')
percent+=1
#Displaying 100% Complete
if self.show_progress:
print(" 100%")
def predict(self,Y):
"""
This function gives the predicted value of the data set given for testing.
Parameters
----------
Y: numpy.ndarray
This is the input of the linear regression model whose number of columns represent
the number of dimensions of the input. The rows represent the number of data sets given
for prediction.
Returns
        -------
numpy.ndarray
            This is the predicted output of the input given in Y. Its number of rows represents
the number of data sets given for prediction
Notes
-----
Y.shape[1] should be equal to the number of dimensions given in the fit function.
"""
#Converting the testing data into data with bias
self.test_i=np.ones([Y.shape[0],self.nod])
        if len(Y.shape) == 1:
Y=Y.reshape([Y.shape[0],1])
self.test_i[:,1:]=Y
#Storing Output
self.test_o=self.test_i.dot(self.parameters)
return self.test_o
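
# A minimal usage sketch (added for illustration; the data is synthetic and
# the hyperparameters are arbitrary):
if __name__ == '__main__':
    X = np.linspace(0, 1, 50)
    Y = 2.0 + 3.0 * X                      # y = 2 + 3x, no noise
    model = linear_regression(epochs=1000, learning_rate=0.1, show_progress=False)
    model.fit(X, Y)
    print(model.parameters.ravel())        # should approach [2., 3.]
    print(model.predict(np.array([0.0, 1.0])).ravel())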
| mit |
rabernat/mitgcm-xray | _xgcm/mdsxray.py | 2 | 23927 | import operator
from glob import glob
import os
import re
import warnings
import numpy as np
import dask.array as da
import xray
from xray import Variable
from xray.backends.common import AbstractDataStore
from xray.core.utils import NDArrayMixin
from xray.core.pycompat import OrderedDict
from xray.core.indexing import NumpyIndexingAdapter
#from ..conventions import pop_to, cf_encoder
#from ..core import indexing
#from ..core.utils import (FrozenOrderedDict, NDArrayMixin,
# close_on_error, is_remote_uri)
#from ..core.pycompat import iteritems, basestring, OrderedDict
#from .common import AbstractWritableDataStore, robust_getitem
# This lookup table maps from dtype.byteorder to a readable endian
# string used by netCDF4.
_endian_lookup = {'=': 'native',
'>': 'big',
'<': 'little',
'|': 'native'}
# the variable metadata will be stored in dicts of the form
#_variable[varname] = (dimensions, description, units)
_grid_variables = OrderedDict(
# horizontal grid
X= (('X',), "X-coordinate of cell center", "meters"),
Y= (('Y',), "Y-coordinate of cell center", "meters"),
Xp1= (('Xp1',), "X-coordinate of cell corner", "meters"),
Yp1= (('Yp1',), "Y-coordinate of cell corner", "meters"),
# 2d versions
XC= (('Y','X'), "X coordinate of cell center (T-P point)", "degree_east"),
YC= (('Y','X'), "Y coordinate of cell center (T-P point)", "degree_north"),
XG= (('Yp1','Xp1'), "X coordinate of cell corner (Vorticity point)", "degree_east"),
YG= (('Yp1','Xp1'), "Y coordinate of cell corner (Vorticity point)", "degree_north"),
# vertical grid
Z= (('Z',), "vertical coordinate of cell center", "meters"),
Zp1= (('Zp1',), "vertical coordinate of cell interface", "meters"),
Zu= (('Zu',), "vertical coordinate of lower cell interface", "meters"),
Zl= (('Zl',), "vertical coordinate of upper cell interface", "meters"),
# (for some reason, the netCDF files use both R and Z notation )
# 'RC': (('Z',), "R coordinate of cell center", "m"),
# 'RF': (('Zp1',), "R coordinate of cell interface", "m"),
# 'RU': (('Zu',), "R coordinate of lower cell interface", "m"),
# 'RL': (('Zl',), "R coordinate of upper cell interface", "m"),
# horiz. differentials
dxC= (('Y','Xp1'), "x cell center separation", "meters"),
dyC= (('Yp1','X'), "y cell center separation", "meters"),
dxG= (('Yp1','X'), "x cell corner separation", "meters"),
dyG= (('Y','Xp1'), "y cell corner separation", "meters"),
# vert. differentials
drC= (('Zp1',), "r cell center separation", "m"),
drF= (('Z',), "r cell face separation", "m"),
# areas
rA= (('Y','X'), "r-face area at cell center", "m^2"),
rAw= (('Y','Xp1'), "r-face area at U point", "m^2"),
rAs= (('Yp1','X'), "r-face area at V point", "m^2"),
rAz= (('Yp1','Xp1'), "r-face area at cell corner", "m^2"),
# depth
Depth=(('Y','X'), "fluid thickness in r coordinates (at rest)", "meters"),
# thickness factors
HFacC=(('Z','Y','X'),
"vertical fraction of open cell at cell center", "none (0-1)"),
HFacW=(('Z','Y','Xp1'),
"vertical fraction of open cell at West face", "none (0-1)"),
HFacS=(('Z','Yp1','X'),
"vertical fraction of open cell at South face", "none (0-1)")
)
_grid_special_mapping = {
'Z': ('RC', (slice(None),0,0)),
'Zp1': ('RF', (slice(None),0,0)),
'Zu': ('RF', (slice(1,None),0,0)),
'Zl': ('RF', (slice(None,-1),0,0)),
'X': ('XC', (0,slice(None))),
'Y': ('YC', (slice(None),0)),
'Xp1': ('XG', (0,slice(None))),
'Yp1': ('YG', (slice(None),0)),
'rA': ('RAC', None),
'HFacC': ('hFacC', None),
'HFacW': ('hFacW', None),
'HFacS': ('hFacS', None),
}
_state_variables = OrderedDict(
# state
U= (('Z','Y','Xp1'), 'Zonal Component of Velocity', 'm/s'),
V= (('Z','Yp1','X'), 'Meridional Component of Velocity', 'm/s'),
W= (('Zl','Y','X'), 'Vertical Component of Velocity', 'm/s'),
T= (('Z','Y','X'), 'Potential Temperature', 'degC'),
S= (('Z','Y','X'), 'Salinity', 'psu'),
PH= (('Z','Y','X'), 'Hydrostatic Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
PHL=(('Y','X'), 'Bottom Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
Eta=(('Y','X'), 'Surface Height Anomaly', 'm'),
# tave
uVeltave=(('Z','Y','Xp1'), 'Zonal Component of Velocity', 'm/s'),
vVeltave=(('Z','Yp1','X'), 'Meridional Component of Velocity', 'm/s'),
wVeltave=(('Zl','Y','X'), 'Vertical Component of Velocity', 'm/s'),
Ttave=(('Z','Y','X'), 'Potential Temperature', 'degC'),
Stave=(('Z','Y','X'), 'Salinity', 'psu'),
PhHytave=(('Z','Y','X'), 'Hydrostatic Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
PHLtave=(('Y','X'), 'Bottom Pressure Pot.(p/rho) Anomaly', 'm^2/s^2'),
ETAtave=(('Y','X'), 'Surface Height Anomaly', 'm'),
Convtave=(('Zl','Y','X'), "Convective Adjustment Index", "none [0-1]"),
Eta2tave=(('Y','X'), "Square of Surface Height Anomaly", "m^2"),
PHL2tave=(('Y','X'), 'Square of Hyd. Pressure Pot.(p/rho) Anomaly', 'm^4/s^4'),
sFluxtave=(('Y','X'), 'total salt flux (match salt-content variations), >0 increases salt', 'g/m^2/s'),
Tdiftave=(('Zl','Y','X'), "Vertical Diffusive Flux of Pot.Temperature", "degC.m^3/s"),
tFluxtave=(('Y','X'), "Total heat flux (match heat-content variations), >0 increases theta", "W/m^2"),
TTtave=(('Z','Y','X'), 'Squared Potential Temperature', 'degC^2'),
uFluxtave=(('Y','Xp1'), 'surface zonal momentum flux, positive -> increase u', 'N/m^2'),
UStave=(('Z','Y','Xp1'), "Zonal Transport of Salinity", "psu m/s"),
UTtave=(('Z','Y','Xp1'), "Zonal Transport of Potenial Temperature", "degC m/s"),
UUtave=(('Z','Y','Xp1'), "Zonal Transport of Zonal Momentum", "m^2/s^2"),
UVtave=(('Z','Yp1','Xp1'), 'Product of meridional and zonal velocity', 'm^2/s^2'),
vFluxtave=(('Yp1','X'), 'surface meridional momentum flux, positive -> increase v', 'N/m^2'),
VStave=(('Z','Yp1','X'), "Meridional Transport of Salinity", "psu m/s"),
VTtave=(('Z','Yp1','X'), "Meridional Transport of Potential Temperature", "degC m/s"),
VVtave=(('Z','Yp1','X'), 'Zonal Transport of Zonal Momentum', 'm^2/s^2'),
WStave=(('Zl','Y','X'), 'Vertical Transport of Salinity', "psu m/s"),
WTtave=(('Zl','Y','X'), 'Vertical Transport of Potential Temperature', "degC m/s")
)
def _force_native_endianness(var):
# possible values for byteorder are:
# = native
# < little-endian
# > big-endian
# | not applicable
# Below we check if the data type is not native or NA
if var.dtype.byteorder not in ['=', '|']:
# if endianness is specified explicitly, convert to the native type
data = var.data.astype(var.dtype.newbyteorder('='))
var = Variable(var.dims, data, var.attrs, var.encoding)
# if endian exists, remove it from the encoding.
var.encoding.pop('endian', None)
# check to see if encoding has a value for endian its 'native'
    if var.encoding.get('endian', 'native') != 'native':
raise NotImplementedError("Attempt to write non-native endian type, "
"this is not supported by the netCDF4 python "
"library.")
return var
def _parse_available_diagnostics(fname):
all_diags = {}
# add default diagnostics for grid, tave, and state
with open(fname) as f:
# will automatically skip first four header lines
for l in f:
c = re.split('\|',l)
if len(c)==7 and c[0].strip()!='Num':
key = c[1].strip()
levs = int(c[2].strip())
mate = c[3].strip()
if mate: mate = int(mate)
code = c[4]
units = c[5].strip()
desc = c[6].strip()
dds = MITgcmDiagnosticDescription(
key, code, units, desc, levs, mate)
# return dimensions, description, units
all_diags[key] = (dds.coords(), dds.desc, dds.units)
return all_diags
class MITgcmDiagnosticDescription(object):
def __init__(self, key, code, units=None, desc=None, levs=None, mate=None):
self.key = key
self.levs = levs
self.mate = mate
self.code = code
self.units = units
self.desc = desc
def coords(self):
"""Parse code to determine coordinates."""
hpoint = self.code[1]
rpoint = self.code[8]
rlev = self.code[9]
xcoords = {'U': 'Xp1', 'V': 'X', 'M': 'X', 'Z': 'Xp1'}
ycoords = {'U': 'Y', 'V': 'Yp1', 'M': 'Y', 'Z': 'Yp1'}
rcoords = {'M': 'Z', 'U': 'Zu', 'L': 'Zl'}
if rlev=='1' and self.levs==1:
return (ycoords[hpoint], xcoords[hpoint])
elif rlev=='R':
return (rcoords[rpoint], ycoords[hpoint], xcoords[hpoint])
else:
warnings.warn("Not sure what to do with rlev = " + rlev)
return (rcoords[rpoint], ycoords[hpoint], xcoords[hpoint])
def _parse_meta(fname):
"""Get the metadata as a dict out of the mitGCM mds .meta file."""
flds = {}
basename = re.match('(^.+?)\..+', os.path.basename(fname)).groups()[0]
flds['basename'] = basename
with open(fname) as f:
text = f.read()
# split into items
for item in re.split(';', text):
# remove whitespace at beginning
item = re.sub('^\s+', '', item)
#match = re.match('(\w+) = ', item)
match = re.match('(\w+) = (\[|\{)(.*)(\]|\})', item, re.DOTALL)
if match:
key, _, value, _ = match.groups()
# remove more whitespace
value = re.sub('^\s+', '', value)
value = re.sub('\s+$', '', value)
#print key,':', value
flds[key] = value
# now check the needed things are there
needed_keys = ['dimList','nDims','nrecords','dataprec']
for k in needed_keys:
assert flds.has_key(k)
# transform datatypes
flds['nDims'] = int(flds['nDims'])
flds['nrecords'] = int(flds['nrecords'])
# use big endian always
flds['dataprec'] = np.dtype(re.sub("'",'',flds['dataprec'])).newbyteorder('>')
flds['dimList'] = [[int(h) for h in
re.split(',', g)] for g in
re.split(',\n',flds['dimList'])]
if flds.has_key('fldList'):
flds['fldList'] = [re.match("'*(\w+)",g).groups()[0] for g in
re.split("'\s+'",flds['fldList'])]
assert flds['nrecords'] == len(flds['fldList'])
return flds
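
# For reference, a hypothetical .meta file of the kind _parse_meta expects
# (the field names come from needed_keys above; the values are illustrative):
#
#   nDims = [   2 ];
#   dimList = [
#      90,    1,   90,
#      40,    1,   40
#   ];
#   dataprec = [ 'float32' ];
#   nrecords = [     1 ];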
def _read_mds(fname, iternum=None, use_mmap=True,
force_dict=True, convert_big_endian=False):
"""Read an MITgcm .meta / .data file pair"""
if iternum is None:
istr = ''
else:
assert isinstance(iternum, int)
istr = '.%010d' % iternum
datafile = fname + istr + '.data'
metafile = fname + istr + '.meta'
# get metadata
meta = _parse_meta(metafile)
# why does the .meta file contain so much repeated info?
# just get the part we need
# and reverse order (numpy uses C order, mds is fortran)
shape = [g[0] for g in meta['dimList']][::-1]
assert len(shape) == meta['nDims']
# now add an extra for number of recs
nrecs = meta['nrecords']
shape.insert(0, nrecs)
# load and shape data
if use_mmap:
d = np.memmap(datafile, meta['dataprec'], 'r')
else:
d = np.fromfile(datafile, meta['dataprec'])
if convert_big_endian:
dtnew = d.dtype.newbyteorder('=')
d = d.astype(dtnew)
d.shape = shape
if nrecs == 1:
if meta.has_key('fldList'):
name = meta['fldList'][0]
else:
name = meta['basename']
if force_dict:
return {name: d[0]}
else:
return d[0]
else:
# need record names
out = {}
for n, name in enumerate(meta['fldList']):
out[name] = d[n]
return out
class MDSArrayWrapper(NDArrayMixin):
def __init__(self, array):
self.array = array
@property
    def dtype(self):
        return self.array.dtype
def _list_all_mds_files(dirname):
"""Find all the meta / data files"""
files = glob(os.path.join(dirname, '*.meta'))
# strip the suffix
return [f[:-5] for f in files]
#class MemmapArrayWrapper(NumpyIndexingAdapter):
class MemmapArrayWrapper(NDArrayMixin):
def __init__(self, memmap_array):
self._memmap_array = memmap_array
@property
def array(self):
# We can't store the actual netcdf_variable object or its data array,
# because otherwise scipy complains about variables or files still
# referencing mmapped arrays when we try to close datasets without
# having read all data in the file.
return self._memmap_array
@property
def dtype(self):
return self._memmap_array.dtype
def __getitem__(self, key):
data = self._memmap_array.__getitem__(key)
return np.asarray(data)
_valid_geometry = ['Cartesian', 'SphericalPolar']
def open_mdsdataset(dirname, iters=None, deltaT=1,
prefix=None, ref_date=None, calendar=None,
ignore_pickup=True, geometry='Cartesian'):
"""Open MITgcm-style mds file output as xray datset."""
store = _MDSDataStore(dirname, iters, deltaT,
prefix, ref_date, calendar,
ignore_pickup, geometry)
return xray.Dataset.load_store(store)
class _MDSDataStore(AbstractDataStore):
"""Represents the entire directory of MITgcm mds output
including all grid variables. Similar in some ways to
netCDF.Dataset."""
def __init__(self, dirname, iters=None, deltaT=1,
prefix=None, ref_date=None, calendar=None,
ignore_pickup=True, geometry='Cartesian'):
"""iters: list of iteration numbers
deltaT: timestep
prefix: list of file prefixes (if None use all)
"""
assert geometry in _valid_geometry
self.geometry = geometry
# the directory where the files live
self.dirname = dirname
# storage dicts for variables and attributes
self._variables = OrderedDict()
self._attributes = OrderedDict()
self._dimensions = []
### figure out the mapping between diagnostics names and variable properties
# all possible diagnostics
diag_meta = _parse_available_diagnostics(
os.path.join(dirname, 'available_diagnostics.log'))
### read grid files
for k in _grid_variables:
if _grid_special_mapping.has_key(k):
fname = _grid_special_mapping[k][0]
sl = _grid_special_mapping[k][1]
else:
fname = k
sl = None
data = None
try:
data = _read_mds(os.path.join(dirname, fname), force_dict=False)
except IOError:
try:
data = _read_mds(os.path.join(dirname, fname.upper()),
force_dict=False)
except IOError:
warnings.warn("Couldn't load grid variable " + k)
if data is not None:
data = data[sl] if sl is not None else data.squeeze()
dims, desc, units = _grid_variables[k]
self._variables[k] = Variable(
dims, MemmapArrayWrapper(data), {'description': desc, 'units': units})
self._dimensions.append(k)
# now get variables from our iters
if iters is not None:
# create iteration array
iterdata = np.asarray(iters)
self._variables['iter'] = Variable(('time',), iterdata,
{'description': 'model timestep number'})
# create time array
timedata = np.asarray(iters)*deltaT
time_attrs = {'description': 'model time'}
if ref_date is not None:
time_attrs['units'] = 'seconds since %s' % ref_date
else:
time_attrs['units'] = 'seconds'
if calendar is not None:
time_attrs['calendar'] = calendar
self._variables['time'] = Variable(
('time',), timedata, time_attrs)
self._dimensions.append('time')
varnames = []
fnames = []
_data_vars = OrderedDict()
# look at first iter to get variable metadata
for f in glob(os.path.join(dirname, '*.%010d.meta' % iters[0])):
if ignore_pickup and re.search('pickup', f):
pass
else:
go = True
if prefix is not None:
bname = os.path.basename(f[:-16])
matches = [bname==p for p in prefix]
if not any(matches):
go = False
if go:
meta = _parse_meta(f)
if meta.has_key('fldList'):
flds = meta['fldList']
[varnames.append(fl) for fl in flds]
else:
varnames.append(meta['basename'])
fnames.append(os.path.join(dirname,meta['basename']))
# read data as dask arrays (should be an option)
vardata = {}
for k in varnames:
vardata[k] = []
for i in iters:
for f in fnames:
try:
data = _read_mds(f, i, force_dict=True)
for k in data.keys():
mwrap = MemmapArrayWrapper(data[k])
vardata[k].append(
da.from_array(mwrap, mwrap.shape))
except IOError:
# couldn't find the variable, remove it from the list
#print 'Removing %s from list (iter %g)' % (k, i)
varnames.remove(k)
# final loop to create Variable objects
for k in varnames:
try:
dims, desc, units = _state_variables[k]
except KeyError:
dims, desc, units = diag_meta[k]
# check for shape compatability
varshape = vardata[k][0].shape
varndims = len(varshape)
if len(dims) != varndims:
warnings.warn("Shape of variable data is not compatible "
"with expected number of dimensions. This "
"can arise if the 'levels' option is used "
"in data.diagnostics. Right now we have no "
"way to infer the level, so the variable is "
"skipped: " + k)
else:
# add time to dimension
dims_time = ('time',) + dims
# wrap variable in dask array
vardask = da.stack([da.from_array(d, varshape) for d in vardata[k]])
self._variables[k] = Variable( dims_time, vardask,
{'description': desc, 'units': units})
self._attributes = {'history': 'Some made up attribute'}
def get_variables(self):
return self._variables
def get_attrs(self):
return self._attributes
def get_dimensions(self):
return self._dimensions
def close(self):
pass
# from MITgcm netCDF grid file
# dimensions:
# Z = 30 ;
# Zp1 = 31 ;
# Zu = 30 ;
# Zl = 30 ;
# X = 25 ;
# Y = 40 ;
# Xp1 = 26 ;
# Yp1 = 41 ;
# variables:
# double Z(Z) ;
# Z:long_name = "vertical coordinate of cell center" ;
# Z:units = "meters" ;
# Z:positive = "up" ;
# double RC(Z) ;
# RC:description = "R coordinate of cell center" ;
# RC:units = "m" ;
# double Zp1(Zp1) ;
# Zp1:long_name = "vertical coordinate of cell interface" ;
# Zp1:units = "meters" ;
# Zp1:positive = "up" ;
# double RF(Zp1) ;
# RF:description = "R coordinate of cell interface" ;
# RF:units = "m" ;
# double Zu(Zu) ;
# Zu:long_name = "vertical coordinate of lower cell interface" ;
# Zu:units = "meters" ;
# Zu:positive = "up" ;
# double RU(Zu) ;
# RU:description = "R coordinate of upper interface" ;
# RU:units = "m" ;
# double Zl(Zl) ;
# Zl:long_name = "vertical coordinate of upper cell interface" ;
# Zl:units = "meters" ;
# Zl:positive = "up" ;
# double RL(Zl) ;
# RL:description = "R coordinate of lower interface" ;
# RL:units = "m" ;
# double drC(Zp1) ;
# drC:description = "r cell center separation" ;
# double drF(Z) ;
# drF:description = "r cell face separation" ;
# double X(X) ;
# X:long_name = "X-coordinate of cell center" ;
# X:units = "meters" ;
# double Y(Y) ;
# Y:long_name = "Y-Coordinate of cell center" ;
# Y:units = "meters" ;
# double XC(Y, X) ;
# XC:description = "X coordinate of cell center (T-P point)" ;
# XC:units = "degree_east" ;
# double YC(Y, X) ;
# YC:description = "Y coordinate of cell center (T-P point)" ;
# YC:units = "degree_north" ;
# double Xp1(Xp1) ;
# Xp1:long_name = "X-Coordinate of cell corner" ;
# Xp1:units = "meters" ;
# double Yp1(Yp1) ;
# Yp1:long_name = "Y-Coordinate of cell corner" ;
# Yp1:units = "meters" ;
# double XG(Yp1, Xp1) ;
# XG:description = "X coordinate of cell corner (Vorticity point)" ;
# XG:units = "degree_east" ;
# double YG(Yp1, Xp1) ;
# YG:description = "Y coordinate of cell corner (Vorticity point)" ;
# YG:units = "degree_north" ;
# double dxC(Y, Xp1) ;
# dxC:description = "x cell center separation" ;
# double dyC(Yp1, X) ;
# dyC:description = "y cell center separation" ;
# double dxF(Y, X) ;
# dxF:description = "x cell face separation" ;
# double dyF(Y, X) ;
# dyF:description = "y cell face separation" ;
# double dxG(Yp1, X) ;
# dxG:description = "x cell corner separation" ;
# double dyG(Y, Xp1) ;
# dyG:description = "y cell corner separation" ;
# double dxV(Yp1, Xp1) ;
# dxV:description = "x v-velocity separation" ;
# double dyU(Yp1, Xp1) ;
# dyU:description = "y u-velocity separation" ;
# double rA(Y, X) ;
# rA:description = "r-face area at cell center" ;
# double rAw(Y, Xp1) ;
# rAw:description = "r-face area at U point" ;
# double rAs(Yp1, X) ;
# rAs:description = "r-face area at V point" ;
# double rAz(Yp1, Xp1) ;
# rAz:description = "r-face area at cell corner" ;
# double fCori(Y, X) ;
# fCori:description = "Coriolis f at cell center" ;
# double fCoriG(Yp1, Xp1) ;
# fCoriG:description = "Coriolis f at cell corner" ;
# double R_low(Y, X) ;
# R_low:description = "base of fluid in r-units" ;
# double Ro_surf(Y, X) ;
# Ro_surf:description = "surface reference (at rest) position" ;
# double Depth(Y, X) ;
# Depth:description = "fluid thickness in r coordinates (at rest)" ;
# double HFacC(Z, Y, X) ;
# HFacC:description = "vertical fraction of open cell at cell center" ;
# double HFacW(Z, Y, Xp1) ;
# HFacW:description = "vertical fraction of open cell at West face" ;
# double HFacS(Z, Yp1, X) ;
# HFacS:description = "vertical fraction of open cell at South face" ; | mit |
LxMLS/lxmls-toolkit | lxmls/deep_learning/numpy_models/log_linear.py | 1 | 1802 | import numpy as np
from lxmls.deep_learning.utils import (
Model,
glorot_weight_init,
index2onehot,
logsumexp
)
class NumpyLogLinear(Model):
def __init__(self, **config):
# Initialize parameters
weight_shape = (config['input_size'], config['num_classes'])
# after Xavier Glorot et al
self.weight = glorot_weight_init(weight_shape, 'softmax')
self.bias = np.zeros((1, config['num_classes']))
self.learning_rate = config['learning_rate']
def log_forward(self, input=None):
"""Forward pass of the computation graph"""
# Linear transformation
z = np.dot(input, self.weight.T) + self.bias
# Softmax implemented in log domain
log_tilde_z = z - logsumexp(z, axis=1, keepdims=True)
return log_tilde_z
def predict(self, input=None):
"""Most probable class index"""
return np.argmax(np.exp(self.log_forward(input)), axis=1)
def update(self, input=None, output=None):
"""Stochastic Gradient Descent update"""
# Probabilities of each class
class_probabilities = np.exp(self.log_forward(input))
batch_size, num_classes = class_probabilities.shape
# Error derivative at softmax layer
I = index2onehot(output, num_classes)
error = (class_probabilities - I) / batch_size
# Weight gradient
gradient_weight = np.zeros(self.weight.shape)
for l in np.arange(batch_size):
gradient_weight += np.outer(error[l, :], input[l, :])
# Bias gradient
gradient_bias = np.sum(error, axis=0, keepdims=True)
# SGD update
self.weight = self.weight - self.learning_rate * gradient_weight
self.bias = self.bias - self.learning_rate * gradient_bias
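
# A minimal usage sketch (added for illustration, not part of the original
# module). The config keys match those read in __init__ above; the data is
# random, so the model can only fit noise. Shapes follow the conventions in
# log_forward/update (weight behaves as a (num_classes, input_size) array).
if __name__ == "__main__":
    np.random.seed(0)
    model = NumpyLogLinear(input_size=10, num_classes=3, learning_rate=0.05)
    data = np.random.rand(100, 10)               # 100 examples, 10 features
    labels = np.random.randint(0, 3, size=100)   # integer class labels
    for _ in range(200):                         # a few full-batch SGD steps
        model.update(input=data, output=labels)
    print("train accuracy:", np.mean(model.predict(input=data) == labels))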
| mit |
PierreFaniel/openerp-7.0 | stock_landed_costs/__init__.py | 2 | 1191 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2014 Elico Corp (<http://www.elico-corp.com>)
# Alex Duan <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import product
from . import stock
from . import wizard
from . import product_price_history
from . import account_anglo_saxon_pos
from . import purchase
| agpl-3.0 |
vwarg/ubuntutouch-kernel-jfltexx | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
ret += self.origin_tostring()
return ret
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
def __init__(self, arg = []):
self.data = arg
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
| gpl-2.0 |
amjad-twalo/icsisumm | icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/corpus/reader/xmldocs.py | 9 | 1313 | # Natural Language Toolkit: XML Corpus Reader
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Steven Bird <[email protected]>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
"""
Corpus reader for corpora whose documents are xml files.
(note -- not named 'xml' to avoid conflicting w/ standard xml package)
"""
from api import CorpusReader
from util import *
from nltk.internals import deprecated
# Use the c version of ElementTree, which is faster, if possible:
try: from xml.etree import cElementTree as ElementTree
except ImportError: from nltk.etree import ElementTree
class XMLCorpusReader(CorpusReader):
"""
Corpus reader for corpora whose documents are xml files.
"""
def xml(self, files=None):
return concat([ElementTree.parse(filename).getroot()
for filename in self.abspaths(files)])
def raw(self, files=None):
return concat([open(filename).read()
for filename in self.abspaths(files)])
#{ Deprecated since 0.8
@deprecated("Use .raw() or .xml() instead.")
def read(self, items=None, format='xml'):
if format == 'raw': return self.raw(items)
if format == 'xml': return self.xml(items)
raise ValueError('bad format %r' % format)
#}
| gpl-3.0 |
bigzz/ZenKernel_Shamu | Documentation/target/tcm_mod_builder.py | 2358 | 40707 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
import os, sys
import subprocess as sub
import string
import re
import optparse
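
# Example invocation (illustrative; the option parsing lives further down in
# the full script, beyond where this excerpt is truncated):
#
#   ./tcm_mod_builder.py -m tcm_nab5000 -p iSCSI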
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
			process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
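# Example invocation (illustrative; the module name "tcm_nab5000" is just a
# placeholder). main() assumes the script is run from drivers/target/ inside
# a kernel tree, since tcm_dir is derived from os.getcwd() + "/../../":
#
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI
#
# This emits tcm_nab5000_base.h, tcm_nab5000_fabric.c/.h and
# tcm_nab5000_configfs.c plus Makefile and Kconfig fragments under
# drivers/target/tcm_nab5000/, and optionally appends the new module to
# drivers/target/Makefile and drivers/target/Kconfig.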
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 |
Brunux/shityjobs | shityjobs/users/migrations/0001_initial.py | 1 | 2931 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-21 00:17
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| mit |
starrybeam/samba | source4/scripting/bin/gen_hresult.py | 19 | 9170 | #!/usr/bin/env python
#
# Unix SMB/CIFS implementation.
#
# HRESULT Error definitions
#
# Copyright (C) Noel Power <[email protected]> 2014
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys, os.path, io, string
# parsed error data
Errors = []
# error data model
class ErrorDef:
def __init__(self):
self.err_code = ""
self.err_define = None
self.err_string = ""
self.isWinError = False
self.linenum = ""
def escapeString( input ):
output = input.replace('"','\\"')
output = output.replace("\\<","\\\\<")
output = output.replace('\t',"")
return output
def parseErrorDescriptions( input_file, isWinError ):
# read in the data
fileContents = open(input_file,"r")
	count = 0
for line in fileContents:
content = line.strip().split(None,1)
# start new error definition ?
if line.startswith("0x"):
newError = ErrorDef()
newError.err_code = content[0]
# escape the usual suspects
if len(content) > 1:
newError.err_string = escapeString(content[1])
newError.linenum = count
newError.isWinError = isWinError
Errors.append(newError)
else:
if len(Errors) == 0:
print "Error parsing file as line %d"%count
sys.exit()
err = Errors[-1]
if err.err_define == None:
err.err_define = "HRES_" + content[0]
else:
if len(content) > 0:
desc = escapeString(line.strip())
if len(desc):
if err.err_string == "":
err.err_string = desc
else:
err.err_string = err.err_string + " " + desc
count = count + 1
fileContents.close()
print "parsed %d lines generated %d error definitions"%(count,len(Errors))
def write_license(out_file):
out_file.write("/*\n")
out_file.write(" * Unix SMB/CIFS implementation.\n")
out_file.write(" *\n")
out_file.write(" * HRESULT Error definitions\n")
out_file.write(" *\n")
out_file.write(" * Copyright (C) Noel Power <[email protected]> 2014\n")
out_file.write(" *\n")
out_file.write(" * This program is free software; you can redistribute it and/or modify\n")
out_file.write(" * it under the terms of the GNU General Public License as published by\n")
out_file.write(" * the Free Software Foundation; either version 3 of the License, or\n")
out_file.write(" * (at your option) any later version.\n")
out_file.write(" *\n")
out_file.write(" * This program is distributed in the hope that it will be useful,\n")
out_file.write(" * but WITHOUT ANY WARRANTY; without even the implied warranty of\n")
out_file.write(" * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n")
out_file.write(" * GNU General Public License for more details.\n")
out_file.write(" *\n")
out_file.write(" * You should have received a copy of the GNU General Public License\n")
out_file.write(" * along with this program. If not, see <http://www.gnu.org/licenses/>.\n")
out_file.write(" */\n")
out_file.write("\n")
def generateHeaderFile(out_file):
write_license(out_file)
out_file.write("#ifndef _HRESULT_H_\n")
out_file.write("#define _HRESULT_H_\n\n")
macro_magic = "#if defined(HAVE_IMMEDIATE_STRUCTURES)\n"
macro_magic += "typedef struct {uint32_t h;} HRESULT;\n"
macro_magic += "#define HRES_ERROR(x) ((HRESULT) { x })\n"
macro_magic += "#define HRES_ERROR_V(x) ((x).h)\n"
macro_magic += "#else\n"
macro_magic += "typedef uint32_t HRESULT;\n"
macro_magic += "#define HRES_ERROR(x) (x)\n"
macro_magic += "#define HRES_ERROR_V(x) (x)\n"
macro_magic += "#endif\n"
macro_magic += "\n"
macro_magic += "#define HRES_IS_OK(x) (HRES_ERROR_V(x) == 0)\n"
macro_magic += "#define HRES_IS_EQUAL(x,y) (HRES_ERROR_V(x) == HRES_ERROR_V(y))\n"
out_file.write(macro_magic)
out_file.write("\n\n")
out_file.write("/*\n")
out_file.write(" * The following error codes are autogenerated from [MS-ERREF]\n")
out_file.write(" * see http://msdn.microsoft.com/en-us/library/cc704587.aspx\n")
out_file.write(" */\n")
out_file.write("\n")
for err in Errors:
line = "#define {0:49} HRES_ERROR({1})\n".format(err.err_define ,err.err_code)
out_file.write(line)
out_file.write("\nconst char *hresult_errstr_const(HRESULT err_code);\n")
out_file.write("\nconst char *hresult_errstr(HRESULT err_code);\n")
out_file.write("\n#define FACILITY_WIN32 0x0007\n")
out_file.write("#define WIN32_FROM_HRESULT(x) (HRES_ERROR_V(x) == 0 ? HRES_ERROR_V(x) : ~((FACILITY_WIN32 << 16) | 0x80000000) & HRES_ERROR_V(x))\n")
out_file.write("#define HRESULT_IS_LIKELY_WERR(x) ((HRES_ERROR_V(x) & 0xFFFF0000) == 0x80070000)\n")
out_file.write("\n\n\n#endif /*_HRESULT_H_*/")
def generateSourceFile(out_file):
write_license(out_file)
out_file.write("#include \"includes.h\"\n")
out_file.write("#include \"hresult.h\"\n")
out_file.write("/*\n")
out_file.write(" * The following error codes and descriptions are autogenerated from [MS-ERREF]\n")
out_file.write(" * see http://msdn.microsoft.com/en-us/library/cc704587.aspx\n")
out_file.write(" */\n")
out_file.write("\n")
out_file.write("static const struct {\n")
out_file.write(" HRESULT error_code;\n")
out_file.write(" const char *error_str;\n")
out_file.write(" const char *error_message;\n")
out_file.write("} hresult_errs[] = {\n")
for err in Errors:
out_file.write(" {\n")
if err.isWinError:
out_file.write(" HRESULT_FROM_WIN32(%s),\n"%err.err_define)
out_file.write(" \"HRESULT_FROM_WIN32(%s)\",\n"%err.err_define)
else:
out_file.write(" %s,\n"%err.err_define)
out_file.write(" \"%s\",\n"%err.err_define)
out_file.write(" \"%s\"\n"%err.err_string)
out_file.write(" },\n")
out_file.write("};\n")
out_file.write("\n")
out_file.write("const char *hresult_errstr_const(HRESULT err_code)\n")
out_file.write("{\n");
out_file.write(" const char *result = NULL;\n")
out_file.write(" int i;\n")
out_file.write(" for (i = 0; i < ARRAY_SIZE(hresult_errs); ++i) {\n")
out_file.write(" if (HRES_IS_EQUAL(err_code, hresult_errs[i].error_code)) {\n")
out_file.write(" result = hresult_errs[i].error_message;\n")
out_file.write(" break;\n")
out_file.write(" }\n")
out_file.write(" }\n")
out_file.write(" /* convert & check win32 error space? */\n")
out_file.write(" if (result == NULL && HRESULT_IS_LIKELY_WERR(err_code)) {\n")
out_file.write(" WERROR wErr = W_ERROR(WIN32_FROM_HRESULT(err_code));\n")
out_file.write(" result = get_friendly_werror_msg(wErr);\n")
out_file.write(" }\n")
out_file.write(" return result;\n")
out_file.write("};\n")
out_file.write("\n")
out_file.write("const char *hresult_errstr(HRESULT err_code)\n")
out_file.write("{\n");
out_file.write(" static char msg[20];\n")
out_file.write(" int i;\n")
out_file.write("\n")
out_file.write(" for (i = 0; i < ARRAY_SIZE(hresult_errs); i++) {\n")
out_file.write(" if (HRES_IS_EQUAL(err_code, hresult_errs[i].error_code)) {\n")
out_file.write(" return hresult_errs[i].error_str;\n")
out_file.write(" }\n")
out_file.write(" }\n")
out_file.write(" snprintf(msg, sizeof(msg), \"HRES code 0x%08x\", HRES_ERROR_V(err_code));\n")
out_file.write(" return msg;\n")
out_file.write("};\n")
# Very simple script to generate files hresult.c & hresult.h
# The script simply takes a text file as input, format of input file is
# very simple and is just the content of a html table ( such as that found
# in http://msdn.microsoft.com/en-us/library/cc704587.aspx ) copied and
# pasted into a text file
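# For example (an illustrative fragment, not copied from [MS-ERREF]), three
# pasted lines such as:
#
#   0x00000000
#   S_OK
#   The operation completed successfully.
#
# are parsed by parseErrorDescriptions() into err_code, err_define and
# err_string, then emitted as
#   #define HRES_S_OK  HRES_ERROR(0x00000000)
# in hresult.h plus a matching hresult_errs[] entry in hresult.c.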
def main():
input_file1 = None;
filename = "hresult"
headerfile_name = filename + ".h"
sourcefile_name = filename + ".c"
if len(sys.argv) > 1:
input_file1 = sys.argv[1]
else:
print "usage: %s winerrorfile"%(sys.argv[0])
sys.exit()
parseErrorDescriptions(input_file1, False)
out_file = open(headerfile_name,"w")
generateHeaderFile(out_file)
out_file.close()
out_file = open(sourcefile_name,"w")
generateSourceFile(out_file)
if __name__ == '__main__':
main()
| gpl-3.0 |
pkoutsias/SickRage | lib/guessit/language.py | 3 | 11578 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <[email protected]>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import logging
from guessit import u
from guessit.textutils import find_words
from babelfish import Language, Country
import babelfish
from guessit.guess import Guess
__all__ = ['Language', 'UNDETERMINED',
'search_language', 'guess_language']
log = logging.getLogger(__name__)
UNDETERMINED = babelfish.Language('und')
SYN = {('und', None): ['unknown', 'inconnu', 'unk', 'un'],
('ell', None): ['gr', 'greek'],
('spa', None): ['esp', 'español'],
('fra', None): ['français', 'vf', 'vff', 'vfi'],
('swe', None): ['se'],
('por', 'BR'): ['po', 'pb', 'pob', 'br', 'brazilian'],
('cat', None): ['català'],
('ces', None): ['cz'],
('ukr', None): ['ua'],
('zho', None): ['cn'],
('jpn', None): ['jp'],
('hrv', None): ['scr'],
('mul', None): ['multi', 'dl'], # http://scenelingo.wordpress.com/2009/03/24/what-does-dl-mean/
}
class GuessitConverter(babelfish.LanguageReverseConverter):
_with_country_regexp = re.compile('(.*)\((.*)\)')
_with_country_regexp2 = re.compile('(.*)-(.*)')
def __init__(self):
self.guessit_exceptions = {}
for (alpha3, country), synlist in SYN.items():
for syn in synlist:
self.guessit_exceptions[syn.lower()] = (alpha3, country, None)
@property
def codes(self):
return (babelfish.language_converters['alpha3b'].codes |
babelfish.language_converters['alpha2'].codes |
babelfish.language_converters['name'].codes |
babelfish.language_converters['opensubtitles'].codes |
babelfish.country_converters['name'].codes |
frozenset(self.guessit_exceptions.keys()))
@staticmethod
def convert(alpha3, country=None, script=None):
return str(babelfish.Language(alpha3, country, script))
def reverse(self, name):
with_country = (GuessitConverter._with_country_regexp.match(name) or
GuessitConverter._with_country_regexp2.match(name))
name = u(name.lower())
if with_country:
lang = Language.fromguessit(with_country.group(1).strip())
lang.country = babelfish.Country.fromguessit(with_country.group(2).strip())
return lang.alpha3, lang.country.alpha2 if lang.country else None, lang.script or None
# exceptions come first, as they need to override a potential match
# with any of the other guessers
try:
return self.guessit_exceptions[name]
except KeyError:
pass
for conv in [babelfish.Language,
babelfish.Language.fromalpha3b,
babelfish.Language.fromalpha2,
babelfish.Language.fromname,
babelfish.Language.fromopensubtitles]:
try:
c = conv(name)
return c.alpha3, c.country, c.script
except (ValueError, babelfish.LanguageReverseError):
pass
raise babelfish.LanguageReverseError(name)
babelfish.language_converters['guessit'] = GuessitConverter()
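# Hedged usage sketch (the helper below is hypothetical, not part of the
# original module): once the converter is registered under 'guessit',
# babelfish exposes Language.fromguessit, which resolves the SYN aliases
# above before falling back to the standard converters.
def _example_fromguessit():
    lang = Language.fromguessit("br")   # SYN alias for ('por', 'BR')
    return lang.alpha3, lang.country    # ('por', <Country [BR]>)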
COUNTRIES_SYN = {'ES': ['españa'],
'GB': ['UK'],
'BR': ['brazilian', 'bra'],
# FIXME: this one is a bit of a stretch, not sure how to do
# it properly, though...
'MX': ['Latinoamérica', 'latin america']
}
class GuessitCountryConverter(babelfish.CountryReverseConverter):
def __init__(self):
self.guessit_exceptions = {}
for alpha2, synlist in COUNTRIES_SYN.items():
for syn in synlist:
self.guessit_exceptions[syn.lower()] = alpha2
@property
def codes(self):
return (babelfish.country_converters['name'].codes |
frozenset(babelfish.COUNTRIES.values()) |
frozenset(self.guessit_exceptions.keys()))
@staticmethod
def convert(alpha2):
if alpha2 == 'GB':
return 'UK'
return str(Country(alpha2))
def reverse(self, name):
# exceptions come first, as they need to override a potential match
# with any of the other guessers
try:
return self.guessit_exceptions[name.lower()]
except KeyError:
pass
try:
return babelfish.Country(name.upper()).alpha2
except ValueError:
pass
for conv in [babelfish.Country.fromname]:
try:
return conv(name).alpha2
except babelfish.CountryReverseError:
pass
raise babelfish.CountryReverseError(name)
babelfish.country_converters['guessit'] = GuessitCountryConverter()
# list of common words which could be interpreted as languages, but which
# are far too common to be able to say they represent a language in the
# middle of a string (where they most likely carry their commmon meaning)
LNG_COMMON_WORDS = frozenset([
# english words
'is', 'it', 'am', 'mad', 'men', 'man', 'run', 'sin', 'st', 'to',
'no', 'non', 'war', 'min', 'new', 'car', 'day', 'bad', 'bat', 'fan',
'fry', 'cop', 'zen', 'gay', 'fat', 'one', 'cherokee', 'got', 'an', 'as',
'cat', 'her', 'be', 'hat', 'sun', 'may', 'my', 'mr', 'rum', 'pi', 'bb',
'bt', 'tv', 'aw', 'by', 'md', 'mp', 'cd', 'lt', 'gt', 'in', 'ad', 'ice',
'ay', 'at', 'star', 'so',
# french words
'bas', 'de', 'le', 'son', 'ne', 'ca', 'ce', 'et', 'que',
'mal', 'est', 'vol', 'or', 'mon', 'se', 'je', 'tu', 'me',
'ne', 'ma', 'va', 'au',
# japanese words,
'wa', 'ga', 'ao',
# spanish words
'la', 'el', 'del', 'por', 'mar', 'al',
# other
'ind', 'arw', 'ts', 'ii', 'bin', 'chan', 'ss', 'san', 'oss', 'iii',
'vi', 'ben', 'da', 'lt', 'ch', 'sr', 'ps', 'cx',
# new from babelfish
'mkv', 'avi', 'dmd', 'the', 'dis', 'cut', 'stv', 'des', 'dia', 'and',
'cab', 'sub', 'mia', 'rim', 'las', 'une', 'par', 'srt', 'ano', 'toy',
'job', 'gag', 'reel', 'www', 'for', 'ayu', 'csi', 'ren', 'moi', 'sur',
'fer', 'fun', 'two', 'big', 'psy', 'air',
# movie title
'brazil',
# release groups
'bs', # Bosnian
'kz',
# countries
'gt', 'lt', 'im',
# part/pt
'pt'
])
LNG_COMMON_WORDS_STRICT = frozenset(['brazil'])
subtitle_prefixes = ['sub', 'subs', 'st', 'vost', 'subforced', 'fansub', 'hardsub']
subtitle_suffixes = ['subforced', 'fansub', 'hardsub', 'sub', 'subs']
lang_prefixes = ['true']
all_lang_prefixes_suffixes = subtitle_prefixes + subtitle_suffixes + lang_prefixes
def find_possible_languages(string, allowed_languages=None):
"""Find possible languages in the string
:return: list of tuple (property, Language, lang_word, word)
"""
common_words = None
if allowed_languages:
common_words = LNG_COMMON_WORDS_STRICT
else:
common_words = LNG_COMMON_WORDS
words = find_words(string)
valid_words = []
for word in words:
lang_word = word.lower()
key = 'language'
for prefix in subtitle_prefixes:
if lang_word.startswith(prefix):
lang_word = lang_word[len(prefix):]
key = 'subtitleLanguage'
for suffix in subtitle_suffixes:
if lang_word.endswith(suffix):
                # strip the matched suffix from the end of the word
                lang_word = lang_word[:-len(suffix)]
key = 'subtitleLanguage'
for prefix in lang_prefixes:
if lang_word.startswith(prefix):
lang_word = lang_word[len(prefix):]
if lang_word not in common_words and word.lower() not in common_words:
try:
lang = Language.fromguessit(lang_word)
if allowed_languages:
if lang.name.lower() in allowed_languages or lang.alpha2.lower() in allowed_languages or lang.alpha3.lower() in allowed_languages:
valid_words.append((key, lang, lang_word, word))
# Keep language with alpha2 equivalent. Others are probably
# uncommon languages.
elif lang == 'mul' or hasattr(lang, 'alpha2'):
valid_words.append((key, lang, lang_word, word))
except babelfish.Error:
pass
return valid_words
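# Illustrative sketch (hypothetical helper): subtitle prefixes and suffixes
# are stripped before lookup, shifting the matched property from 'language'
# to 'subtitleLanguage'.
def _example_find_possible_languages():
    # "vostfr" -> prefix "vost" stripped -> "fr" resolves to French
    return find_possible_languages("Show.vostfr.avi")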
def search_language(string, allowed_languages=None):
"""Looks for language patterns, and if found return the language object,
its group span and an associated confidence.
    you can specify a list of allowed languages using the allowed_languages
    argument, as in allowed_languages = [ 'fr', 'eng', 'spanish' ]
>>> search_language('movie [en].avi')['language']
<Language [en]>
>>> search_language('the zen fat cat and the gay mad men got a new fan', allowed_languages = ['en', 'fr', 'es'])
"""
if allowed_languages:
allowed_languages = set(Language.fromguessit(lang) for lang in allowed_languages)
confidence = 1.0 # for all of them
for prop, language, lang, word in find_possible_languages(string, allowed_languages):
pos = string.find(word)
end = pos + len(word)
# only allow those languages that have a 2-letter code, those that
# don't are too esoteric and probably false matches
# if language.lang not in lng3_to_lng2:
# continue
# confidence depends on alpha2, alpha3, english name, ...
if len(lang) == 2:
confidence = 0.8
elif len(lang) == 3:
confidence = 0.9
elif prop == 'subtitleLanguage':
confidence = 0.6 # Subtitle prefix found with language
else:
# Note: we could either be really confident that we found a
# language or assume that full language names are too
# common words and lower their confidence accordingly
confidence = 0.3 # going with the low-confidence route here
return Guess({prop: language}, confidence=confidence, input=string, span=(pos, end))
return None
def guess_language(text): # pragma: no cover
"""Guess the language in which a body of text is written.
This uses the external guess-language python module, and will fail and return
Language(Undetermined) if it is not installed.
"""
try:
from guess_language import guessLanguage
return Language.fromguessit(guessLanguage(text))
except ImportError:
log.error('Cannot detect the language of the given text body, missing dependency: guess-language')
log.error('Please install it from PyPI, by doing eg: pip install guess-language')
return UNDETERMINED
| gpl-3.0 |
azumimuo/family-xbmc-addon | script.module.livestreamer/lib/livestreamer/packages/flashmedia/types.py | 42 | 45802 | from .compat import OrderedDict, is_py2, str, bytes, integer_types, string_types
from .util import pack_bytes_into
from collections import namedtuple
from struct import Struct, error as struct_error
from inspect import getargspec
(SCRIPT_DATA_TYPE_NUMBER, SCRIPT_DATA_TYPE_BOOLEAN,
SCRIPT_DATA_TYPE_STRING, SCRIPT_DATA_TYPE_OBJECT,
SCRIPT_DATA_TYPE_RESERVED, SCRIPT_DATA_TYPE_NULL,
SCRIPT_DATA_TYPE_UNDEFINED, SCRIPT_DATA_TYPE_REFERENCE,
SCRIPT_DATA_TYPE_ECMAARRAY, SCRIPT_DATA_TYPE_OBJECTEND,
SCRIPT_DATA_TYPE_STRICTARRAY, SCRIPT_DATA_TYPE_DATE,
SCRIPT_DATA_TYPE_LONGSTRING) = range(13)
SCRIPT_DATA_TYPE_AMF3 = 0x11
(AMF3_TYPE_UNDEFINED, AMF3_TYPE_NULL, AMF3_TYPE_FALSE, AMF3_TYPE_TRUE,
AMF3_TYPE_INTEGER, AMF3_TYPE_DOUBLE, AMF3_TYPE_STRING, AMF3_TYPE_XML_DOC,
AMF3_TYPE_DATE, AMF3_TYPE_ARRAY, AMF3_TYPE_OBJECT, AMF3_TYPE_XML,
AMF3_TYPE_BYTE_ARRAY, AMF3_TYPE_VECTOR_INT, AMF3_TYPE_VECTOR_UINT,
AMF3_TYPE_VECTOR_DOUBLE, AMF3_TYPE_VECTOR_OBJECT, AMF3_TYPE_DICT) = range(0x12)
AMF3_EMPTY_STRING = 0x01
AMF3_DYNAMIC_OBJECT = 0x0b
AMF3_CLOSE_DYNAMIC_OBJECT = 0x01
AMF3_CLOSE_DYNAMIC_ARRAY = 0x01
AMF3_MIN_INTEGER = -268435456
AMF3_MAX_INTEGER = 268435455
class PrimitiveType(Struct):
def __call__(self, *args):
return self.pack(*args)
def read(self, fd):
data = fd.read(self.size)
if len(data) != self.size:
raise IOError("Unable to read required amount of data")
return self.unpack(data)[0]
class PrimitiveClassType(PrimitiveType):
def __init__(self, format, cls):
self.cls = cls
PrimitiveType.__init__(self, format)
def pack(self, val):
return PrimitiveType.pack(self, *val)
def pack_into(self, buf, offset, val):
return PrimitiveType.pack_into(self, buf, offset, *val)
def unpack(self, data):
vals = PrimitiveType.unpack(self, data)
rval = self.cls(*vals)
return (rval,)
def unpack_from(self, buf, offset):
vals = PrimitiveType.unpack_from(self, buf, offset)
rval = self.cls(*vals)
return (rval,)
class DynamicType(object):
def __new__(cls, *args, **kwargs):
return cls.pack(*args, **kwargs)
@classmethod
def size(cls, val):
raise NotImplementedError
@classmethod
def pack(cls, val):
raise NotImplementedError
@classmethod
def pack_into(cls, buf, offset, val):
raise NotImplementedError
@classmethod
def read(cls, fd):
raise NotImplementedError
@classmethod
def unpack_from(cls, buf, offset):
raise NotImplementedError
@classmethod
def unpack(cls, buf):
return cls.unpack_from(buf, 0)
class TwosComplement(PrimitiveType):
def __init__(self, primitive):
self.primitive = primitive
bits = self.primitive.size * 8
self.maxval = 1 << bits
self.midval = self.maxval >> 1
self.upper = self.midval - 1
self.lower = -self.midval
@property
def size(self):
        return self.primitive.size
def pack(self, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.primitive.format,
self.lower, self.upper)
raise struct_error(msg)
if val < 0:
val = val + self.maxval
return self.primitive.pack(val)
def pack_into(self, buf, offset, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.primitive.format,
self.lower, self.upper)
raise struct_error(msg)
if val < 0:
val = val + self.maxval
return self.primitive.pack_into(buf, offset, val)
def unpack(self, data):
val = self.primitive.unpack(data)[0]
if val & self.midval:
val = val - self.maxval
return (val,)
def unpack_from(self, buf, offset):
val = self.primitive.unpack_from(buf, offset)[0]
if val & self.midval:
val = val - self.maxval
return (val,)
class HighLowCombo(PrimitiveType):
def __init__(self, format, highbits, reverse=True):
PrimitiveType.__init__(self, format)
self.highbits = highbits
self.lowmask = (1 << highbits) - 1
self.reverse = reverse
self.lower = 0
self.upper = (1 << (self.size * 8)) - 1
def pack(self, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.format,
self.lower, self.upper)
raise struct_error(msg)
if self.reverse:
high = val >> self.highbits
low = val & self.lowmask
else:
high = val & self.lowmask
low = val >> self.highbits
return PrimitiveType.pack(self, high, low)
def pack_into(self, buf, offset, val):
if val < self.lower or val > self.upper:
msg = "{0} format requires {1} <= number <= {2}".format(self.format,
self.lower, self.upper)
raise struct_error(msg)
if self.reverse:
high = val >> self.highbits
low = val & self.lowmask
else:
high = val & self.lowmask
low = val >> self.highbits
return PrimitiveType.pack_into(self, buf, offset, high, low)
def unpack(self, data):
high, low = PrimitiveType.unpack(self, data)
if self.reverse:
ret = high << self.highbits
ret |= low
else:
ret = high
ret |= low << self.highbits
return (ret,)
def unpack_from(self, buf, offset):
high, low = PrimitiveType.unpack_from(self, buf, offset)
if self.reverse:
ret = high << self.highbits
ret |= low
else:
ret = high
ret |= low << self.highbits
return (ret,)
class FixedPoint(PrimitiveType):
def __init__(self, format, bits):
self.divider = float(1 << bits)
PrimitiveType.__init__(self, format)
def pack(self, val):
val *= self.divider
return PrimitiveType.pack(self, int(val))
def pack_into(self, buf, offset, val):
val *= self.divider
return PrimitiveType.pack_into(self, buf, offset, int(val))
def unpack(self, data):
val = PrimitiveType.unpack(self, data)[0]
val /= self.divider
return (val,)
def unpack_from(self, buf, offset):
val = PrimitiveType.unpack_from(self, buf, offset)[0]
val /= self.divider
return (val,)
class PaddedBytes(PrimitiveType):
def __init__(self, size, padding):
self.padded_size = size
self.padding = bytes(padding, "ascii")
@property
def size(self):
return self.padded_size
def pack(self, val):
rval = bytes(val[:self.size], "ascii")
if len(rval) < self.size:
paddinglen = self.size - len(rval)
rval += self.padding * paddinglen
return rval
def pack_into(self, buf, offset, val):
rval = bytes(val[:self.size], "ascii")
offset = pack_bytes_into(buf, offset, rval)
if len(rval) < self.size:
paddinglen = self.size - len(rval)
offset = pack_bytes_into(buf, offset, self.padding * paddinglen)
def unpack(self, data):
return (str(data.rstrip(self.padding), "ascii"),)
def unpack_from(self, buf, offset):
data = buf[offset:offset + self.padded_size]
return (str(data.rstrip(self.padding), "ascii"),)
""" 8-bit integer """
U8 = PrimitiveType("B")
S8 = PrimitiveType("b")
""" 16-bit integer """
U16BE = PrimitiveType(">H")
S16BE = PrimitiveType(">h")
U16LE = PrimitiveType("<H")
S16LE = PrimitiveType("<h")
""" 24-bit integer """
U24BE = HighLowCombo(">HB", 8, True)
S24BE = TwosComplement(U24BE)
U24LE = HighLowCombo("<HB", 16, False)
S24LE = TwosComplement(U24LE)
""" 32-bit integer """
U32BE = PrimitiveType(">I")
S32BE = PrimitiveType(">i")
U32LE = PrimitiveType("<I")
S32LE = PrimitiveType("<i")
""" 64-bit integer """
U64BE = PrimitiveType(">Q")
U64LE = PrimitiveType("<Q")
""" Fixed point numbers """
U8_8BE = FixedPoint(">H", 8)
S8_8BE = FixedPoint(">h", 8)
U16_16BE = FixedPoint("<I", 16)
S16_16BE = FixedPoint("<i", 16)
U8_8LE = FixedPoint("<H", 8)
S8_8LE = FixedPoint("<h", 8)
U16_16LE = FixedPoint("<I", 16)
S16_16LE = FixedPoint("<i", 16)
DoubleLE = PrimitiveType("<d")
DoubleBE = PrimitiveType(">d")
""" Various types """
FourCC = PaddedBytes(4, " ")
""" Script data types """
ScriptDataNumber = DoubleBE
ScriptDataBoolean = PrimitiveType("?")
class U3264(DynamicType):
@classmethod
def size(cls, val, version):
if version == 1:
return U64BE.size
else:
return U32BE.size
@classmethod
def pack(cls, val, version):
if version == 1:
return U64BE(val)
else:
return U32BE(val)
@classmethod
def pack_into(cls, buf, offset, val, version):
if version == 1:
prim = U64BE
else:
prim = U32BE
prim.pack_into(buf, offset, val)
return offset + prim.size
@classmethod
def read(cls, fd, version):
if version == 1:
return U64BE.read(fd)
else:
return U32BE.read(fd)
@classmethod
def unpack_from(cls, buf, offset, version):
if version == 1:
prim = U64BE
else:
prim = U32BE
rval = prim.unpack_from(buf, offset)
offset += prim.size
return (rval, offset)
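# Hedged note (hypothetical helper): U3264 follows the usual F4V/MP4 box
# convention of widening fields to 64 bits when the box version is 1.
def _example_u3264():
    assert U3264.pack(1, version=0) == U32BE(1)
    assert U3264.pack(1, version=1) == U64BE(1)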
class String(DynamicType):
@classmethod
def size(cls, *args, **kwargs):
return len(cls.pack(*args, **kwargs))
@classmethod
def pack(cls, val, encoding="utf8", errors="ignore"):
rval = val.encode(encoding, errors)
return rval
@classmethod
def pack_into(cls, buf, offset, val,
encoding="utf8", errors="ignore"):
return pack_bytes_into(buf, offset,
val.encode(encoding, errors))
class CString(String):
EndMarker = b"\x00"
@classmethod
def pack(cls, *args, **kwargs):
rval = String.pack(*args, **kwargs)
rval += CString.EndMarker
return rval
@classmethod
def pack_into(cls, buf, offset, *args, **kwargs):
offset = String.pack_into(buf, offset, *args, **kwargs)
U8.pack_into(buf, offset, 0)
return offset + 1
@classmethod
def read(cls, fd, encoding="utf8", errors="ignore"):
rval = b""
while True:
ch = fd.read(1)
if len(ch) == 0 or ch == CString.EndMarker:
break
rval += ch
return rval.decode(encoding, errors)
@classmethod
def unpack_from(cls, buf, offset, encoding="utf8", errors="ignore"):
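        # Assumes a NUL terminator is present in the buffer; find()
        # returning -1 (no terminator) is not handled here.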
end = buf[offset:].find(b"\x00")
rval = buf[offset:offset + end].decode(encoding, errors)
offset += end + 1
return (rval, offset)
class ScriptDataType(object):
__identifier__ = 0
class ScriptDataString(String):
__size_primitive__ = U16BE
@classmethod
def pack(cls, val, *args, **kwargs):
rval = String.pack(val, *args, **kwargs)
size = cls.__size_primitive__(len(rval))
return size + rval
@classmethod
def pack_into(cls, buf, offset, val, *args, **kwargs):
noffset = String.pack_into(buf, offset + cls.__size_primitive__.size,
val, *args, **kwargs)
cls.__size_primitive__.pack_into(buf, offset,
(noffset - offset) - cls.__size_primitive__.size)
return noffset
@classmethod
def read(cls, fd, encoding="utf8", errors="ignore"):
size = cls.__size_primitive__.read(fd)
data = fd.read(size)
return data.decode(encoding, errors)
@classmethod
def unpack_from(cls, buf, offset, encoding="utf8", errors="ignore"):
size = cls.__size_primitive__.unpack_from(buf, offset)[0]
offset += cls.__size_primitive__.size
data = buf[offset:offset + size].decode(encoding, errors)
offset += size
return (data, offset)
class ScriptDataLongString(ScriptDataString):
__size_primitive__ = U32BE
class ScriptDataObjectEnd(Exception):
pass
class ScriptDataObject(OrderedDict, ScriptDataType):
__identifier__ = SCRIPT_DATA_TYPE_OBJECT
@classmethod
def size(cls, val):
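        # 3 trailing bytes: empty-key length prefix (2) + OBJECTEND marker (1)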
size = 3
for key, value in val.items():
size += ScriptDataString.size(key)
size += ScriptDataValue.size(value)
return size
@classmethod
def pack(cls, val):
rval = b""
for key, value in val.items():
rval += ScriptDataString(key)
rval += ScriptDataValue.pack(value)
# Zero length key + object end identifier ends object
rval += ScriptDataString("")
rval += U8(SCRIPT_DATA_TYPE_OBJECTEND)
return rval
@classmethod
def pack_into(cls, buf, offset, val):
for key, value in val.items():
offset = ScriptDataString.pack_into(buf, offset, key)
offset = ScriptDataValue.pack_into(buf, offset, value)
# Zero length key + object end identifier ends object
offset = ScriptDataString.pack_into(buf, offset, "")
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_OBJECTEND)
return offset + U8.size
@classmethod
def read(cls, fd):
rval = cls()
while True:
try:
key = ScriptDataString.read(fd)
value = ScriptDataValue.read(fd)
except ScriptDataObjectEnd:
break
if len(key) == 0:
break
rval[key] = value
return rval
@classmethod
def unpack_from(cls, buf, offset):
rval = cls()
while True:
try:
key, offset = ScriptDataString.unpack_from(buf, offset)
value, offset = ScriptDataValue.unpack_from(buf, offset)
except ScriptDataObjectEnd:
offset += 1
break
if len(key) == 0:
break
rval[key] = value
return (rval, offset)
class ScriptDataECMAArray(ScriptDataObject):
__identifier__ = SCRIPT_DATA_TYPE_ECMAARRAY
@classmethod
def size(cls, val):
return 4 + ScriptDataObject.size(val)
@classmethod
def pack(cls, val):
rval = U32BE(len(val))
rval += ScriptDataObject.pack(val)
return rval
@classmethod
def pack_into(cls, buf, offset, val):
U32BE.pack_into(buf, offset, len(val))
return ScriptDataObject.pack_into(buf, offset + U32BE.size,
val)
@classmethod
def read(cls, fd):
U32BE.read(fd) # Length
val = ScriptDataObject.read(fd)
return cls(val)
@classmethod
def unpack_from(cls, buf, offset):
U32BE.unpack_from(buf, offset) # Length
offset += U32BE.size
val, offset = ScriptDataObject.unpack_from(buf, offset)
return (cls(val), offset)
class ScriptDataStrictArray(DynamicType):
@classmethod
def size(cls, val):
size = 4
for sdval in val:
size += ScriptDataValue.size(sdval)
return size
@classmethod
def pack(cls, val):
rval = U32BE(len(val))
for sdval in val:
rval += ScriptDataValue.pack(sdval)
return rval
@classmethod
def pack_into(cls, buf, offset, val):
U32BE.pack_into(buf, offset, len(val))
offset += U32BE.size
for sdval in val:
offset = ScriptDataValue.pack_into(buf, offset, sdval)
return offset
@classmethod
def read(cls, fd):
length = U32BE.read(fd)
rval = []
for i in range(length):
val = ScriptDataValue.read(fd)
rval.append(val)
return rval
@classmethod
def unpack_from(cls, buf, offset):
length = U32BE.unpack_from(buf, offset)[0]
offset += U32BE.size
rval = []
for i in range(length):
val, offset = ScriptDataValue.unpack_from(buf, offset)
rval.append(val)
return (rval, offset)
ScriptDataDate = namedtuple("ScriptDataDate", ["timestamp", "offset"])
ScriptDataDateStruct = PrimitiveClassType(">dh", ScriptDataDate)
ScriptDataDate.__identifier__ = SCRIPT_DATA_TYPE_DATE
ScriptDataDate.__packer__ = ScriptDataDateStruct
ScriptDataReference = namedtuple("ScriptDataReference", ["reference"])
ScriptDataReferenceStruct = PrimitiveClassType(">H", ScriptDataReference)
ScriptDataReference.__identifier__ = SCRIPT_DATA_TYPE_REFERENCE
ScriptDataReference.__packer__ = ScriptDataReferenceStruct
class ScriptDataValue(DynamicType, ScriptDataType):
# key: identifier, value: unpacker class
PrimitiveReaders = {
SCRIPT_DATA_TYPE_NUMBER: ScriptDataNumber,
SCRIPT_DATA_TYPE_BOOLEAN: ScriptDataBoolean,
SCRIPT_DATA_TYPE_REFERENCE: ScriptDataReferenceStruct,
SCRIPT_DATA_TYPE_DATE: ScriptDataDateStruct,
}
DynamicReaders = {
SCRIPT_DATA_TYPE_STRING: ScriptDataString,
SCRIPT_DATA_TYPE_LONGSTRING: ScriptDataLongString,
SCRIPT_DATA_TYPE_OBJECT: ScriptDataObject,
SCRIPT_DATA_TYPE_ECMAARRAY: ScriptDataECMAArray,
SCRIPT_DATA_TYPE_STRICTARRAY: ScriptDataStrictArray,
}
Readers = PrimitiveReaders.copy()
Readers.update(DynamicReaders)
@classmethod
def size(cls, val):
size = 1
if isinstance(val, bool):
size += ScriptDataBoolean.size
elif isinstance(val, (int, float)):
size += ScriptDataNumber.size
elif isinstance(val, list):
size += ScriptDataStrictArray.size(val)
elif isinstance(val, string_types):
if len(val) > 0xFFFF:
size += ScriptDataLongString.size(val)
else:
size += ScriptDataString.size(val)
elif isinstance(val, ScriptDataType):
cls = type(val)
size += cls.size(val)
elif type(val) in (ScriptDataDate, ScriptDataReference):
cls = type(val)
packer = cls.__packer__
size += packer.size
elif isinstance(val, AMF3ObjectBase):
size += U8.size
size += AMF3Value.size(val)
return size
@classmethod
def pack(cls, val):
rval = b""
if isinstance(val, bool):
rval += U8(SCRIPT_DATA_TYPE_BOOLEAN)
rval += ScriptDataBoolean(val)
elif isinstance(val, (int, float)):
rval += U8(SCRIPT_DATA_TYPE_NUMBER)
rval += ScriptDataNumber(val)
elif isinstance(val, list):
rval += U8(SCRIPT_DATA_TYPE_STRICTARRAY)
rval += ScriptDataStrictArray(val)
elif isinstance(val, string_types):
if len(val) > 0xFFFF:
rval += U8(SCRIPT_DATA_TYPE_LONGSTRING)
rval += ScriptDataLongString(val)
else:
rval += U8(SCRIPT_DATA_TYPE_STRING)
rval += ScriptDataString(val)
elif val is None:
rval += U8(SCRIPT_DATA_TYPE_NULL)
elif isinstance(val, ScriptDataType):
cls = type(val)
rval += U8(cls.__identifier__)
rval += cls.pack(val)
elif type(val) in (ScriptDataDate, ScriptDataReference):
cls = type(val)
packer = cls.__packer__
rval += U8(cls.__identifier__)
rval += packer.pack(val)
elif isinstance(val, AMF3ObjectBase):
rval += U8(SCRIPT_DATA_TYPE_AMF3)
rval += AMF3Value.pack(val)
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return rval
@classmethod
def pack_into(cls, buf, offset, val):
if isinstance(val, bool):
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_BOOLEAN)
offset += U8.size
ScriptDataBoolean.pack_into(buf, offset, val)
offset += ScriptDataBoolean.size
elif isinstance(val, (int, float)):
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_NUMBER)
offset += U8.size
ScriptDataNumber.pack_into(buf, offset, val)
offset += ScriptDataNumber.size
elif isinstance(val, list):
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_STRICTARRAY)
offset += U8.size
offset = ScriptDataStrictArray.pack_into(buf, offset, val)
elif isinstance(val, string_types):
if len(val) > 0xFFFF:
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_LONGSTRING)
offset += U8.size
offset = ScriptDataLongString.pack_into(buf, offset, val)
else:
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_STRING)
offset += U8.size
offset = ScriptDataString.pack_into(buf, offset, val)
elif val is None:
U8.pack_into(buf, offset, SCRIPT_DATA_TYPE_NULL)
elif isinstance(val, ScriptDataType):
cls = type(val)
U8.pack_into(buf, offset, cls.__identifier__)
offset += U8.size
offset = cls.pack_into(buf, offset, val)
elif type(val) in (ScriptDataDate, ScriptDataReference):
cls = type(val)
packer = cls.__packer__
U8.pack_into(buf, offset, cls.__identifier__)
offset += U8.size
packer.pack_into(buf, offset, val)
offset += packer.size
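        # Note: unlike pack(), there is no AMF3ObjectBase branch here, so
        # AMF3 object values fall through to the ValueError below.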
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return offset
@classmethod
def read(cls, fd, marker=None):
if marker is None:
type_ = U8.read(fd)
else:
type_ = marker
if type_ == SCRIPT_DATA_TYPE_AMF3:
return AMF3Value.read(fd)
elif type_ in ScriptDataValue.Readers:
return ScriptDataValue.Readers[type_].read(fd)
elif type_ == SCRIPT_DATA_TYPE_OBJECTEND:
raise ScriptDataObjectEnd
elif (type_ == SCRIPT_DATA_TYPE_NULL or
type_ == SCRIPT_DATA_TYPE_UNDEFINED):
return None
else:
raise IOError("Unhandled script data type: {0}".format(type_))
@classmethod
def unpack_from(cls, buf, offset):
type_ = U8.unpack_from(buf, offset)[0]
offset += U8.size
if type_ in ScriptDataValue.DynamicReaders:
return ScriptDataValue.Readers[type_].unpack_from(buf, offset)
elif type_ in ScriptDataValue.PrimitiveReaders:
reader = ScriptDataValue.PrimitiveReaders[type_]
rval = reader.unpack_from(buf, offset)[0]
offset += reader.size
return (rval, offset)
elif type_ == SCRIPT_DATA_TYPE_OBJECTEND:
raise ScriptDataObjectEnd
elif (type_ == SCRIPT_DATA_TYPE_NULL or
type_ == SCRIPT_DATA_TYPE_UNDEFINED):
return (None, offset)
else:
raise IOError("Unhandled script data type: {0}".format(hex(type_)))
class AMF0Value(ScriptDataValue):
pass
class AMF0String(ScriptDataString):
pass
AMF0Number = ScriptDataNumber
AMF3Double = ScriptDataNumber
class AMF3Type(ScriptDataType):
pass
class AMF3Integer(DynamicType, AMF3Type):
__identifier__ = AMF3_TYPE_INTEGER
@classmethod
def size(cls, val):
val &= 0x1fffffff
if val < 0x80:
return 1
elif val < 0x4000:
return 2
elif val < 0x200000:
return 3
elif val < 0x40000000:
return 4
@classmethod
def pack(cls, val):
size = cls.size(val)
buf = bytearray(size)
offset = cls.pack_into(buf, 0, val)
return bytes(buf[:offset])
@classmethod
def pack_into(cls, buf, offset, val):
val &= 0x1fffffff
if val < 0x80:
buf[offset] = val
offset += 1
elif val < 0x4000:
buf[offset] = (val >> 7 & 0x7f) | 0x80
buf[offset+1] = val & 0x7f
offset += 2
elif val < 0x200000:
buf[offset] = (val >> 14 & 0x7f) | 0x80
buf[offset+1] = (val >> 7 & 0x7f) | 0x80
buf[offset+2] = val & 0x7f
offset += 3
elif val < 0x40000000:
buf[offset] = (val >> 22 & 0x7f) | 0x80
buf[offset+1] = (val >> 15 & 0x7f) | 0x80
buf[offset+2] = (val >> 8 & 0x7f) | 0x80
buf[offset+3] = val & 0xff
offset += 4
return offset
@classmethod
def read(cls, fd):
rval, byte_count = 0, 0
byte = U8.read(fd)
while (byte & 0x80) != 0 and byte_count < 3:
rval <<= 7
rval |= byte & 0x7f
byte = U8.read(fd)
byte_count += 1
if byte_count < 3:
rval <<= 7
rval |= byte & 0x7F
else:
rval <<= 8
rval |= byte & 0xff
if (rval & 0x10000000) != 0:
rval -= 0x20000000
return rval
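# U29 worked example (hypothetical value): 0x3FFF packs to b"\xff\x7f" --
# the first byte carries bits 13..7 with the continuation flag (0x80) set,
# the second carries bits 6..0 with the flag clear. A full 29-bit value
# spends all 8 bits of its fourth byte, and bit 28 acts as the sign bit.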
class AMF3String(String):
@classmethod
def size(cls, val, cache):
data = String.pack(val, "utf8", "ignore")
size = len(data)
if size == 0:
return U8.size
elif val in cache:
index = cache.index(val)
return AMF3Integer.size(index << 1)
else:
cache.append(val)
return AMF3Integer.size(size << 1 | 1) + size
@classmethod
def pack(cls, val, cache):
data = String.pack(val, "utf8", "ignore")
size = len(data)
if size == 0:
return U8(AMF3_EMPTY_STRING)
elif val in cache:
index = cache.index(val)
return AMF3Integer(index << 1)
else:
cache.append(val)
chunks = []
chunks.append(AMF3Integer(size << 1 | 1))
chunks.append(data)
return b"".join(chunks)
@classmethod
def read(cls, fd, cache):
header = AMF3Integer.read(fd)
if (header & 1) == 0:
index = header >> 1
return cache[index]
else:
size = header >> 1
data = fd.read(size)
rval = data.decode("utf8", "ignore")
if len(data) > 0:
cache.append(rval)
return rval
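# AMF3 string table sketch: the first occurrence of a string is written
# inline (header = byte_length << 1 | 1) and appended to the cache; a later
# occurrence is written as just its cache index (header = index << 1).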
class AMF3ObjectBase(object):
__dynamic__ = False
__externalizable__ = False
__members__ = []
_registry = {}
def __init__(self, *args, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __repr__(self):
return "<{0} {1!r}".format(self.__class__.__name__, self.__dict__)
@classmethod
def register(cls, name):
def deco(amfcls):
amfcls.__name__ = name
if not amfcls.__members__:
amfcls.__members__ = getargspec(amfcls.__init__).args[1:]
cls._registry[name] = amfcls
return amfcls
return deco
@classmethod
def lookup(cls, name):
return cls._registry.get(name, None)
@classmethod
def create(cls, name, externalizable, dynamic, members):
if is_py2:
name = name.encode("utf8")
amfcls = type(name, (cls,), {})
amfcls.__externalizable__ = externalizable
amfcls.__members__ = members
return amfcls
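# Registration sketch (hypothetical class name):
#
#   @AMF3ObjectBase.register("com.example.Point")
#   class Point(AMF3ObjectBase):
#       def __init__(self, x=0, y=0):
#           self.x, self.y = x, y
#
# lookup("com.example.Point") then resolves to Point when decoding traits,
# with __members__ inferred from the __init__ arguments.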
class AMF3Object(OrderedDict, AMF3ObjectBase):
__dynamic__ = True
class AMF3ObjectPacker(DynamicType, AMF3Type):
__identifier__ = AMF3_TYPE_OBJECT
@classmethod
def size(cls, val, str_cache, object_cache, traits_cache):
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer.size(index << 1)
else:
object_cache.append(val)
size = 0
traits = type(val)
if traits in traits_cache:
index = traits_cache.index(traits)
size += AMF3Integer.size(index << 2 | 0x01)
else:
header = 0x03
if traits.__dynamic__:
header |= 0x02 << 2
if traits.__externalizable__:
header |= 0x01 << 2
header |= (len(traits.__members__)) << 4
size += AMF3Integer.size(header)
if isinstance(val, AMF3Object):
size += U8.size
else:
size += AMF3String.size(traits.__name__, cache=str_cache)
traits_cache.append(traits)
for member in traits.__members__:
size += AMF3String.size(member, cache=str_cache)
for member in traits.__members__:
value = getattr(val, member)
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
if traits.__dynamic__:
if isinstance(val, AMF3Object):
iterator = val.items()
else:
iterator = val.__dict__.items()
for key, value in iterator:
if key in traits.__members__:
continue
size += AMF3String.size(key, cache=str_cache)
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
size += U8.size
return size
@classmethod
def pack(cls, val, str_cache, object_cache, traits_cache):
chunks = []
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer(index << 1)
else:
object_cache.append(val)
chunks = []
traits = type(val)
if traits in traits_cache:
index = traits_cache.index(traits)
chunks.append(AMF3Integer(index << 2 | 0x01))
else:
header = 0x03
if traits.__dynamic__:
header |= 0x02 << 2
if traits.__externalizable__:
header |= 0x01 << 2
header |= (len(traits.__members__)) << 4
chunks.append(AMF3Integer(header))
if isinstance(val, AMF3Object):
chunks.append(U8(AMF3_EMPTY_STRING))
else:
chunks.append(AMF3String(traits.__name__, cache=str_cache))
traits_cache.append(traits)
for member in traits.__members__:
chunks.append(AMF3String(member, cache=str_cache))
for member in traits.__members__:
value = getattr(val, member)
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(value)
if traits.__dynamic__:
if isinstance(val, AMF3Object):
iterator = val.items()
else:
iterator = val.__dict__.items()
for key, value in iterator:
if key in traits.__members__:
continue
key = AMF3String(key, cache=str_cache)
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(key)
chunks.append(value)
# Empty string is end of dynamic values
chunks.append(U8(AMF3_CLOSE_DYNAMIC_ARRAY))
return b"".join(chunks)
@classmethod
def read(cls, fd, str_cache, object_cache, traits_cache):
header = AMF3Integer.read(fd)
obj = None
if (header & 1) == 0:
index = header >> 1
obj = object_cache[index]
else:
header >>= 1
if (header & 1) == 0:
index = header >> 1
traits = traits_cache[index]
else:
externalizable = (header & 2) != 0
dynamic = (header & 4) != 0
members_len = header >> 3
class_name = AMF3String.read(fd, cache=str_cache)
members = []
for i in range(members_len):
member_name = AMF3String.read(fd, cache=str_cache)
members.append(member_name)
if len(class_name) == 0:
traits = AMF3Object
elif AMF3ObjectBase.lookup(class_name):
traits = AMF3ObjectBase.lookup(class_name)
traits.__members__ = members
traits.__dynamic__ = dynamic
traits_cache.append(traits)
else:
traits = AMF3ObjectBase.create(class_name, externalizable,
dynamic, members)
traits_cache.append(traits)
values = OrderedDict()
for member in traits.__members__:
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
values[member] = value
if traits.__dynamic__:
key = AMF3String.read(fd, cache=str_cache)
while len(key) > 0:
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
values[key] = value
key = AMF3String.read(fd, cache=str_cache)
if traits == AMF3Object:
obj = traits(values)
else:
obj = traits(**values)
return obj
class AMF3Array(OrderedDict):
def __init__(self, *args, **kwargs):
if args and isinstance(args[0], list):
OrderedDict.__init__(self, **kwargs)
for i, value in enumerate(args[0]):
self[i] = value
else:
OrderedDict.__init__(self, *args, **kwargs)
def dense_keys(self):
dense_keys = []
for i in range(len(self)):
if i in self:
dense_keys.append(i)
return dense_keys
def dense_values(self):
for key in self.dense_keys():
yield self[key]
class AMF3ArrayPacker(DynamicType, AMF3Type):
__identifier__ = AMF3_TYPE_ARRAY
@classmethod
def size(cls, val, str_cache, object_cache, traits_cache):
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer.size(index << 1)
else:
object_cache.append(val)
size = 0
if isinstance(val, AMF3Array):
dense_keys = val.dense_keys()
length = len(dense_keys)
else:
length = len(val)
dense_keys = list(range(length))
header = length << 1 | 1
size += AMF3Integer.size(header)
if isinstance(val, AMF3Array):
for key, value in val.items():
if key in dense_keys:
continue
size += AMF3String.size(key, cache=str_cache)
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
size += U8.size
for key in dense_keys:
value = val[key]
size += AMF3Value.size(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
return size
@classmethod
def pack(cls, val, str_cache, object_cache, traits_cache):
if val in object_cache:
index = object_cache.index(val)
return AMF3Integer(index << 1)
else:
object_cache.append(val)
chunks = []
if isinstance(val, AMF3Array):
dense_keys = val.dense_keys()
length = len(dense_keys)
else:
length = len(val)
dense_keys = list(range(length))
header = length << 1 | 1
chunks.append(AMF3Integer(header))
if isinstance(val, AMF3Array):
for key, value in val.items():
if key in dense_keys:
continue
chunks.append(AMF3String(key, cache=str_cache))
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(value)
# Empty string is end of dynamic values
chunks.append(U8(AMF3_CLOSE_DYNAMIC_ARRAY))
for key in dense_keys:
value = val[key]
value = AMF3Value.pack(value, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
chunks.append(value)
return b"".join(chunks)
@classmethod
def read(cls, fd, str_cache, object_cache, traits_cache):
header = AMF3Integer.read(fd)
obj = None
if (header & 1) == 0:
index = header >> 1
obj = object_cache[index]
else:
header >>= 1
obj = AMF3Array()
object_cache.append(obj)
key = AMF3String.read(fd, cache=str_cache)
while len(key) > 0:
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
obj[key] = value
key = AMF3String.read(fd, cache=str_cache)
for i in range(header):
value = AMF3Value.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
obj[i] = value
return obj
class AMF3Date(object):
def __init__(self, time):
self.time = time
class AMF3DatePacker(DynamicType, AMF3Type):
    __identifier__ = AMF3_TYPE_DATE
@classmethod
def size(cls, val, cache):
if val in cache:
index = cache.index(val)
return AMF3Integer.size(index << 1)
else:
cache.append(val)
return AMF3Double.size + U8.size
@classmethod
def pack(cls, val, cache):
if val in cache:
index = cache.index(val)
return AMF3Integer(index << 1)
else:
cache.append(val)
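            # AMF3_TYPE_NULL is 0x01, which doubles as the U29 header value 1
            # (low bit set = literal date rather than a cache reference).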
chunks = [U8(AMF3_TYPE_NULL),
AMF3Double(val.time)]
return b"".join(chunks)
@classmethod
def read(cls, fd, cache):
header = AMF3Integer.read(fd)
if (header & 1) == 0:
index = header >> 1
return cache[index]
else:
time = AMF3Double.read(fd)
date = AMF3Date(time)
cache.append(date)
return date
class AMF3Value(DynamicType):
PrimitiveReaders = {
AMF3_TYPE_DOUBLE: AMF3Double,
}
DynamicReaders = {
AMF3_TYPE_INTEGER: AMF3Integer,
}
Readers = PrimitiveReaders.copy()
Readers.update(DynamicReaders)
@classmethod
def size(cls, val, str_cache=None, object_cache=None, traits_cache=None):
if str_cache is None:
str_cache = []
if object_cache is None:
object_cache = []
if traits_cache is None:
traits_cache = []
size = U8.size
if isinstance(val, bool) and val in (False, True):
pass
elif val is None:
pass
elif isinstance(val, integer_types):
if val < AMF3_MIN_INTEGER or val > AMF3_MAX_INTEGER:
size += AMF3Double.size
else:
size += AMF3Integer.size(val)
elif isinstance(val, float):
size += AMF3Double.size
elif isinstance(val, (AMF3Array, list)):
size += AMF3ArrayPacker.size(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
elif isinstance(val, string_types):
size += AMF3String.size(val, cache=str_cache)
elif isinstance(val, AMF3ObjectBase):
size += AMF3ObjectPacker.size(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
elif isinstance(val, AMF3Date):
size += AMF3DatePacker.size(val, cache=object_cache)
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return size
@classmethod
def pack(cls, val, str_cache=None, object_cache=None, traits_cache=None):
if str_cache is None:
str_cache = []
if object_cache is None:
object_cache = []
if traits_cache is None:
traits_cache = []
chunks = []
if isinstance(val, bool):
if val is False:
chunks.append(U8(AMF3_TYPE_FALSE))
elif val is True:
chunks.append(U8(AMF3_TYPE_TRUE))
elif val is None:
chunks.append(U8(AMF3_TYPE_NULL))
elif isinstance(val, integer_types):
if val < AMF3_MIN_INTEGER or val > AMF3_MAX_INTEGER:
chunks.append(U8(AMF3_TYPE_DOUBLE))
chunks.append(AMF3Double(val))
else:
chunks.append(U8(AMF3_TYPE_INTEGER))
chunks.append(AMF3Integer(val))
elif isinstance(val, float):
chunks.append(U8(AMF3_TYPE_DOUBLE))
chunks.append(AMF3Double(val))
elif isinstance(val, (AMF3Array, list)):
chunks.append(U8(AMF3_TYPE_ARRAY))
chunks.append(AMF3ArrayPacker.pack(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache))
elif isinstance(val, string_types):
chunks.append(U8(AMF3_TYPE_STRING))
chunks.append(AMF3String.pack(val, cache=str_cache))
elif isinstance(val, AMF3ObjectBase):
chunks.append(U8(AMF3_TYPE_OBJECT))
chunks.append(AMF3ObjectPacker.pack(val, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache))
elif isinstance(val, AMF3Date):
chunks.append(U8(AMF3_TYPE_DATE))
chunks.append(AMF3DatePacker.pack(val, cache=object_cache))
else:
raise ValueError("Unable to pack value of type {0}".format(type(val)))
return b"".join(chunks)
@classmethod
def read(cls, fd, str_cache=None, object_cache=None, traits_cache=None):
type_ = U8.read(fd)
if str_cache is None:
str_cache = []
if object_cache is None:
object_cache = []
if traits_cache is None:
traits_cache = []
if type_ == AMF3_TYPE_UNDEFINED or type_ == AMF3_TYPE_NULL:
return None
elif type_ == AMF3_TYPE_FALSE:
return False
elif type_ == AMF3_TYPE_TRUE:
return True
elif type_ == AMF3_TYPE_STRING:
return AMF3String.read(fd, cache=str_cache)
elif type_ == AMF3_TYPE_ARRAY:
return AMF3ArrayPacker.read(fd, str_cache=str_cache,
object_cache=object_cache,
traits_cache=traits_cache)
elif type_ == AMF3_TYPE_OBJECT:
return AMF3ObjectPacker.read(fd, str_cache=str_cache, object_cache=object_cache,
traits_cache=traits_cache)
elif type_ == AMF3_TYPE_DATE:
return AMF3DatePacker.read(fd, cache=object_cache)
elif type_ in cls.Readers:
return cls.Readers[type_].read(fd)
else:
raise IOError("Unhandled AMF3 type: {0}".format(hex(type_)))
| gpl-2.0 |
40223240/cadb_g3_0420 | static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_break.py | 785 | 8138 | import gc
import io
import os
import sys
import signal
import weakref
import unittest
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform == "win32", "Test cannot run on Windows")
@unittest.skipIf(sys.platform == 'freebsd6', "Test kills regrtest on freebsd6 "
"if threads have been used")
class TestBreak(unittest.TestCase):
def setUp(self):
self._default_handler = signal.getsignal(signal.SIGINT)
def tearDown(self):
signal.signal(signal.SIGINT, self._default_handler)
unittest.signals._results = weakref.WeakKeyDictionary()
unittest.signals._interrupt_handler = None
def testInstallHandler(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(unittest.signals._interrupt_handler.called)
def testRegisterResult(self):
result = unittest.TestResult()
unittest.registerResult(result)
for ref in unittest.signals._results:
if ref is result:
break
elif ref is not result:
self.fail("odd object in result set")
else:
self.fail("result not found")
def testInterruptCaught(self):
default_handler = signal.getsignal(signal.SIGINT)
result = unittest.TestResult()
unittest.installHandler()
unittest.registerResult(result)
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
result.breakCaught = True
self.assertTrue(result.shouldStop)
try:
test(result)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(result.breakCaught)
def testSecondInterrupt(self):
result = unittest.TestResult()
unittest.installHandler()
unittest.registerResult(result)
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
result.breakCaught = True
self.assertTrue(result.shouldStop)
os.kill(pid, signal.SIGINT)
self.fail("Second KeyboardInterrupt not raised")
try:
test(result)
except KeyboardInterrupt:
pass
else:
self.fail("Second KeyboardInterrupt not raised")
self.assertTrue(result.breakCaught)
def testTwoResults(self):
unittest.installHandler()
result = unittest.TestResult()
unittest.registerResult(result)
new_handler = signal.getsignal(signal.SIGINT)
result2 = unittest.TestResult()
unittest.registerResult(result2)
self.assertEqual(signal.getsignal(signal.SIGINT), new_handler)
result3 = unittest.TestResult()
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
try:
test(result)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(result.shouldStop)
self.assertTrue(result2.shouldStop)
self.assertFalse(result3.shouldStop)
def testHandlerReplacedButCalled(self):
# If our handler has been replaced (is no longer installed) but is
# called by the *new* handler, then it isn't safe to delay the
# SIGINT and we should immediately delegate to the default handler
unittest.installHandler()
handler = signal.getsignal(signal.SIGINT)
def new_handler(frame, signum):
handler(frame, signum)
signal.signal(signal.SIGINT, new_handler)
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
pass
else:
self.fail("replaced but delegated handler doesn't raise interrupt")
def testRunner(self):
# Creating a TextTestRunner with the appropriate argument should
# register the TextTestResult it creates
runner = unittest.TextTestRunner(stream=io.StringIO())
result = runner.run(unittest.TestSuite())
self.assertIn(result, unittest.signals._results)
def testWeakReferences(self):
# Calling registerResult on a result should not keep it alive
result = unittest.TestResult()
unittest.registerResult(result)
ref = weakref.ref(result)
del result
# For non-reference counting implementations
gc.collect();gc.collect()
self.assertIsNone(ref())
def testRemoveResult(self):
result = unittest.TestResult()
unittest.registerResult(result)
unittest.installHandler()
self.assertTrue(unittest.removeResult(result))
# Should this raise an error instead?
self.assertFalse(unittest.removeResult(unittest.TestResult()))
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
pass
self.assertFalse(result.shouldStop)
def testMainInstallsHandler(self):
failfast = object()
test = object()
verbosity = object()
result = object()
default_handler = signal.getsignal(signal.SIGINT)
class FakeRunner(object):
initArgs = []
runArgs = []
def __init__(self, *args, **kwargs):
self.initArgs.append((args, kwargs))
def run(self, test):
self.runArgs.append(test)
return result
class Program(unittest.TestProgram):
def __init__(self, catchbreak):
self.exit = False
self.verbosity = verbosity
self.failfast = failfast
self.catchbreak = catchbreak
self.testRunner = FakeRunner
self.test = test
self.result = None
p = Program(False)
p.runTests()
self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
'verbosity': verbosity,
'failfast': failfast,
'warnings': None})])
self.assertEqual(FakeRunner.runArgs, [test])
self.assertEqual(p.result, result)
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
FakeRunner.initArgs = []
FakeRunner.runArgs = []
p = Program(True)
p.runTests()
self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
'verbosity': verbosity,
'failfast': failfast,
'warnings': None})])
self.assertEqual(FakeRunner.runArgs, [test])
self.assertEqual(p.result, result)
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
def testRemoveHandler(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
unittest.removeHandler()
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
# check that calling removeHandler multiple times has no ill-effect
unittest.removeHandler()
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
def testRemoveHandlerAsDecorator(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
@unittest.removeHandler
def test():
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
test()
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
| gpl-3.0 |
halberom/ansible | lib/ansible/plugins/shell/fish.py | 45 | 4770 | # (c) 2014, Chris Church <[email protected]>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.shell.sh import ShellModule as ShModule
from ansible.compat.six import text_type
from ansible.compat.six.moves import shlex_quote
class ShellModule(ShModule):
# Common shell filenames that this plugin handles
COMPATIBLE_SHELLS = frozenset(('fish',))
# Family of shells this has. Must match the filename without extension
SHELL_FAMILY = 'fish'
_SHELL_EMBEDDED_PY_EOL = '\n'
_SHELL_REDIRECT_ALLNULL = '> /dev/null 2>&1'
_SHELL_AND = '; and'
_SHELL_OR = '; or'
_SHELL_SUB_LEFT = '('
_SHELL_SUB_RIGHT = ')'
_SHELL_GROUP_LEFT = ''
_SHELL_GROUP_RIGHT = ''
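    # env_prefix builds fish-style exports; e.g. (hypothetical variable)
    # env_prefix(FOO="bar baz") yields "set -lx FOO 'bar baz';".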
def env_prefix(self, **kwargs):
env = self.env.copy()
env.update(kwargs)
return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k,v in env.items()])
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
# don't quote the cmd if it's an empty string, because this will break pipelining mode
if cmd.strip() != '':
cmd = shlex_quote(cmd)
cmd_parts = [env_string.strip(), shebang.replace("#!", "").strip(), cmd]
if arg_path is not None:
cmd_parts.append(arg_path)
new_cmd = " ".join(cmd_parts)
if rm_tmp:
new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
return new_cmd
def checksum(self, path, python_interp):
# The following test is fish-compliant.
#
# In the following test, each condition is a check and logical
# comparison (or or and) that sets the rc value. Every check is run so
# the last check in the series to fail will be the rc that is
# returned.
#
# If a check fails we error before invoking the hash functions because
# hash functions may successfully take the hash of a directory on BSDs
# (UFS filesystem?) which is not what the rest of the ansible code
# expects
#
# If all of the available hashing methods fail we fail with an rc of
# 0. This logic is added to the end of the cmd at the bottom of this
# function.
# Return codes:
# checksum: success!
# 0: Unknown error
# 1: Remote file does not exist
# 2: No read permissions on the file
# 3: File is a directory
# 4: No python interpreter
# Quoting gets complex here. We're writing a python string that's
# used by a variety of shells on the remote host to invoke a python
# "one-liner".
shell_escaped_path = shlex_quote(path)
test = "set rc flag; [ -r %(p)s ] %(shell_or)s set rc 2; [ -f %(p)s ] %(shell_or)s set rc 1; [ -d %(p)s ] %(shell_and)s set rc 3; %(i)s -V 2>/dev/null %(shell_or)s set rc 4; [ x\"$rc\" != \"xflag\" ] %(shell_and)s echo \"$rc \"%(p)s %(shell_and)s exit 0" % dict(p=shell_escaped_path, i=python_interp, shell_and=self._SHELL_AND, shell_or=self._SHELL_OR)
csums = [
u"({0} -c 'import hashlib; BLOCKSIZE = 65536; hasher = hashlib.sha1();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # Python > 2.4 (including python3)
u"({0} -c 'import sha; BLOCKSIZE = 65536; hasher = sha.sha();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL), # Python == 2.4
]
cmd = (" %s " % self._SHELL_OR).join(csums)
cmd = "%s; %s %s (echo \'0 \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path)
return cmd
| gpl-3.0 |
rbunge/CI-ArduPlane | Tools/autotest/dump_logs.py | 229 | 1762 | #!/usr/bin/env python
# dump flash logs from SITL
# Andrew Tridgell, April 2013
import pexpect, os, sys, shutil, atexit
import optparse, fnmatch, time, glob, traceback, signal
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pysim'))
import util
############## main program #############
parser = optparse.OptionParser(sys.argv[0])
parser.add_option("--cli", action='store_true', default=False, help='put us in the CLI menu in logs')
opts, args = parser.parse_args()
os.environ['PYTHONUNBUFFERED'] = '1'
def dump_logs(atype):
'''dump DataFlash logs'''
logfile = '%s.log' % atype
print("Dumping logs for %s to %s" % (atype, logfile))
sil = util.start_SIL(atype)
log = open(logfile, mode='w')
mavproxy = util.start_MAVProxy_SIL(atype, setup=True, logfile=log)
mavproxy.send('\n\n\n')
print("navigating menus")
mavproxy.expect(']')
mavproxy.send("logs\n")
if opts.cli:
mavproxy.interact()
return
mavproxy.expect("logs enabled:")
lognums = []
i = mavproxy.expect(["No logs", "(\d+) logs"])
if i == 0:
numlogs = 0
else:
numlogs = int(mavproxy.match.group(1))
for i in range(numlogs):
mavproxy.expect("Log (\d+)")
lognums.append(int(mavproxy.match.group(1)))
mavproxy.expect("Log]")
for i in range(numlogs):
print("Dumping log %u (i=%u)" % (lognums[i], i))
mavproxy.send("dump %u\n" % lognums[i])
mavproxy.expect("logs enabled:", timeout=120)
mavproxy.expect("Log]")
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
log.close()
print("Saved log for %s to %s" % (atype, logfile))
return True
vehicle = os.path.basename(os.getcwd())
dump_logs(vehicle)
| gpl-3.0 |
MERegistro/meregistro | meregistro/apps/titulos/models/CohorteExtensionAulicaSeguimiento.py | 1 | 1280 | # -*- coding: utf-8 -*-
from django.db import models
from apps.titulos.models.CohorteExtensionAulica import CohorteExtensionAulica
import datetime
"Seguimiento de cada cohorte de la extensión áulica"
class CohorteExtensionAulicaSeguimiento(models.Model):
cohorte_extension_aulica = models.ForeignKey(CohorteExtensionAulica, related_name='seguimiento')
anio = models.PositiveIntegerField()
solo_cursan_nuevas_unidades = models.PositiveIntegerField()
solo_recursan_nuevas_unidades = models.PositiveIntegerField()
recursan_cursan_nuevas_unidades = models.PositiveIntegerField()
no_cursan = models.PositiveIntegerField()
egresados = models.PositiveIntegerField()
observaciones = models.CharField(max_length=255, null=True, blank=True)
class Meta:
app_label = 'titulos'
ordering = ['cohorte_extension_aulica__cohorte__anio', 'anio']
db_table = 'titulos_cohorte_extension_aulica_seguimiento'
        unique_together = ('cohorte_extension_aulica', 'anio')  # -> doesn't work, validated by hand
def __unicode__(self):
return str(self.anio)
"Sobreescribo el init para agregarle propiedades"
def __init__(self, *args, **kwargs):
super(CohorteExtensionAulicaSeguimiento, self).__init__(*args, **kwargs)
| bsd-3-clause |
Manexware/medical | oemedical/oemedical_invoice/wizard/wizard_appointment_invoice.py | 1 | 5003 |
import logging
from openerp.osv import osv,fields
from openerp import _
#import pooler
logging.basicConfig(level=logging.DEBUG)
class make_medical_appointment_invoice(osv.osv_memory):
_name="oemedical.appointment.invoice"
def create_invoice(self, cr, uid, ids, context={}):
invoice_obj = self.pool.get('account.invoice')
appointment_obj = self.pool.get('oemedical.appointment')
        apps = context.get('active_ids')
pats = []
for app_id in apps:
pats.append(appointment_obj.browse(cr, uid, app_id).patient_id.id)
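        # Only invoice when every selected appointment belongs to one patient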
if pats.count(pats[0]) == len(pats):
invoice_data={}
for app_id in apps:
appointment = appointment_obj.browse( cr, uid, app_id)
# Check if the appointment is invoice exempt, and stop the invoicing process
if appointment.no_invoice :
raise osv.except_osv(_('UserError'), _('The appointment is invoice exempt'))
if appointment.validity_status=='invoiced':
if len(apps) > 1:
raise osv.except_osv(_('UserError'),_('At least one of the selected appointments is already invoiced'))
else:
raise osv.except_osv(_('UserError'),_('Appointment already invoiced'))
if appointment.validity_status=='no':
if len(apps) > 1:
raise osv.except_osv(_('UserError'),_('At least one of the selected appointments can not be invoiced'))
else:
raise osv.except_osv(_('UserError'),_('You can not invoice this appointment'))
if appointment.patient_id.id:
invoice_data['partner_id'] = appointment.patient_id.id
res = self.pool.get('res.partner').address_get(cr, uid, [appointment.patient_id.id], ['contact', 'invoice'])
invoice_data['address_contact_id'] = res['contact']
invoice_data['address_invoice_id'] = res['invoice']
invoice_data['account_id'] = appointment.patient_id.property_account_receivable.id
invoice_data['fiscal_position'] = appointment.patient_id.property_account_position and appointment.patient_id.property_account_position.id or False
invoice_data['payment_term'] = appointment.patient_id.property_payment_term and appointment.patient_id.property_payment_term.id or False
prods_data = {}
for app_id in apps:
appointment = appointment_obj.browse( cr, uid, app_id)
logging.debug('appointment = %s; appointment.consultations = %s', appointment, appointment.consultations)
if appointment.consultations:
logging.debug('appointment.consultations = %s; appointment.consultations.id = %s', appointment.consultations, appointment.consultations.id)
if prods_data.has_key(appointment.consultations.id):
prods_data[appointment.consultations.id]['quantity'] += 1
else:
a = appointment.consultations.product_tmpl_id.property_account_income.id
if not a:
a = appointment.consultations.categ_id.property_account_income_categ.id
prods_data[appointment.consultations.id] = {'product_id':appointment.consultations.id,
'name':appointment.consultations.name,
'quantity':1,
'account_id':a,
'price_unit':appointment.consultations.lst_price}
else:
raise osv.except_osv(_('UserError'),_('No consultation service is connected with the selected appointments'))
product_lines = []
for prod_id, prod_data in prods_data.items():
product_lines.append((0,0,{'product_id':prod_data['product_id'],
'name':prod_data['name'],
'quantity':prod_data['quantity'],
'account_id':prod_data['account_id'],
'price_unit':prod_data['price_unit']}))
invoice_data['invoice_line'] = product_lines
invoice_id = invoice_obj.create(cr, uid, invoice_data)
appointment_obj.write(cr, uid, apps, {'validity_status':'invoiced'})
return {
'domain': "[('id','=', "+str(invoice_id)+")]",
'name': 'Create invoice',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'account.invoice',
'type': 'ir.actions.act_window'
}
else:
raise osv.except_osv(_('UserError'),_('When multiple appointments are selected, patient must be the same'))
make_medical_appointment_invoice()
| gpl-2.0 |
jadbin/xpaw | tests/test_commands.py | 1 | 1655 | # coding=utf-8
import pytest
from os.path import join
from xpaw.cmdline import main
from xpaw import __version__
def test_print_help(capsys):
with pytest.raises(SystemExit) as excinfo:
main(argv=['xpaw'])
assert excinfo.value.code == 0
out, _ = capsys.readouterr()
assert out.startswith('usage:')
def test_unknown_command(capsys):
with pytest.raises(SystemExit) as excinfo:
main(argv=['xpaw', 'unknown_command'])
assert excinfo.value.code == 2
_, _ = capsys.readouterr()
def test_version(capsys):
main(argv=['xpaw', 'version'])
out, _ = capsys.readouterr()
assert out.strip() == 'xpaw version {}'.format(__version__)
spider_source = """# coding=utf-8
from xpaw import Spider
class NewSpider(Spider):
def start_requests(self):
pass
def parse(self, response):
pass
"""
def test_crawl_spider(tmpdir, capsys):
spider_file = join(str(tmpdir), 'spider.py')
with open(spider_file, 'w') as f:
f.write(spider_source)
main(argv=['xpaw', 'crawl', spider_file])
_, _ = capsys.readouterr()
def test_crawl_spider_no_config_file(tmpdir, capsys):
with pytest.raises(ValueError):
spider_file = join(str(tmpdir), 'spider.py')
with open(spider_file, 'w') as f:
f.write(spider_source)
main(argv=['xpaw', 'crawl', spider_file,
'-c', 'no_such_config.py'])
_, _ = capsys.readouterr()
def test_crawl_no_spider_file(capsys):
with pytest.raises(SystemExit) as excinfo:
main(argv=['xpaw', 'crawl', 'dont_exist.py'])
assert excinfo.value.code == 2
_, _ = capsys.readouterr()
| apache-2.0 |
pschella/scipy | scipy/special/tests/test_spherical_bessel.py | 44 | 13962 | #
# Tests of spherical Bessel functions.
#
import numpy as np
from numpy.testing import (assert_almost_equal, assert_allclose, dec,
assert_array_almost_equal)
from numpy import sin, cos, sinh, cosh, exp, inf, nan, r_, pi
from scipy.special import spherical_jn, spherical_yn, spherical_in, spherical_kn
from scipy.integrate import quad
class TestSphericalJn:
def test_spherical_jn_exact(self):
# http://dlmf.nist.gov/10.49.E3
# Note: exact expression is numerically stable only for small
# n or z >> n.
x = np.array([0.12, 1.23, 12.34, 123.45, 1234.5])
assert_allclose(spherical_jn(2, x),
(-1/x + 3/x**3)*sin(x) - 3/x**2*cos(x))
def test_spherical_jn_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose(spherical_jn(n - 1, x) + spherical_jn(n + 1, x),
(2*n + 1)/x*spherical_jn(n, x))
def test_spherical_jn_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose(spherical_jn(n - 1, x) + spherical_jn(n + 1,x),
(2*n + 1)/x*spherical_jn(n, x))
def test_spherical_jn_inf_real(self):
# http://dlmf.nist.gov/10.52.E3
n = 6
x = np.array([-inf, inf])
assert_allclose(spherical_jn(n, x), np.array([0, 0]))
def test_spherical_jn_inf_complex(self):
# http://dlmf.nist.gov/10.52.E3
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_jn(n, x), np.array([0, 0, inf*(1+1j)]))
def test_spherical_jn_large_arg_1(self):
# https://github.com/scipy/scipy/issues/2165
# Reference value computed using mpmath, via
# besselj(n + mpf(1)/2, z)*sqrt(pi/(2*z))
assert_allclose(spherical_jn(2, 3350.507), -0.00029846226538040747)
def test_spherical_jn_large_arg_2(self):
# https://github.com/scipy/scipy/issues/1641
# Reference value computed using mpmath, via
# besselj(n + mpf(1)/2, z)*sqrt(pi/(2*z))
assert_allclose(spherical_jn(2, 10000), 3.0590002633029811e-05)
def test_spherical_jn_at_zero(self):
# http://dlmf.nist.gov/10.52.E1
# But note that n = 0 is a special case: j0 = sin(x)/x -> 1
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_jn(n, x), np.array([1, 0, 0, 0, 0, 0]))
class TestSphericalYn:
def test_spherical_yn_exact(self):
# http://dlmf.nist.gov/10.49.E5
# Note: exact expression is numerically stable only for small
# n or z >> n.
x = np.array([0.12, 1.23, 12.34, 123.45, 1234.5])
assert_allclose(spherical_yn(2, x),
(1/x - 3/x**3)*cos(x) - 3/x**2*sin(x))
def test_spherical_yn_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose(spherical_yn(n - 1, x) + spherical_yn(n + 1,x),
(2*n + 1)/x*spherical_yn(n, x))
def test_spherical_yn_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose(spherical_yn(n - 1, x) + spherical_yn(n + 1, x),
(2*n + 1)/x*spherical_yn(n, x))
def test_spherical_yn_inf_real(self):
# http://dlmf.nist.gov/10.52.E3
n = 6
x = np.array([-inf, inf])
assert_allclose(spherical_yn(n, x), np.array([0, 0]))
def test_spherical_yn_inf_complex(self):
# http://dlmf.nist.gov/10.52.E3
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_yn(n, x), np.array([0, 0, inf*(1+1j)]))
def test_spherical_yn_at_zero(self):
# http://dlmf.nist.gov/10.52.E2
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_yn(n, x), -inf*np.ones(shape=n.shape))
def test_spherical_yn_at_zero_complex(self):
# Consistently with numpy:
# >>> -np.cos(0)/0
# -inf
# >>> -np.cos(0+0j)/(0+0j)
# (-inf + nan*j)
n = np.array([0, 1, 2, 5, 10, 100])
x = 0 + 0j
assert_allclose(spherical_yn(n, x), nan*np.ones(shape=n.shape))
class TestSphericalJnYnCrossProduct:
def test_spherical_jn_yn_cross_product_1(self):
# http://dlmf.nist.gov/10.50.E3
n = np.array([1, 5, 8])
x = np.array([0.1, 1, 10])
left = (spherical_jn(n + 1, x) * spherical_yn(n, x) -
spherical_jn(n, x) * spherical_yn(n + 1, x))
right = 1/x**2
assert_allclose(left, right)
def test_spherical_jn_yn_cross_product_2(self):
# http://dlmf.nist.gov/10.50.E3
n = np.array([1, 5, 8])
x = np.array([0.1, 1, 10])
left = (spherical_jn(n + 2, x) * spherical_yn(n, x) -
spherical_jn(n, x) * spherical_yn(n + 2, x))
right = (2*n + 3)/x**3
assert_allclose(left, right)
class TestSphericalIn:
def test_spherical_in_exact(self):
# http://dlmf.nist.gov/10.49.E9
x = np.array([0.12, 1.23, 12.34, 123.45])
assert_allclose(spherical_in(2, x),
(1/x + 3/x**3)*sinh(x) - 3/x**2*cosh(x))
def test_spherical_in_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E4
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose(spherical_in(n - 1, x) - spherical_in(n + 1,x),
(2*n + 1)/x*spherical_in(n, x))
def test_spherical_in_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E1
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose(spherical_in(n - 1, x) - spherical_in(n + 1,x),
(2*n + 1)/x*spherical_in(n, x))
def test_spherical_in_inf_real(self):
# http://dlmf.nist.gov/10.52.E3
n = 5
x = np.array([-inf, inf])
assert_allclose(spherical_in(n, x), np.array([-inf, inf]))
def test_spherical_in_inf_complex(self):
# http://dlmf.nist.gov/10.52.E5
# Ideally, i1n(n, 1j*inf) = 0 and i1n(n, (1+1j)*inf) = (1+1j)*inf, but
# this appears impossible to achieve because C99 regards any complex
# value with at least one infinite part as a complex infinity, so
# 1j*inf cannot be distinguished from (1+1j)*inf. Therefore, nan is
# the correct return value.
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_in(n, x), np.array([-inf, inf, nan]))
def test_spherical_in_at_zero(self):
# http://dlmf.nist.gov/10.52.E1
# But note that n = 0 is a special case: i0 = sinh(x)/x -> 1
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_in(n, x), np.array([1, 0, 0, 0, 0, 0]))
class TestSphericalKn:
def test_spherical_kn_exact(self):
# http://dlmf.nist.gov/10.49.E13
x = np.array([0.12, 1.23, 12.34, 123.45])
assert_allclose(spherical_kn(2, x),
pi/2*exp(-x)*(1/x + 3/x**2 + 3/x**3))
def test_spherical_kn_recurrence_real(self):
# http://dlmf.nist.gov/10.51.E4
n = np.array([1, 2, 3, 7, 12])
x = 0.12
assert_allclose((-1)**(n - 1)*spherical_kn(n - 1, x) - (-1)**(n + 1)*spherical_kn(n + 1,x),
(-1)**n*(2*n + 1)/x*spherical_kn(n, x))
def test_spherical_kn_recurrence_complex(self):
# http://dlmf.nist.gov/10.51.E4
n = np.array([1, 2, 3, 7, 12])
x = 1.1 + 1.5j
assert_allclose((-1)**(n - 1)*spherical_kn(n - 1, x) - (-1)**(n + 1)*spherical_kn(n + 1,x),
(-1)**n*(2*n + 1)/x*spherical_kn(n, x))
def test_spherical_kn_inf_real(self):
# http://dlmf.nist.gov/10.52.E6
n = 5
x = np.array([-inf, inf])
assert_allclose(spherical_kn(n, x), np.array([-inf, 0]))
def test_spherical_kn_inf_complex(self):
# http://dlmf.nist.gov/10.52.E6
# The behavior at complex infinity depends on the sign of the real
# part: if Re(z) >= 0, then the limit is 0; if Re(z) < 0, then it's
# z*inf. This distinction cannot be captured, so we return nan.
n = 7
x = np.array([-inf + 0j, inf + 0j, inf*(1+1j)])
assert_allclose(spherical_kn(n, x), np.array([-inf, 0, nan]))
def test_spherical_kn_at_zero(self):
# http://dlmf.nist.gov/10.52.E2
n = np.array([0, 1, 2, 5, 10, 100])
x = 0
assert_allclose(spherical_kn(n, x), inf*np.ones(shape=n.shape))
def test_spherical_kn_at_zero_complex(self):
# http://dlmf.nist.gov/10.52.E2
n = np.array([0, 1, 2, 5, 10, 100])
x = 0 + 0j
assert_allclose(spherical_kn(n, x), nan*np.ones(shape=n.shape))
class SphericalDerivativesTestCase:
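    # Verifies derivative=True via the fundamental theorem of calculus:
    # integrating f'(n, z) over [a, b] must reproduce f(n, b) - f(n, a).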
def fundamental_theorem(self, n, a, b):
integral, tolerance = quad(lambda z: self.df(n, z), a, b)
assert_allclose(integral,
self.f(n, b) - self.f(n, a),
atol=tolerance)
@dec.slow
def test_fundamental_theorem_0(self):
self.fundamental_theorem(0, 3.0, 15.0)
@dec.slow
def test_fundamental_theorem_7(self):
self.fundamental_theorem(7, 0.5, 1.2)
class TestSphericalJnDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_jn(n, z)
def df(self, n, z):
return spherical_jn(n, z, derivative=True)
def test_spherical_jn_d_zero(self):
n = np.array([1, 2, 3, 7, 15])
assert_allclose(spherical_jn(n, 0, derivative=True),
np.zeros(5))
class TestSphericalYnDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_yn(n, z)
def df(self, n, z):
return spherical_yn(n, z, derivative=True)
class TestSphericalInDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_in(n, z)
def df(self, n, z):
return spherical_in(n, z, derivative=True)
def test_spherical_in_d_zero(self):
n = np.array([1, 2, 3, 7, 15])
assert_allclose(spherical_in(n, 0, derivative=True),
np.zeros(5))
class TestSphericalKnDerivatives(SphericalDerivativesTestCase):
def f(self, n, z):
return spherical_kn(n, z)
def df(self, n, z):
return spherical_kn(n, z, derivative=True)
class TestSphericalOld:
# These are tests from the TestSpherical class of test_basic.py,
# rewritten to use spherical_* instead of sph_* but otherwise unchanged.
def test_sph_in(self):
# This test reproduces test_basic.TestSpherical.test_sph_in.
i1n = np.empty((2,2))
x = 0.2
i1n[0][0] = spherical_in(0, x)
i1n[0][1] = spherical_in(1, x)
i1n[1][0] = spherical_in(0, x, derivative=True)
i1n[1][1] = spherical_in(1, x, derivative=True)
inp0 = (i1n[0][1])
inp1 = (i1n[0][0] - 2.0/0.2 * i1n[0][1])
assert_array_almost_equal(i1n[0],np.array([1.0066800127054699381,
0.066933714568029540839]),12)
assert_array_almost_equal(i1n[1],[inp0,inp1],12)
def test_sph_in_kn_order0(self):
x = 1.
sph_i0 = np.empty((2,))
sph_i0[0] = spherical_in(0, x)
sph_i0[1] = spherical_in(0, x, derivative=True)
sph_i0_expected = np.array([np.sinh(x)/x,
np.cosh(x)/x-np.sinh(x)/x**2])
assert_array_almost_equal(r_[sph_i0], sph_i0_expected)
sph_k0 = np.empty((2,))
sph_k0[0] = spherical_kn(0, x)
sph_k0[1] = spherical_kn(0, x, derivative=True)
sph_k0_expected = np.array([0.5*pi*exp(-x)/x,
-0.5*pi*exp(-x)*(1/x+1/x**2)])
assert_array_almost_equal(r_[sph_k0], sph_k0_expected)
def test_sph_jn(self):
s1 = np.empty((2,3))
x = 0.2
s1[0][0] = spherical_jn(0, x)
s1[0][1] = spherical_jn(1, x)
s1[0][2] = spherical_jn(2, x)
s1[1][0] = spherical_jn(0, x, derivative=True)
s1[1][1] = spherical_jn(1, x, derivative=True)
s1[1][2] = spherical_jn(2, x, derivative=True)
s10 = -s1[0][1]
s11 = s1[0][0]-2.0/0.2*s1[0][1]
s12 = s1[0][1]-3.0/0.2*s1[0][2]
assert_array_almost_equal(s1[0],[0.99334665397530607731,
0.066400380670322230863,
0.0026590560795273856680],12)
assert_array_almost_equal(s1[1],[s10,s11,s12],12)
def test_sph_kn(self):
kn = np.empty((2,3))
x = 0.2
kn[0][0] = spherical_kn(0, x)
kn[0][1] = spherical_kn(1, x)
kn[0][2] = spherical_kn(2, x)
kn[1][0] = spherical_kn(0, x, derivative=True)
kn[1][1] = spherical_kn(1, x, derivative=True)
kn[1][2] = spherical_kn(2, x, derivative=True)
kn0 = -kn[0][1]
kn1 = -kn[0][0]-2.0/0.2*kn[0][1]
kn2 = -kn[0][1]-3.0/0.2*kn[0][2]
assert_array_almost_equal(kn[0],[6.4302962978445670140,
38.581777787067402086,
585.15696310385559829],12)
assert_array_almost_equal(kn[1],[kn0,kn1,kn2],9)
def test_sph_yn(self):
sy1 = spherical_yn(2, 0.2)
sy2 = spherical_yn(0, 0.2)
assert_almost_equal(sy1,-377.52483,5) # previous values in the system
assert_almost_equal(sy2,-4.9003329,5)
sphpy = (spherical_yn(0, 0.2) - 2*spherical_yn(2, 0.2))/3
sy3 = spherical_yn(1, 0.2, derivative=True)
assert_almost_equal(sy3,sphpy,4) # compare correct derivative val. (correct =-system val).
| bsd-3-clause |
thinkopensolutions/geraldo | site/newsite/site-geraldo/django/core/serializers/python.py | 14 | 3883 | """
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from django.conf import settings
from django.core.serializers import base
from django.db import models
from django.utils.encoding import smart_unicode
class Serializer(base.Serializer):
"""
Serializes a QuerySet to basic Python objects.
"""
internal_use_only = True
def start_serialization(self):
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
self._current = {}
def end_object(self, obj):
self.objects.append({
"model" : smart_unicode(obj._meta),
"pk" : smart_unicode(obj._get_pk_val(), strings_only=True),
"fields" : self._current
})
self._current = None
def handle_field(self, obj, field):
self._current[field.name] = smart_unicode(getattr(obj, field.name), strings_only=True)
def handle_fk_field(self, obj, field):
related = getattr(obj, field.name)
if related is not None:
if field.rel.field_name == related._meta.pk.name:
# Related to remote object via primary key
related = related._get_pk_val()
else:
# Related to remote object via other field
related = getattr(related, field.rel.field_name)
self._current[field.name] = smart_unicode(related, strings_only=True)
def handle_m2m_field(self, obj, field):
if field.creates_table:
self._current[field.name] = [smart_unicode(related._get_pk_val(), strings_only=True)
for related in getattr(obj, field.name).iterator()]
def getvalue(self):
return self.objects
def Deserializer(object_list, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor
"""
models.get_apps()
for d in object_list:
        # Look up the model and start building a dict of data for it.
Model = _get_model(d["model"])
data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
m2m_data = {}
# Handle each field
for (field_name, field_value) in d["fields"].iteritems():
if isinstance(field_value, str):
field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.rel and isinstance(field.rel, models.ManyToManyRel):
m2m_convert = field.rel.to._meta.pk.to_python
m2m_data[field.name] = [m2m_convert(smart_unicode(pk)) for pk in field_value]
# Handle FK fields
elif field.rel and isinstance(field.rel, models.ManyToOneRel):
if field_value is not None:
data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
else:
data[field.attname] = None
# Handle all other fields
else:
data[field.name] = field.to_python(field_value)
yield base.DeserializedObject(Model(**data), m2m_data)
def _get_model(model_identifier):
"""
Helper to look up a model from an "app_label.module_name" string.
"""
try:
Model = models.get_model(*model_identifier.split("."))
except TypeError:
Model = None
if Model is None:
raise base.DeserializationError(u"Invalid model identifier: '%s'" % model_identifier)
return Model
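# Example round trip (an illustrative sketch, not part of this module): with
# a configured Django project and any installed model ``MyModel``,
#
#     data = Serializer().serialize(MyModel.objects.all())  # list of dicts
#     for obj in Deserializer(data):
#         obj.save()
#
# ``MyModel`` is a placeholder; the serializer itself is model-agnostic.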
| lgpl-3.0 |
kutenai/django | tests/admin_views/models.py | 13 | 25314 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import tempfile
import uuid
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Section(models.Model):
"""
A simple section that links to articles, to test linking to related items
in admin views.
"""
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@property
def name_property(self):
"""
A property that simply returns the name. Used to test #24461
"""
return self.name
@python_2_unicode_compatible
class Article(models.Model):
"""
A simple article to test admin views. Test backwards compatibility.
"""
title = models.CharField(max_length=100)
content = models.TextField()
date = models.DateTimeField()
section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True)
another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+')
sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+')
def __str__(self):
return self.title
def model_year(self):
return self.date.year
model_year.admin_order_field = 'date'
model_year.short_description = ''
def model_year_reversed(self):
return self.date.year
model_year_reversed.admin_order_field = '-date'
model_year_reversed.short_description = ''
@python_2_unicode_compatible
class Book(models.Model):
"""
A simple book that has chapters.
"""
name = models.CharField(max_length=100, verbose_name='¿Name?')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Promo(models.Model):
name = models.CharField(max_length=100, verbose_name='¿Name?')
book = models.ForeignKey(Book, models.CASCADE)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Chapter(models.Model):
title = models.CharField(max_length=100, verbose_name='¿Title?')
content = models.TextField()
book = models.ForeignKey(Book, models.CASCADE)
def __str__(self):
return self.title
class Meta:
# Use a utf-8 bytestring to ensure it works (see #11710)
verbose_name = '¿Chapter?'
@python_2_unicode_compatible
class ChapterXtra1(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
def __str__(self):
return '¿Xtra1: %s' % self.xtra
@python_2_unicode_compatible
class ChapterXtra2(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
def __str__(self):
return '¿Xtra2: %s' % self.xtra
class RowLevelChangePermissionModel(models.Model):
name = models.CharField(max_length=100, blank=True)
class CustomArticle(models.Model):
content = models.TextField()
date = models.DateTimeField()
@python_2_unicode_compatible
class ModelWithStringPrimaryKey(models.Model):
string_pk = models.CharField(max_length=255, primary_key=True)
def __str__(self):
return self.string_pk
def get_absolute_url(self):
return '/dummy/%s/' % self.string_pk
@python_2_unicode_compatible
class Color(models.Model):
value = models.CharField(max_length=10)
warm = models.BooleanField(default=False)
def __str__(self):
return self.value
# we replicate Color to register with another ModelAdmin
class Color2(Color):
class Meta:
proxy = True
@python_2_unicode_compatible
class Thing(models.Model):
title = models.CharField(max_length=20)
color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={'warm': True})
pub_date = models.DateField(blank=True, null=True)
def __str__(self):
return self.title
@python_2_unicode_compatible
class Actor(models.Model):
name = models.CharField(max_length=50)
age = models.IntegerField()
title = models.CharField(max_length=50, null=True, blank=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Inquisition(models.Model):
expected = models.BooleanField(default=False)
leader = models.ForeignKey(Actor, models.CASCADE)
country = models.CharField(max_length=20)
def __str__(self):
return "by %s from %s" % (self.leader, self.country)
@python_2_unicode_compatible
class Sketch(models.Model):
title = models.CharField(max_length=100)
inquisition = models.ForeignKey(
Inquisition,
models.CASCADE,
limit_choices_to={
'leader__name': 'Palin',
'leader__age': 27,
'expected': False,
},
)
defendant0 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': False},
related_name='as_defendant0',
)
defendant1 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': True},
related_name='as_defendant1',
)
def __str__(self):
return self.title
def today_callable_dict():
return {"last_action__gte": datetime.datetime.today()}
def today_callable_q():
return models.Q(last_action__gte=datetime.datetime.today())
@python_2_unicode_compatible
class Character(models.Model):
username = models.CharField(max_length=100)
last_action = models.DateTimeField()
def __str__(self):
return self.username
@python_2_unicode_compatible
class StumpJoke(models.Model):
variation = models.CharField(max_length=100)
most_recently_fooled = models.ForeignKey(
Character,
models.CASCADE,
limit_choices_to=today_callable_dict,
related_name="+",
)
has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+")
def __str__(self):
return self.variation
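# Illustrative note (not one of the fixtures): because limit_choices_to is a
# callable here, Django re-evaluates it on each access, e.g.
#
#     StumpJoke.most_recently_fooled.field.get_limit_choices_to()
#
# returns a fresh dict/Q built from datetime.datetime.today() rather than a
# value frozen at import time.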
class Fabric(models.Model):
NG_CHOICES = (
('Textured', (
('x', 'Horizontal'),
('y', 'Vertical'),
)),
('plain', 'Smooth'),
)
surface = models.CharField(max_length=20, choices=NG_CHOICES)
@python_2_unicode_compatible
class Person(models.Model):
GENDER_CHOICES = (
(1, "Male"),
(2, "Female"),
)
name = models.CharField(max_length=100)
gender = models.IntegerField(choices=GENDER_CHOICES)
age = models.IntegerField(default=21)
alive = models.BooleanField(default=True)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Persona(models.Model):
"""
A simple persona associated with accounts, to test inlining of related
accounts which inherit from a common accounts class.
"""
name = models.CharField(blank=False, max_length=80)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Account(models.Model):
"""
A simple, generic account encapsulating the information shared by all
types of accounts.
"""
username = models.CharField(blank=False, max_length=80)
persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
servicename = 'generic service'
def __str__(self):
return "%s: %s" % (self.servicename, self.username)
class FooAccount(Account):
"""A service-specific account of type Foo."""
servicename = 'foo'
class BarAccount(Account):
"""A service-specific account of type Bar."""
servicename = 'bar'
@python_2_unicode_compatible
class Subscriber(models.Model):
name = models.CharField(blank=False, max_length=80)
email = models.EmailField(blank=False, max_length=175)
def __str__(self):
return "%s (%s)" % (self.name, self.email)
class ExternalSubscriber(Subscriber):
pass
class OldSubscriber(Subscriber):
pass
class Media(models.Model):
name = models.CharField(max_length=60)
class Podcast(Media):
release_date = models.DateField()
class Meta:
ordering = ('release_date',) # overridden in PodcastAdmin
class Vodcast(Media):
media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True)
released = models.BooleanField(default=False)
class Parent(models.Model):
name = models.CharField(max_length=128)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
name = models.CharField(max_length=30, blank=True)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
@python_2_unicode_compatible
class EmptyModel(models.Model):
def __str__(self):
return "Primary key = %s" % self.id
temp_storage = FileSystemStorage(tempfile.mkdtemp())
UPLOAD_TO = os.path.join(temp_storage.location, 'test_upload')
class Gallery(models.Model):
name = models.CharField(max_length=100)
class Picture(models.Model):
name = models.CharField(max_length=100)
image = models.FileField(storage=temp_storage, upload_to='test_upload')
gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures")
class Language(models.Model):
iso = models.CharField(max_length=5, primary_key=True)
name = models.CharField(max_length=50)
english_name = models.CharField(max_length=50)
shortlist = models.BooleanField(default=False)
class Meta:
ordering = ('iso',)
# a base class for Recommender and Recommendation
class Title(models.Model):
pass
class TitleTranslation(models.Model):
title = models.ForeignKey(Title, models.CASCADE)
text = models.CharField(max_length=100)
class Recommender(Title):
pass
class Recommendation(Title):
recommender = models.ForeignKey(Recommender, models.CASCADE)
class Collector(models.Model):
name = models.CharField(max_length=100)
class Widget(models.Model):
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class DooHickey(models.Model):
code = models.CharField(max_length=10, primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Grommet(models.Model):
code = models.AutoField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Whatsit(models.Model):
index = models.IntegerField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Doodad(models.Model):
name = models.CharField(max_length=100)
class FancyDoodad(Doodad):
owner = models.ForeignKey(Collector, models.CASCADE)
expensive = models.BooleanField(default=True)
@python_2_unicode_compatible
class Category(models.Model):
collector = models.ForeignKey(Collector, models.CASCADE)
order = models.PositiveIntegerField()
class Meta:
ordering = ('order',)
def __str__(self):
return '%s:o%s' % (self.id, self.order)
def link_posted_default():
return datetime.date.today() - datetime.timedelta(days=7)
class Link(models.Model):
posted = models.DateField(default=link_posted_default)
url = models.URLField()
post = models.ForeignKey("Post", models.CASCADE)
readonly_link_content = models.TextField()
class PrePopulatedPost(models.Model):
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
slug = models.SlugField()
class PrePopulatedSubPost(models.Model):
post = models.ForeignKey(PrePopulatedPost, models.CASCADE)
subtitle = models.CharField(max_length=100)
subslug = models.SlugField()
class Post(models.Model):
title = models.CharField(max_length=100, help_text="Some help text for the title (with unicode ŠĐĆŽćžšđ)")
content = models.TextField(help_text="Some help text for the content (with unicode ŠĐĆŽćžšđ)")
readonly_content = models.TextField()
posted = models.DateField(
default=datetime.date.today,
help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
)
public = models.NullBooleanField()
def awesomeness_level(self):
return "Very awesome."
# Proxy model to test overridden fields attrs on Post model so as not to
# interfere with other tests.
class FieldOverridePost(Post):
class Meta:
proxy = True
@python_2_unicode_compatible
class Gadget(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Villain(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class SuperVillain(Villain):
pass
@python_2_unicode_compatible
class FunkyTag(models.Model):
"Because we all know there's only one real use case for GFKs."
name = models.CharField(max_length=25)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Plot(models.Model):
name = models.CharField(max_length=100)
team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots')
contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots')
tags = GenericRelation(FunkyTag)
def __str__(self):
return self.name
@python_2_unicode_compatible
class PlotDetails(models.Model):
details = models.CharField(max_length=100)
plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True)
def __str__(self):
return self.details
class PlotProxy(Plot):
class Meta:
proxy = True
@python_2_unicode_compatible
class SecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
villain = models.ForeignKey(Villain, models.CASCADE)
def __str__(self):
return self.location
@python_2_unicode_compatible
class SuperSecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
supervillain = models.ForeignKey(SuperVillain, models.CASCADE)
def __str__(self):
return self.location
@python_2_unicode_compatible
class Bookmark(models.Model):
name = models.CharField(max_length=60)
tag = GenericRelation(FunkyTag, related_query_name='bookmark')
def __str__(self):
return self.name
@python_2_unicode_compatible
class CyclicOne(models.Model):
name = models.CharField(max_length=25)
two = models.ForeignKey('CyclicTwo', models.CASCADE)
def __str__(self):
return self.name
@python_2_unicode_compatible
class CyclicTwo(models.Model):
name = models.CharField(max_length=25)
one = models.ForeignKey(CyclicOne, models.CASCADE)
def __str__(self):
return self.name
class Topping(models.Model):
name = models.CharField(max_length=20)
class Pizza(models.Model):
name = models.CharField(max_length=20)
toppings = models.ManyToManyField('Topping', related_name='pizzas')
class Album(models.Model):
owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True)
title = models.CharField(max_length=30)
class Employee(Person):
code = models.CharField(max_length=20)
class WorkHour(models.Model):
datum = models.DateField()
employee = models.ForeignKey(Employee, models.CASCADE)
class Question(models.Model):
question = models.CharField(max_length=20)
@python_2_unicode_compatible
class Answer(models.Model):
question = models.ForeignKey(Question, models.PROTECT)
answer = models.CharField(max_length=20)
def __str__(self):
return self.answer
class Reservation(models.Model):
start_date = models.DateTimeField()
price = models.IntegerField()
DRIVER_CHOICES = (
('bill', 'Bill G'),
('steve', 'Steve J'),
)
RESTAURANT_CHOICES = (
('indian', 'A Taste of India'),
('thai', 'Thai Pography'),
('pizza', 'Pizza Mama'),
)
class FoodDelivery(models.Model):
reference = models.CharField(max_length=100)
driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True)
restaurant = models.CharField(max_length=100, choices=RESTAURANT_CHOICES, blank=True)
class Meta:
unique_together = (("driver", "restaurant"),)
@python_2_unicode_compatible
class CoverLetter(models.Model):
author = models.CharField(max_length=30)
date_written = models.DateField(null=True, blank=True)
def __str__(self):
return self.author
class Paper(models.Model):
title = models.CharField(max_length=30)
author = models.CharField(max_length=30, blank=True, null=True)
class ShortMessage(models.Model):
content = models.CharField(max_length=140)
timestamp = models.DateTimeField(null=True, blank=True)
@python_2_unicode_compatible
class Telegram(models.Model):
title = models.CharField(max_length=30)
date_sent = models.DateField(null=True, blank=True)
def __str__(self):
return self.title
class Story(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class OtherStory(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class ComplexSortedPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
is_employee = models.NullBooleanField()
class PluggableSearchPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
class PrePopulatedPostLargeSlug(models.Model):
"""
Regression test for #15938: a large max_length for the slugfield must not
be localized in prepopulated_fields_js.html or it might end up breaking
the javascript (ie, using THOUSAND_SEPARATOR ends up with maxLength=1,000)
"""
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
# `db_index=False` because MySQL cannot index large CharField (#21196).
slug = models.SlugField(max_length=1000, db_index=False)
class AdminOrderedField(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedModelMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
def some_order(self):
return self.order
some_order.admin_order_field = 'order'
class AdminOrderedAdminMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedCallable(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
@python_2_unicode_compatible
class Report(models.Model):
title = models.CharField(max_length=100)
def __str__(self):
return self.title
class MainPrepopulated(models.Model):
name = models.CharField(max_length=100)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(blank=True)
slug2 = models.SlugField(blank=True)
slug3 = models.SlugField(blank=True, allow_unicode=True)
class RelatedPrepopulated(models.Model):
parent = models.ForeignKey(MainPrepopulated, models.CASCADE)
name = models.CharField(max_length=75)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(max_length=50)
slug2 = models.SlugField(max_length=60)
class UnorderedObject(models.Model):
"""
Model without any defined `Meta.ordering`.
Refs #16819.
"""
name = models.CharField(max_length=255)
bool = models.BooleanField(default=True)
class UndeletableObject(models.Model):
"""
Model whose show_delete in admin change_view has been disabled
Refs #10057.
"""
name = models.CharField(max_length=255)
class UnchangeableObject(models.Model):
"""
Model whose change_view is disabled in admin
Refs #20640.
"""
class UserMessenger(models.Model):
"""
Dummy class for testing message_user functions on ModelAdmin
"""
class Simple(models.Model):
"""
Simple model with nothing on it for use in testing
"""
class Choice(models.Model):
choice = models.IntegerField(
blank=True, null=True,
choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion')),
)
class ParentWithDependentChildren(models.Model):
"""
Issue #20522
Model where the validation of child foreign-key relationships depends
on validation of the parent
"""
some_required_info = models.PositiveIntegerField()
family_name = models.CharField(max_length=255, blank=False)
class DependentChild(models.Model):
"""
Issue #20522
Model that depends on validation of the parent class for one of its
fields to validate during clean
"""
parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE)
family_name = models.CharField(max_length=255)
class _Manager(models.Manager):
def get_queryset(self):
return super(_Manager, self).get_queryset().filter(pk__gt=1)
class FilteredManager(models.Model):
def __str__(self):
return "PK=%d" % self.pk
pk_gt_1 = _Manager()
objects = models.Manager()
class EmptyModelVisible(models.Model):
""" See ticket #11277. """
class EmptyModelHidden(models.Model):
""" See ticket #11277. """
class EmptyModelMixin(models.Model):
""" See ticket #11277. """
class State(models.Model):
name = models.CharField(max_length=100)
class City(models.Model):
state = models.ForeignKey(State, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Restaurant(models.Model):
city = models.ForeignKey(City, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Worker(models.Model):
work_at = models.ForeignKey(Restaurant, models.CASCADE)
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
# Models for #23329
class ReferencedByParent(models.Model):
name = models.CharField(max_length=20, unique=True)
class ParentWithFK(models.Model):
fk = models.ForeignKey(
ReferencedByParent,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class ChildOfReferer(ParentWithFK):
pass
# Models for #23431
class ReferencedByInline(models.Model):
name = models.CharField(max_length=20, unique=True)
class InlineReference(models.Model):
fk = models.ForeignKey(
ReferencedByInline,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class InlineReferer(models.Model):
refs = models.ManyToManyField(InlineReference)
# Models for #23604 and #23915
class Recipe(models.Model):
rname = models.CharField(max_length=20, unique=True)
class Ingredient(models.Model):
iname = models.CharField(max_length=20, unique=True)
recipes = models.ManyToManyField(Recipe, through='RecipeIngredient')
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname')
recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname')
# Model for #23839
class NotReferenced(models.Model):
# Don't point any FK at this model.
pass
# Models for #23934
class ExplicitlyProvidedPK(models.Model):
name = models.IntegerField(primary_key=True)
class ImplicitlyGeneratedPK(models.Model):
name = models.IntegerField(unique=True)
# Models for #25622
class ReferencedByGenRel(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class GenRelReference(models.Model):
references = GenericRelation(ReferencedByGenRel)
class ParentWithUUIDPK(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
title = models.CharField(max_length=100)
def __str__(self):
return str(self.id)
class RelatedWithUUIDPKModel(models.Model):
parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.SET_NULL, null=True, blank=True)
| bsd-3-clause |
SoftwareKing/zstack-woodpecker | integrationtest/vm/multihosts/volumes/test_volumes_robot_2h_wfairly.py | 4 | 2395 | '''
Robot testing for test volume operations for 2 hours. Will use weight fairly
strategy.
@author: Youyk
'''
import zstackwoodpecker.action_select as action_select
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.header.vm as vm_header
import time
_config_ = {
'timeout' : 9000,
'noparallel' : False
}
test_stub = test_lib.lib_get_test_stub()
test_dict = test_state.TestStateDict()
def test():
test_util.test_dsc('''
    Will do random test operations, including vm create/stop/start/reboot
    /destroy and volume create/attach/detach/delete. It doesn't include SG,
    VIP or snapshot operations. If the maximum of 4 coexisting running VMs
    is reached, the test will succeed and quit.
''')
target_running_vm = 4
    test_util.test_dsc('Random test begins. Test target: 4 coexisting running VMs (not including VR).')
robot_test_obj = test_util.Robot_Test_Object()
robot_test_obj.set_test_dict(test_dict)
robot_test_obj.set_exclusive_actions_list(\
test_state.TestAction.sg_actions \
+ test_state.TestAction.vip_actions \
+ test_state.TestAction.snapshot_actions)
priority_actions = test_state.TestAction.volume_actions * 2
priority_action_obj = action_select.ActionPriority()
priority_action_obj.add_priority_action_list(priority_actions)
robot_test_obj.set_priority_actions(priority_action_obj)
robot_test_obj.set_random_type(action_select.weight_fair_strategy)
rounds = 1
current_time = time.time()
timeout_time = current_time + 7200
while time.time() <= timeout_time:
print "test_dict: %s" % test_dict
test_util.test_dsc('New round %s starts: random operation pickup.' % rounds)
test_lib.lib_vm_random_operation(robot_test_obj)
test_util.test_dsc('Round %s finished. Begin status checking.' % rounds)
rounds += 1
test_lib.lib_robot_status_check(test_dict)
    test_util.test_dsc('Reached the test pass exit criteria.')
test_lib.lib_robot_cleanup(test_dict)
test_util.test_pass('Create random VM Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
test_lib.lib_error_cleanup(test_dict)
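# For reference, the weight-fair pickup exercised above reduces to weighted
# random choice. A generic standalone sketch (plain stdlib, Python 3.6+; not
# the zstackwoodpecker API):
#
#     import random
#     actions = ['vm_create', 'vm_stop', 'volume_create', 'volume_attach']
#     weights = [1, 1, 2, 2]  # volume actions listed twice, as in this test
#     next_action = random.choices(actions, weights=weights, k=1)[0]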
| apache-2.0 |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/ipython-2.2.0-py2.7.egg/IPython/qt/rich_text.py | 12 | 8785 | """ Defines classes and functions for working with Qt's rich text system.
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import io
import os
import re
# System library imports
from IPython.external.qt import QtGui
# IPython imports
from IPython.utils import py3compat
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
# A regular expression for an HTML paragraph with no content.
EMPTY_P_RE = re.compile(r'<p[^/>]*>\s*</p>')
# A regular expression for matching images in rich text HTML.
# Note that this is overly restrictive, but Qt's output is predictable...
IMG_RE = re.compile(r'<img src="(?P<name>[\d]+)" />')
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class HtmlExporter(object):
""" A stateful HTML exporter for a Q(Plain)TextEdit.
This class is designed for convenient user interaction.
"""
def __init__(self, control):
""" Creates an HtmlExporter for the given Q(Plain)TextEdit.
"""
assert isinstance(control, (QtGui.QPlainTextEdit, QtGui.QTextEdit))
self.control = control
self.filename = 'ipython.html'
self.image_tag = None
self.inline_png = None
def export(self):
""" Displays a dialog for exporting HTML generated by Qt's rich text
system.
Returns
-------
The name of the file that was saved, or None if no file was saved.
"""
parent = self.control.window()
dialog = QtGui.QFileDialog(parent, 'Save as...')
dialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
filters = [
'HTML with PNG figures (*.html *.htm)',
'XHTML with inline SVG figures (*.xhtml *.xml)'
]
dialog.setNameFilters(filters)
if self.filename:
dialog.selectFile(self.filename)
root,ext = os.path.splitext(self.filename)
if ext.lower() in ('.xml', '.xhtml'):
dialog.selectNameFilter(filters[-1])
if dialog.exec_():
self.filename = dialog.selectedFiles()[0]
choice = dialog.selectedNameFilter()
html = py3compat.cast_unicode(self.control.document().toHtml())
# Configure the exporter.
if choice.startswith('XHTML'):
exporter = export_xhtml
else:
# If there are PNGs, decide how to export them.
inline = self.inline_png
if inline is None and IMG_RE.search(html):
dialog = QtGui.QDialog(parent)
dialog.setWindowTitle('Save as...')
layout = QtGui.QVBoxLayout(dialog)
msg = "Exporting HTML with PNGs"
info = "Would you like inline PNGs (single large html " \
"file) or external image files?"
checkbox = QtGui.QCheckBox("&Don't ask again")
checkbox.setShortcut('D')
ib = QtGui.QPushButton("&Inline")
ib.setShortcut('I')
eb = QtGui.QPushButton("&External")
eb.setShortcut('E')
box = QtGui.QMessageBox(QtGui.QMessageBox.Question,
dialog.windowTitle(), msg)
box.setInformativeText(info)
box.addButton(ib, QtGui.QMessageBox.NoRole)
box.addButton(eb, QtGui.QMessageBox.YesRole)
layout.setSpacing(0)
layout.addWidget(box)
layout.addWidget(checkbox)
dialog.setLayout(layout)
dialog.show()
reply = box.exec_()
dialog.hide()
inline = (reply == 0)
if checkbox.checkState():
# Don't ask anymore; always use this choice.
self.inline_png = inline
exporter = lambda h, f, i: export_html(h, f, i, inline)
# Perform the export!
try:
return exporter(html, self.filename, self.image_tag)
except Exception as e:
msg = "Error exporting HTML to %s\n" % self.filename + str(e)
reply = QtGui.QMessageBox.warning(parent, 'Error', msg,
QtGui.QMessageBox.Ok, QtGui.QMessageBox.Ok)
return None
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def export_html(html, filename, image_tag = None, inline = True):
""" Export the contents of the ConsoleWidget as HTML.
Parameters
----------
html : unicode,
A Python unicode string containing the Qt HTML to export.
filename : str
The file to be saved.
image_tag : callable, optional (default None)
Used to convert images. See ``default_image_tag()`` for information.
inline : bool, optional [default True]
If True, include images as inline PNGs. Otherwise, include them as
links to external PNG files, mimicking web browsers' "Web Page,
Complete" behavior.
"""
if image_tag is None:
image_tag = default_image_tag
if inline:
path = None
else:
root,ext = os.path.splitext(filename)
path = root + "_files"
if os.path.isfile(path):
raise OSError("%s exists, but is not a directory." % path)
with io.open(filename, 'w', encoding='utf-8') as f:
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = path, format = "png"),
html))
def export_xhtml(html, filename, image_tag=None):
""" Export the contents of the ConsoleWidget as XHTML with inline SVGs.
Parameters
----------
html : unicode,
A Python unicode string containing the Qt HTML to export.
filename : str
The file to be saved.
image_tag : callable, optional (default None)
Used to convert images. See ``default_image_tag()`` for information.
"""
if image_tag is None:
image_tag = default_image_tag
with io.open(filename, 'w', encoding='utf-8') as f:
# Hack to make xhtml header -- note that we are not doing any check for
# valid XML.
offset = html.find("<html>")
assert offset > -1, 'Invalid HTML string: no <html> tag.'
html = (u'<html xmlns="http://www.w3.org/1999/xhtml">\n'+
html[offset+6:])
html = fix_html(html)
f.write(IMG_RE.sub(lambda x: image_tag(x, path = None, format = "svg"),
html))
def default_image_tag(match, path = None, format = "png"):
""" Return (X)HTML mark-up for the image-tag given by match.
This default implementation merely removes the image, and exists mostly
for documentation purposes. More information than is present in the Qt
HTML is required to supply the images.
Parameters
----------
match : re.SRE_Match
A match to an HTML image tag as exported by Qt, with match.group("Name")
containing the matched image ID.
path : string|None, optional [default None]
If not None, specifies a path to which supporting files may be written
(e.g., for linked images). If None, all images are to be included
inline.
format : "png"|"svg", optional [default "png"]
Format for returned or referenced images.
"""
return u''
def fix_html(html):
""" Transforms a Qt-generated HTML string into a standards-compliant one.
Parameters
----------
html : unicode,
A Python unicode string containing the Qt HTML.
"""
# A UTF-8 declaration is needed for proper rendering of some characters
# (e.g., indented commands) when viewing exported HTML on a local system
# (i.e., without seeing an encoding declaration in an HTTP header).
# C.f. http://www.w3.org/International/O-charset for details.
offset = html.find('<head>')
if offset > -1:
html = (html[:offset+6]+
'\n<meta http-equiv="Content-Type" '+
'content="text/html; charset=utf-8" />\n'+
html[offset+6:])
# Replace empty paragraphs tags with line breaks.
html = re.sub(EMPTY_P_RE, '<br/>', html)
return html
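# A quick sanity sketch (hypothetical input, not part of this module):
#
#     qt_html = (u'<html><head></head><body>'
#                u'<p style="-qt-paragraph-type:empty;"></p></body></html>')
#     fixed = fix_html(qt_html)
#     # `fixed` now contains a UTF-8 <meta> declaration after <head>, and the
#     # empty Qt paragraph has been replaced by <br/>.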
| apache-2.0 |
ryfeus/lambda-packs | Lxml_requests/source/google/protobuf/internal/symbol_database_test.py | 43 | 5386 | #! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.symbol_database."""
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import descriptor
from google.protobuf import descriptor_pool
from google.protobuf import symbol_database
class SymbolDatabaseTest(unittest.TestCase):
def _Database(self):
if descriptor._USE_C_DESCRIPTORS:
# The C++ implementation does not allow mixing descriptors from
# different pools.
db = symbol_database.SymbolDatabase(pool=descriptor_pool.Default())
else:
db = symbol_database.SymbolDatabase()
# Register representative types from unittest_pb2.
db.RegisterFileDescriptor(unittest_pb2.DESCRIPTOR)
db.RegisterMessage(unittest_pb2.TestAllTypes)
db.RegisterMessage(unittest_pb2.TestAllTypes.NestedMessage)
db.RegisterMessage(unittest_pb2.TestAllTypes.OptionalGroup)
db.RegisterMessage(unittest_pb2.TestAllTypes.RepeatedGroup)
db.RegisterEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
db.RegisterEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
return db
def testGetPrototype(self):
instance = self._Database().GetPrototype(
unittest_pb2.TestAllTypes.DESCRIPTOR)
self.assertTrue(instance is unittest_pb2.TestAllTypes)
def testGetMessages(self):
messages = self._Database().GetMessages(
['google/protobuf/unittest.proto'])
self.assertTrue(
unittest_pb2.TestAllTypes is
messages['protobuf_unittest.TestAllTypes'])
def testGetSymbol(self):
self.assertEqual(
unittest_pb2.TestAllTypes, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes'))
self.assertEqual(
unittest_pb2.TestAllTypes.NestedMessage, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes.NestedMessage'))
self.assertEqual(
unittest_pb2.TestAllTypes.OptionalGroup, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes.OptionalGroup'))
self.assertEqual(
unittest_pb2.TestAllTypes.RepeatedGroup, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes.RepeatedGroup'))
def testEnums(self):
# Check registration of types in the pool.
self.assertEqual(
'protobuf_unittest.ForeignEnum',
self._Database().pool.FindEnumTypeByName(
'protobuf_unittest.ForeignEnum').full_name)
self.assertEqual(
'protobuf_unittest.TestAllTypes.NestedEnum',
self._Database().pool.FindEnumTypeByName(
'protobuf_unittest.TestAllTypes.NestedEnum').full_name)
def testFindMessageTypeByName(self):
self.assertEqual(
'protobuf_unittest.TestAllTypes',
self._Database().pool.FindMessageTypeByName(
'protobuf_unittest.TestAllTypes').full_name)
self.assertEqual(
'protobuf_unittest.TestAllTypes.NestedMessage',
self._Database().pool.FindMessageTypeByName(
'protobuf_unittest.TestAllTypes.NestedMessage').full_name)
def testFindFindContainingSymbol(self):
# Lookup based on either enum or message.
self.assertEqual(
'google/protobuf/unittest.proto',
self._Database().pool.FindFileContainingSymbol(
'protobuf_unittest.TestAllTypes.NestedEnum').name)
self.assertEqual(
'google/protobuf/unittest.proto',
self._Database().pool.FindFileContainingSymbol(
'protobuf_unittest.TestAllTypes').name)
def testFindFileByName(self):
self.assertEqual(
'google/protobuf/unittest.proto',
self._Database().pool.FindFileByName(
'google/protobuf/unittest.proto').name)
if __name__ == '__main__':
unittest.main()
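# Typical direct usage mirrored by these tests (a sketch, assuming the
# generated unittest_pb2 module is importable):
#
#     db = symbol_database.SymbolDatabase()
#     db.RegisterMessage(unittest_pb2.TestAllTypes)
#     cls = db.GetSymbol('protobuf_unittest.TestAllTypes')
#     msg = db.GetPrototype(unittest_pb2.TestAllTypes.DESCRIPTOR)()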
| mit |
jgcaaprom/android_external_chromium_org | tools/telemetry/telemetry/page/actions/scroll.py | 45 | 3747 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.page.actions import page_action
class ScrollAction(page_action.PageAction):
# TODO(chrishenry): Ignore attributes, to be deleted when usage in
# other repo is cleaned up.
def __init__(self, selector=None, text=None, element_function=None,
left_start_ratio=0.5, top_start_ratio=0.5, direction='down',
distance=None, distance_expr=None,
speed_in_pixels_per_second=800, use_touch=False):
super(ScrollAction, self).__init__()
if direction not in ['down', 'up', 'left', 'right']:
raise page_action.PageActionNotSupported(
          'Invalid scroll direction: %s' % direction)  # self._direction is not set yet
self._selector = selector
self._text = text
self._element_function = element_function
self._left_start_ratio = left_start_ratio
self._top_start_ratio = top_start_ratio
self._direction = direction
self._speed = speed_in_pixels_per_second
self._use_touch = use_touch
self._distance_func = 'null'
if distance:
assert not distance_expr
distance_expr = str(distance)
if distance_expr:
self._distance_func = ('function() { return 0 + %s; }' %
distance_expr)
def WillRunAction(self, tab):
for js_file in ['gesture_common.js', 'scroll.js']:
with open(os.path.join(os.path.dirname(__file__), js_file)) as f:
js = f.read()
tab.ExecuteJavaScript(js)
# Fail if browser doesn't support synthetic scroll gestures.
if not tab.EvaluateJavaScript('window.__ScrollAction_SupportedByBrowser()'):
raise page_action.PageActionNotSupported(
'Synthetic scroll not supported for this browser')
# Fail if this action requires touch and we can't send touch events.
if self._use_touch:
if not page_action.IsGestureSourceTypeSupported(tab, 'touch'):
raise page_action.PageActionNotSupported(
'Touch scroll not supported for this browser')
if (page_action.GetGestureSourceTypeFromOptions(tab) ==
'chrome.gpuBenchmarking.MOUSE_INPUT'):
raise page_action.PageActionNotSupported(
'Scroll requires touch on this page but mouse input was requested')
done_callback = 'function() { window.__scrollActionDone = true; }'
tab.ExecuteJavaScript("""
window.__scrollActionDone = false;
window.__scrollAction = new __ScrollAction(%s, %s);"""
% (done_callback, self._distance_func))
def RunAction(self, tab):
if (self._selector is None and self._text is None and
self._element_function is None):
self._element_function = 'document.body'
gesture_source_type = page_action.GetGestureSourceTypeFromOptions(tab)
if self._use_touch:
gesture_source_type = 'chrome.gpuBenchmarking.TOUCH_INPUT'
code = '''
function(element, info) {
if (!element) {
throw Error('Cannot find element: ' + info);
}
window.__scrollAction.start({
element: element,
left_start_ratio: %s,
top_start_ratio: %s,
direction: '%s',
speed: %s,
gesture_source_type: %s
});
}''' % (self._left_start_ratio,
self._top_start_ratio,
self._direction,
self._speed,
gesture_source_type)
page_action.EvaluateCallbackWithElement(
tab, code, selector=self._selector, text=self._text,
element_function=self._element_function)
tab.WaitForJavaScriptExpression('window.__scrollActionDone', 60)
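# Hypothetical construction, for illustration only (argument names taken from
# __init__ above, not an official telemetry recipe):
#
#   action = ScrollAction(selector='#content', direction='down',
#                         distance=1000, speed_in_pixels_per_second=400)
#   action.WillRunAction(tab)  # given a connected telemetry `tab`
#   action.RunAction(tab)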
| bsd-3-clause |
mclaughlin6464/pylearn2 | pylearn2/sandbox/rnn/utils/iteration.py | 34 | 5669 | """
Iterator for RNN data
"""
from functools import wraps
import numpy as np
from theano import config
from pylearn2.sandbox.rnn.space import SequenceDataSpace
from pylearn2.sandbox.rnn.space import SequenceMaskSpace
from pylearn2.space import CompositeSpace
from pylearn2.utils import safe_izip
from pylearn2.utils.iteration import FiniteDatasetIterator
class SequenceDatasetIterator(FiniteDatasetIterator):
"""
Assumes space is a CompositeSpace and source is a tuple.
Parameters
----------
dataset : `Dataset` object
The dataset over which to iterate.
data_specs : tuple
A `(space, source)` tuple. See :ref:`data_specs` for a full
description. Must not contain nested composite spaces.
subset_iterator : object
An iterator object that returns slice objects or lists of
examples, conforming to the interface specified by
:py:class:`SubsetIterator`.
return_tuple : bool, optional
Always return a tuple, even if there is exactly one source
of data being returned. Defaults to `False`.
convert : list of callables
A list of callables, in the same order as the sources
in `data_specs`, that will be called on the individual
source batches prior to any further processing.
Notes
-----
See the documentation for :py:class:`SubsetIterator` for
attribute documentation.
"""
def __init__(self, dataset, data_specs, subset_iterator,
return_tuple=False, convert=None):
# Unpack the data specs into two tuples
space, source = data_specs
if not isinstance(source, tuple):
source = (source,)
# Remove the requested mask from the data specs before calling
# the parent constructor
self._original_source = source
mask_seen, sequence_seen = False, False
self.mask_needed = []
retain = []
for i, (subspace, subsource) in enumerate(safe_izip(space.components,
source)):
if isinstance(subspace, SequenceMaskSpace):
if not subsource.endswith('_mask') or \
subsource[:-5] not in source:
raise ValueError("SequenceDatasetIterator received "
"data_specs containing a "
"SequenceMaskSpace with corresponding "
"source %s, but the source should end "
"with `_mask` in order to match it to the"
"correct SequenceDataSpace")
mask_seen = True
self.mask_needed.append(subsource[:-5])
else:
retain.append(i)
if isinstance(subspace, SequenceDataSpace):
sequence_seen = True
if mask_seen != sequence_seen and i + 1 != len(retain):
raise ValueError("SequenceDatasetIterator was asked to iterate "
"over a sequence mask without data or vice versa")
space = space.restrict(retain)
source = tuple(source[i] for i in retain)
super(SequenceDatasetIterator, self).__init__(
dataset, subset_iterator, (space, source),
return_tuple=return_tuple, convert=convert
)
if not isinstance(space, CompositeSpace):
space = (space,)
else:
space = space.components
assert len(space) == len(source)
self._original_space = space
def __iter__(self):
return self
def _create_mask(self, data):
"""
Creates the mask for a given set of data.
Parameters
----------
data : numpy sequence of ndarrays
A sequence of ndarrays representing sequential data
"""
sequence_lengths = [len(sample) for sample in data]
max_sequence_length = max(sequence_lengths)
mask = np.zeros((max_sequence_length, len(data)), dtype=config.floatX)
for i, sequence_length in enumerate(sequence_lengths):
mask[:sequence_length, i] = 1
return mask
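    # For instance (a standalone sketch of the behaviour above): two samples
    # of lengths 2 and 4 produce the (4, 2) mask
    #
    #     [[1, 1],
    #      [1, 1],
    #      [0, 1],
    #      [0, 1]]
    #
    # i.e. column i is 1 for the first len(data[i]) steps and 0 afterwards.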
@wraps(FiniteDatasetIterator.next)
def next(self):
next_index = self._subset_iterator.next()
rvals = []
for space, source, data, fn in safe_izip(self._space, self._source,
self._raw_data,
self._convert):
rval = data[next_index]
if isinstance(space, SequenceDataSpace):
# Add padding
max_sequence_length = max(len(sample) for sample
in rval)
batch = np.zeros((len(rval), max_sequence_length) +
data[0].shape[1:], dtype=data[0].dtype)
for i, sample in enumerate(rval):
batch[i, :len(sample)] = sample
# Create mask
if source in self.mask_needed:
mask = self._create_mask(rval)
rval = np.swapaxes(batch, 0, 1)
if fn:
rval = fn(rval)
rvals.append(rval)
if source in self.mask_needed:
rvals.append(mask)
else:
if fn:
rval = fn(rval)
rvals.append(rval)
# Reorder according to given data specs
if not self._return_tuple and len(rval) == 1:
rvals, = rvals
return tuple(rvals)
| bsd-3-clause |
susansalkeld/discsongs | discsongs/lib/python2.7/site-packages/setuptools/package_index.py | 258 | 38941 | """PyPI and direct package downloading"""
import sys
import os
import re
import shutil
import socket
import base64
import hashlib
from functools import wraps
from pkg_resources import (
CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
require, Environment, find_distributions, safe_name, safe_version,
to_filename, Requirement, DEVELOP_DIST,
)
from setuptools import ssl_support
from distutils import log
from distutils.errors import DistutilsError
from setuptools.compat import (urllib2, httplib, StringIO, HTTPError,
urlparse, urlunparse, unquote, splituser,
url2pathname, name2codepoint,
unichr, urljoin, urlsplit, urlunsplit,
ConfigParser)
from setuptools.compat import filterfalse
from fnmatch import translate
from setuptools.py26compat import strip_fragment
from setuptools.py27compat import get_all_headers
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
PYPI_MD5 = re.compile(
'<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
__all__ = [
'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
'interpret_distro_name',
]
_SOCKET_TIMEOUT = 15
def parse_bdist_wininst(name):
"""Return (base,pyversion) or (None,None) for possible .exe name"""
lower = name.lower()
base, py_ver, plat = None, None, None
if lower.endswith('.exe'):
if lower.endswith('.win32.exe'):
base = name[:-10]
plat = 'win32'
elif lower.startswith('.win32-py',-16):
py_ver = name[-7:-4]
base = name[:-16]
plat = 'win32'
elif lower.endswith('.win-amd64.exe'):
base = name[:-14]
plat = 'win-amd64'
elif lower.startswith('.win-amd64-py',-20):
py_ver = name[-7:-4]
base = name[:-20]
plat = 'win-amd64'
return base,py_ver,plat
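# For example (a sketch of the contract above, not exhaustive):
#
#     parse_bdist_wininst('foo-1.0.win32.exe')        # ('foo-1.0', None, 'win32')
#     parse_bdist_wininst('foo-1.0.win32-py2.7.exe')  # ('foo-1.0', '2.7', 'win32')
#     parse_bdist_wininst('foo-1.0.tar.gz')           # (None, None, None)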
def egg_info_for_url(url):
scheme, server, path, parameters, query, fragment = urlparse(url)
base = unquote(path.split('/')[-1])
if server=='sourceforge.net' and base=='download': # XXX Yuck
base = unquote(path.split('/')[-2])
if '#' in base: base, fragment = base.split('#',1)
return base,fragment
def distros_for_url(url, metadata=None):
"""Yield egg or source distribution objects that might be found at a URL"""
base, fragment = egg_info_for_url(url)
for dist in distros_for_location(url, base, metadata): yield dist
if fragment:
match = EGG_FRAGMENT.match(fragment)
if match:
for dist in interpret_distro_name(
url, match.group(1), metadata, precedence = CHECKOUT_DIST
):
yield dist
def distros_for_location(location, basename, metadata=None):
"""Yield egg or source distribution objects based on basename"""
if basename.endswith('.egg.zip'):
basename = basename[:-4] # strip the .zip
if basename.endswith('.egg') and '-' in basename:
# only one, unambiguous interpretation
return [Distribution.from_location(location, basename, metadata)]
if basename.endswith('.exe'):
win_base, py_ver, platform = parse_bdist_wininst(basename)
if win_base is not None:
return interpret_distro_name(
location, win_base, metadata, py_ver, BINARY_DIST, platform
)
# Try source distro extensions (.zip, .tgz, etc.)
#
for ext in EXTENSIONS:
if basename.endswith(ext):
basename = basename[:-len(ext)]
return interpret_distro_name(location, basename, metadata)
return [] # no extension matched
def distros_for_filename(filename, metadata=None):
"""Yield possible egg or source distribution objects based on a filename"""
return distros_for_location(
normalize_path(filename), os.path.basename(filename), metadata
)
def interpret_distro_name(
location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
platform=None
):
"""Generate alternative interpretations of a source distro name
Note: if `location` is a filesystem filename, you should call
``pkg_resources.normalize_path()`` on it before passing it to this
routine!
"""
# Generate alternative interpretations of a source distro name
# Because some packages are ambiguous as to name/versions split
# e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
# So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
# "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
# the spurious interpretations should be ignored, because in the event
# there's also an "adns" package, the spurious "python-1.1.0" version will
# compare lower than any numeric version number, and is therefore unlikely
# to match a request for it. It's still a potential problem, though, and
# in the long run PyPI and the distutils should go for "safe" names and
# versions in distribution archive names (sdist and bdist).
parts = basename.split('-')
if not py_version:
for i,p in enumerate(parts[2:]):
if len(p)==5 and p.startswith('py2.'):
return # It's a bdist_dumb, not an sdist -- bail out
for p in range(1,len(parts)+1):
yield Distribution(
location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
py_version=py_version, precedence = precedence,
platform = platform
)
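# For example (sketch): interpret_distro_name(loc, 'adns-python-1.1.0', None)
# yields the candidate (project, version) splits
#     ('adns', 'python-1.1.0'), ('adns-python', '1.1.0'),
#     ('adns-python-1.1.0', '')
# and the spurious splits lose out in version comparison, as noted above.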
# From Python 2.7 docs
def unique_everseen(iterable, key=None):
"List unique elements, preserving order. Remember all elements ever seen."
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
# unique_everseen('ABBCcAD', str.lower) --> A B C D
seen = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element
def unique_values(func):
"""
Wrap a function returning an iterable such that the resulting iterable
only ever yields unique items.
"""
@wraps(func)
def wrapper(*args, **kwargs):
return unique_everseen(func(*args, **kwargs))
return wrapper
REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
# this line is here to fix emacs' cruddy broken syntax highlighting
@unique_values
def find_external_links(url, page):
"""Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
for match in REL.finditer(page):
tag, rel = match.groups()
rels = set(map(str.strip, rel.lower().split(',')))
if 'homepage' in rels or 'download' in rels:
for match in HREF.finditer(tag):
yield urljoin(url, htmldecode(match.group(1)))
for tag in ("<th>Home Page", "<th>Download URL"):
pos = page.find(tag)
if pos!=-1:
match = HREF.search(page,pos)
if match:
yield urljoin(url, htmldecode(match.group(1)))
user_agent = "Python-urllib/%s setuptools/%s" % (
sys.version[:3], require('setuptools')[0].version
)
class ContentChecker(object):
"""
A null content checker that defines the interface for checking content
"""
def feed(self, block):
"""
Feed a block of data to the hash.
"""
return
def is_valid(self):
"""
Check the hash. Return False if validation fails.
"""
return True
def report(self, reporter, template):
"""
Call reporter with information about the checker (hash name)
substituted into the template.
"""
return
class HashChecker(ContentChecker):
pattern = re.compile(
r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
r'(?P<expected>[a-f0-9]+)'
)
def __init__(self, hash_name, expected):
self.hash_name = hash_name
self.hash = hashlib.new(hash_name)
self.expected = expected
@classmethod
def from_url(cls, url):
"Construct a (possibly null) ContentChecker from a URL"
fragment = urlparse(url)[-1]
if not fragment:
return ContentChecker()
match = cls.pattern.search(fragment)
if not match:
return ContentChecker()
return cls(**match.groupdict())
def feed(self, block):
self.hash.update(block)
def is_valid(self):
return self.hash.hexdigest() == self.expected
def report(self, reporter, template):
msg = template % self.hash_name
return reporter(msg)
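# E.g. (sketch): a URL fragment like "#md5=d41d8cd98f00b204e9800998ecf8427e"
# yields a live checker, while a bare URL falls back to the null checker:
#
#     checker = HashChecker.from_url(
#         'https://example.com/pkg-1.0.tar.gz'
#         '#md5=d41d8cd98f00b204e9800998ecf8427e')
#     checker.feed(b'')         # an empty payload hashes to the md5 above
#     assert checker.is_valid()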
class PackageIndex(Environment):
"""A distribution index that scans web pages for download URLs"""
def __init__(
self, index_url="https://pypi.python.org/simple", hosts=('*',),
ca_bundle=None, verify_ssl=True, *args, **kw
):
Environment.__init__(self,*args,**kw)
self.index_url = index_url + "/"[:not index_url.endswith('/')]
self.scanned_urls = {}
self.fetched_urls = {}
self.package_pages = {}
self.allows = re.compile('|'.join(map(translate,hosts))).match
self.to_scan = []
if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()):
self.opener = ssl_support.opener_for(ca_bundle)
else: self.opener = urllib2.urlopen
def process_url(self, url, retrieve=False):
"""Evaluate a URL as a possible download, and maybe retrieve it"""
if url in self.scanned_urls and not retrieve:
return
self.scanned_urls[url] = True
if not URL_SCHEME(url):
self.process_filename(url)
return
else:
dists = list(distros_for_url(url))
if dists:
if not self.url_ok(url):
return
self.debug("Found link: %s", url)
if dists or not retrieve or url in self.fetched_urls:
list(map(self.add, dists))
return # don't need the actual page
if not self.url_ok(url):
self.fetched_urls[url] = True
return
self.info("Reading %s", url)
self.fetched_urls[url] = True # prevent multiple fetch attempts
f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url)
if f is None: return
self.fetched_urls[f.url] = True
if 'html' not in f.headers.get('content-type', '').lower():
f.close() # not html, we can't process it
return
base = f.url # handle redirects
page = f.read()
if not isinstance(page, str): # We are in Python 3 and got bytes. We want str.
if isinstance(f, HTTPError):
# Errors have no charset, assume latin1:
charset = 'latin-1'
else:
charset = f.headers.get_param('charset') or 'latin-1'
page = page.decode(charset, "ignore")
f.close()
for match in HREF.finditer(page):
link = urljoin(base, htmldecode(match.group(1)))
self.process_url(link)
if url.startswith(self.index_url) and getattr(f,'code',None)!=404:
page = self.process_index(url, page)
def process_filename(self, fn, nested=False):
# process filenames or directories
if not os.path.exists(fn):
self.warn("Not found: %s", fn)
return
if os.path.isdir(fn) and not nested:
path = os.path.realpath(fn)
for item in os.listdir(path):
self.process_filename(os.path.join(path,item), True)
dists = distros_for_filename(fn)
if dists:
self.debug("Found: %s", fn)
list(map(self.add, dists))
def url_ok(self, url, fatal=False):
s = URL_SCHEME(url)
if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]):
return True
msg = ("\nNote: Bypassing %s (disallowed host; see "
"http://bit.ly/1dg9ijs for details).\n")
if fatal:
raise DistutilsError(msg % url)
else:
self.warn(msg, url)
def scan_egg_links(self, search_path):
for item in search_path:
if os.path.isdir(item):
for entry in os.listdir(item):
if entry.endswith('.egg-link'):
self.scan_egg_link(item, entry)
def scan_egg_link(self, path, entry):
lines = [_f for _f in map(str.strip,
open(os.path.join(path, entry))) if _f]
if len(lines)==2:
for dist in find_distributions(os.path.join(path, lines[0])):
dist.location = os.path.join(path, *lines)
dist.precedence = SOURCE_DIST
self.add(dist)
def process_index(self,url,page):
"""Process the contents of a PyPI page"""
def scan(link):
# Process a URL to see if it's for a package page
if link.startswith(self.index_url):
parts = list(map(
unquote, link[len(self.index_url):].split('/')
))
if len(parts)==2 and '#' not in parts[1]:
# it's a package page, sanitize and index it
pkg = safe_name(parts[0])
ver = safe_version(parts[1])
self.package_pages.setdefault(pkg.lower(),{})[link] = True
return to_filename(pkg), to_filename(ver)
return None, None
# process an index page into the package-page index
for match in HREF.finditer(page):
try:
scan(urljoin(url, htmldecode(match.group(1))))
except ValueError:
pass
pkg, ver = scan(url) # ensure this page is in the page index
if pkg:
# process individual package page
for new_url in find_external_links(url, page):
# Process the found URL
base, frag = egg_info_for_url(new_url)
if base.endswith('.py') and not frag:
if ver:
new_url+='#egg=%s-%s' % (pkg,ver)
else:
self.need_version_info(url)
self.scan_url(new_url)
return PYPI_MD5.sub(
lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
)
else:
return "" # no sense double-scanning non-package pages
def need_version_info(self, url):
self.scan_all(
"Page at %s links to .py file(s) without version info; an index "
"scan is required.", url
)
def scan_all(self, msg=None, *args):
if self.index_url not in self.fetched_urls:
if msg: self.warn(msg,*args)
self.info(
"Scanning index of all packages (this may take a while)"
)
self.scan_url(self.index_url)
def find_packages(self, requirement):
self.scan_url(self.index_url + requirement.unsafe_name+'/')
if not self.package_pages.get(requirement.key):
# Fall back to safe version of the name
self.scan_url(self.index_url + requirement.project_name+'/')
if not self.package_pages.get(requirement.key):
# We couldn't find the target package, so search the index page too
self.not_found_in_index(requirement)
for url in list(self.package_pages.get(requirement.key,())):
# scan each page that might be related to the desired package
self.scan_url(url)
def obtain(self, requirement, installer=None):
self.prescan()
self.find_packages(requirement)
for dist in self[requirement.key]:
if dist in requirement:
return dist
self.debug("%s does not match %s", requirement, dist)
return super(PackageIndex, self).obtain(requirement,installer)
def check_hash(self, checker, filename, tfp):
"""
checker is a ContentChecker
"""
checker.report(self.debug,
"Validating %%s checksum for %s" % filename)
if not checker.is_valid():
tfp.close()
os.unlink(filename)
raise DistutilsError(
"%s validation failed for %s; "
"possible download problem?" % (
checker.hash.name, os.path.basename(filename))
)
def add_find_links(self, urls):
"""Add `urls` to the list that will be prescanned for searches"""
for url in urls:
if (
self.to_scan is None # if we have already "gone online"
or not URL_SCHEME(url) # or it's a local file/directory
or url.startswith('file:')
or list(distros_for_url(url)) # or a direct package link
):
# then go ahead and process it now
self.scan_url(url)
else:
# otherwise, defer retrieval till later
self.to_scan.append(url)
def prescan(self):
"""Scan urls scheduled for prescanning (e.g. --find-links)"""
if self.to_scan:
list(map(self.scan_url, self.to_scan))
self.to_scan = None # from now on, go ahead and process immediately
def not_found_in_index(self, requirement):
if self[requirement.key]: # we've seen at least one distro
meth, msg = self.info, "Couldn't retrieve index page for %r"
else: # no distros seen for this name, might be misspelled
meth, msg = (self.warn,
"Couldn't find index page for %r (maybe misspelled?)")
meth(msg, requirement.unsafe_name)
self.scan_all()
def download(self, spec, tmpdir):
"""Locate and/or download `spec` to `tmpdir`, returning a local path
`spec` may be a ``Requirement`` object, or a string containing a URL,
an existing local filename, or a project/version requirement spec
(i.e. the string form of a ``Requirement`` object). If it is the URL
of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
automatically created alongside the downloaded file.
If `spec` is a ``Requirement`` object or a string containing a
project/version requirement spec, this method returns the location of
a matching distribution (possibly after downloading it to `tmpdir`).
If `spec` is a locally existing file or directory name, it is simply
returned unchanged. If `spec` is a URL, it is downloaded to a subpath
of `tmpdir`, and the local filename is returned. Various errors may be
raised if a problem occurs during downloading.
"""
if not isinstance(spec,Requirement):
scheme = URL_SCHEME(spec)
if scheme:
# It's a url, download it to tmpdir
found = self._download_url(scheme.group(1), spec, tmpdir)
base, fragment = egg_info_for_url(spec)
if base.endswith('.py'):
found = self.gen_setup(found,fragment,tmpdir)
return found
elif os.path.exists(spec):
# Existing file or directory, just return it
return spec
else:
try:
spec = Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" %
(spec,)
)
return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
def fetch_distribution(
self, requirement, tmpdir, force_scan=False, source=False,
develop_ok=False, local_index=None
):
"""Obtain a distribution suitable for fulfilling `requirement`
`requirement` must be a ``pkg_resources.Requirement`` instance.
If necessary, or if the `force_scan` flag is set, the requirement is
searched for in the (online) package index as well as the locally
installed packages. If a distribution matching `requirement` is found,
the returned distribution's ``location`` is the value you would have
gotten from calling the ``download()`` method with the matching
distribution's URL or filename. If no matching distribution is found,
``None`` is returned.
If the `source` flag is set, only source distributions and source
checkout links will be considered. Unless the `develop_ok` flag is
set, development and system eggs (i.e., those using the ``.egg-info``
format) will be ignored.
"""
# process a Requirement
self.info("Searching for %s", requirement)
skipped = {}
dist = None
def find(req, env=None):
if env is None:
env = self
# Find a matching distribution; may be called more than once
for dist in env[req.key]:
if dist.precedence==DEVELOP_DIST and not develop_ok:
if dist not in skipped:
self.warn("Skipping development or system egg: %s",dist)
skipped[dist] = 1
continue
if dist in req and (dist.precedence<=SOURCE_DIST or not source):
return dist
if force_scan:
self.prescan()
self.find_packages(requirement)
dist = find(requirement)
if local_index is not None:
dist = dist or find(requirement, local_index)
if dist is None:
if self.to_scan is not None:
self.prescan()
dist = find(requirement)
if dist is None and not force_scan:
self.find_packages(requirement)
dist = find(requirement)
if dist is None:
self.warn(
"No local packages or download links found for %s%s",
(source and "a source distribution of " or ""),
requirement,
)
else:
self.info("Best match: %s", dist)
return dist.clone(location=self.download(dist.location, tmpdir))
def fetch(self, requirement, tmpdir, force_scan=False, source=False):
"""Obtain a file suitable for fulfilling `requirement`
DEPRECATED; use the ``fetch_distribution()`` method now instead. For
backward compatibility, this routine is identical but returns the
``location`` of the downloaded distribution instead of a distribution
object.
"""
dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
if dist is not None:
return dist.location
return None
def gen_setup(self, filename, fragment, tmpdir):
match = EGG_FRAGMENT.match(fragment)
dists = match and [
d for d in
interpret_distro_name(filename, match.group(1), None) if d.version
] or []
if len(dists)==1: # unambiguous ``#egg`` fragment
basename = os.path.basename(filename)
# Make sure the file has been downloaded to the temp dir.
if os.path.dirname(filename) != tmpdir:
dst = os.path.join(tmpdir, basename)
from setuptools.command.easy_install import samefile
if not samefile(filename, dst):
shutil.copy2(filename, dst)
filename=dst
file = open(os.path.join(tmpdir, 'setup.py'), 'w')
file.write(
"from setuptools import setup\n"
"setup(name=%r, version=%r, py_modules=[%r])\n"
% (
dists[0].project_name, dists[0].version,
os.path.splitext(basename)[0]
)
)
file.close()
return filename
elif match:
raise DistutilsError(
"Can't unambiguously interpret project/version identifier %r; "
"any dashes in the name or version should be escaped using "
"underscores. %r" % (fragment,dists)
)
else:
raise DistutilsError(
"Can't process plain .py files without an '#egg=name-version'"
" suffix to enable automatic setup script generation."
)
dl_blocksize = 8192
def _download_to(self, url, filename):
self.info("Downloading %s", url)
# Download the file
fp, tfp, info = None, None, None
try:
checker = HashChecker.from_url(url)
fp = self.open_url(strip_fragment(url))
if isinstance(fp, HTTPError):
raise DistutilsError(
"Can't download %s: %s %s" % (url, fp.code,fp.msg)
)
headers = fp.info()
blocknum = 0
bs = self.dl_blocksize
size = -1
if "content-length" in headers:
# Some servers return multiple Content-Length headers :(
sizes = get_all_headers(headers, 'Content-Length')
size = max(map(int, sizes))
self.reporthook(url, filename, blocknum, bs, size)
tfp = open(filename,'wb')
while True:
block = fp.read(bs)
if block:
checker.feed(block)
tfp.write(block)
blocknum += 1
self.reporthook(url, filename, blocknum, bs, size)
else:
break
self.check_hash(checker, filename, tfp)
return headers
finally:
if fp: fp.close()
if tfp: tfp.close()
def reporthook(self, url, filename, blocknum, blksize, size):
pass # no-op
def open_url(self, url, warning=None):
if url.startswith('file:'):
return local_open(url)
try:
return open_with_auth(url, self.opener)
except (ValueError, httplib.InvalidURL):
v = sys.exc_info()[1]
msg = ' '.join([str(arg) for arg in v.args])
if warning:
self.warn(warning, msg)
else:
raise DistutilsError('%s %s' % (url, msg))
except urllib2.HTTPError:
v = sys.exc_info()[1]
return v
except urllib2.URLError:
v = sys.exc_info()[1]
if warning:
self.warn(warning, v.reason)
else:
raise DistutilsError("Download error for %s: %s"
% (url, v.reason))
except httplib.BadStatusLine:
v = sys.exc_info()[1]
if warning:
self.warn(warning, v.line)
else:
raise DistutilsError(
'%s returned a bad status line. The server might be '
'down, %s' %
(url, v.line)
)
except httplib.HTTPException:
v = sys.exc_info()[1]
if warning:
self.warn(warning, v)
else:
raise DistutilsError("Download error for %s: %s"
% (url, v))
def _download_url(self, scheme, url, tmpdir):
# Determine download filename
#
name, fragment = egg_info_for_url(url)
if name:
while '..' in name:
name = name.replace('..','.').replace('\\','_')
else:
name = "__downloaded__" # default if URL has no path contents
if name.endswith('.egg.zip'):
name = name[:-4] # strip the extra .zip before download
filename = os.path.join(tmpdir,name)
# Download the file
#
if scheme=='svn' or scheme.startswith('svn+'):
return self._download_svn(url, filename)
elif scheme=='git' or scheme.startswith('git+'):
return self._download_git(url, filename)
elif scheme.startswith('hg+'):
return self._download_hg(url, filename)
elif scheme=='file':
return url2pathname(urlparse(url)[2])
else:
self.url_ok(url, True) # raises error if not allowed
return self._attempt_download(url, filename)
def scan_url(self, url):
self.process_url(url, True)
def _attempt_download(self, url, filename):
headers = self._download_to(url, filename)
if 'html' in headers.get('content-type','').lower():
return self._download_html(url, headers, filename)
else:
return filename
def _download_html(self, url, headers, filename):
file = open(filename)
for line in file:
if line.strip():
# Check for a subversion index page
if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
# it's a subversion index page:
file.close()
os.unlink(filename)
return self._download_svn(url, filename)
break # not an index page
file.close()
os.unlink(filename)
raise DistutilsError("Unexpected HTML page found at "+url)
def _download_svn(self, url, filename):
url = url.split('#',1)[0] # remove any fragment for svn's sake
creds = ''
if url.lower().startswith('svn:') and '@' in url:
scheme, netloc, path, p, q, f = urlparse(url)
if not netloc and path.startswith('//') and '/' in path[2:]:
netloc, path = path[2:].split('/',1)
auth, host = splituser(netloc)
if auth:
if ':' in auth:
user, pw = auth.split(':',1)
creds = " --username=%s --password=%s" % (user, pw)
else:
creds = " --username="+auth
netloc = host
                url = urlunparse((scheme, netloc, path, p, q, f))  # rebuild URL without credentials
self.info("Doing subversion checkout from %s to %s", url, filename)
os.system("svn checkout%s -q %s %s" % (creds, url, filename))
return filename
@staticmethod
def _vcs_split_rev_from_url(url, pop_prefix=False):
scheme, netloc, path, query, frag = urlsplit(url)
scheme = scheme.split('+', 1)[-1]
# Some fragment identification fails
path = path.split('#',1)[0]
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
# Also, discard fragment
url = urlunsplit((scheme, netloc, path, query, ''))
return url, rev
def _download_git(self, url, filename):
filename = filename.split('#',1)[0]
url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
self.info("Doing git clone from %s to %s", url, filename)
os.system("git clone --quiet %s %s" % (url, filename))
if rev is not None:
self.info("Checking out %s", rev)
os.system("(cd %s && git checkout --quiet %s)" % (
filename,
rev,
))
return filename
def _download_hg(self, url, filename):
filename = filename.split('#',1)[0]
url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
self.info("Doing hg clone from %s to %s", url, filename)
os.system("hg clone --quiet %s %s" % (url, filename))
if rev is not None:
self.info("Updating to %s", rev)
os.system("(cd %s && hg up -C -r %s >&-)" % (
filename,
rev,
))
return filename
def debug(self, msg, *args):
log.debug(msg, *args)
def info(self, msg, *args):
log.info(msg, *args)
def warn(self, msg, *args):
log.warn(msg, *args)
# This pattern matches a character entity reference (a decimal numeric
# reference, a hexadecimal numeric reference, or a named reference).
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
def uchr(c):
if not isinstance(c, int):
return c
if c>255: return unichr(c)
return chr(c)
def decode_entity(match):
what = match.group(1)
if what.startswith('#x'):
what = int(what[2:], 16)
elif what.startswith('#'):
what = int(what[1:])
else:
what = name2codepoint.get(what, match.group(0))
return uchr(what)
def htmldecode(text):
"""Decode HTML entities in the given text."""
return entity_sub(decode_entity, text)
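# For example (illustrative): htmldecode('&lt;b&gt; &amp; &#65; &#x42;')
# returns '<b> & A B', covering named, decimal and hexadecimal references.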
def socket_timeout(timeout=15):
def _socket_timeout(func):
def _socket_timeout(*args, **kwargs):
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
return func(*args, **kwargs)
finally:
socket.setdefaulttimeout(old_timeout)
return _socket_timeout
return _socket_timeout
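# Usage sketch (hypothetical function name): the decorator swaps in a default
# socket timeout for the duration of a single call and always restores the
# previous value, as done for ``open_with_auth`` near the end of this module.
#
#     @socket_timeout(5)
#     def fetch_page(url):
#         return urllib2.urlopen(url).read()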
def _encode_auth(auth):
"""
A function compatible with Python 2.3-3.3 that will encode
auth from a URL suitable for an HTTP header.
>>> str(_encode_auth('username%3Apassword'))
'dXNlcm5hbWU6cGFzc3dvcmQ='
Long auth strings should not cause a newline to be inserted.
>>> long_auth = 'username:' + 'password'*10
>>> chr(10) in str(_encode_auth(long_auth))
False
"""
auth_s = unquote(auth)
# convert to bytes
auth_bytes = auth_s.encode()
# use the legacy interface for Python 2.3 support
encoded_bytes = base64.encodestring(auth_bytes)
# convert back to a string
encoded = encoded_bytes.decode()
# strip the trailing carriage return
return encoded.replace('\n','')
class Credential(object):
"""
A username/password pair. Use like a namedtuple.
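    For example (doctest-style illustration):

    >>> str(Credential('username', 'password'))
    'username:password'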
"""
def __init__(self, username, password):
self.username = username
self.password = password
def __iter__(self):
yield self.username
yield self.password
def __str__(self):
return '%(username)s:%(password)s' % vars(self)
class PyPIConfig(ConfigParser.ConfigParser):
def __init__(self):
"""
Load from ~/.pypirc
"""
defaults = dict.fromkeys(['username', 'password', 'repository'], '')
ConfigParser.ConfigParser.__init__(self, defaults)
rc = os.path.join(os.path.expanduser('~'), '.pypirc')
if os.path.exists(rc):
self.read(rc)
@property
def creds_by_repository(self):
sections_with_repositories = [
section for section in self.sections()
if self.get(section, 'repository').strip()
]
return dict(map(self._get_repo_cred, sections_with_repositories))
def _get_repo_cred(self, section):
repo = self.get(section, 'repository').strip()
return repo, Credential(
self.get(section, 'username').strip(),
self.get(section, 'password').strip(),
)
def find_credential(self, url):
"""
If the URL indicated appears to be a repository defined in this
config, return the credential for that repository.
"""
for repository, cred in self.creds_by_repository.items():
if url.startswith(repository):
return cred
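# Illustrative: with a ~/.pypirc section whose ``repository`` is
# https://upload.example.com/ (a made-up URL), find_credential() returns that
# section's Credential for any URL beneath it, e.g.
# PyPIConfig().find_credential('https://upload.example.com/simple/pkg/').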
def open_with_auth(url, opener=urllib2.urlopen):
"""Open a urllib2 request, handling HTTP authentication"""
scheme, netloc, path, params, query, frag = urlparse(url)
# Double scheme does not raise on Mac OS X as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'):
raise httplib.InvalidURL("nonnumeric port: ''")
if scheme in ('http', 'https'):
auth, host = splituser(netloc)
else:
auth = None
if not auth:
cred = PyPIConfig().find_credential(url)
if cred:
auth = str(cred)
info = cred.username, url
log.info('Authenticating as %s for %s (from .pypirc)' % info)
if auth:
auth = "Basic " + _encode_auth(auth)
new_url = urlunparse((scheme,host,path,params,query,frag))
request = urllib2.Request(new_url)
request.add_header("Authorization", auth)
else:
request = urllib2.Request(url)
request.add_header('User-Agent', user_agent)
fp = opener(request)
if auth:
# Put authentication info back into request URL if same host,
# so that links found on the page will work
s2, h2, path2, param2, query2, frag2 = urlparse(fp.url)
if s2==scheme and h2==host:
fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2))
return fp
# adding a timeout to avoid freezing package_index
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
def fix_sf_url(url):
return url # backward compatibility
def local_open(url):
"""Read a local path, with special support for directories"""
scheme, server, path, param, query, frag = urlparse(url)
filename = url2pathname(path)
if os.path.isfile(filename):
return urllib2.urlopen(url)
elif path.endswith('/') and os.path.isdir(filename):
files = []
for f in os.listdir(filename):
if f=='index.html':
fp = open(os.path.join(filename,f),'r')
body = fp.read()
fp.close()
break
elif os.path.isdir(os.path.join(filename,f)):
f+='/'
files.append("<a href=%r>%s</a>" % (f,f))
else:
body = ("<html><head><title>%s</title>" % url) + \
"</head><body>%s</body></html>" % '\n'.join(files)
status, message = 200, "OK"
else:
status, message, body = 404, "Path not found", "Not found"
headers = {'content-type': 'text/html'}
return HTTPError(url, status, message, headers, StringIO(body))
| mit |
ndingwall/scikit-learn | sklearn/tests/test_build.py | 17 | 1175 | import os
import pytest
import textwrap
from sklearn import __version__
from sklearn.utils._openmp_helpers import _openmp_parallelism_enabled
def test_openmp_parallelism_enabled():
# Check that sklearn is built with OpenMP-based parallelism enabled.
# This test can be skipped by setting the environment variable
# ``SKLEARN_SKIP_OPENMP_TEST``.
if os.getenv("SKLEARN_SKIP_OPENMP_TEST"):
pytest.skip("test explicitly skipped (SKLEARN_SKIP_OPENMP_TEST)")
base_url = "dev" if __version__.endswith(".dev0") else "stable"
err_msg = textwrap.dedent(
"""
This test fails because scikit-learn has been built without OpenMP.
This is not recommended since some estimators will run in sequential
mode instead of leveraging thread-based parallelism.
You can find instructions to build scikit-learn with OpenMP at this
address:
https://scikit-learn.org/{}/developers/advanced_installation.html
You can skip this test by setting the environment variable
SKLEARN_SKIP_OPENMP_TEST to any value.
""").format(base_url)
assert _openmp_parallelism_enabled(), err_msg
| bsd-3-clause |
anilpai/leetcode | BalancedBT/LCA_BT.py | 1 | 3404 | from TreeSerialize.TreeSerialize import deserialize, drawtree
'''
Needs Python 3+
'''
class Solution_old(object):
'''
    Lowest Common Ancestor (LCA) in a Binary Tree (BT): stores root-to-node paths, so it takes additional space and is not space-optimized.
'''
def findPath(self, root, path, k):
'''
A Helper function to make sure that both nodes exist.
'''
if root is None:
return False
path.append(root.val)
if root.val == k:
return True
# To check if K is found in left or right sub tree.
if ((root.left is not None) and (self.findPath(root.left, path, k))) or ((root.right is not None) and (self.findPath(root.right, path, k))):
return True
# If not present in subtree with root, remove root from path and return False
path.pop()
return False
def lca(self, root, n1, n2):
# To store the paths to n1 and n2 from the root
path1 = []
path2 = []
# Find path from root to n1 and n2 and if either is not present, return -1
if (not self.findPath(root, path1, n1) or not self.findPath(root, path2, n2)):
return -1
# Compare the paths to get the first different value.
i = 0
while i < len(path1) and i < len(path2):
if path1[i] != path2[i]:
break
i += 1
return path1[i-1]
class Solution(object):
'''
Lowest Common Ancestor (LCA) in a Binary Tree (BT)
'''
def lca(self, root, p, q):
if root is None:
return None
left = self.lca(root.left, p, q)
right = self.lca(root.right, p, q)
if (left and right) or (root in [p, q]):
return root
else:
return left or right
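# Worked sketch (hypothetical three-node tree): with root 1, left child 2 and
# right child 3, calling lca(root, node2, node3) returns node2 from the left
# recursion and node3 from the right, so ``left and right`` is truthy at the
# root and node 1 is reported as the LCA.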
if __name__ == '__main__':
""" Both p & q must exist. If either of them is null, then the other node is the least common ancestor. """
old_solution = False
if old_solution:
s = Solution_old()
lca = s.lca(deserialize('[1,2,3,null,null,4,null,5,6]'), 5, 6)
print(lca)
drawtree(deserialize('[1,2,3,null,null,4,null,5,6]'))
print(s.lca(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]'), 17, 12))
drawtree(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]'))
print(s.lca(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,18,28,null,null,null,null,17,22,31,43,0,47,49,51,52,null,61,40,null,null,48,58,null,null,null,null,47]'), 21, 58))
drawtree(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,18,28,null,null,null,null,17,22,31,43,0,47,49,51,52,null,61,40,null,null,48,58,null,null,null,null,47]'))
else:
s1 = Solution()
# Example 1
# root = deserialize('[1,2,3,null,null,4,null,5,6]')
# p = root.right.left.left
# q = root.right.left.right
# lca = s1.lca(root, p, q)
# print(lca)
# drawtree(deserialize('[1,2,3,null,null,4,null,5,6]'))
# Example 2
root = deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]')
p = root.left.left.left
q = root.left.right.right.left
lca = s1.lca(root, p, q)
print(lca)
drawtree(deserialize('[2,1,3,0,7,9,11,12,null,21,20,null,null,38,48,null,null,null,null,17]'))
| mit |
mpvismer/pyqtgraph | pyqtgraph/widgets/FeedbackButton.py | 52 | 6429 | # -*- coding: utf-8 -*-
from ..Qt import QtCore, QtGui
__all__ = ['FeedbackButton']
class FeedbackButton(QtGui.QPushButton):
"""
QPushButton which flashes success/failure indication for slow or asynchronous procedures.
"""
### For thread-safetyness
sigCallSuccess = QtCore.Signal(object, object, object)
sigCallFailure = QtCore.Signal(object, object, object)
sigCallProcess = QtCore.Signal(object, object, object)
sigReset = QtCore.Signal()
def __init__(self, *args):
QtGui.QPushButton.__init__(self, *args)
self.origStyle = None
self.origText = self.text()
self.origStyle = self.styleSheet()
self.origTip = self.toolTip()
self.limitedTime = True
#self.textTimer = QtCore.QTimer()
#self.tipTimer = QtCore.QTimer()
#self.textTimer.timeout.connect(self.setText)
#self.tipTimer.timeout.connect(self.setToolTip)
self.sigCallSuccess.connect(self.success)
self.sigCallFailure.connect(self.failure)
self.sigCallProcess.connect(self.processing)
self.sigReset.connect(self.reset)
def feedback(self, success, message=None, tip="", limitedTime=True):
"""Calls success() or failure(). If you want the message to be displayed until the user takes an action, set limitedTime to False. Then call self.reset() after the desired action.Threadsafe."""
if success:
self.success(message, tip, limitedTime=limitedTime)
else:
self.failure(message, tip, limitedTime=limitedTime)
def success(self, message=None, tip="", limitedTime=True):
"""Displays specified message on button and flashes button green to let user know action was successful. If you want the success to be displayed until the user takes an action, set limitedTime to False. Then call self.reset() after the desired action. Threadsafe."""
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.setEnabled(True)
#print "success"
self.startBlink("#0F0", message, tip, limitedTime=limitedTime)
else:
self.sigCallSuccess.emit(message, tip, limitedTime)
def failure(self, message=None, tip="", limitedTime=True):
"""Displays specified message on button and flashes button red to let user know there was an error. If you want the error to be displayed until the user takes an action, set limitedTime to False. Then call self.reset() after the desired action. Threadsafe. """
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.setEnabled(True)
#print "fail"
self.startBlink("#F00", message, tip, limitedTime=limitedTime)
else:
self.sigCallFailure.emit(message, tip, limitedTime)
def processing(self, message="Processing..", tip="", processEvents=True):
"""Displays specified message on button to let user know the action is in progress. Threadsafe. """
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.setEnabled(False)
self.setText(message, temporary=True)
self.setToolTip(tip, temporary=True)
if processEvents:
QtGui.QApplication.processEvents()
else:
self.sigCallProcess.emit(message, tip, processEvents)
def reset(self):
"""Resets the button to its original text and style. Threadsafe."""
isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
if isGuiThread:
self.limitedTime = True
self.setText()
self.setToolTip()
self.setStyleSheet()
else:
self.sigReset.emit()
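    # Note on the pattern above: success()/failure()/processing()/reset()
    # mutate the widget directly when called from the Qt GUI thread, and
    # otherwise re-emit the call through a queued signal so the connected
    # slot executes on the GUI thread.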
def startBlink(self, color, message=None, tip="", limitedTime=True):
#if self.origStyle is None:
#self.origStyle = self.styleSheet()
#self.origText = self.text()
self.setFixedHeight(self.height())
if message is not None:
self.setText(message, temporary=True)
self.setToolTip(tip, temporary=True)
self.count = 0
#self.indStyle = "QPushButton {border: 2px solid %s; border-radius: 5px}" % color
self.indStyle = "QPushButton {background-color: %s}" % color
self.limitedTime = limitedTime
self.borderOn()
if limitedTime:
QtCore.QTimer.singleShot(2000, self.setText)
QtCore.QTimer.singleShot(10000, self.setToolTip)
def borderOn(self):
self.setStyleSheet(self.indStyle, temporary=True)
if self.limitedTime or self.count <=2:
QtCore.QTimer.singleShot(100, self.borderOff)
def borderOff(self):
self.setStyleSheet()
self.count += 1
if self.count >= 2:
if self.limitedTime:
return
QtCore.QTimer.singleShot(30, self.borderOn)
def setText(self, text=None, temporary=False):
if text is None:
text = self.origText
#print text
QtGui.QPushButton.setText(self, text)
if not temporary:
self.origText = text
def setToolTip(self, text=None, temporary=False):
if text is None:
text = self.origTip
QtGui.QPushButton.setToolTip(self, text)
if not temporary:
self.origTip = text
def setStyleSheet(self, style=None, temporary=False):
if style is None:
style = self.origStyle
QtGui.QPushButton.setStyleSheet(self, style)
if not temporary:
self.origStyle = style
if __name__ == '__main__':
import time
app = QtGui.QApplication([])
win = QtGui.QMainWindow()
btn = FeedbackButton("Button")
fail = True
def click():
btn.processing("Hold on..")
time.sleep(2.0)
global fail
fail = not fail
if fail:
btn.failure(message="FAIL.", tip="There was a failure. Get over it.")
else:
btn.success(message="Bueno!")
btn.clicked.connect(click)
win.setCentralWidget(btn)
    win.show()
    app.exec_()
claneys/shinken | test/shinken_modules.py | 13 | 9693 | #!/usr/bin/env python
import os
import re
import copy
import time
import subprocess
import shutil
import datetime # not used but "sub-"imported by livestatus test.. (to be corrected..)
import sys # not here used but "sub-"imported by livestatus test.. (to be corrected..)
#
from shinken.modulesctx import modulesctx
from shinken.objects.module import Module
from shinken.modulesmanager import ModulesManager
from shinken.misc.datamanager import datamgr
from shinken.log import logger
#
from shinken_test import (
modules_dir,
ShinkenTest,
    time_hacker, # not used here but "sub"-imported by livestatus test (to be corrected)
)
modulesctx.set_modulesdir(modules_dir)
# Special Livestatus module opening since the module rename
#from shinken.modules.livestatus import module as livestatus_broker
livestatus_broker = modulesctx.get_module('livestatus')
LiveStatus_broker = livestatus_broker.LiveStatus_broker
LiveStatus = livestatus_broker.LiveStatus
LiveStatusRegenerator = livestatus_broker.LiveStatusRegenerator
LiveStatusQueryCache = livestatus_broker.LiveStatusQueryCache
LiveStatusClientThread = livestatus_broker.LiveStatusClientThread
Logline = livestatus_broker.Logline
LiveStatusLogStoreMongoDB = modulesctx.get_module('logstore-mongodb').LiveStatusLogStoreMongoDB
LiveStatusLogStoreSqlite = modulesctx.get_module('logstore-sqlite').LiveStatusLogStoreSqlite
livestatus_modconf = Module()
livestatus_modconf.module_name = "livestatus"
livestatus_modconf.module_type = livestatus_broker.properties['type']
livestatus_modconf.properties = livestatus_broker.properties.copy()
class ShinkenModulesTest(ShinkenTest):
def do_load_modules(self):
self.modules_manager.load_and_init()
self.log.log("I correctly loaded the modules: [%s]" % (','.join([inst.get_name() for inst in self.modules_manager.instances])))
def update_broker(self, dodeepcopy=False):
# The brok should be manage in the good order
ids = self.sched.brokers['Default-Broker']['broks'].keys()
ids.sort()
for brok_id in ids:
brok = self.sched.brokers['Default-Broker']['broks'][brok_id]
#print "Managing a brok type", brok.type, "of id", brok_id
#if brok.type == 'update_service_status':
# print "Problem?", brok.data['is_problem']
if dodeepcopy:
brok = copy.deepcopy(brok)
brok.prepare()
self.livestatus_broker.manage_brok(brok)
self.sched.brokers['Default-Broker']['broks'] = {}
def init_livestatus(self, modconf=None, dbmodconf=None, needcache=False):
self.livelogs = 'tmp/livelogs.db' + self.testid
if modconf is None:
modconf = Module({'module_name': 'LiveStatus',
'module_type': 'livestatus',
'port': str(50000 + os.getpid()),
'pnp_path': 'tmp/pnp4nagios_test' + self.testid,
'host': '127.0.0.1',
'socket': 'live',
'name': 'test', #?
})
if dbmodconf is None:
dbmodconf = Module({'module_name': 'LogStore',
'module_type': 'logstore_sqlite',
'use_aggressive_sql': "0",
'database_file': self.livelogs,
'archive_path': os.path.join(os.path.dirname(self.livelogs), 'archives'),
})
modconf.modules = [dbmodconf]
self.livestatus_broker = LiveStatus_broker(modconf)
self.livestatus_broker.create_queues()
#--- livestatus_broker.main
self.livestatus_broker.log = logger
# this seems to damage the logger so that the scheduler can't use it
#self.livestatus_broker.log.load_obj(self.livestatus_broker)
self.livestatus_broker.debug_output = []
self.livestatus_broker.modules_manager = ModulesManager('livestatus', modules_dir, [])
self.livestatus_broker.modules_manager.set_modules(self.livestatus_broker.modules)
        # We can now output some previously silenced debug output
self.livestatus_broker.do_load_modules()
for inst in self.livestatus_broker.modules_manager.instances:
if inst.properties["type"].startswith('logstore'):
f = getattr(inst, 'load', None)
if f and callable(f):
f(self.livestatus_broker) # !!! NOT self here !!!!
break
for s in self.livestatus_broker.debug_output:
print "errors during load", s
del self.livestatus_broker.debug_output
self.livestatus_broker.rg = LiveStatusRegenerator()
self.livestatus_broker.datamgr = datamgr
datamgr.load(self.livestatus_broker.rg)
self.livestatus_broker.query_cache = LiveStatusQueryCache()
if not needcache:
self.livestatus_broker.query_cache.disable()
self.livestatus_broker.rg.register_cache(self.livestatus_broker.query_cache)
#--- livestatus_broker.main
self.livestatus_broker.init()
self.livestatus_broker.db = self.livestatus_broker.modules_manager.instances[0]
self.livestatus_broker.livestatus = LiveStatus(self.livestatus_broker.datamgr, self.livestatus_broker.query_cache, self.livestatus_broker.db, self.livestatus_broker.pnp_path, self.livestatus_broker.from_q)
#--- livestatus_broker.do_main
self.livestatus_broker.db.open()
if hasattr(self.livestatus_broker.db, 'prepare_log_db_table'):
self.livestatus_broker.db.prepare_log_db_table()
#--- livestatus_broker.do_main
class TestConfig(ShinkenModulesTest):
def tearDown(self):
self.livestatus_broker.db.close()
if os.path.exists(self.livelogs):
os.remove(self.livelogs)
if os.path.exists(self.livelogs + "-journal"):
os.remove(self.livelogs + "-journal")
if os.path.exists(self.livestatus_broker.pnp_path):
shutil.rmtree(self.livestatus_broker.pnp_path)
if os.path.exists('var/shinken.log'):
os.remove('var/shinken.log')
if os.path.exists('var/retention.dat'):
os.remove('var/retention.dat')
if os.path.exists('var/status.dat'):
os.remove('var/status.dat')
self.livestatus_broker = None
def contains_line(self, text, pattern):
regex = re.compile(pattern)
for line in text.splitlines():
if re.search(regex, line):
return True
return False
def update_broker(self, dodeepcopy=False):
# The brok should be manage in the good order
ids = self.sched.brokers['Default-Broker']['broks'].keys()
ids.sort()
for brok_id in ids:
brok = self.sched.brokers['Default-Broker']['broks'][brok_id]
#print "Managing a brok type", brok.type, "of id", brok_id
#if brok.type == 'update_service_status':
# print "Problem?", brok.data['is_problem']
if dodeepcopy:
brok = copy.deepcopy(brok)
brok.prepare()
self.livestatus_broker.manage_brok(brok)
self.sched.brokers['Default-Broker']['broks'] = {}
def lines_equal(self, text1, text2):
# gets two multiline strings and compares the contents
        # livestatus output may not be in alphabetical order, so this
# function is used to compare unordered output with unordered
# expected output
# sometimes mklivestatus returns 0 or 1 on an empty result
text1 = text1.replace("200 1", "200 0")
text2 = text2.replace("200 1", "200 0")
text1 = text1.rstrip()
text2 = text2.rstrip()
#print "text1 //%s//" % text1
#print "text2 //%s//" % text2
sorted1 = "\n".join(sorted(text1.split("\n")))
sorted2 = "\n".join(sorted(text2.split("\n")))
len1 = len(text1.split("\n"))
len2 = len(text2.split("\n"))
#print "%s == %s text cmp %s" % (len1, len2, sorted1 == sorted2)
#print "text1 //%s//" % sorted(text1.split("\n"))
#print "text2 //%s//" % sorted(text2.split("\n"))
if sorted1 == sorted2 and len1 == len2:
return True
else:
# Maybe list members are different
# allhosts;test_host_0;test_ok_0;servicegroup_02,servicegroup_01,ok
# allhosts;test_host_0;test_ok_0;servicegroup_02,ok,servicegroup_01
# break it up to
# [['allhosts'], ['test_host_0'], ['test_ok_0'],
# ['ok', 'servicegroup_01', 'servicegroup_02']]
data1 = [[sorted(c.split(',')) for c in columns] for columns in [line.split(';') for line in sorted(text1.split("\n")) if line]]
data2 = [[sorted(c.split(',')) for c in columns] for columns in [line.split(';') for line in sorted(text2.split("\n")) if line]]
#print "text1 //%s//" % data1
#print "text2 //%s//" % data2
# cmp is clever enough to handle nested arrays
return cmp(data1, data2) == 0
def show_broks(self, title):
print
print "--- ", title
for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
if re.compile('^service_').match(brok.type):
pass
#print "BROK:", brok.type
#print "BROK ", brok.data['in_checking']
self.update_broker()
request = 'GET services\nColumns: service_description is_executing\n'
response, keepalive = self.livestatus_broker.livestatus.handle_request(request)
print response
| agpl-3.0 |
nexusz99/boto | tests/integration/s3/test_mfa.py | 136 | 3629 | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some unit tests for S3 MfaDelete with versioning
"""
import unittest
import time
from nose.plugins.attrib import attr
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from boto.s3.deletemarker import DeleteMarker
@attr('notdefault', 's3mfa')
class S3MFATest (unittest.TestCase):
def setUp(self):
self.conn = S3Connection()
self.bucket_name = 'mfa-%d' % int(time.time())
self.bucket = self.conn.create_bucket(self.bucket_name)
def tearDown(self):
for k in self.bucket.list_versions():
self.bucket.delete_key(k.name, version_id=k.version_id)
self.bucket.delete()
def test_mfadel(self):
# Enable Versioning with MfaDelete
mfa_sn = raw_input('MFA S/N: ')
mfa_code = raw_input('MFA Code: ')
self.bucket.configure_versioning(True, mfa_delete=True, mfa_token=(mfa_sn, mfa_code))
# Check enabling mfa worked.
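        # (The bucket's versioning flags may take a while to propagate, so
        # poll with exponential backoff: 2, 4, ... up to 128 seconds.)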
i = 0
for i in range(1, 8):
time.sleep(2**i)
d = self.bucket.get_versioning_status()
if d['Versioning'] == 'Enabled' and d['MfaDelete'] == 'Enabled':
break
self.assertEqual('Enabled', d['Versioning'])
self.assertEqual('Enabled', d['MfaDelete'])
# Add a key to the bucket
k = self.bucket.new_key('foobar')
s1 = 'This is v1'
k.set_contents_from_string(s1)
v1 = k.version_id
# Now try to delete v1 without the MFA token
try:
self.bucket.delete_key('foobar', version_id=v1)
self.fail("Must fail if not using MFA token")
except S3ResponseError:
pass
# Now try delete again with the MFA token
mfa_code = raw_input('MFA Code: ')
self.bucket.delete_key('foobar', version_id=v1, mfa_token=(mfa_sn, mfa_code))
# Next suspend versioning and disable MfaDelete on the bucket
mfa_code = raw_input('MFA Code: ')
self.bucket.configure_versioning(False, mfa_delete=False, mfa_token=(mfa_sn, mfa_code))
# Lastly, check disabling mfa worked.
i = 0
for i in range(1, 8):
time.sleep(2**i)
d = self.bucket.get_versioning_status()
if d['Versioning'] == 'Suspended' and d['MfaDelete'] != 'Enabled':
break
self.assertEqual('Suspended', d['Versioning'])
self.assertNotEqual('Enabled', d['MfaDelete'])
| mit |
fedorpatlin/ansible | lib/ansible/plugins/lookup/credstash.py | 131 | 1891 | # (c) 2015, Ensighten <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
CREDSTASH_INSTALLED = False
try:
import credstash
CREDSTASH_INSTALLED = True
except ImportError:
CREDSTASH_INSTALLED = False
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not CREDSTASH_INSTALLED:
raise AnsibleError('The credstash lookup plugin requires credstash to be installed.')
ret = []
for term in terms:
try:
version = kwargs.pop('version', '')
region = kwargs.pop('region', None)
table = kwargs.pop('table', 'credential-store')
val = credstash.getSecret(term, version, region, table,
context=kwargs)
except credstash.ItemNotFound:
raise AnsibleError('Key {0} not found'.format(term))
except Exception as e:
raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, e.message))
ret.append(val)
return ret
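# Typical playbook usage (for reference only; not executed by this module):
#
#   vars:
#     my_password: "{{ lookup('credstash', 'my-app-password', table='credential-store') }}"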
| gpl-3.0 |
snikch/sot-symfony | vendor/doctrine/orm/docs/en/conf.py | 2448 | 6497 | # -*- coding: utf-8 -*-
#
# Doctrine 2 ORM documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 3 18:10:24 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('_exts'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['configurationblock']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Doctrine 2 ORM'
copyright = u'2010-12, Doctrine Project Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2'
# The full version, including alpha/beta/rc tags.
release = '2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'doctrine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Doctrine2ORMdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation',
u'Doctrine Project Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
primary_domain = "dcorm"
def linkcode_resolve(domain, info):
if domain == 'dcorm':
return 'http://'
return None
| mit |
arthaud/git-dumper | git_dumper.py | 1 | 21245 | #!/usr/bin/env python3
from contextlib import closing
import argparse
import multiprocessing
import os
import os.path
import re
import socket
import subprocess
import sys
import traceback
import urllib.parse
import urllib3
import bs4
import dulwich.index
import dulwich.objects
import dulwich.pack
import requests
import socks
def printf(fmt, *args, file=sys.stdout):
if args:
fmt = fmt % args
file.write(fmt)
file.flush()
def is_html(response):
""" Return True if the response is a HTML webpage """
return (
"Content-Type" in response.headers
and "text/html" in response.headers["Content-Type"]
)
def get_indexed_files(response):
""" Return all the files in the directory index webpage """
html = bs4.BeautifulSoup(response.text, "html.parser")
files = []
bad = {".", "..", "../"}
for link in html.find_all("a"):
url = urllib.parse.urlparse(link.get("href"))
if (
url.path
and url.path not in bad
and not url.path.startswith("/")
and not url.scheme
and not url.netloc
):
files.append(url.path)
return files
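# Illustrative: on a typical autoindex page for .git/ this yields relative
# entries such as ['HEAD', 'config', 'objects/'] while dropping '.', '..',
# absolute paths and links carrying a scheme or host.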
def verify_response(response):
if response.status_code != 200:
return (
False,
"[-] %s/%s responded with status code {code}\n".format(
code=response.status_code
),
)
elif (
"Content-Length" in response.headers
and response.headers["Content-Length"] == 0
):
return False, "[-] %s/%s responded with a zero-length body\n"
elif (
"Content-Type" in response.headers
and "text/html" in response.headers["Content-Type"]
):
return False, "[-] %s/%s responded with HTML\n"
else:
return True, True
def create_intermediate_dirs(path):
""" Create intermediate directories, if necessary """
dirname, basename = os.path.split(path)
if dirname and not os.path.exists(dirname):
try:
os.makedirs(dirname)
except FileExistsError:
pass # race condition
def get_referenced_sha1(obj_file):
""" Return all the referenced SHA1 in the given object file """
objs = []
if isinstance(obj_file, dulwich.objects.Commit):
objs.append(obj_file.tree.decode())
for parent in obj_file.parents:
objs.append(parent.decode())
elif isinstance(obj_file, dulwich.objects.Tree):
for item in obj_file.iteritems():
objs.append(item.sha.decode())
elif isinstance(obj_file, dulwich.objects.Blob):
pass
elif isinstance(obj_file, dulwich.objects.Tag):
pass
else:
printf(
"error: unexpected object type: %r\n" % obj_file, file=sys.stderr
)
sys.exit(1)
return objs
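# For reference: commits contribute their tree plus each parent, trees
# contribute every entry, and blobs/tags contribute nothing, so the object
# walk performed by FindObjectsWorker below bottoms out at blobs.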
class Worker(multiprocessing.Process):
""" Worker for process_tasks """
def __init__(self, pending_tasks, tasks_done, args):
super().__init__()
self.daemon = True
self.pending_tasks = pending_tasks
self.tasks_done = tasks_done
self.args = args
def run(self):
# initialize process
self.init(*self.args)
# fetch and do tasks
while True:
task = self.pending_tasks.get(block=True)
if task is None: # end signal
return
try:
result = self.do_task(task, *self.args)
except Exception:
printf("Task %s raised exception:\n", task, file=sys.stderr)
traceback.print_exc()
result = []
assert isinstance(
result, list
), "do_task() should return a list of tasks"
self.tasks_done.put(result)
def init(self, *args):
raise NotImplementedError
def do_task(self, task, *args):
raise NotImplementedError
def process_tasks(initial_tasks, worker, jobs, args=(), tasks_done=None):
""" Process tasks in parallel """
if not initial_tasks:
return
tasks_seen = set(tasks_done) if tasks_done else set()
pending_tasks = multiprocessing.Queue()
tasks_done = multiprocessing.Queue()
num_pending_tasks = 0
# add all initial tasks in the queue
for task in initial_tasks:
assert task is not None
if task not in tasks_seen:
pending_tasks.put(task)
num_pending_tasks += 1
tasks_seen.add(task)
# initialize processes
processes = [worker(pending_tasks, tasks_done, args) for _ in range(jobs)]
# launch them all
for p in processes:
p.start()
# collect task results
while num_pending_tasks > 0:
task_result = tasks_done.get(block=True)
num_pending_tasks -= 1
for task in task_result:
assert task is not None
if task not in tasks_seen:
pending_tasks.put(task)
num_pending_tasks += 1
tasks_seen.add(task)
# send termination signal (task=None)
for _ in range(jobs):
pending_tasks.put(None)
# join all
for p in processes:
p.join()
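# Flow sketch for process_tasks(): seed the queue with the initial paths, let
# ``jobs`` worker processes expand each task into zero or more follow-up
# tasks, deduplicate via ``tasks_seen``, and return once every discovered
# task has been handled exactly once.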
class DownloadWorker(Worker):
""" Download a list of files """
def init(self, url, directory, retry, timeout, http_headers):
self.session = requests.Session()
self.session.verify = False
self.session.headers = http_headers
self.session.mount(
url, requests.adapters.HTTPAdapter(max_retries=retry)
)
def do_task(self, filepath, url, directory, retry, timeout, http_headers):
if os.path.isfile(os.path.join(directory, filepath)):
printf("[-] Already downloaded %s/%s\n", url, filepath)
return []
with closing(
self.session.get(
"%s/%s" % (url, filepath),
allow_redirects=False,
stream=True,
timeout=timeout,
)
) as response:
printf(
"[-] Fetching %s/%s [%d]\n",
url,
filepath,
response.status_code,
)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "wb") as f:
for chunk in response.iter_content(4096):
f.write(chunk)
return []
class RecursiveDownloadWorker(DownloadWorker):
""" Download a directory recursively """
def do_task(self, filepath, url, directory, retry, timeout, http_headers):
if os.path.isfile(os.path.join(directory, filepath)):
printf("[-] Already downloaded %s/%s\n", url, filepath)
return []
with closing(
self.session.get(
"%s/%s" % (url, filepath),
allow_redirects=False,
stream=True,
timeout=timeout,
)
) as response:
printf(
"[-] Fetching %s/%s [%d]\n",
url,
filepath,
response.status_code,
)
if (
response.status_code in (301, 302)
and "Location" in response.headers
and response.headers["Location"].endswith(filepath + "/")
):
return [filepath + "/"]
if filepath.endswith("/"): # directory index
assert is_html(response)
return [
filepath + filename
for filename in get_indexed_files(response)
]
else: # file
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "wb") as f:
for chunk in response.iter_content(4096):
f.write(chunk)
return []
class FindRefsWorker(DownloadWorker):
""" Find refs/ """
def do_task(self, filepath, url, directory, retry, timeout, http_headers):
response = self.session.get(
"%s/%s" % (url, filepath), allow_redirects=False, timeout=timeout
)
printf(
"[-] Fetching %s/%s [%d]\n", url, filepath, response.status_code
)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "w") as f:
f.write(response.text)
# find refs
tasks = []
for ref in re.findall(
r"(refs(/[a-zA-Z0-9\-\.\_\*]+)+)", response.text
):
ref = ref[0]
if not ref.endswith("*"):
tasks.append(".git/%s" % ref)
tasks.append(".git/logs/%s" % ref)
return tasks
class FindObjectsWorker(DownloadWorker):
""" Find objects """
def do_task(self, obj, url, directory, retry, timeout, http_headers):
filepath = ".git/objects/%s/%s" % (obj[:2], obj[2:])
if os.path.isfile(os.path.join(directory, filepath)):
printf("[-] Already downloaded %s/%s\n", url, filepath)
else:
response = self.session.get(
"%s/%s" % (url, filepath),
allow_redirects=False,
timeout=timeout,
)
printf(
"[-] Fetching %s/%s [%d]\n",
url,
filepath,
response.status_code,
)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, filepath, file=sys.stderr)
return []
abspath = os.path.abspath(os.path.join(directory, filepath))
create_intermediate_dirs(abspath)
# write file
with open(abspath, "wb") as f:
f.write(response.content)
abspath = os.path.abspath(os.path.join(directory, filepath))
# parse object file to find other objects
obj_file = dulwich.objects.ShaFile.from_path(abspath)
return get_referenced_sha1(obj_file)
def fetch_git(url, directory, jobs, retry, timeout, http_headers):
""" Dump a git repository into the output directory """
assert os.path.isdir(directory), "%s is not a directory" % directory
assert jobs >= 1, "invalid number of jobs"
assert retry >= 1, "invalid number of retries"
assert timeout >= 1, "invalid timeout"
session = requests.Session()
session.verify = False
session.headers = http_headers
session.mount(url, requests.adapters.HTTPAdapter(max_retries=retry))
if os.listdir(directory):
printf("Warning: Destination '%s' is not empty\n", directory)
# find base url
url = url.rstrip("/")
if url.endswith("HEAD"):
url = url[:-4]
url = url.rstrip("/")
if url.endswith(".git"):
url = url[:-4]
url = url.rstrip("/")
# check for /.git/HEAD
printf("[-] Testing %s/.git/HEAD ", url)
response = session.get("%s/.git/HEAD" % url, allow_redirects=False)
printf("[%d]\n", response.status_code)
valid, error_message = verify_response(response)
if not valid:
printf(error_message, url, "/.git/HEAD", file=sys.stderr)
return 1
elif not re.match(r'^(ref:.*|[0-9a-f]{40}$)', response.text.strip()):
printf(
"error: %s/.git/HEAD is not a git HEAD file\n",
url,
file=sys.stderr,
)
return 1
# check for directory listing
printf("[-] Testing %s/.git/ ", url)
response = session.get("%s/.git/" % url, allow_redirects=False)
printf("[%d]\n", response.status_code)
if (
response.status_code == 200
and is_html(response)
and "HEAD" in get_indexed_files(response)
):
printf("[-] Fetching .git recursively\n")
process_tasks(
[".git/", ".gitignore"],
RecursiveDownloadWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
printf("[-] Running git checkout .\n")
os.chdir(directory)
subprocess.check_call(["git", "checkout", "."])
return 0
# no directory listing
printf("[-] Fetching common files\n")
tasks = [
".gitignore",
".git/COMMIT_EDITMSG",
".git/description",
".git/hooks/applypatch-msg.sample",
".git/hooks/commit-msg.sample",
".git/hooks/post-commit.sample",
".git/hooks/post-receive.sample",
".git/hooks/post-update.sample",
".git/hooks/pre-applypatch.sample",
".git/hooks/pre-commit.sample",
".git/hooks/pre-push.sample",
".git/hooks/pre-rebase.sample",
".git/hooks/pre-receive.sample",
".git/hooks/prepare-commit-msg.sample",
".git/hooks/update.sample",
".git/index",
".git/info/exclude",
".git/objects/info/packs",
]
process_tasks(
tasks,
DownloadWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
# find refs
printf("[-] Finding refs/\n")
tasks = [
".git/FETCH_HEAD",
".git/HEAD",
".git/ORIG_HEAD",
".git/config",
".git/info/refs",
".git/logs/HEAD",
".git/logs/refs/heads/master",
".git/logs/refs/remotes/origin/HEAD",
".git/logs/refs/remotes/origin/master",
".git/logs/refs/stash",
".git/packed-refs",
".git/refs/heads/master",
".git/refs/remotes/origin/HEAD",
".git/refs/remotes/origin/master",
".git/refs/stash",
".git/refs/wip/wtree/refs/heads/master", # Magit
".git/refs/wip/index/refs/heads/master", # Magit
]
process_tasks(
tasks,
FindRefsWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
# find packs
printf("[-] Finding packs\n")
tasks = []
# use .git/objects/info/packs to find packs
info_packs_path = os.path.join(
directory, ".git", "objects", "info", "packs"
)
if os.path.exists(info_packs_path):
with open(info_packs_path, "r") as f:
info_packs = f.read()
for sha1 in re.findall(r"pack-([a-f0-9]{40})\.pack", info_packs):
tasks.append(".git/objects/pack/pack-%s.idx" % sha1)
tasks.append(".git/objects/pack/pack-%s.pack" % sha1)
process_tasks(
tasks,
DownloadWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
)
# find objects
printf("[-] Finding objects\n")
objs = set()
packed_objs = set()
# .git/packed-refs, .git/info/refs, .git/refs/*, .git/logs/*
files = [
os.path.join(directory, ".git", "packed-refs"),
os.path.join(directory, ".git", "info", "refs"),
os.path.join(directory, ".git", "FETCH_HEAD"),
os.path.join(directory, ".git", "ORIG_HEAD"),
]
for dirpath, _, filenames in os.walk(
os.path.join(directory, ".git", "refs")
):
for filename in filenames:
files.append(os.path.join(dirpath, filename))
for dirpath, _, filenames in os.walk(
os.path.join(directory, ".git", "logs")
):
for filename in filenames:
files.append(os.path.join(dirpath, filename))
for filepath in files:
if not os.path.exists(filepath):
continue
with open(filepath, "r") as f:
content = f.read()
for obj in re.findall(r"(^|\s)([a-f0-9]{40})($|\s)", content):
obj = obj[1]
objs.add(obj)
# use .git/index to find objects
index_path = os.path.join(directory, ".git", "index")
if os.path.exists(index_path):
index = dulwich.index.Index(index_path)
for entry in index.iterblobs():
objs.add(entry[1].decode())
# use packs to find more objects to fetch, and objects that are packed
pack_file_dir = os.path.join(directory, ".git", "objects", "pack")
if os.path.isdir(pack_file_dir):
for filename in os.listdir(pack_file_dir):
if filename.startswith("pack-") and filename.endswith(".pack"):
pack_data_path = os.path.join(pack_file_dir, filename)
pack_idx_path = os.path.join(
pack_file_dir, filename[:-5] + ".idx"
)
pack_data = dulwich.pack.PackData(pack_data_path)
pack_idx = dulwich.pack.load_pack_index(pack_idx_path)
pack = dulwich.pack.Pack.from_objects(pack_data, pack_idx)
for obj_file in pack.iterobjects():
packed_objs.add(obj_file.sha().hexdigest())
objs |= set(get_referenced_sha1(obj_file))
# fetch all objects
printf("[-] Fetching objects\n")
process_tasks(
objs,
FindObjectsWorker,
jobs,
args=(url, directory, retry, timeout, http_headers),
tasks_done=packed_objs,
)
# git checkout
printf("[-] Running git checkout .\n")
os.chdir(directory)
# ignore errors
subprocess.call(["git", "checkout", "."], stderr=open(os.devnull, "wb"))
return 0
def main():
parser = argparse.ArgumentParser(
usage="git-dumper [options] URL DIR",
description="Dump a git repository from a website.",
)
parser.add_argument("url", metavar="URL", help="url")
parser.add_argument("directory", metavar="DIR", help="output directory")
parser.add_argument("--proxy", help="use the specified proxy")
parser.add_argument(
"-j",
"--jobs",
type=int,
default=10,
help="number of simultaneous requests",
)
parser.add_argument(
"-r",
"--retry",
type=int,
default=3,
help="number of request attempts before giving up",
)
parser.add_argument(
"-t",
"--timeout",
type=int,
default=3,
help="maximum time in seconds before giving up",
)
parser.add_argument(
"-u",
"--user-agent",
type=str,
default="Mozilla/5.0 (Windows NT 10.0; rv:78.0) Gecko/20100101 Firefox/78.0",
help="user-agent to use for requests",
)
parser.add_argument(
"-H",
"--header",
type=str,
action="append",
help="additional http headers, e.g `NAME=VALUE`",
)
args = parser.parse_args()
# jobs
if args.jobs < 1:
parser.error("invalid number of jobs, got `%d`" % args.jobs)
# retry
if args.retry < 1:
parser.error("invalid number of retries, got `%d`" % args.retry)
# timeout
if args.timeout < 1:
parser.error("invalid timeout, got `%d`" % args.timeout)
# header
http_headers = {"User-Agent": args.user_agent}
if args.header:
for header in args.header:
tokens = header.split("=", maxsplit=1)
if len(tokens) != 2:
parser.error(
"http header must have the form NAME=VALUE, got `%s`"
% header
)
name, value = tokens
http_headers[name.strip()] = value.strip()
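    # Illustrative example (added): passing -H "Cookie=session=abc" splits on
    # the first "=" only, yielding {"Cookie": "session=abc"} alongside the
    # User-Agent entry, so header values containing "=" stay intact.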
# proxy
if args.proxy:
proxy_valid = False
for pattern, proxy_type in [
(r"^socks5:(.*):(\d+)$", socks.PROXY_TYPE_SOCKS5),
(r"^socks4:(.*):(\d+)$", socks.PROXY_TYPE_SOCKS4),
(r"^http://(.*):(\d+)$", socks.PROXY_TYPE_HTTP),
(r"^(.*):(\d+)$", socks.PROXY_TYPE_SOCKS5),
]:
m = re.match(pattern, args.proxy)
if m:
socks.setdefaultproxy(proxy_type, m.group(1), int(m.group(2)))
socket.socket = socks.socksocket
proxy_valid = True
break
if not proxy_valid:
parser.error("invalid proxy, got `%s`" % args.proxy)
# output directory
if not os.path.exists(args.directory):
os.makedirs(args.directory)
if not os.path.isdir(args.directory):
parser.error("`%s` is not a directory" % args.directory)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# fetch everything
sys.exit(
fetch_git(
args.url,
args.directory,
args.jobs,
args.retry,
args.timeout,
http_headers,
)
)
if __name__ == "__main__":
main()
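# Illustrative invocation (added; not part of the original script):
#   git-dumper http://example.com/.git/ ./dump
# probes http://example.com/.git/HEAD first, then either mirrors .git/
# recursively (if directory listing is enabled) or reconstructs it from
# refs, packs, and loose objects before running `git checkout .`.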
| mit |
gudcjfdldu/volatility | volatility/plugins/netscan.py | 44 | 9924 | # Volatility
#
# Authors:
# Michael Hale Ligh <[email protected]>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.utils as utils
import volatility.plugins.common as common
import volatility.scan as scan
import volatility.obj as obj
import volatility.cache as cache
import volatility.debug as debug
import socket
import volatility.plugins.overlays.windows.tcpip_vtypes as tcpip_vtypes
# Python's socket.AF_INET6 is 0x1e but Microsoft defines it
# as a constant value of 0x17 in their source code. Thus we
# need Microsoft's since that's what is found in memory.
AF_INET = 2
AF_INET6 = 0x17
# String representations of INADDR_ANY and INADDR6_ANY
inaddr_any = utils.inet_ntop(socket.AF_INET, '\0' * 4)
inaddr6_any = utils.inet_ntop(socket.AF_INET6, '\0' * 16)
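# For reference (added; assumes utils.inet_ntop mirrors socket.inet_ntop):
# inaddr_any is "0.0.0.0" and inaddr6_any is "::".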
#--------------------------------------------------------------------------------
# pool scanners
#--------------------------------------------------------------------------------
class PoolScanUdpEndpoint(scan.PoolScanner):
"""PoolScanner for Udp Endpoints"""
def object_offset(self, found, address_space):
return found + (address_space.profile.get_obj_size("_POOL_HEADER") -
address_space.profile.get_obj_offset("_POOL_HEADER", "PoolTag"))
checks = [ ('PoolTagCheck', dict(tag = "UdpA")),
# Seen as 0xa8 on Vista SP0, 0xb0 on Vista SP2, and 0xb8 on 7
# Seen as 0x150 on Win7 SP0 x64
('CheckPoolSize', dict(condition = lambda x: x >= 0xa8)),
('CheckPoolType', dict(non_paged = True, free = True)),
('CheckPoolIndex', dict(value = 0)),
]
class PoolScanTcpListener(PoolScanUdpEndpoint):
"""PoolScanner for Tcp Listeners"""
checks = [ ('PoolTagCheck', dict(tag = "TcpL")),
# Seen as 0x120 on Win7 SP0 x64
('CheckPoolSize', dict(condition = lambda x: x >= 0xa8)),
('CheckPoolType', dict(non_paged = True, free = True)),
('CheckPoolIndex', dict(value = 0)),
]
class PoolScanTcpEndpoint(PoolScanUdpEndpoint):
"""PoolScanner for TCP Endpoints"""
checks = [ ('PoolTagCheck', dict(tag = "TcpE")),
# Seen as 0x1f0 on Vista SP0, 0x1f8 on Vista SP2 and 0x210 on 7
# Seen as 0x320 on Win7 SP0 x64
('CheckPoolSize', dict(condition = lambda x: x >= 0x1f0)),
('CheckPoolType', dict(non_paged = True, free = True)),
('CheckPoolIndex', dict(value = 0)),
]
#--------------------------------------------------------------------------------
# object classes
#--------------------------------------------------------------------------------
class _TCP_LISTENER(obj.CType):
"""Class for objects found in TcpL pools"""
@property
def AddressFamily(self):
return self.InetAF.dereference().AddressFamily
@property
def Owner(self):
return self.m('Owner').dereference()
def dual_stack_sockets(self):
"""Handle Windows dual-stack sockets"""
# If this pointer is valid, the socket is bound to
# a specific IP address. Otherwise, the socket is
# listening on all IP addresses of the address family.
local_addr = self.LocalAddr.dereference()
# Note the remote address is always INADDR_ANY or
# INADDR6_ANY for sockets. The moment a client
# connects to the listener, a TCP_ENDPOINT is created
# and that structure contains the remote address.
        if local_addr is not None:
inaddr = local_addr.pData.dereference().dereference()
if self.AddressFamily == AF_INET:
yield "v4", inaddr.addr4, inaddr_any
else:
yield "v6", inaddr.addr6, inaddr6_any
else:
yield "v4", inaddr_any, inaddr_any
if self.AddressFamily == AF_INET6:
yield "v6", inaddr6_any, inaddr6_any
class _TCP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in TcpE pools"""
def _ipv4_or_ipv6(self, in_addr):
if self.AddressFamily == AF_INET:
return in_addr.addr4
else:
return in_addr.addr6
@property
def LocalAddress(self):
inaddr = self.AddrInfo.dereference().Local.\
pData.dereference().dereference()
return self._ipv4_or_ipv6(inaddr)
@property
def RemoteAddress(self):
inaddr = self.AddrInfo.dereference().\
Remote.dereference()
return self._ipv4_or_ipv6(inaddr)
class _UDP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in UdpA pools"""
#--------------------------------------------------------------------------------
# profile modifications
#--------------------------------------------------------------------------------
class NetscanObjectClasses(obj.ProfileModification):
"""Network OCs for Vista, 2008, and 7 x86 and x64"""
before = ['WindowsObjectClasses']
conditions = {'os': lambda x: x == 'windows',
'major': lambda x : x == 6,
'minor': lambda x : x >= 0}
def modification(self, profile):
profile.object_classes.update({
'_TCP_LISTENER': _TCP_LISTENER,
'_TCP_ENDPOINT': _TCP_ENDPOINT,
'_UDP_ENDPOINT': _UDP_ENDPOINT,
})
#--------------------------------------------------------------------------------
# netscan plugin
#--------------------------------------------------------------------------------
class Netscan(common.AbstractWindowsCommand):
"""Scan a Vista, 2008 or Windows 7 image for connections and sockets"""
@staticmethod
def is_valid_profile(profile):
return (profile.metadata.get('os', 'unknown') == 'windows' and
profile.metadata.get('major', 0) == 6)
@cache.CacheDecorator("tests/netscan")
def calculate(self):
# Virtual kernel space for dereferencing pointers
kernel_space = utils.load_as(self._config)
# Physical space for scanning
flat_space = utils.load_as(self._config, astype = 'physical')
if not self.is_valid_profile(kernel_space.profile):
debug.error("This command does not support the selected profile.")
# Scan for TCP listeners also known as sockets
for offset in PoolScanTcpListener().scan(flat_space):
tcpentry = obj.Object('_TCP_LISTENER', offset = offset,
vm = flat_space, native_vm = kernel_space)
# Only accept IPv4 or IPv6
if tcpentry.AddressFamily not in (AF_INET, AF_INET6):
continue
# For TcpL, the state is always listening and the remote port is zero
for ver, laddr, raddr in tcpentry.dual_stack_sockets():
yield tcpentry, "TCP" + ver, laddr, tcpentry.Port, raddr, 0, "LISTENING"
# Scan for TCP endpoints also known as connections
for offset in PoolScanTcpEndpoint().scan(flat_space):
tcpentry = obj.Object('_TCP_ENDPOINT', offset = offset,
vm = flat_space, native_vm = kernel_space)
if tcpentry.AddressFamily == AF_INET:
proto = "TCPv4"
elif tcpentry.AddressFamily == AF_INET6:
proto = "TCPv6"
else:
continue
# These are our sanity checks
if (tcpentry.State.v() not in tcpip_vtypes.TCP_STATE_ENUM or
(not tcpentry.LocalAddress and (not tcpentry.Owner or
tcpentry.Owner.UniqueProcessId == 0 or
tcpentry.Owner.UniqueProcessId > 65535))):
continue
yield tcpentry, proto, tcpentry.LocalAddress, tcpentry.LocalPort, \
tcpentry.RemoteAddress, tcpentry.RemotePort, tcpentry.State
# Scan for UDP endpoints
for offset in PoolScanUdpEndpoint().scan(flat_space):
udpentry = obj.Object('_UDP_ENDPOINT', offset = offset,
vm = flat_space, native_vm = kernel_space)
# Only accept IPv4 or IPv6
if udpentry.AddressFamily not in (AF_INET, AF_INET6):
continue
# For UdpA, the state is always blank and the remote end is asterisks
for ver, laddr, _ in udpentry.dual_stack_sockets():
yield udpentry, "UDP" + ver, laddr, udpentry.Port, "*", "*", ""
def render_text(self, outfd, data):
outfd.write("{0:<10} {1:<8} {2:<30} {3:<20} {4:<16} {5:<8} {6:<14} {7}\n".format(
"Offset(P)", "Proto", "Local Address", "Foreign Address",
"State", "Pid", "Owner", "Created"))
for net_object, proto, laddr, lport, raddr, rport, state in data:
lendpoint = "{0}:{1}".format(laddr, lport)
rendpoint = "{0}:{1}".format(raddr, rport)
outfd.write("{0:<#10x} {1:<8} {2:<30} {3:<20} {4:<16} {5:<8} {6:<14} {7}\n".format(
net_object.obj_offset, proto, lendpoint,
rendpoint, state, net_object.Owner.UniqueProcessId,
net_object.Owner.ImageFileName,
str(net_object.CreateTime or '')
))
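# Illustrative output row (added; values hypothetical, columns abridged):
#   0x3e2a1b30  TCPv4  10.0.2.15:49155  104.16.0.1:443  ESTABLISHED  1024  chrome.exe  2017-01-01 12:00:00 UTC+0000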
| gpl-2.0 |
Slezhuk/ansible | lib/ansible/module_utils/k8s_common.py | 62 | 12599 | #
# Copyright 2017 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import copy
import json
import os
from ansible.module_utils.basic import AnsibleModule
try:
from openshift.helper.ansible import KubernetesAnsibleModuleHelper, ARG_ATTRIBUTES_BLACKLIST
from openshift.helper.exceptions import KubernetesException
HAS_K8S_MODULE_HELPER = True
except ImportError as exc:
HAS_K8S_MODULE_HELPER = False
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
class KubernetesAnsibleException(Exception):
pass
class KubernetesAnsibleModule(AnsibleModule):
@staticmethod
def get_helper(api_version, kind):
return KubernetesAnsibleModuleHelper(api_version, kind)
def __init__(self, kind, api_version):
self.api_version = api_version
self.kind = kind
self.argspec_cache = None
if not HAS_K8S_MODULE_HELPER:
raise KubernetesAnsibleException(
"This module requires the OpenShift Python client. Try `pip install openshift`"
)
if not HAS_YAML:
raise KubernetesAnsibleException(
"This module requires PyYAML. Try `pip install PyYAML`"
)
try:
self.helper = self.get_helper(api_version, kind)
except Exception as exc:
raise KubernetesAnsibleException(
"Error initializing AnsibleModuleHelper: {}".format(exc)
)
mutually_exclusive = (
('resource_definition', 'src'),
)
AnsibleModule.__init__(self,
argument_spec=self.argspec,
supports_check_mode=True,
mutually_exclusive=mutually_exclusive)
@property
def argspec(self):
"""
Build the module argument spec from the helper.argspec, removing any extra attributes not needed by
Ansible.
:return: dict: a valid Ansible argument spec
"""
if not self.argspec_cache:
spec = {}
for arg_name, arg_properties in self.helper.argspec.items():
spec[arg_name] = {}
for option, option_value in arg_properties.items():
if option not in ARG_ATTRIBUTES_BLACKLIST:
if option == 'choices':
if isinstance(option_value, dict):
spec[arg_name]['choices'] = [value for key, value in option_value.items()]
else:
spec[arg_name]['choices'] = option_value
else:
spec[arg_name][option] = option_value
self.argspec_cache = spec
return self.argspec_cache
def execute_module(self):
"""
Performs basic CRUD operations on the model object. Ends by calling
AnsibleModule.fail_json(), if an error is encountered, otherwise
AnsibleModule.exit_json() with a dict containing:
changed: boolean
api_version: the API version
<kind>: a dict representing the object's state
:return: None
"""
if self.params.get('debug'):
self.helper.enable_debug(reset_logfile=False)
self.helper.log_argspec()
resource_definition = self.params.get('resource_definition')
if self.params.get('src'):
resource_definition = self.load_resource_definition(self.params['src'])
if resource_definition:
resource_params = self.resource_to_parameters(resource_definition)
self.params.update(resource_params)
state = self.params.get('state', None)
force = self.params.get('force', False)
name = self.params.get('name')
namespace = self.params.get('namespace', None)
existing = None
return_attributes = dict(changed=False, api_version=self.api_version)
return_attributes[self.helper.base_model_name_snake] = {}
try:
auth_options = {}
for key, value in self.helper.argspec.items():
if value.get('auth_option') and self.params.get(key) is not None:
auth_options[key] = self.params[key]
self.helper.set_client_config(**auth_options)
except KubernetesException as e:
self.fail_json(msg='Error loading config', error=str(e))
if state is None:
            # This is a list, rollback, or similar module with no 'state' param
if self.helper.base_model_name_snake.endswith('list'):
# For list modules, execute a GET, and exit
k8s_obj = self._read(name, namespace)
return_attributes[self.kind] = k8s_obj.to_dict()
self.exit_json(**return_attributes)
elif self.helper.has_method('create'):
# For a rollback, execute a POST, and exit
k8s_obj = self._create(namespace)
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
else:
self.fail_json(msg="Missing state parameter. Expected one of: present, absent")
# CRUD modules
try:
existing = self.helper.get_object(name, namespace)
except KubernetesException as exc:
self.fail_json(msg='Failed to retrieve requested object: {}'.format(exc.message),
error=exc.value.get('status'))
if state == 'absent':
if not existing:
# The object already does not exist
self.exit_json(**return_attributes)
else:
# Delete the object
if not self.check_mode:
try:
self.helper.delete_object(name, namespace)
except KubernetesException as exc:
self.fail_json(msg="Failed to delete object: {}".format(exc.message),
error=exc.value.get('status'))
return_attributes['changed'] = True
self.exit_json(**return_attributes)
else:
if not existing:
k8s_obj = self._create(namespace)
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
if existing and force:
k8s_obj = None
request_body = self.helper.request_body_from_params(self.params)
if not self.check_mode:
try:
k8s_obj = self.helper.replace_object(name, namespace, body=request_body)
except KubernetesException as exc:
self.fail_json(msg="Failed to replace object: {}".format(exc.message),
error=exc.value.get('status'))
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
# Check if existing object should be patched
k8s_obj = copy.deepcopy(existing)
try:
self.helper.object_from_params(self.params, obj=k8s_obj)
except KubernetesException as exc:
self.fail_json(msg="Failed to patch object: {}".format(exc.message))
match, diff = self.helper.objects_match(existing, k8s_obj)
if match:
return_attributes[self.kind] = existing.to_dict()
self.exit_json(**return_attributes)
else:
self.helper.log('Existing:')
self.helper.log(json.dumps(existing.to_dict(), indent=4))
self.helper.log('\nDifferences:')
self.helper.log(json.dumps(diff, indent=4))
# Differences exist between the existing obj and requested params
if not self.check_mode:
try:
k8s_obj = self.helper.patch_object(name, namespace, k8s_obj)
except KubernetesException as exc:
self.fail_json(msg="Failed to patch object: {}".format(exc.message))
return_attributes[self.kind] = k8s_obj.to_dict()
return_attributes['changed'] = True
self.exit_json(**return_attributes)
def _create(self, namespace):
request_body = None
k8s_obj = None
try:
request_body = self.helper.request_body_from_params(self.params)
except KubernetesException as exc:
self.fail_json(msg="Failed to create object: {}".format(exc.message))
if not self.check_mode:
try:
k8s_obj = self.helper.create_object(namespace, body=request_body)
except KubernetesException as exc:
self.fail_json(msg="Failed to create object: {}".format(exc.message),
error=exc.value.get('status'))
return k8s_obj
def _read(self, name, namespace):
k8s_obj = None
try:
k8s_obj = self.helper.get_object(name, namespace)
except KubernetesException as exc:
self.fail_json(msg='Failed to retrieve requested object',
error=exc.value.get('status'))
return k8s_obj
def load_resource_definition(self, src):
""" Load the requested src path """
result = None
path = os.path.normpath(src)
self.helper.log("Reading definition from {}".format(path))
if not os.path.exists(path):
self.fail_json(msg="Error accessing {}. Does the file exist?".format(path))
try:
result = yaml.safe_load(open(path, 'r'))
except (IOError, yaml.YAMLError) as exc:
self.fail_json(msg="Error loading resource_definition: {}".format(exc))
return result
def resource_to_parameters(self, resource):
""" Converts a resource definition to module parameters """
parameters = {}
for key, value in resource.items():
if key in ('apiVersion', 'kind', 'status'):
continue
elif key == 'metadata' and isinstance(value, dict):
for meta_key, meta_value in value.items():
if meta_key in ('name', 'namespace', 'labels', 'annotations'):
parameters[meta_key] = meta_value
elif key in self.helper.argspec and value is not None:
parameters[key] = value
elif isinstance(value, dict):
self._add_parameter(value, [key], parameters)
self.helper.log("Request to parameters: {}".format(json.dumps(parameters)))
return parameters
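    # Illustrative sketch (added; hypothetical resource): given
    #   {"apiVersion": "v1", "kind": "Service",
    #    "metadata": {"name": "web", "namespace": "default"},
    #    "spec": {"type": "NodePort"}}
    # this method yields parameters such as
    #   {"name": "web", "namespace": "default", "spec_type": "NodePort"}
    # assuming "spec_type" appears in the helper's argspec.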
def _add_parameter(self, request, path, parameters):
for key, value in request.items():
if path:
param_name = '_'.join(path + [self.helper.attribute_to_snake(key)])
else:
param_name = self.helper.attribute_to_snake(key)
if param_name in self.helper.argspec and value is not None:
parameters[param_name] = value
elif isinstance(value, dict):
continue_path = copy.copy(path) if path else []
continue_path.append(self.helper.attribute_to_snake(key))
self._add_parameter(value, continue_path, parameters)
else:
self.fail_json(
msg=("Error parsing resource definition. Encountered {}, which does not map to a module "
"parameter. If this looks like a problem with the module, please open an issue at "
"github.com/openshift/openshift-restclient-python/issues").format(param_name)
)
| gpl-3.0 |
alanch-ms/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/UserDict.py | 358 | 5811 | """A more or less complete user-defined wrapper around dictionary objects."""
class UserDict:
def __init__(self, dict=None, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
if len(kwargs):
self.update(kwargs)
def __repr__(self): return repr(self.data)
def __cmp__(self, dict):
if isinstance(dict, UserDict):
return cmp(self.data, dict.data)
else:
return cmp(self.data, dict)
__hash__ = None # Avoid Py3k warning
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
return self.data[key]
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, key)
raise KeyError(key)
def __setitem__(self, key, item): self.data[key] = item
def __delitem__(self, key): del self.data[key]
def clear(self): self.data.clear()
def copy(self):
if self.__class__ is UserDict:
return UserDict(self.data.copy())
import copy
data = self.data
try:
self.data = {}
c = copy.copy(self)
finally:
self.data = data
c.update(self)
return c
def keys(self): return self.data.keys()
def items(self): return self.data.items()
def iteritems(self): return self.data.iteritems()
def iterkeys(self): return self.data.iterkeys()
def itervalues(self): return self.data.itervalues()
def values(self): return self.data.values()
def has_key(self, key): return key in self.data
def update(self, dict=None, **kwargs):
if dict is None:
pass
elif isinstance(dict, UserDict):
self.data.update(dict.data)
elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
self.data.update(dict)
else:
for k, v in dict.items():
self[k] = v
if len(kwargs):
self.data.update(kwargs)
def get(self, key, failobj=None):
if key not in self:
return failobj
return self[key]
def setdefault(self, key, failobj=None):
if key not in self:
self[key] = failobj
return self[key]
def pop(self, key, *args):
return self.data.pop(key, *args)
def popitem(self):
return self.data.popitem()
def __contains__(self, key):
return key in self.data
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
class IterableUserDict(UserDict):
def __iter__(self):
return iter(self.data)
import _abcoll
_abcoll.MutableMapping.register(IterableUserDict)
class DictMixin:
# Mixin defining all dictionary methods for classes that already have
# a minimum dictionary interface including getitem, setitem, delitem,
# and keys. Without knowledge of the subclass constructor, the mixin
# does not define __init__() or copy(). In addition to the four base
# methods, progressively more efficiency comes with defining
# __contains__(), __iter__(), and iteritems().
# second level definitions support higher levels
def __iter__(self):
for k in self.keys():
yield k
def has_key(self, key):
try:
self[key]
except KeyError:
return False
return True
def __contains__(self, key):
return self.has_key(key)
# third level takes advantage of second level definitions
def iteritems(self):
for k in self:
yield (k, self[k])
def iterkeys(self):
return self.__iter__()
# fourth level uses definitions from lower levels
def itervalues(self):
for _, v in self.iteritems():
yield v
def values(self):
return [v for _, v in self.iteritems()]
def items(self):
return list(self.iteritems())
def clear(self):
for key in self.keys():
del self[key]
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
def pop(self, key, *args):
if len(args) > 1:
raise TypeError, "pop expected at most 2 arguments, got "\
+ repr(1 + len(args))
try:
value = self[key]
except KeyError:
if args:
return args[0]
raise
del self[key]
return value
def popitem(self):
try:
k, v = self.iteritems().next()
except StopIteration:
raise KeyError, 'container is empty'
del self[k]
return (k, v)
def update(self, other=None, **kwargs):
# Make progressively weaker assumptions about "other"
if other is None:
pass
elif hasattr(other, 'iteritems'): # iteritems saves memory and lookups
for k, v in other.iteritems():
self[k] = v
elif hasattr(other, 'keys'):
for k in other.keys():
self[k] = other[k]
else:
for k, v in other:
self[k] = v
if kwargs:
self.update(kwargs)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __repr__(self):
return repr(dict(self.iteritems()))
def __cmp__(self, other):
if other is None:
return 1
if isinstance(other, DictMixin):
other = dict(other.iteritems())
return cmp(dict(self.iteritems()), other)
def __len__(self):
return len(self.keys())
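# Illustrative sketch (added; not part of the original module): a minimal
# mapping gains the full dict API from DictMixin by defining only the four
# base methods.
class _ExampleDict(DictMixin):
    def __init__(self):
        self._store = {}
    def __getitem__(self, key):
        return self._store[key]
    def __setitem__(self, key, value):
        self._store[key] = value
    def __delitem__(self, key):
        del self._store[key]
    def keys(self):
        return self._store.keys()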
| apache-2.0 |
jonparrott/gcloud-python | securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2_grpc.py | 2 | 21413 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.securitycenter_v1beta1.proto import finding_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2
from google.cloud.securitycenter_v1beta1.proto import organization_settings_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2
from google.cloud.securitycenter_v1beta1.proto import security_marks_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_security__marks__pb2
from google.cloud.securitycenter_v1beta1.proto import securitycenter_service_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2
from google.cloud.securitycenter_v1beta1.proto import source_pb2 as google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2
from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2
from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2
from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
class SecurityCenterStub(object):
"""V1 Beta APIs for Security Center service.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateSource = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateSource',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateSourceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.FromString,
)
self.CreateFinding = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateFinding',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateFindingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.FromString,
)
self.GetIamPolicy = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GetIamPolicy',
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
)
self.GetOrganizationSettings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GetOrganizationSettings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetOrganizationSettingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.FromString,
)
self.GetSource = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GetSource',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetSourceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.FromString,
)
self.GroupAssets = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupAssets',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsResponse.FromString,
)
self.GroupFindings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupFindings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsResponse.FromString,
)
self.ListAssets = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/ListAssets',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsResponse.FromString,
)
self.ListFindings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/ListFindings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsResponse.FromString,
)
self.ListSources = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/ListSources',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesResponse.FromString,
)
self.RunAssetDiscovery = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/RunAssetDiscovery',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.RunAssetDiscoveryRequest.SerializeToString,
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
)
self.SetFindingState = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/SetFindingState',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.SetFindingStateRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.FromString,
)
self.SetIamPolicy = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/SetIamPolicy',
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
)
self.TestIamPermissions = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/TestIamPermissions',
request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
)
self.UpdateFinding = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateFinding',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateFindingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.FromString,
)
self.UpdateOrganizationSettings = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateOrganizationSettings',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateOrganizationSettingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.FromString,
)
self.UpdateSource = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSource',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSourceRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.FromString,
)
self.UpdateSecurityMarks = channel.unary_unary(
'/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSecurityMarks',
request_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSecurityMarksRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_security__marks__pb2.SecurityMarks.FromString,
)
class SecurityCenterServicer(object):
"""V1 Beta APIs for Security Center service.
"""
def CreateSource(self, request, context):
"""Creates a source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateFinding(self, request, context):
"""Creates a finding. The corresponding source must exist for finding creation
to succeed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIamPolicy(self, request, context):
"""Gets the access control policy on the specified Source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetOrganizationSettings(self, request, context):
"""Gets the settings for an organization.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSource(self, request, context):
"""Gets a source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GroupAssets(self, request, context):
"""Filters an organization's assets and groups them by their specified
properties.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GroupFindings(self, request, context):
"""Filters an organization or source's findings and groups them by their
specified properties.
To group across all sources provide a `-` as the source id.
Example: /v1beta1/organizations/123/sources/-/findings
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListAssets(self, request, context):
"""Lists an organization's assets.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListFindings(self, request, context):
"""Lists an organization or source's findings.
To list across all sources provide a `-` as the source id.
Example: /v1beta1/organizations/123/sources/-/findings
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListSources(self, request, context):
"""Lists all sources belonging to an organization.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunAssetDiscovery(self, request, context):
"""Runs asset discovery. The discovery is tracked with a long-running
operation.
This API can only be called with limited frequency for an organization. If
it is called too frequently the caller will receive a TOO_MANY_REQUESTS
error.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetFindingState(self, request, context):
"""Updates the state of a finding.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetIamPolicy(self, request, context):
"""Sets the access control policy on the specified Source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TestIamPermissions(self, request, context):
"""Returns the permissions that a caller has on the specified source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateFinding(self, request, context):
"""Creates or updates a finding. The corresponding source must exist for a
finding creation to succeed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateOrganizationSettings(self, request, context):
"""Updates an organization's settings.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateSource(self, request, context):
"""Updates a source.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateSecurityMarks(self, request, context):
"""Updates security marks.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_SecurityCenterServicer_to_server(servicer, server):
rpc_method_handlers = {
'CreateSource': grpc.unary_unary_rpc_method_handler(
servicer.CreateSource,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateSourceRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.SerializeToString,
),
'CreateFinding': grpc.unary_unary_rpc_method_handler(
servicer.CreateFinding,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.CreateFindingRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.SerializeToString,
),
'GetIamPolicy': grpc.unary_unary_rpc_method_handler(
servicer.GetIamPolicy,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
),
'GetOrganizationSettings': grpc.unary_unary_rpc_method_handler(
servicer.GetOrganizationSettings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetOrganizationSettingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.SerializeToString,
),
'GetSource': grpc.unary_unary_rpc_method_handler(
servicer.GetSource,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GetSourceRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.SerializeToString,
),
'GroupAssets': grpc.unary_unary_rpc_method_handler(
servicer.GroupAssets,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupAssetsResponse.SerializeToString,
),
'GroupFindings': grpc.unary_unary_rpc_method_handler(
servicer.GroupFindings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.GroupFindingsResponse.SerializeToString,
),
'ListAssets': grpc.unary_unary_rpc_method_handler(
servicer.ListAssets,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListAssetsResponse.SerializeToString,
),
'ListFindings': grpc.unary_unary_rpc_method_handler(
servicer.ListFindings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListFindingsResponse.SerializeToString,
),
'ListSources': grpc.unary_unary_rpc_method_handler(
servicer.ListSources,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.ListSourcesResponse.SerializeToString,
),
'RunAssetDiscovery': grpc.unary_unary_rpc_method_handler(
servicer.RunAssetDiscovery,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.RunAssetDiscoveryRequest.FromString,
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
),
'SetFindingState': grpc.unary_unary_rpc_method_handler(
servicer.SetFindingState,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.SetFindingStateRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.SerializeToString,
),
'SetIamPolicy': grpc.unary_unary_rpc_method_handler(
servicer.SetIamPolicy,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString,
),
'TestIamPermissions': grpc.unary_unary_rpc_method_handler(
servicer.TestIamPermissions,
request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString,
response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString,
),
'UpdateFinding': grpc.unary_unary_rpc_method_handler(
servicer.UpdateFinding,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateFindingRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_finding__pb2.Finding.SerializeToString,
),
'UpdateOrganizationSettings': grpc.unary_unary_rpc_method_handler(
servicer.UpdateOrganizationSettings,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateOrganizationSettingsRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_organization__settings__pb2.OrganizationSettings.SerializeToString,
),
'UpdateSource': grpc.unary_unary_rpc_method_handler(
servicer.UpdateSource,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSourceRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_source__pb2.Source.SerializeToString,
),
'UpdateSecurityMarks': grpc.unary_unary_rpc_method_handler(
servicer.UpdateSecurityMarks,
request_deserializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_securitycenter__service__pb2.UpdateSecurityMarksRequest.FromString,
response_serializer=google_dot_cloud_dot_securitycenter__v1beta1_dot_proto_dot_security__marks__pb2.SecurityMarks.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.cloud.securitycenter.v1beta1.SecurityCenter', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
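# Illustrative wiring sketch (added; not part of the generated module). A
# concrete servicer subclass would typically be served like this:
#   from concurrent import futures
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
#   add_SecurityCenterServicer_to_server(MySecurityCenterServicer(), server)
#   server.add_insecure_port('[::]:50051')
#   server.start()
# where MySecurityCenterServicer overrides the RPCs it implements.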
| apache-2.0 |
naucoin/VTKSlicerWidgets | Utilities/mrmpi/examples/rmat.py | 11 | 4924 | #!/usr/local/bin/python
# ----------------------------------------------------------------------
# MR-MPI = MapReduce-MPI library
# http://www.cs.sandia.gov/~sjplimp/mapreduce.html
# Steve Plimpton, [email protected], Sandia National Laboratories
#
# Copyright (2009) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the modified Berkeley Software Distribution (BSD) License.
#
# See the README file in the top-level MapReduce directory.
# -------------------------------------------------------------------------
# MapReduce random RMAT matrix generation example in C++
# Syntax: rmat.py N Nz a b c d frac seed {outfile}
# 2^N = # of rows in RMAT matrix
# Nz = non-zeroes per row
# a,b,c,d = RMAT params (must sum to 1.0)
# frac = RMAT randomization param (frac < 1, 0 = no randomization)
# seed = RNG seed (positive int)
# outfile = output RMAT matrix to this filename (optional)
import sys, random
from mrmpi import mrmpi
try:
import pypar
except:
import pypar_serial as pypar
# generate RMAT matrix entries
# emit one KV per edge: key = edge, value = NULL
def generate(itask,mr):
for m in xrange(ngenerate):
delta = order / 2
a1 = a; b1 = b; c1 = c; d1 = d
i = j = 0
for ilevel in xrange(nlevels):
rn = random.random()
if rn < a1:
pass
elif rn < a1+b1:
j += delta
elif rn < a1+b1+c1:
i += delta
else:
i += delta
j += delta
delta /= 2
if fraction > 0.0:
      a1 += a1*fraction * (random.random() - 0.5)
      b1 += b1*fraction * (random.random() - 0.5)
      c1 += c1*fraction * (random.random() - 0.5)
      d1 += d1*fraction * (random.random() - 0.5)
total = a1+b1+c1+d1
a1 /= total
b1 /= total
c1 /= total
d1 /= total
mr.add((i,j),None)
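# Worked example (added, illustrative): with nlevels = 2 (order = 4) the
# generator starts at (i, j) = (0, 0) with delta = 2. Drawing quadrant b
# first (j += 2, delta becomes 1) and quadrant c second (i += 1) emits the
# nonzero (1, 2) of the 4x4 matrix; each level halves delta, so nlevels
# draws pinpoint a single cell of the 2^N x 2^N matrix.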
# eliminate duplicate edges
# input: one KMV per edge, MV has multiple entries if duplicates exist
# output: one KV per edge: key = edge, value = NULL
def cull(key,mvalue,mr):
mr.add(key,None)
# write edges to a file unique to this processor
def output(key,mvalue,mr):
print >>fp,key[0]+1,key[1]+1,1
# enumerate nonzeroes in each row
# input: one KMV per edge
# output: one KV per edge: key = row I, value = NULL
def nonzero(key,mvalue,mr):
mr.add(key[0],None)
# count nonzeroes in each row
# input: one KMV per row, MV has entry for each nonzero
# output: one KV: key = # of nonzeroes, value = NULL
def degree(key,mvalue,mr):
  mr.add(len(mvalue),None)
# count rows with same # of nonzeroes
# input: one KMV per nonzero count, MV has entry for each row
# output: one KV: key = # of nonzeroes, value = # of rows
def histo(key,mvalue,mr):
mr.add(key,len(mvalue))
# compare two counts
# order values by count, largest first
def ncompare(one,two):
  if one > two: return -1
  elif one < two: return 1
  else: return 0
# print # of rows with a specific # of nonzeroes
def stats(itask,key,value,mr):
global total
total += value;
print "%d rows with %d nonzeroes" % (value,key)
# main program
nprocs = pypar.size()
me = pypar.rank()
if len(sys.argv) != 9 and len(sys.argv) != 10:
if me == 0: print "Syntax: N Nz a b c d frac seed {outfile}"
sys.exit()
nlevels = int(sys.argv[1])
nnonzero = int(sys.argv[2])
a = float(sys.argv[3])
b = float(sys.argv[4])
c = float(sys.argv[5])
d = float(sys.argv[6])
fraction = float(sys.argv[7])
seed = int(sys.argv[8])
if len(sys.argv) == 10: outfile = sys.argv[9]
else: outfile = None
if a+b+c+d != 1.0:
if me == 0: print "ERROR: a,b,c,d must sum to 1"
sys.exit()
if fraction >= 1.0:
if me == 0: print "ERROR: fraction must be < 1"
sys.exit()
random.seed(seed+me)
order = 1 << nlevels
mr = mrmpi()
# loop until desired number of unique nonzero entries
pypar.barrier()
tstart = pypar.time()
niterate = 0
ntotal = (1 << nlevels) * nnonzero
nremain = ntotal
while nremain:
niterate += 1
ngenerate = nremain/nprocs
if me < nremain % nprocs: ngenerate += 1
mr.map(nprocs,generate,None,1)
nunique = mr.collate()
if nunique == ntotal: break
mr.reduce(cull)
nremain = ntotal - nunique
pypar.barrier()
tstop = pypar.time()
# output matrix if requested
if outfile:
fp = open(outfile + "." + str(me),"w")
if not fp:
print "ERROR: Could not open output file"
sys.exit()
mr2 = mr.copy()
mr2.reduce(output)
fp.close()
mr2.destroy()
# stats to screen
# include stats on number of nonzeroes per row
if me == 0:
print order,"rows in matrix"
print ntotal,"nonzeroes in matrix"
mr.reduce(nonzero)
mr.collate()
mr.reduce(degree)
mr.collate()
mr.reduce(histo)
mr.gather(1)
mr.sort_keys(ncompare)
total = 0
mr.map_kv(mr,stats)
if me == 0: print order-total,"rows with 0 nonzeroes"
if me == 0:
print "%g secs to generate matrix on %d procs in %d iterations" % \
(tstop-tstart,nprocs,niterate)
mr.destroy()
pypar.finalize()
| bsd-3-clause |
nuncjo/odoo | addons/account_payment/__init__.py | 436 | 1279 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#----------------------------------------------------------
# Init Sales
#----------------------------------------------------------
import account_payment
import wizard
import account_move_line
import account_invoice
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
xsixing/blaze | samples/basics/array_evaluation.py | 10 | 1121 | """
Sample script showing the way to perform computations in blaze
This should be executable and result in an out of core execution to
generate the result of the expression
This illustrates the idea of:
- Using large in-disk arrays as operands
- Building expressions to evaluate in blaze
- Evaluate those expressions to produce a result
- Showing that we can configure how we evaluate expressions
- Showing how we can specify the kind of result we desire
"""
from __future__ import absolute_import, division, print_function
import sys
import blaze
def generate_operand(uri):
"""build some operands on disk"""
pass
def evaluation(operand_dict):
a = blaze.load(operand_dict['a'])
b = blaze.load(operand_dict['b'])
expr = (a+b)*(a*b)
print(type(expr)) # would this be "blaze.array"?
print(type(expr._data)) # would this be blaze.BlazeFuncDataDescriptor?
print(expr) # what should this print???
c = blaze.eval(expr, out_caps={}, hints={})
print(c) #should print the result... rejoice!
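# A minimal sketch of how the pieces above might be wired together; the
# URIs below are assumptions for illustration, not actual Blaze storage
# paths:
#
#   operands = {'a': 'a.blz', 'b': 'b.blz'}
#   for uri in operands.values():
#       generate_operand(uri)
#   evaluation(operands)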
def main(argv):
pass
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
chenc10/Spark-PAF | dist/ec2/lib/boto-2.34.0/boto/ecs/__init__.py | 153 | 4177 | # Copyright (c) 2010 Chris Moyer http://coredumped.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from boto.connection import AWSQueryConnection, AWSAuthConnection
from boto.exception import BotoServerError
import time
import urllib
import xml.sax
from boto.ecs.item import ItemSet
from boto import handler
class ECSConnection(AWSQueryConnection):
"""
ECommerce Connection
For more information on how to use this module see:
http://blog.coredumped.org/2010/09/search-for-books-on-amazon-using-boto.html
"""
APIVersion = '2010-11-01'
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, host='ecs.amazonaws.com',
debug=0, https_connection_factory=None, path='/',
security_token=None, profile_name=None):
super(ECSConnection, self).__init__(aws_access_key_id, aws_secret_access_key,
is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
host, debug, https_connection_factory, path,
security_token=security_token,
profile_name=profile_name)
def _required_auth_capability(self):
return ['ecs']
def get_response(self, action, params, page=0, itemSet=None):
"""
Utility method to handle calls to ECS and parsing of responses.
"""
params['Service'] = "AWSECommerceService"
params['Operation'] = action
if page:
params['ItemPage'] = page
response = self.make_request(None, params, "/onca/xml")
body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status != 200:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
raise BotoServerError(response.status, response.reason, body)
if itemSet is None:
rs = ItemSet(self, action, params, page)
else:
rs = itemSet
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body.encode('utf-8'), h)
if not rs.is_valid:
raise BotoServerError(response.status, '{Code}: {Message}'.format(**rs.errors[0]))
return rs
#
# Group methods
#
def item_search(self, search_index, **params):
"""
Returns items that satisfy the search criteria, including one or more search
indices.
For a full list of search terms,
:see: http://docs.amazonwebservices.com/AWSECommerceService/2010-09-01/DG/index.html?ItemSearch.html
"""
params['SearchIndex'] = search_index
return self.get_response('ItemSearch', params)
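    # A hedged usage sketch (credentials, AssociateTag and the Title
    # attribute are placeholders; available attributes depend on the
    # requested ResponseGroup):
    #   conn = ECSConnection(aws_access_key_id='...',
    #                        aws_secret_access_key='...')
    #   rs = conn.item_search('Books', Keywords='python',
    #                         AssociateTag='mytag-20')
    #   for item in rs:
    #       print item.Title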
def item_lookup(self, **params):
"""
Returns items that satisfy the lookup query.
For a full list of parameters, see:
http://s3.amazonaws.com/awsdocs/Associates/2011-08-01/prod-adv-api-dg-2011-08-01.pdf
"""
return self.get_response('ItemLookup', params) | apache-2.0 |
mrkulk/text-world | evennia/server/oob_cmds.py | 2 | 14608 | """
Out-of-band default plugin commands available for OOB handler.
This module implements commands as defined by the MSDP standard
(http://tintin.sourceforge.net/msdp/), but is independent of the
actual transfer protocol (webclient, MSDP, GMCP etc). It also
implements several OOB commands unique to Evennia (both some
external and some for testing)
The available OOB commands can be extended by changing
`settings.OOB_PLUGIN_MODULES`
This module must contain a global dictionary CMD_MAP. This is a
dictionary that maps the call available in the OOB call to a function
in this module (this allows you to map multiple oob cmdnames to a
single actual Python function, for example).
For example, if the OOB strings received looks like this:
MSDP.LISTEN [desc, key] # GMCP (wrapping to MSDP)
LISTEN ARRAY VAL desc VAL key # MSDP
and CMD_MAP = {"LISTEN": listen}, then this would result in a call to a
function "listen" in this module, with the arguments *("desc", "key").
oob functions have the following call signature:
function(session, *args, **kwargs)
where session is the active session and *args, **kwargs are extra
arguments sent with the oob command.
A function mapped to the key "oob_error" will receive error strings
if it is defined. It will get the error message as its 1st argument.
oob_error(session, error, *args, **kwargs)
This allows for customizing error handling.
Data is usually returned to the user via a return OOB call:
session.msg(oob=(oobcmdname, (args,), {kwargs}))
Oobcmdnames (like "MSDP.LISTEN" / "LISTEN" above) are case-sensitive.
Note that args, kwargs must be iterable. Non-iterables will be
interpreted as a new command name (you can send multiple oob commands
with one msg() call)
Evennia introduces two internal extensions to MSDP, and that is the
MSDP_ARRAY and MSDP_TABLE commands. These are never sent across the
wire to the client (so this is fully compliant with the MSDP
protocol), but tells the Evennia OOB Protocol that you want to send a
"bare" array or table to the client, without prepending any command
name.
"""
from django.conf import settings
from evennia.utils.utils import to_str
from evennia.server.oobhandler import OOB_HANDLER
_GA = object.__getattribute__
_SA = object.__setattr__
_NA = lambda o: "N/A"
#------------------------------------------------------------
# All OOB commands must be on the form
# cmdname(oobhandler, session, *args, **kwargs)
#------------------------------------------------------------
#
# General OOB commands
#
def oob_error(session, errmsg, *args, **kwargs):
"""
Error handling method. Error messages are relayed here.
Args:
session (Session): The session to receive the error
errmsg (str): The failure message
A function with this name is special and is also called by the
oobhandler when an error occurs already at the execution stage
(such as the oob function not being recognized or having the wrong
args etc). Call this from other oob functions to centralize error
management.
"""
session.msg(oob=("error", ("OOB ERROR: %s" % errmsg,)))
def oob_echo(session, *args, **kwargs):
"""
Test echo function. Echoes args, kwargs sent to it.
Args:
session (Session): The Session to receive the echo.
args (list of str): Echo text.
kwargs (dict of str, optional): Keyed echo text
"""
session.msg(oob=("echo", args, kwargs))
##OOB{"repeat":10}
def oob_repeat(session, oobfuncname, interval, *args, **kwargs):
"""
Called as REPEAT <oobfunc> <interval> <args>
Repeats a given OOB command with a certain frequency.
Args:
session (Session): Session creating the repeat
oobfuncname (str): OOB function called every interval seconds
interval (int): Interval of repeat, in seconds.
Notes:
The command checks so that it cannot repeat itself.
"""
if not oobfuncname:
oob_error(session, "Usage: REPEAT <oobfuncname>, <interval>")
return
# limit repeat actions to minimum 5 seconds interval
interval = 20 if not interval else (max(5, interval))
obj = session.get_puppet_or_player()
if obj and oobfuncname != "REPEAT":
OOB_HANDLER.add_repeater(obj, session.sessid, oobfuncname, interval, *args, **kwargs)
##OOB{"UNREPEAT":10}
def oob_unrepeat(session, oobfuncname, interval):
"""
Called with UNREPEAT <oobfunc> <interval>
Disable repeating callback.
Args:
session (Session): Session controlling the repeater
oobfuncname (str): OOB function called every interval seconds
interval (int): Interval of repeater, in seconds.
Notes:
        Removes a repeater previously created with REPEAT, matching on
        the same oob function name and interval.
"""
obj = session.get_puppet_or_player()
if obj:
OOB_HANDLER.remove_repeater(obj, session.sessid, oobfuncname, interval)
#
# MSDP protocol standard commands
#
# MSDP suggests the following standard name conventions for making
# different properties available to the player
# "CHARACTER_NAME", "SERVER_ID", "SERVER_TIME", "AFFECTS", "ALIGNMENT", "EXPERIENCE", "EXPERIENCE_MAX", "EXPERIENCE_TNL",
# "HEALTH", "HEALTH_MAX", "LEVEL", "RACE", "CLASS", "MANA", "MANA_MAX", "WIMPY", "PRACTICE", "MONEY", "MOVEMENT",
# "MOVEMENT_MAX", "HITROLL", "DAMROLL", "AC", "STR", "INT", "WIS", "DEX", "CON", "OPPONENT_HEALTH", "OPPONENT_HEALTH_MAX",
# "OPPONENT_LEVEL", "OPPONENT_NAME", "AREA_NAME", "ROOM_EXITS", "ROOM_VNUM", "ROOM_NAME", "WORLD_TIME", "CLIENT_ID",
# "CLIENT_VERSION", "PLUGIN_ID", "ANSI_COLORS", "XTERM_256_COLORS", "UTF_8", "SOUND", "MXP", "BUTTON_1", "BUTTON_2",
# "BUTTON_3", "BUTTON_4", "BUTTON_5", "GAUGE_1", "GAUGE_2","GAUGE_3", "GAUGE_4", "GAUGE_5"
# mapping from MSDP standard names to Evennia variables
OOB_SENDABLE = {
"CHARACTER_NAME": lambda o: o.key,
"SERVER_ID": lambda o: settings.SERVERNAME,
"ROOM_NAME": lambda o: o.db_location.key,
"ANSI_COLORS": lambda o: True,
"XTERM_256_COLORS": lambda o: True,
"UTF_8": lambda o: True
}
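# Game-specific sendables can be added to the mapping above, e.g. (a
# sketch assuming a 'health' Attribute on the puppeted object):
# OOB_SENDABLE["HEALTH"] = lambda o: o.db.health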
##OOB{"SEND":"CHARACTER_NAME"} - from webclient
def oob_send(session, *args, **kwargs):
"""
Called with the SEND MSDP command.
This function directly returns the value of the given variable to
the session. It assumes the object on which the variable sits
belongs to the session.
Args:
session (Session): Session object
args (str): any number of properties to return. These
must belong to the OOB_SENDABLE dictionary.
Examples:
oob input: ("SEND", "CHARACTER_NAME", "SERVERNAME")
oob output: ("MSDP_TABLE", "CHARACTER_NAME", "Amanda",
"SERVERNAME", "Evennia")
"""
# mapping of MSDP name to a property
obj = session.get_puppet_or_player()
ret = {}
if obj:
for name in (a.upper() for a in args if a):
try:
#print "MSDP SEND inp:", name
value = OOB_SENDABLE.get(name, _NA)(obj)
ret[name] = value
except Exception, e:
ret[name] = str(e)
# return, make sure to use the right case
session.msg(oob=("MSDP_TABLE", (), ret))
else:
oob_error(session, "You must log in first.")
# mapping standard MSDP keys to Evennia field names
OOB_REPORTABLE = {
"CHARACTER_NAME": "db_key",
"ROOM_NAME": "db_location",
"TEST" : "test"
}
##OOB{"REPORT":"TEST"}
def oob_report(session, *args, **kwargs):
"""
Called with the `REPORT PROPNAME` MSDP command.
Monitors the changes of given property name. Assumes reporting
happens on an object controlled by the session.
Args:
session (Session): The Session doing the monitoring. The
property is assumed to sit on the entity currently
controlled by the Session. If puppeting, this is an
Object, otherwise the object will be the Player the
Session belongs to.
args (str or list): One or more property names to monitor changes in.
If a name starts with `db_`, the property is assumed to
be a field, otherwise an Attribute of the given name will
be monitored (if it exists).
Notes:
When the property updates, the monitor will send a MSDP_ARRAY
to the session of the form `(SEND, fieldname, new_value)`
Examples:
("REPORT", "CHARACTER_NAME")
("MSDP_TABLE", "CHARACTER_NAME", "Amanda")
"""
obj = session.get_puppet_or_player()
if obj:
ret = []
for name in args:
propname = OOB_REPORTABLE.get(name, None)
if not propname:
oob_error(session, "No Reportable property '%s'. Use LIST REPORTABLE_VARIABLES." % propname)
# the field_monitors require an oob function as a callback when they report a change.
elif propname.startswith("db_"):
OOB_HANDLER.add_field_monitor(obj, session.sessid, propname, "return_field_report")
ret.append(to_str(_GA(obj, propname), force_string=True))
else:
OOB_HANDLER.add_attribute_monitor(obj, session.sessid, propname, "return_attribute_report")
ret.append(_GA(obj, "db_value"))
#print "ret:", ret
session.msg(oob=("MSDP_ARRAY", ret))
else:
oob_error(session, "You must log in first.")
def oob_return_field_report(session, fieldname, obj, *args, **kwargs):
"""
This is a helper command called by the monitor when fieldname
changes. It is not part of the official MSDP specification but is
a callback used by the monitor to format the result before sending
it on.
"""
session.msg(oob=("MSDP_TABLE", (),
{fieldname: to_str(getattr(obj, fieldname), force_string=True)}))
def oob_return_attribute_report(session, fieldname, obj, *args, **kwargs):
"""
This is a helper command called by the monitor when an Attribute
changes. We need to handle this a little differently from fields
since we are generally not interested in the field name (it's
always db_value for Attributes) but the Attribute's name.
This command is not part of the official MSDP specification but is
a callback used by the monitor to format the result before sending
it on.
"""
session.msg(oob=("MSDP_TABLE", (),
{obj.db_key: to_str(getattr(obj, fieldname), force_string=True)}))
##OOB{"UNREPORT": "TEST"}
def oob_unreport(session, *args, **kwargs):
"""
This removes tracking for the given data.
"""
obj = session.get_puppet_or_player()
if obj:
for name in (a.upper() for a in args if a):
propname = OOB_REPORTABLE.get(name, None)
if not propname:
oob_error(session, "No Un-Reportable property '%s'. Use LIST REPORTABLE_VARIABLES." % propname)
elif propname.startswith("db_"):
OOB_HANDLER.remove_field_monitor(obj, session.sessid, propname, "oob_return_field_report")
else: # assume attribute
OOB_HANDLER.remove_attribute_monitor(obj, session.sessid, propname, "oob_return_attribute_report")
else:
oob_error(session, "You must log in first.")
##OOB{"LIST":"COMMANDS"}
def oob_list(session, mode, *args, **kwargs):
"""
Called with the `LIST <MODE>` MSDP command.
Args:
session (Session): The Session asking for the information
mode (str): The available properties. One of
"COMMANDS" Request an array of commands supported
by the server.
"LISTS" Request an array of lists supported
by the server.
"CONFIGURABLE_VARIABLES" Request an array of variables the client
can configure.
"REPORTABLE_VARIABLES" Request an array of variables the server
will report.
"REPORTED_VARIABLES" Request an array of variables currently
being reported.
"SENDABLE_VARIABLES" Request an array of variables the server
will send.
Examples:
oob in: LIST COMMANDS
oob out: (COMMANDS, (SEND, REPORT, LIST, ...)
"""
mode = mode.upper()
if mode == "COMMANDS":
session.msg(oob=("COMMANDS", ("LIST",
"REPORT",
"UNREPORT",
# "RESET",
"SEND")))
elif mode == "REPORTABLE_VARIABLES":
session.msg(oob=("REPORTABLE_VARIABLES", tuple(key for key in OOB_REPORTABLE.keys())))
elif mode == "REPORTED_VARIABLES":
# we need to check so as to use the right return value depending on if it is
# an Attribute (identified by tracking the db_value field) or a normal database field
# reported is a list of tuples (obj, propname, args, kwargs)
reported = OOB_HANDLER.get_all_monitors(session.sessid)
reported = [rep[0].key if rep[1] == "db_value" else rep[1] for rep in reported]
session.msg(oob=("REPORTED_VARIABLES", reported))
elif mode == "SENDABLE_VARIABLES":
session.msg(oob=("SENDABLE_VARIABLES", tuple(key for key in OOB_REPORTABLE.keys())))
elif mode == "CONFIGURABLE_VARIABLES":
# Not implemented (game specific)
oob_error(session, "Not implemented (game specific)")
else:
# mode == "LISTS" or not given
session.msg(oob=("LISTS",("REPORTABLE_VARIABLES",
"REPORTED_VARIABLES",
# "CONFIGURABLE_VARIABLES",
"SENDABLE_VARIABLES")))
#
# Cmd mapping
#
# this maps the commands to the names available to use from
# the oob call. The standard MSDP commands are capitalized
# as per the protocol, Evennia's own commands are not.
CMD_MAP = {"oob_error": oob_error, # will get error messages
"return_field_report": oob_return_field_report,
"return_attribute_report": oob_return_attribute_report,
# MSDP
"REPEAT": oob_repeat,
"UNREPEAT": oob_unrepeat,
"SEND": oob_send,
"ECHO": oob_echo,
"REPORT": oob_report,
"UNREPORT": oob_unreport,
"LIST": oob_list,
# GMCP
}
| bsd-3-clause |
betoesquivel/fil2014 | filenv/lib/python2.7/site-packages/setuptools/tests/test_find_packages.py | 109 | 5619 | """Tests for setuptools.find_packages()."""
import os
import sys
import shutil
import tempfile
import unittest
import platform
import setuptools
from setuptools import find_packages
from setuptools.tests.py26compat import skipIf
find_420_packages = setuptools.PEP420PackageFinder.find
def has_symlink():
bad_symlink = (
# Windows symlink directory detection is broken on Python 3.2
platform.system() == 'Windows' and sys.version_info[:2] == (3,2)
)
return hasattr(os, 'symlink') and not bad_symlink
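# find_packages() is what these tests exercise; a typical call (sketch):
#   find_packages(where='.', exclude=['tests', 'tests.*'])
# returns a list such as ['pkg', 'pkg.subpkg'].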
class TestFindPackages(unittest.TestCase):
def setUp(self):
self.dist_dir = tempfile.mkdtemp()
self._make_pkg_structure()
def tearDown(self):
shutil.rmtree(self.dist_dir)
def _make_pkg_structure(self):
"""Make basic package structure.
dist/
docs/
conf.py
pkg/
__pycache__/
nspkg/
mod.py
subpkg/
assets/
asset
__init__.py
setup.py
"""
self.docs_dir = self._mkdir('docs', self.dist_dir)
self._touch('conf.py', self.docs_dir)
self.pkg_dir = self._mkdir('pkg', self.dist_dir)
self._mkdir('__pycache__', self.pkg_dir)
self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir)
self._touch('mod.py', self.ns_pkg_dir)
self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir)
self.asset_dir = self._mkdir('assets', self.sub_pkg_dir)
self._touch('asset', self.asset_dir)
self._touch('__init__.py', self.sub_pkg_dir)
self._touch('setup.py', self.dist_dir)
def _mkdir(self, path, parent_dir=None):
if parent_dir:
path = os.path.join(parent_dir, path)
os.mkdir(path)
return path
def _touch(self, path, dir_=None):
if dir_:
path = os.path.join(dir_, path)
fp = open(path, 'w')
fp.close()
return path
def test_regular_package(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir)
self.assertEqual(packages, ['pkg', 'pkg.subpkg'])
def test_exclude(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_packages(self.dist_dir, exclude=('pkg.*',))
assert packages == ['pkg']
def test_include_excludes_other(self):
"""
If include is specified, other packages should be excluded.
"""
self._touch('__init__.py', self.pkg_dir)
alt_dir = self._mkdir('other_pkg', self.dist_dir)
self._touch('__init__.py', alt_dir)
packages = find_packages(self.dist_dir, include=['other_pkg'])
self.assertEqual(packages, ['other_pkg'])
def test_dir_with_dot_is_skipped(self):
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
data_dir = self._mkdir('some.data', self.pkg_dir)
self._touch('__init__.py', data_dir)
self._touch('file.dat', data_dir)
packages = find_packages(self.dist_dir)
self.assertTrue('pkg.some.data' not in packages)
def test_dir_with_packages_in_subdir_is_excluded(self):
"""
Ensure that a package in a non-package such as build/pkg/__init__.py
is excluded.
"""
build_dir = self._mkdir('build', self.dist_dir)
build_pkg_dir = self._mkdir('pkg', build_dir)
self._touch('__init__.py', build_pkg_dir)
packages = find_packages(self.dist_dir)
self.assertTrue('build.pkg' not in packages)
@skipIf(not has_symlink(), 'Symlink support required')
def test_symlinked_packages_are_included(self):
"""
A symbolically-linked directory should be treated like any other
directory when matched as a package.
Create a link from lpkg -> pkg.
"""
self._touch('__init__.py', self.pkg_dir)
linked_pkg = os.path.join(self.dist_dir, 'lpkg')
os.symlink('pkg', linked_pkg)
assert os.path.isdir(linked_pkg)
packages = find_packages(self.dist_dir)
self.assertTrue('lpkg' in packages)
def _assert_packages(self, actual, expected):
self.assertEqual(set(actual), set(expected))
def test_pep420_ns_package(self):
packages = find_420_packages(
self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets'])
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_includes(self):
packages = find_420_packages(
self.dist_dir, exclude=['pkg.subpkg.assets'])
self._assert_packages(packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_includes_or_excludes(self):
packages = find_420_packages(self.dist_dir)
expected = [
'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets']
self._assert_packages(packages, expected)
def test_regular_package_with_nested_pep420_ns_packages(self):
self._touch('__init__.py', self.pkg_dir)
packages = find_420_packages(
self.dist_dir, exclude=['docs', 'pkg.subpkg.assets'])
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
def test_pep420_ns_package_no_non_package_dirs(self):
shutil.rmtree(self.docs_dir)
shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets'))
packages = find_420_packages(self.dist_dir)
self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg'])
| mit |
margguo/python-ivi | ivi/agilent/agilentDSA91204A.py | 7 | 1632 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentDSA91204A(agilent90000):
"Agilent Infiniium DSA91204A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'DSO91204A')
super(agilentDSA91204A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 0
self._channel_count = 4
self._bandwidth = 12e9
self._init_channels()
| mit |
qwhelan/asv | asv/results.py | 1 | 33074 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import base64
import os
import re
import zlib
import itertools
import hashlib
import datetime
import collections
import six
from six.moves import zip as izip
from . import environment
from .console import log
from .machine import Machine
from . import statistics
from . import util
def iter_results_paths(results):
"""
Iterate over all of the result file paths.
"""
skip_files = set([
'machine.json', 'benchmarks.json'
])
for root, dirs, files in os.walk(results):
# Iterate over files only if machine.json is valid json
machine_json = os.path.join(root, "machine.json")
try:
data = util.load_json(machine_json, api_version=Machine.api_version)
machine_name = data.get('machine')
if not isinstance(machine_name, six.text_type):
raise util.UserError("malformed {0}".format(machine_json))
except util.UserError as err:
machine_json_err = "Skipping results: {0}".format(six.text_type(err))
except IOError as err:
machine_json_err = "Skipping results: could not load {0}".format(
machine_json)
else:
machine_json_err = None
# Iterate over files
for filename in files:
if filename not in skip_files and filename.endswith('.json'):
if machine_json_err is not None:
# Show the warning only if there are some files to load
log.warning(machine_json_err)
break
yield (root, filename, machine_name)
def iter_results(results):
"""
Iterate over all of the result files.
"""
for (root, filename, machine_name) in iter_results_paths(results):
try:
yield Results.load(os.path.join(root, filename), machine_name=machine_name)
except util.UserError as exc:
log.warning(six.text_type(exc))
def iter_results_for_machine(results, machine_name):
"""
Iterate over all of the result files for a particular machine.
"""
return iter_results(os.path.join(results, machine_name))
def iter_results_for_machine_and_hash(results, machine_name, commit):
"""
Iterate over all of the result files with a given hash for a
particular machine.
"""
full_commit = get_result_hash_from_prefix(results, machine_name, commit)
for (root, filename, machine_name) in iter_results_paths(
os.path.join(results, machine_name)):
results_commit = filename.split('-')[0]
if results_commit == full_commit:
try:
yield Results.load(os.path.join(root, filename), machine_name=machine_name)
except util.UserError as exc:
log.warning(six.text_type(exc))
def iter_existing_hashes(results):
"""
    Iterate over all of the result files, yielding each commit_hash.
May return duplicates. Use `get_existing_hashes` if that matters.
"""
for result in iter_results(results):
yield result.commit_hash
def get_existing_hashes(results):
"""
Get a list of the commit hashes that have already been tested.
"""
log.info("Getting existing hashes")
hashes = list(set(iter_existing_hashes(results)))
return hashes
def get_result_hash_from_prefix(results, machine_name, commit_prefix):
"""
Get the 8-char result commit identifier from a potentially shorter
prefix. Only considers the set of commits that have had
results computed.
Returns None if there are no matches. Raises a UserError
if the prefix is non-unique.
"""
commits = set([])
path = os.path.join(results, machine_name)
for (root, filename, r_machine_name) in iter_results_paths(path):
if r_machine_name != machine_name:
log.warning("Skipping results '{0}': machine name is not '{1}'".format(
os.path.join(root, filename), machine_name))
continue
results_commit = filename.split('-')[0]
cmp_len = min(len(commit_prefix), len(results_commit))
if results_commit[:cmp_len] == commit_prefix[:cmp_len]:
commits.add(results_commit)
if len(commits) > 1:
commit_list_str = ', '.join(sorted(commits))
raise util.UserError('Git hash prefix could represent one of ' +
'multiple commits: {0}'.format(commit_list_str))
elif len(commits) == 1:
return list(commits)[0]
else:
return None
def get_filename(machine, commit_hash, env_name):
"""
Get the result filename for a given machine, commit_hash and
environment.
If the environment name is too long, use its hash instead.
"""
if env_name and len(env_name) >= 128:
env_name = "env-" + hashlib.md5(env_name.encode('utf-8')).hexdigest()
return os.path.join(
machine,
"{0}-{1}.json".format(
commit_hash[:8],
env_name))
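# For example (illustrative, with POSIX path separators):
#   get_filename("tux", "0123abcdef...", "conda-py3.8")
#   -> "tux/0123abcd-conda-py3.8.json"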
def _compatible_results(result, result_params, params):
"""
For parameterized benchmarks, obtain values from *result* that
are compatible with parameters of *benchmark*
"""
if result is None:
# All results missing, eg. build failure
return [None for param in itertools.product(*params)]
# Pick results for those parameters that also appear in the
# current benchmark
old_results = {}
for param, value in izip(itertools.product(*result_params), result):
old_results[param] = value
new_results = []
for param in itertools.product(*params):
new_results.append(old_results.get(param))
return new_results
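# For example (illustrative): an old run with result_params [["a", "b"]]
# and result [1, 2], matched against params [["a", "c"]], yields
# [1, None] -- "a" is carried over and the new "c" has no old value.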
class Results(object):
"""
Manage a set of benchmark results for a single machine and commit
hash.
"""
api_version = 2
def __init__(self,
params,
requirements,
commit_hash,
date,
python,
env_name,
env_vars):
"""
Parameters
----------
params : dict
Parameters describing the environment in which the
benchmarks were run.
requirements : list
Requirements of the benchmarks being run.
commit_hash : str
The commit hash for the benchmark run.
date : int
JavaScript timestamp for when the commit was merged into
the repository.
python : str
A Python version specifier.
env_name : str
Environment name
env_vars: dict
Environment variables
"""
self._params = params
self._requirements = requirements
self._commit_hash = commit_hash
self._date = date
self._results = {}
self._samples = {}
self._stats = {}
self._benchmark_params = {}
self._profiles = {}
self._python = python
self._env_name = env_name
self._started_at = {}
self._duration = {}
self._benchmark_version = {}
self._env_vars = env_vars
# Note: stderr and errcode are not saved to files
self._stderr = {}
self._errcode = {}
if commit_hash is not None:
self._filename = get_filename(
params['machine'], self._commit_hash, env_name)
else:
self._filename = None
@classmethod
def unnamed(cls):
return cls({}, {}, None, None, None, None, {})
@property
def commit_hash(self):
return self._commit_hash
@property
def date(self):
return self._date
@property
def params(self):
return self._params
@property
def env_vars(self):
return self._env_vars
@property
def started_at(self):
return self._started_at
@property
def duration(self):
return self._duration
def set_build_duration(self, value):
self._duration["<build>"] = float(value)
def set_setup_cache_duration(self, setup_cache_key, value):
self._duration["<setup_cache {}>".format(setup_cache_key)] = float(value)
@property
def benchmark_version(self):
return self._benchmark_version
@property
def stderr(self):
return self._stderr
@property
def errcode(self):
return self._errcode
def get_all_result_keys(self):
"""
Return all available result keys.
"""
return six.iterkeys(self._results)
def get_result_keys(self, benchmarks):
"""
Return result keys corresponding to benchmarks.
Parameters
----------
benchmarks : Benchmarks
Benchmarks to return results for.
Used for checking benchmark versions.
Returns
-------
keys : set
Set of benchmark result keys
"""
keys = set()
for key in six.iterkeys(self._results):
if key not in benchmarks:
continue
version = self._benchmark_version.get(key)
bench_version = benchmarks[key].get('version')
if version is not None and version != bench_version:
continue
keys.add(key)
return keys
def get_result_value(self, key, params):
"""
Return the value of benchmark result.
Parameters
----------
key : str
Benchmark name to return results for
params : {list of list, None}
Set of benchmark parameters to return values for
Returns
-------
value : {float, list of float}
Benchmark result value. If the benchmark is parameterized, return
a list of values.
"""
return _compatible_results(self._results[key],
self._benchmark_params[key],
params)
def get_result_stats(self, key, params):
"""
Return the statistical information of a benchmark result.
Parameters
----------
key : str
Benchmark name to return results for
params : {list of list, None}
Set of benchmark parameters to return values for
Returns
-------
stats : {None, dict, list of dict}
Result statistics. If the benchmark is parameterized,
return a list of values.
"""
return _compatible_results(self._stats[key],
self._benchmark_params[key],
params)
def get_result_samples(self, key, params):
"""
Return the raw data points of a benchmark result.
Parameters
----------
key : str
Benchmark name to return results for
params : {list of list, None}
Set of benchmark parameters to return values for
Returns
-------
samples : {None, list}
Raw result samples. If the benchmark is parameterized,
return a list of values.
"""
return _compatible_results(self._samples[key],
self._benchmark_params[key],
params)
def get_result_params(self, key):
"""
Return the benchmark parameters of the given result
"""
return self._benchmark_params[key]
def remove_result(self, key):
"""
Remove results corresponding to a given benchmark.
"""
del self._results[key]
del self._benchmark_params[key]
del self._samples[key]
del self._stats[key]
# Remove profiles (may be missing)
self._profiles.pop(key, None)
# Remove run times (may be missing in old files)
self._started_at.pop(key, None)
self._duration.pop(key, None)
# Remove version (may be missing)
self._benchmark_version.pop(key, None)
def remove_samples(self, key, selected_idx=None):
"""
Remove measurement samples from the selected benchmark.
"""
if key not in self._results:
raise ValueError(key)
if selected_idx is None:
self._samples[key] = None
elif self._samples[key] is not None:
for j in selected_idx:
self._samples[key][j] = None
def add_result(self, benchmark, result,
started_at=None, duration=None,
record_samples=False,
append_samples=False,
selected_idx=None):
"""
Add benchmark result.
Parameters
----------
benchmark : dict
Benchmark object
result : runner.BenchmarkResult
Result of the benchmark.
started_at : datetime.datetime, optional
Benchmark start time.
duration : float, optional
Benchmark total duration in seconds.
record_samples : bool, optional
Whether to save samples.
append_samples : bool, optional
Whether to combine new samples with old.
selected_idx : set, optional
Which indices in a parametrized benchmark to update
"""
new_result = list(result.result)
new_samples = list(result.samples)
new_number = result.number
benchmark_name = benchmark['name']
benchmark_version = benchmark['version']
if started_at is None:
started_at = datetime.datetime.utcnow()
new_stats = [None] * len(new_result)
if (benchmark_name in self._results and
benchmark_version == self._benchmark_version.get(benchmark_name)):
# Append to old samples, if requested
if append_samples:
old_samples = self.get_result_samples(benchmark_name, benchmark['params'])
for j in range(len(new_samples)):
if old_samples[j] is not None and new_samples[j] is not None:
new_samples[j] = old_samples[j] + new_samples[j]
# Retain old result where requested
merge_idx = [j for j in range(len(new_result))
if selected_idx is not None and j not in selected_idx]
if merge_idx:
old_result = self.get_result_value(benchmark_name, benchmark['params'])
old_samples = self.get_result_samples(benchmark_name, benchmark['params'])
old_stats = self.get_result_stats(benchmark_name, benchmark['params'])
for j in merge_idx:
new_result[j] = old_result[j]
new_samples[j] = old_samples[j]
new_stats[j] = old_stats[j]
# Recompute stats for updated entries (and drop unnecessary data)
for j, (r, s, n) in enumerate(zip(new_result, new_samples, new_number)):
if util.is_na(r):
new_samples[j] = None
new_stats[j] = None
continue
if n is not None:
new_result[j], new_stats[j] = statistics.compute_stats(s, n)
# Compress None lists to just None
if all(x is None for x in new_result):
new_result = None
if all(x is None for x in new_samples):
new_samples = None
if all(x is None for x in new_stats):
new_stats = None
# Drop samples if requested
if not record_samples:
new_samples = None
# Store result
self._results[benchmark_name] = new_result
self._stats[benchmark_name] = new_stats
self._samples[benchmark_name] = new_samples
self._benchmark_params[benchmark_name] = benchmark['params'] if benchmark['params'] else []
self._started_at[benchmark_name] = util.datetime_to_js_timestamp(started_at)
if duration is None:
self._duration.pop(benchmark_name, None)
else:
self._duration[benchmark_name] = float(duration)
self._benchmark_version[benchmark_name] = benchmark_version
self._stderr[benchmark_name] = result.stderr
self._errcode[benchmark_name] = result.errcode
if result.profile:
profile_data = base64.b64encode(zlib.compress(result.profile))
if sys.version_info[0] >= 3:
profile_data = profile_data.decode('ascii')
self._profiles[benchmark_name] = profile_data
def get_profile(self, benchmark_name):
"""
Get the profile data for the given benchmark name.
Parameters
----------
benchmark_name : str
Name of benchmark
Returns
-------
profile_data : bytes
Raw profile data
"""
profile_data = self._profiles[benchmark_name]
if sys.version_info[0] >= 3:
profile_data = profile_data.encode('ascii')
return zlib.decompress(base64.b64decode(profile_data))
def has_profile(self, benchmark_name):
"""
Does the given benchmark data have profiling information?
"""
return benchmark_name in self._profiles
def save(self, result_dir):
"""
Save the results to disk, replacing existing results.
Parameters
----------
result_dir : str
Path to root of results tree.
"""
if self._filename is None:
raise ValueError("Cannot save unnamed Results")
path = os.path.join(result_dir, self._filename)
results = {}
simple_dict = {
'result': self._results,
'params': self._benchmark_params,
'version': self._benchmark_version,
'started_at': self._started_at,
'duration': self._duration,
'samples': self._samples,
'profile': self._profiles,
}
all_keys = ['result', 'params', 'version', 'started_at', 'duration',
'stats_ci_99_a', 'stats_ci_99_b', 'stats_q_25', 'stats_q_75',
'stats_number', 'stats_repeat', 'samples', 'profile']
for name in six.iterkeys(self._results):
row = []
for key in all_keys:
if key in simple_dict:
value = simple_dict[key].get(name)
else:
assert key[:6] == 'stats_'
z = self._stats[name]
if z is None:
value = None
else:
value = [x.get(key[6:]) if x is not None else None
for x in z]
if key != 'params':
if isinstance(value, list) and all(x is None for x in value):
value = None
if key.startswith('stats_') or key == 'duration':
value = util.truncate_float_list(value)
row.append(value)
while row and row[-1] is None:
row.pop()
results[name] = row
other_durations = {}
for key, value in six.iteritems(self._duration):
if key.startswith('<'):
other_durations[key] = value
data = {
'commit_hash': self._commit_hash,
'env_name': self._env_name,
'date': self._date,
'params': self._params,
'python': self._python,
'requirements': self._requirements,
'env_vars': self._env_vars,
'result_columns': all_keys,
'results': results,
'durations': other_durations,
}
util.write_json(path, data, self.api_version, compact=True)
def load_data(self, result_dir):
"""
Load previous results for the current parameters (if any).
"""
if self._filename is None:
raise ValueError("Cannot load unnamed Results")
path = os.path.join(result_dir, self._filename)
if os.path.isfile(path):
old = self.load(path)
for dict_name in ('_results', '_samples', '_stats', '_env_vars',
'_benchmark_params', '_profiles', '_started_at',
'_duration', '_benchmark_version'):
setattr(self, dict_name, getattr(old, dict_name))
@classmethod
def load(cls, path, machine_name=None):
"""
Load results from disk.
Parameters
----------
path : str
Path to results file.
machine_name : str, optional
If given, check that the results file is for the given machine.
"""
d = util.load_json(path, cls.api_version)
d.setdefault('env_vars', {})
try:
obj = cls(
d['params'],
d['requirements'],
d['commit_hash'],
d['date'],
d['python'],
d['env_name'],
d['env_vars'],
)
obj._results = {}
obj._samples = {}
obj._stats = {}
obj._benchmark_params = {}
obj._profiles = {}
obj._started_at = {}
obj._duration = d.get('durations', {})
obj._benchmark_version = {}
simple_keys = {
'result': obj._results,
'params': obj._benchmark_params,
'version': obj._benchmark_version,
'started_at': obj._started_at,
'duration': obj._duration,
'samples': obj._samples,
'profile': obj._profiles,
}
for name, key_values in six.iteritems(d['results']):
for key, value in zip(d['result_columns'], key_values):
key_dict = simple_keys.get(key)
if key_dict is not None:
key_dict[name] = value
continue
elif key.startswith('stats_'):
if value is not None:
if name not in obj._stats:
                                obj._stats[name] = [{} for _ in value]  # one dict per entry, not N refs to one
stats_key = key[6:]
for j, v in enumerate(value):
if v is not None:
obj._stats[name][j][stats_key] = v
else:
raise KeyError("unknown data key {}".format(key))
for key_dict in simple_keys.values():
key_dict.setdefault(name, None)
obj._stats.setdefault(name, None)
obj._filename = os.path.join(*path.split(os.path.sep)[-2:])
except KeyError as exc:
raise util.UserError(
"Error loading results file '{0}': missing key {1}".format(
path, six.text_type(exc)))
if machine_name is not None and obj.params.get('machine') != machine_name:
raise util.UserError(
"Error loading results file '{0}': machine name is not '{1}'".format(
path, machine_name))
return obj
def rm(self, result_dir):
if self._filename is None:
raise ValueError("Cannot remove unnamed Results")
path = os.path.join(result_dir, self._filename)
os.remove(path)
@classmethod
def update(cls, path):
util.update_json(cls, path, cls.api_version, compact=True)
@property
def env_name(self):
return self._env_name
#
# Old data format support
#
@classmethod
def update_to_2(cls, d):
"""
Reformat data in api_version 1 format to version 2.
"""
try:
d2 = {}
d2['commit_hash'] = d['commit_hash']
d2['date'] = d['date']
d2['env_name'] = d.get('env_name',
environment.get_env_name('',
d['python'],
d['requirements'],
{}))
d2['params'] = d['params']
d2['python'] = d['python']
d2['requirements'] = d['requirements']
d2['env_vars'] = d.get('env_vars', {})
# Backward-compatible load
results = {}
samples = {}
stats = {}
benchmark_params = {}
for key, value in six.iteritems(d['results']):
# Backward compatibility
if not isinstance(value, dict):
value = {'result': [value], 'samples': None,
'stats': None, 'params': []}
if not isinstance(value['result'], list):
value['result'] = [value['result']]
if 'stats' in value and not isinstance(value['stats'], list):
value['stats'] = [value['stats']]
value.setdefault('samples', None)
value.setdefault('stats', None)
value.setdefault('params', [])
# Assign results
results[key] = value['result']
samples[key] = value['samples']
stats[key] = value['stats']
benchmark_params[key] = value['params']
if 'profiles' in d:
profiles = d['profiles']
else:
profiles = {}
started_at = d.get('started_at', {})
duration = d.get('duration', {})
benchmark_version = d.get('benchmark_version', {})
# Convert to new format
getters = [
('result', results, None),
('params', benchmark_params, None),
('version', benchmark_version, None),
('started_at', started_at, None),
('duration', duration, None),
('stats_ci_99_a', stats, lambda z: z['ci_99'][0]),
('stats_ci_99_b', stats, lambda z: z['ci_99'][1]),
('stats_q_25', stats, lambda z: z.get('q_25')),
('stats_q_75', stats, lambda z: z.get('q_75')),
('stats_number', stats, lambda z: z.get('number')),
('stats_repeat', stats, lambda z: z.get('repeat')),
('samples', samples, None),
('profile', profiles, None),
]
names = set()
for key_dict in (results, benchmark_params):
names.update(key_dict.keys())
d2['result_columns'] = [x[0] for x in getters]
d2['results'] = {}
for name in sorted(names):
r = []
for key_name, key_dict, key_getter in getters:
value = key_dict.get(name)
if key_getter is not None and value is not None:
if isinstance(value, list):
value = [key_getter(z) if z is not None else None
for z in value]
else:
value = key_getter(value)
if key_name.startswith('stats_') or key_name == 'duration':
value = util.truncate_float_list(value)
if key_name == 'params' and value is None:
value = []
if key_name != 'params' and isinstance(value, list):
if all(x is None for x in value):
value = None
r.append(value)
while r and r[-1] is None:
r.pop()
d2['results'][name] = r
d2['durations'] = {}
for key, value in six.iteritems(duration):
if key.startswith('<'):
d2['durations'][key] = value
return d2
except KeyError as exc:
raise util.UserError(
"Error loading results data: missing key {}".format(
six.text_type(exc)))
def format_benchmark_result(results, benchmark):
"""
Pretty-print a benchmark result to human-readable form.
Parameters
----------
results : Results
Result set object
benchmark : dict
Benchmark dictionary
Returns
-------
info : {str, None}
One-line description of results
details : {str, None}
Additional details
"""
name = benchmark['name']
result = results.get_result_value(name, benchmark['params'])
stats = results.get_result_stats(name, benchmark['params'])
total_count = len(result)
failure_count = sum(r is None for r in result)
info = None
details = None
# Display status
if failure_count > 0:
if failure_count == total_count:
info = "failed"
else:
info = "{0}/{1} failed".format(failure_count, total_count)
# Display results
if benchmark['params']:
# Long format display
if failure_count == 0:
info = "ok"
display_result = [(v, statistics.get_err(v, s) if s is not None else None)
for v, s in zip(result, stats)]
display = _format_benchmark_result(display_result, benchmark)
display = "\n".join(display).strip()
details = display
else:
if failure_count == 0:
# Failure already shown above
if not result:
display = "[]"
else:
if stats[0]:
err = statistics.get_err(result[0], stats[0])
else:
err = None
display = util.human_value(result[0], benchmark['unit'], err=err)
if len(result) > 1:
display += ";..."
info = display
return info, details
def _format_benchmark_result(result, benchmark, max_width=None):
"""
Format the result from a parameterized benchmark as an ASCII table
"""
if not result:
return ['[]']
def do_formatting(num_column_params):
# Fold result to a table
if num_column_params > 0:
column_params = benchmark['params'][-num_column_params:]
else:
column_params = []
rows = []
if column_params:
row_params = benchmark['params'][:-len(column_params)]
header = benchmark['param_names'][:len(row_params)]
column_param_permutations = list(itertools.product(*column_params))
header += [" / ".join(_format_param_value(value) for value in values)
for values in column_param_permutations]
rows.append(header)
column_items = len(column_param_permutations)
name_header = " / ".join(benchmark['param_names'][len(row_params):])
else:
column_items = 1
row_params = benchmark['params']
name_header = ""
header = benchmark['param_names']
rows.append(header)
for j, values in enumerate(itertools.product(*row_params)):
row_results = [util.human_value(x[0], benchmark['unit'], err=x[1])
for x in result[j*column_items:(j+1)*column_items]]
row = [_format_param_value(value) for value in values] + row_results
rows.append(row)
if name_header:
display = util.format_text_table(rows, 1,
top_header_text=name_header,
top_header_span_start=len(row_params))
else:
display = util.format_text_table(rows, 1)
return display.splitlines()
# Determine how many parameters can be fit to columns
if max_width is None:
max_width = util.get_terminal_width() * 3//4
text = do_formatting(0)
for j in range(1, len(benchmark['params'])):
new_text = do_formatting(j)
width = max(len(line) for line in new_text)
if width < max_width:
text = new_text
else:
break
return text
def _format_param_value(value_repr):
"""
Format a parameter value for displaying it as test output. The
values are string obtained via Python repr.
"""
regexs = ["^'(.+)'$",
"^u'(.+)'$",
"^<class '(.+)'>$"]
for regex in regexs:
m = re.match(regex, value_repr)
if m and m.group(1).strip():
return m.group(1)
return value_repr
| bsd-3-clause |
killbill/killbill-client-python | killbill/api/credit_api.py | 1 | 10462 | # coding: utf-8
#
# Copyright 2010-2014 Ning, Inc.
# Copyright 2014-2020 Groupon, Inc
# Copyright 2020-2021 Equinix, Inc
# Copyright 2014-2021 The Billing Project, LLC
#
# The Billing Project, LLC licenses this file to you under the Apache License, version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Kill Bill
Kill Bill is an open-source billing and payments platform # noqa: E501
OpenAPI spec version: 0.22.22-SNAPSHOT
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from killbill.api_client import ApiClient
class CreditApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_credits(self, body=None, created_by=None, **kwargs): # noqa: E501
"""Create a credit # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_credits(body, created_by, async=True)
>>> result = thread.get()
:param async bool
:param List[InvoiceItem] body: (required)
:param Str created_by: (required)
:param Bool auto_commit:
:param List[Str] plugin_property:
:param Str reason:
:param Str comment:
:return: List[InvoiceItem]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.create_credits_with_http_info(body, created_by, **kwargs) # noqa: E501
else:
(data) = self.create_credits_with_http_info(body, created_by, **kwargs) # noqa: E501
return data
def create_credits_with_http_info(self, body=None, created_by=None, **kwargs): # noqa: E501
"""Create a credit # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_credits_with_http_info(body, created_by, async=True)
>>> result = thread.get()
:param async bool
:param List[InvoiceItem] body: (required)
:param Str created_by: (required)
:param Bool auto_commit:
:param List[Str] plugin_property:
:param Str reason:
:param Str comment:
:return: List[InvoiceItem]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'created_by', 'auto_commit', 'plugin_property', 'reason', 'comment'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_credits" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_credits`") # noqa: E501
# verify the required parameter 'created_by' is set
if ('created_by' not in params or
params['created_by'] is None):
raise ValueError("Missing the required parameter `created_by` when calling `create_credits`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'auto_commit' in params:
query_params.append(('autoCommit', params['auto_commit'])) # noqa: E501
if 'plugin_property' in params:
query_params.append(('pluginProperty', params['plugin_property'])) # noqa: E501
collection_formats['pluginProperty'] = 'multi' # noqa: E501
header_params = {}
if 'created_by' in params:
header_params['X-Killbill-CreatedBy'] = params['created_by'] # noqa: E501
if 'reason' in params:
header_params['X-Killbill-Reason'] = params['reason'] # noqa: E501
if 'comment' in params:
header_params['X-Killbill-Comment'] = params['comment'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Killbill Api Key', 'Killbill Api Secret', 'basicAuth'] # noqa: E501
return self.api_client.call_api(
'/1.0/kb/credits', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='List[InvoiceItem]', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_credit(self, credit_id=None, **kwargs): # noqa: E501
"""Retrieve a credit by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_credit(credit_id, async=True)
>>> result = thread.get()
:param async bool
:param Str credit_id: (required)
:return: InvoiceItem
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_credit_with_http_info(credit_id, **kwargs) # noqa: E501
else:
(data) = self.get_credit_with_http_info(credit_id, **kwargs) # noqa: E501
return data
def get_credit_with_http_info(self, credit_id=None, **kwargs): # noqa: E501
"""Retrieve a credit by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_credit_with_http_info(credit_id, async=True)
>>> result = thread.get()
:param async bool
:param Str credit_id: (required)
:return: InvoiceItem
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['credit_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_credit" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'credit_id' is set
if ('credit_id' not in params or
params['credit_id'] is None):
raise ValueError("Missing the required parameter `credit_id` when calling `get_credit`") # noqa: E501
if 'credit_id' in params and not re.search('\\w+-\\w+-\\w+-\\w+-\\w+', params['credit_id']): # noqa: E501
raise ValueError("Invalid value for parameter `credit_id` when calling `get_credit`, must conform to the pattern `/\\w+-\\w+-\\w+-\\w+-\\w+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'credit_id' in params:
path_params['creditId'] = params['credit_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Killbill Api Key', 'Killbill Api Secret', 'basicAuth'] # noqa: E501
return self.api_client.call_api(
'/1.0/kb/credits/{creditId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InvoiceItem', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| apache-2.0 |
guewen/OpenUpgrade | addons/mass_mailing/models/mass_mailing_stats.py | 61 | 4455 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import osv, fields
class MailMailStats(osv.Model):
""" MailMailStats models the statistics collected about emails. Those statistics
are stored in a separated model and table to avoid bloating the mail_mail table
with statistics values. This also allows to delete emails send with mass mailing
without loosing the statistics about them. """
_name = 'mail.mail.statistics'
_description = 'Email Statistics'
_rec_name = 'message_id'
_order = 'message_id'
_columns = {
'mail_mail_id': fields.many2one('mail.mail', 'Mail ID', ondelete='set null'),
'message_id': fields.char('Message-ID'),
'model': fields.char('Document model'),
'res_id': fields.integer('Document ID'),
# campaign / wave data
'mass_mailing_id': fields.many2one(
'mail.mass_mailing', 'Mass Mailing',
ondelete='set null',
),
'mass_mailing_campaign_id': fields.related(
'mass_mailing_id', 'mass_mailing_campaign_id',
type='many2one', ondelete='set null',
relation='mail.mass_mailing.campaign',
string='Mass Mailing Campaign',
store=True, readonly=True,
),
# Bounce and tracking
'scheduled': fields.datetime('Scheduled', help='Date when the email has been created'),
'sent': fields.datetime('Sent', help='Date when the email has been sent'),
'exception': fields.datetime('Exception', help='Date of technical error leading to the email not being sent'),
'opened': fields.datetime('Opened', help='Date when the email has been opened the first time'),
'replied': fields.datetime('Replied', help='Date when this email has been replied for the first time.'),
'bounced': fields.datetime('Bounced', help='Date when this email has bounced.'),
}
_defaults = {
'scheduled': fields.datetime.now,
}
def _get_ids(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, domain=None, context=None):
if not ids and mail_mail_ids:
base_domain = [('mail_mail_id', 'in', mail_mail_ids)]
elif not ids and mail_message_ids:
base_domain = [('message_id', 'in', mail_message_ids)]
else:
base_domain = [('id', 'in', ids or [])]
if domain:
base_domain = ['&'] + domain + base_domain
return self.search(cr, uid, base_domain, context=context)
def set_opened(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('opened', '=', False)], context)
self.write(cr, uid, stat_ids, {'opened': fields.datetime.now()}, context=context)
return stat_ids
def set_replied(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('replied', '=', False)], context)
self.write(cr, uid, stat_ids, {'replied': fields.datetime.now()}, context=context)
return stat_ids
def set_bounced(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None):
stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('bounced', '=', False)], context)
self.write(cr, uid, stat_ids, {'bounced': fields.datetime.now()}, context=context)
return stat_ids
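    # Illustrative call pattern for the setters above (old OpenERP osv API;
    # the id and the surrounding cr/uid/context values are hypothetical):
    #
    #     stats_obj = self.pool.get('mail.mail.statistics')
    #     opened_ids = stats_obj.set_opened(cr, uid, mail_mail_ids=[42],
    #                                       context=context)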
| agpl-3.0 |
JeffRoy/mi-dataset | mi/dataset/driver/wc_sbe/cspp/wc_sbe_cspp_recovered_driver.py | 1 | 2044 | #!/usr/bin/env python
"""
@package mi.dataset.driver.wc_sbe.cspp
@file mi/dataset/driver/wc_sbe/cspp/wc_sbe_cspp_recovered_driver.py
@author Jeff Roy
@brief Driver for the wc_sbe_cspp instrument
Release notes:
Initial Release
"""
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.parser.wc_sbe_cspp import \
WcSbeCsppParser, \
WcSbeEngRecoveredDataParticle, \
WcSbeMetadataRecoveredDataParticle
from mi.dataset.parser.cspp_base import \
METADATA_PARTICLE_CLASS_KEY, \
DATA_PARTICLE_CLASS_KEY
from mi.core.versioning import version
@version("15.6.1")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
"""
This is the method called by Uframe
    :param basePythonCodePath: the file system location of mi-dataset
    :param sourceFilePath: the full path and filename of the file to be parsed
    :param particleDataHdlrObj: Java object that consumes the output of the parser
:return particleDataHdlrObj
"""
with open(sourceFilePath, 'rU') as stream_handle:
# create and instance of the concrete driver class defined below
driver = WcSbeCsppRecoveredDriver(basePythonCodePath, stream_handle, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
class WcSbeCsppRecoveredDriver(SimpleDatasetDriver):
"""
Derived wc_sbe_cspp driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: WcSbeMetadataRecoveredDataParticle,
DATA_PARTICLE_CLASS_KEY: WcSbeEngRecoveredDataParticle
}
}
parser = WcSbeCsppParser(parser_config, stream_handle,
self._exception_callback)
return parser
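# Illustrative invocation (a sketch; both paths and the handler object are
# hypothetical stand-ins for what uFrame normally supplies):
#
#     parse('/path/to/mi-dataset', '/path/to/recovered_cspp_file.txt',
#           particle_data_handler)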
| bsd-2-clause |
m11s/MissionPlanner | Lib/rlcompleter.py | 61 | 6036 | """Word completion for GNU readline 2.0.
This requires the latest extension to the readline module. The completer
completes keywords, built-ins and globals in a selectable namespace (which
defaults to __main__); when completing NAME.NAME..., it evaluates (!) the
expression up to the last dot and completes its attributes.
It's very cool to do "import sys" and then type "sys.", hit the
completion key (twice), and see the list of names defined by the
sys module!
Tip: to use the tab key as the completion key, call
readline.parse_and_bind("tab: complete")
Notes:
- Exceptions raised by the completer function are *ignored* (and
generally cause the completion to fail). This is a feature -- since
readline sets the tty device in raw (or cbreak) mode, printing a
traceback wouldn't work well without some complicated hoopla to save,
reset and restore the tty state.
- The evaluation of the NAME.NAME... form may cause arbitrary
application defined code to be executed if an object with a
__getattr__ hook is found. Since it is the responsibility of the
application (or the user) to enable this feature, I consider this an
acceptable risk. More complicated expressions (e.g. function calls or
indexing operations) are *not* evaluated.
- GNU readline is also used by the built-in functions input() and
raw_input(), and thus these also benefit/suffer from the completer
features. Clearly an interactive application can benefit by
specifying its own completer function and using raw_input() for all
its input.
- When the original stdin is not a tty device, GNU readline is never
used, and this module (and the readline module) are silently inactive.
"""
import __builtin__
import __main__
__all__ = ["Completer"]
class Completer:
def __init__(self, namespace = None):
"""Create a new completer for the command line.
Completer([namespace]) -> completer instance.
If unspecified, the default namespace where completions are performed
is __main__ (technically, __main__.__dict__). Namespaces should be
given as dictionaries.
Completer instances should be used as the completion mechanism of
readline via the set_completer() call:
readline.set_completer(Completer(my_namespace).complete)
"""
if namespace and not isinstance(namespace, dict):
raise TypeError,'namespace must be a dictionary'
# Don't bind to namespace quite yet, but flag whether the user wants a
# specific namespace or to use __main__.__dict__. This will allow us
# to bind to __main__.__dict__ at completion time, not now.
if namespace is None:
self.use_main_ns = 1
else:
self.use_main_ns = 0
self.namespace = namespace
def complete(self, text, state):
"""Return the next possible completion for 'text'.
This is called successively with state == 0, 1, 2, ... until it
returns None. The completion should begin with 'text'.
"""
if self.use_main_ns:
self.namespace = __main__.__dict__
if state == 0:
if "." in text:
self.matches = self.attr_matches(text)
else:
self.matches = self.global_matches(text)
try:
return self.matches[state]
except IndexError:
return None
def _callable_postfix(self, val, word):
if hasattr(val, '__call__'):
word = word + "("
return word
def global_matches(self, text):
"""Compute matches when text is a simple name.
Return a list of all keywords, built-in functions and names currently
defined in self.namespace that match.
"""
import keyword
matches = []
n = len(text)
for word in keyword.kwlist:
if word[:n] == text:
matches.append(word)
for nspace in [__builtin__.__dict__, self.namespace]:
for word, val in nspace.items():
if word[:n] == text and word != "__builtins__":
matches.append(self._callable_postfix(val, word))
return matches
def attr_matches(self, text):
"""Compute matches when text contains a dot.
Assuming the text is of the form NAME.NAME....[NAME], and is
evaluatable in self.namespace, it will be evaluated and its attributes
(as revealed by dir()) are used as possible completions. (For class
instances, class members are also considered.)
WARNING: this can still invoke arbitrary C code, if an object
with a __getattr__ hook is evaluated.
"""
import re
m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
if not m:
return []
expr, attr = m.group(1, 3)
try:
thisobject = eval(expr, self.namespace)
except Exception:
return []
# get the content of the object, except __builtins__
words = dir(thisobject)
if "__builtins__" in words:
words.remove("__builtins__")
if hasattr(thisobject, '__class__'):
words.append('__class__')
words.extend(get_class_members(thisobject.__class__))
matches = []
n = len(attr)
for word in words:
if word[:n] == attr and hasattr(thisobject, word):
val = getattr(thisobject, word)
word = self._callable_postfix(val, "%s.%s" % (expr, word))
matches.append(word)
return matches
def get_class_members(klass):
ret = dir(klass)
if hasattr(klass,'__bases__'):
for base in klass.__bases__:
ret = ret + get_class_members(base)
return ret
try:
import readline
except ImportError:
pass
else:
readline.set_completer(Completer().complete)
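# A quick sketch of the completer protocol (the namespace and results below
# are illustrative):
#
#     >>> c = Completer({'foo': 42})
#     >>> c.complete('foo', 0)
#     'foo'
#     >>> print c.complete('foo', 1)   # no further matches
#     None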
| gpl-3.0 |
caperren/Archives | OSU Robotics Club/Mars Rover 2017-2018/software/ros_packages/ground_station/src/Framework/MapSystems/RoverMapHelper.py | 1 | 1189 | import PIL.Image
import math
class MapHelper(object):
@staticmethod
def new_image(width, height, alpha=False):
"""
Generates a new image using PIL.Image module
returns PIL.IMAGE OBJECT
"""
if alpha is True:
return PIL.Image.new('RGBA', (width, height), (0, 0, 0, 0))
else:
return PIL.Image.new('RGBA', (width, height))
@staticmethod
def fast_round(value, precision):
"""
        Truncates value to the given decimal precision without calling
        Python's built-in round()
        returns FLOAT
"""
return int(value * 10 ** precision) / 10. ** precision
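    # Worked example (illustrative): fast_round(3.14159, 2) computes
    # int(314.159) / 100.0 == 3.14. Note that it truncates rather than
    # rounds, so fast_round(2.678, 2) == 2.67, not 2.68.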
@staticmethod
def pixels_to_degrees(pixels, zoom):
"""
        Scales a pixel distance at the given zoom level to its equivalent at the maximum zoom (21)
returns INT
"""
return pixels * 2 ** (21-zoom)
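    # Worked example (illustrative): 256 pixels at zoom 19 span the same
    # ground distance as pixels_to_degrees(256, 19) == 256 * 2 ** 2 == 1024
    # pixels at the maximum zoom of 21.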
@staticmethod
def pixels_to_meters(latitude, zoom):
"""
        Computes how many pixels correspond to one meter at the given
        latitude and zoom level (Web Mercator projection)
returns FLOAT
"""
# https://groups.google.com/forum/#!topic/google-maps-js-api-v3/hDRO4oHVSeM
return 2 ** zoom / (156543.03392 * math.cos(math.radians(latitude)))
| gpl-3.0 |
ctmarinas/stgit | stgit/lib/git/objects.py | 1 | 10634 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import re
from stgit.compat import text
from stgit.config import config
from .base import Immutable
from .person import Person
class GitObject(Immutable):
"""Base class for all git objects. One git object is represented by at
most one C{GitObject}, which makes it possible to compare them
using normal Python object comparison; it also ensures we don't
waste more memory than necessary."""
class BlobData(Immutable):
"""Represents the data contents of a git blob object."""
def __init__(self, data):
assert isinstance(data, bytes)
self.bytes = data
def commit(self, repository):
"""Commit the blob.
@return: The committed blob
@rtype: L{Blob}"""
sha1 = (
repository.run(['git', 'hash-object', '-w', '--stdin'])
.encoding(None)
.raw_input(self.bytes)
.output_one_line()
)
return repository.get_blob(sha1)
class Blob(GitObject):
"""Represents a git blob object. All the actual data contents of the
blob object is stored in the L{data} member, which is a
L{BlobData} object."""
typename = 'blob'
default_perm = '100644'
def __init__(self, repository, sha1):
self._repository = repository
self.sha1 = sha1
def __repr__(self): # pragma: no cover
return 'Blob<%s>' % self.sha1
@property
def data(self):
type_, content = self._repository.cat_object(self.sha1)
assert type_ == 'blob', 'expected "blob", got "%s" for %s' % (type_, self.sha1)
return BlobData(content)
class TreeData(Immutable):
"""Represents the data contents of a git tree object."""
def __init__(self, entries):
"""Create a new L{TreeData} object from the given mapping from names
(strings) to either (I{permission}, I{object}) tuples or just
objects."""
self._entries = {}
for name, po in entries.items():
assert '/' not in name, 'tree entry name contains slash: %s' % name
if isinstance(po, GitObject):
perm, obj = po.default_perm, po
else:
perm, obj = po
self._entries[name] = (perm, obj)
def __getitem__(self, key):
return self._entries[key]
def __iter__(self):
for name, (perm, obj) in self._entries.items():
yield name, (perm, obj)
def commit(self, repository):
"""Commit the tree.
@return: The committed tree
@rtype: L{Tree}"""
listing = [
'%s %s %s\t%s' % (perm, obj.typename, obj.sha1, name)
for name, (perm, obj) in self
]
sha1 = (
repository.run(['git', 'mktree', '-z'])
.input_nulterm(listing)
.output_one_line()
)
return repository.get_tree(sha1)
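    # Illustrative use (a sketch; `repo` is a hypothetical Repository and the
    # file contents are made up):
    #
    #     blob = BlobData(b'hello\n').commit(repo)
    #     tree = TreeData({'hello.txt': ('100644', blob)}).commit(repo)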
@classmethod
def parse(cls, repository, lines):
"""Parse a raw git tree description.
@return: A new L{TreeData} object
@rtype: L{TreeData}"""
entries = {}
for line in lines:
m = re.match(r'^([0-7]{6}) ([a-z]+) ([0-9a-f]{40})\t(.*)$', line)
perm, type, sha1, name = m.groups()
entries[name] = (perm, repository.get_object(type, sha1))
return cls(entries)
class Tree(GitObject):
"""Represents a git tree object. All the actual data contents of the
tree object is stored in the L{data} member, which is a
L{TreeData} object."""
typename = 'tree'
default_perm = '040000'
def __init__(self, repository, sha1):
self.sha1 = sha1
self._repository = repository
self._data = None
@property
def data(self):
if self._data is None:
self._data = TreeData.parse(
self._repository,
self._repository.run(['git', 'ls-tree', '-z', self.sha1]).output_lines(
'\0'
),
)
return self._data
def __repr__(self): # pragma: no cover
return 'Tree<sha1: %s>' % self.sha1
class CommitData(Immutable):
"""Represents the data contents of a git commit object."""
def __init__(
self, tree, parents, message, encoding=None, author=None, committer=None
):
self.tree = tree
self.parents = parents
self.encoding = (
encoding if encoding is not None else config.get('i18n.commitencoding')
)
if isinstance(message, bytes):
self.message = message
else:
self.message = message.encode(self.encoding)
if author is None:
self._author = Person.author()
else:
assert isinstance(author, (Person, bytes))
self._author = author
if committer is None:
self._committer = Person.committer()
else:
assert isinstance(committer, (Person, bytes))
self._committer = committer
@property
def env(self):
env = {}
for p, v1 in [(self.author, 'AUTHOR'), (self.committer, 'COMMITTER')]:
if p is not None:
for attr, v2 in [
('name', 'NAME'),
('email', 'EMAIL'),
('date', 'DATE'),
]:
if getattr(p, attr) is not None:
env['GIT_%s_%s' % (v1, v2)] = text(getattr(p, attr))
return env
@property
def message_str(self):
return self.message.decode(self.encoding)
@property
def parent(self):
assert len(self.parents) == 1
return self.parents[0]
@property
def author(self):
if isinstance(self._author, bytes):
self._author = Person.parse(self._author.decode(self.encoding))
return self._author
@property
def committer(self):
if isinstance(self._committer, bytes):
self._committer = Person.parse(self._committer.decode(self.encoding))
return self._committer
def set_tree(self, tree):
return self._replace(tree=tree)
def set_parent(self, parent):
return self._replace(parents=[parent])
def set_author(self, author):
assert isinstance(author, Person) or author is None
return self._replace(author=author)
def set_committer(self, committer):
assert isinstance(committer, Person) or committer is None
return self._replace(committer=committer)
def set_message(self, message):
commit_encoding = config.get('i18n.commitencoding')
if isinstance(message, bytes):
message.decode(commit_encoding)
else:
message = message.encode(commit_encoding)
return self._replace(message=message, encoding=commit_encoding)
def _replace(self, **kws):
return type(self)(
tree=kws.get('tree', self.tree),
parents=kws.get('parents', self.parents),
message=kws.get('message', self.message),
encoding=kws.get('encoding', self.encoding),
author=kws.get('author', self.author),
committer=kws.get('committer', self.committer),
)
def is_nochange(self):
return len(self.parents) == 1 and self.tree == self.parent.data.tree
def __repr__(self): # pragma: no cover
return (
'CommitData<tree: %s, parents: %s, author: %s, committer: %s, '
'message: %s>'
) % (
self.tree.sha1,
[p.sha1 for p in self.parents],
self._author,
self._committer,
self.message.split(b'\n', 1)[0],
)
def commit(self, repository):
"""Commit the commit.
@return: The committed commit
@rtype: L{Commit}"""
c = ['git', 'commit-tree', self.tree.sha1]
for p in self.parents:
c.append('-p')
c.append(p.sha1)
sha1 = (
repository.run(c, env=self.env)
.encoding(None)
.raw_input(self.message)
.output_one_line()
)
return repository.get_commit(sha1)
@classmethod
def parse(cls, repository, content):
"""Parse a raw git commit description.
@return: A new L{CommitData} object
@rtype: L{CommitData}"""
required_keys = set(['tree', 'author', 'committer'])
parents = []
encoding = None
while True:
line, content = content.split(b'\n', 1)
if line:
while content.startswith(b' '):
extended, content = content.split(b'\n', 1)
line += extended[1:]
key_b, value_b = line.split(b' ', 1)
key = key_b.decode('utf-8')
if key == 'tree':
tree = repository.get_tree(value_b.decode('utf-8'))
required_keys.remove(key)
elif key == 'parent':
parents.append(repository.get_commit(value_b.decode('utf-8')))
elif key == 'author':
author = value_b
required_keys.remove(key)
elif key == 'committer':
committer = value_b
required_keys.remove(key)
elif key == 'encoding':
encoding = value_b.decode('utf-8')
else:
# Any other keys are meant to be explicitly ignored
pass
else:
break
assert not required_keys, 'commit data missing keys %s' % required_keys
return cls(tree, parents, content, encoding, author, committer)
class Commit(GitObject):
"""Represents a git commit object. All the actual data contents of the
commit object is stored in the L{data} member, which is a
L{CommitData} object."""
typename = 'commit'
def __init__(self, repository, sha1):
self.sha1 = sha1
self._repository = repository
self._data = None
@property
def data(self):
if self._data is None:
type_, content = self._repository.cat_object(self.sha1)
assert type_ == 'commit', 'expected "commit", got "%s" for %s' % (
type_,
self.sha1,
)
self._data = CommitData.parse(self._repository, content)
return self._data
def __repr__(self): # pragma: no cover
return 'Commit<sha1: %s, data: %s>' % (self.sha1, self._data)
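# Putting the classes together (a sketch; `repo`, `parent` and `new_tree` are
# hypothetical stand-ins for an stgit Repository, an existing Commit and a
# committed Tree):
#
#     cd = parent.data.set_tree(new_tree).set_message('refresh patch')
#     new_commit = cd.commit(repo)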
| gpl-2.0 |
j00bar/ansible | lib/ansible/modules/storage/netapp/na_cdot_lun.py | 69 | 12603 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: na_cdot_lun
short_description: Manage NetApp cDOT luns
extends_documentation_fragment:
- netapp.ontap
version_added: '2.3'
author: Sumit Kumar ([email protected])
description:
- Create, destroy, resize luns on NetApp cDOT.
options:
state:
description:
- Whether the specified lun should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
- The name of the lun to manage.
required: true
flexvol_name:
description:
- The name of the FlexVol the lun should exist on.
- Required when C(state=present).
size:
description:
- The size of the lun in C(size_unit).
- Required when C(state=present).
size_unit:
description:
- The unit used to interpret the size parameter.
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: 'gb'
force_resize:
description:
      - Forcibly reduce the size. Required when shrinking a LUN; this guards against accidentally reducing the LUN size.
default: false
force_remove:
description:
- If "true", override checks that prevent a LUN from being destroyed if it is online and mapped.
- If "false", destroying an online and mapped LUN will fail.
default: false
force_remove_fenced:
description:
- If "true", override checks that prevent a LUN from being destroyed while it is fenced.
- If "false", attempting to destroy a fenced LUN will fail.
- The default if not specified is "false". This field is available in Data ONTAP 8.2 and later.
default: false
vserver:
required: true
description:
- The name of the vserver to use.
'''
EXAMPLES = """
- name: Create LUN
na_cdot_lun:
state: present
name: ansibleLUN
flexvol_name: ansibleVolume
vserver: ansibleVServer
size: 5
size_unit: mb
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
- name: Resize Lun
na_cdot_lun:
state: present
name: ansibleLUN
force_resize: True
flexvol_name: ansibleVolume
vserver: ansibleVServer
size: 5
size_unit: gb
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
"""
RETURN = """
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
import ansible.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppCDOTLUN(object):
def __init__(self):
self._size_unit_map = dict(
bytes=1,
b=1,
kb=1024,
mb=1024 ** 2,
gb=1024 ** 3,
tb=1024 ** 4,
pb=1024 ** 5,
eb=1024 ** 6,
zb=1024 ** 7,
yb=1024 ** 8
)
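        # For example (illustrative): size=5 with size_unit='mb' resolves to
        # 5 * 1024 ** 2 == 5242880 bytes before being sent to the controller.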
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(required=True, type='str'),
size=dict(type='int'),
size_unit=dict(default='gb',
choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb',
'pb', 'eb', 'zb', 'yb'], type='str'),
force_resize=dict(default=False, type='bool'),
force_remove=dict(default=False, type='bool'),
force_remove_fenced=dict(default=False, type='bool'),
flexvol_name=dict(type='str'),
vserver=dict(required=True, type='str'),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_if=[
('state', 'present', ['flexvol_name', 'size'])
],
supports_check_mode=True
)
p = self.module.params
# set up state variables
self.state = p['state']
self.name = p['name']
self.size_unit = p['size_unit']
if p['size'] is not None:
self.size = p['size'] * self._size_unit_map[self.size_unit]
else:
self.size = None
self.force_resize = p['force_resize']
self.force_remove = p['force_remove']
self.force_remove_fenced = p['force_remove_fenced']
self.flexvol_name = p['flexvol_name']
self.vserver = p['vserver']
if HAS_NETAPP_LIB is False:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_ontap_zapi(module=self.module, vserver=self.vserver)
def get_lun(self):
"""
Return details about the LUN
:return: Details about the lun
:rtype: dict
"""
luns = []
tag = None
while True:
lun_info = netapp_utils.zapi.NaElement('lun-get-iter')
if tag:
lun_info.add_new_child('tag', tag, True)
query_details = netapp_utils.zapi.NaElement('lun-info')
query_details.add_new_child('vserver', self.vserver)
query_details.add_new_child('volume', self.flexvol_name)
query = netapp_utils.zapi.NaElement('query')
query.add_child_elem(query_details)
lun_info.add_child_elem(query)
result = self.server.invoke_successfully(lun_info, True)
if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) >= 1:
attr_list = result.get_child_by_name('attributes-list')
luns.extend(attr_list.get_children())
tag = result.get_child_content('next-tag')
if tag is None:
break
# The LUNs have been extracted.
# Find the specified lun and extract details.
return_value = None
for lun in luns:
path = lun.get_child_content('path')
_rest, _splitter, found_name = path.rpartition('/')
if found_name == self.name:
size = lun.get_child_content('size')
# Find out if the lun is attached
attached_to = None
lun_id = None
if lun.get_child_content('mapped') == 'true':
lun_map_list = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-map-list-info', **{'path': path})
result = self.server.invoke_successfully(
lun_map_list, enable_tunneling=True)
igroups = result.get_child_by_name('initiator-groups')
if igroups:
for igroup_info in igroups.get_children():
igroup = igroup_info.get_child_content(
'initiator-group-name')
attached_to = igroup
lun_id = igroup_info.get_child_content('lun-id')
return_value = {
'name': found_name,
'size': size,
'attached_to': attached_to,
'lun_id': lun_id
}
else:
continue
return return_value
def create_lun(self):
"""
Create LUN with requested name and size
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_create = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-create-by-size', **{'path': path,
'size': str(self.size),
'ostype': 'linux'})
try:
self.server.invoke_successfully(lun_create, enable_tunneling=True)
except netapp_utils.zapi.NaApiError:
err = get_exception()
self.module.fail_json(msg="Error provisioning lun %s of size %s" % (self.name, self.size),
exception=str(err))
def delete_lun(self):
"""
Delete requested LUN
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-destroy', **{'path': path,
'force': str(self.force_remove),
'destroy-fenced-lun':
str(self.force_remove_fenced)})
try:
self.server.invoke_successfully(lun_delete, enable_tunneling=True)
except netapp_utils.zapi.NaApiError:
err = get_exception()
self.module.fail_json(msg="Error deleting lun %s" % path,
exception=str(err))
def resize_lun(self):
"""
Resize requested LUN.
:return: True if LUN was actually re-sized, false otherwise.
:rtype: bool
"""
path = '/vol/%s/%s' % (self.flexvol_name, self.name)
lun_resize = netapp_utils.zapi.NaElement.create_node_with_children(
'lun-resize', **{'path': path,
'size': str(self.size),
'force': str(self.force_resize)})
try:
self.server.invoke_successfully(lun_resize, enable_tunneling=True)
except netapp_utils.zapi.NaApiError:
e = get_exception()
if str(e.code) == "9042":
# Error 9042 denotes the new LUN size being the same as the
# old LUN size. This happens when there's barely any difference
# in the two sizes. For example, from 8388608 bytes to
# 8194304 bytes. This should go away if/when the default size
# requested/reported to/from the controller is changed to a
# larger unit (MB/GB/TB).
return False
else:
err = get_exception()
self.module.fail_json(msg="Error resizing lun %s" % path,
exception=str(err))
return True
def apply(self):
property_changed = False
multiple_properties_changed = False
size_changed = False
lun_exists = False
lun_detail = self.get_lun()
if lun_detail:
lun_exists = True
current_size = lun_detail['size']
if self.state == 'absent':
property_changed = True
elif self.state == 'present':
if not current_size == self.size:
size_changed = True
property_changed = True
else:
if self.state == 'present':
property_changed = True
if property_changed:
if self.module.check_mode:
pass
else:
if self.state == 'present':
if not lun_exists:
self.create_lun()
else:
if size_changed:
# Ensure that size was actually changed. Please
# read notes in 'resize_lun' function for details.
size_changed = self.resize_lun()
if not size_changed and not \
multiple_properties_changed:
property_changed = False
elif self.state == 'absent':
self.delete_lun()
changed = property_changed or size_changed
# TODO: include other details about the lun (size, etc.)
self.module.exit_json(changed=changed)
def main():
v = NetAppCDOTLUN()
v.apply()
if __name__ == '__main__':
main()
| gpl-3.0 |
AdiPat/android_kernel_htc_pico | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | 2058 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
| gpl-2.0 |
0jpq0/kbengine | kbe/res/scripts/common/Lib/test/test_peepholer.py | 84 | 13107 | import dis
import re
import sys
from io import StringIO
import unittest
from math import copysign
from test.bytecode_helper import BytecodeTestCase
class TestTranforms(BytecodeTestCase):
def test_unot(self):
        # UNARY_NOT POP_JUMP_IF_FALSE --> POP_JUMP_IF_TRUE
def unot(x):
if not x == 2:
del x
self.assertNotInBytecode(unot, 'UNARY_NOT')
self.assertNotInBytecode(unot, 'POP_JUMP_IF_FALSE')
self.assertInBytecode(unot, 'POP_JUMP_IF_TRUE')
def test_elim_inversion_of_is_or_in(self):
for line, cmp_op in (
('not a is b', 'is not',),
('not a in b', 'not in',),
('not a is not b', 'is',),
('not a not in b', 'in',),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'COMPARE_OP', cmp_op)
def test_global_as_constant(self):
# LOAD_GLOBAL None/True/False --> LOAD_CONST None/True/False
def f(x):
None
None
return x
def g(x):
True
return x
def h(x):
False
return x
for func, elem in ((f, None), (g, True), (h, False)):
self.assertNotInBytecode(func, 'LOAD_GLOBAL')
self.assertInBytecode(func, 'LOAD_CONST', elem)
def f():
'Adding a docstring made this test fail in Py2.5.0'
return None
self.assertNotInBytecode(f, 'LOAD_GLOBAL')
self.assertInBytecode(f, 'LOAD_CONST', None)
def test_while_one(self):
# Skip over: LOAD_CONST trueconst POP_JUMP_IF_FALSE xx
def f():
while 1:
pass
return list
for elem in ('LOAD_CONST', 'POP_JUMP_IF_FALSE'):
self.assertNotInBytecode(f, elem)
for elem in ('JUMP_ABSOLUTE',):
self.assertInBytecode(f, elem)
def test_pack_unpack(self):
for line, elem in (
('a, = a,', 'LOAD_CONST',),
('a, b = a, b', 'ROT_TWO',),
('a, b, c = a, b, c', 'ROT_THREE',),
):
code = compile(line,'','single')
self.assertInBytecode(code, elem)
self.assertNotInBytecode(code, 'BUILD_TUPLE')
self.assertNotInBytecode(code, 'UNPACK_TUPLE')
def test_folding_of_tuples_of_constants(self):
for line, elem in (
('a = 1,2,3', (1, 2, 3)),
('("a","b","c")', ('a', 'b', 'c')),
('a,b,c = 1,2,3', (1, 2, 3)),
('(None, 1, None)', (None, 1, None)),
('((1, 2), 3, 4)', ((1, 2), 3, 4)),
):
code = compile(line,'','single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertNotInBytecode(code, 'BUILD_TUPLE')
# Long tuples should be folded too.
code = compile(repr(tuple(range(10000))),'','single')
self.assertNotInBytecode(code, 'BUILD_TUPLE')
# One LOAD_CONST for the tuple, one for the None return value
load_consts = [instr for instr in dis.get_instructions(code)
if instr.opname == 'LOAD_CONST']
self.assertEqual(len(load_consts), 2)
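        # To see the effect interactively (illustrative):
        #     dis.dis(compile('a = 1,2,3', '', 'single'))
        # shows a single LOAD_CONST (1, 2, 3) in place of three separate
        # loads followed by BUILD_TUPLE.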
# Bug 1053819: Tuple of constants misidentified when presented with:
# . . . opcode_with_arg 100 unary_opcode BUILD_TUPLE 1 . . .
# The following would segfault upon compilation
def crater():
(~[
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
],)
def test_folding_of_lists_of_constants(self):
for line, elem in (
# in/not in constants with BUILD_LIST should be folded to a tuple:
('a in [1,2,3]', (1, 2, 3)),
('a not in ["a","b","c"]', ('a', 'b', 'c')),
('a in [None, 1, None]', (None, 1, None)),
('a not in [(1, 2), 3, 4]', ((1, 2), 3, 4)),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertNotInBytecode(code, 'BUILD_LIST')
def test_folding_of_sets_of_constants(self):
for line, elem in (
# in/not in constants with BUILD_SET should be folded to a frozenset:
('a in {1,2,3}', frozenset({1, 2, 3})),
('a not in {"a","b","c"}', frozenset({'a', 'c', 'b'})),
('a in {None, 1, None}', frozenset({1, None})),
('a not in {(1, 2), 3, 4}', frozenset({(1, 2), 3, 4})),
('a in {1, 2, 3, 3, 2, 1}', frozenset({1, 2, 3})),
):
code = compile(line, '', 'single')
self.assertNotInBytecode(code, 'BUILD_SET')
self.assertInBytecode(code, 'LOAD_CONST', elem)
# Ensure that the resulting code actually works:
def f(a):
return a in {1, 2, 3}
def g(a):
return a not in {1, 2, 3}
self.assertTrue(f(3))
self.assertTrue(not f(4))
self.assertTrue(not g(3))
self.assertTrue(g(4))
def test_folding_of_binops_on_constants(self):
for line, elem in (
('a = 2+3+4', 9), # chained fold
('"@"*4', '@@@@'), # check string ops
('a="abc" + "def"', 'abcdef'), # check string ops
('a = 3**4', 81), # binary power
('a = 3*4', 12), # binary multiply
('a = 13//4', 3), # binary floor divide
('a = 14%4', 2), # binary modulo
('a = 2+3', 5), # binary add
('a = 13-4', 9), # binary subtract
('a = (12,13)[1]', 13), # binary subscr
('a = 13 << 2', 52), # binary lshift
('a = 13 >> 2', 3), # binary rshift
('a = 13 & 7', 5), # binary and
('a = 13 ^ 7', 10), # binary xor
('a = 13 | 7', 15), # binary or
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('BINARY_'))
# Verify that unfoldables are skipped
code = compile('a=2+"b"', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 2)
self.assertInBytecode(code, 'LOAD_CONST', 'b')
# Verify that large sequences do not result from folding
code = compile('a="x"*1000', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 1000)
def test_binary_subscr_on_unicode(self):
# valid code get optimized
code = compile('"foo"[0]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', 'f')
self.assertNotInBytecode(code, 'BINARY_SUBSCR')
code = compile('"\u0061\uffff"[1]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', '\uffff')
self.assertNotInBytecode(code,'BINARY_SUBSCR')
# With PEP 393, non-BMP char get optimized
code = compile('"\U00012345"[0]', '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', '\U00012345')
self.assertNotInBytecode(code, 'BINARY_SUBSCR')
# invalid code doesn't get optimized
# out of range
code = compile('"fuu"[10]', '', 'single')
self.assertInBytecode(code, 'BINARY_SUBSCR')
def test_folding_of_unaryops_on_constants(self):
for line, elem in (
('-0.5', -0.5), # unary negative
('-0.0', -0.0), # -0.0
('-(1.0-1.0)', -0.0), # -0.0 after folding
('-0', 0), # -0
('~-2', 1), # unary invert
('+1', 1), # unary positive
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
# Check that -0.0 works after marshaling
def negzero():
return -(1.0-1.0)
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
# Verify that unfoldables are skipped
for line, elem, opname in (
('-"abc"', 'abc', 'UNARY_NEGATIVE'),
('~"abc"', 'abc', 'UNARY_INVERT'),
):
code = compile(line, '', 'single')
self.assertInBytecode(code, 'LOAD_CONST', elem)
self.assertInBytecode(code, opname)
def test_elim_extra_return(self):
# RETURN LOAD_CONST None RETURN --> RETURN
def f(x):
return x
self.assertNotInBytecode(f, 'LOAD_CONST', None)
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 1)
def test_elim_jump_to_return(self):
# JUMP_FORWARD to RETURN --> RETURN
def f(cond, true_value, false_value):
return true_value if cond else false_value
self.assertNotInBytecode(f, 'JUMP_FORWARD')
self.assertNotInBytecode(f, 'JUMP_ABSOLUTE')
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 2)
def test_elim_jump_after_return1(self):
# Eliminate dead code: jumps immediately after returns can't be reached
def f(cond1, cond2):
if cond1: return 1
if cond2: return 2
while 1:
return 3
while 1:
if cond1: return 4
return 5
return 6
self.assertNotInBytecode(f, 'JUMP_FORWARD')
self.assertNotInBytecode(f, 'JUMP_ABSOLUTE')
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 6)
def test_elim_jump_after_return2(self):
# Eliminate dead code: jumps immediately after returns can't be reached
def f(cond1, cond2):
while 1:
if cond1: return 4
self.assertNotInBytecode(f, 'JUMP_FORWARD')
# There should be one jump for the while loop.
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'JUMP_ABSOLUTE']
self.assertEqual(len(returns), 1)
returns = [instr for instr in dis.get_instructions(f)
if instr.opname == 'RETURN_VALUE']
self.assertEqual(len(returns), 2)
def test_make_function_doesnt_bail(self):
def f():
def g()->1+1:
pass
return g
self.assertNotInBytecode(f, 'BINARY_ADD')
def test_constant_folding(self):
# Issue #11244: aggressive constant folding.
exprs = [
'3 * -5',
'-3 * 5',
'2 * (3 * 4)',
'(2 * 3) * 4',
'(-1, 2, 3)',
'(1, -2, 3)',
'(1, 2, -3)',
'(1, 2, -3) * 6',
'lambda x: x in {(3 * -5) + (-1 - 6), (1, -2, 3) * 2, None}',
]
for e in exprs:
code = compile(e, '', 'single')
for instr in dis.get_instructions(code):
self.assertFalse(instr.opname.startswith('UNARY_'))
self.assertFalse(instr.opname.startswith('BINARY_'))
self.assertFalse(instr.opname.startswith('BUILD_'))
class TestBuglets(unittest.TestCase):
def test_bug_11510(self):
# folded constant set optimization was commingled with the tuple
# unpacking optimization which would fail if the set had duplicate
# elements so that the set length was unexpected
def f():
x, y = {1, 1}
return x, y
with self.assertRaises(ValueError):
f()
def test_main(verbose=None):
import sys
from test import support
test_classes = (TestTranforms, TestBuglets)
support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, 'gettotalrefcount'):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
| lgpl-3.0 |
xHeliotrope/injustice_dropper | env/lib/python3.4/site-packages/six.py | 878 | 29664 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.9.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python 3.
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return iter(d.iterkeys(**kw))
def itervalues(d, **kw):
return iter(d.itervalues(**kw))
def iteritems(d, **kw):
return iter(d.iteritems(**kw))
def iterlists(d, **kw):
return iter(d.iterlists(**kw))
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
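# Illustrative only: with the helpers above, a single source line can express
# byte and text literals on both majors, e.g. b("data") is a byte string and
# u("data") is a text string under Python 2 and Python 3 alike.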
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
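# Usage sketch (assumes a unittest.TestCase instance as `self`):
# assertCountEqual(self, [1, 2], [2, 1]) dispatches to assertItemsEqual on
# Python 2 and to assertCountEqual on Python 3.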
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
if from_value is None:
raise value
raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
raise value from from_value
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
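# Usage sketch (values are illustrative): print_("a", "b", sep="-", end="!\n",
# file=sys.stderr, flush=True) mirrors the Python 3 print() builtin on all
# supported interpreters.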
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
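# Usage sketch (Meta is an assumed example metaclass):
#     class Meta(type):
#         pass
#     class MyClass(with_metaclass(Meta, object)):
#         pass
# MyClass is constructed by Meta on Python 2 and 3 without the __metaclass__
# attribute or the Python 3 metaclass= keyword.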
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
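# Usage sketch (Meta is an assumed example metaclass):
#     @add_metaclass(Meta)
#     class MyClass(object):
#         pass
# This rebuilds MyClass through Meta, honoring __slots__ as handled above.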
def python_2_unicode_compatible(klass):
"""
A decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
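# Usage sketch (illustrative class):
#     @python_2_unicode_compatible
#     class Greeting(object):
#         def __str__(self):
#             return u'text'
# Under Python 2 the decorator moves __str__ to __unicode__ and installs a
# UTF-8-encoding __str__; under Python 3 the class is returned unchanged.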
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| mit |
AlphaSmartDog/DeepLearningNotes | Note-6 A3CNet/Note 6 simple ACNet/sonnet/python/modules/nets/mlp.py | 10 | 8926 | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A minimal interface mlp module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import xrange # pylint: disable=redefined-builtin
from sonnet.python.modules import base
from sonnet.python.modules import basic
from sonnet.python.modules import util
import tensorflow as tf
class MLP(base.AbstractModule, base.Transposable):
"""A Multi-Layer perceptron module."""
def __init__(self,
output_sizes,
activation=tf.nn.relu,
activate_final=False,
initializers=None,
partitioners=None,
regularizers=None,
use_bias=True,
custom_getter=None,
name="mlp"):
"""Constructs an MLP module.
Args:
output_sizes: An iterable of output dimensionalities as defined in
`basic.Linear`. Output size can be defined either as number or via a
callable. In the latter case, since the function invocation is deferred
        to graph construction time, the user need only ensure that entries can
be called when build is called. Each entry in the iterable defines
properties in the corresponding linear layer.
activation: An activation op. The activation is applied to intermediate
layers, and optionally to the output of the final layer.
activate_final: Boolean determining if the activation is applied to
the output of the final layer. Default `False`.
initializers: Optional dict containing ops to initialize the linear
layers' weights (with key 'w') or biases (with key 'b').
partitioners: Optional dict containing partitioners to partition the
linear layers' weights (with key 'w') or biases (with key 'b').
regularizers: Optional dict containing regularizers for the linear layers'
weights (with key 'w') and the biases (with key 'b'). As a default, no
regularizers are used. A regularizer should be a function that takes
a single `Tensor` as an input and returns a scalar `Tensor` output, e.g.
the L1 and L2 regularizers in `tf.contrib.layers`.
use_bias: Whether to include bias parameters in the linear layers.
Default `True`.
custom_getter: Callable or dictionary of callables to use as
custom getters inside the module. If a dictionary, the keys
correspond to regexes to match variable names. See the `tf.get_variable`
documentation for information about the custom_getter API.
name: Name of the module.
Raises:
KeyError: If initializers contains any keys other than 'w' or 'b'.
KeyError: If regularizers contains any keys other than 'w' or 'b'.
ValueError: If output_sizes is empty.
TypeError: If `activation` is not callable; or if `output_sizes` is not
iterable.
"""
super(MLP, self).__init__(custom_getter=custom_getter, name=name)
if not isinstance(output_sizes, collections.Iterable):
raise TypeError("output_sizes must be iterable")
output_sizes = tuple(output_sizes)
if not output_sizes:
raise ValueError("output_sizes must not be empty")
self._output_sizes = output_sizes
self._num_layers = len(self._output_sizes)
self._input_shape = None
self.possible_keys = self.get_possible_initializer_keys(use_bias=use_bias)
self._initializers = util.check_initializers(
initializers, self.possible_keys)
self._partitioners = util.check_partitioners(
partitioners, self.possible_keys)
self._regularizers = util.check_regularizers(
regularizers, self.possible_keys)
if not callable(activation):
raise TypeError("Input 'activation' must be callable")
self._activation = activation
self._activate_final = activate_final
self._use_bias = use_bias
self._instantiate_layers()
def _instantiate_layers(self):
"""Instantiates all the linear modules used in the network.
Layers are instantiated in the constructor, as opposed to the build
function, because MLP implements the Transposable interface, and the
transpose function can be called before the module is actually connected
to the graph and build is called.
Notice that this is safe since layers in the transposed module are
    instantiated using lambdas that return the input sizes of the MLP layers,
    and these don't have to return sensible values until the original module is
connected to the graph.
"""
with self._enter_variable_scope():
self._layers = [basic.Linear(self._output_sizes[i],
name="linear_{}".format(i),
initializers=self._initializers,
partitioners=self._partitioners,
regularizers=self._regularizers,
use_bias=self.use_bias)
for i in xrange(self._num_layers)]
@classmethod
def get_possible_initializer_keys(cls, use_bias=True):
return basic.Linear.get_possible_initializer_keys(use_bias=use_bias)
def _build(self, inputs):
"""Assembles the `MLP` and connects it to the graph.
Args:
inputs: A 2D Tensor of size `[batch_size, input_size]`.
Returns:
A 2D Tensor of size `[batch_size, output_sizes[-1]]`.
"""
self._input_shape = tuple(inputs.get_shape().as_list())
net = inputs
final_index = self._num_layers - 1
for layer_id in xrange(self._num_layers):
net = self._layers[layer_id](net)
if final_index != layer_id or self._activate_final:
net = self._activation(net)
return net
@property
def layers(self):
"""Returns a tuple containing the linear layers of the `MLP`."""
return self._layers
@property
def output_sizes(self):
"""Returns a tuple of all output sizes of all the layers."""
return tuple([l() if callable(l) else l for l in self._output_sizes])
@property
def output_size(self):
"""Returns the size of the module output, not including the batch dimension.
This allows the MLP to be used inside a DeepRNN.
Returns:
The scalar size of the module output.
"""
last_size = self._output_sizes[-1]
return last_size() if callable(last_size) else last_size
@property
def use_bias(self):
return self._use_bias
@property
def initializers(self):
"""Returns the intializers dictionary."""
return self._initializers
@property
def partitioners(self):
"""Returns the partitioners dictionary."""
return self._partitioners
@property
def regularizers(self):
"""Returns the regularizers dictionary."""
return self._regularizers
@property
def activation(self):
return self._activation
@property
def activate_final(self):
return self._activate_final
# Implements Transposable interface
@property
def input_shape(self):
"""Returns shape of input `Tensor` passed at last call to `build`."""
self._ensure_is_connected()
return self._input_shape
# Implements Transposable interface
def transpose(self, name=None, activate_final=None):
"""Returns transposed `MLP`.
Args:
name: Optional string specifying the name of the transposed module. The
default name is constructed by appending "_transpose"
to `self.module_name`.
activate_final: Optional boolean determining if the activation and batch
normalization, if turned on, are applied to the final layer.
Returns:
Matching transposed `MLP` module.
"""
if name is None:
name = self.module_name + "_transpose"
if activate_final is None:
activate_final = self.activate_final
output_sizes = [lambda l=layer: l.input_shape[1] for layer in self._layers]
output_sizes.reverse()
return MLP(name=name,
output_sizes=output_sizes,
activation=self.activation,
activate_final=activate_final,
initializers=self.initializers,
partitioners=self.partitioners,
regularizers=self.regularizers,
use_bias=self.use_bias)
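# A minimal usage sketch (not part of the module; sizes and shapes below are
# assumptions for illustration):
#     import tensorflow as tf
#     mlp = MLP(output_sizes=[128, 64, 10])
#     logits = mlp(tf.placeholder(tf.float32, shape=[None, 784]))
#     decoder = mlp.transpose()  # layer sizes mirrored back towards the input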
| mit |
disqus/django-old | tests/modeltests/test_client/views.py | 1 | 7992 | from xml.dom.minidom import parseString
from django.core import mail
from django.template import Context, Template
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from django.contrib.auth.decorators import login_required, permission_required
from django.forms.forms import Form
from django.forms import fields
from django.shortcuts import render_to_response
from django.utils.decorators import method_decorator
def get_view(request):
"A simple view that expects a GET request, and returns a rendered template"
t = Template('This is a test. {{ var }} is the value.', name='GET Template')
c = Context({'var': request.GET.get('var', 42)})
return HttpResponse(t.render(c))
def post_view(request):
"""A view that expects a POST, and returns a different template depending
on whether any POST data is available
"""
if request.method == 'POST':
if request.POST:
t = Template('Data received: {{ data }} is the value.', name='POST Template')
c = Context({'data': request.POST['value']})
else:
t = Template('Viewing POST page.', name='Empty POST Template')
c = Context()
else:
t = Template('Viewing GET page.', name='Empty GET Template')
c = Context()
return HttpResponse(t.render(c))
def view_with_header(request):
"A view that has a custom header"
response = HttpResponse()
response['X-DJANGO-TEST'] = 'Slartibartfast'
return response
def raw_post_view(request):
"""A view which expects raw XML to be posted and returns content extracted
from the XML"""
if request.method == 'POST':
root = parseString(request.raw_post_data)
first_book = root.firstChild.firstChild
title, author = [n.firstChild.nodeValue for n in first_book.childNodes]
t = Template("{{ title }} - {{ author }}", name="Book template")
c = Context({"title": title, "author": author})
else:
t = Template("GET request.", name="Book GET template")
c = Context()
return HttpResponse(t.render(c))
def redirect_view(request):
"A view that redirects all requests to the GET view"
if request.GET:
from urllib import urlencode
query = '?' + urlencode(request.GET, True)
else:
query = ''
return HttpResponseRedirect('/test_client/get_view/' + query)
def view_with_secure(request):
"A view that indicates if the request was secure"
response = HttpResponse()
response.test_was_secure_request = request.is_secure()
return response
def double_redirect_view(request):
"A view that redirects all requests to a redirection view"
return HttpResponseRedirect('/test_client/permanent_redirect_view/')
def bad_view(request):
"A view that returns a 404 with some error content"
return HttpResponseNotFound('Not found!. This page contains some MAGIC content')
TestChoices = (
('a', 'First Choice'),
('b', 'Second Choice'),
('c', 'Third Choice'),
('d', 'Fourth Choice'),
('e', 'Fifth Choice')
)
class TestForm(Form):
text = fields.CharField()
email = fields.EmailField()
value = fields.IntegerField()
single = fields.ChoiceField(choices=TestChoices)
multi = fields.MultipleChoiceField(choices=TestChoices)
def form_view(request):
"A view that tests a simple form"
if request.method == 'POST':
form = TestForm(request.POST)
if form.is_valid():
t = Template('Valid POST data.', name='Valid POST Template')
c = Context()
else:
t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template')
c = Context({'form': form})
else:
form = TestForm(request.GET)
t = Template('Viewing base form. {{ form }}.', name='Form GET Template')
c = Context({'form': form})
return HttpResponse(t.render(c))
def form_view_with_template(request):
"A view that tests a simple form"
if request.method == 'POST':
form = TestForm(request.POST)
if form.is_valid():
message = 'POST data OK'
else:
message = 'POST data has errors'
else:
form = TestForm()
message = 'GET form page'
return render_to_response('form_view.html',
{
'form': form,
'message': message
}
)
def login_protected_view(request):
"A simple view that is login protected."
t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
login_protected_view = login_required(login_protected_view)
def login_protected_view_changed_redirect(request):
"A simple view that is login protected with a custom redirect field set"
t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
login_protected_view_changed_redirect = login_required(redirect_field_name="redirect_to")(login_protected_view_changed_redirect)
def _permission_protected_view(request):
"A simple view that is permission protected."
t = Template('This is a permission protected test. '
'Username is {{ user.username }}. '
                 'Permissions are {{ user.get_all_permissions }}.',
name='Permissions Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
permission_protected_view = permission_required('modeltests.test_perm')(_permission_protected_view)
permission_protected_view_exception = permission_required('modeltests.test_perm', raise_exception=True)(_permission_protected_view)
class _ViewManager(object):
@method_decorator(login_required)
def login_protected_view(self, request):
t = Template('This is a login protected test using a method. '
'Username is {{ user.username }}.',
name='Login Method Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
@method_decorator(permission_required('modeltests.test_perm'))
def permission_protected_view(self, request):
t = Template('This is a permission protected test using a method. '
'Username is {{ user.username }}. '
                     'Permissions are {{ user.get_all_permissions }}.',
name='Permissions Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
_view_manager = _ViewManager()
login_protected_method_view = _view_manager.login_protected_view
permission_protected_method_view = _view_manager.permission_protected_view
def session_view(request):
"A view that modifies the session"
request.session['tobacconist'] = 'hovercraft'
t = Template('This is a view that modifies the session.',
name='Session Modifying View Template')
c = Context()
return HttpResponse(t.render(c))
def broken_view(request):
"""A view which just raises an exception, simulating a broken view."""
raise KeyError("Oops! Looks like you wrote some bad code.")
def mail_sending_view(request):
mail.EmailMessage(
"Test message",
"This is a test email",
"[email protected]",
['[email protected]', '[email protected]']).send()
return HttpResponse("Mail sent")
def mass_mail_sending_view(request):
m1 = mail.EmailMessage(
'First Test message',
'This is the first test email',
'[email protected]',
['[email protected]', '[email protected]'])
m2 = mail.EmailMessage(
'Second Test message',
'This is the second test email',
'[email protected]',
['[email protected]', '[email protected]'])
c = mail.get_connection()
c.send_messages([m1,m2])
return HttpResponse("Mail sent")
| bsd-3-clause |
django-nonrel/django | docs/_ext/literals_to_xrefs.py | 92 | 4869 | """
Runs through a reST file looking for old-style literals, and helps replace them
with new-style references.
"""
import re
import sys
import shelve
refre = re.compile(r'``([^`\s]+?)``')
ROLES = (
'attr',
'class',
"djadmin",
'data',
'exc',
'file',
'func',
'lookup',
'meth',
    'mod',
"djadminopt",
"ref",
"setting",
"term",
"tfilter",
"ttag",
# special
"skip"
)
ALWAYS_SKIP = [
"NULL",
"True",
"False",
]
def fixliterals(fname):
with open(fname) as fp:
data = fp.read()
last = 0
new = []
storage = shelve.open("/tmp/literals_to_xref.shelve")
lastvalues = storage.get("lastvalues", {})
for m in refre.finditer(data):
new.append(data[last:m.start()])
last = m.end()
line_start = data.rfind("\n", 0, m.start())
line_end = data.find("\n", m.end())
prev_start = data.rfind("\n", 0, line_start)
next_end = data.find("\n", line_end + 1)
# Skip always-skip stuff
if m.group(1) in ALWAYS_SKIP:
new.append(m.group(0))
continue
# skip when the next line is a title
next_line = data[m.end():next_end].strip()
if next_line[0] in "!-/:-@[-`{-~" and all(c == next_line[0] for c in next_line):
new.append(m.group(0))
continue
sys.stdout.write("\n"+"-"*80+"\n")
sys.stdout.write(data[prev_start+1:m.start()])
sys.stdout.write(colorize(m.group(0), fg="red"))
sys.stdout.write(data[m.end():next_end])
sys.stdout.write("\n\n")
replace_type = None
while replace_type is None:
replace_type = raw_input(
colorize("Replace role: ", fg="yellow")
).strip().lower()
if replace_type and replace_type not in ROLES:
replace_type = None
if replace_type == "":
new.append(m.group(0))
continue
if replace_type == "skip":
new.append(m.group(0))
ALWAYS_SKIP.append(m.group(1))
continue
default = lastvalues.get(m.group(1), m.group(1))
if default.endswith("()") and replace_type in ("class", "func", "meth"):
default = default[:-2]
replace_value = raw_input(
colorize("Text <target> [", fg="yellow") + default + colorize("]: ", fg="yellow")
).strip()
if not replace_value:
replace_value = default
new.append(":%s:`%s`" % (replace_type, replace_value))
lastvalues[m.group(1)] = replace_value
new.append(data[last:])
with open(fname, "w") as fp:
fp.write("".join(new))
storage["lastvalues"] = lastvalues
storage.close()
#
# The following is taken from django.utils.termcolors and is copied here to
# avoid the dependency.
#
def colorize(text='', opts=(), **kwargs):
"""
Returns your text, enclosed in ANSI graphics codes.
Depends on the keyword arguments 'fg' and 'bg', and the contents of
the opts tuple/list.
Returns the RESET code if no parameters are given.
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold'
'underscore'
'blink'
'reverse'
'conceal'
'noreset' - string will not be auto-terminated with the RESET code
Examples:
colorize('hello', fg='red', bg='blue', opts=('blink',))
colorize()
colorize('goodbye', opts=('underscore',))
print(colorize('first line', fg='red', opts=('noreset',)))
print('this should be red too')
print(colorize('and so should this'))
print('this should not be red')
"""
color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
background = dict([(color_names[x], '4%s' % x) for x in range(8)])
RESET = '0'
opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
text = str(text)
code_list = []
if text == '' and len(opts) == 1 and opts[0] == 'reset':
return '\x1b[%sm' % RESET
for k, v in kwargs.iteritems():
if k == 'fg':
code_list.append(foreground[v])
elif k == 'bg':
code_list.append(background[v])
for o in opts:
if o in opt_dict:
code_list.append(opt_dict[o])
if 'noreset' not in opts:
text = text + '\x1b[%sm' % RESET
return ('\x1b[%sm' % ';'.join(code_list)) + text
if __name__ == '__main__':
try:
fixliterals(sys.argv[1])
except (KeyboardInterrupt, SystemExit):
print('')
| bsd-3-clause |
e-dorigatti/pyspider | pyspider/database/mysql/mysqlbase.py | 75 | 1880 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-11-05 10:42:24
import time
import mysql.connector
class MySQLMixin(object):
@property
def dbcur(self):
try:
if self.conn.unread_result:
self.conn.get_rows()
return self.conn.cursor()
except (mysql.connector.OperationalError, mysql.connector.InterfaceError):
self.conn.ping(reconnect=True)
self.conn.database = self.database_name
return self.conn.cursor()
class SplitTableMixin(object):
UPDATE_PROJECTS_TIME = 10 * 60
def _tablename(self, project):
if self.__tablename__:
return '%s_%s' % (self.__tablename__, project)
else:
return project
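    # For example (assumed values): with __tablename__ == 'taskdb', project
    # 'news' maps to the table 'taskdb_news'; with an empty __tablename__ the
    # project name is used as the table name directly.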
@property
def projects(self):
if time.time() - getattr(self, '_last_update_projects', 0) \
> self.UPDATE_PROJECTS_TIME:
self._list_project()
return self._projects
@projects.setter
def projects(self, value):
self._projects = value
def _list_project(self):
self._last_update_projects = time.time()
self.projects = set()
if self.__tablename__:
prefix = '%s_' % self.__tablename__
else:
prefix = ''
for project, in self._execute('show tables;'):
if project.startswith(prefix):
project = project[len(prefix):]
self.projects.add(project)
def drop(self, project):
if project not in self.projects:
self._list_project()
if project not in self.projects:
return
tablename = self._tablename(project)
self._execute("DROP TABLE %s" % self.escape(tablename))
self._list_project()
| apache-2.0 |
doismellburning/edx-platform | lms/djangoapps/lms_xblock/test/test_runtime.py | 92 | 6099 | """
Tests of the LMS XBlock Runtime and associated utilities
"""
from django.contrib.auth.models import User
from django.conf import settings
from ddt import ddt, data
from mock import Mock
from unittest import TestCase
from urlparse import urlparse
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from lms.djangoapps.lms_xblock.runtime import quote_slashes, unquote_slashes, LmsModuleSystem
from xblock.fields import ScopeIds
TEST_STRINGS = [
'',
'foobar',
'foo/bar',
'foo/bar;',
'foo;;bar',
'foo;_bar',
'foo/',
'/bar',
'foo//bar',
'foo;;;bar',
]
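# The tests below pin down the contract of the helpers: unquote_slashes must
# invert quote_slashes for every sample, and quoted output must never contain
# a literal '/'.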
@ddt
class TestQuoteSlashes(TestCase):
"""Test the quote_slashes and unquote_slashes functions"""
@data(*TEST_STRINGS)
def test_inverse(self, test_string):
self.assertEquals(test_string, unquote_slashes(quote_slashes(test_string)))
@data(*TEST_STRINGS)
def test_escaped(self, test_string):
self.assertNotIn('/', quote_slashes(test_string))
class TestHandlerUrl(TestCase):
"""Test the LMS handler_url"""
def setUp(self):
super(TestHandlerUrl, self).setUp()
self.block = Mock(name='block', scope_ids=ScopeIds(None, None, None, 'dummy'))
self.course_key = SlashSeparatedCourseKey("org", "course", "run")
self.runtime = LmsModuleSystem(
static_url='/static',
track_function=Mock(),
get_module=Mock(),
render_template=Mock(),
replace_urls=str,
course_id=self.course_key,
descriptor_runtime=Mock(),
)
def test_trailing_characters(self):
self.assertFalse(self.runtime.handler_url(self.block, 'handler').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler').endswith('/'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix').endswith('/'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix', 'query').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', 'suffix', 'query').endswith('/'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', query='query').endswith('?'))
self.assertFalse(self.runtime.handler_url(self.block, 'handler', query='query').endswith('/'))
def _parsed_query(self, query_string):
"""Return the parsed query string from a handler_url generated with the supplied query_string"""
return urlparse(self.runtime.handler_url(self.block, 'handler', query=query_string)).query
def test_query_string(self):
self.assertIn('foo=bar', self._parsed_query('foo=bar'))
self.assertIn('foo=bar&baz=true', self._parsed_query('foo=bar&baz=true'))
self.assertIn('foo&bar&baz', self._parsed_query('foo&bar&baz'))
def _parsed_path(self, handler_name='handler', suffix=''):
"""Return the parsed path from a handler_url with the supplied handler_name and suffix"""
return urlparse(self.runtime.handler_url(self.block, handler_name, suffix=suffix)).path
def test_suffix(self):
self.assertTrue(self._parsed_path(suffix="foo").endswith('foo'))
self.assertTrue(self._parsed_path(suffix="foo/bar").endswith('foo/bar'))
self.assertTrue(self._parsed_path(suffix="/foo/bar").endswith('/foo/bar'))
def test_handler_name(self):
self.assertIn('handler1', self._parsed_path('handler1'))
self.assertIn('handler_a', self._parsed_path('handler_a'))
def test_thirdparty_fq(self):
"""Testing the Fully-Qualified URL returned by thirdparty=True"""
parsed_fq_url = urlparse(self.runtime.handler_url(self.block, 'handler', thirdparty=True))
self.assertEqual(parsed_fq_url.scheme, 'https')
self.assertEqual(parsed_fq_url.hostname, settings.SITE_NAME)
def test_not_thirdparty_rel(self):
"""Testing the Fully-Qualified URL returned by thirdparty=False"""
parsed_fq_url = urlparse(self.runtime.handler_url(self.block, 'handler', thirdparty=False))
self.assertEqual(parsed_fq_url.scheme, '')
self.assertIsNone(parsed_fq_url.hostname)
class TestUserServiceAPI(TestCase):
"""Test the user service interface"""
def setUp(self):
super(TestUserServiceAPI, self).setUp()
self.course_id = SlashSeparatedCourseKey("org", "course", "run")
self.user = User(username='runtime_robot', email='[email protected]', password='test', first_name='Robot')
self.user.save()
def mock_get_real_user(_anon_id):
"""Just returns the test user"""
return self.user
self.runtime = LmsModuleSystem(
static_url='/static',
track_function=Mock(),
get_module=Mock(),
render_template=Mock(),
replace_urls=str,
course_id=self.course_id,
get_real_user=mock_get_real_user,
descriptor_runtime=Mock(),
)
self.scope = 'course'
self.key = 'key1'
self.mock_block = Mock()
self.mock_block.service_declaration.return_value = 'needs'
def test_get_set_tag(self):
# test for when we haven't set the tag yet
tag = self.runtime.service(self.mock_block, 'user_tags').get_tag(self.scope, self.key)
self.assertIsNone(tag)
# set the tag
set_value = 'value'
self.runtime.service(self.mock_block, 'user_tags').set_tag(self.scope, self.key, set_value)
tag = self.runtime.service(self.mock_block, 'user_tags').get_tag(self.scope, self.key)
self.assertEqual(tag, set_value)
# Try to set tag in wrong scope
with self.assertRaises(ValueError):
self.runtime.service(self.mock_block, 'user_tags').set_tag('fake_scope', self.key, set_value)
# Try to get tag in wrong scope
with self.assertRaises(ValueError):
self.runtime.service(self.mock_block, 'user_tags').get_tag('fake_scope', self.key)
| agpl-3.0 |
sorenk/ansible | lib/ansible/modules/crypto/openssl_certificate.py | 15 | 36712 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016-2017, Yanis Guenane <[email protected]>
# (c) 2017, Markus Teufelberger <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: openssl_certificate
author:
- Yanis Guenane (@Spredzy)
- Markus Teufelberger (@MarkusTeufelberger)
version_added: "2.4"
short_description: Generate and/or check OpenSSL certificates
description:
- "This module allows one to (re)generate OpenSSL certificates. It implements a notion
    of provider (i.e. C(selfsigned), C(acme), C(assertonly)) for your certificate.
The 'assertonly' provider is intended for use cases where one is only interested in
checking properties of a supplied certificate.
Many properties that can be specified in this module are for validation of an
existing or newly generated certificate. The proper place to specify them, if you
    want to receive a certificate with these properties, is a CSR (Certificate Signing Request).
It uses the pyOpenSSL python library to interact with OpenSSL."
requirements:
- python-pyOpenSSL >= 0.15 (if using C(selfsigned) or C(assertonly) provider)
- acme-tiny (if using the C(acme) provider)
options:
state:
default: "present"
choices: [ present, absent ]
description:
- Whether the certificate should exist or not, taking action if the state is different from what is stated.
path:
required: true
description:
- Remote absolute path where the generated certificate file should be created or is already located.
provider:
required: true
choices: [ 'selfsigned', 'assertonly', 'acme' ]
description:
- Name of the provider to use to generate/retrieve the OpenSSL certificate.
The C(assertonly) provider will not generate files and fail if the certificate file is missing.
force:
default: False
type: bool
description:
- Generate the certificate, even if it already exists.
csr_path:
description:
- Path to the Certificate Signing Request (CSR) used to generate this certificate. This is not required in C(assertonly) mode.
privatekey_path:
description:
- Path to the private key to use when signing the certificate.
privatekey_passphrase:
description:
- The passphrase for the I(privatekey_path).
selfsigned_version:
default: 3
description:
- Version of the C(selfsigned) certificate. Nowadays it should almost always be C(3).
version_added: "2.5"
selfsigned_digest:
default: "sha256"
description:
- Digest algorithm to be used when self-signing the certificate
selfsigned_not_before:
description:
- The timestamp at which the certificate starts being valid. The timestamp is formatted as an ASN.1 TIME.
If this value is not specified, certificate will start being valid from now.
aliases: [ selfsigned_notBefore ]
selfsigned_not_after:
description:
- The timestamp at which the certificate stops being valid. The timestamp is formatted as an ASN.1 TIME.
If this value is not specified, certificate will stop being valid 10 years from now.
aliases: [ selfsigned_notAfter ]
acme_accountkey_path:
description:
- Path to the accountkey for the C(acme) provider
acme_challenge_path:
description:
- Path to the ACME challenge directory that is served on U(http://<HOST>:80/.well-known/acme-challenge/)
acme_chain:
default: True
description:
      - Include the intermediate certificate in the generated certificate
version_added: "2.5"
signature_algorithms:
description:
      - A list of algorithms that you would accept the certificate to be signed with
(e.g. ['sha256WithRSAEncryption', 'sha512WithRSAEncryption']).
issuer:
description:
- Key/value pairs that must be present in the issuer name field of the certificate.
If you need to specify more than one value with the same key, use a list as value.
issuer_strict:
default: False
type: bool
description:
- If set to True, the I(issuer) field must contain only these values.
version_added: "2.5"
subject:
description:
- Key/value pairs that must be present in the subject name field of the certificate.
If you need to specify more than one value with the same key, use a list as value.
subject_strict:
default: False
type: bool
description:
- If set to True, the I(subject) field must contain only these values.
version_added: "2.5"
has_expired:
default: False
type: bool
description:
- Checks if the certificate is expired/not expired at the time the module is executed.
version:
description:
- Version of the certificate. Nowadays it should almost always be 3.
valid_at:
description:
- The certificate must be valid at this point in time. The timestamp is formatted as an ASN.1 TIME.
invalid_at:
description:
- The certificate must be invalid at this point in time. The timestamp is formatted as an ASN.1 TIME.
not_before:
description:
      - The certificate must become valid at this point in time. The timestamp is formatted as an ASN.1 TIME.
aliases: [ notBefore ]
not_after:
description:
- The certificate must expire at this point in time. The timestamp is formatted as an ASN.1 TIME.
aliases: [ notAfter ]
valid_in:
description:
- The certificate must still be valid in I(valid_in) seconds from now.
key_usage:
description:
- The I(key_usage) extension field must contain all these values.
aliases: [ keyUsage ]
key_usage_strict:
default: False
type: bool
description:
- If set to True, the I(key_usage) extension field must contain only these values.
aliases: [ keyUsage_strict ]
extended_key_usage:
description:
- The I(extended_key_usage) extension field must contain all these values.
aliases: [ extendedKeyUsage ]
extended_key_usage_strict:
default: False
type: bool
description:
- If set to True, the I(extended_key_usage) extension field must contain only these values.
aliases: [ extendedKeyUsage_strict ]
subject_alt_name:
description:
- The I(subject_alt_name) extension field must contain these values.
aliases: [ subjectAltName ]
subject_alt_name_strict:
default: False
type: bool
description:
- If set to True, the I(subject_alt_name) extension field must contain only these values.
aliases: [ subjectAltName_strict ]
extends_documentation_fragment: files
notes:
- All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
Date specified should be UTC. Minutes and seconds are mandatory.
'''
EXAMPLES = '''
- name: Generate a Self Signed OpenSSL certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
privatekey_path: /etc/ssl/private/ansible.com.pem
csr_path: /etc/ssl/csr/ansible.com.csr
provider: selfsigned
- name: Generate a Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
- name: Force (re-)generate a new Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
force: True
# Examples for some checks one could use the assertonly provider for:
# How to use the assertonly provider to implement and trigger your own custom certificate generation workflow:
- name: Check if a certificate is currently still valid, ignoring failures
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: False
ignore_errors: True
register: validity_check
- name: Run custom task(s) to get a new, valid certificate in case the initial check failed
command: superspecialSSL recreate /etc/ssl/crt/example.com.crt
when: validity_check.failed
- name: Check the new certificate again for validity with the same parameters, this time failing the play if it is still invalid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: False
when: validity_check.failed
# Some other checks that assertonly could be used for:
- name: Verify that an existing certificate was issued by the Let's Encrypt CA and is currently still valid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
issuer:
O: Let's Encrypt
has_expired: False
- name: Ensure that a certificate uses a modern signature algorithm (no SHA1, MD5 or DSA)
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
signature_algorithms:
- sha224WithRSAEncryption
- sha256WithRSAEncryption
- sha384WithRSAEncryption
- sha512WithRSAEncryption
- sha224WithECDSAEncryption
- sha256WithECDSAEncryption
- sha384WithECDSAEncryption
- sha512WithECDSAEncryption
- name: Ensure that the existing certificate belongs to the specified private key
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
privatekey_path: /etc/ssl/private/example.com.pem
provider: assertonly
- name: Ensure that the existing certificate is still valid at the winter solstice 2017
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_at: 20171221162800Z
- name: Ensure that the existing certificate is still valid 2 weeks (1209600 seconds) from now
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_in: 1209600
- name: Ensure that the existing certificate is only used for digital signatures and encrypting other keys
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
key_usage:
- digitalSignature
- keyEncipherment
key_usage_strict: true
- name: Ensure that the existing certificate can be used for client authentication
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- name: Ensure that the existing certificate can only be used for client authentication and time stamping
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- 1.3.6.1.5.5.7.3.8
extended_key_usage_strict: true
- name: Ensure that the existing certificate has a certain domain in its subjectAltName
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
subject_alt_name:
- www.example.com
- test.example.com
'''
RETURN = '''
filename:
description: Path to the generated Certificate
returned: changed or success
type: string
sample: /etc/ssl/crt/www.ansible.com.crt
'''
from random import randint
import datetime
import os
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native, to_bytes
try:
import OpenSSL
from OpenSSL import crypto
except ImportError:
pyopenssl_found = False
else:
pyopenssl_found = True
class CertificateError(crypto_utils.OpenSSLObjectError):
pass
class Certificate(crypto_utils.OpenSSLObject):
def __init__(self, module):
super(Certificate, self).__init__(
module.params['path'],
module.params['state'],
module.params['force'],
module.check_mode
)
self.provider = module.params['provider']
self.privatekey_path = module.params['privatekey_path']
self.privatekey_passphrase = module.params['privatekey_passphrase']
self.csr_path = module.params['csr_path']
self.cert = None
self.privatekey = None
self.module = module
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
state_and_perms = super(Certificate, self).check(module, perms_required)
def _validate_privatekey():
if self.privatekey_path:
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
if not state_and_perms:
return False
self.cert = crypto_utils.load_certificate(self.path)
if self.privatekey_path:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase
)
return _validate_privatekey()
return True
class SelfSignedCertificate(Certificate):
"""Generate the self-signed certificate."""
def __init__(self, module):
super(SelfSignedCertificate, self).__init__(module)
self.notBefore = module.params['selfsigned_notBefore']
self.notAfter = module.params['selfsigned_notAfter']
self.digest = module.params['selfsigned_digest']
self.version = module.params['selfsigned_version']
self.serial_number = randint(1000, 99999)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase
)
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
if self.notBefore:
cert.set_notBefore(self.notBefore)
else:
cert.gmtime_adj_notBefore(0)
if self.notAfter:
cert.set_notAfter(self.notAfter)
else:
# If no NotAfter specified, expire in
# 10 years. 315360000 is 10 years in seconds.
cert.gmtime_adj_notAfter(315360000)
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.csr.get_subject())
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.privatekey, self.digest)
self.cert = cert
try:
with open(self.path, 'wb') as cert_file:
cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
except EnvironmentError as exc:
raise CertificateError(exc)
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if check_mode:
now = datetime.datetime.utcnow()
ten = now.replace(now.year + 10)
result.update({
'notBefore': self.notBefore if self.notBefore else now.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter if self.notAfter else ten.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
class AssertOnlyCertificate(Certificate):
"""validate the supplied certificate."""
def __init__(self, module):
super(AssertOnlyCertificate, self).__init__(module)
self.signature_algorithms = module.params['signature_algorithms']
if module.params['subject']:
self.subject = crypto_utils.parse_name_field(module.params['subject'])
else:
self.subject = []
self.subject_strict = module.params['subject_strict']
if module.params['issuer']:
self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
else:
self.issuer = []
self.issuer_strict = module.params['issuer_strict']
self.has_expired = module.params['has_expired']
self.version = module.params['version']
self.keyUsage = module.params['keyUsage']
self.keyUsage_strict = module.params['keyUsage_strict']
self.extendedKeyUsage = module.params['extendedKeyUsage']
self.extendedKeyUsage_strict = module.params['extendedKeyUsage_strict']
self.subjectAltName = module.params['subjectAltName']
self.subjectAltName_strict = module.params['subjectAltName_strict']
self.notBefore = module.params['notBefore']
self.notAfter = module.params['notAfter']
self.valid_at = module.params['valid_at']
self.invalid_at = module.params['invalid_at']
self.valid_in = module.params['valid_in']
self.message = []
self._sanitize_inputs()
def _sanitize_inputs(self):
"""Ensure inputs are properly sanitized before comparison."""
for param in ['signature_algorithms', 'keyUsage', 'extendedKeyUsage',
'subjectAltName', 'subject', 'issuer', 'notBefore',
'notAfter', 'valid_at', 'invalid_at']:
attr = getattr(self, param)
if isinstance(attr, list) and attr:
if isinstance(attr[0], str):
setattr(self, param, [to_bytes(item) for item in attr])
elif isinstance(attr[0], tuple):
setattr(self, param, [(to_bytes(item[0]), to_bytes(item[1])) for item in attr])
elif isinstance(attr, tuple):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, dict):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, str):
setattr(self, param, to_bytes(attr))
def assertonly(self):
self.cert = crypto_utils.load_certificate(self.path)
def _validate_signature_algorithms():
if self.signature_algorithms:
if self.cert.get_signature_algorithm() not in self.signature_algorithms:
self.message.append(
'Invalid signature algorithm (got %s, expected one of %s)' % (self.cert.get_signature_algorithm(), self.signature_algorithms)
)
def _validate_subject():
if self.subject:
expected_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in self.subject]
cert_subject = self.cert.get_subject().get_components()
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in cert_subject]
if (not self.subject_strict and not all(x in current_subject for x in expected_subject)) or \
(self.subject_strict and not set(expected_subject) == set(current_subject)):
self.message.append(
'Invalid subject component (got %s, expected all of %s to be present)' % (cert_subject, self.subject)
)
def _validate_issuer():
if self.issuer:
expected_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in self.issuer]
cert_issuer = self.cert.get_issuer().get_components()
current_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in cert_issuer]
if (not self.issuer_strict and not all(x in current_issuer for x in expected_issuer)) or \
(self.issuer_strict and not set(expected_issuer) == set(current_issuer)):
self.message.append(
'Invalid issuer component (got %s, expected all of %s to be present)' % (cert_issuer, self.issuer)
)
def _validate_has_expired():
if self.has_expired:
if self.has_expired != self.cert.has_expired():
self.message.append(
'Certificate expiration check failed (certificate expiration is %s, expected %s)' % (self.cert.has_expired(), self.has_expired)
)
def _validate_version():
if self.version:
# Version numbers in certs are off by one:
# v1: 0, v2: 1, v3: 2 ...
if self.version != self.cert.get_version() + 1:
self.message.append(
'Invalid certificate version number (got %s, expected %s)' % (self.cert.get_version() + 1, self.version)
)
def _validate_keyUsage():
if self.keyUsage:
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'keyUsage':
keyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.keyUsage]
current_ku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if (not self.keyUsage_strict and not all(x in current_ku for x in keyUsage)) or \
(self.keyUsage_strict and not set(keyUsage) == set(current_ku)):
self.message.append(
'Invalid keyUsage component (got %s, expected all of %s to be present)' % (str(extension).split(', '), self.keyUsage)
)
def _validate_extendedKeyUsage():
if self.extendedKeyUsage:
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'extendedKeyUsage':
extKeyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.extendedKeyUsage]
current_xku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if (not self.extendedKeyUsage_strict and not all(x in current_xku for x in extKeyUsage)) or \
(self.extendedKeyUsage_strict and not set(extKeyUsage) == set(current_xku)):
self.message.append(
'Invalid extendedKeyUsage component (got %s, expected all of %s to be present)' % (str(extension).split(', '),
self.extendedKeyUsage)
)
def _validate_subjectAltName():
if self.subjectAltName:
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'subjectAltName':
l_altnames = [altname.replace(b'IP Address', b'IP') for altname in
to_bytes(extension, errors='surrogate_or_strict').split(b', ')]
if (not self.subjectAltName_strict and not all(x in l_altnames for x in self.subjectAltName)) or \
(self.subjectAltName_strict and not set(self.subjectAltName) == set(l_altnames)):
self.message.append(
'Invalid subjectAltName component (got %s, expected all of %s to be present)' % (l_altnames, self.subjectAltName)
)
def _validate_notBefore():
if self.notBefore:
if self.cert.get_notBefore() != self.notBefore:
self.message.append(
'Invalid notBefore component (got %s, expected %s to be present)' % (self.cert.get_notBefore(), self.notBefore)
)
def _validate_notAfter():
if self.notAfter:
if self.cert.get_notAfter() != self.notAfter:
self.message.append(
'Invalid notAfter component (got %s, expected %s to be present)' % (self.cert.get_notAfter(), self.notAfter)
)
def _validate_valid_at():
if self.valid_at:
if not (self.valid_at >= self.cert.get_notBefore() and self.valid_at <= self.cert.get_notAfter()):
self.message.append(
'Certificate is not valid for the specified date (%s) - notBefore: %s - notAfter: %s' % (self.valid_at,
self.cert.get_notBefore(),
self.cert.get_notAfter())
)
def _validate_invalid_at():
if self.invalid_at:
if not (self.invalid_at <= self.cert.get_notBefore() or self.invalid_at >= self.cert.get_notAfter()):
self.message.append(
'Certificate is not invalid for the specified date (%s) - notBefore: %s - notAfter: %s' % (self.invalid_at,
self.cert.get_notBefore(),
self.cert.get_notAfter())
)
def _validate_valid_in():
if self.valid_in:
valid_in_date = datetime.datetime.utcnow() + datetime.timedelta(seconds=self.valid_in)
valid_in_date = valid_in_date.strftime('%Y%m%d%H%M%SZ')
if not (valid_in_date >= self.cert.get_notBefore() and valid_in_date <= self.cert.get_notAfter()):
self.message.append(
'Certificate is not valid in %s seconds from now (%s) - notBefore: %s - notAfter: %s' % (self.valid_in,
valid_in_date,
self.cert.get_notBefore(),
self.cert.get_notAfter())
)
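        # Dispatch: each requested assertion maps to one of the local
        # _validate_* closures above; validators whose module parameter is
        # unset are no-ops, so calling every one of them is safe.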
for validation in ['signature_algorithms', 'subject', 'issuer',
'has_expired', 'version', 'keyUsage',
'extendedKeyUsage', 'subjectAltName',
'notBefore', 'notAfter', 'valid_at',
'invalid_at', 'valid_in']:
f_name = locals()['_validate_%s' % validation]
f_name()
def generate(self, module):
"""Don't generate anything - assertonly"""
self.assertonly()
if self.privatekey_path and \
not super(AssertOnlyCertificate, self).check(module, perms_required=False):
self.message.append(
                'Certificate %s and private key %s do not match' % (self.path, self.privatekey_path)
)
if len(self.message):
module.fail_json(msg=' | '.join(self.message))
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
parent_check = super(AssertOnlyCertificate, self).check(module, perms_required)
self.assertonly()
assertonly_check = not len(self.message)
self.message = []
return parent_check and assertonly_check
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
return result
class AcmeCertificate(Certificate):
"""Retrieve a certificate using the ACME protocol."""
def __init__(self, module):
super(AcmeCertificate, self).__init__(module)
self.accountkey_path = module.params['acme_accountkey_path']
self.challenge_path = module.params['acme_challenge_path']
self.use_chain = module.params['acme_chain']
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not os.path.exists(self.accountkey_path):
raise CertificateError(
'The account key %s does not exist' % self.accountkey_path
)
if not os.path.exists(self.challenge_path):
raise CertificateError(
'The challenge path %s does not exist' % self.challenge_path
)
if not self.check(module, perms_required=False) or self.force:
acme_tiny_path = self.module.get_bin_path('acme-tiny', required=True)
chain = ''
if self.use_chain:
chain = '--chain'
try:
crt = module.run_command("%s %s --account-key %s --csr %s "
"--acme-dir %s" % (acme_tiny_path, chain,
self.accountkey_path,
self.csr_path,
self.challenge_path),
check_rc=True)[1]
with open(self.path, 'wb') as certfile:
certfile.write(to_bytes(crt))
except OSError as exc:
raise CertificateError(exc)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'accountkey': self.accountkey_path,
'csr': self.csr_path,
}
return result
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', choices=['present', 'absent'], default='present'),
path=dict(type='path', required=True),
provider=dict(type='str', choices=['selfsigned', 'assertonly', 'acme']),
force=dict(type='bool', default=False,),
csr_path=dict(type='path'),
# General properties of a certificate
privatekey_path=dict(type='path'),
privatekey_passphrase=dict(type='str', no_log=True),
signature_algorithms=dict(type='list'),
subject=dict(type='dict'),
subject_strict=dict(type='bool', default=False),
issuer=dict(type='dict'),
issuer_strict=dict(type='bool', default=False),
has_expired=dict(type='bool', default=False),
version=dict(type='int'),
keyUsage=dict(type='list', aliases=['key_usage']),
keyUsage_strict=dict(type='bool', default=False, aliases=['key_usage_strict']),
extendedKeyUsage=dict(type='list', aliases=['extended_key_usage'], ),
extendedKeyUsage_strict=dict(type='bool', default=False, aliases=['extended_key_usage_strict']),
subjectAltName=dict(type='list', aliases=['subject_alt_name']),
subjectAltName_strict=dict(type='bool', default=False, aliases=['subject_alt_name_strict']),
notBefore=dict(type='str', aliases=['not_before']),
notAfter=dict(type='str', aliases=['not_after']),
valid_at=dict(type='str'),
invalid_at=dict(type='str'),
valid_in=dict(type='int'),
# provider: selfsigned
            selfsigned_version=dict(type='int', default=3),
selfsigned_digest=dict(type='str', default='sha256'),
selfsigned_notBefore=dict(type='str', aliases=['selfsigned_not_before']),
selfsigned_notAfter=dict(type='str', aliases=['selfsigned_not_after']),
# provider: acme
acme_accountkey_path=dict(type='path'),
acme_challenge_path=dict(type='path'),
acme_chain=dict(type='bool', default=True),
),
supports_check_mode=True,
add_file_common_args=True,
)
if not pyopenssl_found:
module.fail_json(msg='The python pyOpenSSL library is required')
if module.params['provider'] in ['selfsigned', 'assertonly']:
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15')
base_dir = os.path.dirname(module.params['path'])
if not os.path.isdir(base_dir):
module.fail_json(
name=base_dir,
            msg='The directory %s does not exist or is not a directory' % base_dir
)
provider = module.params['provider']
if provider == 'selfsigned':
certificate = SelfSignedCertificate(module)
elif provider == 'acme':
certificate = AcmeCertificate(module)
else:
certificate = AssertOnlyCertificate(module)
if module.params['state'] == 'present':
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = module.params['force'] or not certificate.check(module)
module.exit_json(**result)
try:
certificate.generate(module)
except CertificateError as exc:
module.fail_json(msg=to_native(exc))
else:
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
try:
certificate.remove()
except CertificateError as exc:
module.fail_json(msg=to_native(exc))
result = certificate.dump()
module.exit_json(**result)
if __name__ == "__main__":
main()
| gpl-3.0 |
tomka/rednotebook | rednotebook/gui/options.py | 1 | 13404 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# Copyright (c) 2009 Jendrik Seipp
#
# RedNotebook is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# RedNotebook is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with RedNotebook; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# -----------------------------------------------------------------------
import os
import sys
import logging
import platform
import gtk
import gobject
from rednotebook.gui.customwidgets import UrlButton, CustomComboBoxEntry
from rednotebook.gui.customwidgets import ActionButton
from rednotebook.gui import browser
from rednotebook.util import filesystem, utils
from rednotebook import info
class Option(gtk.HBox):
def __init__(self, text, option_name, tooltip=''):
gtk.HBox.__init__(self)
self.text = text
self.option_name = option_name
self.set_spacing(5)
self.label = gtk.Label(self.text)
self.pack_start(self.label, False, False)
if tooltip:
self.set_tooltip_text(tooltip)
def get_value(self):
raise NotImplementedError
def get_string_value(self):
return str(self.get_value()).strip()
class TickOption(Option):
def __init__(self, text, name, default_value=None, tooltip=''):
Option.__init__(self, '', name, tooltip=tooltip)
self.check_button = gtk.CheckButton(text)
if default_value is None:
self.check_button.set_active(Option.config.read(name, 0) == 1)
else:
self.check_button.set_active(default_value)
self.pack_start(self.check_button, False)
def get_value(self):
return self.check_button.get_active()
def get_string_value(self):
'''
We use 0 and 1 internally for bool options
'''
return int(self.get_value())
class AutostartOption(TickOption):
def __init__(self):
home_dir = os.path.expanduser('~')
autostart_dir = os.path.join(home_dir, '.config/autostart/')
self.autostart_file = os.path.join(autostart_dir, 'rednotebook.desktop')
autostart_file_exists = os.path.exists(self.autostart_file)
TickOption.__init__(self, _('Load RedNotebook at startup'), None, \
default_value=autostart_file_exists)
def get_value(self):
return self.check_button.get_active()
def set(self):
'''Apply the current setting'''
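        # XDG autostart: a .desktop file in ~/.config/autostart/ makes the
        # desktop environment launch RedNotebook at login.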
selected = self.get_value()
if selected:
# Add autostart file if it is not present
filesystem.make_file_with_dir(self.autostart_file, info.desktop_file)
else:
# Remove autostart file
if os.path.exists(self.autostart_file):
os.remove(self.autostart_file)
#class TextOption(Option):
# def __init__(self, text, name):
# self.entry = gtk.Entry(30)
# self.entry.set_text(Option.config.read(name, ''))
#
# Option.__init__(self, text, name, self.entry)
#
# def get_value(self):
# return self.entry.get_text()
class CsvTextOption(Option):
def __init__(self, text, option_name, **kwargs):
Option.__init__(self, text, option_name, **kwargs)
# directly read the string, not the list
values_string = Option.config.read(option_name, '')
# Ensure that we have a string here
values_string = str(values_string)
self.entry = gtk.Entry()
self.entry.set_text(values_string)
self.pack_start(self.entry, True)
def get_value(self):
return self.entry.get_text()
#class TextAndButtonOption(TextOption):
# def __init__(self, text, name, button):
# TextOption.__init__(self, text, name)
# self.widget.pack_end(button, False, False)
class ComboBoxOption(Option):
def __init__(self, text, name, entries):
Option.__init__(self, text, name)
self.combo = CustomComboBoxEntry(gtk.ComboBoxEntry())
self.combo.set_entries(entries)
self.pack_start(self.combo.combo_box, False)
def get_value(self):
return self.combo.get_active_text()
class DateFormatOption(ComboBoxOption):
def __init__(self, text, name):
date_formats = ['%A, %x %X', _('%A, %x, Day %j'), '%H:%M', _('Week %W of Year %Y'), \
'%y-%m-%d', _('Day %j'), '%A', '%B']
ComboBoxOption.__init__(self, text, name, date_formats)
date_url = 'http://docs.python.org/library/time.html#time.strftime'
date_format_help_button = UrlButton(_('Help'), date_url)
self.preview = gtk.Label()
self.pack_start(self.preview, False)
self.pack_end(date_format_help_button, False)
# Set default format if not present
format = Option.config.read(name, '%A, %x %X')
self.combo.set_active_text(str(format))
self.combo.connect('changed', self.on_format_changed)
# Update the preview
self.on_format_changed(None)
def on_format_changed(self, widget):
import time
### Translators: Noun
self.preview.set_text(_('Preview:') + ' ' + time.strftime(self.combo.get_active_text()))
class FontSizeOption(ComboBoxOption):
def __init__(self, text, name):
sizes = range(6, 15) + range(16, 29, 2) + [32, 36, 40, 48, 56, 64, 72]
sizes = ['default'] + map(str, sizes)
ComboBoxOption.__init__(self, text, name, sizes)
# Set default size if not present
size = Option.config.read(name, -1)
if size == -1:
self.combo.set_active_text('default')
else:
self.combo.set_active_text(str(size))
self.combo.set_editable(False)
self.combo.combo_box.set_wrap_width(3)
self.combo.connect('changed', self.on_combo_changed)
def on_combo_changed(self, widget):
'''Live update'''
size = self.get_string_value()
Option.main_window.set_font_size(size)
def get_string_value(self):
        '''Return -1 for the default size, otherwise the size as an int'''
size = self.combo.get_active_text()
if size == 'default':
return -1
try:
return int(size)
except ValueError:
return -1
#class SpinOption(LabelAndWidgetOption):
# def __init__(self, text, name):
#
# adj = gtk.Adjustment(10.0, 6.0, 72.0, 1.0, 10.0, 0.0)
# self.spin = gtk.SpinButton(adj)#, climb_rate=1.0)
# self.spin.set_numeric(True)
# self.spin.set_range(6,72)
# self.spin.set_sensitive(True)
# value = Option.config.read(name, -1)
# if value >= 0:
# self.spin.set_value(value)
#
# LabelAndWidgetOption.__init__(self, text, name, self.spin)
#
# def get_value(self):
# print type(self.spin.get_value())
# return self.spin.get_value()
#
# def get_string_value(self):
# value = int(self.get_value())
# return value
class OptionsDialog(object):
def __init__(self, dialog):
self.dialog = dialog
self.categories = {}
def __getattr__(self, attr):
'''Wrap the dialog'''
return getattr(self.dialog, attr)
def add_option(self, category, option):
self.categories[category].pack_start(option, False)
option.show_all()
def add_category(self, name, vbox):
self.categories[name] = vbox
def clear(self):
for category, vbox in self.categories.items():
for option in vbox.get_children():
vbox.remove(option)
class OptionsManager(object):
def __init__(self, main_window):
self.main_window = main_window
self.builder = main_window.builder
self.journal = main_window.journal
self.config = self.journal.config
self.dialog = OptionsDialog(self.builder.get_object('options_dialog'))
self.dialog.set_default_size(600, 300)
self.dialog.add_category('general', self.builder.get_object('general_vbox'))
def on_options_dialog(self):
self.dialog.clear()
# Make the config globally available
Option.config = self.config
Option.main_window = self.main_window
self.options = []
if platform.system() == 'Linux' and os.path.exists('/usr/bin/rednotebook'):
logging.debug('Running on Linux. Is installed. Adding autostart option')
self.options.insert(0, AutostartOption())
self.options.append(TickOption(_('Close to system tray'), 'closeToTray',
tooltip=_('Closing the window will send RedNotebook to the tray')))
able_to_spell_check = self.main_window.day_text_field.can_spell_check()
tooltip = _('Underline misspelled words') if able_to_spell_check else \
_('Requires gtkspell.') + ' ' + \
_('This is included in the python-gtkspell or python-gnome2-extras package')
spell_check_option = TickOption(_('Check Spelling'), 'spellcheck',
tooltip=tooltip)
if not sys.platform == 'win32':
self.options.append(spell_check_option)
spell_check_option.set_sensitive(able_to_spell_check)
#webkit_available = bool(browser.webkit)
#tooltip = _('Webkit offers a nicer preview') if webkit_available else \
# _('Requires pywebkitgtk (python-webkit)')
#webkit_label = _('Use webkit for previews') + ' ' + _('(Restart required)')
#webkit_option = TickOption(webkit_label, 'useWebkit',
# tooltip=tooltip)
#if not sys.platform == 'win32':
#self.options.append(webkit_option)
#webkit_option.set_sensitive(webkit_available)
# Check for new version
check_version_option = TickOption(_('Check for new version at startup'), 'checkForNewVersion')
def check_version_action(widget):
utils.check_new_version(self.main_window, info.version)
# Apply changes from dialog to options window
check = bool(self.journal.config.get('checkForNewVersion'))
check_version_option.check_button.set_active(check)
check_version_button = ActionButton(_('Check now'), check_version_action)
check_version_option.pack_start(check_version_button, False, False)
self.options.append(check_version_option)
self.options.extend([
FontSizeOption(_('Font Size'), 'mainFontSize'),
DateFormatOption(_('Date/Time format'), 'dateTimeString'),
CsvTextOption(_('Exclude from clouds'), 'cloudIgnoreList', \
tooltip=_('Do not show those comma separated words in any cloud')),
CsvTextOption(_('Allow small words in clouds'), 'cloudIncludeList', \
tooltip=_('Allow those words with 4 letters or less in the text cloud')),
])
self.add_all_options()
response = self.dialog.run()
if response == gtk.RESPONSE_OK:
self.save_options()
# Apply some options
self.main_window.cloud.update_lists()
self.main_window.cloud.update(force_update=True)
spell_check_enabled = self.config.read('spellcheck', 0)
self.main_window.day_text_field.enable_spell_check(spell_check_enabled)
visible = (self.config.read('closeToTray', 0) == 1)
self.main_window.tray_icon.set_visible(visible)
else:
# Reset some options
self.main_window.set_font_size(self.config.read('mainFontSize', -1))
self.dialog.hide()
def add_all_options(self):
for option in self.options:
self.dialog.add_option('general', option)
def save_options(self):
logging.debug('Saving Options')
for option in self.options:
value = option.get_string_value()
if option.option_name is not None:
logging.debug('Setting %s = %s' % (option.option_name, value))
self.config[option.option_name] = value
else:
# We don't save the autostart setting in the config file
option.set()
| gpl-2.0 |
20tab/django-filer | filer/admin/folderadmin.py | 2 | 52396 | #-*- coding: utf-8 -*-
from django import forms
from django import template
from django.core.exceptions import ValidationError
from django.contrib.admin import helpers
from django.contrib.admin.util import quote, unquote, capfirst
from django.contrib import messages
from django.template.defaultfilters import urlencode
from filer.admin.patched.admin_utils import get_deleted_objects
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.db import router
from django.db.models import Q
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.utils.encoding import force_unicode
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext, ugettext_lazy
from filer import settings
from filer.admin.forms import (CopyFilesAndFoldersForm, ResizeImagesForm,
RenameFilesForm)
from filer.admin.permissions import PrimitivePermissionAwareModelAdmin
from filer.views import (popup_status, popup_param, selectfolder_status,
selectfolder_param)
from filer.admin.tools import (userperms_for_request,
check_folder_edit_permissions,
check_files_edit_permissions,
check_files_read_permissions,
check_folder_read_permissions)
from filer.models import (Folder, FolderRoot, UnfiledImages, File, tools,
ImagesWithMissingData, FolderPermission, Image)
from filer.settings import FILER_STATICMEDIA_PREFIX, FILER_PAGINATE_BY
from filer.utils.filer_easy_thumbnails import FilerActionThumbnailer
from filer.thumbnail_processors import normalize_subject_location
from django.conf import settings as django_settings
import urllib
import os
import itertools
import inspect
class AddFolderPopupForm(forms.ModelForm):
folder = forms.HiddenInput()
class Meta:
model = Folder
fields = ('name',)
class FolderAdmin(PrimitivePermissionAwareModelAdmin):
list_display = ('name',)
exclude = ('parent',)
list_per_page = 20
list_filter = ('owner',)
search_fields = ['name', 'files__name']
raw_id_fields = ('owner',)
save_as = True # see ImageAdmin
actions = ['move_to_clipboard', 'files_set_public', 'files_set_private',
'delete_files_or_folders', 'move_files_and_folders',
'copy_files_and_folders', 'resize_images', 'rename_files']
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
parent_id = request.REQUEST.get('parent_id', None)
if parent_id:
return AddFolderPopupForm
else:
            folder_form = super(FolderAdmin, self).get_form(
                request, obj=obj, **kwargs)
def folder_form_clean(form_obj):
cleaned_data = form_obj.cleaned_data
if Folder.objects.filter(parent=form_obj.instance.parent,
name=cleaned_data['name']):
raise ValidationError('Folder with this name already exists.')
return cleaned_data
# attach clean to the default form rather than defining a new form class
folder_form.clean = folder_form_clean
return folder_form
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
r = form.save(commit=False)
parent_id = request.REQUEST.get('parent_id', None)
if parent_id:
parent = Folder.objects.get(id=parent_id)
r.parent = parent
return r
def response_change(self, request, obj):
"""
Overrides the default to be able to forward to the directory listing
instead of the default change_list_view
"""
r = super(FolderAdmin, self).response_change(request, obj)
## Code borrowed from django ModelAdmin to determine changelist on the fly
if r['Location']:
# it was a successful save
if (r['Location'] in ['../'] or
r['Location'] == self._get_post_url(obj)):
if obj.parent:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': obj.parent.id})
else:
url = reverse('admin:filer-directory_listing-root')
                url = "%s%s%s" % (url, popup_param(request),
                                  selectfolder_param(request, "&"))
return HttpResponseRedirect(url)
else:
# this means it probably was a save_and_continue_editing
pass
return r
def render_change_form(self, request, context, add=False, change=False,
form_url='', obj=None):
extra_context = {'show_delete': True,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),}
context.update(extra_context)
        return super(FolderAdmin, self).render_change_form(
            request=request, context=context, add=add,
            change=change, form_url=form_url, obj=obj)
def delete_view(self, request, object_id, extra_context=None):
"""
Overrides the default to enable redirecting to the directory view after
deletion of a folder.
        We need to fetch the object and find out who the parent is
before super, because super will delete the object and make it
impossible to find out the parent folder to redirect to.
"""
parent_folder = None
try:
obj = self.queryset(request).get(pk=unquote(object_id))
parent_folder = obj.parent
except self.model.DoesNotExist:
obj = None
r = super(FolderAdmin, self).delete_view(
request=request, object_id=object_id,
extra_context=extra_context)
url = r.get("Location", None)
if url in ["../../../../", "../../"] or url == self._get_post_url(obj):
if parent_folder:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': parent_folder.id})
else:
url = reverse('admin:filer-directory_listing-root')
            url = "%s%s%s" % (url, popup_param(request),
                              selectfolder_param(request, "&"))
return HttpResponseRedirect(url)
return r
def icon_img(self, xs):
return mark_safe(('<img src="%simg/icons/plainfolder_32x32.png" ' + \
'alt="Folder Icon" />') % FILER_STATICMEDIA_PREFIX)
icon_img.allow_tags = True
def get_urls(self):
try:
# django >=1.4
from django.conf.urls import patterns, url
except ImportError:
# django <1.4
from django.conf.urls.defaults import patterns, url
urls = super(FolderAdmin, self).get_urls()
from filer import views
url_patterns = patterns('',
# we override the default list view with our own directory listing
# of the root directories
url(r'^$',
self.admin_site.admin_view(self.directory_listing),
name='filer-directory_listing-root'),
url(r'^last/$',
self.admin_site.admin_view(self.directory_listing),
{'viewtype': 'last'},
name='filer-directory_listing-last'),
url(r'^(?P<folder_id>\d+)/list/$',
self.admin_site.admin_view(self.directory_listing),
name='filer-directory_listing'),
url(r'^(?P<folder_id>\d+)/make_folder/$',
self.admin_site.admin_view(views.make_folder),
name='filer-directory_listing-make_folder'),
url(r'^make_folder/$',
self.admin_site.admin_view(views.make_folder),
name='filer-directory_listing-make_root_folder'),
url(r'^images_with_missing_data/$',
self.admin_site.admin_view(self.directory_listing),
{'viewtype': 'images_with_missing_data'},
name='filer-directory_listing-images_with_missing_data'),
url(r'^unfiled_images/$',
self.admin_site.admin_view(self.directory_listing),
{'viewtype': 'unfiled_images'},
name='filer-directory_listing-unfiled_images'),
)
url_patterns.extend(urls)
return url_patterns
# custom views
def directory_listing(self, request, folder_id=None, viewtype=None):
clipboard = tools.get_user_clipboard(request.user)
if viewtype == 'images_with_missing_data':
folder = ImagesWithMissingData()
elif viewtype == 'unfiled_images':
folder = UnfiledImages()
elif viewtype == 'last':
last_folder_id = request.session.get('filer_last_folder_id')
try:
Folder.objects.get(id=last_folder_id)
except Folder.DoesNotExist:
url = reverse('admin:filer-directory_listing-root')
                url = "%s%s%s" % (url, popup_param(request), selectfolder_param(request, "&"))
else:
url = reverse('admin:filer-directory_listing', kwargs={'folder_id': last_folder_id})
                url = "%s%s%s" % (url, popup_param(request), selectfolder_param(request, "&"))
return HttpResponseRedirect(url)
elif folder_id == None:
folder = FolderRoot()
else:
folder = get_object_or_404(Folder, id=folder_id)
request.session['filer_last_folder_id'] = folder_id
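            # Remember this folder so the 'last' viewtype above can redirect
            # back to it on the next visit.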
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
# Remove action checkboxes if there aren't any actions available.
list_display = list(self.list_display)
if not actions:
try:
list_display.remove('action_checkbox')
except ValueError:
pass
# search
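        # Each .filter() call below narrows the queryset further, so search
        # terms are ANDed together while the Q objects OR across fields.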
def filter_folder(qs, terms=[]):
for term in terms:
qs = qs.filter(Q(name__icontains=term) | \
Q(owner__username__icontains=term) | \
Q(owner__first_name__icontains=term) | \
Q(owner__last_name__icontains=term))
return qs
def filter_file(qs, terms=[]):
for term in terms:
qs = qs.filter(Q(name__icontains=term) | \
Q(description__icontains=term) | \
Q(original_filename__icontains=term) | \
Q(owner__username__icontains=term) | \
Q(owner__first_name__icontains=term) | \
Q(owner__last_name__icontains=term))
return qs
q = request.GET.get('q', None)
if q:
search_terms = unquote(q).split(" ")
else:
search_terms = []
q = ''
limit_search_to_folder = request.GET.get('limit_search_to_folder',
False) in (True, 'on')
if len(search_terms) > 0:
if folder and limit_search_to_folder and not folder.is_root:
folder_qs = folder.get_descendants()
file_qs = File.objects.filter(
folder__in=folder.get_descendants())
else:
folder_qs = Folder.objects.all()
file_qs = File.objects.all()
folder_qs = filter_folder(folder_qs, search_terms)
file_qs = filter_file(file_qs, search_terms)
show_result_count = True
else:
folder_qs = folder.children.all()
file_qs = folder.files.all()
show_result_count = False
folder_qs = folder_qs.order_by('name')
file_qs = file_qs.order_by('name')
folder_children = []
folder_files = []
if folder.is_root:
folder_children += folder.virtual_folders
for f in folder_qs:
f.perms = userperms_for_request(f, request)
if hasattr(f, 'has_read_permission'):
if f.has_read_permission(request):
folder_children.append(f)
else:
pass
else:
folder_children.append(f)
for f in file_qs:
f.perms = userperms_for_request(f, request)
if hasattr(f, 'has_read_permission'):
if f.has_read_permission(request):
folder_files.append(f)
else:
pass
else:
folder_files.append(f)
try:
permissions = {
'has_edit_permission': folder.has_edit_permission(request),
'has_read_permission': folder.has_read_permission(request),
'has_add_children_permission': \
folder.has_add_children_permission(request),
}
except:
permissions = {}
folder_files.sort()
items = folder_children + folder_files
paginator = Paginator(items, FILER_PAGINATE_BY)
# Are we moving to clipboard?
if request.method == 'POST' and '_save' not in request.POST:
for f in folder_files:
if "move-to-clipboard-%d" % (f.id,) in request.POST:
clipboard = tools.get_user_clipboard(request.user)
if f.has_edit_permission(request):
tools.move_file_to_clipboard([f], clipboard)
return HttpResponseRedirect(request.get_full_path())
else:
raise PermissionDenied
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, files_queryset=file_qs, folders_queryset=folder_qs)
if response:
return response
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg)
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, files_queryset=file_qs, folders_queryset=folder_qs)
if response:
return response
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', paginator.count)
# If page request (9999) is out of range, deliver last page of results.
try:
paginated_items = paginator.page(request.GET.get('page', 1))
except PageNotAnInteger:
paginated_items = paginator.page(1)
except EmptyPage:
paginated_items = paginator.page(paginator.num_pages)
return render_to_response(
'admin/filer/folder/directory_listing.html',
{
'folder': folder,
'clipboard_files': File.objects.filter(
in_clipboards__clipboarditem__clipboard__user=request.user
).distinct(),
'paginator': paginator,
'paginated_items': paginated_items,
'permissions': permissions,
'permstest': userperms_for_request(folder, request),
'current_url': request.path,
'title': u'Directory listing for %s' % folder.name,
'search_string': ' '.join(search_terms),
'q': urlencode(q),
'show_result_count': show_result_count,
'limit_search_to_folder': limit_search_to_folder,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),
# needed in the admin/base.html template for logout links
'root_path': reverse('admin:index'),
'action_form': action_form,
'actions_on_top': self.actions_on_top,
'actions_on_bottom': self.actions_on_bottom,
'actions_selection_counter': self.actions_selection_counter,
'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(paginated_items.object_list)},
'selection_note_all': selection_note_all % {'total_count': paginator.count},
'media': self.media,
'enable_permissions': settings.FILER_ENABLE_PERMISSIONS
}, context_instance=RequestContext(request))
def response_action(self, request, files_queryset, folders_queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
# If we didn't get an action from the chosen form that's invalid
# POST data, so by deleting action it'll fail the validation check
# below. So no need to do anything here
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func, name, description = self.get_actions(request)[action]
# Get the list of selected PKs. If nothing's selected, we can't
# perform an action on it, so bail. Except we want to perform
# the action explicitly on all objects.
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg)
return None
if not select_across:
selected_files = []
selected_folders = []
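                # Checkbox values are namespaced: "file-<pk>" for files and
                # "folder-<pk>" for folders; route each pk to its queryset.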
for pk in selected:
if pk[:5] == "file-":
selected_files.append(pk[5:])
else:
selected_folders.append(pk[7:])
# Perform the action only on the selected objects
files_queryset = files_queryset.filter(pk__in=selected_files)
folders_queryset = folders_queryset.filter(pk__in=selected_folders)
response = func(self, request, files_queryset, folders_queryset)
# Actions may return an HttpResponse, which will be used as the
# response from the POST. If not, we'll be a good little HTTP
# citizen and redirect back to the changelist page.
if isinstance(response, HttpResponse):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg)
return None
def get_actions(self, request):
actions = super(FolderAdmin, self).get_actions(request)
if 'delete_selected' in actions:
del actions['delete_selected']
return actions
def move_to_clipboard(self, request, files_queryset, folders_queryset):
"""
Action which moves the selected files and files in selected folders to clipboard.
"""
if not self.has_change_permission(request):
raise PermissionDenied
if request.method != 'POST':
return None
clipboard = tools.get_user_clipboard(request.user)
check_files_edit_permissions(request, files_queryset)
check_folder_edit_permissions(request, folders_queryset)
# TODO: Display a confirmation page if moving more than X files to clipboard?
files_count = [0] # We define it like that so that we can modify it inside the move_files function
def move_files(files):
files_count[0] += tools.move_file_to_clipboard(files, clipboard)
def move_folders(folders):
for f in folders:
move_files(f.files)
move_folders(f.children.all())
move_files(files_queryset)
move_folders(folders_queryset)
self.message_user(request, _("Successfully moved %(count)d files to clipboard.") % {
"count": files_count[0],
})
return None
move_to_clipboard.short_description = ugettext_lazy("Move selected files to clipboard")
def files_set_public_or_private(self, request, set_public, files_queryset, folders_queryset):
"""
Action which enables or disables permissions for selected files and files in selected folders to clipboard (set them private or public).
"""
if not self.has_change_permission(request):
raise PermissionDenied
if request.method != 'POST':
return None
check_files_edit_permissions(request, files_queryset)
check_folder_edit_permissions(request, folders_queryset)
files_count = [0] # We define it like that so that we can modify it inside the set_files function
def set_files(files):
for f in files:
if f.is_public != set_public:
f.is_public = set_public
f.save()
files_count[0] += 1
def set_folders(folders):
for f in folders:
set_files(f.files)
set_folders(f.children.all())
set_files(files_queryset)
set_folders(folders_queryset)
if set_public:
self.message_user(request, _("Successfully disabled permissions for %(count)d files.") % {
"count": files_count[0],
})
else:
self.message_user(request, _("Successfully enabled permissions for %(count)d files.") % {
"count": files_count[0],
})
return None
def files_set_private(self, request, files_queryset, folders_queryset):
return self.files_set_public_or_private(request, False, files_queryset, folders_queryset)
files_set_private.short_description = ugettext_lazy("Enable permissions for selected files")
def files_set_public(self, request, files_queryset, folders_queryset):
return self.files_set_public_or_private(request, True, files_queryset, folders_queryset)
files_set_public.short_description = ugettext_lazy("Disable permissions for selected files")
def delete_files_or_folders(self, request, files_queryset, folders_queryset):
"""
Action which deletes the selected files and/or folders.
        This action first displays a confirmation page which shows all the
        deletable files and/or folders, or, if the user has no permission on
        one of the related children (foreign keys), a "permission denied"
        message.
        Next, it deletes all selected files and/or folders and redirects back
        to the folder.
"""
opts = self.model._meta
app_label = opts.app_label
# Check that the user has delete permission for the actual model
if not self.has_delete_permission(request):
raise PermissionDenied
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
all_protected = []
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
# Hopefully this also checks for necessary permissions.
# TODO: Check if permissions are really verified
(args, varargs, keywords, defaults) = inspect.getargspec(get_deleted_objects)
if 'levels_to_root' in args:
# Django 1.2
deletable_files, perms_needed_files = get_deleted_objects(files_queryset, files_queryset.model._meta, request.user, self.admin_site, levels_to_root=2)
deletable_folders, perms_needed_folders = get_deleted_objects(folders_queryset, folders_queryset.model._meta, request.user, self.admin_site, levels_to_root=2)
else:
# Django 1.3
using = router.db_for_write(self.model)
deletable_files, perms_needed_files, protected_files = get_deleted_objects(files_queryset, files_queryset.model._meta, request.user, self.admin_site, using)
deletable_folders, perms_needed_folders, protected_folders = get_deleted_objects(folders_queryset, folders_queryset.model._meta, request.user, self.admin_site, using)
all_protected.extend(protected_files)
all_protected.extend(protected_folders)
all_deletable_objects = [deletable_files, deletable_folders]
all_perms_needed = perms_needed_files.union(perms_needed_folders)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
if all_perms_needed:
raise PermissionDenied
n = files_queryset.count() + folders_queryset.count()
if n:
# delete all explicitly selected files
for f in files_queryset:
self.log_deletion(request, f, force_unicode(f))
f.delete()
# delete all files in all selected folders and their children
                # This would happen automatically by way of the delete cascade,
                # but then the individual .delete() methods won't be called and
                # the files won't be deleted from the filesystem.
folder_ids = set()
for folder in folders_queryset:
folder_ids.add(folder.id)
folder_ids.update(folder.get_descendants().values_list('id', flat=True))
for f in File.objects.filter(folder__in=folder_ids):
self.log_deletion(request, f, force_unicode(f))
f.delete()
# delete all folders
for f in folders_queryset:
self.log_deletion(request, f, force_unicode(f))
f.delete()
self.message_user(request, _("Successfully deleted %(count)d files and/or folders.") % {
"count": n,
})
# Return None to display the change list page again.
return None
if all_perms_needed or all_protected:
title = _("Cannot delete files and/or folders")
else:
title = _("Are you sure?")
context = {
"title": title,
"instance": current_folder,
"breadcrumbs_action": _("Delete files and/or folders"),
"deletable_objects": all_deletable_objects,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": all_perms_needed,
"protected": all_protected,
"opts": opts,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
        # Display the delete confirmation page
return render_to_response([
"admin/filer/delete_selected_files_confirmation.html"
], context, context_instance=template.RequestContext(request))
delete_files_or_folders.short_description = ugettext_lazy("Delete selected files and/or folders")
# Copied from django.contrib.admin.util
def _format_callback(self, obj, user, admin_site, perms_needed):
has_admin = obj.__class__ in admin_site._registry
opts = obj._meta
if has_admin:
admin_url = reverse('%s:%s_%s_change'
% (admin_site.name,
opts.app_label,
opts.object_name.lower()),
None, (quote(obj._get_pk_val()),))
p = '%s.%s' % (opts.app_label,
opts.get_delete_permission())
if not user.has_perm(p):
perms_needed.add(opts.verbose_name)
# Display a link to the admin page.
return mark_safe(u'%s: <a href="%s">%s</a>' %
(escape(capfirst(opts.verbose_name)),
admin_url,
escape(obj)))
else:
# Don't display link to edit, because it either has no
# admin or is edited inline.
return u'%s: %s' % (capfirst(opts.verbose_name),
force_unicode(obj))
def _check_copy_perms(self, request, files_queryset, folders_queryset):
try:
check_files_read_permissions(request, files_queryset)
check_folder_read_permissions(request, folders_queryset)
except PermissionDenied:
return True
return False
def _check_move_perms(self, request, files_queryset, folders_queryset):
try:
check_files_read_permissions(request, files_queryset)
check_folder_read_permissions(request, folders_queryset)
check_files_edit_permissions(request, files_queryset)
check_folder_edit_permissions(request, folders_queryset)
except PermissionDenied:
return True
return False
def _get_current_action_folder(self, request, files_queryset, folders_queryset):
if files_queryset:
return files_queryset[0].folder
elif folders_queryset:
return folders_queryset[0].parent
else:
return None
def _list_folders_to_copy_or_move(self, request, folders):
for fo in folders:
yield self._format_callback(fo, request.user, self.admin_site, set())
children = list(self._list_folders_to_copy_or_move(request, fo.children.all()))
children.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(fo.files)])
if children:
yield children
def _list_all_to_copy_or_move(self, request, files_queryset, folders_queryset):
to_copy_or_move = list(self._list_folders_to_copy_or_move(request, folders_queryset))
to_copy_or_move.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(files_queryset)])
return to_copy_or_move
def _list_all_destination_folders_recursive(self, request, folders_queryset, current_folder, folders, allow_self, level):
for fo in folders:
if not allow_self and fo in folders_queryset:
# We do not allow moving to selected folders or their descendants
continue
if not fo.has_read_permission(request):
continue
# We do not allow copying/moving back to the folder itself
enabled = (allow_self or fo != current_folder) and fo.has_add_children_permission(request)
yield (fo, (mark_safe((" " * level) + force_unicode(fo)), enabled))
for c in self._list_all_destination_folders_recursive(request, folders_queryset, current_folder, fo.children.all(), allow_self, level + 1):
yield c
def _list_all_destination_folders(self, request, folders_queryset, current_folder, allow_self):
return list(self._list_all_destination_folders_recursive(request, folders_queryset, current_folder, FolderRoot().children, allow_self, 0))
def _move_files_and_folders_impl(self, files_queryset, folders_queryset, destination):
for f in files_queryset:
f.folder = destination
f.save()
for f in folders_queryset:
f.move_to(destination, 'last-child')
f.save()
def move_files_and_folders(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_move_perms(request, files_queryset, folders_queryset)
to_move = self._list_all_to_copy_or_move(request, files_queryset, folders_queryset)
folders = self._list_all_destination_folders(request, folders_queryset, current_folder, False)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
try:
destination = Folder.objects.get(pk=request.POST.get('destination'))
except Folder.DoesNotExist:
raise PermissionDenied
folders_dict = dict(folders)
if destination not in folders_dict or not folders_dict[destination][1]:
raise PermissionDenied
# We count only topmost files and folders here
n = files_queryset.count() + folders_queryset.count()
conflicting_names = [folder.name for folder in Folder.objects.filter(
parent=destination,
name__in=folders_queryset.values('name'))]
if conflicting_names:
messages.error(request, _(u"Folders with names %s already exist at the selected "
"destination") % u", ".join(conflicting_names))
elif n:
self._move_files_and_folders_impl(files_queryset, folders_queryset, destination)
self.message_user(request, _("Successfully moved %(count)d files and/or folders to folder '%(destination)s'.") % {
"count": n,
"destination": destination,
})
return None
context = {
"title": _("Move files and/or folders"),
"instance": current_folder,
"breadcrumbs_action": _("Move files and/or folders"),
"to_move": to_move,
"destination_folders": folders,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the destination folder selection page
return render_to_response([
"admin/filer/folder/choose_move_destination.html"
], context, context_instance=template.RequestContext(request))
move_files_and_folders.short_description = ugettext_lazy("Move selected files and/or folders")
def _rename_file(self, file_obj, form_data, counter, global_counter):
original_basename, original_extension = os.path.splitext(file_obj.original_filename)
if file_obj.name:
current_basename, current_extension = os.path.splitext(file_obj.name)
else:
current_basename = ""
current_extension = ""
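        # The rename format is a plain %-template over the keys below; e.g.
        # "%(current_basename)s_%(counter)d%(current_extension)s" turns
        # "a.jpg" into "a_1.jpg" for the first file of a folder.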
file_obj.name = form_data['rename_format'] % {
'original_filename': file_obj.original_filename,
'original_basename': original_basename,
'original_extension': original_extension,
'current_filename': file_obj.name or "",
'current_basename': current_basename,
'current_extension': current_extension,
'current_folder': file_obj.folder.name,
'counter': counter + 1, # 1-based
'global_counter': global_counter + 1, # 1-based
}
file_obj.save()
def _rename_files(self, files, form_data, global_counter):
n = 0
for f in sorted(files):
self._rename_file(f, form_data, n, global_counter + n)
n += 1
return n
def _rename_folder(self, folder, form_data, global_counter):
return self._rename_files_impl(folder.files.all(), folder.children.all(), form_data, global_counter)
def _rename_files_impl(self, files_queryset, folders_queryset, form_data, global_counter):
n = 0
for f in folders_queryset:
n += self._rename_folder(f, form_data, global_counter + n)
n += self._rename_files(files_queryset, form_data, global_counter + n)
return n
def rename_files(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_move_perms(request, files_queryset, folders_queryset)
to_rename = self._list_all_to_copy_or_move(request, files_queryset, folders_queryset)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
form = RenameFilesForm(request.POST)
if form.is_valid():
if files_queryset.count() + folders_queryset.count():
n = self._rename_files_impl(files_queryset, folders_queryset, form.cleaned_data, 0)
self.message_user(request, _("Successfully renamed %(count)d files.") % {
"count": n,
})
return None
else:
form = RenameFilesForm()
context = {
"title": _("Rename files"),
"instance": current_folder,
"breadcrumbs_action": _("Rename files"),
"to_rename": to_rename,
"rename_form": form,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the rename format selection page
return render_to_response([
"admin/filer/folder/choose_rename_format.html"
], context, context_instance=template.RequestContext(request))
rename_files.short_description = ugettext_lazy("Rename files")
def _generate_new_filename(self, filename, suffix):
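        # e.g. _generate_new_filename("pictures/cat.jpg", "_copy")
        #      -> "pictures/cat_copy.jpg"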
basename, extension = os.path.splitext(filename)
return basename + suffix + extension
def _copy_file(self, file_obj, destination, suffix, overwrite):
if overwrite:
# Not yet implemented as we have to find a portable (for different storage backends) way to overwrite files
raise NotImplementedError
        # We assume here that we are operating on already saved database
        # objects with the current database state available.
filename = self._generate_new_filename(file_obj.file.name, suffix)
# Due to how inheritance works, we have to set both pk and id to None
file_obj.pk = None
file_obj.id = None
file_obj.save()
file_obj.folder = destination
file_obj.file = file_obj._copy_file(filename)
file_obj.original_filename = self._generate_new_filename(file_obj.original_filename, suffix)
file_obj.save()
def _copy_files(self, files, destination, suffix, overwrite):
for f in files:
self._copy_file(f, destination, suffix, overwrite)
return len(files)
def _get_available_name(self, destination, name):
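        # Append "_1", "_2", ... until the name no longer collides with an
        # existing child folder, e.g. "docs" -> "docs_1" -> "docs_2".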
count = itertools.count(1)
original = name
while destination.contains_folder(name):
name = "%s_%s" % (original, count.next())
return name
def _copy_folder(self, folder, destination, suffix, overwrite):
if overwrite:
# Not yet implemented as we have to find a portable (for different storage backends) way to overwrite files
raise NotImplementedError
# TODO: Should we also allow not to overwrite the folder if it exists, but just copy into it?
# TODO: Is this a race-condition? Would this be a problem?
foldername = self._get_available_name(destination, folder.name)
old_folder = Folder.objects.get(pk=folder.pk)
# Due to how inheritance works, we have to set both pk and id to None
folder.pk = None
folder.id = None
folder.name = foldername
folder.insert_at(destination, 'last-child', True) # We save folder here
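        # Clone the source folder's permission rows so the copy keeps the
        # same access control; resetting pk and id makes save() insert new
        # rows instead of updating the originals.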
for perm in FolderPermission.objects.filter(folder=old_folder):
perm.pk = None
perm.id = None
perm.folder = folder
perm.save()
return 1 + self._copy_files_and_folders_impl(old_folder.files.all(), old_folder.children.all(), folder, suffix, overwrite)
def _copy_files_and_folders_impl(self, files_queryset, folders_queryset, destination, suffix, overwrite):
n = self._copy_files(files_queryset, destination, suffix, overwrite)
for f in folders_queryset:
n += self._copy_folder(f, destination, suffix, overwrite)
return n
def copy_files_and_folders(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_copy_perms(request, files_queryset, folders_queryset)
to_copy = self._list_all_to_copy_or_move(request, files_queryset, folders_queryset)
folders = self._list_all_destination_folders(request, folders_queryset, current_folder, True)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
form = CopyFilesAndFoldersForm(request.POST)
if form.is_valid():
try:
destination = Folder.objects.get(pk=request.POST.get('destination'))
except Folder.DoesNotExist:
raise PermissionDenied
folders_dict = dict(folders)
if destination not in folders_dict or not folders_dict[destination][1]:
raise PermissionDenied
if files_queryset.count() + folders_queryset.count():
                    # We count all files and folders here (recursively)
n = self._copy_files_and_folders_impl(files_queryset, folders_queryset, destination, form.cleaned_data['suffix'], False)
self.message_user(request, _("Successfully copied %(count)d files and/or folders to folder '%(destination)s'.") % {
"count": n,
"destination": destination,
})
return None
else:
form = CopyFilesAndFoldersForm()
try:
selected_destination_folder = int(request.POST.get('destination', 0))
except ValueError:
if current_folder:
selected_destination_folder = current_folder.pk
else:
selected_destination_folder = 0
context = {
"title": _("Copy files and/or folders"),
"instance": current_folder,
"breadcrumbs_action": _("Copy files and/or folders"),
"to_copy": to_copy,
"destination_folders": folders,
"selected_destination_folder": selected_destination_folder,
"copy_form": form,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the destination folder selection page
return render_to_response([
"admin/filer/folder/choose_copy_destination.html"
], context, context_instance=template.RequestContext(request))
copy_files_and_folders.short_description = ugettext_lazy("Copy selected files and/or folders")
def _check_resize_perms(self, request, files_queryset, folders_queryset):
try:
check_files_read_permissions(request, files_queryset)
check_folder_read_permissions(request, folders_queryset)
check_files_edit_permissions(request, files_queryset)
except PermissionDenied:
return True
return False
def _list_folders_to_resize(self, request, folders):
for fo in folders:
children = list(self._list_folders_to_resize(request, fo.children.all()))
children.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(fo.files) if isinstance(f, Image)])
if children:
yield self._format_callback(fo, request.user, self.admin_site, set())
yield children
def _list_all_to_resize(self, request, files_queryset, folders_queryset):
to_resize = list(self._list_folders_to_resize(request, folders_queryset))
to_resize.extend([self._format_callback(f, request.user, self.admin_site, set()) for f in sorted(files_queryset) if isinstance(f, Image)])
return to_resize
def _new_subject_location(self, original_width, original_height, new_width, new_height, x, y, crop):
# TODO: We could probably do better
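        # Heuristic: recenter the subject marker in the resized image rather
        # than scaling the original (x, y) proportionally.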
return (round(new_width / 2), round(new_height / 2))
def _resize_image(self, image, form_data):
original_width = float(image.width)
original_height = float(image.height)
thumbnailer = FilerActionThumbnailer(file=image.file.file, name=image.file.name, source_storage=image.file.source_storage, thumbnail_storage=image.file.source_storage)
# This should overwrite the original image
new_image = thumbnailer.get_thumbnail({
'size': (form_data['width'], form_data['height']),
'crop': form_data['crop'],
'upscale': form_data['upscale'],
'subject_location': image.subject_location,
})
from django.db.models.fields.files import ImageFieldFile
image.file.file = new_image.file
image.generate_sha1()
image.save() # Also gets new width and height
subject_location = normalize_subject_location(image.subject_location)
if subject_location:
(x, y) = subject_location
x = float(x)
y = float(y)
new_width = float(image.width)
new_height = float(image.height)
(new_x, new_y) = self._new_subject_location(original_width, original_height, new_width, new_height, x, y, form_data['crop'])
image.subject_location = "%d,%d" % (new_x, new_y)
image.save()
def _resize_images(self, files, form_data):
n = 0
for f in files:
if isinstance(f, Image):
self._resize_image(f, form_data)
n += 1
return n
def _resize_folder(self, folder, form_data):
return self._resize_images_impl(folder.files.all(), folder.children.all(), form_data)
def _resize_images_impl(self, files_queryset, folders_queryset, form_data):
n = self._resize_images(files_queryset, form_data)
for f in folders_queryset:
n += self._resize_folder(f, form_data)
return n
def resize_images(self, request, files_queryset, folders_queryset):
opts = self.model._meta
app_label = opts.app_label
current_folder = self._get_current_action_folder(request, files_queryset, folders_queryset)
perms_needed = self._check_resize_perms(request, files_queryset, folders_queryset)
to_resize = self._list_all_to_resize(request, files_queryset, folders_queryset)
if request.method == 'POST' and request.POST.get('post'):
if perms_needed:
raise PermissionDenied
form = ResizeImagesForm(request.POST)
if form.is_valid():
if form.cleaned_data.get('thumbnail_option'):
form.cleaned_data['width'] = form.cleaned_data['thumbnail_option'].width
form.cleaned_data['height'] = form.cleaned_data['thumbnail_option'].height
form.cleaned_data['crop'] = form.cleaned_data['thumbnail_option'].crop
form.cleaned_data['upscale'] = form.cleaned_data['thumbnail_option'].upscale
if files_queryset.count() + folders_queryset.count():
                    # We count all files here (recursively)
n = self._resize_images_impl(files_queryset, folders_queryset, form.cleaned_data)
self.message_user(request, _("Successfully resized %(count)d images.") % {
"count": n,
})
return None
else:
form = ResizeImagesForm()
context = {
"title": _("Resize images"),
"instance": current_folder,
"breadcrumbs_action": _("Resize images"),
"to_resize": to_resize,
"resize_form": form,
"cmsplugin_enabled": 'cmsplugin_filer_image' in django_settings.INSTALLED_APPS,
"files_queryset": files_queryset,
"folders_queryset": folders_queryset,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
}
# Display the resize options page
return render_to_response([
"admin/filer/folder/choose_images_resize_options.html"
], context, context_instance=template.RequestContext(request))
resize_images.short_description = ugettext_lazy("Resize selected images")
| bsd-3-clause |
hyrole/scrapy | scrapy/settings/deprecated.py | 160 | 1383 | import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
DEPRECATED_SETTINGS = [
('TRACK_REFS', 'no longer needed (trackref is always enabled)'),
('RESPONSE_CLASSES', 'no longer supported'),
('DEFAULT_RESPONSE_ENCODING', 'no longer supported'),
('BOT_VERSION', 'no longer used (user agent defaults to Scrapy now)'),
('ENCODING_ALIASES', 'no longer needed (encoding discovery uses w3lib now)'),
('STATS_ENABLED', 'no longer supported (change STATS_CLASS instead)'),
('SQLITE_DB', 'no longer supported'),
('SELECTORS_BACKEND', 'use SCRAPY_SELECTORS_BACKEND environment variable instead'),
('AUTOTHROTTLE_MIN_DOWNLOAD_DELAY', 'use DOWNLOAD_DELAY instead'),
    ('AUTOTHROTTLE_MAX_CONCURRENCY', 'use CONCURRENT_REQUESTS_PER_DOMAIN instead'),
('REDIRECT_MAX_METAREFRESH_DELAY', 'use METAREFRESH_MAXDELAY instead'),
]
def check_deprecated_settings(settings):
deprecated = [x for x in DEPRECATED_SETTINGS if settings[x[0]] is not None]
if deprecated:
msg = "You are using the following settings which are deprecated or obsolete"
msg += " (ask [email protected] for alternatives):"
msg = msg + "\n " + "\n ".join("%s: %s" % x for x in deprecated)
warnings.warn(msg, ScrapyDeprecationWarning)
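# Illustrative usage sketch (not part of the original module): unset keys
# resolve to None on a Settings object and are skipped, so only deprecated
# settings actually in use trigger the warning.
#
#     from scrapy.settings import Settings
#     check_deprecated_settings(Settings({'SQLITE_DB': 'scrapy.db'}))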
| bsd-3-clause |
rschnapka/odoo | addons/document_webdav/__init__.py | 58 | 1119 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import webdav
import webdav_server
import document_webdav
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
aayushidwivedi01/spark-tk | python/sparktk/frame/ops/quantiles.py | 12 | 2626 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def quantiles(self, column_name, quantiles):
"""
Returns a new frame with Quantiles and their values.
Parameters
----------
:param column_name: (str) The column to calculate quantiles on
:param quantiles: (List[float]) The quantiles being requested
:return: (Frame) A new frame with two columns (float): requested Quantiles and their respective values.
Calculates quantiles on the given column.
Examples
--------
<hide>
>>> data = [[100],[250],[95],[179],[315],[660],[540],[420],[250],[335]]
>>> schema = [('final_sale_price', int)]
>>> my_frame = tc.frame.create(data, schema)
<progress>
</hide>
Consider Frame *my_frame*, which accesses a frame that contains a single
column *final_sale_price*:
>>> my_frame.inspect()
[#] final_sale_price
=====================
[0] 100
[1] 250
[2] 95
[3] 179
[4] 315
[5] 660
[6] 540
[7] 420
[8] 250
[9] 335
To calculate 10th, 50th, and 100th quantile:
>>> quantiles_frame = my_frame.quantiles('final_sale_price', [10, 50, 100])
<progress>
A new Frame containing the requested Quantiles and their respective values
will be returned:
>>> quantiles_frame.inspect()
[#] Quantiles final_sale_price_QuantileValue
==============================================
[0] 10.0 95.0
[1] 50.0 250.0
[2] 100.0 660.0
"""
from sparktk.frame.frame import Frame
return Frame(self._tc, self._scala.quantiles(column_name, self._tc.jutils.convert.to_scala_list_double(quantiles)))
| apache-2.0 |
Instagram/django | tests/regressiontests/comment_tests/tests/__init__.py | 88 | 3272 | from django.contrib.auth.models import User
from django.contrib.comments.forms import CommentForm
from django.contrib.comments.models import Comment
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.test import TestCase
from regressiontests.comment_tests.models import Article, Author
# Shortcut
CT = ContentType.objects.get_for_model
# Helper base class for comment tests that need data.
class CommentTestCase(TestCase):
fixtures = ["comment_tests"]
urls = 'django.contrib.comments.urls'
def createSomeComments(self):
# Two anonymous comments on two different objects
c1 = Comment.objects.create(
content_type = CT(Article),
object_pk = "1",
user_name = "Joe Somebody",
user_email = "[email protected]",
user_url = "http://example.com/~joe/",
comment = "First!",
site = Site.objects.get_current(),
)
c2 = Comment.objects.create(
content_type = CT(Author),
object_pk = "1",
user_name = "Joe Somebody",
user_email = "[email protected]",
user_url = "http://example.com/~joe/",
comment = "First here, too!",
site = Site.objects.get_current(),
)
# Two authenticated comments: one on the same Article, and
# one on a different Author
user = User.objects.create(
username = "frank_nobody",
first_name = "Frank",
last_name = "Nobody",
email = "[email protected]",
password = "",
is_staff = False,
is_active = True,
is_superuser = False,
)
c3 = Comment.objects.create(
content_type = CT(Article),
object_pk = "1",
user = user,
user_url = "http://example.com/~frank/",
comment = "Damn, I wanted to be first.",
site = Site.objects.get_current(),
)
c4 = Comment.objects.create(
content_type = CT(Author),
object_pk = "2",
user = user,
user_url = "http://example.com/~frank/",
comment = "You get here first, too?",
site = Site.objects.get_current(),
)
return c1, c2, c3, c4
def getData(self):
return {
'name' : 'Jim Bob',
'email' : '[email protected]',
'url' : '',
'comment' : 'This is my comment',
}
def getValidData(self, obj):
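        # Merge the form's initial data (content_type, object_pk, timestamp
        # and security hash) into the plain POST dict so it validates.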
f = CommentForm(obj)
d = self.getData()
d.update(f.initial)
return d
from regressiontests.comment_tests.tests.app_api_tests import *
from regressiontests.comment_tests.tests.feed_tests import *
from regressiontests.comment_tests.tests.model_tests import *
from regressiontests.comment_tests.tests.comment_form_tests import *
from regressiontests.comment_tests.tests.templatetag_tests import *
from regressiontests.comment_tests.tests.comment_view_tests import *
from regressiontests.comment_tests.tests.moderation_view_tests import *
from regressiontests.comment_tests.tests.comment_utils_moderators_tests import *
| bsd-3-clause |
bbozhev/flask-test | flask/lib/python2.7/site-packages/werkzeug/testsuite/datastructures.py | 145 | 28212 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.datastructures
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests the functionality of the provided Werkzeug
datastructures.
TODO:
- FileMultiDict
- Immutable types undertested
- Split up dict tests
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import unittest
import pickle
from contextlib import contextmanager
from copy import copy, deepcopy
from werkzeug import datastructures
from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \
iterlistvalues, text_type, PY2
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.exceptions import BadRequestKeyError
class NativeItermethodsTestCase(WerkzeugTestCase):
def test_basic(self):
@datastructures.native_itermethods(['keys', 'values', 'items'])
class StupidDict(object):
def keys(self, multi=1):
return iter(['a', 'b', 'c'] * multi)
def values(self, multi=1):
return iter([1, 2, 3] * multi)
def items(self, multi=1):
return iter(zip(iterkeys(self, multi=multi),
itervalues(self, multi=multi)))
d = StupidDict()
expected_keys = ['a', 'b', 'c']
expected_values = [1, 2, 3]
expected_items = list(zip(expected_keys, expected_values))
self.assert_equal(list(iterkeys(d)), expected_keys)
self.assert_equal(list(itervalues(d)), expected_values)
self.assert_equal(list(iteritems(d)), expected_items)
self.assert_equal(list(iterkeys(d, 2)), expected_keys * 2)
self.assert_equal(list(itervalues(d, 2)), expected_values * 2)
self.assert_equal(list(iteritems(d, 2)), expected_items * 2)
class MutableMultiDictBaseTestCase(WerkzeugTestCase):
storage_class = None
def test_pickle(self):
cls = self.storage_class
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
d = cls()
d.setlist(b'foo', [1, 2, 3, 4])
d.setlist(b'bar', b'foo bar baz'.split())
s = pickle.dumps(d, protocol)
ud = pickle.loads(s)
self.assert_equal(type(ud), type(d))
self.assert_equal(ud, d)
self.assert_equal(pickle.loads(
s.replace(b'werkzeug.datastructures', b'werkzeug')), d)
ud[b'newkey'] = b'bla'
self.assert_not_equal(ud, d)
def test_basic_interface(self):
md = self.storage_class()
assert isinstance(md, dict)
mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3),
('a', 1), ('a', 3), ('d', 4), ('c', 3)]
md = self.storage_class(mapping)
# simple getitem gives the first value
self.assert_equal(md['a'], 1)
self.assert_equal(md['c'], 3)
with self.assert_raises(KeyError):
md['e']
self.assert_equal(md.get('a'), 1)
# list getitem
self.assert_equal(md.getlist('a'), [1, 2, 1, 3])
self.assert_equal(md.getlist('d'), [3, 4])
# do not raise if key not found
self.assert_equal(md.getlist('x'), [])
# simple setitem overwrites all values
md['a'] = 42
self.assert_equal(md.getlist('a'), [42])
# list setitem
md.setlist('a', [1, 2, 3])
self.assert_equal(md['a'], 1)
self.assert_equal(md.getlist('a'), [1, 2, 3])
# verify that it does not change original lists
l1 = [1, 2, 3]
md.setlist('a', l1)
del l1[:]
self.assert_equal(md['a'], 1)
# setdefault, setlistdefault
self.assert_equal(md.setdefault('u', 23), 23)
self.assert_equal(md.getlist('u'), [23])
del md['u']
md.setlist('u', [-1, -2])
# delitem
del md['u']
with self.assert_raises(KeyError):
md['u']
del md['d']
self.assert_equal(md.getlist('d'), [])
# keys, values, items, lists
self.assert_equal(list(sorted(md.keys())), ['a', 'b', 'c'])
self.assert_equal(list(sorted(iterkeys(md))), ['a', 'b', 'c'])
self.assert_equal(list(sorted(itervalues(md))), [1, 2, 3])
self.assert_equal(list(sorted(itervalues(md))), [1, 2, 3])
self.assert_equal(list(sorted(md.items())),
[('a', 1), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(md.items(multi=True))),
[('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(iteritems(md))),
[('a', 1), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(iteritems(md, multi=True))),
[('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)])
self.assert_equal(list(sorted(md.lists())),
[('a', [1, 2, 3]), ('b', [2]), ('c', [3])])
self.assert_equal(list(sorted(iterlists(md))),
[('a', [1, 2, 3]), ('b', [2]), ('c', [3])])
# copy method
c = md.copy()
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# copy method 2
c = copy(md)
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# deepcopy method
c = md.deepcopy()
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# deepcopy method 2
c = deepcopy(md)
self.assert_equal(c['a'], 1)
self.assert_equal(c.getlist('a'), [1, 2, 3])
# update with a multidict
od = self.storage_class([('a', 4), ('a', 5), ('y', 0)])
md.update(od)
self.assert_equal(md.getlist('a'), [1, 2, 3, 4, 5])
self.assert_equal(md.getlist('y'), [0])
# update with a regular dict
md = c
od = {'a': 4, 'y': 0}
md.update(od)
self.assert_equal(md.getlist('a'), [1, 2, 3, 4])
self.assert_equal(md.getlist('y'), [0])
# pop, poplist, popitem, popitemlist
self.assert_equal(md.pop('y'), 0)
assert 'y' not in md
self.assert_equal(md.poplist('a'), [1, 2, 3, 4])
assert 'a' not in md
self.assert_equal(md.poplist('missing'), [])
# remaining: b=2, c=3
popped = md.popitem()
assert popped in [('b', 2), ('c', 3)]
popped = md.popitemlist()
assert popped in [('b', [2]), ('c', [3])]
# type conversion
md = self.storage_class({'a': '4', 'b': ['2', '3']})
self.assert_equal(md.get('a', type=int), 4)
self.assert_equal(md.getlist('b', type=int), [2, 3])
# repr
md = self.storage_class([('a', 1), ('a', 2), ('b', 3)])
assert "('a', 1)" in repr(md)
assert "('a', 2)" in repr(md)
assert "('b', 3)" in repr(md)
# add and getlist
md.add('c', '42')
md.add('c', '23')
self.assert_equal(md.getlist('c'), ['42', '23'])
md.add('c', 'blah')
self.assert_equal(md.getlist('c', type=int), [42, 23])
# setdefault
md = self.storage_class()
md.setdefault('x', []).append(42)
md.setdefault('x', []).append(23)
self.assert_equal(md['x'], [42, 23])
# to dict
md = self.storage_class()
md['foo'] = 42
md.add('bar', 1)
md.add('bar', 2)
self.assert_equal(md.to_dict(), {'foo': 42, 'bar': 1})
self.assert_equal(md.to_dict(flat=False), {'foo': [42], 'bar': [1, 2]})
# popitem from empty dict
with self.assert_raises(KeyError):
self.storage_class().popitem()
with self.assert_raises(KeyError):
self.storage_class().popitemlist()
# key errors are of a special type
with self.assert_raises(BadRequestKeyError):
self.storage_class()[42]
# setlist works
md = self.storage_class()
md['foo'] = 42
md.setlist('foo', [1, 2])
self.assert_equal(md.getlist('foo'), [1, 2])
class ImmutableDictBaseTestCase(WerkzeugTestCase):
storage_class = None
def test_follows_dict_interface(self):
cls = self.storage_class
data = {'foo': 1, 'bar': 2, 'baz': 3}
d = cls(data)
self.assert_equal(d['foo'], 1)
self.assert_equal(d['bar'], 2)
self.assert_equal(d['baz'], 3)
self.assert_equal(sorted(d.keys()), ['bar', 'baz', 'foo'])
self.assert_true('foo' in d)
self.assert_true('foox' not in d)
self.assert_equal(len(d), 3)
def test_copies_are_mutable(self):
cls = self.storage_class
immutable = cls({'a': 1})
with self.assert_raises(TypeError):
immutable.pop('a')
mutable = immutable.copy()
mutable.pop('a')
self.assert_true('a' in immutable)
self.assert_true(mutable is not immutable)
self.assert_true(copy(immutable) is immutable)
def test_dict_is_hashable(self):
cls = self.storage_class
immutable = cls({'a': 1, 'b': 2})
immutable2 = cls({'a': 2, 'b': 2})
x = set([immutable])
self.assert_true(immutable in x)
self.assert_true(immutable2 not in x)
x.discard(immutable)
self.assert_true(immutable not in x)
self.assert_true(immutable2 not in x)
x.add(immutable2)
self.assert_true(immutable not in x)
self.assert_true(immutable2 in x)
x.add(immutable)
self.assert_true(immutable in x)
self.assert_true(immutable2 in x)
class ImmutableTypeConversionDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableTypeConversionDict
class ImmutableMultiDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableMultiDict
def test_multidict_is_hashable(self):
cls = self.storage_class
immutable = cls({'a': [1, 2], 'b': 2})
immutable2 = cls({'a': [1], 'b': 2})
x = set([immutable])
self.assert_true(immutable in x)
self.assert_true(immutable2 not in x)
x.discard(immutable)
self.assert_true(immutable not in x)
self.assert_true(immutable2 not in x)
x.add(immutable2)
self.assert_true(immutable not in x)
self.assert_true(immutable2 in x)
x.add(immutable)
self.assert_true(immutable in x)
self.assert_true(immutable2 in x)
class ImmutableDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableDict
class ImmutableOrderedMultiDictTestCase(ImmutableDictBaseTestCase):
storage_class = datastructures.ImmutableOrderedMultiDict
def test_ordered_multidict_is_hashable(self):
a = self.storage_class([('a', 1), ('b', 1), ('a', 2)])
b = self.storage_class([('a', 1), ('a', 2), ('b', 1)])
self.assert_not_equal(hash(a), hash(b))
class MultiDictTestCase(MutableMultiDictBaseTestCase):
storage_class = datastructures.MultiDict
def test_multidict_pop(self):
make_d = lambda: self.storage_class({'foo': [1, 2, 3, 4]})
d = make_d()
self.assert_equal(d.pop('foo'), 1)
assert not d
d = make_d()
self.assert_equal(d.pop('foo', 32), 1)
assert not d
d = make_d()
self.assert_equal(d.pop('foos', 32), 32)
assert d
with self.assert_raises(KeyError):
d.pop('foos')
def test_setlistdefault(self):
md = self.storage_class()
self.assert_equal(md.setlistdefault('u', [-1, -2]), [-1, -2])
self.assert_equal(md.getlist('u'), [-1, -2])
self.assert_equal(md['u'], -1)
def test_iter_interfaces(self):
mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3),
('a', 1), ('a', 3), ('d', 4), ('c', 3)]
md = self.storage_class(mapping)
self.assert_equal(list(zip(md.keys(), md.listvalues())),
list(md.lists()))
self.assert_equal(list(zip(md, iterlistvalues(md))),
list(iterlists(md)))
self.assert_equal(list(zip(iterkeys(md), iterlistvalues(md))),
list(iterlists(md)))
class OrderedMultiDictTestCase(MutableMultiDictBaseTestCase):
storage_class = datastructures.OrderedMultiDict
def test_ordered_interface(self):
cls = self.storage_class
d = cls()
assert not d
d.add('foo', 'bar')
self.assert_equal(len(d), 1)
d.add('foo', 'baz')
self.assert_equal(len(d), 1)
self.assert_equal(list(iteritems(d)), [('foo', 'bar')])
self.assert_equal(list(d), ['foo'])
self.assert_equal(list(iteritems(d, multi=True)),
[('foo', 'bar'), ('foo', 'baz')])
del d['foo']
assert not d
self.assert_equal(len(d), 0)
self.assert_equal(list(d), [])
d.update([('foo', 1), ('foo', 2), ('bar', 42)])
d.add('foo', 3)
self.assert_equal(d.getlist('foo'), [1, 2, 3])
self.assert_equal(d.getlist('bar'), [42])
self.assert_equal(list(iteritems(d)), [('foo', 1), ('bar', 42)])
expected = ['foo', 'bar']
self.assert_sequence_equal(list(d.keys()), expected)
self.assert_sequence_equal(list(d), expected)
self.assert_sequence_equal(list(iterkeys(d)), expected)
self.assert_equal(list(iteritems(d, multi=True)),
[('foo', 1), ('foo', 2), ('bar', 42), ('foo', 3)])
self.assert_equal(len(d), 2)
self.assert_equal(d.pop('foo'), 1)
assert d.pop('blafasel', None) is None
self.assert_equal(d.pop('blafasel', 42), 42)
self.assert_equal(len(d), 1)
self.assert_equal(d.poplist('bar'), [42])
assert not d
        assert d.get('missingkey') is None
d.add('foo', 42)
d.add('foo', 23)
d.add('bar', 2)
d.add('foo', 42)
self.assert_equal(d, datastructures.MultiDict(d))
id = self.storage_class(d)
self.assert_equal(d, id)
d.add('foo', 2)
assert d != id
d.update({'blah': [1, 2, 3]})
self.assert_equal(d['blah'], 1)
self.assert_equal(d.getlist('blah'), [1, 2, 3])
# setlist works
d = self.storage_class()
d['foo'] = 42
d.setlist('foo', [1, 2])
self.assert_equal(d.getlist('foo'), [1, 2])
with self.assert_raises(BadRequestKeyError):
d.pop('missing')
with self.assert_raises(BadRequestKeyError):
d['missing']
# popping
d = self.storage_class()
d.add('foo', 23)
d.add('foo', 42)
d.add('foo', 1)
self.assert_equal(d.popitem(), ('foo', 23))
with self.assert_raises(BadRequestKeyError):
d.popitem()
assert not d
d.add('foo', 23)
d.add('foo', 42)
d.add('foo', 1)
self.assert_equal(d.popitemlist(), ('foo', [23, 42, 1]))
with self.assert_raises(BadRequestKeyError):
d.popitemlist()
def test_iterables(self):
a = datastructures.MultiDict((("key_a", "value_a"),))
b = datastructures.MultiDict((("key_b", "value_b"),))
ab = datastructures.CombinedMultiDict((a,b))
self.assert_equal(sorted(ab.lists()), [('key_a', ['value_a']), ('key_b', ['value_b'])])
self.assert_equal(sorted(ab.listvalues()), [['value_a'], ['value_b']])
self.assert_equal(sorted(ab.keys()), ["key_a", "key_b"])
self.assert_equal(sorted(iterlists(ab)), [('key_a', ['value_a']), ('key_b', ['value_b'])])
self.assert_equal(sorted(iterlistvalues(ab)), [['value_a'], ['value_b']])
self.assert_equal(sorted(iterkeys(ab)), ["key_a", "key_b"])
class CombinedMultiDictTestCase(WerkzeugTestCase):
storage_class = datastructures.CombinedMultiDict
def test_basic_interface(self):
d1 = datastructures.MultiDict([('foo', '1')])
d2 = datastructures.MultiDict([('bar', '2'), ('bar', '3')])
d = self.storage_class([d1, d2])
# lookup
self.assert_equal(d['foo'], '1')
self.assert_equal(d['bar'], '2')
self.assert_equal(d.getlist('bar'), ['2', '3'])
self.assert_equal(sorted(d.items()),
[('bar', '2'), ('foo', '1')])
self.assert_equal(sorted(d.items(multi=True)),
[('bar', '2'), ('bar', '3'), ('foo', '1')])
assert 'missingkey' not in d
assert 'foo' in d
# type lookup
self.assert_equal(d.get('foo', type=int), 1)
self.assert_equal(d.getlist('bar', type=int), [2, 3])
# get key errors for missing stuff
with self.assert_raises(KeyError):
d['missing']
# make sure that they are immutable
with self.assert_raises(TypeError):
d['foo'] = 'blub'
# copies are immutable
d = d.copy()
with self.assert_raises(TypeError):
d['foo'] = 'blub'
# make sure lists merges
md1 = datastructures.MultiDict((("foo", "bar"),))
md2 = datastructures.MultiDict((("foo", "blafasel"),))
x = self.storage_class((md1, md2))
self.assert_equal(list(iterlists(x)), [('foo', ['bar', 'blafasel'])])
class HeadersTestCase(WerkzeugTestCase):
storage_class = datastructures.Headers
def test_basic_interface(self):
headers = self.storage_class()
headers.add('Content-Type', 'text/plain')
headers.add('X-Foo', 'bar')
assert 'x-Foo' in headers
assert 'Content-type' in headers
headers['Content-Type'] = 'foo/bar'
self.assert_equal(headers['Content-Type'], 'foo/bar')
self.assert_equal(len(headers.getlist('Content-Type')), 1)
# list conversion
self.assert_equal(headers.to_wsgi_list(), [
('Content-Type', 'foo/bar'),
('X-Foo', 'bar')
])
self.assert_equal(str(headers), (
"Content-Type: foo/bar\r\n"
"X-Foo: bar\r\n"
"\r\n"))
self.assert_equal(str(self.storage_class()), "\r\n")
# extended add
headers.add('Content-Disposition', 'attachment', filename='foo')
self.assert_equal(headers['Content-Disposition'],
'attachment; filename=foo')
headers.add('x', 'y', z='"')
self.assert_equal(headers['x'], r'y; z="\""')
def test_defaults_and_conversion(self):
# defaults
headers = self.storage_class([
('Content-Type', 'text/plain'),
('X-Foo', 'bar'),
('X-Bar', '1'),
('X-Bar', '2')
])
self.assert_equal(headers.getlist('x-bar'), ['1', '2'])
self.assert_equal(headers.get('x-Bar'), '1')
self.assert_equal(headers.get('Content-Type'), 'text/plain')
self.assert_equal(headers.setdefault('X-Foo', 'nope'), 'bar')
self.assert_equal(headers.setdefault('X-Bar', 'nope'), '1')
self.assert_equal(headers.setdefault('X-Baz', 'quux'), 'quux')
self.assert_equal(headers.setdefault('X-Baz', 'nope'), 'quux')
headers.pop('X-Baz')
# type conversion
self.assert_equal(headers.get('x-bar', type=int), 1)
self.assert_equal(headers.getlist('x-bar', type=int), [1, 2])
# list like operations
self.assert_equal(headers[0], ('Content-Type', 'text/plain'))
self.assert_equal(headers[:1], self.storage_class([('Content-Type', 'text/plain')]))
del headers[:2]
del headers[-1]
self.assert_equal(headers, self.storage_class([('X-Bar', '1')]))
def test_copying(self):
a = self.storage_class([('foo', 'bar')])
b = a.copy()
a.add('foo', 'baz')
self.assert_equal(a.getlist('foo'), ['bar', 'baz'])
self.assert_equal(b.getlist('foo'), ['bar'])
def test_popping(self):
headers = self.storage_class([('a', 1)])
self.assert_equal(headers.pop('a'), 1)
self.assert_equal(headers.pop('b', 2), 2)
with self.assert_raises(KeyError):
headers.pop('c')
def test_set_arguments(self):
a = self.storage_class()
a.set('Content-Disposition', 'useless')
a.set('Content-Disposition', 'attachment', filename='foo')
self.assert_equal(a['Content-Disposition'], 'attachment; filename=foo')
def test_reject_newlines(self):
h = self.storage_class()
for variation in 'foo\nbar', 'foo\r\nbar', 'foo\rbar':
with self.assert_raises(ValueError):
h['foo'] = variation
with self.assert_raises(ValueError):
h.add('foo', variation)
with self.assert_raises(ValueError):
h.add('foo', 'test', option=variation)
with self.assert_raises(ValueError):
h.set('foo', variation)
with self.assert_raises(ValueError):
h.set('foo', 'test', option=variation)
def test_slicing(self):
# there's nothing wrong with these being native strings
# Headers doesn't care about the data types
h = self.storage_class()
h.set('X-Foo-Poo', 'bleh')
h.set('Content-Type', 'application/whocares')
h.set('X-Forwarded-For', '192.168.0.123')
h[:] = [(k, v) for k, v in h if k.startswith(u'X-')]
self.assert_equal(list(h), [
('X-Foo-Poo', 'bleh'),
('X-Forwarded-For', '192.168.0.123')
])
def test_bytes_operations(self):
h = self.storage_class()
h.set('X-Foo-Poo', 'bleh')
h.set('X-Whoops', b'\xff')
self.assert_equal(h.get('x-foo-poo', as_bytes=True), b'bleh')
self.assert_equal(h.get('x-whoops', as_bytes=True), b'\xff')
def test_to_wsgi_list(self):
h = self.storage_class()
h.set(u'Key', u'Value')
for key, value in h.to_wsgi_list():
if PY2:
self.assert_strict_equal(key, b'Key')
self.assert_strict_equal(value, b'Value')
else:
self.assert_strict_equal(key, u'Key')
self.assert_strict_equal(value, u'Value')
class EnvironHeadersTestCase(WerkzeugTestCase):
storage_class = datastructures.EnvironHeaders
def test_basic_interface(self):
# this happens in multiple WSGI servers because they
        # use a very naive way to convert the headers;
broken_env = {
'HTTP_CONTENT_TYPE': 'text/html',
'CONTENT_TYPE': 'text/html',
'HTTP_CONTENT_LENGTH': '0',
'CONTENT_LENGTH': '0',
'HTTP_ACCEPT': '*',
'wsgi.version': (1, 0)
}
headers = self.storage_class(broken_env)
assert headers
self.assert_equal(len(headers), 3)
self.assert_equal(sorted(headers), [
('Accept', '*'),
('Content-Length', '0'),
('Content-Type', 'text/html')
])
assert not self.storage_class({'wsgi.version': (1, 0)})
self.assert_equal(len(self.storage_class({'wsgi.version': (1, 0)})), 0)
def test_return_type_is_unicode(self):
# environ contains native strings; we return unicode
headers = self.storage_class({
'HTTP_FOO': '\xe2\x9c\x93',
'CONTENT_TYPE': 'text/plain',
})
self.assert_equal(headers['Foo'], u"\xe2\x9c\x93")
assert isinstance(headers['Foo'], text_type)
assert isinstance(headers['Content-Type'], text_type)
iter_output = dict(iter(headers))
self.assert_equal(iter_output['Foo'], u"\xe2\x9c\x93")
assert isinstance(iter_output['Foo'], text_type)
assert isinstance(iter_output['Content-Type'], text_type)
def test_bytes_operations(self):
foo_val = '\xff'
h = self.storage_class({
'HTTP_X_FOO': foo_val
})
self.assert_equal(h.get('x-foo', as_bytes=True), b'\xff')
self.assert_equal(h.get('x-foo'), u'\xff')
class HeaderSetTestCase(WerkzeugTestCase):
storage_class = datastructures.HeaderSet
def test_basic_interface(self):
hs = self.storage_class()
hs.add('foo')
hs.add('bar')
assert 'Bar' in hs
self.assert_equal(hs.find('foo'), 0)
self.assert_equal(hs.find('BAR'), 1)
assert hs.find('baz') < 0
hs.discard('missing')
hs.discard('foo')
assert hs.find('foo') < 0
self.assert_equal(hs.find('bar'), 0)
with self.assert_raises(IndexError):
hs.index('missing')
self.assert_equal(hs.index('bar'), 0)
assert hs
hs.clear()
assert not hs
class ImmutableListTestCase(WerkzeugTestCase):
storage_class = datastructures.ImmutableList
def test_list_hashable(self):
t = (1, 2, 3, 4)
l = self.storage_class(t)
self.assert_equal(hash(t), hash(l))
self.assert_not_equal(t, l)
def make_call_asserter(assert_equal_func, func=None):
"""Utility to assert a certain number of function calls.
>>> assert_calls, func = make_call_asserter(self.assert_equal)
>>> with assert_calls(2):
func()
func()
"""
calls = [0]
@contextmanager
def asserter(count, msg=None):
calls[0] = 0
yield
assert_equal_func(calls[0], count, msg)
def wrapped(*args, **kwargs):
calls[0] += 1
if func is not None:
return func(*args, **kwargs)
return asserter, wrapped
class CallbackDictTestCase(WerkzeugTestCase):
storage_class = datastructures.CallbackDict
def test_callback_dict_reads(self):
assert_calls, func = make_call_asserter(self.assert_equal)
initial = {'a': 'foo', 'b': 'bar'}
dct = self.storage_class(initial=initial, on_update=func)
with assert_calls(0, 'callback triggered by read-only method'):
# read-only methods
dct['a']
dct.get('a')
self.assert_raises(KeyError, lambda: dct['x'])
'a' in dct
list(iter(dct))
dct.copy()
with assert_calls(0, 'callback triggered without modification'):
# methods that may write but don't
dct.pop('z', None)
dct.setdefault('a')
def test_callback_dict_writes(self):
assert_calls, func = make_call_asserter(self.assert_equal)
initial = {'a': 'foo', 'b': 'bar'}
dct = self.storage_class(initial=initial, on_update=func)
with assert_calls(8, 'callback not triggered by write method'):
# always-write methods
dct['z'] = 123
dct['z'] = 123 # must trigger again
del dct['z']
dct.pop('b', None)
dct.setdefault('x')
dct.popitem()
dct.update([])
dct.clear()
with assert_calls(0, 'callback triggered by failed del'):
self.assert_raises(KeyError, lambda: dct.__delitem__('x'))
with assert_calls(0, 'callback triggered by failed pop'):
self.assert_raises(KeyError, lambda: dct.pop('x'))
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(MultiDictTestCase))
suite.addTest(unittest.makeSuite(OrderedMultiDictTestCase))
suite.addTest(unittest.makeSuite(CombinedMultiDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableTypeConversionDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableMultiDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableDictTestCase))
suite.addTest(unittest.makeSuite(ImmutableOrderedMultiDictTestCase))
suite.addTest(unittest.makeSuite(HeadersTestCase))
suite.addTest(unittest.makeSuite(EnvironHeadersTestCase))
suite.addTest(unittest.makeSuite(HeaderSetTestCase))
suite.addTest(unittest.makeSuite(NativeItermethodsTestCase))
suite.addTest(unittest.makeSuite(CallbackDictTestCase))
return suite
| mit |
mgit-at/ansible | lib/ansible/modules/net_tools/nios/nios_dns_view.py | 68 | 4192 | #!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_dns_view
version_added: "2.5"
author: "Peter Sprygada (@privateip)"
short_description: Configure Infoblox NIOS DNS views
description:
- Adds and/or removes instances of DNS view objects from
Infoblox NIOS servers. This module manages NIOS C(view) objects
using the Infoblox WAPI interface over REST.
- Updates instances of DNS view object from Infoblox NIOS servers.
requirements:
- infoblox-client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system. User can also update the hostname as it is possible
to pass a dict containing I(new_name), I(old_name). See examples.
required: true
aliases:
- view
network_view:
description:
- Specifies the name of the network view to assign the configured
DNS view to. The network view must already be configured on the
target system.
required: true
default: default
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
required: false
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
required: false
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
required: false
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure a new dns view instance
nios_dns_view:
name: ansible-dns
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update the comment for dns view
nios_dns_view:
name: ansible-dns
comment: this is an example comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove the dns view instance
nios_dns_view:
name: ansible-dns
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update the dns view instance
nios_dns_view:
name: {new_name: ansible-dns-new, old_name: ansible-dns}
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.net_tools.nios.api import WapiModule
from ansible.module_utils.net_tools.nios.api import NIOS_DNS_VIEW
def main():
''' Main entry point for module execution
'''
ib_spec = dict(
name=dict(required=True, aliases=['view'], ib_req=True),
network_view=dict(default='default', ib_req=True),
extattrs=dict(type='dict'),
comment=dict()
)
argument_spec = dict(
provider=dict(required=True),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(ib_spec)
argument_spec.update(WapiModule.provider_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
wapi = WapiModule(module)
result = wapi.run(NIOS_DNS_VIEW, ib_spec)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
tareqalayan/ansible | lib/ansible/modules/network/f5/bigip_monitor_tcp.py | 17 | 17981 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_tcp
short_description: Manages F5 BIG-IP LTM tcp monitors
description: Manages F5 BIG-IP LTM tcp monitors via iControl SOAP API.
version_added: 1.4
options:
name:
description:
- Monitor name.
required: True
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(tcp)
parent on the C(Common) partition.
default: /Common/tcp
send:
description:
- The send string for the monitor call.
receive:
description:
- The receive string for the monitor call.
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
- If this value is an IP address, and the C(type) is C(tcp) (the default),
then a C(port) number must be specified.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
must be specified
- This argument is not supported for TCP Echo types.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create TCP Monitor
bigip_monitor_tcp:
state: present
server: lb.mydomain.com
user: admin
password: secret
name: my_tcp_monitor
type: tcp
send: tcp string to send
receive: tcp string to receive
delegate_to: localhost
- name: Remove TCP Monitor
bigip_monitor_tcp:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_tcp_monitor
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: tcp
send:
description: The new send string for this monitor.
returned: changed
type: string
sample: tcp string to send
receive:
description: The new receive string for this monitor.
returned: changed
type: string
sample: tcp string to receive
ip:
description: The new IP of IP/port definition.
returned: changed
type: string
sample: 10.12.13.14
port:
description: The new port of IP/port definition.
returned: changed
type: string
sample: [email protected]
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'recv': 'receive'
}
api_attributes = [
'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'recv', 'send',
'destination'
]
returnables = [
'parent', 'send', 'receive', 'ip', 'port', 'interval', 'timeout',
'time_until_up'
]
updatables = [
'destination', 'send', 'receive', 'interval', 'timeout', 'time_until_up'
]
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
except Exception:
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
@property
def interval(self):
if self._values['interval'] is None:
return None
        if not 1 <= int(self._values['interval']) <= 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def ip(self):
if self._values['ip'] is None:
return None
try:
if self._values['ip'] in ['*', '0.0.0.0']:
return '*'
result = str(netaddr.IPAddress(self._values['ip']))
return result
except netaddr.core.AddrFormatError:
raise F5ModuleError(
"The provided 'ip' parameter is not an IP address."
)
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def port(self):
if self._values['port'] is None:
return None
elif self._values['port'] == '*':
return '*'
return int(self._values['port'])
@property
def destination(self):
if self.ip is None and self.port is None:
return None
destination = '{0}:{1}'.format(self.ip, self.port)
return destination
@destination.setter
def destination(self, value):
ip, port = value.split(':')
self._values['ip'] = ip
self._values['port'] = port
@property
def type(self):
return 'tcp'
class Changes(Parameters):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
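        # Dispatch: parameters with a matching property below (e.g. 'parent',
        # 'destination', 'interval') get a custom diff; everything else falls
        # back to the plain want-vs-have comparison in __default().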
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def destination(self):
if self.want.ip is None and self.want.port is None:
return None
if self.want.port is None:
self.want.update({'port': self.have.port})
if self.want.ip is None:
self.want.update({'ip': self.have.ip})
if self.want.port in [None, '*'] and self.want.ip != '*':
raise F5ModuleError(
"Specifying an IP address requires that a port number be specified"
)
if self.want.destination != self.have.destination:
return self.want.destination
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Changes(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
changed[k] = change
if changed:
self.changes = Changes(params=changed)
return True
return False
def _announce_deprecations(self):
warnings = []
if self.want:
warnings += self.want._values.get('__warnings', [])
if self.have:
warnings += self.have._values.get('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations()
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
self._set_default_creation_values()
if self.module.check_mode:
return True
self.create_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the monitor.")
return True
def _set_default_creation_values(self):
if self.want.timeout is None:
self.want.update({'timeout': 16})
if self.want.interval is None:
self.want.update({'interval': 5})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.ip is None:
self.want.update({'ip': '*'})
if self.want.port is None:
self.want.update({'port': '*'})
def read_current_from_device(self):
resource = self.client.api.tm.ltm.monitor.tcps.tcp.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return Parameters(params=result)
def exists(self):
result = self.client.api.tm.ltm.monitor.tcps.tcp.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update_on_device(self):
params = self.changes.api_params()
result = self.client.api.tm.ltm.monitor.tcps.tcp.load(
name=self.want.name,
partition=self.want.partition
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.ltm.monitor.tcps.tcp.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def remove_from_device(self):
result = self.client.api.tm.ltm.monitor.tcps.tcp.load(
name=self.want.name,
partition=self.want.partition
)
if result:
result.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/tcp'),
send=dict(),
receive=dict(),
ip=dict(),
port=dict(type='int'),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
if not HAS_NETADDR:
module.fail_json(msg="The python netaddr module is required")
try:
client = F5Client(**module.params)
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
module.exit_json(**results)
except F5ModuleError as ex:
cleanup_tokens(client)
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 |
erikdejonge/youtube-dl | youtube_dl/extractor/ccma.py | 19 | 3704 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
int_or_none,
parse_duration,
parse_iso8601,
parse_resolution,
url_or_none,
)
class CCMAIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?ccma\.cat/(?:[^/]+/)*?(?P<type>video|audio)/(?P<id>\d+)'
_TESTS = [{
'url': 'http://www.ccma.cat/tv3/alacarta/lespot-de-la-marato-de-tv3/lespot-de-la-marato-de-tv3/video/5630208/',
'md5': '7296ca43977c8ea4469e719c609b0871',
'info_dict': {
'id': '5630208',
'ext': 'mp4',
'title': 'L\'espot de La Marató de TV3',
'description': 'md5:f12987f320e2f6e988e9908e4fe97765',
'timestamp': 1470918540,
'upload_date': '20160811',
}
}, {
'url': 'http://www.ccma.cat/catradio/alacarta/programa/el-consell-de-savis-analitza-el-derbi/audio/943685/',
'md5': 'fa3e38f269329a278271276330261425',
'info_dict': {
'id': '943685',
'ext': 'mp3',
'title': 'El Consell de Savis analitza el derbi',
'description': 'md5:e2a3648145f3241cb9c6b4b624033e53',
'upload_date': '20171205',
'timestamp': 1512507300,
}
}]
def _real_extract(self, url):
media_type, media_id = re.match(self._VALID_URL, url).groups()
media = self._download_json(
'http://dinamics.ccma.cat/pvideo/media.jsp', media_id, query={
'media': media_type,
'idint': media_id,
})
formats = []
media_url = media['media']['url']
if isinstance(media_url, list):
for format_ in media_url:
format_url = url_or_none(format_.get('file'))
if not format_url:
continue
label = format_.get('label')
f = parse_resolution(label)
f.update({
'url': format_url,
'format_id': label,
})
formats.append(f)
else:
formats.append({
'url': media_url,
'vcodec': 'none' if media_type == 'audio' else None,
})
self._sort_formats(formats)
informacio = media['informacio']
title = informacio['titol']
durada = informacio.get('durada', {})
duration = int_or_none(durada.get('milisegons'), 1000) or parse_duration(durada.get('text'))
timestamp = parse_iso8601(informacio.get('data_emissio', {}).get('utc'))
subtitles = {}
subtitols = media.get('subtitols', {})
if subtitols:
sub_url = subtitols.get('url')
if sub_url:
subtitles.setdefault(
subtitols.get('iso') or subtitols.get('text') or 'ca', []).append({
'url': sub_url,
})
thumbnails = []
imatges = media.get('imatges', {})
if imatges:
thumbnail_url = imatges.get('url')
if thumbnail_url:
thumbnails = [{
'url': thumbnail_url,
'width': int_or_none(imatges.get('amplada')),
'height': int_or_none(imatges.get('alcada')),
}]
return {
'id': media_id,
'title': title,
'description': clean_html(informacio.get('descripcio')),
'duration': duration,
'timestamp': timestamp,
'thumbnails': thumbnails,
'subtitles': subtitles,
'formats': formats,
}
| unlicense |
openstack/heat | heat/tests/convergence/scenarios/update_replace.py | 2 | 2098 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
c_uuid = None
def store_c_uuid():
global c_uuid
c_uuid = next(iter(reality.resources_by_logical_name('C'))).uuid
def check_c_replaced():
test.assertNotEqual(c_uuid,
next(iter(reality.resources_by_logical_name('C'))).uuid)
test.assertIsNotNone(c_uuid)
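# Note (inferred from this scenario): the '!' prefix on a property name marks
# it as update-replace, so changing A's 'a' attribute forces C -- which feeds
# GetAtt('A', 'a') into '!a' -- to be replaced rather than updated in place;
# check_c_replaced() verifies this through the uuid change.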
example_template = Template({
'A': RsrcDef({'a': 'initial'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(5)
engine.call(verify, example_template)
engine.call(store_c_uuid)
example_template_updated = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
})
engine.update_stack('foo', example_template_updated)
engine.noop(11)
engine.call(verify, example_template_updated)
example_template_long = Template({
'A': RsrcDef({'a': 'updated'}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'!a': GetAtt('A', 'a')}, ['B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', '!a')}, []),
'F': RsrcDef({}, ['D', 'E']),
})
engine.update_stack('foo', example_template_long)
engine.noop(12)
engine.call(verify, example_template_long)
engine.call(check_c_replaced)
engine.delete_stack('foo')
engine.noop(6)
engine.call(verify, Template({}))
| apache-2.0 |
tal-nino/shinken | shinken/objects/brokerlink.py | 13 | 1701 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
from shinken.objects.satellitelink import SatelliteLink, SatelliteLinks
from shinken.property import IntegerProp, StringProp
class BrokerLink(SatelliteLink):
"""TODO: Add some comment about this class for the doc"""
id = 0
my_type = 'broker'
properties = SatelliteLink.properties.copy()
properties.update({
'broker_name': StringProp(fill_brok=['full_status'], to_send=True),
'port': IntegerProp(default=7772, fill_brok=['full_status']),
})
def get_name(self):
return self.broker_name
def register_to_my_realm(self):
self.realm.brokers.append(self)
class BrokerLinks(SatelliteLinks):
"""TODO: Add some comment about this class for the doc"""
name_property = "broker_name"
inner_class = BrokerLink
| agpl-3.0 |
fangxingli/hue | desktop/core/ext-py/Django-1.6.10/django/views/decorators/cache.py | 129 | 2286 | from functools import wraps
from django.utils.decorators import decorator_from_middleware_with_args, available_attrs
from django.utils.cache import patch_cache_control, add_never_cache_headers
from django.middleware.cache import CacheMiddleware
def cache_page(*args, **kwargs):
"""
Decorator for views that tries getting the page from the cache and
populates the cache if the page isn't in the cache yet.
The cache is keyed by the URL and some data from the headers.
Additionally there is the key prefix that is used to distinguish different
cache areas in a multi-site setup. You could use the
sites.get_current_site().domain, for example, as that is unique across a Django
project.
Additionally, all headers from the response's Vary header will be taken
into account on caching -- just like the middleware does.
"""
    # We also add some explicit checks to give better error messages in case people are
# using other ways to call cache_page that no longer work.
if len(args) != 1 or callable(args[0]):
raise TypeError("cache_page has a single mandatory positional argument: timeout")
cache_timeout = args[0]
cache_alias = kwargs.pop('cache', None)
key_prefix = kwargs.pop('key_prefix', None)
if kwargs:
raise TypeError("cache_page has two optional keyword arguments: cache and key_prefix")
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=cache_timeout, cache_alias=cache_alias, key_prefix=key_prefix)
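# Usage sketch (illustrative, not from the original module): cache a view for
# 15 minutes in the default cache backend, under a site-specific key prefix.
#
#     @cache_page(60 * 15, key_prefix='site1')
#     def my_view(request):
#         ...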
def cache_control(**kwargs):
def _cache_controller(viewfunc):
@wraps(viewfunc, assigned=available_attrs(viewfunc))
def _cache_controlled(request, *args, **kw):
response = viewfunc(request, *args, **kw)
patch_cache_control(response, **kwargs)
return response
return _cache_controlled
return _cache_controller
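# Usage sketch (illustrative): the keyword arguments are passed straight to
# patch_cache_control(), e.g. to mark a response publicly cacheable for an
# hour:
#
#     @cache_control(max_age=3600, public=True)
#     def my_view(request):
#         ...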
def never_cache(view_func):
"""
Decorator that adds headers to a response so that it will
never be cached.
"""
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view_func(request, *args, **kwargs):
response = view_func(request, *args, **kwargs)
add_never_cache_headers(response)
return response
return _wrapped_view_func
| apache-2.0 |
commial/miasm | miasm/arch/mep/arch.py | 3 | 66072 | # Toshiba MeP-c4 - miasm architecture definition
# Guillaume Valadon <[email protected]>
from builtins import range
from miasm.core.cpu import *
from miasm.core.utils import Disasm_Exception
from miasm.expression.expression import ExprId, ExprInt, ExprLoc, \
ExprMem, ExprOp, is_expr
from miasm.core.asm_ast import AstId, AstMem
from miasm.arch.mep.regs import *
import miasm.arch.mep.regs as mep_regs_module # will be used to set mn_mep.regs
# Note: pyparsing is used to alter the way special operands are parsed
from pyparsing import Literal, Group, Word, hexnums
# These definitions will help parse dereferencing instructions (i.e. those
# that use parentheses) with pyparsing
LPARENTHESIS = Literal("(")
RPARENTHESIS = Literal(")")
PLUSSIGN = Literal("+")
HEX_INTEGER = str_int_pos | str_int_neg
def ExprInt2SignedString(expr, pos_fmt="%d", neg_fmt="%d", size=None, offset=0):
"""Return the signed string corresponding to an ExprInt
Note: this function is only useful to mimic objdump output"""
# Apply a mask to the integer
if size is None:
mask_length = expr.size
else:
mask_length = size
mask = (1 << mask_length) - 1
value = int(expr) & mask
# Return a signed integer if necessary
if (value >> mask_length - 1) == 1:
value = offset - ((value ^ mask) + 1)
if value < 0:
return "-" + neg_fmt % -value
else:
value += offset
return pos_fmt % value
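# Example of the objdump-style output (using miasm's ExprInt):
#
#   >>> ExprInt2SignedString(ExprInt(0xF0, 8))
#   '-16'
#   >>> ExprInt2SignedString(ExprInt(0x10, 8), pos_fmt="0x%X")
#   '0x10'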
class instruction_mep(instruction):
"""Generic MeP-c4 instruction
Notes:
- this object is used to build internal miasm instructions based
on mnemonics
- it must be implemented !
"""
# Default delay slot
# Note:
# - mandatory for the miasm Machine
delayslot = 0
@staticmethod
def arg2str(expr, pos=None, loc_db=None):
"""Convert mnemonics arguments into readable strings according to the
MeP-c4 architecture manual and their internal types
Notes:
- it must be implemented ! However, a simple 'return str(expr)'
could do the trick.
- it is used to mimic objdump output
Args:
expr: argument as a miasm expression
pos: position index in the arguments list
"""
if isinstance(expr, ExprId) or isinstance(expr, ExprInt):
return str(expr)
elif isinstance(expr, ExprLoc):
if loc_db is not None:
return loc_db.pretty_str(expr.loc_key)
else:
return str(expr)
elif isinstance(expr, ExprMem) and (isinstance(expr.ptr, ExprId) or isinstance(expr.ptr, ExprInt)):
return "(%s)" % expr.ptr
elif isinstance(expr, ExprMem) and isinstance(expr.ptr, ExprOp):
return "0x%X(%s)" % (int(expr.ptr.args[1]), expr.ptr.args[0])
# Raise an exception if the expression type was not processed
message = "instruction_mep.arg2str(): don't know what \
to do with a '%s' instance." % type(expr)
raise Disasm_Exception(message)
def __str__(self):
"""Return the mnemonic as a string.
Note:
- it is not mandatory as the instruction class already implement
it. It used to get rid of the padding between the opcode and the
arguments.
- most of this code is copied from miasm/core/cpu.py
"""
o = "%s" % self.name
if self.name == "SSARB":
# The first operand is displayed in decimal, not in hex
o += " %d" % int(self.args[0])
o += self.arg2str(self.args[1])
elif self.name in ["MOV", "ADD"] and isinstance(self.args[1], ExprInt):
# The second operand is displayed in decimal, not in hex
o += " " + self.arg2str(self.args[0])
o += ", %s" % ExprInt2SignedString(self.args[1])
elif "CPI" in self.name:
# The second operand ends with the '+' sign
o += " " + self.arg2str(self.args[0])
deref_reg_str = self.arg2str(self.args[1])
o += ", %s+)" % deref_reg_str[:-1] # GV: looks ugly
elif self.name[0] in ["S", "L"] and self.name[-3:] in ["CPA", "PM0", "PM1"]:
# The second operand ends with the '+' sign
o += " " + self.arg2str(self.args[0])
deref_reg_str = self.arg2str(self.args[1])
o += ", %s+)" % deref_reg_str[:-1] # GV: looks ugly
# The third operand is displayed in decimal, not in hex
o += ", %s" % ExprInt2SignedString(self.args[2])
elif len(self.args) == 2 and self.name in ["SB", "SH", "LBU", "LB", "LH", "LW"] and \
isinstance(self.args[1], ExprMem) and isinstance(self.args[1].ptr, ExprOp): # Major Opcodes #12
# The second operand is an offset to a register
o += " " + self.arg2str(self.args[0])
o += ", %s" % ExprInt2SignedString(self.args[1].ptr.args[1], "0x%X")
o += "(%s)" % self.arg2str(self.args[1].ptr.args[0])
elif len(self.args) == 2 and self.name in ["SWCP", "LWCP", "SMCP", "LMCP"] \
and isinstance(self.args[1], ExprMem) and isinstance(self.args[1].ptr, ExprOp): # Major Opcodes #12
# The second operand is an offset to a register
o += " " + self.arg2str(self.args[0])
o += ", %s" % ExprInt2SignedString(self.args[1].ptr.args[1])
o += "(%s)" % self.arg2str(self.args[1].ptr.args[0])
elif self.name == "SLL" and isinstance(self.args[1], ExprInt): # Major Opcodes #6
# The second operand is displayed in hex, not in decimal
o += " " + self.arg2str(self.args[0])
o += ", 0x%X" % int(self.args[1])
elif self.name in ["ADD3", "SLT3"] and isinstance(self.args[2], ExprInt):
o += " %s" % self.arg2str(self.args[0])
o += ", %s" % self.arg2str(self.args[1])
# The third operand is displayed in decimal, not in hex
o += ", %s" % ExprInt2SignedString(self.args[2], pos_fmt="0x%X")
elif self.name == "(RI)":
return o
else:
args = []
if self.args:
o += " "
for i, arg in enumerate(self.args):
if not is_expr(arg):
raise ValueError('zarb arg type')
x = self.arg2str(arg, pos=i)
args.append(x)
o += self.gen_args(args)
return o
def breakflow(self):
"""Instructions that stop a basic block."""
if self.name in ["BRA", "BEQZ", "BNEZ", "BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE", "BSR"]:
return True
if self.name in ["JMP", "JSR", "RET"]:
return True
if self.name in ["RETI", "HALT", "SLEEP"]:
return True
return False
def splitflow(self):
"""Instructions that splits a basic block, i.e. the CPU can go somewhere else."""
if self.name in ["BEQZ", "BNEZ", "BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE", "BSR"]:
return True
return False
def dstflow(self):
"""Instructions that explicitly provide the destination."""
if self.name in ["BRA", "BEQZ", "BNEZ", "BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE", "BSR"]:
return True
if self.name in ["JMP"]:
return True
return False
def dstflow2label(self, loc_db):
"""Set the label for the current destination.
Note: it is used at disassembly"""
if self.name == "JMP" and isinstance(self.args[0], ExprId):
# 'JMP RM' does not provide the destination
return
# Compute the correct address
num = self.get_dst_num()
addr = int(self.args[num])
if not self.name == "JMP":
addr += self.offset
# Get a new label at the address
label = loc_db.get_or_create_offset_location(addr)
# Assign the label to the correct instruction argument
self.args[num] = ExprLoc(label, self.args[num].size)
def get_dst_num(self):
"""Get the index of the argument that points to the instruction destination."""
if self.name[-1] == "Z":
num = 1
elif self.name in ["BEQI", "BNEI", "BLTI", "BGEI", "BEQ", "BNE"]:
num = 2
else:
num = 0
return num
def getdstflow(self, loc_db):
"""Get the argument that points to the instruction destination."""
num = self.get_dst_num()
return [self.args[num]]
def is_subcall(self):
"""Instructions used to call sub functions."""
return self.name in ["JSR", "BSR"]
def fixDstOffset(self):
"""Fix/correct the instruction immediate according to the current offset
Note: - it is used at assembly
- code inspired by miasm/arch/mips32/arch.py"""
if self.name == "JMP" and isinstance(self.args[0], ExprInt):
# 'JMP IMMEDIATE' does not need to be fixed
return
# Get the argument that needs to be fixed
if not len(self.args):
return
num = self.get_dst_num()
expr = self.args[num]
# Check that the argument can be fixed
if self.offset is None:
raise ValueError("Symbol not resolved %s" % self.l)
if not isinstance(expr, ExprInt):
return
# Adjust the immediate according to the current instruction offset
off = expr.arg - self.offset
if int(off % 2):
raise ValueError("Strange offset! %r" % off)
self.args[num] = ExprInt(off, 32)
class mep_additional_info(object):
"""Additional MeP instructions information
"""
def __init__(self):
self.except_on_instr = False
class mn_mep(cls_mn):
"""Toshiba MeP-c4 disassembler & assembler
"""
# Define variables that stores information used to disassemble & assemble
# Notes: - these variables are mandatory
# - they could be moved to the cls_mn class
num = 0 # holds the number of mnemonics
all_mn = list() # list of mnenomnics, converted to metamn objects
all_mn_mode = defaultdict(list) # mneomnics, converted to metamn objects
# Note:
# - the key is the mode # GV: what is it ?
# - the data is a list of mnemonics
all_mn_name = defaultdict(list) # mnenomnics strings
# Note:
# - the key is the mnemonic string
# - the data is the corresponding
# metamn object
all_mn_inst = defaultdict(list) # mnemonics objects
# Note:
# - the key is the mnemonic Python class
# - the data is an instantiated object
bintree = dict() # Variable storing internal values used to guess a
# mnemonic during disassembly
# Defines the instruction set that will be used
instruction = instruction_mep
# Python module that stores registers information
regs = mep_regs_module
# Default delay slot
# Note:
# - mandatory for the miasm Machine
delayslot = 0
# Architecture name
name = "mep"
# PC name depending on architecture attributes (here, l or b)
pc = {'l': PC, 'b': PC}
def additional_info(self):
"""Define instruction side effects # GV: not fully understood yet
When used, it must return an object that implements specific
variables, such as except_on_instr.
Notes:
- it must be implemented !
- it could be moved to the cls_mn class
"""
return mep_additional_info()
@classmethod
def gen_modes(cls, subcls, name, bases, dct, fields):
"""Ease populating internal variables used to disassemble & assemble, such
as self.all_mn_mode, self.all_mn_name and self.all_mn_inst
Notes:
- it must be implemented !
- it could be moved to the cls_mn class. All miasm architectures
use the same code
Args:
cls: ?
sublcs:
name: mnemonic name
bases: ?
dct: ?
fields: ?
Returns:
a list of ?
"""
dct["mode"] = None
return [(subcls, name, bases, dct, fields)]
@classmethod
def getmn(cls, name):
"""Get the mnemonic name
Notes:
- it must be implemented !
- it could be moved to the cls_mn class. Most miasm architectures
use the same code
Args:
cls: the mnemonic class
name: the mnemonic string
"""
return name.upper()
@classmethod
def getpc(cls, attrib=None):
""""Return the ExprId that represents the Program Counter.
Notes:
- mandatory for the symbolic execution
- PC is defined in regs.py
Args:
attrib: architecture dependent attributes (here, l or b)
"""
return PC
@classmethod
def getsp(cls, attrib=None):
""""Return the ExprId that represents the Stack Pointer.
Notes:
- mandatory for the symbolic execution
- SP is defined in regs.py
Args:
attrib: architecture dependent attributes (here, l or b)
"""
return SP
@classmethod
def getbits(cls, bitstream, attrib, start, n):
"""Return an integer of n bits at the 'start' offset
Note: code from miasm/arch/mips32/arch.py
"""
# Return zero if zero bits are requested
if not n:
return 0
o = 0 # the returned value
while n:
# Get a byte, the offset is adjusted according to the endianness
offset = start // 8 # the offset in bytes
n_offset = cls.endian_offset(attrib, offset) # the adjusted offset
c = cls.getbytes(bitstream, n_offset, 1)
if not c:
raise IOError
# Extract the bits value
c = ord(c)
r = 8 - start % 8
c &= (1 << r) - 1
l = min(r, n)
c >>= (r - l)
o <<= l
o |= c
n -= l
start += l
return o
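    # Note on getbits(): for a big-endian stream s holding b"\xAB":
    #   getbits(s, "b", start=0, n=4) -> 0xA  (high nibble)
    #   getbits(s, "b", start=4, n=4) -> 0xB  (low nibble)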
@classmethod
def endian_offset(cls, attrib, offset):
"""Adjust the byte offset according to the endianness"""
if attrib == "l": # Little Endian
if offset % 2:
return offset - 1
else:
return offset + 1
elif attrib == "b": # Big Endian
return offset
else:
raise NotImplementedError("Bad MeP endianness")
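    # Note on endian_offset(): in "l" mode byte offsets are swapped within
    # each 16-bit word (0 -> 1, 1 -> 0, 2 -> 3, 3 -> 2); in "b" mode they
    # pass through unchanged.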
def value(self, mode):
"""Adjust the assembled instruction based on the endianness
Note: code inspired by miasm/arch/mips32/arch.py
"""
        # Get the candidates
candidates = super(mn_mep, self).value(mode)
if mode == "l":
# Invert bytes per 16-bits
for i in range(len(candidates)):
tmp = candidates[i][1] + candidates[i][0]
if len(candidates[i]) == 4:
tmp += candidates[i][3] + candidates[i][2]
candidates[i] = tmp
return candidates
elif mode == "b":
return candidates
else:
raise NotImplementedError("Bad MeP endianness (%s)" % mode)
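# Byte-swap sketch for value(): in "l" mode a 2-byte candidate b"\x12\x34"
# becomes b"\x34\x12", and a 4-byte candidate b"\x12\x34\x56\x78" becomes
# b"\x34\x12\x78\x56"; "b" mode returns the candidates untouched.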
def addop(name, fields, args=None, alias=False):
"""Dynamically create the "name" object
Notes:
- it could be moved to a generic function such as:
addop(name, fields, cls_mn, args=None, alias=False).
- most architectures use the same code
Args:
name: the mnemonic name
fields: used to fill the object.__dict__'fields' attribute # GV: not understood yet
args: used to fill the object.__dict__'fields' attribute # GV: not understood yet
alias: used to fill the object.__dict__'fields' attribute # GV: not understood yet
"""
namespace = {"fields": fields, "alias": alias}
if args is not None:
namespace["args"] = args
# Dynamically create the "name" object
type(name, (mn_mep,), namespace)
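# For instance, addop("MOV", [bs("0000"), reg04, reg04, bs("0000")]) is
# roughly equivalent to writing:
#
#   class MOV(mn_mep):
#       fields = [bs("0000"), reg04, reg04, bs("0000")]
#       alias = False
#
# type() is used so the mnemonic table below stays declarative.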
# Define specific operand parsers & converters
def deref2expr(s, l, parse_results):
"""Convert a parsed dereferenced register to an ExprMem"""
# Only use the first results
parse_results = parse_results[0]
if type(parse_results[0]) == AstInt and isinstance(parse_results[2], AstId):
return AstMem(parse_results[2] + parse_results[0], 32) # 1 == "(" and 3 == ")"
elif type(parse_results[0]) == int and isinstance(parse_results[2], AstId):
return AstMem(parse_results[2] + AstOp('-', AstInt(-parse_results[0])), 32) # 1 == "(" and 3 == ")"
else:
return AstMem(parse_results[1], 32) # 0 == "(" and 2 == ")"
deref_reg_parser = Group(LPARENTHESIS + gpr_infos.parser + RPARENTHESIS).setParseAction(deref2expr)
deref_inc_reg_parser = Group(LPARENTHESIS + gpr_infos.parser + PLUSSIGN + RPARENTHESIS).setParseAction(deref2expr)
abs24_deref_parser = Group(LPARENTHESIS + HEX_INTEGER + RPARENTHESIS).setParseAction(deref2expr)
offset_deref_reg_parser = Group(HEX_INTEGER + LPARENTHESIS + gpr_infos.parser + RPARENTHESIS).setParseAction(deref2expr)
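# Parsing sketch (assuming gpr_infos recognizes "R1"); the results are
# roughly:
#   deref_reg_parser.parseString("(R1)")           -> AstMem(R1, 32)
#   offset_deref_reg_parser.parseString("0x8(R1)") -> AstMem(R1 + 0x8, 32)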
# Define registers decoders and encoders
class mep_arg(m_arg):
def asm_ast_to_expr(self, arg, loc_db):
"""Convert AST to expressions
Note: - code inspired by miasm/arch/mips32/arch.py"""
if isinstance(arg, AstId):
if isinstance(arg.name, ExprId):
return arg.name
if isinstance(arg.name, str) and arg.name in gpr_names:
return None # GV: why?
loc_key = loc_db.get_or_create_name_location(arg.name.encode())
return ExprLoc(loc_key, 32)
elif isinstance(arg, AstMem):
addr = self.asm_ast_to_expr(arg.ptr, loc_db)
if addr is None:
return None
return ExprMem(addr, 32)
elif isinstance(arg, AstInt):
return ExprInt(arg.value, 32)
elif isinstance(arg, AstOp):
args = [self.asm_ast_to_expr(tmp, loc_db) for tmp in arg.args]
if None in args:
return None
return ExprOp(arg.op, *args)
# Raise an exception if the argument was not processed
message = "mep_arg.asm_ast_to_expr(): don't know what \
to do with a '%s' instance." % type(arg)
raise Exception(message)
class mep_reg(reg_noarg, mep_arg):
"""Generic Toshiba MeP-c4 register
Note:
- the register size will be set using bs()
"""
reg_info = gpr_infos # the list of MeP-c4 registers defined in regs.py
parser = reg_info.parser # GV: not understood yet
class mep_deref_reg(mep_arg):
"""Generic Toshiba MeP-c4 dereferenced register
Note:
- the arg2str() method could be defined to change the output string
"""
parser = deref_reg_parser
def decode(self, v):
"""Transform the decoded value to a ExprMem(ExprId()) expression"""
r = gpr_infos.expr[v] # get the ExprId, i.e. the register expression
self.expr = ExprMem(r, 32)
return True
def encode(self):
"""Ensure that we have a ExprMem(ExprId()) expression, and return the
register value."""
if not isinstance(self.expr, ExprMem):
return False
if not isinstance(self.expr.ptr, ExprId):
return False
# Get the ExprId index, i.e. its value
self.value = gpr_exprs.index(self.expr.ptr)
return True
class mep_reg_sp(mep_reg):
"""Dummy Toshiba MeP-c4 register that represents SP. It is used in
instructions that implicitly use SP, such as ADD3.
"""
implicit_reg = SP
def decode(self, v):
"""Always return 'implicit_reg."""
self.expr = self.implicit_reg
return True
def encode(self):
"""Do nothing"""
return True
class mep_reg_tp(mep_reg_sp):
"""Dummy Toshiba MeP-c4 register that represents TP.
"""
implicit_reg = TP
class mep_deref_reg_offset(mep_arg):
"""Toshiba MeP-c4 dereferenced register that represents SP, plus an
offset.
"""
parser = offset_deref_reg_parser
def decode(self, v):
"""Modify the decoded value using the previously decoded
register id.
"""
# Apply the immediate mask
se = sign_ext(v & 0xFFFF, 16, 32) # GV: might not belong here
int_id = ExprInt(se, 32)
# Get the register expression
reg_id = gpr_infos.expr[self.parent.reg04_deref.value]
# Build the internal expression
self.expr = ExprMem(reg_id + int_id, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in reg04_deref.
"""
# Verify the expression
if not isinstance(self.expr, ExprMem):
return False
if not isinstance(self.expr.ptr, ExprOp):
return False
        # Get the integer and mask it to 16 bits
v = int(self.expr.ptr.args[1]) & 0xFFFF
# Encode the values
self.parent.reg04_deref.value = gpr_exprs.index(self.expr.ptr.args[0])
self.value = v & 0xFFFF
return True
class mep_deref_sp_offset(mep_deref_reg):
"""Dummy Toshiba MeP-c4 dereferenced register that represents SP, plus an
offset.
Note: it is as generic as possible to ease its use in different instructions
"""
implicit_reg = SP
parser = offset_deref_reg_parser
def decode(self, v):
"""Modify the decoded value using the previously decoded
immediate.
"""
immediate = None
if getattr(self.parent, "imm7_align4", False):
# Apply the immediate mask
v = self.parent.imm7_align4.value & 0x1F
# Shift value such as:
# imm7=iii_ii||00
immediate = v << 2
elif getattr(self.parent, "imm7", False):
# Apply the immediate mask
immediate = self.parent.imm7.value & 0x7F
elif getattr(self.parent, "disp7_align2", False):
# Apply the immediate mask
disp7_align2 = self.parent.disp7_align2.value & 0x3F
# Shift value such as:
# disp7 = ddd_ddd||0
immediate = disp7_align2 << 1
if immediate is not None:
self.expr = ExprMem(self.implicit_reg + ExprInt(immediate, 32), 32)
return True
else:
return False
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in a parent immediate.
"""
# Verify the expression
if not isinstance(self.expr, ExprMem):
return False
if not isinstance(self.expr.ptr, ExprOp):
return False
if self.expr.ptr.args[0] != self.implicit_reg:
return False
if getattr(self.parent, "imm7_align4", False):
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1].arg)
            if v >= 0x80:
return False
# Encode the value
self.parent.imm7_align4.value = v >> 2
return True
elif getattr(self.parent, "imm7", False):
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1].arg)
            if v >= 0x80:
return False
# Encode the value
self.parent.imm7.value = v
return True
elif getattr(self.parent, "disp7_align2", False):
# Get the integer and check the upper bound
v = int(self.expr.ptr.args[1].arg)
            if v >= 0x80:
return False
# Encode the value
self.parent.disp7_align2.value = v >> 1
return True
return False
class mep_deref_tp_offset(mep_deref_sp_offset):
"""Dummy Toshiba MeP-c4 dereferenced register that represents TP, plus an
offset.
"""
implicit_reg = TP
class mep_copro_reg(reg_noarg, mep_arg):
"""Generic Toshiba MeP-c4 coprocessor register
"""
reg_info = copro_gpr_infos # the list of MeP-c4 coprocessor registers defined in regs.py
parser = reg_info.parser # GV: not understood yet
class mep_copro_reg_split(mep_copro_reg):
"""Generic Toshiba MeP-c4 coprocessor register encode into different fields
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm4_noarg.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# CRn=NNnnnn
crn = (v << 4) + (self.parent.imm4.value & 0xF)
# Build the internal expression
self.expr = ExprId("C%d" % crn, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm4_noarg.
"""
if not isinstance(self.expr, ExprId):
return False
# Get the register and check the upper bound
reg_name = self.expr.name
if reg_name[0] != "C":
return False
reg_value = copro_gpr_names.index(reg_name)
if reg_value > 0x3f:
return False
# Encode the value into two parts
self.parent.imm4.value = (reg_value & 0xF)
self.value = (reg_value >> 4) & 0x3
return True
class mep_deref_inc_reg(mep_deref_reg):
"""Generic Toshiba MeP-c4 coprocess dereferenced & incremented register
"""
parser = deref_inc_reg_parser
# Immediate decoders and encoders
class mep_int32_noarg(int32_noarg):
"""Generic Toshiba MeP-c4 signed immediate
Note: encode() is copied from int32_noarg.encode() and modified to allow
small (< 32 bits) signed immediate to be manipulated.
"""
def encode(self):
if not isinstance(self.expr, ExprInt):
return False
v = int(self.expr)
# Note: the following lines were commented on purpose
#if sign_ext(v & self.lmask, self.l, self.intsize) != v:
# return False
v = self.encodeval(v & self.lmask)
self.value = v & self.lmask
return True
class mep_imm(imm_noarg, mep_arg):
"""Generic Toshiba MeP-c4 immediate
Note:
- the immediate size will be set using bs()
"""
parser = base_expr
class mep_imm6(mep_int32_noarg):
"""Toshiba MeP-c4 signed 6 bits immediate."""
parser = base_expr
intsize = 6
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(sign_ext(x, self.l, 32), 32)
class mep_imm8(mep_int32_noarg):
"""Toshiba MeP-c4 signed 8 bits immediate."""
parser = base_expr
intsize = 8
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(sign_ext(x, self.l, 32), 32)
class mep_imm16(mep_int32_noarg):
"""Toshiba MeP-c4 16 bits immediate."""
parser = base_expr
intsize = 16
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(x, 32)
class mep_imm16_signed(mep_int32_noarg):
"""Toshiba MeP-c4 signed 16 bits immediate."""
parser = base_expr
intsize = 16
intmask = (1 << intsize) - 1
int2expr = lambda self, x: ExprInt(sign_ext(x, self.l, 32), 32)
class mep_target24(mep_imm):
"""Toshiba MeP-c4 target24 immediate, as used in JMP
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm7.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# target24=tttt_tttt_tttt_tttt||TTT_TTTT||0
target24 = (v << 8) + ((self.parent.imm7.value & 0x7F) << 1)
# Build the internal expression
self.expr = ExprInt(target24, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm7.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and apply a mask
v = int(self.expr) & 0x00FFFFFF
# Encode the value into two parts
self.parent.imm7.value = (v & 0xFF) >> 1
self.value = v >> 8
return True
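# Split sketch for target24 = 0x123456:
#   encode(): parent.imm7.value = (0x56 >> 1) = 0x2B ; self.value = 0x1234
#   decode(): (0x1234 << 8) + (0x2B << 1) = 0x123456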
class mep_target24_signed(mep_target24):
"""Toshiba MeP-c4 target24 signed immediate, as used in BSR
"""
def decode(self, v):
"""Perform sign extension
"""
mep_target24.decode(self, v)
v = int(self.expr)
self.expr = ExprInt(sign_ext(v, 24, 32), 32)
return True
class mep_code20(mep_imm):
"""Toshiba MeP-c4 code20 immediate, as used in DSP1
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm4_noarg.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# code20=mmmm_cccc_cccc_cccc_cccc
        code20 = v + ((self.parent.imm4.value & 0xF) << 16)
# Build the internal expression
self.expr = ExprInt(code20, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm4_noarg.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr.arg)
        if v > 0xFFFFF:
return False
# Encode the value into two parts
        self.parent.imm4.value = (v >> 16) & 0xF
self.value = v
return True
class mep_code24(mep_imm):
"""Toshiba MeP-c4 code24 immediate, as used in CP
"""
def decode(self, v):
"""Modify the decoded value using the previously decoded imm8_CCCC_CCCC.
"""
# Shift values such as:
# code24=CCCC_CCCC||cccc_cccc_cccc_cccc
code24 = v + ((self.parent.imm8_CCCC_CCCC.value & 0xFF) << 16)
# Build the internal expression
self.expr = ExprInt(code24, 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm8_CCCC_CCCC.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr.arg)
if v > 0xFFFFFF:
return False
# Encode the value into two parts
self.parent.imm8_CCCC_CCCC.value = ((v >> 16) & 0xFF)
self.value = v & 0xFFFF
return True
class mep_imm7_align4(mep_imm):
"""Toshiba MeP-c4 imm7.align4 immediate, as used in Major #4 opcodes
"""
def decode(self, v):
"""Modify the decoded value.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift value such as:
# imm7=iii_ii||00
imm7_align4 = v << 2
# Build the internal expression
self.expr = ExprInt(imm7_align4, 32)
return True
def encode(self):
"""Modify the encoded value.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr)
        if v >= 0x80:
return False
# Encode the value
self.value = v >> 2
return True
class mep_imm5_Iiiii (mep_imm):
"""Toshiba MeP-c4 imm5 immediate, as used in STC & LDC. It encodes a
control/special register.
"""
reg_info = csr_infos # the list of MeP-c4 control/special registers defined in regs.py
parser = reg_info.parser # GV: not understood yet
def decode(self, v):
"""Modify the decoded value using the previously decoded imm4_iiii
"""
# Apply the immediate mask
I = v & self.lmask
# Shift values such as:
# imm5=I||iiii
imm5 = (I << 4) + (self.parent.imm4_iiii.value & 0xF)
# Build the internal register expression
self.expr = ExprId(csr_names[imm5], 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm4_iiii.
"""
if not isinstance(self.expr, ExprId):
return False
# Get the register number and check the upper bound
v = csr_names.index(self.expr.name)
if v > 0x1F:
return False
# Encode the value into two parts
self.parent.imm4_iiii.value = v & 0xF # iiii
self.value = (v >> 4) & 0b1 # I
return True
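# Split sketch: control register index 0b10110 (22) is stored as I=1 and
# iiii=0b0110; decode() rebuilds imm5 = (I << 4) | iiii and resolves it
# through csr_names.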
class mep_disp7_align2(mep_imm):
"""Toshiba MeP-c4 disp7.align2 immediate, as used in Major #8 opcodes
"""
upper_bound = 0x7F
bits_shift = 1
def decode(self, v):
"""Modify the decoded value.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift value such as:
# disp7 = ddd_ddd||0
disp7_align2 = (v << self.bits_shift)
# Sign extension
disp7_align2 = sign_ext(disp7_align2, self.l + self.bits_shift, 32)
# Build the internal expression
self.expr = ExprInt(disp7_align2, 32)
return True
def encode(self):
"""Modify the encoded value.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer
v = int(self.expr) & self.upper_bound
# Encode the value
        self.value = (v & self.upper_bound) >> self.bits_shift
return True
class mep_disp8_align2(mep_disp7_align2):
upper_bound = 0xFF
class mep_disp8_align4(mep_disp7_align2):
upper_bound = 0xFF
bits_shift = 2
class mep_imm8_align8(mep_disp7_align2):
upper_bound = 0xFF
bits_shift = 3
class mep_disp12_align2(mep_disp7_align2):
upper_bound = 0xFFF
class mep_disp12_align2_signed(mep_disp12_align2):
def decode(self, v):
"""Perform sign extension.
"""
mep_disp12_align2.decode(self, v)
v = int(self.expr)
self.expr = ExprInt(sign_ext(v, 12, 32), 32)
return True
class mep_disp17(mep_disp7_align2):
upper_bound = 0x1FFFF
class mep_imm24(mep_imm):
"""Toshiba MeP-c4 imm24 immediate, as used in MOVU
"""
def decode(self, v):
"""Modify the decoded value.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# imm24=iiii_iiii_iiii_iiii||IIII_IIIII
imm24 = ((v & 0xFFFF) << 8) + ((v & 0xFF0000) >> 16)
# Build the internal expression
self.expr = ExprInt(imm24, 32)
return True
def encode(self):
"""Modify the encoded value.
"""
if not isinstance(self.expr, ExprInt):
return False
# Get the integer and check the upper bound
v = int(self.expr)
if v > 0xFFFFFF:
return False
# Encode the value
self.value = ((v & 0xFFFF00) >> 8) + ((v & 0xFF) << 16)
return True
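# Byte-swap sketch for imm24 = 0x123456:
#   encode(): ((0x123456 & 0xFFFF00) >> 8) + ((0x123456 & 0xFF) << 16) = 0x561234
#   decode(): ((0x561234 & 0xFFFF) << 8) + ((0x561234 & 0xFF0000) >> 16) = 0x123456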
class mep_abs24(mep_imm):
"""Toshiba MeP-c4 abs24 immediate
"""
parser = abs24_deref_parser
def decode(self, v):
"""Modify the decoded value using the previously decoded imm6.
"""
# Apply the immediate mask
v = v & self.lmask
# Shift values such as:
# abs24=dddd_dddd_dddd_dddd||DDDD_DD||00
abs24 = (v << 8) + ((self.parent.imm6.value & 0x3F) << 2)
# Build the internal expression
self.expr = ExprMem(ExprInt(abs24, 32), 32)
return True
def encode(self):
"""Modify the encoded value. One part is stored in this object, and
the other one in imm6.
"""
if not (isinstance(self.expr, ExprMem) and isinstance(self.expr.ptr, ExprInt)):
return False
# Get the integer and check the upper bound
v = int(self.expr.ptr)
if v > 0xffffff:
return False
# Encode the value into two parts
self.parent.imm6.value = (v & 0xFF) >> 2
self.value = v >> 8
return True
# Define MeP-c4 assembly operands
reg04 = bs(l=4, # length in bits
cls=(mep_reg, )) # class implementing decoding & encoding
reg04_l = bs(l=4, cls=(mep_reg, ))
reg04_m = bs(l=4, cls=(mep_reg, ))
reg04_n = bs(l=4, cls=(mep_reg, ))
reg00 = bs(l=0, cls=(mep_reg, ))
reg00_sp = bs(l=0, cls=(mep_reg_sp, ))
reg00_tp = bs(l=0, cls=(mep_reg_tp, ))
reg00_deref_sp = bs(l=0, cls=(mep_deref_sp_offset, ))
reg00_deref_tp = bs(l=0, cls=(mep_deref_tp_offset, ))
reg03 = bs(l=3, cls=(mep_reg, ))
reg04_deref = bs(l=4, cls=(mep_deref_reg,))
reg04_deref_noarg = bs(l=4, fname="reg04_deref")
reg04_inc_deref = bs(l=4, cls=(mep_deref_inc_reg,))
copro_reg04 = bs(l=4, cls=(mep_copro_reg,))
copro_reg05 = bs(l=1, cls=(mep_copro_reg_split,))
copro_reg06 = bs(l=2, cls=(mep_copro_reg_split,))
disp2 = bs(l=2, cls=(mep_imm, ))
imm2 = disp2
imm3 = bs(l=3, cls=(mep_imm, ))
imm4 = bs(l=4, cls=(mep_imm, ))
imm4_noarg = bs(l=4, fname="imm4")
imm4_iiii_noarg = bs(l=4, fname="imm4_iiii")
imm5 = bs(l=5, cls=(mep_imm, ))
imm5_Iiiii = bs(l=1, cls=(mep_imm5_Iiiii, )) # it is not an immediate, but a
# control/special register.
imm6 = bs(l=6, cls=(mep_imm6, mep_arg))
imm6_noarg = bs(l=6, fname="imm6")
imm7 = bs(l=7, cls=(mep_imm, ))
imm7_noarg = bs(l=7, fname="imm7") # Note:
# - will be decoded as a 7 bits immediate
# - fname is used to set the operand name
# used in mep_target24 to merge operands
# values. By default, the bs class fills
# fname with an hex string compute from
# arguments passed to __init__
imm7_align4 = bs(l=5, cls=(mep_imm7_align4,))
imm7_align4_noarg = bs(l=5, fname="imm7_align4")
disp7_align2 = bs(l=6, cls=(mep_disp7_align2,))
disp7_align2_noarg = bs(l=6, fname="disp7_align2")
imm8 = bs(l=8, cls=(mep_imm8, mep_arg))
imm8_noarg = bs(l=8, fname="imm8_CCCC_CCCC")
disp8 = bs(l=7, cls=(mep_disp8_align2, ))
imm8_align2 = bs(l=7, cls=(mep_disp8_align2, ))
imm8_align4 = bs(l=6, cls=(mep_disp8_align4, ))
imm8_align8 = bs(l=5, cls=(mep_imm8_align8, ))
imm12 = bs(l=12, cls=(mep_imm, ))
disp12_signed = bs(l=11, cls=(mep_disp12_align2_signed, ))
imm16 = bs(l=16, cls=(mep_imm16, mep_arg))
imm16_signed = bs(l=16, cls=(mep_imm16_signed, mep_arg))
disp16_reg_deref = bs(l=16, cls=(mep_deref_reg_offset,))
disp17 = bs(l=16, cls=(mep_disp17, ))
imm18 = bs(l=18, cls=(mep_imm, ))  # 4 + 8 + 2 + 18 = 32 bits total
imm_code20 = bs(l=16, cls=(mep_code20, ))
imm24 = bs(l=24, cls=(mep_imm24, ))
imm_target24 = bs(l=16, cls=(mep_target24, ))
imm_target24_signed = bs(l=16, cls=(mep_target24_signed, ))
imm_code24 = bs(l=16, cls=(mep_code24, ))
abs24 = bs(l=16, cls=(mep_abs24, ))
# MeP-c4 mnemonics objects
### <Major Opcode #0>
# MOV Rn,Rm - 0000_nnnn_mmmm_0000
addop("MOV", [bs("0000"), reg04, reg04, bs("0000")])
# NEG Rn,Rm - 0000_nnnn_mmmm_0001
addop("NEG", [bs("0000"), reg04, reg04, bs("0001")])
# SLT3 R0,Rn,Rm - 0000_nnnn_mmmm_0010
addop("SLT3", [bs("0000"), reg00, reg04, reg04, bs("0010")])
# SLTU3 R0,Rn,Rm - 0000_nnnn_mmmm_0011
addop("SLTU3", [bs("0000"), reg00, reg04, reg04, bs("0011")])
# SUB Rn,Rm - 0000_nnnn_mmmm_0100
addop("SUB", [bs("0000"), reg04, reg04, bs("0100")])
# SBVCK3 R0,Rn,Rm - 0000_nnnn_mmmm_0101
addop("SBVCK3", [bs("0000"), reg00, reg04, reg04, bs("0101")])
# (RI) - 0000_xxxx_xxxx_0110
addop("(RI)", [bs("0000"), reg04, reg04, bs("0110")])
# ADVCK3 R0,Rn,Rm - 0000_nnnn_mmmm_0111
addop("ADVCK3", [bs("0000"), reg00, reg04, reg04, bs("0111")])
# SB Rn,(Rm) - 0000_nnnn_mmmm_1000
addop("SB", [bs("0000"), reg04, reg04_deref, bs("1000")])
# SH Rn,(Rm) - 0000_nnnn_mmmm_1001
addop("SH", [bs("0000"), reg04, reg04_deref, bs("1001")])
# SW Rn,(Rm) - 0000_nnnn_mmmm_1010
addop("SW", [bs("0000"), reg04, reg04_deref, bs("1010")])
# LBU Rn,(Rm) - 0000_nnnn_mmmm_1011
addop("LBU", [bs("0000"), reg04, reg04_deref, bs("1011")])
# LB Rn,(Rm) - 0000_nnnn_mmmm_1100
addop("LB", [bs("0000"), reg04, reg04_deref, bs("1100")])
# LH Rn,(Rm) - 0000_nnnn_mmmm_1101
addop("LH", [bs("0000"), reg04, reg04_deref, bs("1101")])
# LW Rn,(Rm) - 0000_nnnn_mmmm_1110
addop("LW", [bs("0000"), reg04, reg04_deref, bs("1110")])
# LHU Rn,(Rm) - 0000_nnnn_mmmm_1111
addop("LHU", [bs("0000"), reg04, reg04_deref, bs("1111")])
### <Major Opcode #1>
# OR Rn,Rm - 0001_nnnn_mmmm_0000
addop("OR", [bs("0001"), reg04, reg04, bs("0000")])
# AND Rn,Rm - 0001_nnnn_mmmm_0001
addop("AND", [bs("0001"), reg04, reg04, bs("0001")])
# XOR Rn,Rm - 0001_nnnn_mmmm_0010
addop("XOR", [bs("0001"), reg04, reg04, bs("0010")])
# NOR Rn,Rm - 0001_nnnn_mmmm_0011
addop("NOR", [bs("0001"), reg04, reg04, bs("0011")])
# MUL Rn,Rm - 0001_nnnn_mmmm_0100
addop("MUL", [bs("0001"), reg04, reg04, bs("0100")])
# MULU Rn,Rm - 0001_nnnn_mmmm_0101
addop("MULU", [bs("0001"), reg04, reg04, bs("0101")])
# MULR Rn,Rm - 0001_nnnn_mmmm_0110
addop("MULR", [bs("0001"), reg04, reg04, bs("0110")])
# MULRU Rn,Rm - 0001_nnnn_mmmm_0111
addop("MULRU", [bs("0001"), reg04, reg04, bs("0111")])
# DIV Rn,Rm - 0001_nnnn_mmmm_1000
addop("DIV", [bs("0001"), reg04, reg04, bs("1000")])
# DIVU Rn,Rm - 0001_nnnn_mmmm_1001
addop("DIVU", [bs("0001"), reg04, reg04, bs("1001")])
# (RI) - 0001_xxxx_xxxx_1010
addop("(RI)", [bs("0001"), reg04, reg04, bs("1010")])
# (RI) - 0001_xxxx_xxxx_1011
addop("(RI)", [bs("0001"), reg04, reg04, bs("1011")])
# SSARB disp2(Rm) - 0001_00dd_mmmm_1100
addop("SSARB", [bs("000100"), disp2, reg04_deref, bs("1100")])
# EXTB Rn - 0001_nnnn_0000_1101
addop("EXTB", [bs("0001"), reg04, bs("00001101")])
# EXTH Rn - 0001_nnnn_0010_1101
addop("EXTH", [bs("0001"), reg04, bs("00101101")])
# EXTUB Rn - 0001_nnnn_1000_1101
addop("EXTUB", [bs("0001"), reg04, bs("10001101")])
# EXTUH Rn - 0001_nnnn_1010_1101
addop("EXTUH", [bs("0001"), reg04, bs("10101101")])
# JMP Rm - 0001_0000_mmmm_1110
addop("JMP", [bs("00010000"), reg04, bs("1110")])
# JSR Rm - 0001_0000_mmmm_1111
addop("JSR", [bs("00010000"), reg04, bs("1111")])
# JSRV Rm - 0001_1000_mmmm_1111
addop("JSRV", [bs("00011000"), reg04, bs("1111")])
### <Major Opcode #2>
# BSETM (Rm),imm3 - 0010_0iii_mmmm_0000
addop("BSETM", [bs("00100"), imm3, reg04_deref, bs("0000")], [reg04_deref, imm3])
# BCLRM (Rn),imm3 - 0010_0iii_mmmm_0001
addop("BCLRM", [bs("00100"), imm3, reg04_deref, bs("0001")], [reg04_deref, imm3])
# BNOTM (Rm),imm3 - 0010_0iii_mmmm_0010
addop("BNOTM", [bs("00100"), imm3, reg04_deref, bs("0010")], [reg04_deref, imm3])
# BTSTM R0,(Rm),imm3 - 0010_0iii_mmmm_0011
addop("BTSTM", [bs("00100"), reg00, imm3, reg04_deref, bs("0011")], [reg00, reg04_deref, imm3])
# TAS Rn,(Rm) - 0010_nnnn_mmmm_0100
addop("TAS", [bs("0010"), reg04, reg04_deref, bs("0100")])
# (RI) - 0010_xxxx_xxxx_0101
addop("(RI)", [bs("0010"), reg04, reg04, bs("0101")])
# SL1AD3 R0,Rn,Rm - 0010_nnnn_mmmm_0110
addop("SL1AD3", [bs("0010"), reg00, reg04, reg04, bs("0110")])
# SL2AD3 R0,Rn,Rm - 0010_nnnn_mmmm_0111
addop("SL2AD3", [bs("0010"), reg00, reg04, reg04, bs("0111")])
# (RI) - 0010_xxxx_xxxx_1000
addop("(RI)", [bs("0010"), reg04, reg04, bs("1000")])
# (RI) - 0010_xxxx_xxxx_1001
addop("(RI)", [bs("0010"), reg04, reg04, bs("1001")])
# (RI) - 0010_xxxx_xxxx_1010
addop("(RI)", [bs("0010"), reg04, reg04, bs("1010")])
# (RI) - 0010_xxxx_xxxx_1011
addop("(RI)", [bs("0010"), reg04, reg04, bs("1011")])
# SRL Rn,Rm - 0010_nnnn_mmmm_1100
addop("SRL", [bs("0010"), reg04, reg04, bs("1100")])
# SRA Rn,Rm - 0010_nnnn_mmmm_1101
addop("SRA", [bs("0010"), reg04, reg04, bs("1101")])
# SLL Rn,Rm - 0010_nnnn_mmmm_1110
addop("SLL", [bs("0010"), reg04, reg04, bs("1110")])
# FSFT Rn,Rm - 0010_nnnn_mmmm_1111
addop("FSFT", [bs("0010"), reg04, reg04, bs("1111")])
### <Major Opcode #3>
# SWCPI CRn,(Rm+) - 0011_nnnn_mmmm_0000
addop("SWCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0000")])
# LWCPI CRn,(Rm+) - 0011_nnnn_mmmm_0001
addop("LWCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0001")])
# SMCPI CRn,(Rm+) - 0011_nnnn_mmmm_0010
addop("SMCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0010")])
# LMCPI CRn,(Rm+) - 0011_nnnn_mmmm_0011
addop("LMCPI", [bs("0011"), copro_reg04, reg04_inc_deref, bs("0011")])
# SWCP CRn,(Rm) - 0011_nnnn_mmmm_1000
addop("SWCP", [bs("0011"), copro_reg04, reg04_deref, bs("1000")])
# LWCP CRn,(Rm) - 0011_nnnn_mmmm_1001
addop("LWCP", [bs("0011"), copro_reg04, reg04_deref, bs("1001")])
# SMCP CRn,(Rm) - 0011_nnnn_mmmm_1010
addop("SMCP", [bs("0011"), copro_reg04, reg04_deref, bs("1010")])
# LMCP CRn,(Rm) - 0011_nnnn_mmmm_1011
addop("LMCP", [bs("0011"), copro_reg04, reg04_deref, bs("1011")])
### <Major Opcode #4>
# ADD3 Rn,SP,imm7.align4 - 0100_nnnn_0iii_ii00
addop("ADD3", [bs("0100"), reg04, reg00_sp, bs("0"), imm7_align4, bs("00")])
# SW Rn,disp7.align4(SP) - 0100_nnnn_0ddd_dd10
# Note: disp7.align4 is the same as imm7.align4
addop("SW", [bs("0100"), reg04, bs("0"), imm7_align4_noarg, reg00_deref_sp, bs("10")])
# LW Rn,disp7.align4(SP) - 0100_nnnn_0ddd_dd11
addop("LW", [bs("0100"), reg04, bs("0"), imm7_align4_noarg, reg00_deref_sp, bs("11")])
# SW Rn[0-7],disp7.align4(TP) - 0100_0nnn_1ddd_dd10
addop("SW", [bs("01000"), reg03, bs("1"), imm7_align4_noarg, reg00_deref_tp, bs("10")])
# LW Rn[0-7],disp7.align4(TP) - 0100_0nnn_1ddd_dd11
addop("LW", [bs("01000"), reg03, bs("1"), imm7_align4_noarg, reg00_deref_tp, bs("11")])
# LBU Rn[0-7],disp7(TP) - 0100_1nnn_1ddd_dddd
addop("LBU", [bs("01001"), reg03, bs("1"), imm7_noarg, reg00_deref_tp], [reg03, reg00_deref_tp])
### <Major Opcode #5>
# MOV Rn,imm8 - 0101_nnnn_iiii_iiii
addop("MOV", [bs("0101"), reg04, imm8])
### <Major Opcode #6>
# ADD Rn,imm6 - 0110_nnnn_iiii_ii00
addop("ADD", # mnemonic name
[bs("0110"), reg04, imm6, bs("00")]) # mnemonic description
# SLT3 R0,Rn,imm5 - 0110_nnnn_iiii_i001
addop("SLT3", [bs("0110"), reg00, reg04, imm5, bs("001")])
# SRL Rn,imm5 - 0110_nnnn_iiii_i010
addop("SRL", [bs("0110"), reg04, imm5, bs("010")])
# SRA Rn,imm5 - 0110_nnnn_iiii_i011
addop("SRA", [bs("0110"), reg04, imm5, bs("011")])
# SLTU3 R0,Rn,imm5 - 0110_nnnn_iiii_i101
addop("SLTU3", [bs("0110"), reg00, reg04, imm5, bs("101")])
# SLL Rn,imm5 - 0110_nnnn_iiii_i110
addop("SLL", [bs("0110"), reg04, imm5, bs("110")])
# SLL3 R0,Rn,imm5 - 0110_nnnn_iiii_i111
addop("SLL3", [bs("0110"), reg00, reg04, imm5, bs("111")])
### <Major Opcode #7>
# DI - 0111_0000_0000_0000
addop("DI", [bs("0111000000000000")])
# EI - 0111_0000_0001_0000
addop("EI", [bs("0111000000010000")])
# SYNCM - 0111_0000_0001_0001
addop("SYNCM", [bs("0111000000010001")])
# SYNCCP - 0111_0000_0010_0001
addop("SYNCCP", [bs("0111000000100001")])
# RET - 0111_0000_0000_0010
addop("RET", [bs("0111000000000010")])
# RETI - 0111_0000_0001_0010
addop("RETI", [bs("0111000000010010")])
# HALT - 0111_0000_0010_0010
addop("HALT", [bs("0111000000100010")])
# BREAK - 0111_0000_0011_0010
addop("BREAK", [bs("0111000000110010")])
# SLEEP - 0111_0000_0110_0010
addop("SLEEP", [bs("0111000001100010")])
# DRET - 0111_0000_0001_0011
addop("DRET", [bs("0111000000010011")])
# DBREAK - 0111_0000_0011_0011
addop("DBREAK", [bs("0111000000110011")])
# CACHE imm4,(Rm) - 0111_iiii_mmmm_0100
addop("CACHE", [bs("0111"), imm4, reg04_deref, bs("0100")])
# (RI) - 0111_xxxx_xxxx_0101
addop("(RI)", [bs("0111"), reg04, reg04, bs("0101")])
# SWI imm2 - 0111_0000_00ii_0110
addop("SWI", [bs("0111000000"), imm2, bs("0110")])
# (RI) - 0111_xxxx_xxxx_0111
addop("(RI)", [bs("0111"), reg04, reg04, bs("0111")])
# STC Rn,imm5 - 0111_nnnn_iiii_100I
addop("STC", [bs("0111"), reg04, imm4_iiii_noarg, bs("100"), imm5_Iiiii])
# LDC Rn,imm5 - 0111_nnnn_iiii_101I
addop("LDC", [bs("0111"), reg04, imm4_iiii_noarg, bs("101"), imm5_Iiiii])
# (RI) - 0111_xxxx_xxxx_1100
addop("(RI)", [bs("0111"), reg04, reg04, bs("1100")])
# (RI) - 0111_xxxx_xxxx_1101
addop("(RI)", [bs("0111"), reg04, reg04, bs("1101")])
# (RI) - 0111_xxxx_xxxx_1110
addop("(RI)", [bs("0111"), reg04, reg04, bs("1110")])
# (RI) - 0111_xxxx_xxxx_1111
addop("(RI)", [bs("0111"), reg04, reg04, bs("1111")])
### <Major Opcode #8>
# SB Rn[0-7],disp7(TP) - 1000_0nnn_0ddd_dddd
addop("SB", [bs("10000"), reg03, bs("0"), imm7_noarg, reg00_deref_tp])
# SH Rn[0-7],disp7.align2(TP) - 1000_0nnn_1ddd_ddd0
# (disp7.align2 = ddd_ddd||0)
addop("SH", [bs("10000"), reg03, bs("1"), disp7_align2_noarg, bs("0"), reg00_deref_tp])
# LB Rn[0-7],disp7(TP) - 1000_1nnn_0ddd_dddd
addop("LB", [bs("10001"), reg03, bs("0"), imm7_noarg, reg00_deref_tp])
# LH Rn[0-7],disp7.align2(TP) - 1000_1nnn_1ddd_ddd0
addop("LH", [bs("10001"), reg03, bs("1"), disp7_align2_noarg, bs("0"), reg00_deref_tp])
# LHU Rn[0-7],disp7.align2(TP) - 1000_1nnn_1ddd_ddd1
addop("LHU", [bs("10001"), reg03, bs("1"), disp7_align2_noarg, bs("1"), reg00_deref_tp])
### <Major Opcode #9>
# ADD3 Rl,Rn,Rm - 1001_nnnn_mmmm_llll
addop("ADD3", [bs("1001"), reg04_n, reg04_m, reg04_l], [reg04_l, reg04_n, reg04_m])
### <Major Opcode #10>
# BEQZ Rn,disp8.align2 - 1010_nnnn_dddd_ddd0
# (disp8=dddd_ddd||0)
addop("BEQZ", [bs("1010"), reg04, disp8, bs("0")])
# BNEZ Rn,disp8.align2 - 1010_nnnn_dddd_ddd1
addop("BNEZ", [bs("1010"), reg04, disp8, bs("1")])
### <Major Opcode #11>
# BRA disp12.align2 - 1011_dddd_dddd_ddd0
# (disp12=dddd_dddd_ddd||0)
addop("BRA", [bs("1011"), disp12_signed, bs("0")])
# BSR disp12.align2 - 1011_dddd_dddd_ddd1
addop("BSR", [bs("1011"), disp12_signed, bs("1")])
### <Major Opcode #12>
# ADD3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0000 iiii_iiii_iiii_iiii
addop("ADD3", [bs("1100"), reg04, reg04, bs("0000"), imm16_signed])
# MOV Rn,imm16 - 1100_nnnn_0000_0001 iiii_iiii_iiii_iiii
addop("MOV", [bs("1100"), reg04, bs("00000001"), imm16])
# MOVU Rn,imm16 - 1100_nnnn_0001_0001 iiii_iiii_iiii_iiii
addop("MOVU", [bs("1100"), reg04, bs("00010001"), imm16])
# MOVH Rn,imm16 - 1100_nnnn_0010_0001 iiii_iiii_iiii_iiii
addop("MOVH", [bs("1100"), reg04, bs("00100001"), imm16])
# SLT3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0010 iiii_iiii_iiii_iiii
addop("SLT3", [bs("1100"), reg04, reg04, bs("0010"), imm16_signed])
# SLTU3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0011 iiii_iiii_iiii_iiii
addop("SLTU3", [bs("1100"), reg04, reg04, bs("0011"), imm16])
# OR3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0100 iiii_iiii_iiii_iiii
addop("OR3", [bs("1100"), reg04, reg04, bs("0100"), imm16])
# AND3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0101 iiii_iiii_iiii_iiii
addop("AND3", [bs("1100"), reg04, reg04, bs("0101"), imm16])
# XOR3 Rn,Rm,imm16 - 1100_nnnn_mmmm_0110 iiii_iiii_iiii_iiii
addop("XOR3", [bs("1100"), reg04, reg04, bs("0110"), imm16])
# (RI) - 1100_xxxx_xxxx_0111 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1100"), imm8, bs("0111"), imm16])
# SB Rn,disp16(Rm) - 1100_nnnn_mmmm_1000 dddd_dddd_dddd_dddd
addop("SB", [bs("1100"), reg04, reg04_deref_noarg, bs("1000"), disp16_reg_deref], [reg04, disp16_reg_deref])
# SH Rn,disp16(Rm) - 1100_nnnn_mmmm_1001 dddd_dddd_dddd_dddd
addop("SH", [bs("1100"), reg04, reg04_deref_noarg, bs("1001"), disp16_reg_deref], [reg04, disp16_reg_deref])
# SW Rn,disp16(Rm) - 1100_nnnn_mmmm_1010 dddd_dddd_dddd_dddd
addop("SW", [bs("1100"), reg04, reg04_deref_noarg, bs("1010"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LBU Rn,disp16(Rm) - 1100_nnnn_mmmm_1011 dddd_dddd_dddd_dddd
addop("LBU", [bs("1100"), reg04, reg04_deref_noarg, bs("1011"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LB Rn,disp16(Rm) - 1100_nnnn_mmmm_1100 dddd_dddd_dddd_dddd
addop("LB", [bs("1100"), reg04, reg04_deref_noarg, bs("1100"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LH Rn,disp16(Rm) - 1100_nnnn_mmmm_1101 dddd_dddd_dddd_dddd
addop("LH", [bs("1100"), reg04, reg04_deref_noarg, bs("1101"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LW Rn,disp16(Rm) - 1100_nnnn_mmmm_1110 dddd_dddd_dddd_dddd
addop("LW", [bs("1100"), reg04, reg04_deref_noarg, bs("1110"), disp16_reg_deref], [reg04, disp16_reg_deref])
# LHU Rn,disp16(Rm) - 1100_nnnn_mmmm_1111 dddd_dddd_dddd_dddd
addop("LHU", [bs("1100"), reg04, reg04_deref_noarg, bs("1111"), disp16_reg_deref], [reg04, disp16_reg_deref])
### <Major Opcode #13>
# MOVU Rn[0-7],imm24 - 1101_0nnn_IIII_IIII iiii_iiii_iiii_iiii
addop("MOVU", [bs("11010"), reg03, imm24])
# BCPEQ cccc,disp17 - 1101_1000_cccc_0100 dddd_dddd_dddd_dddd
addop("BCPEQ", [bs("11011000"), imm4, bs("0100"), disp17])
# BCPNE cccc,disp17 - 1101_1000_cccc_0101 dddd_dddd_dddd_dddd
addop("BCPNE", [bs("11011000"), imm4, bs("0101"), disp17])
# BCPAT cccc,disp17 - 1101_1000_cccc_0110 dddd_dddd_dddd_dddd
addop("BCPAT", [bs("11011000"), imm4, bs("0110"), disp17])
# BCPAF cccc,disp17 - 1101_1000_cccc_0111 dddd_dddd_dddd_dddd
addop("BCPAF", [bs("11011000"), imm4, bs("0111"), disp17])
# JMP target24 - 1101_1TTT_TTTT_1000 tttt_tttt_tttt_tttt
addop("JMP", [bs("11011"), imm7_noarg, bs("1000"), imm_target24],
[imm_target24]) # the only interesting operand is imm_target24
# BSR disp24 - 1101_1DDD_DDDD_1001 dddd_dddd_dddd_dddd
addop("BSR", [bs("11011"), imm7_noarg, bs("1001"), imm_target24_signed], [imm_target24_signed])
# BSRV disp24 1101_1DDD_DDDD_1011 dddd_dddd_dddd_dddd
addop("BSRV", [bs("11011"), imm7_noarg, bs("1011"), imm_target24], [imm_target24])
### <Major Opcode #14>
# BEQI Rn,imm4,disp17 - 1110_nnnn_iiii_0000 dddd_dddd_dddd_dddd
addop("BEQI", [bs("1110"), reg04, imm4, bs("0000"), disp17])
# BEQ Rn,Rm,disp17 - 1110_nnnn_mmmm_0001 dddd_dddd_dddd_dddd
addop("BEQ", [bs("1110"), reg04, reg04, bs("0001"), disp17])
# BNEI Rn,imm4,disp17 - 1110_nnnn_iiii_0100 dddd_dddd_dddd_dddd
addop("BNEI", [bs("1110"), reg04, imm4, bs("0100"), disp17])
# BNE Rn,Rm,disp17 - 1110_nnnn_mmmm_0101 dddd_dddd_dddd_dddd
addop("BNE", [bs("1110"), reg04, reg04, bs("0101"), disp17])
# BGEI Rn,imm4,disp17 - 1110_nnnn_iiii_1000 dddd_dddd_dddd_dddd
addop("BGEI", [bs("1110"), reg04, imm4, bs("1000"), disp17])
# REPEAT Rn,disp17 - 1110_nnnn_0000_1001 dddd_dddd_dddd_dddd
addop("REPEAT", [bs("1110"), reg04, bs("00001001"), disp17])
# EREPEAT disp17 - 1110_0000_0001_1001 dddd_dddd_dddd_dddd
addop("EREPEAT", [bs("1110000000011001"), disp17])
# BLTI Rn,imm4,disp17 - 1110_nnnn_iiii_1100 dddd_dddd_dddd_dddd
addop("BLTI", [bs("1110"), reg04, imm4, bs("1100"), disp17])
# (RI) - 1110_xxxx_xxxx_1101 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1110"), imm8, bs("1101"), imm16])
# SW Rn,(abs24) - 1110_nnnn_DDDD_DD10 dddd_dddd_dddd_dddd
addop("SW", [bs("1110"), reg04, imm6_noarg, bs("10"), abs24])
# LW Rn,(abs24) - 1110_nnnn_DDDD_DD11 dddd_dddd_dddd_dddd
addop("LW", [bs("1110"), reg04, imm6_noarg, bs("11"), abs24])
### <Major Opcode #15>
# DSP Rn,Rm,code16 - 1111_nnnn_mmmm_0000 cccc_cccc_cccc_cccc
addop("DSP", [bs("1111"), reg04, reg04, bs("0000"), imm16])
# Note: DSP, DSP0 & DSP1 look exactly the same. This is ambiguous and
#       prevents them from being correctly disassembled. DSP0 & DSP1 are
#       arbitrarily disabled.
# DSP0 code24 - 1111_nnnn_mmmm_0000 cccc_cccc_cccc_cccc
#addop("DSP0", [bs("1111"), imm8_noarg, bs("0000"), imm_code24], [imm_code24])
# DSP1 Rn,code20 - 1111_nnnn_mmmm_0000 cccc_cccc_cccc_cccc
#addop("DSP1", [bs("1111"), reg04, imm4_noarg, bs("0000"), imm_code20])
# LDZ Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0000
addop("LDZ", [bs("1111"), reg04, reg04, bs("00010000000000000000")])
# AVE Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0010
addop("AVE", [bs("1111"), reg04, reg04, bs("00010000000000000010")])
# ABS Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0011
addop("ABS", [bs("1111"), reg04, reg04, bs("00010000000000000011")])
# MIN Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0100
addop("MIN", [bs("1111"), reg04, reg04, bs("00010000000000000100")])
# MAX Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0101
addop("MAX", [bs("1111"), reg04, reg04, bs("00010000000000000101")])
# MINU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0110
addop("MINU", [bs("1111"), reg04, reg04, bs("00010000000000000110")])
# MAXU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_0111
addop("MAXU", [bs("1111"), reg04, reg04, bs("00010000000000000111")])
# SADD Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1000
addop("SADD", [bs("1111"), reg04, reg04, bs("00010000000000001000")])
# SADDU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1001
addop("SADDU", [bs("1111"), reg04, reg04, bs("00010000000000001001")])
# SSUB Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1010
addop("SSUB", [bs("1111"), reg04, reg04, bs("00010000000000001010")])
# SSUBU Rn,Rm - 1111_nnnn_mmmm_0001 0000_0000_0000_1011
addop("SSUBU", [bs("1111"), reg04, reg04, bs("00010000000000001011")])
# CLIP Rn,imm5 - 1111_nnnn_0000_0001 0001_0000_iiii_i000
addop("CLIP", [bs("1111"), reg04, bs("0000000100010000"), imm5, bs("000")])
# CLIPU Rn,imm5 - 1111_nnnn_0000_0001 0001_0000_iiii_i001
addop("CLIPU", [bs("1111"), reg04, bs("0000000100010000"), imm5, bs("001")])
# (RI) - 1111_xxxx_xxxx_0001 0010_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("00010010"), imm12])
# MADD Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0100
addop("MADD", [bs("1111"), reg04, reg04, bs("00010011000000000100")])
# MADDU Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0101
addop("MADDU", [bs("1111"), reg04, reg04, bs("00010011000000000101")])
# MADDR Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0110
addop("MADDR", [bs("1111"), reg04, reg04, bs("00010011000000000110")])
# MADDRU Rn,Rm - 1111_nnnn_mmmm_0001 0011_0000_0000_0111
addop("MADDRU", [bs("1111"), reg04, reg04, bs("00010011000000000111")])
# UCI Rn,Rm,code16 - 1111_nnnn_mmmm_0010 cccc_cccc_cccc_cccc
addop("UCI", [bs("1111"), reg04, reg04, bs("0010"), imm16])
# (RI) - 1111_xxxx_xxxx_0011 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("0011"), imm16])
# STCB Rn,abs16 - 1111_nnnn_0000_0100 aaaa_aaaa_aaaa_aaaa
addop("STCB", [bs("1111"), reg04, bs("00000100"), imm16])
# LDCB Rn,abs16 - 1111_nnnn_0001_0100 aaaa_aaaa_aaaa_aaaa
addop("LDCB", [bs("1111"), reg04, bs("00010100"), imm16])
# SBCPA CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0000_0000_iiii_iiii
addop("SBCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100000000"), imm8])
# SHCPA CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0001_0000_iiii_iii0
addop("SHCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100010000"), imm8_align2, bs("0")])
# SWCPA CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0010_0000_iiii_ii00
addop("SWCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100100000"), imm8_align4, bs("00")])
# SMCPA CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0011_0000_iiii_i000
addop("SMCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100110000"), imm8_align8, bs("000")])
# LBCPA CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0100_0000_iiii_iiii
addop("LBCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101000000"), imm8])
# LHCPA CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0101_0000_iiii_iii0
addop("LHCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101010000"), imm8_align2, bs("0")])
# LWCPA CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0110_0000_iiii_ii00
addop("LWCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101100000"), imm8_align4, bs("00")])
# LMCPA CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0111_0000_iiii_i000
addop("LMCPA", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101110000"), imm8_align8, bs("000")])
# SBCPM0 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0000_1000_iiii_iiii
addop("SBCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100001000"), imm8])
# SHCPM0 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0001_1000_iiii_iii0
addop("SHCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100011000"), imm8_align2, bs("0")])
# SWCPM0 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0010_1000_iiii_ii00
addop("SWCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100101000"), imm8_align4, bs("00")])
# SMCPM0 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0011_1000_iiii_i000
addop("SMCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100111000"), imm8_align8, bs("000")])
# LBCPM0 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0100_1000_iiii_iiii
addop("LBCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101001000"), imm8])
# LHCPM0 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0101_1000_iiii_iii0
addop("LHCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101011000"), imm8_align2, bs("0")])
# LWCPM0 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0110_1000_iiii_ii00
addop("LWCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101101000"), imm8_align4, bs("00")])
# LMCPM0 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0111_1000_iiii_i000
addop("LMCPM0", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101111000"), imm8_align8, bs("000")])
# SBCPM1 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0000_1100_iiii_iiii
addop("SBCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100001100"), imm8])
# SHCPM1 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0001_1100_iiii_iii0
addop("SHCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100011100"), imm8_align2, bs("0")])
# SWCPM1 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0010_1100_iiii_ii00
addop("SWCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100101100"), imm8_align4, bs("00")])
# SMCPM1 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0011_1100_iiii_i000
addop("SMCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010100111100"), imm8_align8, bs("000")])
# LBCPM1 CRn,(Rm+),imm8 - 1111_nnnn_mmmm_0101 0100_1100_iiii_iiii
addop("LBCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101001100"), imm8])
# LHCPM1 CRn,(Rm+),imm8.align2 - 1111_nnnn_mmmm_0101 0101_1100_iiii_iii0
addop("LHCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101011100"), imm8_align2, bs("0")])
# LWCPM1 CRn,(Rm+),imm8.align4 - 1111_nnnn_mmmm_0101 0110_1100_iiii_ii00
addop("LWCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101101100"), imm8_align4, bs("00")])
# LMCPM1 CRn,(Rm+),imm8.align8 - 1111_nnnn_mmmm_0101 0111_1100_iiii_i000
addop("LMCPM1", [bs("1111"), copro_reg04, reg04_inc_deref, bs("010101111100"), imm8_align8, bs("000")])
# (RI) - 1111_xxxx_xxxx_0110 xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("0110"), imm16])
# CP code24 - 1111_CCCC_CCCC_0111 cccc_cccc_cccc_cccc
#addop("CP", [bs("1111"), imm8_noarg, bs("0111"), imm_code24], [imm_code24])
# Note: CP & CMOV* look exactly the same. This is ambiguous and prevents
#       them from being correctly disassembled. CP was arbitrarily disabled.
# CP code56 - 1111_CCCC_CCCC_0111 cccc_cccc_cccc_cccc cccc_cccc_cccc_cccc
# 64-bit VLIW operation mode - not implemented
# CMOV CRn,Rm - 1111_nnnn_mmmm_0111 1111_0000_0000_0000
#addop("CMOV", [bs("1111"), copro_reg04, reg04, bs("01111111000000000000")])
# CMOV Rm,CRn - 1111_nnnn_mmmm_0111 1111_0000_0000_0001
#addop("CMOV", [bs("1111"), copro_reg04, reg04, bs("01111111000000000001")], [reg04, copro_reg04])
# CMOVC CCRn,Rm - 1111_nnnn_mmmm_0111 1111_0000_0000_NN10
# CRn=NNnnnn
addop("CMOVC", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg06, bs("10")], [copro_reg06, reg04])
# CMOVC Rm,CCRn - 1111_nnnn_mmmm_0111 1111_0000_0000_NN11
# CRn=NNnnnn
addop("CMOVC", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg06, bs("11")], [reg04, copro_reg06])
# CMOVH CRn,Rm - 1111_nnnn_mmmm_0111 1111_0001_0000_0000
#addop("CMOVH", [bs("1111"), copro_reg04, reg04, bs("01111111000100000000")])
# CMOVH Rm,CRn - 1111_nnnn_mmmm_0111 1111_0001_0000_0001
#addop("CMOVH", [bs("1111"), copro_reg04, reg04, bs("01111111000100000001")], [reg04, copro_reg04])
# Note: the following CMOV* instructions are extensions used when the processor
# has more than 16 coprocessor general-purpose registers. They can be
#       used to assemble and disassemble both CMOV* instruction sets.
# CMOV CRn,Rm - 1111_nnnn_mmmm_0111 1111_0000_0000_N000
# CRn=Nnnnn
addop("CMOV", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg05, bs("000")], [copro_reg05, reg04])
# CMOV Rm,CRn - 1111_nnnn_mmmm_0111 1111_0000_0000_N001
addop("CMOV", [bs("1111"), imm4_noarg, reg04, bs("0111111100000000"), copro_reg05, bs("001")], [reg04, copro_reg05])
# CMOVH CRn,Rm - 1111_nnnn_mmmm_0111 1111_0001_0000_N000
addop("CMOVH", [bs("1111"), imm4_noarg, reg04, bs("0111111100010000"), copro_reg05, bs("000")], [copro_reg05, reg04])
# CMOVH Rm,CRn - 1111_nnnn_mmmm_0111 1111_0001_0000_N001
addop("CMOVH", [bs("1111"), imm4_noarg, reg04, bs("0111111100010000"), copro_reg05, bs("001")], [reg04, copro_reg05])
# (RI) - 1111_xxxx_xxxx_10xx xxxx_xxxx_xxxx_xxxx
addop("(RI)", [bs("1111"), imm8, bs("10"), imm18])
# SWCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1100 dddd_dddd_dddd_dddd
addop("SWCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1100"), disp16_reg_deref], [copro_reg04, disp16_reg_deref])
# LWCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1101 dddd_dddd_dddd_dddd
addop("LWCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1101"), disp16_reg_deref], [copro_reg04, disp16_reg_deref, reg04_deref])
# SMCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1110 dddd_dddd_dddd_dddd
addop("SMCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1110"), disp16_reg_deref], [copro_reg04, disp16_reg_deref, reg04_deref])
# LMCP CRn,disp16(Rm) - 1111_nnnn_mmmm_1111 dddd_dddd_dddd_dddd
addop("LMCP", [bs("1111"), copro_reg04, reg04_deref_noarg, bs("1111"), disp16_reg_deref], [copro_reg04, disp16_reg_deref])
| gpl-2.0 |
CodigoSur/cyclope | cyclope/apps/social/frontend_views.py | 2 | 1653 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
from django.core.paginator import Paginator
from actstream.models import Action, target_stream, user_stream
from cyclope.core import frontend
import cyclope.utils
from models import Social
class GlobalActivity(frontend.FrontendView):
name = 'global_activity'
verbose_name = _("Global activity or user's feed if authenticated")
is_default = True
is_instance_view = False
is_region_view = False
is_content_view = True
template = "social/actions_list.html"
def get_response(self, request, req_context, options):
actions = self.get_actions(request)
page = self.build_page(request, actions)
return render_to_string(self.template, {
'page': page,
}, req_context)
def get_actions(self, request):
if request.user.is_authenticated():
actions = target_stream(request.user) | user_stream(request.user)
else:
actions = Action.objects.public()
return actions
def build_page(self, request, actions):
paginator = Paginator(actions, per_page=10)
page = cyclope.utils.get_page(paginator, request)
return page
class GlobalOnlyActivity(GlobalActivity):
name = 'global_only_activity'
verbose_name = _('Global activity of the site')
is_default = False
def get_actions(self, request):
return Action.objects.public()
frontend.site.register_view(Social, GlobalActivity)
frontend.site.register_view(Social, GlobalOnlyActivity)
| gpl-3.0 |
ahamilton55/ansible | lib/ansible/module_utils/facts/system/selinux.py | 52 | 3061 | # Collect facts related to selinux
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.collector import BaseFactCollector
try:
import selinux
HAVE_SELINUX = True
except ImportError:
HAVE_SELINUX = False
SELINUX_MODE_DICT = {1: 'enforcing',
0: 'permissive',
-1: 'disabled'}
class SelinuxFactCollector(BaseFactCollector):
name = 'selinux'
_fact_ids = set()
def collect(self, module=None, collected_facts=None):
facts_dict = {}
selinux_facts = {}
# This is weird. The value of the facts 'selinux' key can be False or a dict
if not HAVE_SELINUX:
facts_dict['selinux'] = False
facts_dict['selinux_python_present'] = False
return facts_dict
facts_dict['selinux_python_present'] = True
if not selinux.is_selinux_enabled():
selinux_facts['status'] = 'disabled'
# NOTE: this could just return in the above clause and the rest of this is up an indent -akl
else:
selinux_facts['status'] = 'enabled'
try:
selinux_facts['policyvers'] = selinux.security_policyvers()
except (AttributeError, OSError):
selinux_facts['policyvers'] = 'unknown'
try:
(rc, configmode) = selinux.selinux_getenforcemode()
if rc == 0:
selinux_facts['config_mode'] = SELINUX_MODE_DICT.get(configmode, 'unknown')
else:
selinux_facts['config_mode'] = 'unknown'
except (AttributeError, OSError):
selinux_facts['config_mode'] = 'unknown'
try:
mode = selinux.security_getenforce()
selinux_facts['mode'] = SELINUX_MODE_DICT.get(mode, 'unknown')
except (AttributeError, OSError):
selinux_facts['mode'] = 'unknown'
try:
(rc, policytype) = selinux.selinux_getpolicytype()
if rc == 0:
selinux_facts['type'] = policytype
else:
selinux_facts['type'] = 'unknown'
except (AttributeError, OSError):
selinux_facts['type'] = 'unknown'
facts_dict['selinux'] = selinux_facts
return facts_dict
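# A minimal usage sketch (hypothetical; outside Ansible's fact machinery the
# optional `module` and `collected_facts` arguments can simply be omitted):
#
#     collector = SelinuxFactCollector()
#     facts = collector.collect()
#     # facts['selinux'] is False when the selinux Python bindings are absent,
#     # otherwise a dict such as {'status': 'enabled', 'mode': 'enforcing', ...}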
| gpl-3.0 |
Feverup/ansible-modules-extras | cloud/vmware/vmware_target_canonical_facts.py | 16 | 3194 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: vmware_target_canonical_facts
short_description: Return canonical (NAA) from an ESXi host
description:
- Return canonical (NAA) from an ESXi host based on SCSI target ID
version_added: "2.0"
author: Joseph Callen
notes:
    - Tested on vSphere 5.5
requirements:
    - PyVmomi installed
options:
hostname:
description:
- The hostname or IP address of the vSphere vCenter
required: True
username:
description:
- The username of the vSphere vCenter
required: True
aliases: ['user', 'admin']
password:
description:
- The password of the vSphere vCenter
required: True
aliases: ['pass', 'pwd']
target_id:
description:
- The target id based on order of scsi device
required: True
'''
EXAMPLES = '''
# Example vmware_target_canonical_facts command from Ansible Playbooks
- name: Get Canonical name
local_action: >
vmware_target_canonical_facts
hostname="{{ ansible_ssh_host }}" username=root password=vmware
target_id=7
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
def find_hostsystem(content):
host_system = get_all_objs(content, [vim.HostSystem])
for host in host_system:
return host
return None
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(target_id=dict(required=True, type='int')))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
content = connect_to_api(module)
host = find_hostsystem(content)
target_lun_uuid = {}
scsilun_canonical = {}
# Associate the scsiLun key with the canonicalName (NAA)
for scsilun in host.config.storageDevice.scsiLun:
scsilun_canonical[scsilun.key] = scsilun.canonicalName
# Associate target number with LUN uuid
for target in host.config.storageDevice.scsiTopology.adapter[0].target:
for lun in target.lun:
target_lun_uuid[target.target] = lun.scsiLun
module.exit_json(changed=False, canonical=scsilun_canonical[target_lun_uuid[module.params['target_id']]])
from ansible.module_utils.basic import *
from ansible.module_utils.vmware import *
if __name__ == '__main__':
main()
| gpl-3.0 |
cstipkovic/spidermonkey-research | testing/mozbase/moznetwork/moznetwork/moznetwork.py | 1 | 5591 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import array
import re
import socket
import struct
import subprocess
import sys
import mozinfo
import mozlog
if mozinfo.isLinux:
import fcntl
class NetworkError(Exception):
"""Exception thrown when unable to obtain interface or IP."""
def _get_logger():
logger = mozlog.get_default_logger(component='moznetwork')
if not logger:
logger = mozlog.unstructured.getLogger('moznetwork')
return logger
def _get_interface_list():
"""Provides a list of available network interfaces
as a list of tuples (name, ip)"""
logger = _get_logger()
logger.debug('Gathering interface list')
    max_iface = 32  # Maximum number of interfaces (arbitrary)
bytes = max_iface * 32
is_32bit = (8 * struct.calcsize("P")) == 32 # Set Architecture
struct_size = 32 if is_32bit else 40
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
names = array.array('B', '\0' * bytes)
outbytes = struct.unpack('iL', fcntl.ioctl(
s.fileno(),
0x8912, # SIOCGIFCONF
struct.pack('iL', bytes, names.buffer_info()[0])
))[0]
namestr = names.tostring()
return [(namestr[i:i + 32].split('\0', 1)[0],
socket.inet_ntoa(namestr[i + 20:i + 24]))
for i in range(0, outbytes, struct_size)]
except IOError:
raise NetworkError('Unable to call ioctl with SIOCGIFCONF')
def _proc_matches(args, regex):
"""Helper returns the matches of regex in the output of a process created with
the given arguments"""
output = subprocess.Popen(args=args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT).stdout.read()
return re.findall(regex, output)
def _parse_ifconfig():
"""Parse the output of running ifconfig on mac in cases other methods
have failed"""
logger = _get_logger()
logger.debug('Parsing ifconfig')
# Attempt to determine the default interface in use.
default_iface = _proc_matches(['route', '-n', 'get', 'default'],
'interface: (\w+)')
if default_iface:
addr_list = _proc_matches(['ifconfig', default_iface[0]],
                                  'inet (\d+\.\d+\.\d+\.\d+)')
if addr_list:
logger.debug('Default interface: [%s] %s' % (default_iface[0],
addr_list[0]))
if not addr_list[0].startswith('127.'):
return addr_list[0]
# Iterate over plausible interfaces if we didn't find a suitable default.
for iface in ['en%s' % i for i in range(10)]:
addr_list = _proc_matches(['ifconfig', iface],
                                  'inet (\d+\.\d+\.\d+\.\d+)')
if addr_list:
logger.debug('Interface: [%s] %s' % (iface, addr_list[0]))
if not addr_list[0].startswith('127.'):
return addr_list[0]
# Just return any that isn't localhost. If we can't find one, we have
# failed.
addrs = _proc_matches(['ifconfig'],
                          'inet (\d+\.\d+\.\d+\.\d+)')
try:
return [addr for addr in addrs if not addr.startswith('127.')][0]
except IndexError:
return None
def get_ip():
"""Provides an available network interface address, for example
"192.168.1.3".
A `NetworkError` exception is raised in case of failure."""
logger = _get_logger()
try:
hostname = socket.gethostname()
try:
logger.debug('Retrieving IP for %s' % hostname)
ips = socket.gethostbyname_ex(hostname)[2]
except socket.gaierror: # for Mac OS X
hostname += '.local'
logger.debug('Retrieving IP for %s' % hostname)
ips = socket.gethostbyname_ex(hostname)[2]
if len(ips) == 1:
ip = ips[0]
elif len(ips) > 1:
logger.debug('Multiple addresses found: %s' % ips)
# no fallback on Windows so take the first address
ip = ips[0] if mozinfo.isWin else None
else:
ip = None
except socket.gaierror:
# sometimes the hostname doesn't resolve to an ip address, in which
# case this will always fail
ip = None
if ip is None or ip.startswith("127."):
if mozinfo.isLinux:
interfaces = _get_interface_list()
for ifconfig in interfaces:
logger.debug('Interface: [%s] %s' % (ifconfig[0], ifconfig[1]))
if ifconfig[0] == 'lo':
continue
else:
return ifconfig[1]
elif mozinfo.isMac:
ip = _parse_ifconfig()
if ip is None:
raise NetworkError('Unable to obtain network address')
return ip
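# A minimal usage sketch (the address returned depends entirely on the host's
# network configuration; '192.168.1.3' is only illustrative):
#
#     import moznetwork
#     try:
#         ip = moznetwork.get_ip()   # e.g. '192.168.1.3'
#     except moznetwork.NetworkError:
#         ip = None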
def get_lan_ip():
"""Deprecated. Please use get_ip() instead."""
return get_ip()
def cli(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description='Retrieve IP address')
    mozlog.commandline.add_logging_group(
        parser,
        include_formatters=mozlog.commandline.TEXT_FORMATTERS
    )
    args = parser.parse_args()
    mozlog.commandline.setup_logging(
        'moznetwork', args, {'mach': sys.stdout})
_get_logger().info('IP address: %s' % get_ip())
if __name__ == '__main__':
cli()
| mpl-2.0 |
hamzehd/edx-platform | lms/djangoapps/mobile_api/video_outlines/views.py | 121 | 4888 | """
Video Outlines
We only provide the listing view for a video outline, and video outlines are
only displayed at the course level. This is because it makes it a lot easier to
optimize and reason about, and it avoids having to tackle the bigger problem of
general XBlock representation in this rather specialized formatting.
"""
from functools import partial
from django.http import Http404, HttpResponse
from mobile_api.models import MobileApiConfig
from rest_framework import generics
from rest_framework.response import Response
from opaque_keys.edx.locator import BlockUsageLocator
from xmodule.exceptions import NotFoundError
from xmodule.modulestore.django import modulestore
from ..utils import mobile_view, mobile_course_access
from .serializers import BlockOutline, video_summary
@mobile_view()
class VideoSummaryList(generics.ListAPIView):
"""
**Use Case**
Get a list of all videos in the specified course. You can use the
video_url value to access the video file.
**Example Request**
GET /api/mobile/v0.5/video_outlines/courses/{organization}/{course_number}/{course_run}
**Response Values**
If the request is successful, the request returns an HTTP 200 "OK"
response along with an array of videos in the course. The array
includes the following information for each video.
* named_path: An array that consists of the display names of the
courseware objects in the path to the video.
* path: An array that specifies the complete path to the video in
the courseware hierarchy. The array contains the following
values.
* category: The type of division in the course outline.
Possible values are "chapter", "sequential", and "vertical".
* name: The display name for the object.
            * id: The unique identifier for the video.
* section_url: The URL to the first page of the section that
contains the video in the Learning Management System.
* summary: An array of data about the video that includes the
following values.
* category: The type of component. This value will always be "video".
* duration: The length of the video, if available.
* id: The unique identifier for the video.
* language: The language code for the video.
* name: The display name of the video.
* size: The size of the video file.
* transcripts: An array of language codes and URLs to available
video transcripts. Use the URL value to access a transcript
for the video.
* video_thumbnail_url: The URL to the thumbnail image for the
video, if available.
* video_url: The URL to the video file. Use this value to access
the video.
* unit_url: The URL to the unit that contains the video in the Learning
Management System.
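    **Example Response (illustrative)**
        A sketch of one entry in the returned array; the field names follow
        the list above, while all values are invented for illustration:
        [{
            "named_path": ["Introduction", "Lesson 1", "Welcome Video"],
            "path": [
                {"category": "chapter", "name": "Introduction", "id": "..."},
                {"category": "sequential", "name": "Lesson 1", "id": "..."},
                {"category": "vertical", "name": "Welcome Video", "id": "..."}
            ],
            "section_url": "https://lms.example.com/...",
            "summary": {
                "category": "video",
                "duration": 125.0,
                "id": "...",
                "language": "en",
                "name": "Welcome Video",
                "size": 1048576,
                "transcripts": {"en": "https://lms.example.com/..."},
                "video_thumbnail_url": null,
                "video_url": "https://video.example.com/welcome.mp4"
            },
            "unit_url": "https://lms.example.com/..."
        }]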
"""
@mobile_course_access(depth=None)
def list(self, request, course, *args, **kwargs):
video_profiles = MobileApiConfig.get_video_profiles()
video_outline = list(
BlockOutline(
course.id,
course,
{"video": partial(video_summary, video_profiles)},
request,
video_profiles,
)
)
return Response(video_outline)
@mobile_view()
class VideoTranscripts(generics.RetrieveAPIView):
"""
**Use Case**
Get a transcript for a specified video and language.
**Example request**
GET /api/mobile/v0.5/video_outlines/transcripts/{organization}/{course_number}/{course_run}/{video ID}/{language code}
**Response Values**
If the request is successful, the request returns an HTTP 200 "OK"
response along with an .srt file that you can download.
"""
@mobile_course_access()
def get(self, request, course, *args, **kwargs):
block_id = kwargs['block_id']
lang = kwargs['lang']
usage_key = BlockUsageLocator(
course.id, block_type="video", block_id=block_id
)
try:
video_descriptor = modulestore().get_item(usage_key)
transcripts = video_descriptor.get_transcripts_info()
content, filename, mimetype = video_descriptor.get_transcript(transcripts, lang=lang)
except (NotFoundError, ValueError, KeyError):
raise Http404(u"Transcript not found for {}, lang: {}".format(block_id, lang))
response = HttpResponse(content, content_type=mimetype)
response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename.encode('utf-8'))
return response
| agpl-3.0 |
ingokegel/intellij-community | python/testData/inspections/PyTypeCheckerInspection/Generator.py | 30 | 3112 | def test():
def gen(n):
for x in xrange(n):
yield str(x)
def f_1(xs):
"""
:type xs: list of int
"""
return xs
def f_2(xs):
"""
:type xs: collections.Sequence of int
"""
return xs
def f_3(xs):
"""
:type xs: collections.Container of int
"""
return xs
def f_4(xs):
"""
:type xs: collections.Iterator of int
"""
return xs
def f_5(xs):
"""
:type xs: collections.Iterable of int
"""
return xs
def f_6(xs):
"""
:type xs: list
"""
return xs
def f_7(xs):
"""
:type xs: collections.Sequence
"""
return xs
def f_8(xs):
"""
:type xs: collections.Container
"""
return xs
def f_9(xs):
"""
:type xs: collections.Iterator
"""
return xs
def f_10(xs):
"""
:type xs: collections.Iterable
"""
return xs
def f_11(xs):
"""
:type xs: list of string
"""
return xs
def f_12(xs):
"""
:type xs: collections.Sequence of string
"""
return xs
def f_13(xs):
"""
:type xs: collections.Container of string
"""
return xs
def f_14(xs):
"""
:type xs: collections.Iterator of string
"""
return xs
def f_15(xs):
"""
:type xs: collections.Iterable of string
"""
return xs
return [
''.join(gen(10)),
f_1(<warning descr="Expected type 'List[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_2(<warning descr="Expected type 'Sequence[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_3(<warning descr="Expected type 'Container[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_4(<warning descr="Expected type 'Iterator[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_5(<warning descr="Expected type 'Iterable[int]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_6(<warning descr="Expected type 'list', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_7(<warning descr="Expected type 'Sequence', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_8(<warning descr="Expected type 'Container', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_9(gen(11)),
f_10(gen(11)),
f_11(<warning descr="Expected type 'List[Union[str, unicode]]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_12(<warning descr="Expected type 'Sequence[Union[str, unicode]]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_13(<warning descr="Expected type 'Container[Union[str, unicode]]', got 'Generator[str, Any, None]' instead">gen(11)</warning>),
f_14(gen(11)),
f_15(gen(11)),
f_15('foo'.split('o')),
]
| apache-2.0 |
xzregg/yunwei | yw/core/bf.py | 2 | 3673 | #!/usr/bin/env python
# coding:utf-8
# A class for running tasks concurrently across processes and threads, by xzr
import multiprocessing
import time
import subprocess
import os
import sys
import traceback
import threading
#import Queue
def get_now():
tf = '%Y-%m-%d %H:%M:%S'
return time.strftime(tf, time.localtime())
_Cpus = multiprocessing.cpu_count()
class xBF:
    '''
    A class that runs tasks concurrently across processes and threads.
    @f          the function to run
    @tuplist    list of argument tuples for the function
    @bfn        maximum number of concurrent tasks
    @threadnums number of threads started per process
    @printret   print each result as it completes
    '''
def __init__(self, f=None, tuplist=[], bfn=0, printret=False):
self.cpus = _Cpus
bfn = int(bfn)
self.bfn = bfn or 150
self.Manager = multiprocessing.Manager()
self.retdict = self.Manager.dict()
self.Q = multiprocessing.Queue()
self.printret = printret
self.funcs = []
self.mul = threading.Thread
        if f and tuplist:  # a function was supplied at construction time
            for n, o in enumerate(tuplist):
                n += 1  # task indices start at 1
                process = self.mul(
                    target=self.get_fun_ret, args=(n, f, o))
self.funcs.append(process)
def append(self, f, opttup):
n = len(self.funcs) + 1
process = self.mul(target=self.get_fun_ret, args=(n, f, opttup))
self.funcs.append(process)
def get_fun_ret(self, n, f, tup):
self.retdict[n] = f(*tup)
        if self.printret:
print '<%s>\n%s' % (n, self.retdict[n])
    def startprocess(self, threadjobs, n):  # run this process's threads
for t in threadjobs:
t.start()
for t in threadjobs:
t.join()
self.Q.put(n)
def start(self, Print=True):
stime = get_now()
tp = len(self.funcs)
ltp = min(tp, self.bfn)
        # Split threads by the task limit (or the task count) so that the
        # total number of threads across the `cpus` processes stays close
        # to that limit.
self.threadnums = ltp / self.cpus or 1
self.threadnums += 1 if ltp % self.cpus else 0
GroupbyPl = [self.funcs[i:i + self.threadnums]
for i in xrange(0, tp, self.threadnums)]
pp = []
for i, threadjobs in enumerate(GroupbyPl):
process = multiprocessing.Process(
target=self.startprocess, args=(threadjobs, i))
pp.append(process)
process.start()
if i >= self.cpus - 1:
n = self.Q.get()
                if n is not None:
pp[n].join()
for p in pp:
p.join()
if Print:
            print '[%s]' % stime, '-' * 30, 'tasks: %s task limit: %s processes: %s threads per process: %s started' % (tp, self.bfn, self.cpus, self.threadnums)
            print '[%s]' % get_now(), '-' * 30, 'tasks: %s task limit: %s processes: %s threads per process: %s finished' % (tp, self.bfn, self.cpus, self.threadnums)
def dict(self):
d = dict(self.retdict)
self.Manager.shutdown()
return d
def Print(self):
        print 'tasks: %s task limit: %s processes: %s threads per process: %s' % (len(self.funcs), self.bfn, self.cpus, self.threadnums)
def f(a, b):
time.sleep(10)
return 'f' + str(a) + str(b)
def test(x):
time.sleep(1)
# print x
return x
def test1(x):
time.sleep(1)
return x
if __name__ == "__main__":
    # `look` and `thread` are not accepted by xBF.__init__, so the original
    # calls would raise TypeError; `printret` matches the intent here.
    # a = xBF(bfn=9, printret=False)
    # a = xBF(bfn=1220, printret=False)
    a = xBF(bfn=12, printret=True)
for x in xrange(40):
a.append(test, (x,))
a.start()
a.Print()
dd = a.dict()
# for k in dd:
# print'<%s>' % k,'-'*20
# print dd[k]
| lgpl-3.0 |
cloudbase/maas | src/maasserver/tests/test_messages.py | 1 | 6341 | # Copyright 2012, 2013 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test maasserver messages."""
from __future__ import (
absolute_import,
print_function,
unicode_literals,
)
str = None
__metaclass__ = type
__all__ = []
import json
import socket
from maasserver.exceptions import NoRabbit
from maasserver.messages import (
MAASMessenger,
MESSENGER_EVENT,
MessengerBase,
)
from maasserver.models import Node
from maasserver.testing.factory import factory
from maasserver.testing.testcase import MAASServerTestCase
from maasserver.tests.models import MessagesTestModel
from maastesting.djangotestcase import TestModelMixin
class FakeProducer:
"""A fake RabbitProducer that simply records published messages."""
def __init__(self):
self.messages = []
def publish(self, message):
self.messages.append(message)
class TestMessenger(MessengerBase):
def create_msg(self, event_name, instance):
return [event_name, instance]
class MessengerBaseTest(TestModelMixin, MAASServerTestCase):
app = 'maasserver.tests'
def test_update_obj_publishes_message_if_created(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
instance = factory.getRandomString()
messenger.update_obj(MessagesTestModel, instance, True)
self.assertEqual(
[[MESSENGER_EVENT.CREATED, instance]], producer.messages)
def test_update_obj_publishes_message_if_not_created(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
instance = factory.getRandomString()
messenger.update_obj(MessagesTestModel, instance, False)
self.assertEqual(
[[MESSENGER_EVENT.UPDATED, instance]], producer.messages)
def test_delete_obj_publishes_message(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
instance = factory.getRandomString()
messenger.delete_obj(MessagesTestModel, instance)
self.assertEqual(
[[MESSENGER_EVENT.DELETED, instance]], producer.messages)
def test_register_registers_update_signal(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
obj = MessagesTestModel(name=factory.getRandomString())
obj.save()
messenger.register()
obj.save()
self.assertEqual(
[[MESSENGER_EVENT.UPDATED, obj]], producer.messages)
def test_register_registers_created_signal(self):
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
messenger.register()
obj = MessagesTestModel(name=factory.getRandomString())
obj.save()
self.assertEqual(
[[MESSENGER_EVENT.CREATED, obj]], producer.messages)
def test_register_registers_delete_signal(self):
obj = MessagesTestModel(name=factory.getRandomString())
obj.save()
producer = FakeProducer()
messenger = TestMessenger(MessagesTestModel, producer)
messenger.register()
obj.delete()
self.assertEqual(
[[MESSENGER_EVENT.DELETED, obj]], producer.messages)
def test_publish_message_publishes_message(self):
event = factory.getRandomString()
instance = {factory.getRandomString(): factory.getRandomString()}
messenger = TestMessenger(MessagesTestModel, FakeProducer())
messenger.publish_message(messenger.create_msg(event, instance))
self.assertEqual([[event, instance]], messenger.producer.messages)
def test_publish_message_swallows_missing_rabbit(self):
event = factory.getRandomString()
instance = {factory.getRandomString(): factory.getRandomString()}
def fail_for_lack_of_rabbit(*args, **kwargs):
raise NoRabbit("I'm pretending not to have a RabbitMQ.")
messenger = TestMessenger(MessagesTestModel, FakeProducer())
messenger.producer.publish = fail_for_lack_of_rabbit
messenger.publish_message(messenger.create_msg(event, instance))
self.assertEqual([], messenger.producer.messages)
def test_publish_message_propagates_exceptions(self):
event = factory.getRandomString()
instance = {factory.getRandomString(): factory.getRandomString()}
def fail_despite_having_a_rabbit(*args, **kwargs):
raise socket.error("I have a rabbit but I fail anyway.")
messenger = TestMessenger(MessagesTestModel, FakeProducer())
messenger.producer.publish = fail_despite_having_a_rabbit
self.assertRaises(
socket.error,
messenger.publish_message, messenger.create_msg(event, instance))
self.assertEqual([], messenger.producer.messages)
class MAASMessengerTest(TestModelMixin, MAASServerTestCase):
app = 'maasserver.tests'
def test_event_key(self):
producer = FakeProducer()
event_name = factory.getRandomString()
obj = MessagesTestModel(name=factory.getRandomString())
messenger = MAASMessenger(MessagesTestModel, producer)
self.assertEqual(
'%s.%s' % ('MessagesTestModel', event_name),
messenger.event_key(event_name, obj))
def test_create_msg(self):
producer = FakeProducer()
messenger = MAASMessenger(Node, producer)
event_name = factory.getRandomString()
obj_name = factory.getRandomString()
obj = MessagesTestModel(name=obj_name)
obj.save()
msg = messenger.create_msg(event_name, obj)
decoded_msg = json.loads(msg)
self.assertItemsEqual(['instance', 'event_key'], list(decoded_msg))
self.assertItemsEqual(
['id', 'name'], list(decoded_msg['instance']))
self.assertEqual(
obj_name, decoded_msg['instance']['name'])
def test_msg_containing_node_representation(self):
node = factory.make_node()
messenger = MAASMessenger(Node, FakeProducer())
msg = messenger.create_msg(factory.getRandomString(), node)
decoded_msg = json.loads(msg)
self.assertItemsEqual(['instance', 'event_key'], list(decoded_msg))
| agpl-3.0 |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/scipy/linalg/basic.py | 9 | 39330 | #
# Author: Pearu Peterson, March 2002
#
# w/ additions by Travis Oliphant, March 2002
# and Jake Vanderplas, August 2012
from __future__ import division, print_function, absolute_import
__all__ = ['solve', 'solve_triangular', 'solveh_banded', 'solve_banded',
'solve_toeplitz', 'solve_circulant', 'inv', 'det', 'lstsq',
'pinv', 'pinv2', 'pinvh']
import warnings
import numpy as np
from .flinalg import get_flinalg_funcs
from .lapack import get_lapack_funcs, _compute_lwork
from .misc import LinAlgError, _datacopied
from .decomp import _asarray_validated
from . import decomp, decomp_svd
from ._solve_toeplitz import levinson
# Linear equations
def solve(a, b, sym_pos=False, lower=False, overwrite_a=False,
overwrite_b=False, debug=False, check_finite=True):
"""
Solve the equation ``a x = b`` for ``x``.
Parameters
----------
a : (M, M) array_like
A square matrix.
b : (M,) or (M, N) array_like
Right-hand side matrix in ``a x = b``.
sym_pos : bool, optional
Assume `a` is symmetric and positive definite.
lower : bool, optional
Use only data contained in the lower triangle of `a`, if `sym_pos` is
true. Default is to use upper triangle.
overwrite_a : bool, optional
Allow overwriting data in `a` (may enhance performance).
Default is False.
overwrite_b : bool, optional
Allow overwriting data in `b` (may enhance performance).
Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, N) ndarray
Solution to the system ``a x = b``. Shape of the return matches the
shape of `b`.
Raises
------
LinAlgError
If `a` is singular.
ValueError
If `a` is not square
Examples
--------
Given `a` and `b`, solve for `x`:
>>> a = np.array([[3, 2, 0], [1, -1, 0], [0, 5, 1]])
>>> b = np.array([2, 4, -1])
>>> from scipy import linalg
>>> x = linalg.solve(a, b)
>>> x
array([ 2., -2., 9.])
>>> np.dot(a, x) == b
array([ True, True, True], dtype=bool)
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
if a1.shape[0] != b1.shape[0]:
raise ValueError('incompatible dimensions')
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if debug:
print('solve:overwrite_a=', overwrite_a)
print('solve:overwrite_b=', overwrite_b)
if sym_pos:
posv, = get_lapack_funcs(('posv',), (a1, b1))
c, x, info = posv(a1, b1, lower=lower,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
else:
gesv, = get_lapack_funcs(('gesv',), (a1, b1))
lu, piv, x, info = gesv(a1, b1, overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix")
raise ValueError('illegal value in %d-th argument of internal gesv|posv' %
-info)
def solve_triangular(a, b, trans=0, lower=False, unit_diagonal=False,
overwrite_b=False, debug=False, check_finite=True):
"""
Solve the equation `a x = b` for `x`, assuming a is a triangular matrix.
Parameters
----------
a : (M, M) array_like
A triangular matrix
b : (M,) or (M, N) array_like
Right-hand side matrix in `a x = b`
lower : bool, optional
Use only data contained in the lower triangle of `a`.
Default is to use upper triangle.
trans : {0, 1, 2, 'N', 'T', 'C'}, optional
Type of system to solve:
======== =========
trans system
======== =========
0 or 'N' a x = b
1 or 'T' a^T x = b
2 or 'C' a^H x = b
======== =========
unit_diagonal : bool, optional
If True, diagonal elements of `a` are assumed to be 1 and
will not be referenced.
overwrite_b : bool, optional
Allow overwriting data in `b` (may enhance performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, N) ndarray
Solution to the system `a x = b`. Shape of return matches `b`.
Raises
------
LinAlgError
If `a` is singular
Notes
-----
.. versionadded:: 0.9.0
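    Examples
    --------
    A small worked example (matrix values chosen arbitrarily). Solve the
    lower triangular system ``a x = b``, where::
        a = [3  0  0  0]       b = [4]
            [2  1  0  0]           [2]
            [1  0  1  0]           [4]
            [1  1  1  1]           [2]
    >>> from scipy.linalg import solve_triangular
    >>> a = np.array([[3, 0, 0, 0], [2, 1, 0, 0], [1, 0, 1, 0], [1, 1, 1, 1]])
    >>> b = np.array([4, 2, 4, 2])
    >>> x = solve_triangular(a, b, lower=True)
    >>> x
    array([ 1.33333333, -0.66666667,  2.66666667, -1.33333333])
    >>> a.dot(x)  # check the result
    array([ 4.,  2.,  4.,  2.])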
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
if a1.shape[0] != b1.shape[0]:
raise ValueError('incompatible dimensions')
overwrite_b = overwrite_b or _datacopied(b1, b)
if debug:
print('solve:overwrite_b=', overwrite_b)
trans = {'N': 0, 'T': 1, 'C': 2}.get(trans, trans)
trtrs, = get_lapack_funcs(('trtrs',), (a1, b1))
x, info = trtrs(a1, b1, overwrite_b=overwrite_b, lower=lower,
trans=trans, unitdiag=unit_diagonal)
if info == 0:
return x
if info > 0:
        raise LinAlgError("singular matrix: resolution failed at diagonal %s" %
                          (info-1))
raise ValueError('illegal value in %d-th argument of internal trtrs' %
-info)
def solve_banded(l_and_u, ab, b, overwrite_ab=False, overwrite_b=False,
debug=False, check_finite=True):
"""
Solve the equation a x = b for x, assuming a is banded matrix.
The matrix a is stored in `ab` using the matrix diagonal ordered form::
ab[u + i - j, j] == a[i,j]
Example of `ab` (shape of a is (6,6), `u` =1, `l` =2)::
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Parameters
----------
(l, u) : (integer, integer)
Number of non-zero lower and upper diagonals
ab : (`l` + `u` + 1, M) array_like
Banded matrix
b : (M,) or (M, K) array_like
Right-hand side
overwrite_ab : bool, optional
Discard data in `ab` (may enhance performance)
overwrite_b : bool, optional
Discard data in `b` (may enhance performance)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system a x = b. Returned shape depends on the
shape of `b`.
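    Examples
    --------
    A small illustrative system (matrix values chosen arbitrarily).
    Solve ``a x = b``, where::
        a = [5  2 -1  0  0]       b = [0]
            [1  4  2 -1  0]           [1]
            [0  1  3  2 -1]           [2]
            [0  0  1  2  2]           [2]
            [0  0  0  1  1]           [3]
    There is one nonzero diagonal below the main diagonal (`l` = 1), and
    two above (`u` = 2), so `ab` has shape (4, 5)::
        ab = [*  * -1 -1 -1]
             [*  2  2  2  2]
             [5  4  3  2  1]
             [1  1  1  1  *]
    >>> from scipy.linalg import solve_banded
    >>> ab = np.array([[0, 0, -1, -1, -1],
    ...                [0, 2, 2, 2, 2],
    ...                [5, 4, 3, 2, 1],
    ...                [1, 1, 1, 1, 0]])
    >>> b = np.array([0, 1, 2, 2, 3])
    >>> x = solve_banded((1, 2), ab, b)
    >>> a = np.array([[5, 2, -1, 0, 0], [1, 4, 2, -1, 0], [0, 1, 3, 2, -1],
    ...               [0, 0, 1, 2, 2], [0, 0, 0, 1, 1]])
    >>> np.allclose(a.dot(x), b)
    True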
"""
a1 = _asarray_validated(ab, check_finite=check_finite, as_inexact=True)
b1 = _asarray_validated(b, check_finite=check_finite, as_inexact=True)
# Validate shapes.
if a1.shape[-1] != b1.shape[0]:
raise ValueError("shapes of ab and b are not compatible.")
(l, u) = l_and_u
if l + u + 1 != a1.shape[0]:
raise ValueError("invalid values for the number of lower and upper "
"diagonals: l+u+1 (%d) does not equal ab.shape[0] "
"(%d)" % (l+u+1, ab.shape[0]))
overwrite_b = overwrite_b or _datacopied(b1, b)
if a1.shape[-1] == 1:
b2 = np.array(b1, copy=overwrite_b)
b2 /= a1[1, 0]
return b2
if l == u == 1:
overwrite_ab = overwrite_ab or _datacopied(a1, ab)
gtsv, = get_lapack_funcs(('gtsv',), (a1, b1))
du = a1[0, 1:]
d = a1[1, :]
dl = a1[2, :-1]
du2, d, du, x, info = gtsv(dl, d, du, b1, overwrite_ab, overwrite_ab,
overwrite_ab, overwrite_b)
else:
gbsv, = get_lapack_funcs(('gbsv',), (a1, b1))
a2 = np.zeros((2*l+u+1, a1.shape[1]), dtype=gbsv.dtype)
a2[l:, :] = a1
lu, piv, x, info = gbsv(l, u, a2, b1, overwrite_ab=True,
overwrite_b=overwrite_b)
if info == 0:
return x
if info > 0:
raise LinAlgError("singular matrix")
raise ValueError('illegal value in %d-th argument of internal gbsv/gtsv' %
-info)
def solveh_banded(ab, b, overwrite_ab=False, overwrite_b=False, lower=False,
check_finite=True):
"""
Solve equation a x = b. a is Hermitian positive-definite banded matrix.
The matrix a is stored in `ab` either in lower diagonal or upper
diagonal ordered form:
ab[u + i - j, j] == a[i,j] (if upper form; i <= j)
ab[ i - j, j] == a[i,j] (if lower form; i >= j)
Example of `ab` (shape of a is (6, 6), `u` =2)::
upper form:
* * a02 a13 a24 a35
* a01 a12 a23 a34 a45
a00 a11 a22 a33 a44 a55
lower form:
a00 a11 a22 a33 a44 a55
a10 a21 a32 a43 a54 *
a20 a31 a42 a53 * *
Cells marked with * are not used.
Parameters
----------
ab : (`u` + 1, M) array_like
Banded matrix
b : (M,) or (M, K) array_like
Right-hand side
overwrite_ab : bool, optional
Discard data in `ab` (may enhance performance)
overwrite_b : bool, optional
Discard data in `b` (may enhance performance)
lower : bool, optional
Is the matrix in the lower form. (Default is upper form)
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system a x = b. Shape of return matches shape
of `b`.
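    Examples
    --------
    A small illustrative example. Solve the symmetric positive-definite
    tridiagonal system ``a x = b``, where::
        a = [ 2 -1  0  0]       b = [1]
            [-1  2 -1  0]           [2]
            [ 0 -1  2 -1]           [2]
            [ 0  0 -1  2]           [1]
    In upper diagonal ordered form (`u` = 1) the banded storage is::
        ab = [* -1 -1 -1]
             [2  2  2  2]
    >>> from scipy.linalg import solveh_banded
    >>> ab = np.array([[0., -1., -1., -1.],
    ...                [2., 2., 2., 2.]])
    >>> b = np.array([1., 2., 2., 1.])
    >>> x = solveh_banded(ab, b)
    >>> a = np.array([[2., -1., 0., 0.], [-1., 2., -1., 0.],
    ...               [0., -1., 2., -1.], [0., 0., -1., 2.]])
    >>> np.allclose(a.dot(x), b)
    True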
"""
a1 = _asarray_validated(ab, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
# Validate shapes.
if a1.shape[-1] != b1.shape[0]:
raise ValueError("shapes of ab and b are not compatible.")
overwrite_b = overwrite_b or _datacopied(b1, b)
overwrite_ab = overwrite_ab or _datacopied(a1, ab)
if a1.shape[0] == 2:
ptsv, = get_lapack_funcs(('ptsv',), (a1, b1))
if lower:
d = a1[0, :].real
e = a1[1, :-1]
else:
d = a1[1, :].real
e = a1[0, 1:].conj()
d, du, x, info = ptsv(d, e, b1, overwrite_ab, overwrite_ab,
overwrite_b)
else:
pbsv, = get_lapack_funcs(('pbsv',), (a1, b1))
c, x, info = pbsv(a1, b1, lower=lower, overwrite_ab=overwrite_ab,
overwrite_b=overwrite_b)
if info > 0:
raise LinAlgError("%d-th leading minor not positive definite" % info)
if info < 0:
raise ValueError('illegal value in %d-th argument of internal pbsv' %
-info)
return x
def solve_toeplitz(c_or_cr, b, check_finite=True):
"""Solve a Toeplitz system using Levinson Recursion
The Toeplitz matrix has constant diagonals, with c as its first column
and r as its first row. If r is not given, ``r == conjugate(c)`` is
assumed.
Parameters
----------
c_or_cr : array_like or tuple of (array_like, array_like)
The vector ``c``, or a tuple of arrays (``c``, ``r``). Whatever the
actual shape of ``c``, it will be converted to a 1-D array. If not
supplied, ``r = conjugate(c)`` is assumed; in this case, if c[0] is
real, the Toeplitz matrix is Hermitian. r[0] is ignored; the first row
of the Toeplitz matrix is ``[c[0], r[1:]]``. Whatever the actual shape
of ``r``, it will be converted to a 1-D array.
b : (M,) or (M, K) array_like
Right-hand side in ``T x = b``.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(result entirely NaNs) if the inputs do contain infinities or NaNs.
Returns
-------
x : (M,) or (M, K) ndarray
The solution to the system ``T x = b``. Shape of return matches shape
of `b`.
Notes
-----
The solution is computed using Levinson-Durbin recursion, which is faster
than generic least-squares methods, but can be less numerically stable.
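    Examples
    --------
    Solve a small Toeplitz system (values chosen arbitrarily) and check
    the result against the explicit matrix::
        T = [ 1 -1 -2 -3]       b = [1]
            [ 3  1 -1 -2]           [2]
            [ 6  3  1 -1]           [2]
            [10  6  3  1]           [5]
    >>> from scipy.linalg import solve_toeplitz, toeplitz
    >>> c = np.array([1, 3, 6, 10])    # first column of T
    >>> r = np.array([1, -1, -2, -3])  # first row of T
    >>> b = np.array([1, 2, 2, 5])
    >>> x = solve_toeplitz((c, r), b)
    >>> T = toeplitz(c, r)
    >>> np.allclose(T.dot(x), b)
    True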
"""
# If numerical stability of this algorithm is a problem, a future
# developer might consider implementing other O(N^2) Toeplitz solvers,
# such as GKO (http://www.jstor.org/stable/2153371) or Bareiss.
if isinstance(c_or_cr, tuple):
c, r = c_or_cr
c = _asarray_validated(c, check_finite=check_finite).ravel()
r = _asarray_validated(r, check_finite=check_finite).ravel()
else:
c = _asarray_validated(c_or_cr, check_finite=check_finite).ravel()
r = c.conjugate()
# Form a 1D array of values to be used in the matrix, containing a reversed
# copy of r[1:], followed by c.
vals = np.concatenate((r[-1:0:-1], c))
if b is None:
raise ValueError('illegal value, `b` is a required argument')
if vals.shape[0] != (2*b.shape[0] - 1):
raise ValueError('incompatible dimensions')
b = _asarray_validated(b)
if np.iscomplexobj(vals) or np.iscomplexobj(b):
vals = np.asarray(vals, dtype=np.complex128, order='c')
b = np.asarray(b, dtype=np.complex128)
else:
vals = np.asarray(vals, dtype=np.double, order='c')
b = np.asarray(b, dtype=np.double)
if b.ndim == 1:
x, _ = levinson(vals, np.ascontiguousarray(b))
else:
b_shape = b.shape
b = b.reshape(b.shape[0], -1)
        x = np.column_stack(
            [levinson(vals, np.ascontiguousarray(b[:, i]))[0]
             for i in range(b.shape[1])])
x = x.reshape(*b_shape)
return x
def _get_axis_len(aname, a, axis):
ax = axis
if ax < 0:
ax += a.ndim
if 0 <= ax < a.ndim:
return a.shape[ax]
raise ValueError("'%saxis' entry is out of bounds" % (aname,))
def solve_circulant(c, b, singular='raise', tol=None,
caxis=-1, baxis=0, outaxis=0):
"""Solve C x = b for x, where C is a circulant matrix.
`C` is the circulant matrix associated with the vector `c`.
The system is solved by doing division in Fourier space. The
calculation is::
x = ifft(fft(b) / fft(c))
where `fft` and `ifft` are the fast Fourier transform and its inverse,
respectively. For a large vector `c`, this is *much* faster than
solving the system with the full circulant matrix.
Parameters
----------
c : array_like
The coefficients of the circulant matrix.
b : array_like
        Right-hand side matrix in ``C x = b``.
singular : str, optional
This argument controls how a near singular circulant matrix is
handled. If `singular` is "raise" and the circulant matrix is
near singular, a `LinAlgError` is raised. If `singular` is
"lstsq", the least squares solution is returned. Default is "raise".
tol : float, optional
If any eigenvalue of the circulant matrix has an absolute value
that is less than or equal to `tol`, the matrix is considered to be
near singular. If not given, `tol` is set to::
tol = abs_eigs.max() * abs_eigs.size * np.finfo(np.float64).eps
where `abs_eigs` is the array of absolute values of the eigenvalues
of the circulant matrix.
caxis : int
When `c` has dimension greater than 1, it is viewed as a collection
of circulant vectors. In this case, `caxis` is the axis of `c` that
holds the vectors of circulant coefficients.
baxis : int
When `b` has dimension greater than 1, it is viewed as a collection
of vectors. In this case, `baxis` is the axis of `b` that holds the
right-hand side vectors.
outaxis : int
When `c` or `b` are multidimensional, the value returned by
`solve_circulant` is multidimensional. In this case, `outaxis` is
the axis of the result that holds the solution vectors.
Returns
-------
x : ndarray
Solution to the system ``C x = b``.
Raises
------
LinAlgError
If the circulant matrix associated with `c` is near singular.
See Also
--------
circulant
Notes
-----
For a one-dimensional vector `c` with length `m`, and an array `b`
with shape ``(m, ...)``,
solve_circulant(c, b)
returns the same result as
solve(circulant(c), b)
where `solve` and `circulant` are from `scipy.linalg`.
.. versionadded:: 0.16.0
Examples
--------
>>> from scipy.linalg import solve_circulant, solve, circulant, lstsq
>>> c = np.array([2, 2, 4])
>>> b = np.array([1, 2, 3])
>>> solve_circulant(c, b)
array([ 0.75, -0.25, 0.25])
Compare that result to solving the system with `scipy.linalg.solve`:
>>> solve(circulant(c), b)
array([ 0.75, -0.25, 0.25])
A singular example:
>>> c = np.array([1, 1, 0, 0])
>>> b = np.array([1, 2, 3, 4])
Calling ``solve_circulant(c, b)`` will raise a `LinAlgError`. For the
least square solution, use the option ``singular='lstsq'``:
>>> solve_circulant(c, b, singular='lstsq')
array([ 0.25, 1.25, 2.25, 1.25])
Compare to `scipy.linalg.lstsq`:
>>> x, resid, rnk, s = lstsq(circulant(c), b)
>>> x
array([ 0.25, 1.25, 2.25, 1.25])
A broadcasting example:
Suppose we have the vectors of two circulant matrices stored in an array
with shape (2, 5), and three `b` vectors stored in an array with shape
(3, 5). For example,
>>> c = np.array([[1.5, 2, 3, 0, 0], [1, 1, 4, 3, 2]])
>>> b = np.arange(15).reshape(-1, 5)
We want to solve all combinations of circulant matrices and `b` vectors,
with the result stored in an array with shape (2, 3, 5). When we
disregard the axes of `c` and `b` that hold the vectors of coefficients,
the shapes of the collections are (2,) and (3,), respectively, which are
not compatible for broadcasting. To have a broadcast result with shape
(2, 3), we add a trivial dimension to `c`: ``c[:, np.newaxis, :]`` has
shape (2, 1, 5). The last dimension holds the coefficients of the
circulant matrices, so when we call `solve_circulant`, we can use the
default ``caxis=-1``. The coefficients of the `b` vectors are in the last
dimension of the array `b`, so we use ``baxis=-1``. If we use the
default `outaxis`, the result will have shape (5, 2, 3), so we'll use
``outaxis=-1`` to put the solution vectors in the last dimension.
>>> x = solve_circulant(c[:, np.newaxis, :], b, baxis=-1, outaxis=-1)
>>> x.shape
(2, 3, 5)
>>> np.set_printoptions(precision=3) # For compact output of numbers.
>>> x
array([[[-0.118, 0.22 , 1.277, -0.142, 0.302],
[ 0.651, 0.989, 2.046, 0.627, 1.072],
[ 1.42 , 1.758, 2.816, 1.396, 1.841]],
[[ 0.401, 0.304, 0.694, -0.867, 0.377],
[ 0.856, 0.758, 1.149, -0.412, 0.831],
[ 1.31 , 1.213, 1.603, 0.042, 1.286]]])
Check by solving one pair of `c` and `b` vectors (cf. ``x[1, 1, :]``):
>>> solve_circulant(c[1], b[1, :])
array([ 0.856, 0.758, 1.149, -0.412, 0.831])
"""
c = np.atleast_1d(c)
nc = _get_axis_len("c", c, caxis)
b = np.atleast_1d(b)
nb = _get_axis_len("b", b, baxis)
if nc != nb:
raise ValueError('Incompatible c and b axis lengths')
fc = np.fft.fft(np.rollaxis(c, caxis, c.ndim), axis=-1)
abs_fc = np.abs(fc)
if tol is None:
# This is the same tolerance as used in np.linalg.matrix_rank.
tol = abs_fc.max(axis=-1) * nc * np.finfo(np.float64).eps
if tol.shape != ():
tol.shape = tol.shape + (1,)
else:
tol = np.atleast_1d(tol)
near_zeros = abs_fc <= tol
is_near_singular = np.any(near_zeros)
if is_near_singular:
if singular == 'raise':
raise LinAlgError("near singular circulant matrix.")
else:
# Replace the small values with 1 to avoid errors in the
# division fb/fc below.
fc[near_zeros] = 1
fb = np.fft.fft(np.rollaxis(b, baxis, b.ndim), axis=-1)
q = fb / fc
if is_near_singular:
# `near_zeros` is a boolean array, same shape as `c`, that is
# True where `fc` is (near) zero. `q` is the broadcasted result
# of fb / fc, so to set the values of `q` to 0 where `fc` is near
# zero, we use a mask that is the broadcast result of an array
# of True values shaped like `b` with `near_zeros`.
mask = np.ones_like(b, dtype=bool) & near_zeros
q[mask] = 0
x = np.fft.ifft(q, axis=-1)
if not (np.iscomplexobj(c) or np.iscomplexobj(b)):
x = x.real
if outaxis != -1:
x = np.rollaxis(x, -1, outaxis)
return x
# matrix inversion
def inv(a, overwrite_a=False, check_finite=True):
"""
Compute the inverse of a matrix.
Parameters
----------
a : array_like
Square matrix to be inverted.
overwrite_a : bool, optional
Discard data in `a` (may improve performance). Default is False.
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
ainv : ndarray
Inverse of the matrix `a`.
Raises
------
LinAlgError :
If `a` is singular.
ValueError :
If `a` is not square, or not 2-dimensional.
Examples
--------
>>> from scipy import linalg
>>> a = np.array([[1., 2.], [3., 4.]])
>>> linalg.inv(a)
array([[-2. , 1. ],
[ 1.5, -0.5]])
>>> np.dot(a, linalg.inv(a))
array([[ 1., 0.],
[ 0., 1.]])
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or _datacopied(a1, a)
#XXX: I found no advantage or disadvantage of using finv.
## finv, = get_flinalg_funcs(('inv',),(a1,))
## if finv is not None:
## a_inv,info = finv(a1,overwrite_a=overwrite_a)
## if info==0:
## return a_inv
## if info>0: raise LinAlgError, "singular matrix"
## if info<0: raise ValueError,\
## 'illegal value in %d-th argument of internal inv.getrf|getri'%(-info)
getrf, getri, getri_lwork = get_lapack_funcs(('getrf', 'getri',
'getri_lwork'),
(a1,))
lu, piv, info = getrf(a1, overwrite_a=overwrite_a)
if info == 0:
lwork = _compute_lwork(getri_lwork, a1.shape[0])
# XXX: the following line fixes curious SEGFAULT when
# benchmarking 500x500 matrix inverse. This seems to
# be a bug in LAPACK ?getri routine because if lwork is
# minimal (when using lwork[0] instead of lwork[1]) then
# all tests pass. Further investigation is required if
# more such SEGFAULTs occur.
lwork = int(1.01 * lwork)
inv_a, info = getri(lu, piv, lwork=lwork, overwrite_lu=1)
if info > 0:
raise LinAlgError("singular matrix")
if info < 0:
raise ValueError('illegal value in %d-th argument of internal '
'getrf|getri' % -info)
return inv_a
### Determinant
def det(a, overwrite_a=False, check_finite=True):
"""
Compute the determinant of a matrix
The determinant of a square matrix is a value derived arithmetically
from the coefficients of the matrix.
The determinant for a 3x3 matrix, for example, is computed as follows::
a b c
d e f = A
g h i
det(A) = a*e*i + b*f*g + c*d*h - c*e*g - b*d*i - a*f*h
Parameters
----------
a : (M, M) array_like
A square matrix.
overwrite_a : bool, optional
Allow overwriting data in a (may enhance performance).
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
det : float or complex
Determinant of `a`.
Notes
-----
The determinant is computed via LU factorization, LAPACK routine z/dgetrf.
Examples
--------
>>> from scipy import linalg
>>> a = np.array([[1,2,3], [4,5,6], [7,8,9]])
>>> linalg.det(a)
0.0
>>> a = np.array([[0,2,3], [4,5,6], [7,8,9]])
>>> linalg.det(a)
3.0
"""
a1 = _asarray_validated(a, check_finite=check_finite)
if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
raise ValueError('expected square matrix')
overwrite_a = overwrite_a or _datacopied(a1, a)
fdet, = get_flinalg_funcs(('det',), (a1,))
a_det, info = fdet(a1, overwrite_a=overwrite_a)
if info < 0:
raise ValueError('illegal value in %d-th argument of internal '
'det.getrf' % -info)
return a_det
### Linear Least Squares
class LstsqLapackError(LinAlgError):
pass
def lstsq(a, b, cond=None, overwrite_a=False, overwrite_b=False,
check_finite=True, lapack_driver=None):
"""
Compute least-squares solution to equation Ax = b.
Compute a vector x such that the 2-norm ``|b - A x|`` is minimized.
Parameters
----------
a : (M, N) array_like
Left hand side matrix (2-D array).
b : (M,) or (M, K) array_like
Right hand side matrix or vector (1-D or 2-D array).
cond : float, optional
Cutoff for 'small' singular values; used to determine effective
rank of a. Singular values smaller than
``rcond * largest_singular_value`` are considered zero.
overwrite_a : bool, optional
Discard data in `a` (may enhance performance). Default is False.
overwrite_b : bool, optional
Discard data in `b` (may enhance performance). Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
    lapack_driver : str, optional
Which LAPACK driver is used to solve the least-squares problem.
Options are ``'gelsd'``, ``'gelsy'``, ``'gelss'``. Default
(``'gelsd'``) is a good choice. However, ``'gelsy'`` can be slightly
faster on many problems. ``'gelss'`` was used historically. It is
generally slow but uses less memory.
.. versionadded:: 0.17.0
Returns
-------
x : (N,) or (N, K) ndarray
Least-squares solution. Return shape matches shape of `b`.
residues : () or (1,) or (K,) ndarray
Sums of residues, squared 2-norm for each column in ``b - a x``.
If rank of matrix a is ``< N`` or ``> M``, or ``'gelsy'`` is used,
this is an empty array. If b was 1-D, this is an (1,) shape array,
otherwise the shape is (K,).
rank : int
Effective rank of matrix `a`.
s : (min(M,N),) ndarray or None
Singular values of `a`. The condition number of a is
``abs(s[0] / s[-1])``. None is returned when ``'gelsy'`` is used.
Raises
------
LinAlgError :
If computation does not converge.
ValueError :
When parameters are wrong.
See Also
--------
optimize.nnls : linear least squares with non-negativity constraint
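    Examples
    --------
    A simple illustrative fit (data values chosen arbitrarily): find the
    least-squares line ``y = c0 + c1*t`` through the points
    (1, 1), (2, 2.5), (3, 3), (4, 4.5).
    >>> from scipy.linalg import lstsq
    >>> A = np.array([[1., 1.], [1., 2.], [1., 3.], [1., 4.]])
    >>> y = np.array([1., 2.5, 3., 4.5])
    >>> sol, res, rank, sv = lstsq(A, y)
    >>> rank
    2
    >>> np.allclose(sol, [0.0, 1.1])
    True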
"""
a1 = _asarray_validated(a, check_finite=check_finite)
b1 = _asarray_validated(b, check_finite=check_finite)
if len(a1.shape) != 2:
raise ValueError('expected matrix')
m, n = a1.shape
if len(b1.shape) == 2:
nrhs = b1.shape[1]
else:
nrhs = 1
if m != b1.shape[0]:
raise ValueError('incompatible dimensions')
driver = lapack_driver
if driver is None:
driver = lstsq.default_lapack_driver
if driver not in ('gelsd', 'gelsy', 'gelss'):
raise ValueError('LAPACK driver "%s" is not found' % driver)
lapack_func, lapack_lwork = get_lapack_funcs((driver,
'%s_lwork' % driver), (a1, b1))
real_data = True if (lapack_func.dtype.kind == 'f') else False
if m < n:
# need to extend b matrix as it will be filled with
# a larger solution matrix
if len(b1.shape) == 2:
b2 = np.zeros((n, nrhs), dtype=lapack_func.dtype)
b2[:m, :] = b1
else:
b2 = np.zeros(n, dtype=lapack_func.dtype)
b2[:m] = b1
b1 = b2
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if cond is None:
cond = np.finfo(lapack_func.dtype).eps
if driver in ('gelss', 'gelsd'):
if driver == 'gelss':
lwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
v, x, s, rank, work, info = lapack_func(a1, b1, cond, lwork,
overwrite_a=overwrite_a,
overwrite_b=overwrite_b)
elif driver == 'gelsd':
if real_data:
lwork, iwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
if iwork == 0:
# this is LAPACK bug 0038: dgelsd does not provide the
# size of the iwork array in query mode. This bug was
# fixed in LAPACK 3.2.2, released July 21, 2010.
mesg = ("internal gelsd driver lwork query error, "
"required iwork dimension not returned. "
"This is likely the result of LAPACK bug "
"0038, fixed in LAPACK 3.2.2 (released "
"July 21, 2010). ")
if lapack_driver is None:
# restart with gelss
lstsq.default_lapack_driver = 'gelss'
mesg += "Falling back to 'gelss' driver."
warnings.warn(mesg, RuntimeWarning)
return lstsq(a, b, cond, overwrite_a, overwrite_b,
check_finite, lapack_driver='gelss')
# can't proceed, bail out
mesg += ("Use a different lapack_driver when calling lstsq "
"or upgrade LAPACK.")
raise LstsqLapackError(mesg)
x, s, rank, info = lapack_func(a1, b1, lwork,
iwork, cond, False, False)
else: # complex data
lwork, rwork, iwork = _compute_lwork(lapack_lwork, m, n,
nrhs, cond)
x, s, rank, info = lapack_func(a1, b1, lwork, rwork, iwork,
cond, False, False)
if info > 0:
raise LinAlgError("SVD did not converge in Linear Least Squares")
if info < 0:
raise ValueError('illegal value in %d-th argument of internal %s'
% (-info, lapack_driver))
resids = np.asarray([], dtype=x.dtype)
if m > n:
x1 = x[:n]
if rank == n:
resids = np.sum(np.abs(x[n:])**2, axis=0)
x = x1
return x, resids, rank, s
elif driver == 'gelsy':
lwork = _compute_lwork(lapack_lwork, m, n, nrhs, cond)
jptv = np.zeros((a1.shape[1],1), dtype=np.int32)
v, x, j, rank, info = lapack_func(a1, b1, jptv, cond,
lwork, False, False)
if info < 0:
raise ValueError("illegal value in %d-th argument of internal "
"gelsy" % -info)
if m > n:
x1 = x[:n]
x = x1
return x, np.array([], x.dtype), rank, None
lstsq.default_lapack_driver = 'gelsd'
def pinv(a, cond=None, rcond=None, return_rank=False, check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a matrix.
Calculate a generalized inverse of a matrix using a least-squares
solver.
Parameters
----------
a : (M, N) array_like
Matrix to be pseudo-inverted.
cond, rcond : float, optional
Cutoff for 'small' singular values in the least-squares solver.
Singular values smaller than ``rcond * largest_singular_value``
are considered zero.
return_rank : bool, optional
if True, return the effective rank of the matrix
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, M) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if return_rank == True
Raises
------
LinAlgError
If computation does not converge.
Examples
--------
>>> from scipy import linalg
>>> a = np.random.randn(9, 6)
>>> B = linalg.pinv(a)
>>> np.allclose(a, np.dot(a, np.dot(B, a)))
True
>>> np.allclose(B, np.dot(B, np.dot(a, B)))
True
"""
a = _asarray_validated(a, check_finite=check_finite)
b = np.identity(a.shape[0], dtype=a.dtype)
if rcond is not None:
cond = rcond
x, resids, rank, s = lstsq(a, b, cond=cond, check_finite=False)
if return_rank:
return x, rank
else:
return x
def pinv2(a, cond=None, rcond=None, return_rank=False, check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a matrix.
Calculate a generalized inverse of a matrix using its
singular-value decomposition and including all 'large' singular
values.
Parameters
----------
a : (M, N) array_like
Matrix to be pseudo-inverted.
cond, rcond : float or None
Cutoff for 'small' singular values.
Singular values smaller than ``rcond*largest_singular_value``
are considered zero.
If None or -1, suitable machine precision is used.
return_rank : bool, optional
if True, return the effective rank of the matrix
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, M) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if return_rank == True
Raises
------
LinAlgError
If SVD computation does not converge.
Examples
--------
>>> from scipy import linalg
>>> a = np.random.randn(9, 6)
>>> B = linalg.pinv2(a)
>>> np.allclose(a, np.dot(a, np.dot(B, a)))
True
>>> np.allclose(B, np.dot(B, np.dot(a, B)))
True
"""
a = _asarray_validated(a, check_finite=check_finite)
u, s, vh = decomp_svd.svd(a, full_matrices=False, check_finite=False)
if rcond is not None:
cond = rcond
if cond in [None, -1]:
t = u.dtype.char.lower()
factor = {'f': 1E3, 'd': 1E6}
cond = factor[t] * np.finfo(t).eps
rank = np.sum(s > cond * np.max(s))
u = u[:, :rank]
u /= s[:rank]
B = np.transpose(np.conjugate(np.dot(u, vh[:rank])))
if return_rank:
return B, rank
else:
return B
def pinvh(a, cond=None, rcond=None, lower=True, return_rank=False,
check_finite=True):
"""
Compute the (Moore-Penrose) pseudo-inverse of a Hermitian matrix.
Calculate a generalized inverse of a Hermitian or real symmetric matrix
using its eigenvalue decomposition and including all eigenvalues with
'large' absolute value.
Parameters
----------
a : (N, N) array_like
        Real symmetric or complex Hermitian matrix to be pseudo-inverted.
cond, rcond : float or None
Cutoff for 'small' eigenvalues.
        Singular values smaller than ``rcond * largest_eigenvalue`` are
        considered zero.
If None or -1, suitable machine precision is used.
lower : bool, optional
Whether the pertinent array data is taken from the lower or upper
triangle of a. (Default: lower)
return_rank : bool, optional
if True, return the effective rank of the matrix
check_finite : bool, optional
Whether to check that the input matrix contains only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
B : (N, N) ndarray
The pseudo-inverse of matrix `a`.
rank : int
The effective rank of the matrix. Returned if return_rank == True
Raises
------
LinAlgError
If eigenvalue does not converge
Examples
--------
>>> from scipy.linalg import pinvh
>>> a = np.random.randn(9, 6)
>>> a = np.dot(a, a.T)
>>> B = pinvh(a)
>>> np.allclose(a, np.dot(a, np.dot(B, a)))
True
>>> np.allclose(B, np.dot(B, np.dot(a, B)))
True
"""
a = _asarray_validated(a, check_finite=check_finite)
s, u = decomp.eigh(a, lower=lower, check_finite=False)
if rcond is not None:
cond = rcond
if cond in [None, -1]:
t = u.dtype.char.lower()
factor = {'f': 1E3, 'd': 1E6}
cond = factor[t] * np.finfo(t).eps
# For Hermitian matrices, singular values equal abs(eigenvalues)
above_cutoff = (abs(s) > cond * np.max(abs(s)))
psigma_diag = 1.0 / s[above_cutoff]
u = u[:, above_cutoff]
B = np.dot(u * psigma_diag, np.conjugate(u).T)
if return_rank:
return B, len(psigma_diag)
else:
return B
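# Summary sketch (paraphrasing the docstrings above, not upstream text):
#   pinv(a)   - least-squares based, for general (M, N) matrices
#   pinv2(a)  - SVD based, for general (M, N) matrices
#   pinvh(a)  - eigendecomposition based, for Hermitian/symmetric (N, N) matrices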
| apache-2.0 |
askulkarni2/ansible-modules-core | cloud/amazon/ec2_scaling_policy.py | 37 | 6191 | #!/usr/bin/python
DOCUMENTATION = """
module: ec2_scaling_policy
short_description: Create or delete AWS scaling policies for Autoscaling groups
description:
- Can create or delete scaling policies for autoscaling groups
- Referenced autoscaling groups must already exist
version_added: "1.6"
author: "Zacharie Eakin (@zeekin)"
options:
state:
description:
- register or deregister the policy
required: true
choices: ['present', 'absent']
name:
description:
- Unique name for the scaling policy
required: true
asg_name:
description:
- Name of the associated autoscaling group
required: true
adjustment_type:
description:
- The type of change in capacity of the autoscaling group
required: false
choices: ['ChangeInCapacity','ExactCapacity','PercentChangeInCapacity']
scaling_adjustment:
description:
- The amount by which the autoscaling group is adjusted by the policy
required: false
min_adjustment_step:
description:
- Minimum amount of adjustment when policy is triggered
required: false
cooldown:
description:
- The minimum period of time between which autoscaling actions can take place
required: false
extends_documentation_fragment: aws
"""
EXAMPLES = '''
- ec2_scaling_policy:
state: present
region: US-XXX
name: "scaledown-policy"
adjustment_type: "ChangeInCapacity"
asg_name: "slave-pool"
scaling_adjustment: -1
min_adjustment_step: 1
cooldown: 300
'''
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
try:
import boto.ec2.autoscale
from boto.ec2.autoscale import ScalingPolicy
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def create_scaling_policy(connection, module):
sp_name = module.params.get('name')
adjustment_type = module.params.get('adjustment_type')
asg_name = module.params.get('asg_name')
scaling_adjustment = module.params.get('scaling_adjustment')
min_adjustment_step = module.params.get('min_adjustment_step')
cooldown = module.params.get('cooldown')
scalingPolicies = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])
if not scalingPolicies:
sp = ScalingPolicy(
name=sp_name,
adjustment_type=adjustment_type,
as_name=asg_name,
scaling_adjustment=scaling_adjustment,
min_adjustment_step=min_adjustment_step,
cooldown=cooldown)
try:
connection.create_scaling_policy(sp)
policy = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])[0]
module.exit_json(changed=True, name=policy.name, arn=policy.policy_arn, as_name=policy.as_name, scaling_adjustment=policy.scaling_adjustment, cooldown=policy.cooldown, adjustment_type=policy.adjustment_type, min_adjustment_step=policy.min_adjustment_step)
except BotoServerError, e:
module.fail_json(msg=str(e))
else:
policy = scalingPolicies[0]
changed = False
# min_adjustment_step attribute is only relevant if the adjustment_type
# is set to percentage change in capacity, so it is a special case
if getattr(policy, 'adjustment_type') == 'PercentChangeInCapacity':
if getattr(policy, 'min_adjustment_step') != module.params.get('min_adjustment_step'):
changed = True
# set the min adjustment step incase the user decided to change their
# adjustment type to percentage
setattr(policy, 'min_adjustment_step', module.params.get('min_adjustment_step'))
# check the remaining attributes
for attr in ('adjustment_type','scaling_adjustment','cooldown'):
if getattr(policy, attr) != module.params.get(attr):
changed = True
setattr(policy, attr, module.params.get(attr))
try:
if changed:
connection.create_scaling_policy(policy)
policy = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])[0]
module.exit_json(changed=changed, name=policy.name, arn=policy.policy_arn, as_name=policy.as_name, scaling_adjustment=policy.scaling_adjustment, cooldown=policy.cooldown, adjustment_type=policy.adjustment_type, min_adjustment_step=policy.min_adjustment_step)
except BotoServerError, e:
module.fail_json(msg=str(e))
def delete_scaling_policy(connection, module):
sp_name = module.params.get('name')
asg_name = module.params.get('asg_name')
scalingPolicies = connection.get_all_policies(as_group=asg_name,policy_names=[sp_name])
if scalingPolicies:
try:
connection.delete_policy(sp_name, asg_name)
module.exit_json(changed=True)
except BotoServerError, e:
module.exit_json(changed=False, msg=str(e))
else:
module.exit_json(changed=False)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name = dict(required=True, type='str'),
adjustment_type = dict(type='str', choices=['ChangeInCapacity','ExactCapacity','PercentChangeInCapacity']),
asg_name = dict(required=True, type='str'),
scaling_adjustment = dict(type='int'),
min_adjustment_step = dict(type='int'),
cooldown = dict(type='int'),
state=dict(default='present', choices=['present', 'absent']),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
state = module.params.get('state')
try:
connection = connect_to_aws(boto.ec2.autoscale, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
module.fail_json(msg = str(e))
if state == 'present':
create_scaling_policy(connection, module)
elif state == 'absent':
delete_scaling_policy(connection, module)
main()
| gpl-3.0 |
jontrulson/upm | examples/python/ms5803.py | 7 | 2129 | #!/usr/bin/python
# Author: Jon Trulson <[email protected]>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_ms5803 as sensorObj
def main():
# Instantiate a MS5803 instance using bus 0 and default i2c address
sensor = sensorObj.MS5803(0)
# For SPI, bus 0, you would pass -1 as the address, and a valid pin for CS:
    # sensor = sensorObj.MS5803(0, -1, 10)
## Exit handlers ##
# This function stops python from printing a stacktrace when you
# hit control-C
def SIGINTHandler(signum, frame):
raise SystemExit
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
signal.signal(signal.SIGINT, SIGINTHandler)
while (1):
sensor.update()
print("Temperature:", sensor.getTemperature(), "C,", end=' ')
print("Pressure: ", sensor.getPressure(), "mbar")
time.sleep(1)
if __name__ == '__main__':
main()
| mit |
rubyu/anki | aqt/addons.py | 2 | 5205 | # Copyright: Damien Elmes <[email protected]>
# -*- coding: utf-8 -*-
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import sys, os, traceback
from cStringIO import StringIO
from aqt.qt import *
from aqt.utils import showInfo, openFolder, isWin, openLink, \
askUser
from zipfile import ZipFile
import aqt.forms
import aqt
from aqt.downloader import download
# in the future, it would be nice to save the addon id and unzippped file list
# to the config so that we can clear up all files and check for updates
class AddonManager(object):
def __init__(self, mw):
self.mw = mw
f = self.mw.form; s = SIGNAL("triggered()")
self.mw.connect(f.actionOpenPluginFolder, s, self.onOpenAddonFolder)
self.mw.connect(f.actionDownloadSharedPlugin, s, self.onGetAddons)
self._menus = []
if isWin:
self.clearAddonCache()
sys.path.insert(0, self.addonsFolder())
if not self.mw.safeMode:
self.loadAddons()
def files(self):
return [f for f in os.listdir(self.addonsFolder())
if f.endswith(".py")]
def loadAddons(self):
for file in self.files():
try:
__import__(file.replace(".py", ""))
except:
traceback.print_exc()
self.rebuildAddonsMenu()
# Menus
######################################################################
def onOpenAddonFolder(self, path=None):
if path is None:
path = self.addonsFolder()
openFolder(path)
def rebuildAddonsMenu(self):
for m in self._menus:
self.mw.form.menuPlugins.removeAction(m.menuAction())
for file in self.files():
m = self.mw.form.menuPlugins.addMenu(
os.path.splitext(file)[0])
self._menus.append(m)
a = QAction(_("Edit..."), self.mw)
p = os.path.join(self.addonsFolder(), file)
self.mw.connect(a, SIGNAL("triggered()"),
lambda p=p: self.onEdit(p))
m.addAction(a)
a = QAction(_("Delete..."), self.mw)
self.mw.connect(a, SIGNAL("triggered()"),
lambda p=p: self.onRem(p))
m.addAction(a)
def onEdit(self, path):
d = QDialog(self.mw)
frm = aqt.forms.editaddon.Ui_Dialog()
frm.setupUi(d)
d.setWindowTitle(os.path.basename(path))
frm.text.setPlainText(unicode(open(path).read(), "utf8"))
d.connect(frm.buttonBox, SIGNAL("accepted()"),
lambda: self.onAcceptEdit(path, frm))
d.exec_()
def onAcceptEdit(self, path, frm):
open(path, "w").write(frm.text.toPlainText().encode("utf8"))
showInfo(_("Edits saved. Please restart Anki."))
def onRem(self, path):
if not askUser(_("Delete %s?") % os.path.basename(path)):
return
os.unlink(path)
self.rebuildAddonsMenu()
showInfo(_("Deleted. Please restart Anki."))
# Tools
######################################################################
def addonsFolder(self):
dir = self.mw.pm.addonFolder()
if isWin:
dir = dir.encode(sys.getfilesystemencoding())
return dir
def clearAddonCache(self):
"Clear .pyc files which may cause crashes if Python version updated."
dir = self.addonsFolder()
for curdir, dirs, files in os.walk(dir):
for f in files:
if not f.endswith(".pyc"):
continue
os.unlink(os.path.join(curdir, f))
def registerAddon(self, name, updateId):
# not currently used
return
# Installing add-ons
######################################################################
def onGetAddons(self):
GetAddons(self.mw)
def install(self, data, fname):
if fname.endswith(".py"):
# .py files go directly into the addon folder
path = os.path.join(self.addonsFolder(), fname)
open(path, "w").write(data)
return
# .zip file
z = ZipFile(StringIO(data))
base = self.addonsFolder()
for n in z.namelist():
if n.endswith("/"):
# folder; ignore
continue
# write
z.extract(n, base)
class GetAddons(QDialog):
def __init__(self, mw):
QDialog.__init__(self, mw)
self.mw = mw
self.form = aqt.forms.getaddons.Ui_Dialog()
self.form.setupUi(self)
b = self.form.buttonBox.addButton(
_("Browse"), QDialogButtonBox.ActionRole)
self.connect(b, SIGNAL("clicked()"), self.onBrowse)
self.exec_()
def onBrowse(self):
openLink(aqt.appShared + "addons/")
def accept(self):
QDialog.accept(self)
# create downloader thread
ret = download(self.mw, self.form.code.text())
if not ret:
return
data, fname = ret
self.mw.addonManager.install(data, fname)
self.mw.progress.finish()
showInfo(_("Download successful. Please restart Anki."))
| agpl-3.0 |
carl-mastrangelo/grpc | tools/profiling/microbenchmarks/bm_json.py | 18 | 6459 | # Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Utilities for manipulating JSON data that represents microbenchmark results.
import os
# template arguments and dynamic arguments of individual benchmark types
# Example benchmark name: "BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/0/0"
_BM_SPECS = {
'BM_UnaryPingPong': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size', 'response_size'],
},
'BM_PumpStreamClientToServer': {
'tpl': ['fixture'],
'dyn': ['request_size'],
},
'BM_PumpStreamServerToClient': {
'tpl': ['fixture'],
'dyn': ['request_size'],
},
'BM_StreamingPingPong': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size', 'request_count'],
},
'BM_StreamingPingPongMsgs': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size'],
},
'BM_PumpStreamServerToClient_Trickle': {
'tpl': [],
'dyn': ['request_size', 'bandwidth_kilobits'],
},
'BM_PumpUnbalancedUnary_Trickle': {
'tpl': [],
'dyn': ['cli_req_size', 'svr_req_size', 'bandwidth_kilobits'],
},
'BM_ErrorStringOnNewError': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorStringRepeatedly': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorGetStatus': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorGetStatusCode': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_ErrorHttpError': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_HasClearGrpcStatus': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_IsolatedFilter': {
'tpl': ['fixture', 'client_mutator'],
'dyn': [],
},
'BM_HpackEncoderEncodeHeader': {
'tpl': ['fixture'],
'dyn': ['end_of_stream', 'request_size'],
},
'BM_HpackParserParseHeader': {
'tpl': ['fixture', 'on_header'],
'dyn': [],
},
'BM_CallCreateDestroy': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_Zalloc': {
'tpl': [],
'dyn': ['request_size'],
},
'BM_PollEmptyPollset_SpeedOfLight': {
'tpl': [],
'dyn': ['request_size', 'request_count'],
},
'BM_StreamCreateSendInitialMetadataDestroy': {
'tpl': ['fixture'],
'dyn': [],
},
'BM_TransportStreamSend': {
'tpl': [],
'dyn': ['request_size'],
},
'BM_TransportStreamRecv': {
'tpl': [],
'dyn': ['request_size'],
},
'BM_StreamingPingPongWithCoalescingApi': {
'tpl': ['fixture', 'client_mutator', 'server_mutator'],
'dyn': ['request_size', 'request_count', 'end_of_stream'],
},
'BM_Base16SomeStuff': {
'tpl': [],
'dyn': ['request_size'],
}
}
def numericalize(s):
"""Convert abbreviations like '100M' or '10k' to a number."""
if not s: return ''
if s[-1] == 'k':
return float(s[:-1]) * 1024
if s[-1] == 'M':
return float(s[:-1]) * 1024 * 1024
if 0 <= (ord(s[-1]) - ord('0')) <= 9:
return float(s)
    assert False, 'not a number: %s' % s
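# For example (derived from the rules above): numericalize('10k') -> 10240.0
# and numericalize('1M') -> 1048576.0; plain digit strings pass through as floats.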
def parse_name(name):
cpp_name = name
if '<' not in name and '/' not in name and name not in _BM_SPECS:
return {'name': name, 'cpp_name': name}
rest = name
out = {}
tpl_args = []
dyn_args = []
if '<' in rest:
tpl_bit = rest[rest.find('<') + 1:rest.rfind('>')]
arg = ''
nesting = 0
for c in tpl_bit:
if c == '<':
nesting += 1
arg += c
elif c == '>':
nesting -= 1
arg += c
elif c == ',':
if nesting == 0:
tpl_args.append(arg.strip())
arg = ''
else:
arg += c
else:
arg += c
tpl_args.append(arg.strip())
rest = rest[:rest.find('<')] + rest[rest.rfind('>') + 1:]
if '/' in rest:
s = rest.split('/')
rest = s[0]
dyn_args = s[1:]
name = rest
assert name in _BM_SPECS, '_BM_SPECS needs to be expanded for %s' % name
assert len(dyn_args) == len(_BM_SPECS[name]['dyn'])
assert len(tpl_args) == len(_BM_SPECS[name]['tpl'])
out['name'] = name
out['cpp_name'] = cpp_name
out.update(
dict((k, numericalize(v))
for k, v in zip(_BM_SPECS[name]['dyn'], dyn_args)))
out.update(dict(zip(_BM_SPECS[name]['tpl'], tpl_args)))
return out
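# Illustrative result (hypothetical, following the example name at the top of
# this file and the 'BM_UnaryPingPong' spec in _BM_SPECS):
#   parse_name('BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/0/0')
#   -> {'name': 'BM_UnaryPingPong',
#       'cpp_name': 'BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/0/0',
#       'fixture': 'TCP', 'client_mutator': 'NoOpMutator',
#       'server_mutator': 'NoOpMutator',
#       'request_size': 0.0, 'response_size': 0.0}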
def expand_json(js, js2=None):
if not js and not js2: raise StopIteration()
if not js: js = js2
for bm in js['benchmarks']:
if bm['name'].endswith('_stddev') or bm['name'].endswith('_mean'):
continue
context = js['context']
if 'label' in bm:
labels_list = [
s.split(':')
for s in bm['label'].strip().split(' ')
if len(s) and s[0] != '#'
]
for el in labels_list:
el[0] = el[0].replace('/iter', '_per_iteration')
labels = dict(labels_list)
else:
labels = {}
row = {
'jenkins_build': os.environ.get('BUILD_NUMBER', ''),
'jenkins_job': os.environ.get('JOB_NAME', ''),
}
row.update(context)
row.update(bm)
row.update(parse_name(row['name']))
row.update(labels)
if js2:
for bm2 in js2['benchmarks']:
if bm['name'] == bm2['name'] and 'already_used' not in bm2:
row['cpu_time'] = bm2['cpu_time']
row['real_time'] = bm2['real_time']
row['iterations'] = bm2['iterations']
bm2['already_used'] = True
break
yield row
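# Hypothetical driver sketch (file name and printed keys assumed):
# expand_json() is a generator yielding one flat dict per benchmark run.
#   import json
#   with open('bm_output.json') as f:
#       for row in expand_json(json.loads(f.read())):
#           print row['cpp_name'], row['cpu_time']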
| apache-2.0 |
edx/edx-platform | lms/djangoapps/lti_provider/tests/test_views.py | 5 | 9132 | """
Tests for the LTI provider views
"""
from unittest.mock import MagicMock, patch
from django.test import TestCase
from django.test.client import RequestFactory
from django.urls import reverse
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from common.djangoapps.student.tests.factories import UserFactory
from lms.djangoapps.courseware.testutils import RenderXBlockTestMixin
from lms.djangoapps.lti_provider import models, views
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
LTI_DEFAULT_PARAMS = {
'roles': 'Instructor,urn:lti:instrole:ims/lis/Administrator',
'context_id': 'lti_launch_context_id',
'oauth_version': '1.0',
'oauth_consumer_key': 'consumer_key',
'oauth_signature': 'OAuth Signature',
'oauth_signature_method': 'HMAC-SHA1',
'oauth_timestamp': 'OAuth Timestamp',
'oauth_nonce': 'OAuth Nonce',
'user_id': 'LTI_User',
}
LTI_OPTIONAL_PARAMS = {
'context_title': 'context title',
'context_label': 'context label',
'lis_result_sourcedid': 'result sourcedid',
'lis_outcome_service_url': 'outcome service URL',
'tool_consumer_instance_guid': 'consumer instance guid'
}
COURSE_KEY = CourseLocator(org='some_org', course='some_course', run='some_run')
USAGE_KEY = BlockUsageLocator(course_key=COURSE_KEY, block_type='problem', block_id='block_id')
COURSE_PARAMS = {
'course_key': COURSE_KEY,
'usage_key': USAGE_KEY
}
ALL_PARAMS = dict(list(LTI_DEFAULT_PARAMS.items()) + list(COURSE_PARAMS.items()))
def build_launch_request(extra_post_data=None, param_to_delete=None):
"""
Helper method to create a new request object for the LTI launch.
"""
if extra_post_data is None:
extra_post_data = {}
post_data = dict(list(LTI_DEFAULT_PARAMS.items()) + list(extra_post_data.items()))
if param_to_delete:
del post_data[param_to_delete]
request = RequestFactory().post('/', data=post_data)
request.user = UserFactory.create()
request.session = {}
return request
class LtiTestMixin:
"""
Mixin for LTI tests
"""
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_LTI_PROVIDER': True})
def setUp(self):
super().setUp()
# Always accept the OAuth signature
self.mock_verify = MagicMock(return_value=True)
patcher = patch('lms.djangoapps.lti_provider.signature_validator.SignatureValidator.verify', self.mock_verify)
patcher.start()
self.addCleanup(patcher.stop)
self.consumer = models.LtiConsumer(
consumer_name='consumer',
consumer_key=LTI_DEFAULT_PARAMS['oauth_consumer_key'],
consumer_secret='secret'
)
self.consumer.save()
class LtiLaunchTest(LtiTestMixin, TestCase):
"""
Tests for the lti_launch view
"""
@patch('lms.djangoapps.lti_provider.views.render_courseware')
@patch('lms.djangoapps.lti_provider.views.authenticate_lti_user')
def test_valid_launch(self, _authenticate, render):
"""
Verifies that the LTI launch succeeds when passed a valid request.
"""
request = build_launch_request()
views.lti_launch(request, str(COURSE_KEY), str(USAGE_KEY))
render.assert_called_with(request, USAGE_KEY)
@patch('lms.djangoapps.lti_provider.views.render_courseware')
@patch('lms.djangoapps.lti_provider.views.store_outcome_parameters')
@patch('lms.djangoapps.lti_provider.views.authenticate_lti_user')
def test_valid_launch_with_optional_params(self, _authenticate, store_params, _render):
"""
Verifies that the LTI launch succeeds when passed a valid request.
"""
request = build_launch_request(extra_post_data=LTI_OPTIONAL_PARAMS)
views.lti_launch(request, str(COURSE_KEY), str(USAGE_KEY))
store_params.assert_called_with(
dict(list(ALL_PARAMS.items()) + list(LTI_OPTIONAL_PARAMS.items())),
request.user,
self.consumer
)
@patch('lms.djangoapps.lti_provider.views.render_courseware')
@patch('lms.djangoapps.lti_provider.views.store_outcome_parameters')
@patch('lms.djangoapps.lti_provider.views.authenticate_lti_user')
def test_outcome_service_registered(self, _authenticate, store_params, _render):
"""
Verifies that the LTI launch succeeds when passed a valid request.
"""
request = build_launch_request()
views.lti_launch(
request,
str(COURSE_PARAMS['course_key']),
str(COURSE_PARAMS['usage_key'])
)
store_params.assert_called_with(ALL_PARAMS, request.user, self.consumer)
def launch_with_missing_parameter(self, missing_param):
"""
Helper method to remove a parameter from the LTI launch and call the view
"""
request = build_launch_request(param_to_delete=missing_param)
return views.lti_launch(request, None, None)
def test_launch_with_missing_parameters(self):
"""
Runs through all required LTI parameters and verifies that the lti_launch
view returns Bad Request if any of them are missing.
"""
for missing_param in views.REQUIRED_PARAMETERS:
response = self.launch_with_missing_parameter(missing_param)
            assert response.status_code == 400, 'Launch should fail when parameter %s is missing' % missing_param
def test_launch_with_disabled_feature_flag(self):
"""
Verifies that the LTI launch will fail if the ENABLE_LTI_PROVIDER flag
is not set
"""
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_LTI_PROVIDER': False}):
request = build_launch_request()
response = views.lti_launch(request, None, None)
assert response.status_code == 403
def test_forbidden_if_signature_fails(self):
"""
Verifies that the view returns Forbidden if the LTI OAuth signature is
incorrect.
"""
self.mock_verify.return_value = False
request = build_launch_request()
response = views.lti_launch(request, None, None)
        assert response.status_code == 403
@patch('lms.djangoapps.lti_provider.views.render_courseware')
def test_lti_consumer_record_supplemented_with_guid(self, _render):
self.mock_verify.return_value = False
request = build_launch_request(LTI_OPTIONAL_PARAMS)
with self.assertNumQueries(3):
views.lti_launch(request, None, None)
consumer = models.LtiConsumer.objects.get(
consumer_key=LTI_DEFAULT_PARAMS['oauth_consumer_key']
)
assert consumer.instance_guid == 'consumer instance guid'
class LtiLaunchTestRender(LtiTestMixin, RenderXBlockTestMixin, ModuleStoreTestCase):
"""
Tests for the rendering returned by lti_launch view.
This class overrides the get_response method, which is used by
the tests defined in RenderXBlockTestMixin.
"""
def get_response(self, usage_key, url_encoded_params=None):
"""
Overridable method to get the response from the endpoint that is being tested.
"""
lti_launch_url = reverse(
'lti_provider_launch',
kwargs={
'course_id': str(self.course.id),
'usage_id': str(usage_key)
}
)
if url_encoded_params:
lti_launch_url += '?' + url_encoded_params
return self.client.post(lti_launch_url, data=LTI_DEFAULT_PARAMS)
# The following test methods override the base tests for verifying access
# by unenrolled and unauthenticated students, since there is a discrepancy
# of access rules between the 2 endpoints (LTI and xBlock_render).
# TODO fix this access discrepancy to the same underlying data.
def test_unenrolled_student(self):
"""
Override since LTI allows access to unenrolled students.
"""
self.setup_course()
self.setup_user(admin=False, enroll=False, login=True)
self.verify_response()
def test_unauthenticated(self):
"""
Override since LTI allows access to unauthenticated users.
"""
self.setup_course()
self.setup_user(admin=False, enroll=True, login=False)
self.verify_response()
def get_success_enrolled_staff_mongo_count(self):
"""
Override because mongo queries are higher for this
particular test. This has not been investigated exhaustively
as mongo is no longer used much, and removing user_partitions
from inheritance fixes the problem.
# The 9 mongoDB calls include calls for
# Old Mongo:
# (1) fill_in_run
# (2) get_course in get_course_with_access
# (3) get_item for HTML block in get_module_by_usage_id
# (4) get_parent when loading HTML block
# (5)-(8) calls related to the inherited user_partitions field.
# (9) edx_notes descriptor call to get_course
"""
return 9
| agpl-3.0 |
udayinfy/openerp-7.0 | wms/__openerp__.py | 3 | 2789 | # -*- coding: utf-8 -*-
##############################################################################
#
# wms module for OpenERP. This module allows managing cross-docking in warehouses.
# Copyright (C) 2011 SYLEAM Info Services (<http://www.Syleam.fr/>)
# Sylvain Garancher <[email protected]>
# Elico Corp (port to 7.0) <[email protected]>
# This file is a part of wms
#
# wms is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wms is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Warehouse Management System',
'version': '1.2',
'category': 'Generic Modules/Inventory Control',
'description': """This module is extensions to stock module""",
'author': 'SYLEAM',
'website': 'http://www.syleam.fr/',
'depends': [
'base',
'stock',
],
'init_xml': [],
'images': [],
'update_xml': [
'security/ir.model.access.csv',
#'stock_view.xml',
# 'report_stock_view.xml',
#'wizard/stock_to_date_view.xml',
],
'demo_xml': [],
'test': [
#'test/wms_test01.yml',
#'test/wms_test02.yml',
#'test/wms_test03.yml',
#'test/wms_test04.yml',
#'test/wms_test05.yml',
#'test/wms_test06.yml',
#'test/wms_test07.yml',
#'test/wms_test08.yml',
#'test/wms_test09.yml',
#'test/wms_test10.yml',
#'test/wms_test11.yml',
#'test/wms_test12.yml',
#'test/wms_test13.yml',
#'test/wms_test14.yml',
#'test/wms_test15.yml',
#'test/wms_test16.yml',
#'test/wms_test17.yml',
#'test/wms_test18.yml',
#'test/wms_test19.yml',
#'test/wms_test20.yml',
#'test/wms_test21.yml',
#'test/wms_test22.yml',
#'test/wms_test23.yml',
#'test/wms_test24.yml',
#'test/wms_test25.yml',
#'test/wms_test26.yml',
#'test/wms_test27.yml',
#'test/wms_test28.yml',
#'test/wms_test29.yml',
],
'installable': True,
'active': False,
'license': 'AGPL-3',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
MMaus/mutils | models/threeSeg.py | 1 | 3984 | # -*- coding : utf8 -*-
"""
.. module:: 3seg
:synopsis: Equations and solutions for the three-segment model
.. moduleauthor:: Moritz Maus <[email protected]>
"""
# format: l1, l2, l3, c1, c2]
from pylab import (array, arccos, linspace, pi, vstack, figure, clf, plot,
                   xlabel, ylabel, show, savefig, sqrt, xlim, ylim, axis, arange)
def cfunc(x, params):
"""
this function returns the constraint violation of the 3seg leg.
it must be zeroed!
Parameters
----------
x
configuration of the leg: Fy, h1, h2, tau1, tau2
params
parameter of the system: l, l1, l2, l3, c1, c2
Returns
-------
eq : *array* (1x5)
the non-fulfilment of the constraints (subject to root finding)
"""
l, l1, l2, l3, c1, c2 = params
# print l, l1, l2, l3, c1, c2
Fy, h1, h2, tau1, tau2 = array(x).squeeze()
# print Fy, h1, h2, tau1, tau2
if h1 > l1:
print "warning: invalid h1"
h1 = l1
return [5000, ]*5
if h2 > l3:
print "warning: invalid h2"
        h2 = l3
return [5000, ]*5
if h1 + h2 > l2:
print "warning: invalid h1 + h2"
return [5000, ]*5
while h1 + h2 > l2:
h1 = .8 * h1
h2 = .8 * h2
eq1 = Fy * h1 - tau1
eq2 = tau1 - Fy * h1 - Fy * h2 + tau2
eq3 = Fy * h2 - tau2
eq4 = -1. * c1 * (arccos(h1 / l1) + arccos( (h1 + h2) / l2) - .9 * pi) - tau1
eq5 = -1. * c2 * (arccos(h2 / l3) + arccos( (h1 + h2) / l2) - .9 * pi) - tau2
eq6 = sqrt(l1**2 - h1**2) + sqrt(l2**2 - (h1 + h2)**2) + sqrt(l3**2 -
h2**2) - l
# note: eq2 is omitted because of equality to - (eq1 + eq3)!
return array([eq1, eq3, eq4, eq5, eq6])
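# Illustrative call (values copied from the __main__ block below; the variable
# name 'viol' is assumed):
#   params = [.999, .36, .45, .2, 110., 65.]        # [l, l1, l2, l3, c1, c2]
#   viol = cfunc([1., .001, .005, 1., 2.], params)  # 5 constraint violations
# Root finding (e.g. scipy.optimize.fsolve, as used below) drives viol to zero.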
if __name__ == '__main__':
import scipy.optimize as opt
x0 = array([ 2.64347199e+03, 7.04878037e-02, 1.67474976e-01,
1.86332534e+02, 4.42715408e+02])
# first parameter is L0
params = [.999, .36, .45, .2, 110., 65.]
#IC = array([0., .00001, .00001, .0002, .003])
IC = array([1., .001, .005, 1., 2.])
res0 = opt.fsolve(cfunc, IC, args=params, xtol=1e-10)
def qfun(x, p):
"""
did root finding succeed?
"""
return sum(cfunc(x, p) **2)
all_res = [res0, ]
all_ll = [params[0], ]
all_q = [qfun(res0, params),]
all_params = [params[:], ]
for leglength in linspace(.999, .5, 100):
params[0] = leglength
IC = all_res[-1]
all_res.append(opt.fsolve(cfunc, all_res[-1], args=params, xtol=1e-10))
all_ll.append(leglength)
all_params.append(params[:])
all_q.append(qfun(all_res[-1], all_params[-1]))
print 'll:', leglength
all_res = vstack(all_res)
all_params = vstack(all_params)
figure('force of the leg')
clf()
plot(all_ll, all_res[:,0],'b.-')
xlabel('leg length')
ylabel('Force')
show()
def visualize(config, param):
"""
.. note::
plots the leg on the current axes
parameters
----------
config : *array* (1x5)
of cfunc's x parameter type, describing the configuration of the leg
param : *list*
the list of model parameters, according to cfunc's definition
Returns
-------
*None*
"""
figure('anim figure')
clf()
# plot ground
plot([-1,1],[0,0], color='#000044', linewidth=8)
x = [0, -1 * config[1], config[2], 0]
y1 = sqrt(param[1]**2 - config[1]**2)
y2 = sqrt(param[2]**2 - (config[1] + config[2])**2)
y3 = sqrt(param[3]**2 - config[2]**2)
y = [0, y1, y1 + y2, y1 + y2 + y3]
plot(x, y, color='#000000', linewidth=3)
plot(x, y, color='#982112', linewidth=2, linestyle='--')
plot(x[-1], y[-1], 'o', markersize=13, color='#ffea93')
xlim(-1,1)
ylim(-.2,2)
axis('equal')
def viz(until):
for k in arange(until):
visualize(all_res[k,:], all_params[k,:])
savefig('fig_%02i.png' % k)
| gpl-2.0 |
leilihh/novaha | nova/tests/virt/baremetal/db/test_bm_node.py | 29 | 6886 | # Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Bare-Metal DB testcase for BareMetalNode
"""
from nova import exception
from nova.tests.virt.baremetal.db import base
from nova.tests.virt.baremetal.db import utils
from nova.virt.baremetal import db
class BareMetalNodesTestCase(base.BMDBTestCase):
def _create_nodes(self):
nodes = [
utils.new_bm_node(pm_address='0', service_host="host1",
memory_mb=100000, cpus=100, local_gb=10000),
utils.new_bm_node(pm_address='1', service_host="host2",
instance_uuid='A',
memory_mb=100000, cpus=100, local_gb=10000),
utils.new_bm_node(pm_address='2', service_host="host2",
memory_mb=1000, cpus=1, local_gb=1000),
utils.new_bm_node(pm_address='3', service_host="host2",
memory_mb=1000, cpus=2, local_gb=1000),
utils.new_bm_node(pm_address='4', service_host="host2",
memory_mb=2000, cpus=1, local_gb=1000),
utils.new_bm_node(pm_address='5', service_host="host2",
memory_mb=2000, cpus=2, local_gb=1000),
]
self.ids = []
for n in nodes:
ref = db.bm_node_create(self.context, n)
self.ids.append(ref['id'])
def test_get_all(self):
r = db.bm_node_get_all(self.context)
self.assertEqual(r, [])
self._create_nodes()
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 6)
def test_get(self):
self._create_nodes()
r = db.bm_node_get(self.context, self.ids[0])
self.assertEqual(r['pm_address'], '0')
r = db.bm_node_get(self.context, self.ids[1])
self.assertEqual(r['pm_address'], '1')
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, -1)
def test_get_by_service_host(self):
self._create_nodes()
r = db.bm_node_get_all(self.context, service_host=None)
self.assertEqual(len(r), 6)
r = db.bm_node_get_all(self.context, service_host="host1")
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['pm_address'], '0')
r = db.bm_node_get_all(self.context, service_host="host2")
self.assertEqual(len(r), 5)
pmaddrs = [x['pm_address'] for x in r]
self.assertIn('1', pmaddrs)
self.assertIn('2', pmaddrs)
self.assertIn('3', pmaddrs)
self.assertIn('4', pmaddrs)
self.assertIn('5', pmaddrs)
r = db.bm_node_get_all(self.context, service_host="host3")
self.assertEqual(r, [])
def test_get_associated(self):
self._create_nodes()
r = db.bm_node_get_associated(self.context, service_host=None)
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['pm_address'], '1')
r = db.bm_node_get_unassociated(self.context, service_host=None)
self.assertEqual(len(r), 5)
pmaddrs = [x['pm_address'] for x in r]
self.assertIn('0', pmaddrs)
self.assertIn('2', pmaddrs)
self.assertIn('3', pmaddrs)
self.assertIn('4', pmaddrs)
self.assertIn('5', pmaddrs)
def test_destroy(self):
self._create_nodes()
db.bm_node_destroy(self.context, self.ids[0])
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, self.ids[0])
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 5)
def test_destroy_with_interfaces(self):
self._create_nodes()
if_a_id = db.bm_interface_create(self.context, self.ids[0],
'aa:aa:aa:aa:aa:aa', None, None)
if_b_id = db.bm_interface_create(self.context, self.ids[0],
'bb:bb:bb:bb:bb:bb', None, None)
if_x_id = db.bm_interface_create(self.context, self.ids[1],
'11:22:33:44:55:66', None, None)
db.bm_node_destroy(self.context, self.ids[0])
self.assertRaises(
exception.NovaException,
db.bm_interface_get,
self.context, if_a_id)
self.assertRaises(
exception.NovaException,
db.bm_interface_get,
self.context, if_b_id)
# Another node's interface is not affected
if_x = db.bm_interface_get(self.context, if_x_id)
self.assertEqual(self.ids[1], if_x['bm_node_id'])
self.assertRaises(
exception.NodeNotFound,
db.bm_node_get,
self.context, self.ids[0])
r = db.bm_node_get_all(self.context)
self.assertEqual(len(r), 5)
def test_find_free(self):
self._create_nodes()
fn = db.bm_node_find_free(self.context, 'host2')
self.assertEqual(fn['pm_address'], '2')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=500, cpus=2, local_gb=100)
self.assertEqual(fn['pm_address'], '3')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=1001, cpus=1, local_gb=1000)
self.assertEqual(fn['pm_address'], '4')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=1, local_gb=1000)
self.assertEqual(fn['pm_address'], '4')
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=2, local_gb=1000)
self.assertEqual(fn['pm_address'], '5')
# check memory_mb
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2001, cpus=2, local_gb=1000)
self.assertIsNone(fn)
# check cpus
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=3, local_gb=1000)
self.assertIsNone(fn)
# check local_gb
fn = db.bm_node_find_free(self.context, 'host2',
memory_mb=2000, cpus=2, local_gb=1001)
self.assertIsNone(fn)
| apache-2.0 |
mSenyor/kivy | examples/animation/animate.py | 40 | 1338 | '''
Widget animation
================
This example demonstrates creating and applying a multi-part animation to
a button widget. You should see a button labelled 'plop' that will move with
an animation when clicked.
'''
import kivy
kivy.require('1.0.7')
from kivy.animation import Animation
from kivy.app import App
from kivy.uix.button import Button
class TestApp(App):
def animate(self, instance):
# create an animation object. This object could be stored
# and reused each call or reused across different widgets.
# += is a sequential step, while &= is in parallel
animation = Animation(pos=(100, 100), t='out_bounce')
animation += Animation(pos=(200, 100), t='out_bounce')
animation &= Animation(size=(500, 500))
animation += Animation(size=(100, 50))
# apply the animation on the button, passed in the "instance" argument
# Notice that default 'click' animation (changing the button
# color while the mouse is down) is unchanged.
animation.start(instance)
def build(self):
# create a button, and attach animate() method as a on_press handler
button = Button(size_hint=(None, None), text='plop',
on_press=self.animate)
return button
if __name__ == '__main__':
TestApp().run()
| mit |
99cloud/keystone_register | horizon/management/commands/startdash.py | 16 | 2450 | import glob
from optparse import make_option
import os
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
from django.utils.importlib import import_module
import horizon
class Command(TemplateCommand):
template = os.path.join(horizon.__path__[0], "conf", "dash_template")
option_list = TemplateCommand.option_list + (
make_option('--target',
dest='target',
action='store',
default=None,
help='The directory in which the panel '
'should be created. Defaults to the '
'current directory. The value "auto" '
'may also be used to automatically '
'create the panel inside the specified '
'dashboard module.'),)
help = ("Creates a Django app directory structure for a new dashboard "
"with the given name in the current directory or optionally in "
"the given directory.")
def handle(self, dash_name=None, **options):
if dash_name is None:
raise CommandError("You must provide a dashboard name.")
# Use our default template if one isn't specified.
if not options.get("template", None):
options["template"] = self.template
# We have html templates as well, so make sure those are included.
options["extensions"].extend(["tmpl", "html", "js", "css"])
# Check that the app_name cannot be imported.
try:
import_module(dash_name)
except ImportError:
pass
else:
raise CommandError("%r conflicts with the name of an existing "
"Python module and cannot be used as an app "
"name. Please try another name." % dash_name)
super(Command, self).handle('dash', dash_name, **options)
target = options.pop("target", None)
if not target:
target = os.path.join(os.curdir, dash_name)
# Rename our python template files.
file_names = glob.glob(os.path.join(target, "*.py.tmpl"))
for filename in file_names:
os.rename(filename, filename[:-5])
| apache-2.0 |
fuhongliang/odoo | addons/edi/models/res_currency.py | 437 | 2892 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from edi import EDIMixin
from openerp import SUPERUSER_ID
RES_CURRENCY_EDI_STRUCT = {
#custom: 'code'
'symbol': True,
'rate': True,
}
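# Illustrative shape of an exported EDI document (values assumed; the '__id'
# metadata field is filled in by the EDIMixin export machinery):
#   {'__id': '<external id>', 'code': 'EUR', 'symbol': u'\u20ac', 'rate': 1.0}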
class res_currency(osv.osv, EDIMixin):
_inherit = "res.currency"
def edi_export(self, cr, uid, records, edi_struct=None, context=None):
edi_struct = dict(edi_struct or RES_CURRENCY_EDI_STRUCT)
edi_doc_list = []
for currency in records:
# Get EDI doc based on struct. The result will also contain all metadata fields and attachments.
edi_doc = super(res_currency,self).edi_export(cr, uid, [currency], edi_struct, context)[0]
edi_doc.update(code=currency.name)
edi_doc_list.append(edi_doc)
return edi_doc_list
def edi_import(self, cr, uid, edi_document, context=None):
self._edi_requires_attributes(('code','symbol'), edi_document)
external_id = edi_document['__id']
existing_currency = self._edi_get_object_by_external_id(cr, uid, external_id, 'res_currency', context=context)
if existing_currency:
return existing_currency.id
# find with unique ISO code
existing_ids = self.search(cr, uid, [('name','=',edi_document['code'])])
if existing_ids:
return existing_ids[0]
# nothing found, create a new one
currency_id = self.create(cr, SUPERUSER_ID, {'name': edi_document['code'],
'symbol': edi_document['symbol']}, context=context)
rate = edi_document.pop('rate')
if rate:
self.pool.get('res.currency.rate').create(cr, SUPERUSER_ID, {'currency_id': currency_id,
'rate': rate}, context=context)
return currency_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
suyashphadtare/sajil-final-frappe | frappe/widgets/form/save.py | 32 | 1739 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.widgets.form.load import run_onload
@frappe.whitelist()
def savedocs():
"""save / submit / update doclist"""
try:
doc = frappe.get_doc(json.loads(frappe.form_dict.doc))
set_local_name(doc)
# action
doc.docstatus = {"Save":0, "Submit": 1, "Update": 1, "Cancel": 2}[frappe.form_dict.action]
try:
doc.save()
except frappe.NameError, e:
doctype, name, original_exception = e if isinstance(e, tuple) else (doc.doctype or "", doc.name or "", None)
frappe.msgprint(frappe._("{0} {1} already exists").format(doctype, name))
raise
# update recent documents
run_onload(doc)
frappe.user.update_recent(doc.doctype, doc.name)
send_updated_docs(doc)
except Exception:
frappe.msgprint(frappe._('Did not save'))
frappe.errprint(frappe.utils.get_traceback())
raise
@frappe.whitelist()
def cancel(doctype=None, name=None):
"""cancel a doclist"""
try:
doc = frappe.get_doc(doctype, name)
doc.cancel()
send_updated_docs(doc)
except Exception:
frappe.errprint(frappe.utils.get_traceback())
frappe.msgprint(frappe._("Did not cancel"))
raise
def send_updated_docs(doc):
from load import get_docinfo
get_docinfo(doc)
d = doc.as_dict()
if hasattr(doc, 'localname'):
d["localname"] = doc.localname
frappe.response.docs.append(d)
def set_local_name(doc):
def _set_local_name(d):
if doc.get('__islocal') or d.get('__islocal'):
d.localname = d.name
d.name = None
_set_local_name(doc)
for child in doc.get_all_children():
_set_local_name(child)
if doc.get("__newname"):
doc.name = doc.get("__newname")
| mit |