repo_name | path | copies | size | content | license |
---|---|---|---|---|---|
ma-compbio/PEP | genVecs.py | 1 | 7271 | #encoding:utf-8
from gensim.models import Word2Vec
from gensim.models.word2vec import LineSentence
import pandas as pd
import numpy as np
import os
import sys
import math
import random
import processSeq
import warnings
import threading
from multiprocessing.dummy import Pool as ThreadPool
from sklearn import preprocessing
import sklearn.preprocessing
from gensim import corpora, models, similarities
class mycorpuse(object):
def __iter__(self):
for line in open("./Data/Learning/unlabeled_train_enhancer_GM12878"):
yield line.split()
class mycorpusp(object):
def __iter__(self):
for line in open("./Data/Learning/unlabeled_train_promoter_GM12878"):
yield line.split()
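# Usage sketch (illustrative): each corpus class streams one tokenized
# sentence per line, so gensim can scan the unlabeled training file without
# loading it all into memory:
#   for sentence in mycorpuse():
#       pass  # sentence is a list of k-mer tokens, e.g. ['ACGTAC', 'CGTACG', ...]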
# Load training data
def getData(type,cell):
data = pd.read_table('./Data/Learning/supervised_'+str(cell)+"_"+str(type))
return data
# Load trained Word2Vec model or train a new model
def getWord_model(word,num_features,min_count,type,cell):
word_model1 = ""
model_name = str(cell)+"_enhancer"
if not os.path.isfile("./" + model_name):
sentence = LineSentence("./Data/Learning/unlabeled_train_enhancer_"+str(cell),max_sentence_length=15000)
print "Start Training Word2Vec model..."
# Set values for various parameters
num_features = int(num_features) # Word vector dimensionality
min_word_count = int(min_count) # Minimum word count
num_workers = 20 # Number of threads to run in parallel
context = 20 # Context window size
downsampling = 1e-3 # Downsample setting for frequent words
# Initialize and train the model
print "Training Word2Vec model..."
word_model1 = Word2Vec(sentence, workers=num_workers,\
size=num_features, min_count=min_word_count, \
window =context, sample=downsampling, seed=1)
word_model1.init_sims(replace=False)
word_model1.save(model_name)
print word_model1.most_similar("CATAGT")
else:
print "Loading Word2Vec model..."
word_model1 = Word2Vec.load(model_name)
word_model2 = ""
model_name = str(cell)+"_promoter"
if not os.path.isfile("./" + model_name):
sentence = LineSentence("./Data/Learning/unlabeled_train_promoter_"+str(cell),max_sentence_length=15000)
print "Start Training Word2Vec model..."
# Set values for various parameters
num_features = int(num_features) # Word vector dimensionality
min_word_count = int(min_count) # Minimum word count
num_workers = 20 # Number of threads to run in parallel
context = 20 # Context window size
downsampling = 1e-3 # Downsample setting for frequent words
# Initialize and train the model
print "Training Word2Vec model..."
word_model2 = Word2Vec(sentence, workers=num_workers,\
size=num_features, min_count=min_word_count, \
window=context, sample=downsampling, seed=1)
word_model2.init_sims(replace=False)
word_model2.save(model_name)
print word_model2.most_similar("CATAGT")
else:
print "Loading Word2Vec model..."
word_model2 = Word2Vec.load(model_name)
return word_model1,word_model2
# Split sequences into words
def getCleanDNA_split(DNAdata,word):
dnalist = []
counter = 0
for dna in DNAdata:
if counter % 100 == 0:
print "DNA %d of %d\r" % (counter, len(DNAdata)),
sys.stdout.flush()
dna = str(dna).upper()
dnalist.append(processSeq.DNA2Sentence(dna,word).split(" "))
counter += 1
print
return dnalist
def makeFeatureVecs(words, model, num_features,word,k,temp):
featureVec = np.zeros((k,num_features), dtype="float32")
nwords = 0
index2word_set = set(model.index2word)
length = len(words)
for word in words:
if word in index2word_set:
# divide the words into k parts, add up in each part
featureVec[math.floor((nwords * k) / length)] += (model[word]) * temp[nwords]
nwords = nwords + 1
featureVec = featureVec.reshape(k * num_features)
#featureVec = featureVec/nwords
return featureVec
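# Worked example (illustrative): with k=2 and a sentence of length 4,
# floor((nwords * k) / length) maps words 0-1 into featureVec[0] and words
# 2-3 into featureVec[1]; each word vector is scaled by its TF-IDF weight
# temp[nwords] before being accumulated.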
def mean2max(vec):
length = len(vec)
mean1 = np.max(vec[0:int(length*0.5)],axis = 0)
mean2 = np.max(vec[int(length*0.5):int(length)],axis = 0)
maxvec = np.mean([mean1,mean2],axis = 0)
return maxvec
def getAvgFeatureVecs(data,model1,model2, num_features, word,k,type,cell):
dnaFeatureVecs = np.zeros((len(data),2*k*num_features), dtype="float32")
if not os.path.isfile("./Data/enhancertfidf"+str(cell)):
print "Getting dictionary"
Corp = mycorpuse()
dictionary = corpora.Dictionary(Corp)
dictionary.save("./Data/enhancerdic"+str(cell))
corpus = [dictionary.doc2bow(text) for text in Corp]
print "Calculating TFIDF"
tfidf = models.TfidfModel(corpus)
tfidf.save("./Data/enhancertfidf"+str(cell))
else:
tfidf = models.TfidfModel.load("./Data/enhancertfidf"+str(cell))
dictionary = corpora.Dictionary.load("./Data/enhancerdic"+str(cell))
dict1 = {k:v for k, v in dictionary.items()}
DNAdata1 = getCleanDNA_split(data["seq1"],word)
counter = 0
for dna in DNAdata1:
if counter % 100 == 0:
print "DNA %d of %d\r" % (counter, len(DNAdata1)),
sys.stdout.flush()
vec_bow = dictionary.doc2bow(dna)
vec_tfidf = tfidf[vec_bow]
for i in xrange(len(vec_tfidf)):
dnaFeatureVecs[counter][0:k*num_features] += model1[dict1[vec_tfidf[i][0]]] * vec_tfidf[i][1]
counter += 1
print
del DNAdata1
counter = 0
if not os.path.isfile("./Data/promotertfidf"+str(cell)):
print "Getting dictionary"
Corp = mycorpusp()
dictionary = corpora.Dictionary(Corp)
dictionary.save("./Data/promoterdic"+str(cell))
corpus = [dictionary.doc2bow(text) for text in Corp]
print "Calculating TFIDF"
tfidf = models.TfidfModel(corpus)
tfidf.save("./Data/promotertfidf"+str(cell))
else:
tfidf = models.TfidfModel.load("./Data/promotertfidf"+str(cell))
dictionary = corpora.Dictionary.load("./Data/promoterdic"+str(cell))
dict2 = {k:v for k, v in dictionary.items()}
DNAdata2 = []
counter = 0
for dna in data["seq2"]:
if counter % 100 == 0:
print "DNA %d of %d\r" % (counter, len(data)),
sys.stdout.flush()
dna = str(dna).upper()
DNAdata2.append(processSeq.DNA2Sentence(dna,word).split(" "))
counter += 1
counter = 0
print
for dna in DNAdata2:
if counter % 100 == 0:
print "DNA %d of %d\r" % (counter, len(DNAdata2)),
sys.stdout.flush()
vec_bow = dictionary.doc2bow(dna)
vec_tfidf = tfidf[vec_bow]
for i in xrange(len(vec_tfidf)):
dnaFeatureVecs[counter][k*num_features:2*k*num_features] += model2[dict2[vec_tfidf[i][0]]] * vec_tfidf[i][1]
counter += 1
print
np.save("./Datavecs/datavecs_"+str(cell)+"_"+str(type)+".npy",dnaFeatureVecs)
return dnaFeatureVecs
def run(word, num_features,K,type,cell):
warnings.filterwarnings("ignore")
global word_model,data,k
word = int(word)
num_features = int(num_features)
k=int(K)
word_model=""
min_count=10
word_model1,word_model2 = getWord_model(word,num_features,min_count,type,cell)
# Read data
data = getData(type,cell)
length = data.shape[0]
print length
print "Generating Training and Testing Vector"
dataDataVecs = getAvgFeatureVecs(data,word_model1,word_model2,num_features,word,k,type,cell)
if __name__ == "__main__":
run(6,300,1,'new','GM12878')
| mit |
Karosuo/Linux_tools | xls_handlers/xls_sum_venv/lib/python3.6/site-packages/wheel/cli/pack.py | 8 | 2263 | from __future__ import print_function
import os.path
import re
import sys
from wheel.cli import WheelError
from wheel.wheelfile import WheelFile
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
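# Example (illustrative): 'mypkg-1.0.dist-info' matches DIST_INFO_RE with
# namever='mypkg-1.0', name='mypkg' and ver='1.0'.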
def pack(directory, dest_dir, build_number):
"""Repack a previously unpacked wheel directory into a new wheel file.
The .dist-info/WHEEL file must contain one or more tags so that the target
wheel file name can be determined.
:param directory: The unpacked wheel directory
:param dest_dir: Destination directory (defaults to the current directory)
:param build_number: Optional build number to append to the name and version
"""
# Find the .dist-info directory
dist_info_dirs = [fn for fn in os.listdir(directory)
if os.path.isdir(os.path.join(directory, fn)) and DIST_INFO_RE.match(fn)]
if len(dist_info_dirs) > 1:
raise WheelError('Multiple .dist-info directories found in {}'.format(directory))
elif not dist_info_dirs:
raise WheelError('No .dist-info directories found in {}'.format(directory))
# Determine the target wheel filename
dist_info_dir = dist_info_dirs[0]
name_version = DIST_INFO_RE.match(dist_info_dir).group('namever')
# Add the build number if specified
if build_number:
name_version += '-' + build_number
# Read the tags from .dist-info/WHEEL
with open(os.path.join(directory, dist_info_dir, 'WHEEL')) as f:
tags = [line.split(' ')[1].rstrip() for line in f if line.startswith('Tag: ')]
if not tags:
raise WheelError('No tags present in {}/WHEEL; cannot determine target wheel filename'
.format(dist_info_dir))
# Reassemble the tags for the wheel file
impls = sorted({tag.split('-')[0] for tag in tags})
abivers = sorted({tag.split('-')[1] for tag in tags})
platforms = sorted({tag.split('-')[2] for tag in tags})
tagline = '-'.join(['.'.join(impls), '.'.join(abivers), '.'.join(platforms)])
# Repack the wheel
wheel_path = os.path.join(dest_dir, '{}-{}.whl'.format(name_version, tagline))
with WheelFile(wheel_path, 'w') as wf:
print("Repacking wheel as {}...".format(wheel_path), end='')
sys.stdout.flush()
wf.write_files(directory)
print('OK')
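# Usage sketch (illustrative; the directory name and resulting tags are made up):
#   pack('mypkg-1.0', '.', None)
# reads mypkg-1.0/mypkg-1.0.dist-info/WHEEL and writes something like
# ./mypkg-1.0-py2.py3-none-any.whl, depending on the tags found there.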
| gpl-3.0 |
brianhelba/pylibtiff | libtiff/tiff_sample_plane.py | 2 | 11678 | """ Implements TIFF sample plane.
"""
# Author: Pearu Peterson
# Created: Jan 2011
import numpy
import tif_lzw
__all__ = ['TiffSamplePlane']
def set_array(output_array, input_array):
dtype = numpy.uint8
numpy.frombuffer(output_array.data, dtype=dtype)[:] = numpy.frombuffer(input_array.data, dtype=dtype)
class TiffSamplePlane:
""" Image of a single sample in a TIFF image file directory.
"""
def __init__(self, ifd, sample_index=0):
""" Construct TiffSamplePlane instance.
Parameters
----------
ifd : `libtiff.tiff_file.IFDEntry`
sample_index : int
Specify the sample index. When None, the whole pixel is interpreted as the sample.
"""
self.ifd = ifd
self.sample_index = sample_index
self.planar_config = planar_config = ifd.get_value('PlanarConfiguration')
self.samples_per_pixel = samples_per_pixel = ifd.get_value('SamplesPerPixel')
if sample_index is not None and sample_index >= samples_per_pixel:
raise IndexError ('sample index %r must be less than the number of samples %r' % (sample_index, samples_per_pixel))
pixels_per_row = ifd.get_value('ImageWidth')
rows_of_pixels = ifd.get_value('ImageLength')
self.shape = (int(rows_of_pixels), int(pixels_per_row))
rows_per_strip = ifd.get_value('RowsPerStrip')
strips_per_image = (rows_of_pixels + rows_per_strip - 1) // rows_per_strip
rows_per_strip = min(rows_of_pixels, rows_per_strip)
self.rows_per_strip = rows_per_strip
self.strip_offsets = strip_offsets = ifd.get_value('StripOffsets')
self.strip_nbytes = strip_nbytes = ifd.get_value('StripByteCounts')
self.sample_format = sample_format = ifd.get_value('SampleFormat')
self.bits_per_sample = bits_per_sample = ifd.get_value('BitsPerSample')
bits_per_pixel = sum(bits_per_sample)
assert bits_per_pixel % 8==0, repr((bits_per_pixel, bits_per_sample))
bytes_per_pixel = bits_per_pixel // 8
if sample_index is None:
bytes_per_sample = bytes_per_pixel
else:
bytes_per_sample = bits_per_sample[sample_index] // 8
bytes_per_row = bytes_per_pixel * pixels_per_row
bytes_per_strip = rows_per_strip * bytes_per_row
sample_names = ifd.get_sample_names()
pixel_dtype = ifd.get_pixel_dtype()
sample_offset = 0
if sample_index is None:
dtype = pixel_dtype
sample_names = ['pixel']
sample_name = 'pixel'
else:
dtype = ifd.get_sample_dtypes ()[sample_index]
sample_name = sample_names[sample_index]
if planar_config==1:
sample_offset = sum(bits_per_sample[:sample_index]) // 8
bytes_per_row = pixels_per_row * bytes_per_pixel # uncompressed
sample_offset = 0
if planar_config==1 or sample_index is None:
bytes_per_sample_row = bytes_per_row
else:
bytes_per_sample_row = bytes_per_row // samples_per_pixel
self.dtype = dtype
self.pixel_dtype = pixel_dtype
self.bytes_per_pixel = bytes_per_pixel
self.bytes_per_row = bytes_per_row
self.bytes_per_sample_image = bytes_per_sample_row * rows_of_pixels
self.uncompressed_bytes_per_strip = bytes_per_strip
self.compression = compression = ifd.get_value('Compression')
self.sample_name = sample_name
self.sample_offset = sample_offset
self.bytes_per_sample_row = bytes_per_sample_row
self.strips_per_image = strips_per_image
self.is_contiguous = compression==1 and ifd.is_contiguous()
time = None
descr = str(ifd.get_value('ImageDescription', human=True))
if descr is not None:
if descr.startswith('<?xml') or descr[:4].lower()=='<ome':
pass
else:
it = descr.find('RelativeTime')
if it != -1:
time = float(descr[it:].split (None, 2)[1].strip())
self.time = time
def set_time (self, time):
if None not in [self.time, time]:
if self.time!=time:
print('%s:warning: overwriting time value %s with %s' % (self.__class__.__name__, self.time, time))
self.time = time
def check_same_shape_and_type(self, other):
return self.shape==other.shape and self.dtype==other.dtype
def get_topology (self):
return '''shape=%(shape)s planar_config=%(planar_config)s sample_index=%(sample_index)s
dtype=%(dtype)s pixel_dtype=%(pixel_dtype)s
bytes_per_pixel=%(bytes_per_pixel)s
bytes_per_sample_row=%(bytes_per_sample_row)s
bytes_per_row=%(bytes_per_row)s
bytes_per_strip=%(uncompressed_bytes_per_strip)s
bytes_per_sample_image=%(bytes_per_sample_image)s
strip_offsets=%(strip_offsets)s
strip_nbytes=%(strip_nbytes)s
strips_per_image=%(strips_per_image)s
rows_per_strip=%(rows_per_strip)s
''' % (self.__dict__)
def get_row(self, index, subindex = None):
if index < 0:
index += self.shape[0]
if index >= self.shape[0] or index < 0:
raise IndexError('Row index %r out of bounds [0,%r]' % (index, self.shape[0]-1))
if self.planar_config==1: # RGBRGB..
strip_index, row_index = divmod(index, self.rows_per_strip)
else: # RR..GG..BB..
index2 = self.sample_index * self.shape[0] + index
strip_index, row_index = divmod(index2, self.rows_per_strip)
start = self.strip_offsets[strip_index]
stop = start + self.strip_nbytes[strip_index]
if self.compression==1:
strip = self.ifd.tiff.data[start:stop]
else:
compressed_strip = self.ifd.tiff.data[start:stop]
if self.compression==5: # lzw
strip = tif_lzw.decode(compressed_strip, self.uncompressed_bytes_per_strip)
else:
raise NotImplementedError (repr(self.compression))
start = row_index * self.bytes_per_sample_row + self.sample_offset
stop = start + self.bytes_per_sample_row + self.sample_offset
if isinstance (subindex, tuple):
if len(subindex)==1:
subindex = subindex[0]
if self.planar_config==1:
if isinstance(subindex, int):
start = start + subindex * self.bytes_per_pixel
stop = start + self.bytes_per_pixel
return strip[start:stop].view(dtype=self.pixel_dtype)[self.sample_name][0]
row = strip[start:stop].view(dtype=self.pixel_dtype)[self.sample_name]
if not row.size:
print(self.get_topology())
else:
row = strip[start:stop].view(dtype=self.dtype)
if subindex is not None:
return row[subindex]
return row
def get_rows(self, index, subindex=None):
if isinstance(index, int):
r = self.get_row (index, subindex=subindex)
return r.reshape((1,)+r.shape)
if isinstance (index, slice):
indices = list(range(*index.indices(self.shape[0])))
for i,j in enumerate(indices):
s = self.get_row(j, subindex=subindex)
if i==0:
r = numpy.empty((len (indices),)+s.shape, dtype=self.dtype)
r[i] = s
return r
if isinstance (index, tuple):
if len (index)==1:
return self[index[0]]
raise NotImplementedError (repr(index))
def get_image(self):
if self.is_contiguous:
if self.planar_config==1:
start = self.strip_offsets[0] + self.sample_offset
stop = self.strip_offsets[-1] + self.strip_nbytes[-1]
image =self.ifd.tiff.data[start:stop].view(dtype=self.pixel_dtype)
image = image[self.sample_name].reshape (self.shape)
return image
else:
if self.sample_index is None:
start = self.strip_offsets[0]
else:
start = self.strip_offsets[0] + self.sample_index * self.bytes_per_sample_image
stop = start + self.bytes_per_sample_image
image = self.ifd.tiff.data[start:stop]
image = image.view(dtype=self.dtype).reshape(self.shape)
return image
else:
image = numpy.empty((self.bytes_per_sample_image,), dtype=numpy.uint8)
offset = 0
for strip_index in range (len (self.strip_offsets)):
start = self.strip_offsets[strip_index]
stop = start + self.strip_nbytes[strip_index]
if self.compression==1:
strip = self.ifd.tiff.data[start:stop]
else:
compressed_strip = self.ifd.tiff.data[start:stop]
if self.compression==5: # lzw
strip = tif_lzw.decode(compressed_strip, self.uncompressed_bytes_per_strip)
else:
raise NotImplementedError (repr(self.compression))
target = image[offset:offset + strip.nbytes]
if target.nbytes < strip.nbytes:
print('%s.get_image warning: tiff data contains %s extra bytes (compression=%r) that are ignored' % (self.__class__.__name__, strip.nbytes-target.nbytes, self.compression))
image[offset:offset + strip.nbytes] = strip[:target.nbytes]
offset += strip.nbytes
image = image.view(dtype=self.dtype).reshape(self.shape)
return image
def __len__(self):
return self.shape[0]
def __getitem__(self, index):
if isinstance (index, int):
return self.get_row(index)
elif isinstance(index, slice):
return self.get_image()[index]
elif isinstance(index, tuple):
if len(index)==0:
return self.get_image()
if len(index)==1:
return self[index[0]]
index0 = index[0]
if isinstance(index0, int):
return self.get_row(index0, index[1:])
return self.get_image()[index]
raise NotImplementedError (repr(index))
class TiffSamplePlaneLazy(TiffSamplePlane):
def __init__ (self, tiff_file_getter):
self.tiff_file_getter = tiff_file_getter
self.time = None
self._ifd = None
@property
def ifd(self):
ifd = self._ifd
if ifd is None:
tiff = self.tiff_file_getter()
assert len (tiff.IFD)==1,repr(len (tiff.IFD))
self._ifd = ifd = tiff.IFD[0]
return ifd
@property
def strip_offsets (self): return self.ifd.get_value ('StripOffsets')
@property
def strip_nbytes (self): return self.ifd.get_value ('StripByteCounts')
@property
def compression(self): return self.ifd.get_value ('Compression')
@property
def is_contiguous(self): return self.compression==1 and self.ifd.is_contiguous()
def copy_attrs(self, other):
for attr in ['sample_index', 'planar_config', 'samples_per_pixel','shape',
'rows_per_strip', 'sample_format', 'bits_per_sample',
'dtype', 'pixel_dtype', 'bytes_per_pixel', 'bytes_per_row',
'bytes_per_sample_image', 'uncompressed_bytes_per_strip',
'sample_name', 'sample_offset', 'bytes_per_sample_row',
'strips_per_image'
]:
setattr (self, attr, getattr (other, attr))
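# Usage sketch (illustrative; assumes a TIFF file already parsed by
# libtiff.tiff_file with at least one image file directory):
#   plane = TiffSamplePlane(tiff.IFD[0], sample_index=0)
#   first_row = plane[0]        # one row of the sample plane
#   image = plane.get_image()   # full array with shape plane.shape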
| bsd-3-clause |
jalexvig/tensorflow | tensorflow/contrib/gan/python/eval/python/summaries_impl.py | 15 | 8343 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common TFGAN summaries."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.gan.python import namedtuples
from tensorflow.contrib.gan.python.eval.python import eval_utils
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.losses import util as loss_util
from tensorflow.python.summary import summary
__all__ = [
'add_gan_model_image_summaries',
'add_image_comparison_summaries',
'add_gan_model_summaries',
'add_regularization_loss_summaries',
'add_cyclegan_image_summaries',
]
def _assert_is_image(data):
data.shape.assert_has_rank(4)
data.shape[1:].assert_is_fully_defined()
def add_gan_model_image_summaries(gan_model, grid_size=4, model_summaries=True):
"""Adds image summaries for real and fake images.
Args:
gan_model: A GANModel tuple.
grid_size: The size of an image grid.
model_summaries: Also add summaries of the model.
Raises:
ValueError: If real and generated data aren't images.
"""
if isinstance(gan_model, namedtuples.CycleGANModel):
raise ValueError(
'`add_gan_model_image_summaries` does not take CycleGANModels. Please '
'use `add_cyclegan_image_summaries` instead.')
_assert_is_image(gan_model.real_data)
_assert_is_image(gan_model.generated_data)
num_images = grid_size ** 2
real_image_shape = gan_model.real_data.shape.as_list()[1:3]
generated_image_shape = gan_model.generated_data.shape.as_list()[1:3]
real_channels = gan_model.real_data.shape.as_list()[3]
generated_channels = gan_model.generated_data.shape.as_list()[3]
summary.image(
'real_data',
eval_utils.image_grid(
gan_model.real_data[:num_images],
grid_shape=(grid_size, grid_size),
image_shape=real_image_shape,
num_channels=real_channels),
max_outputs=1)
summary.image(
'generated_data',
eval_utils.image_grid(
gan_model.generated_data[:num_images],
grid_shape=(grid_size, grid_size),
image_shape=generated_image_shape,
num_channels=generated_channels),
max_outputs=1)
if model_summaries:
add_gan_model_summaries(gan_model)
def add_cyclegan_image_summaries(cyclegan_model):
"""Adds image summaries for CycleGAN.
There are two summaries, one for each generator. The first image is the
generator input, the second is the generator output, and the third is G(F(x)).
Args:
cyclegan_model: A CycleGANModel tuple.
Raises:
ValueError: If `cyclegan_model` isn't a CycleGANModel.
ValueError: If generated data, generator inputs, and reconstructions aren't
images.
ValueError: If the generator input, generated data, and reconstructions
aren't all the same size.
"""
if not isinstance(cyclegan_model, namedtuples.CycleGANModel):
raise ValueError('`cyclegan_model` was not a CycleGANModel. Instead, was '
'%s' % type(cyclegan_model))
_assert_is_image(cyclegan_model.model_x2y.generator_inputs)
_assert_is_image(cyclegan_model.model_x2y.generated_data)
_assert_is_image(cyclegan_model.reconstructed_x)
_assert_is_image(cyclegan_model.model_y2x.generator_inputs)
_assert_is_image(cyclegan_model.model_y2x.generated_data)
_assert_is_image(cyclegan_model.reconstructed_y)
def _add_comparison_summary(gan_model, reconstructions):
image_list = (array_ops.unstack(gan_model.generator_inputs[:1]) +
array_ops.unstack(gan_model.generated_data[:1]) +
array_ops.unstack(reconstructions[:1]))
summary.image(
'image_comparison', eval_utils.image_reshaper(
image_list, num_cols=len(image_list)), max_outputs=1)
with ops.name_scope('x2y_image_comparison_summaries'):
_add_comparison_summary(
cyclegan_model.model_x2y, cyclegan_model.reconstructed_x)
with ops.name_scope('y2x_image_comparison_summaries'):
_add_comparison_summary(
cyclegan_model.model_y2x, cyclegan_model.reconstructed_y)
def add_image_comparison_summaries(gan_model, num_comparisons=2,
display_diffs=False):
"""Adds image summaries to compare triplets of images.
The first image is the generator input, the second is the generator output,
and the third is the real data. This style of comparison is useful for
image translation problems, where the generator input is a corrupted image,
the generator output is the reconstruction, and the real data is the target.
Args:
gan_model: A GANModel tuple.
num_comparisons: The number of image triplets to display.
display_diffs: Also display the difference between generated and target.
Raises:
ValueError: If real data, generated data, and generator inputs aren't
images.
ValueError: If the generator input, real, and generated data aren't all the
same size.
"""
_assert_is_image(gan_model.generator_inputs)
_assert_is_image(gan_model.generated_data)
_assert_is_image(gan_model.real_data)
gan_model.generated_data.shape.assert_is_compatible_with(
gan_model.generator_inputs.shape)
gan_model.real_data.shape.assert_is_compatible_with(
gan_model.generated_data.shape)
image_list = []
image_list.extend(
array_ops.unstack(gan_model.generator_inputs[:num_comparisons]))
image_list.extend(
array_ops.unstack(gan_model.generated_data[:num_comparisons]))
image_list.extend(array_ops.unstack(gan_model.real_data[:num_comparisons]))
if display_diffs:
generated_list = array_ops.unstack(
gan_model.generated_data[:num_comparisons])
real_list = array_ops.unstack(gan_model.real_data[:num_comparisons])
diffs = [
math_ops.abs(math_ops.to_float(generated) - math_ops.to_float(real)) for
generated, real in zip(generated_list, real_list)]
image_list.extend(diffs)
# Reshape image and display.
summary.image(
'image_comparison',
eval_utils.image_reshaper(image_list, num_cols=num_comparisons),
max_outputs=1)
def add_gan_model_summaries(gan_model):
"""Adds typical GANModel summaries.
Args:
gan_model: A GANModel tuple.
"""
if isinstance(gan_model, namedtuples.CycleGANModel):
with ops.name_scope('cyclegan_x2y_summaries'):
add_gan_model_summaries(gan_model.model_x2y)
with ops.name_scope('cyclegan_y2x_summaries'):
add_gan_model_summaries(gan_model.model_y2x)
return
with ops.name_scope('generator_variables'):
for var in gan_model.generator_variables:
summary.histogram(var.name, var)
with ops.name_scope('discriminator_variables'):
for var in gan_model.discriminator_variables:
summary.histogram(var.name, var)
def add_regularization_loss_summaries(gan_model):
"""Adds summaries for a regularization losses..
Args:
gan_model: A GANModel tuple.
"""
if isinstance(gan_model, namedtuples.CycleGANModel):
with ops.name_scope('cyclegan_x2y_regularization_loss_summaries'):
add_regularization_loss_summaries(gan_model.model_x2y)
with ops.name_scope('cyclegan_y2x_regularization_loss_summaries'):
add_regularization_loss_summaries(gan_model.model_y2x)
return
if gan_model.generator_scope:
summary.scalar(
'generator_regularization_loss',
loss_util.get_regularization_loss(gan_model.generator_scope.name))
if gan_model.discriminator_scope:
summary.scalar(
'discriminator_regularization_loss',
loss_util.get_regularization_loss(gan_model.discriminator_scope.name))
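# Usage sketch (illustrative; assumes `gan_model` is a GANModel tuple built
# with tf.contrib.gan elsewhere):
#   add_gan_model_image_summaries(gan_model, grid_size=4)
#   add_image_comparison_summaries(gan_model, num_comparisons=2)
#   add_regularization_loss_summaries(gan_model)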
| apache-2.0 |
charlesvdv/servo | tests/wpt/web-platform-tests/tools/six/test_six.py | 418 | 22226 | import operator
import sys
import types
import py
import six
def test_add_doc():
def f():
"""Icky doc"""
pass
six._add_doc(f, """New doc""")
assert f.__doc__ == "New doc"
def test_import_module():
from logging import handlers
m = six._import_module("logging.handlers")
assert m is handlers
def test_integer_types():
assert isinstance(1, six.integer_types)
assert isinstance(-1, six.integer_types)
assert isinstance(six.MAXSIZE + 23, six.integer_types)
assert not isinstance(.1, six.integer_types)
def test_string_types():
assert isinstance("hi", six.string_types)
assert isinstance(six.u("hi"), six.string_types)
assert issubclass(six.text_type, six.string_types)
def test_class_types():
class X:
pass
class Y(object):
pass
assert isinstance(X, six.class_types)
assert isinstance(Y, six.class_types)
assert not isinstance(X(), six.class_types)
def test_text_type():
assert type(six.u("hi")) is six.text_type
def test_binary_type():
assert type(six.b("hi")) is six.binary_type
def test_MAXSIZE():
try:
# This shouldn't raise an overflow error.
six.MAXSIZE.__index__()
except AttributeError:
# Before Python 2.6.
pass
py.test.raises(
(ValueError, OverflowError),
operator.mul, [None], six.MAXSIZE + 1)
def test_lazy():
if six.PY3:
html_name = "html.parser"
else:
html_name = "HTMLParser"
assert html_name not in sys.modules
mod = six.moves.html_parser
assert sys.modules[html_name] is mod
assert "htmlparser" not in six._MovedItems.__dict__
try:
import _tkinter
except ImportError:
have_tkinter = False
else:
have_tkinter = True
have_gdbm = True
try:
import gdbm
except ImportError:
try:
import dbm.gnu
except ImportError:
have_gdbm = False
@py.test.mark.parametrize("item_name",
[item.name for item in six._moved_attributes])
def test_move_items(item_name):
"""Ensure that everything loads correctly."""
try:
item = getattr(six.moves, item_name)
if isinstance(item, types.ModuleType):
__import__("six.moves." + item_name)
except AttributeError:
if item_name == "zip_longest" and sys.version_info < (2, 6):
py.test.skip("zip_longest only available on 2.6+")
except ImportError:
if item_name == "winreg" and not sys.platform.startswith("win"):
py.test.skip("Windows only module")
if item_name.startswith("tkinter"):
if not have_tkinter:
py.test.skip("requires tkinter")
if item_name == "tkinter_ttk" and sys.version_info[:2] <= (2, 6):
py.test.skip("ttk only available on 2.7+")
if item_name.startswith("dbm_gnu") and not have_gdbm:
py.test.skip("requires gdbm")
raise
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_parse_moved_attributes])
def test_move_items_urllib_parse(item_name):
"""Ensure that everything loads correctly."""
if item_name == "ParseResult" and sys.version_info < (2, 5):
py.test.skip("ParseResult is only found on 2.5+")
if item_name in ("parse_qs", "parse_qsl") and sys.version_info < (2, 6):
py.test.skip("parse_qs[l] is new in 2.6")
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.parse)
getattr(six.moves.urllib.parse, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_error_moved_attributes])
def test_move_items_urllib_error(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.error)
getattr(six.moves.urllib.error, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_request_moved_attributes])
def test_move_items_urllib_request(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.request)
getattr(six.moves.urllib.request, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_response_moved_attributes])
def test_move_items_urllib_response(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.response)
getattr(six.moves.urllib.response, item_name)
@py.test.mark.parametrize("item_name",
[item.name for item in six._urllib_robotparser_moved_attributes])
def test_move_items_urllib_robotparser(item_name):
"""Ensure that everything loads correctly."""
if sys.version_info[:2] >= (2, 6):
assert item_name in dir(six.moves.urllib.robotparser)
getattr(six.moves.urllib.robotparser, item_name)
def test_import_moves_error_1():
from six.moves.urllib.parse import urljoin
from six import moves
# In 1.4.1: AttributeError: 'Module_six_moves_urllib_parse' object has no attribute 'urljoin'
assert moves.urllib.parse.urljoin
def test_import_moves_error_2():
from six import moves
assert moves.urllib.parse.urljoin
# In 1.4.1: ImportError: cannot import name urljoin
from six.moves.urllib.parse import urljoin
def test_import_moves_error_3():
from six.moves.urllib.parse import urljoin
# In 1.4.1: ImportError: cannot import name urljoin
from six.moves.urllib_parse import urljoin
def test_from_imports():
from six.moves.queue import Queue
assert isinstance(Queue, six.class_types)
from six.moves.configparser import ConfigParser
assert isinstance(ConfigParser, six.class_types)
def test_filter():
from six.moves import filter
f = filter(lambda x: x % 2, range(10))
assert six.advance_iterator(f) == 1
def test_filter_false():
from six.moves import filterfalse
f = filterfalse(lambda x: x % 3, range(10))
assert six.advance_iterator(f) == 0
assert six.advance_iterator(f) == 3
assert six.advance_iterator(f) == 6
def test_map():
from six.moves import map
assert six.advance_iterator(map(lambda x: x + 1, range(2))) == 1
def test_zip():
from six.moves import zip
assert six.advance_iterator(zip(range(2), range(2))) == (0, 0)
@py.test.mark.skipif("sys.version_info < (2, 6)")
def test_zip_longest():
from six.moves import zip_longest
it = zip_longest(range(2), range(1))
assert six.advance_iterator(it) == (0, 0)
assert six.advance_iterator(it) == (1, None)
class TestCustomizedMoves:
def teardown_method(self, meth):
try:
del six._MovedItems.spam
except AttributeError:
pass
try:
del six.moves.__dict__["spam"]
except KeyError:
pass
def test_moved_attribute(self):
attr = six.MovedAttribute("spam", "foo", "bar")
if six.PY3:
assert attr.mod == "bar"
else:
assert attr.mod == "foo"
assert attr.attr == "spam"
attr = six.MovedAttribute("spam", "foo", "bar", "lemma")
assert attr.attr == "lemma"
attr = six.MovedAttribute("spam", "foo", "bar", "lemma", "theorm")
if six.PY3:
assert attr.attr == "theorm"
else:
assert attr.attr == "lemma"
def test_moved_module(self):
attr = six.MovedModule("spam", "foo")
if six.PY3:
assert attr.mod == "spam"
else:
assert attr.mod == "foo"
attr = six.MovedModule("spam", "foo", "bar")
if six.PY3:
assert attr.mod == "bar"
else:
assert attr.mod == "foo"
def test_custom_move_module(self):
attr = six.MovedModule("spam", "six", "six")
six.add_move(attr)
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
attr = six.MovedModule("spam", "six", "six")
six.add_move(attr)
from six.moves import spam
assert spam is six
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
def test_custom_move_attribute(self):
attr = six.MovedAttribute("spam", "six", "six", "u", "u")
six.add_move(attr)
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
attr = six.MovedAttribute("spam", "six", "six", "u", "u")
six.add_move(attr)
from six.moves import spam
assert spam is six.u
six.remove_move("spam")
assert not hasattr(six.moves, "spam")
def test_empty_remove(self):
py.test.raises(AttributeError, six.remove_move, "eggs")
def test_get_unbound_function():
class X(object):
def m(self):
pass
assert six.get_unbound_function(X.m) is X.__dict__["m"]
def test_get_method_self():
class X(object):
def m(self):
pass
x = X()
assert six.get_method_self(x.m) is x
py.test.raises(AttributeError, six.get_method_self, 42)
def test_get_method_function():
class X(object):
def m(self):
pass
x = X()
assert six.get_method_function(x.m) is X.__dict__["m"]
py.test.raises(AttributeError, six.get_method_function, hasattr)
def test_get_function_closure():
def f():
x = 42
def g():
return x
return g
cell = six.get_function_closure(f())[0]
assert type(cell).__name__ == "cell"
def test_get_function_code():
def f():
pass
assert isinstance(six.get_function_code(f), types.CodeType)
if not hasattr(sys, "pypy_version_info"):
py.test.raises(AttributeError, six.get_function_code, hasattr)
def test_get_function_defaults():
def f(x, y=3, b=4):
pass
assert six.get_function_defaults(f) == (3, 4)
def test_get_function_globals():
def f():
pass
assert six.get_function_globals(f) is globals()
def test_dictionary_iterators(monkeypatch):
def stock_method_name(iterwhat):
"""Given a method suffix like "lists" or "values", return the name
of the dict method that delivers those on the version of Python
we're running in."""
if six.PY3:
return iterwhat
return 'iter' + iterwhat
class MyDict(dict):
if not six.PY3:
def lists(self, **kw):
return [1, 2, 3]
def iterlists(self, **kw):
return iter([1, 2, 3])
f = MyDict.iterlists
del MyDict.iterlists
setattr(MyDict, stock_method_name('lists'), f)
d = MyDict(zip(range(10), reversed(range(10))))
for name in "keys", "values", "items", "lists":
meth = getattr(six, "iter" + name)
it = meth(d)
assert not isinstance(it, list)
assert list(it) == list(getattr(d, name)())
py.test.raises(StopIteration, six.advance_iterator, it)
record = []
def with_kw(*args, **kw):
record.append(kw["kw"])
return old(*args)
old = getattr(MyDict, stock_method_name(name))
monkeypatch.setattr(MyDict, stock_method_name(name), with_kw)
meth(d, kw=42)
assert record == [42]
monkeypatch.undo()
@py.test.mark.skipif(sys.version_info[:2] < (2, 7),
reason="view methods on dictionaries only available on 2.7+")
def test_dictionary_views():
def stock_method_name(viewwhat):
"""Given a method suffix like "keys" or "values", return the name
of the dict method that delivers those on the version of Python
we're running in."""
if six.PY3:
return viewwhat
return 'view' + viewwhat
d = dict(zip(range(10), (range(11, 20))))
for name in "keys", "values", "items":
meth = getattr(six, "view" + name)
view = meth(d)
assert set(view) == set(getattr(d, name)())
def test_advance_iterator():
assert six.next is six.advance_iterator
l = [1, 2]
it = iter(l)
assert six.next(it) == 1
assert six.next(it) == 2
py.test.raises(StopIteration, six.next, it)
py.test.raises(StopIteration, six.next, it)
def test_iterator():
class myiter(six.Iterator):
def __next__(self):
return 13
assert six.advance_iterator(myiter()) == 13
class myitersub(myiter):
def __next__(self):
return 14
assert six.advance_iterator(myitersub()) == 14
def test_callable():
class X:
def __call__(self):
pass
def method(self):
pass
assert six.callable(X)
assert six.callable(X())
assert six.callable(test_callable)
assert six.callable(hasattr)
assert six.callable(X.method)
assert six.callable(X().method)
assert not six.callable(4)
assert not six.callable("string")
def test_create_bound_method():
class X(object):
pass
def f(self):
return self
x = X()
b = six.create_bound_method(f, x)
assert isinstance(b, types.MethodType)
assert b() is x
if six.PY3:
def test_b():
data = six.b("\xff")
assert isinstance(data, bytes)
assert len(data) == 1
assert data == bytes([255])
def test_u():
s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
assert isinstance(s, str)
assert s == "hi \u0439 \U00000439 \\ \\\\ \n"
else:
def test_b():
data = six.b("\xff")
assert isinstance(data, str)
assert len(data) == 1
assert data == "\xff"
def test_u():
s = six.u("hi \u0439 \U00000439 \\ \\\\ \n")
assert isinstance(s, unicode)
assert s == "hi \xd0\xb9 \xd0\xb9 \\ \\\\ \n".decode("utf8")
def test_u_escapes():
s = six.u("\u1234")
assert len(s) == 1
def test_unichr():
assert six.u("\u1234") == six.unichr(0x1234)
assert type(six.u("\u1234")) is type(six.unichr(0x1234))
def test_int2byte():
assert six.int2byte(3) == six.b("\x03")
py.test.raises((OverflowError, ValueError), six.int2byte, 256)
def test_byte2int():
assert six.byte2int(six.b("\x03")) == 3
assert six.byte2int(six.b("\x03\x04")) == 3
py.test.raises(IndexError, six.byte2int, six.b(""))
def test_bytesindex():
assert six.indexbytes(six.b("hello"), 3) == ord("l")
def test_bytesiter():
it = six.iterbytes(six.b("hi"))
assert six.next(it) == ord("h")
assert six.next(it) == ord("i")
py.test.raises(StopIteration, six.next, it)
def test_StringIO():
fp = six.StringIO()
fp.write(six.u("hello"))
assert fp.getvalue() == six.u("hello")
def test_BytesIO():
fp = six.BytesIO()
fp.write(six.b("hello"))
assert fp.getvalue() == six.b("hello")
def test_exec_():
def f():
l = []
six.exec_("l.append(1)")
assert l == [1]
f()
ns = {}
six.exec_("x = 42", ns)
assert ns["x"] == 42
glob = {}
loc = {}
six.exec_("global y; y = 42; x = 12", glob, loc)
assert glob["y"] == 42
assert "x" not in glob
assert loc["x"] == 12
assert "y" not in loc
def test_reraise():
def get_next(tb):
if six.PY3:
return tb.tb_next.tb_next
else:
return tb.tb_next
e = Exception("blah")
try:
raise e
except Exception:
tp, val, tb = sys.exc_info()
try:
six.reraise(tp, val, tb)
except Exception:
tp2, value2, tb2 = sys.exc_info()
assert tp2 is Exception
assert value2 is e
assert tb is get_next(tb2)
try:
six.reraise(tp, val)
except Exception:
tp2, value2, tb2 = sys.exc_info()
assert tp2 is Exception
assert value2 is e
assert tb2 is not tb
try:
six.reraise(tp, val, tb2)
except Exception:
tp2, value2, tb3 = sys.exc_info()
assert tp2 is Exception
assert value2 is e
assert get_next(tb3) is tb2
try:
six.reraise(tp, None, tb)
except Exception:
tp2, value2, tb2 = sys.exc_info()
assert tp2 is Exception
assert value2 is not val
assert isinstance(value2, Exception)
assert tb is get_next(tb2)
def test_raise_from():
try:
try:
raise Exception("blah")
except Exception:
ctx = sys.exc_info()[1]
f = Exception("foo")
six.raise_from(f, None)
except Exception:
tp, val, tb = sys.exc_info()
if sys.version_info[:2] > (3, 0):
# We should have done a raise f from None equivalent.
assert val.__cause__ is None
assert val.__context__ is ctx
if sys.version_info[:2] >= (3, 3):
# And that should suppress the context on the exception.
assert val.__suppress_context__
# For all versions the outer exception should have raised successfully.
assert str(val) == "foo"
def test_print_():
save = sys.stdout
out = sys.stdout = six.moves.StringIO()
try:
six.print_("Hello,", "person!")
finally:
sys.stdout = save
assert out.getvalue() == "Hello, person!\n"
out = six.StringIO()
six.print_("Hello,", "person!", file=out)
assert out.getvalue() == "Hello, person!\n"
out = six.StringIO()
six.print_("Hello,", "person!", file=out, end="")
assert out.getvalue() == "Hello, person!"
out = six.StringIO()
six.print_("Hello,", "person!", file=out, sep="X")
assert out.getvalue() == "Hello,Xperson!\n"
out = six.StringIO()
six.print_(six.u("Hello,"), six.u("person!"), file=out)
result = out.getvalue()
assert isinstance(result, six.text_type)
assert result == six.u("Hello, person!\n")
six.print_("Hello", file=None) # This works.
out = six.StringIO()
six.print_(None, file=out)
assert out.getvalue() == "None\n"
@py.test.mark.skipif("sys.version_info[:2] >= (2, 6)")
def test_print_encoding(monkeypatch):
# Fool the type checking in print_.
monkeypatch.setattr(six, "file", six.BytesIO, raising=False)
out = six.BytesIO()
out.encoding = "utf-8"
out.errors = None
six.print_(six.u("\u053c"), end="", file=out)
assert out.getvalue() == six.b("\xd4\xbc")
out = six.BytesIO()
out.encoding = "ascii"
out.errors = "strict"
py.test.raises(UnicodeEncodeError, six.print_, six.u("\u053c"), file=out)
out.errors = "backslashreplace"
six.print_(six.u("\u053c"), end="", file=out)
assert out.getvalue() == six.b("\\u053c")
def test_print_exceptions():
py.test.raises(TypeError, six.print_, x=3)
py.test.raises(TypeError, six.print_, end=3)
py.test.raises(TypeError, six.print_, sep=42)
def test_with_metaclass():
class Meta(type):
pass
class X(six.with_metaclass(Meta)):
pass
assert type(X) is Meta
assert issubclass(X, object)
class Base(object):
pass
class X(six.with_metaclass(Meta, Base)):
pass
assert type(X) is Meta
assert issubclass(X, Base)
class Base2(object):
pass
class X(six.with_metaclass(Meta, Base, Base2)):
pass
assert type(X) is Meta
assert issubclass(X, Base)
assert issubclass(X, Base2)
assert X.__mro__ == (X, Base, Base2, object)
def test_wraps():
def f(g):
@six.wraps(g)
def w():
return 42
return w
def k():
pass
original_k = k
k = f(f(k))
assert hasattr(k, '__wrapped__')
k = k.__wrapped__
assert hasattr(k, '__wrapped__')
k = k.__wrapped__
assert k is original_k
assert not hasattr(k, '__wrapped__')
def f(g, assign, update):
def w():
return 42
w.glue = {"foo" : "bar"}
return six.wraps(g, assign, update)(w)
k.glue = {"melon" : "egg"}
k.turnip = 43
k = f(k, ["turnip"], ["glue"])
assert k.__name__ == "w"
assert k.turnip == 43
assert k.glue == {"melon" : "egg", "foo" : "bar"}
def test_add_metaclass():
class Meta(type):
pass
class X:
"success"
X = six.add_metaclass(Meta)(X)
assert type(X) is Meta
assert issubclass(X, object)
assert X.__module__ == __name__
assert X.__doc__ == "success"
class Base(object):
pass
class X(Base):
pass
X = six.add_metaclass(Meta)(X)
assert type(X) is Meta
assert issubclass(X, Base)
class Base2(object):
pass
class X(Base, Base2):
pass
X = six.add_metaclass(Meta)(X)
assert type(X) is Meta
assert issubclass(X, Base)
assert issubclass(X, Base2)
# Test a second-generation subclass of a type.
class Meta1(type):
m1 = "m1"
class Meta2(Meta1):
m2 = "m2"
class Base:
b = "b"
Base = six.add_metaclass(Meta1)(Base)
class X(Base):
x = "x"
X = six.add_metaclass(Meta2)(X)
assert type(X) is Meta2
assert issubclass(X, Base)
assert type(Base) is Meta1
assert "__dict__" not in vars(X)
instance = X()
instance.attr = "test"
assert vars(instance) == {"attr": "test"}
assert instance.b == Base.b
assert instance.x == X.x
# Test a class with slots.
class MySlots(object):
__slots__ = ["a", "b"]
MySlots = six.add_metaclass(Meta1)(MySlots)
assert MySlots.__slots__ == ["a", "b"]
instance = MySlots()
instance.a = "foo"
py.test.raises(AttributeError, setattr, instance, "c", "baz")
# Test a class with string for slots.
class MyStringSlots(object):
__slots__ = "ab"
MyStringSlots = six.add_metaclass(Meta1)(MyStringSlots)
assert MyStringSlots.__slots__ == "ab"
instance = MyStringSlots()
instance.ab = "foo"
py.test.raises(AttributeError, setattr, instance, "a", "baz")
py.test.raises(AttributeError, setattr, instance, "b", "baz")
class MySlotsWeakref(object):
__slots__ = "__weakref__",
MySlotsWeakref = six.add_metaclass(Meta)(MySlotsWeakref)
assert type(MySlotsWeakref) is Meta
| mpl-2.0 |
thnee/ansible | lib/ansible/modules/cloud/amazon/aws_ses_rule_set.py | 13 | 8315 | #!/usr/bin/python
# Copyright (c) 2017, Ben Tomasik <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: aws_ses_rule_set
short_description: Manages SES inbound receipt rule sets
description:
- The M(aws_ses_rule_set) module allows you to create, delete, and manage SES receipt rule sets
version_added: 2.8
author:
- "Ben Tomasik (@tomislacker)"
- "Ed Costello (@orthanc)"
requirements: [ boto3, botocore ]
options:
name:
description:
- The name of the receipt rule set.
required: True
type: str
state:
description:
- Whether to create (or update) or destroy the receipt rule set.
required: False
default: present
choices: ["absent", "present"]
type: str
active:
description:
- Whether or not this rule set should be the active rule set. Only has an impact if I(state) is C(present).
- If omitted, the active rule set will not be changed.
- If C(True) then this rule set will be made active and all others inactive.
- If C(False) then this rule set will be deactivated. Be careful with this as you can end up with no active rule set.
type: bool
required: False
force:
description:
- When deleting a rule set, deactivate it first (AWS prevents deletion of the active rule set).
type: bool
required: False
default: False
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
---
- name: Create default rule set and activate it if not already
aws_ses_rule_set:
name: default-rule-set
state: present
active: yes
- name: Create some arbitrary rule set but do not activate it
aws_ses_rule_set:
name: arbitrary-rule-set
state: present
- name: Explicitly deactivate the default rule set leaving no active rule set
aws_ses_rule_set:
name: default-rule-set
state: present
active: no
- name: Remove an arbitrary inactive rule set
aws_ses_rule_set:
name: arbitrary-rule-set
state: absent
- name: Remove a rule set even if we have to first deactivate it to remove it
aws_ses_rule_set:
name: default-rule-set
state: absent
force: yes
"""
RETURN = """
active:
description: if the SES rule set is active
returned: success if I(state) is C(present)
type: bool
sample: true
rule_sets:
description: The list of SES receipt rule sets that exist after any changes.
returned: success
type: list
sample: [{
"created_timestamp": "2018-02-25T01:20:32.690000+00:00",
"name": "default-rule-set"
}]
"""
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import camel_dict_to_snake_dict, AWSRetry
try:
from botocore.exceptions import BotoCoreError, ClientError
except ImportError:
pass # handled by AnsibleAWSModule
def list_rule_sets(client, module):
try:
response = client.list_receipt_rule_sets(aws_retry=True)
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Couldn't list rule sets.")
return response['RuleSets']
def rule_set_in(name, rule_sets):
return any([s for s in rule_sets if s['Name'] == name])
def ruleset_active(client, module, name):
try:
active_rule_set = client.describe_active_receipt_rule_set(aws_retry=True)
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Couldn't get the active rule set.")
if active_rule_set is not None and 'Metadata' in active_rule_set:
return name == active_rule_set['Metadata']['Name']
else:
# Metadata was not set meaning there is no active rule set
return False
def deactivate_rule_set(client, module):
try:
# No ruleset name deactivates all rulesets
client.set_active_receipt_rule_set(aws_retry=True)
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Couldn't set active rule set to None.")
def update_active_rule_set(client, module, name, desired_active):
check_mode = module.check_mode
active = ruleset_active(client, module, name)
changed = False
if desired_active is not None:
if desired_active and not active:
if not check_mode:
try:
client.set_active_receipt_rule_set(RuleSetName=name, aws_retry=True)
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Couldn't set active rule set to {0}.".format(name))
changed = True
active = True
elif not desired_active and active:
if not check_mode:
deactivate_rule_set(client, module)
changed = True
active = False
return changed, active
def create_or_update_rule_set(client, module):
name = module.params.get('name')
check_mode = module.check_mode
changed = False
rule_sets = list_rule_sets(client, module)
if not rule_set_in(name, rule_sets):
if not check_mode:
try:
client.create_receipt_rule_set(RuleSetName=name, aws_retry=True)
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Couldn't create rule set {0}.".format(name))
changed = True
rule_sets = list(rule_sets)
rule_sets.append({
'Name': name,
})
(active_changed, active) = update_active_rule_set(client, module, name, module.params.get('active'))
changed |= active_changed
module.exit_json(
changed=changed,
active=active,
rule_sets=[camel_dict_to_snake_dict(x) for x in rule_sets],
)
def remove_rule_set(client, module):
name = module.params.get('name')
check_mode = module.check_mode
changed = False
rule_sets = list_rule_sets(client, module)
if rule_set_in(name, rule_sets):
active = ruleset_active(client, module, name)
if active and not module.params.get('force'):
module.fail_json(
msg="Couldn't delete rule set {0} because it is currently active. Set force=true to delete an active ruleset.".format(name),
error={
"code": "CannotDelete",
"message": "Cannot delete active rule set: {0}".format(name),
}
)
if not check_mode:
if active and module.params.get('force'):
deactivate_rule_set(client, module)
try:
client.delete_receipt_rule_set(RuleSetName=name, aws_retry=True)
except (BotoCoreError, ClientError) as e:
module.fail_json_aws(e, msg="Couldn't delete rule set {0}.".format(name))
changed = True
rule_sets = [x for x in rule_sets if x['Name'] != name]
module.exit_json(
changed=changed,
rule_sets=[camel_dict_to_snake_dict(x) for x in rule_sets],
)
def main():
argument_spec = dict(
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
active=dict(type='bool'),
force=dict(type='bool', default=False),
)
module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)
state = module.params.get('state')
# SES APIs seem to have a much lower throttling threshold than most of the rest of the AWS APIs.
# Docs say 1 call per second. This shouldn't actually be a big problem for normal usage, but
# the ansible build runs multiple instances of the test in parallel, which has caused
# throttling failures, so apply a jittered backoff to the SES calls.
client = module.client('ses', retry_decorator=AWSRetry.jittered_backoff())
if state == 'absent':
remove_rule_set(client, module)
else:
create_or_update_rule_set(client, module)
if __name__ == '__main__':
main()
| gpl-3.0 |
noelbk/neutron-juniper | neutron/plugins/nec/db/api.py | 9 | 7966 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Ryota MIBU
import sqlalchemy as sa
from neutron.db import api as db
from neutron.db import model_base
from neutron.db import models_v2
from neutron.db import securitygroups_db as sg_db
from neutron.extensions import securitygroup as ext_sg
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.nec.common import config # noqa
from neutron.plugins.nec.common import exceptions as nexc
from neutron.plugins.nec.db import models as nmodels
LOG = logging.getLogger(__name__)
OFP_VLAN_NONE = 0xffff
resource_map = {'ofc_tenant': nmodels.OFCTenantMapping,
'ofc_network': nmodels.OFCNetworkMapping,
'ofc_port': nmodels.OFCPortMapping,
'ofc_router': nmodels.OFCRouterMapping,
'ofc_packet_filter': nmodels.OFCFilterMapping}
old_resource_map = {'ofc_tenant': nmodels.OFCTenant,
'ofc_network': nmodels.OFCNetwork,
'ofc_port': nmodels.OFCPort,
'ofc_packet_filter': nmodels.OFCFilter}
# utility methods
def _get_resource_model(resource, old_style):
if old_style:
# NOTE: Some new resources are not defined in old_resource_map.
# In that case None is returned.
return old_resource_map.get(resource)
else:
return resource_map[resource]
def initialize():
db.configure_db()
def clear_db(base=model_base.BASEV2):
db.clear_db(base)
def get_ofc_item(session, resource, neutron_id, old_style=False):
model = _get_resource_model(resource, old_style)
if not model:
return None
try:
return session.query(model).filter_by(quantum_id=neutron_id).one()
except sa.orm.exc.NoResultFound:
return None
def get_ofc_id(session, resource, neutron_id, old_style=False):
ofc_item = get_ofc_item(session, resource, neutron_id, old_style)
if ofc_item:
if old_style:
return ofc_item.id
else:
return ofc_item.ofc_id
else:
return None
def exists_ofc_item(session, resource, neutron_id, old_style=False):
if get_ofc_item(session, resource, neutron_id, old_style):
return True
else:
return False
def find_ofc_item(session, resource, ofc_id, old_style=False):
try:
model = _get_resource_model(resource, old_style)
if old_style:
params = dict(id=ofc_id)
else:
params = dict(ofc_id=ofc_id)
return (session.query(model).filter_by(**params).one())
except sa.orm.exc.NoResultFound:
return None
def add_ofc_item(session, resource, neutron_id, ofc_id, old_style=False):
try:
model = _get_resource_model(resource, old_style)
if old_style:
params = dict(quantum_id=neutron_id, id=ofc_id)
else:
params = dict(quantum_id=neutron_id, ofc_id=ofc_id)
item = model(**params)
with session.begin(subtransactions=True):
session.add(item)
session.flush()
except Exception as exc:
LOG.exception(exc)
raise nexc.NECDBException(reason=exc.message)
return item
def del_ofc_item(session, resource, neutron_id, old_style=False,
warning=True):
try:
model = _get_resource_model(resource, old_style)
with session.begin(subtransactions=True):
item = session.query(model).filter_by(quantum_id=neutron_id).one()
session.delete(item)
return True
except sa.orm.exc.NoResultFound:
if warning:
LOG.warning(_("_del_ofc_item(): NotFound item "
"(model=%(model)s, id=%(id)s) "),
{'model': model, 'id': neutron_id})
return False
def get_ofc_id_lookup_both(session, resource, neutron_id):
ofc_id = get_ofc_id(session, resource, neutron_id)
# Lookup old style of OFC mapping table
if not ofc_id:
ofc_id = get_ofc_id(session, resource, neutron_id,
old_style=True)
if not ofc_id:
reason = (_("NotFound %(resource)s for neutron_id=%(id)s.")
% {'resource': resource, 'id': neutron_id})
raise nexc.OFCConsistencyBroken(reason=reason)
return ofc_id
def exists_ofc_item_lookup_both(session, resource, neutron_id):
if exists_ofc_item(session, resource, neutron_id):
return True
# Check old style of OFC mapping table
if exists_ofc_item(session, resource, neutron_id,
old_style=True):
return True
return False
def del_ofc_item_lookup_both(session, resource, neutron_id):
# Delete the mapping from new style of OFC mapping table
if del_ofc_item(session, resource, neutron_id,
old_style=False, warning=False):
return
# Delete old style of OFC mapping table
if del_ofc_item(session, resource, neutron_id,
old_style=True, warning=False):
return
# The specified resource not found
LOG.warning(_("_del_ofc_item(): NotFound item "
"(resource=%(resource)s, id=%(id)s) "),
{'resource': resource, 'id': neutron_id})
def get_portinfo(session, id):
try:
return (session.query(nmodels.PortInfo).
filter_by(id=id).
one())
except sa.orm.exc.NoResultFound:
return None
def add_portinfo(session, id, datapath_id='', port_no=0,
vlan_id=OFP_VLAN_NONE, mac=''):
try:
portinfo = nmodels.PortInfo(id=id, datapath_id=datapath_id,
port_no=port_no, vlan_id=vlan_id, mac=mac)
with session.begin(subtransactions=True):
session.add(portinfo)
except Exception as exc:
LOG.exception(exc)
raise nexc.NECDBException(reason=exc.message)
return portinfo
def del_portinfo(session, id):
try:
with session.begin(subtransactions=True):
portinfo = session.query(nmodels.PortInfo).filter_by(id=id).one()
session.delete(portinfo)
except sa.orm.exc.NoResultFound:
LOG.warning(_("del_portinfo(): NotFound portinfo for "
"port_id: %s"), id)
def get_port_from_device(port_id):
"""Get port from database."""
LOG.debug(_("get_port_with_securitygroups() called:port_id=%s"), port_id)
session = db.get_session()
sg_binding_port = sg_db.SecurityGroupPortBinding.port_id
query = session.query(models_v2.Port,
sg_db.SecurityGroupPortBinding.security_group_id)
query = query.outerjoin(sg_db.SecurityGroupPortBinding,
models_v2.Port.id == sg_binding_port)
query = query.filter(models_v2.Port.id == port_id)
port_and_sgs = query.all()
if not port_and_sgs:
return None
port = port_and_sgs[0][0]
plugin = manager.NeutronManager.get_plugin()
port_dict = plugin._make_port_dict(port)
port_dict[ext_sg.SECURITYGROUPS] = [
sg_id for port_, sg_id in port_and_sgs if sg_id]
port_dict['security_group_rules'] = []
port_dict['security_group_source_groups'] = []
port_dict['fixed_ips'] = [ip['ip_address']
for ip in port['fixed_ips']]
return port_dict
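# Example usage (illustrative sketch; 'session' and 'network_id' are
# hypothetical values supplied by the caller):
#
#   ofc_id = get_ofc_id_lookup_both(session, 'ofc_network', network_id)
#   if exists_ofc_item_lookup_both(session, 'ofc_network', network_id):
#       del_ofc_item_lookup_both(session, 'ofc_network', network_id)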
| apache-2.0 |
kenorb-contrib/BitTorrent | BTL/twisted_brpc.py | 5 | 24332 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""A generic resource for publishing objects via BRPC.
Requires BRPC
API Stability: semi-stable
"""
from __future__ import nested_scopes
__version__ = "$Revision: 1.32 $"[11:-2]
# System Imports
import brpc
import urlparse
from cStringIO import StringIO
from gzip import GzipFile
pipeline_debug = False
version = "1.0"
from BTL.platform import app_name
from BTL.reactor_magic import reactor
from BTL.exceptions import str_exc
from BTL.protocol import SmartReconnectingClientFactory
from BTL.brpclib import ServerProxy
import twisted.web
if twisted.web.__version__ < '0.6.0':
raise ImportError("BTL.twisted_brpc requires twisted.web 0.6.0 or greater,"
" from Twisted 2.4.0.\nYou appear to have twisted.web "
"version %s installed at:\n%s" % (twisted.web.__version__,
twisted.web.__file__))
from twisted.web import resource, server
from twisted.internet import protocol
from twisted.python import log, reflect, failure
from twisted.web import http
from twisted.internet import defer
# Useful so people don't need to import brpc directly
Fault = brpc.Fault
class NoSuchFunction(Fault):
"""There is no function by the given name."""
pass
class Handler:
"""Handle a BRPC request and store the state for a request in progress.
Override the run() method and return result using self.result,
a Deferred.
We require this class since we're not using threads, so we can't
encapsulate state in a running function if we're going to have
to wait for results.
For example, lets say we want to authenticate against twisted.cred,
run a LDAP query and then pass its result to a database query, all
as a result of a single BRPC command. We'd use a Handler instance
to store the state of the running command.
"""
def __init__(self, resource, *args):
self.resource = resource # the BRPC resource we are connected to
self.result = defer.Deferred()
self.run(*args)
def run(self, *args):
# event driven equivalent of 'raise UnimplementedError'
try:
raise NotImplementedError("Implement run() in subclasses")
except:
self.result.errback(failure.Failure())
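# Example (illustrative sketch, not part of the original API): a Handler
# subclass that resolves self.result from another Deferred-returning
# call. 'lookup_user' is a hypothetical asynchronous function.
#
#   class UserHandler(Handler):
#       def run(self, username):
#           d = lookup_user(username)  # returns a Deferred
#           d.chainDeferred(self.result)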
def parse_accept_encoding(header):
a = header.split(',')
l = []
for i in a:
i = i.strip()
if ';' not in i:
type = i
            # No q-value given; RFC 2616 says it defaults to q=1.
l.append(('1', type))
else:
type, q = i.split(';')
type = type.strip()
q = q.strip()
junk, q = q.split('=')
q = q.strip()
if q != '0':
l.append((q, type))
l.sort()
l.reverse()
l = [ t for q, t in l ]
return l
class BRPC(resource.Resource):
"""A resource that implements BRPC.
You probably want to connect this to '/RPC2'.
Methods published can return BRPC serializable results, Faults,
Binary, Boolean, DateTime, Deferreds, or Handler instances.
By default methods beginning with 'brpc_' are published.
Sub-handlers for prefixed methods (e.g., system.listMethods)
can be added with putSubHandler. By default, prefixes are
separated with a '.'. Override self.separator to change this.
"""
# Error codes for Twisted, if they conflict with yours then
# modify them at runtime.
NOT_FOUND = 8001
FAILURE = 8002
isLeaf = 1
separator = '.'
def __init__(self):
resource.Resource.__init__(self)
self.subHandlers = {}
def putSubHandler(self, prefix, handler):
self.subHandlers[prefix] = handler
def getSubHandler(self, prefix):
return self.subHandlers.get(prefix, None)
def getSubHandlerPrefixes(self):
return self.subHandlers.keys()
def _err(self, *a, **kw):
log.err(*a, **kw)
def render(self, request):
request.setHeader('server', "%s/%s" % (app_name, version))
request.content.seek(0, 0)
args, functionPath = brpc.loads(request.content.read())
args, kwargs = args
request.functionPath = functionPath
try:
function = self._getFunction(functionPath)
except Fault, f:
self._cbRender(f, request)
else:
request.setHeader("content-type", "application/octet-stream")
defer.maybeDeferred(function, *args, **kwargs).addErrback(
self._ebRender
).addCallback(
self._cbRender, request
)
return server.NOT_DONE_YET
def _cbRender(self, result, request):
if isinstance(result, Handler):
result = result.result
if not isinstance(result, Fault):
result = (result,)
try:
s = brpc.dumps(result, methodresponse=1)
except Exception, e:
f = Fault(self.FAILURE,
"function:%s can't serialize output: %s" %
(request.functionPath, str_exc(e)))
self._err(f)
s = brpc.dumps(f, methodresponse=1)
encoding = request.getHeader("accept-encoding")
if encoding:
encodings = parse_accept_encoding(encoding)
if 'gzip' in encodings or '*' in encodings:
sio = StringIO()
g = GzipFile(fileobj=sio, mode='wb', compresslevel=9)
g.write(s)
g.close()
s = sio.getvalue()
request.setHeader("Content-Encoding", "gzip")
request.setHeader("content-length", str(len(s)))
request.write(s)
request.finish()
def _ebRender(self, failure):
self._err(failure)
if isinstance(failure.value, Fault):
return failure.value
return Fault(self.FAILURE, "An unhandled exception occurred: %s" %
failure.getErrorMessage())
def _getFunction(self, functionPath):
"""Given a string, return a function, or raise NoSuchFunction.
This returned function will be called, and should return the result
of the call, a Deferred, or a Fault instance.
Override in subclasses if you want your own policy. The default
policy is that given functionPath 'foo', return the method at
self.brpc_foo, i.e. getattr(self, "brpc_" + functionPath).
If functionPath contains self.separator, the sub-handler for
the initial prefix is used to search for the remaining path.
"""
if functionPath.find(self.separator) != -1:
prefix, functionPath = functionPath.split(self.separator, 1)
handler = self.getSubHandler(prefix)
            if handler is None:
                raise NoSuchFunction(self.NOT_FOUND,
                                     "no such subHandler %s" % prefix)
return handler._getFunction(functionPath)
f = getattr(self, "brpc_%s" % functionPath, None)
if not f:
raise NoSuchFunction(self.NOT_FOUND, "function %s not found" % functionPath)
elif not callable(f):
raise NoSuchFunction(self.NOT_FOUND, "function %s not callable" % functionPath)
else:
return f
def _listFunctions(self):
"""Return a list of the names of all brpc methods."""
return reflect.prefixedMethodNames(self.__class__, 'brpc_')
class BRPCIntrospection(BRPC):
"""Implement the BRPC Introspection API.
By default, the methodHelp method returns the 'help' method attribute,
if it exists, otherwise the __doc__ method attribute, if it exists,
otherwise the empty string.
To enable the methodSignature method, add a 'signature' method attribute
containing a list of lists. See methodSignature's documentation for the
format. Note the type strings should be BRPC types, not Python types.
"""
def __init__(self, parent):
"""Implement Introspection support for an BRPC server.
@param parent: the BRPC server to add Introspection support to.
"""
BRPC.__init__(self)
self._brpc_parent = parent
def brpc_listMethods(self):
"""Return a list of the method names implemented by this server."""
functions = []
todo = [(self._brpc_parent, '')]
while todo:
obj, prefix = todo.pop(0)
functions.extend([ prefix + name for name in obj._listFunctions() ])
todo.extend([ (obj.getSubHandler(name),
prefix + name + obj.separator)
for name in obj.getSubHandlerPrefixes() ])
return functions
brpc_listMethods.signature = [['array']]
def brpc_methodHelp(self, method):
"""Return a documentation string describing the use of the given method.
"""
method = self._brpc_parent._getFunction(method)
return (getattr(method, 'help', None)
or getattr(method, '__doc__', None) or '')
brpc_methodHelp.signature = [['string', 'string']]
def brpc_methodSignature(self, method):
"""Return a list of type signatures.
Each type signature is a list of the form [rtype, type1, type2, ...]
where rtype is the return type and typeN is the type of the Nth
argument. If no signature information is available, the empty
string is returned.
"""
method = self._brpc_parent._getFunction(method)
return getattr(method, 'signature', None) or ''
brpc_methodSignature.signature = [['array', 'string'],
['string', 'string']]
def addIntrospection(brpc):
"""Add Introspection support to an BRPC server.
@param brpc: The brpc server to add Introspection support to.
"""
brpc.putSubHandler('system', BRPCIntrospection(brpc))
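# Example (illustrative sketch): publishing methods over BRPC. Any method
# named 'brpc_*' becomes remotely callable; addIntrospection() adds the
# 'system.*' helpers.
#
#   class Echoer(BRPC):
#       def brpc_echo(self, s):
#           return s
#
#   root = Echoer()
#   addIntrospection(root)
#   reactor.listenTCP(8080, server.Site(root))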
class Query(object):
def __init__(self, path, host, method, user=None, password=None, *args):
self.path = path
self.host = host
self.user = user
self.password = password
self.method = method
self.payload = brpc.dumps(args, method)
self.deferred = defer.Deferred()
self.decode = False
class QueryProtocol(http.HTTPClient):
# All current queries are pipelined over the connection at
# once. When the connection is made, or as queries are made
# while a connection exists, queries are all sent to the
# server. Pipelining limits can be controlled by the caller.
# When a query completes (see parseResponse), if there are no
# more queries then an idle timeout gets sets.
# The QueryFactory reopens the connection if another query occurs.
#
    # twisted_brpc does not currently provide a mechanism for
# per-query timeouts. This could be added with another
# timeout_call mechanism that calls loseConnection and pops the
# current query with an errback.
timeout = 300 # idle timeout.
def log(self, msg, *a):
print "%s: %s: %r" % (self.peer, msg, a)
def connectionMade(self):
http.HTTPClient.connectionMade(self)
self.current_queries = []
self.timeout_call = None
if pipeline_debug:
p = self.transport.getPeer()
p = "%s:%d" % (p.host, p.port)
self.peer = (id(self.transport), p)
self.factory.connectionMade(self)
def _cancelTimeout(self):
if self.timeout_call and self.timeout_call.active():
self.timeout_call.cancel()
self.timeout_call = None
def connectionLost(self, reason):
http.HTTPClient.connectionLost(self, reason)
if pipeline_debug: self.log('connectionLost', reason.getErrorMessage())
self._cancelTimeout()
if self.current_queries:
# queries failed, put them back
if pipeline_debug: self.log('putting back', [q.method for q in self.current_queries])
self.factory.prependQueries(self.current_queries)
self.factory.connectionLost(self)
def sendCommand(self, command, path):
self.transport.write('%s %s HTTP/1.1\r\n' % (command, path))
def setLineMode(self, rest):
        # Reset firstLine so that the status line of the next pipelined
        # response gets parsed; HTTPClient does not do this itself.
self.firstLine = 1
return http.HTTPClient.setLineMode(self, rest)
def sendQuery(self):
self._cancelTimeout()
query = self.factory.popQuery()
if pipeline_debug: self.log('sending', query.method)
self.current_queries.append(query)
self.sendCommand('POST', query.path)
self.sendHeader('User-Agent', 'BTL/BRPC 1.0')
self.sendHeader('Host', query.host)
self.sendHeader('Accept-encoding', 'gzip')
self.sendHeader('Connection', 'Keep-Alive')
self.sendHeader('Content-type', 'application/octet-stream')
self.sendHeader('Content-length', str(len(query.payload)))
#if query.user:
# auth = '%s:%s' % (query.user, query.password)
# auth = auth.encode('base64').strip()
# self.sendHeader('Authorization', 'Basic %s' % (auth,))
self.endHeaders()
self.transport.write(query.payload)
def parseResponse(self, contents):
query = self.current_queries.pop(0)
if pipeline_debug: self.log('responded', query.method)
if not self.current_queries:
assert not self.factory.anyQueries()
assert not self.timeout_call
self.timeout_call = reactor.callLater(self.timeout,
self.transport.loseConnection)
try:
response = brpc.loads(contents)
except Exception, e:
query.deferred.errback(failure.Failure())
del query.deferred
else:
query.deferred.callback(response[0][0])
del query.deferred
def badStatus(self, status, message):
query = self.current_queries.pop(0)
if pipeline_debug: self.log('failed', query.method)
try:
raise ValueError(status, message)
except:
query.deferred.errback(failure.Failure())
del query.deferred
self.transport.loseConnection()
def handleStatus(self, version, status, message):
if status != '200':
self.badStatus(status, message)
def handleHeader(self, key, val):
if not self.current_queries[0].decode:
if key.lower() == 'content-encoding' and val.lower() == 'gzip':
self.current_queries[0].decode = True
def handleResponse(self, contents):
if self.current_queries[0].decode:
s = StringIO()
s.write(contents)
            s.seek(0)  # rewind to the start before decompressing
g = GzipFile(fileobj=s, mode='rb')
contents = g.read()
g.close()
self.parseResponse(contents)
class QueryFactory(object):
def __init__(self):
self.queries = []
self.instance = None
def connectionMade(self, instance):
self.instance = instance
if pipeline_debug: print 'connection made %s' % str(instance.peer)
while self.anyQueries():
self.instance.sendQuery()
def connectionLost(self, instance):
assert self.instance == instance
if pipeline_debug: print 'connection lost %s' % str(instance.peer)
self.instance = None
def prependQueries(self, queries):
self.queries = queries + self.queries
def popQuery(self):
return self.queries.pop(0)
def anyQueries(self):
return bool(self.queries)
def addQuery(self, query):
self.queries.append(query)
if pipeline_debug: print 'addQuery: %s %s' % (self.instance, self.queries)
if self.instance:
self.instance.sendQuery()
def disconnect(self):
if not self.instance:
return
if not hasattr(self.instance, 'transport'):
return
self.instance.transport.loseConnection()
class PersistantSingletonFactory(QueryFactory, SmartReconnectingClientFactory):
def clientConnectionFailed(self, connector, reason):
if pipeline_debug: print 'clientConnectionFailed %s' % str(connector)
return SmartReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, unused_reason):
self.started = False
if not self.anyQueries():
self.continueTrying = False
return SmartReconnectingClientFactory.clientConnectionLost(self, connector, unused_reason)
class SingletonFactory(QueryFactory, protocol.ClientFactory):
def clientConnectionFailed(self, connector, reason):
if pipeline_debug: print 'clientConnectionFailed %s' % str(connector)
queries = list(self.queries)
del self.queries[:]
for query in queries:
query.deferred.errback(reason)
self.started = False
class Proxy:
"""A Proxy for making remote BRPC calls.
Pass the URL of the remote BRPC server to the constructor.
Use proxy.callRemote('foobar', *args) to call remote method
'foobar' with *args.
"""
def __init__(self, url, user=None, password=None, retry_forever = True):
"""
@type url: C{str}
@param url: The URL to which to post method calls. Calls will be made
over SSL if the scheme is HTTPS. If netloc contains username or
password information, these will be used to authenticate, as long as
the C{user} and C{password} arguments are not specified.
@type user: C{str} or None
@param user: The username with which to authenticate with the server
when making calls. If specified, overrides any username information
embedded in C{url}. If not specified, a value may be taken from C{url}
if present.
@type password: C{str} or None
@param password: The password with which to authenticate with the
server when making calls. If specified, overrides any password
information embedded in C{url}. If not specified, a value may be taken
from C{url} if present.
"""
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
netlocParts = netloc.split('@')
if len(netlocParts) == 2:
userpass = netlocParts.pop(0).split(':')
self.user = userpass.pop(0)
            try:
                self.password = userpass.pop(0)
            except IndexError:
                self.password = None
else:
self.user = self.password = None
hostport = netlocParts[0].split(':')
self.host = hostport.pop(0)
        try:
            self.port = int(hostport.pop(0))
        except (IndexError, ValueError):
            self.port = None
self.path = path
if self.path in ['', None]:
self.path = '/'
self.secure = (scheme == 'https')
if user is not None:
self.user = user
if password is not None:
self.password = password
if not retry_forever:
_Factory = SingletonFactory
else:
_Factory = PersistantSingletonFactory
self.factory = _Factory()
self.factory.started = False
self.factory.protocol = QueryProtocol
def callRemote(self, method, *args, **kwargs):
if pipeline_debug: print 'callRemote to %s : %s' % (self.host, method)
args = (args, kwargs)
query = Query(self.path, self.host, method, self.user,
self.password, *args)
self.factory.addQuery(query)
if pipeline_debug: print 'factory started: %s' % self.factory.started
if not self.factory.started:
self.factory.started = True
def connect(host):
if self.secure:
if pipeline_debug: print 'connecting to %s' % str((host, self.port or 443))
from twisted.internet import ssl
reactor.connectSSL(host, self.port or 443,
self.factory, ssl.ClientContextFactory(),
timeout=60)
else:
if pipeline_debug: print 'connecting to %s' % str((host, self.port or 80))
reactor.connectTCP(host, self.port or 80, self.factory,
timeout=60)
df = reactor.resolve(self.host)
df.addCallback(connect)
df.addErrback(query.deferred.errback)
return query.deferred
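# Example usage (illustrative; the URL and method name are hypothetical):
#
#   proxy = Proxy('http://127.0.0.1:8080/RPC2')
#   df = proxy.callRemote('echo', 'hello')
#   df.addCallback(lambda result: log.msg(repr(result)))
#   reactor.run()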
class AsyncServerProxy(object):
def __init__(self, base_url, username=None, password=None, debug=False,
retry_forever = True):
self.base_url = base_url
self.username = username
self.password = password
self.proxy = Proxy(self.base_url, self.username, self.password, retry_forever)
self.debug = debug
def __getattr__(self, attr):
return self._make_call(attr)
def _make_call(self, methodname):
return lambda *a, **kw : self._method(methodname, *a, **kw)
def _method(self, methodname, *a, **kw):
# in case they have changed
self.proxy.user = self.username
self.proxy.password = self.password
if self.debug:
print ('callRemote:', self.__class__.__name__,
self.base_url, methodname, a, kw)
df = self.proxy.callRemote(methodname, *a, **kw)
return df
class EitherServerProxy(object):
    """Server Proxy that supports both asynchronous and synchronous calls."""
    SYNC = 0
    ASYNC = 1
    SYNC_DEFERRED = 2  # BE CAREFUL to call getResult() on the returned Deferred!
def __init__(self, base_url, username = None, password = None, debug = False,
async = ASYNC, retry_forever = True ):
"""
The EitherServerProxy can make either synchronous or asynchronous calls.
The default is specified by the async parameter to __init__, but each
individual call can override the default behavior by passing 'async' as
a boolean keyword argument to any method call. The async keyword
argument can also be set to None. However, passing async as
None means simply 'use default behavior'. When calling with async=SYNC,
you should not be in the same thread as the reactor or you risk
blocking the reactor.
@param async: determines whether the default is asynchronous or blocking calls."""
assert async in [SYNC, ASYNC, SYNC_DEFERRED]
self.async = async
self.async_proxy = AsyncServerProxy( base_url, username, password, debug,
retry_forever = retry_forever )
# HERE HACK. retry_forever is not supported by ServerProxy.
self.sync_proxy = ServerProxy( base_url )
def __getattr__(self, attr):
return self._make_call(attr)
def _make_call(self, methodname):
return lambda *a, **kw : self._method(methodname, *a, **kw)
def _method(self, methodname, *a, **kw ):
async = kw.pop('async', self.async)
if async is None:
async = self.async
if async == ASYNC:
df = self.async_proxy._method(methodname, *a, **kw)
elif async == SYNC_DEFERRED:
df = defer.execute(getattr(self.sync_proxy, methodname), *a, **kw)
else:
return self.sync_proxy.__getattr__(methodname)(*a, **kw)
return df
SYNC = EitherServerProxy.SYNC
ASYNC = EitherServerProxy.ASYNC
SYNC_DEFERRED = EitherServerProxy.SYNC_DEFERRED
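# Example (illustrative): one proxy object serving both call styles; the
# per-call 'async' keyword overrides the constructor default.
#
#   proxy = EitherServerProxy('http://127.0.0.1:8080/RPC2', async=ASYNC)
#   df = proxy.some_method(1, 2)                 # returns a Deferred
#   value = proxy.some_method(1, 2, async=SYNC)  # blocks for the result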
__all__ = ["BRPC", "Handler", "NoSuchFunction", "Fault", "Proxy", "AsyncServerProxy", "EitherServerProxy"]
| gpl-3.0 |
F5Networks/f5-common-python | f5/bigip/tm/asm/test/functional/test_signature_statuses.py | 1 | 2730 | # Copyright 2015 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from f5.bigip.tm.asm.signature_statuses import Signature_Status
import pytest
from requests.exceptions import HTTPError
def get_sigstatid(request, mgmt_root):
sigcoll = mgmt_root.tm.asm.signature_statuses_s.get_collection()
# We obtain the ID for the resource to test and return the hashed id
hashid = str(sigcoll[0].id)
return hashid
class TestSignatureStatuses(object):
def test_load_no_object(self, mgmt_root):
with pytest.raises(HTTPError) as err:
mgmt_root.tm.asm.signature_statuses_s.signature_status.load(
id='Lx3553-321'
)
assert err.response.status_code == 404
def test_load(self, request, mgmt_root):
hashid = get_sigstatid(request, mgmt_root)
sigstat = mgmt_root.tm.asm.signature_statuses_s.signature_status.load(id=hashid)
kind = 'tm:asm:signature-statuses:signature-statusstate'
baseuri = 'https://localhost/mgmt/tm/asm/signature-statuses/'
final_uri = baseuri + hashid
assert sigstat.id == hashid
assert sigstat.kind == kind
assert sigstat.selfLink.startswith(final_uri)
assert sigstat.isUserDefined is False
def test_refresh(self, request, mgmt_root):
hashid = get_sigstatid(request, mgmt_root)
sigstat = mgmt_root.tm.asm.signature_statuses_s.signature_status.load(id=hashid)
sigstat2 = mgmt_root.tm.asm.signature_statuses_s.signature_status.load(id=hashid)
kind = 'tm:asm:signature-statuses:signature-statusstate'
baseuri = 'https://localhost/mgmt/tm/asm/signature-statuses/'
final_uri = baseuri + hashid
assert sigstat.id == hashid
assert sigstat.kind == kind
assert sigstat.selfLink.startswith(final_uri)
sigstat2.refresh()
assert sigstat.id == sigstat2.id
assert sigstat.kind == sigstat2.kind
assert sigstat.selfLink == sigstat2.selfLink
def test_collection(self, mgmt_root):
sc = mgmt_root.tm.asm.signature_statuses_s.get_collection()
assert isinstance(sc, list)
assert len(sc)
assert isinstance(sc[0], Signature_Status)
| apache-2.0 |
shaufi10/odoo | openerp/addons/base/ir/ir_exports.py | 338 | 1672 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class ir_exports(osv.osv):
_name = "ir.exports"
_order = 'name'
_columns = {
'name': fields.char('Export Name'),
'resource': fields.char('Resource', select=True),
'export_fields': fields.one2many('ir.exports.line', 'export_id',
'Export ID', copy=True),
}
class ir_exports_line(osv.osv):
_name = 'ir.exports.line'
_order = 'id'
_columns = {
'name': fields.char('Field Name'),
'export_id': fields.many2one('ir.exports', 'Export', select=True, ondelete='cascade'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
cucs-numpde/class | fdtools.py | 1 | 3922 | import numpy
def cosspace(a, b, n=50):
return (a + b)/2 + (b - a)/2 * (numpy.cos(numpy.linspace(-numpy.pi, 0, n)))
def vander_chebyshev(x, n=None):
if n is None:
n = len(x)
T = numpy.ones((len(x), n))
if n > 1:
T[:,1] = x
for k in range(2,n):
T[:,k] = 2 * x * T[:,k-1] - T[:,k-2]
return T
def chebeval(z, n=None):
"""Build matrices to evaluate the n-term Chebyshev expansion and its derivatives at point(s) z"""
z = numpy.array(z, ndmin=1)
if n is None:
n = len(z)
Tz = vander_chebyshev(z, n)
dTz = numpy.zeros_like(Tz)
dTz[:,1] = 1
dTz[:,2] = 4*z
ddTz = numpy.zeros_like(Tz)
ddTz[:,2] = 4
for n in range(3,n):
dTz[:,n] = n * (2*Tz[:,n-1] + dTz[:,n-2]/(n-2))
ddTz[:,n] = n * (2*dTz[:,n-1] + ddTz[:,n-2]/(n-2))
return [Tz, dTz, ddTz]
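# Illustrative check (added example, not in the original module):
# differentiate exp(x) through its Chebyshev interpolant built on
# cosine-spaced points; tolerances are loose for safety.
def _demo_chebeval():
    x = cosspace(-1, 1, 20)
    # Interpolation coefficients: solve V c = f at the sample points.
    c = numpy.linalg.solve(vander_chebyshev(x), numpy.exp(x))
    Tz, dTz, _ = chebeval([0.5], n=20)
    assert abs(Tz.dot(c)[0] - numpy.exp(0.5)) < 1e-12
    assert abs(dTz.dot(c)[0] - numpy.exp(0.5)) < 1e-8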
def fdstencilV(z, x):
"""Compute finite difference weights using a Vandermonde matrix"""
x = numpy.array(x)
V = numpy.vander(x - z, increasing=True)
    scaling = numpy.array([math.factorial(i) for i in range(len(x))])
return (numpy.linalg.inv(V).T * scaling).T
def fdstencil(z, x, nderiv=None):
"""Compute finite difference weights using recurrences for Lagrange polynomials (see Fornberg 1998)"""
if nderiv is None:
nderiv = len(x)
x = numpy.array(x) - z
k = numpy.arange(nderiv+1)
c = numpy.outer(0.*k, x)
c[0,0] = 1
prod = 1
for j in range(1,len(x)):
dx = x[j] - x[:j]
c[1:,j] = x[j-1]*c[1:,j-1] - k[1:]*c[:-1,j-1]
c[0,j] = x[j-1]*c[0,j-1]
c[:,j] *= -prod
prod = numpy.prod(dx)
c[:,j] /= prod
c[1:,:j] = (x[j]*c[1:,:j] - k[1:,None]*c[:-1,:j]) / dx
c[0,:j] = x[j]*c[0,:j] / dx
return c
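# Illustrative check (added example): the classical centered weights fall
# out of fdstencil. Row k of the returned array holds the weights of the
# k-th derivative at z.
def _demo_fdstencil():
    c = fdstencil(0, [-1, 0, 1])
    assert numpy.allclose(c[0], [0, 1, 0])       # interpolation at z=0
    assert numpy.allclose(c[1], [-0.5, 0, 0.5])  # first derivative
    assert numpy.allclose(c[2], [1, -2, 1])      # second derivative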
def fdcompact(z, x, k):
"""Compute a compact (implicit) differencing scheme
b @ u^(k)(z) = c @ u(x)
that maximizes the accuracy of u^(k)(z[0])."""
z = numpy.array(z)
x = numpy.array(x)
n = len(x)
x = x - z[0]
z = z - z[0]
xmin, xmax = min(x), max(x)
dx = (xmax - xmin) / (n - 1)
y = numpy.zeros(n + len(z) - 1)
y[:n] = x
for i in range(1, len(z)):
if (z[i] < 0):
xmin -= dx
y[n + i - 1] = xmin
else:
xmax += dx
y[n + i - 1] = xmax
S = numpy.array([fdstencil(t, y, k)[k] for t in z])
b = numpy.ones(len(z))
T = S[1:,n:].T
b[1:] = numpy.linalg.lstsq(T, -S[0,n:])[0]
c = b.dot(S[:,:n])
return b, c
def dispersion(z, x, b, c):
from matplotlib import pyplot
theta = numpy.linspace(0, numpy.pi, 100)[1:]
phiz = numpy.exp(1j*numpy.outer(z, theta))
phix = numpy.exp(1j*numpy.outer(x, theta))
pyplot.plot(theta, (c.dot(phix) / b.dot(phiz)).imag, '.')
pyplot.plot(theta, theta)
pyplot.plot(theta, numpy.sin(theta))
pyplot.show()
def rk_butcher_4():
A = numpy.array([[0,0,0,0],[.5,0,0,0],[0,.5,0,0],[0,0,1,0]])
b = numpy.array([1/6, 1/3, 1/3, 1/6])
return A, b
def rk_butcher_ssp32():
A = numpy.array([[0, 0, 0],
[1/2, 0, 0],
[1/2, 1/2, 0]])
b = numpy.array([1/3, 1/3, 1/3])
return A, b
def ode_rkexplicit(f, u0, butcher=None, tfinal=1, h=.1):
if butcher is None:
A, b = rk_butcher_4()
else:
A, b = butcher
c = numpy.sum(A, axis=1)
s = len(c)
u = u0.copy()
t = 0
hist = [(t,u0)]
while t < tfinal:
if tfinal - t < 1.01*h:
h = tfinal - t
tnext = tfinal
else:
tnext = t + h
h = min(h, tfinal - t)
fY = numpy.zeros((len(u0), s))
for i in range(s):
Yi = u.copy()
for j in range(i):
Yi += h * A[i,j] * fY[:,j]
fY[:,i] = f(t + h*c[i], Yi)
u += h * fY.dot(b)
t = tnext
hist.append((t, u.copy()))
return hist
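# Illustrative use (added example): integrate u' = -u with the default
# RK4 tableau and compare against the exact solution exp(-t).
def _demo_rk():
    hist = ode_rkexplicit(lambda t, u: -u, numpy.array([1.0]), h=0.1)
    t_end, u_end = hist[-1]
    assert abs(u_end[0] - numpy.exp(-t_end)) < 1e-5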
| bsd-2-clause |
UrusTeam/android_ndk_toolchain_cross | lib/python2.7/ctypes/test/test_arrays.py | 72 | 4269 | import unittest
from ctypes import *
formats = "bBhHiIlLqQfd"
formats = c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, \
c_long, c_ulonglong, c_float, c_double, c_longdouble
class ArrayTestCase(unittest.TestCase):
def test_simple(self):
# create classes holding simple numeric types, and check
# various properties.
init = range(15, 25)
for fmt in formats:
alen = len(init)
int_array = ARRAY(fmt, alen)
ia = int_array(*init)
# length of instance ok?
self.assertEqual(len(ia), alen)
# slot values ok?
values = [ia[i] for i in range(len(init))]
self.assertEqual(values, init)
# change the items
from operator import setitem
new_values = range(42, 42+alen)
[setitem(ia, n, new_values[n]) for n in range(alen)]
values = [ia[i] for i in range(len(init))]
self.assertEqual(values, new_values)
# are the items initialized to 0?
ia = int_array()
values = [ia[i] for i in range(len(init))]
self.assertEqual(values, [0] * len(init))
# Too many initializers should be caught
self.assertRaises(IndexError, int_array, *range(alen*2))
CharArray = ARRAY(c_char, 3)
ca = CharArray("a", "b", "c")
# Should this work? It doesn't:
# CharArray("abc")
self.assertRaises(TypeError, CharArray, "abc")
self.assertEqual(ca[0], "a")
self.assertEqual(ca[1], "b")
self.assertEqual(ca[2], "c")
self.assertEqual(ca[-3], "a")
self.assertEqual(ca[-2], "b")
self.assertEqual(ca[-1], "c")
self.assertEqual(len(ca), 3)
# slicing is now supported, but not extended slicing (3-argument)!
from operator import getslice, delitem
self.assertRaises(TypeError, getslice, ca, 0, 1, -1)
# cannot delete items
self.assertRaises(TypeError, delitem, ca, 0)
def test_numeric_arrays(self):
alen = 5
numarray = ARRAY(c_int, alen)
na = numarray()
values = [na[i] for i in range(alen)]
self.assertEqual(values, [0] * alen)
na = numarray(*[c_int()] * alen)
values = [na[i] for i in range(alen)]
self.assertEqual(values, [0]*alen)
na = numarray(1, 2, 3, 4, 5)
values = [i for i in na]
self.assertEqual(values, [1, 2, 3, 4, 5])
na = numarray(*map(c_int, (1, 2, 3, 4, 5)))
values = [i for i in na]
self.assertEqual(values, [1, 2, 3, 4, 5])
def test_classcache(self):
self.assertTrue(not ARRAY(c_int, 3) is ARRAY(c_int, 4))
self.assertTrue(ARRAY(c_int, 3) is ARRAY(c_int, 3))
def test_from_address(self):
# Failed with 0.9.8, reported by JUrner
p = create_string_buffer("foo")
sz = (c_char * 3).from_address(addressof(p))
self.assertEqual(sz[:], "foo")
self.assertEqual(sz[::], "foo")
self.assertEqual(sz[::-1], "oof")
self.assertEqual(sz[::3], "f")
self.assertEqual(sz[1:4:2], "o")
self.assertEqual(sz.value, "foo")
try:
create_unicode_buffer
except NameError:
pass
else:
def test_from_addressW(self):
p = create_unicode_buffer("foo")
sz = (c_wchar * 3).from_address(addressof(p))
self.assertEqual(sz[:], "foo")
self.assertEqual(sz[::], "foo")
self.assertEqual(sz[::-1], "oof")
self.assertEqual(sz[::3], "f")
self.assertEqual(sz[1:4:2], "o")
self.assertEqual(sz.value, "foo")
def test_cache(self):
# Array types are cached internally in the _ctypes extension,
# in a WeakValueDictionary. Make sure the array type is
# removed from the cache when the itemtype goes away. This
# test will not fail, but will show a leak in the testsuite.
# Create a new type:
class my_int(c_int):
pass
# Create a new array type based on it:
t1 = my_int * 1
t2 = my_int * 1
self.assertTrue(t1 is t2)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
arpankapoor/pambu | pambu.py | 1 | 9458 | #!/usr/bin/env python3
import copy
import curses
import curses.ascii
from enum import Enum
import locale
import math
import sys
import signal
class Direction(Enum):
north, east, south, west = range(4)
def is_opp(self, other):
return ((self == Direction.north and other == Direction.south) or
(self == Direction.south and other == Direction.north) or
(self == Direction.east and other == Direction.west) or
(self == Direction.west and other == Direction.east))
class Point:
"""A point represented by a *y* and *x* coordinate"""
def __init__(self, y, x):
self.y = y
self.x = x
def __str__(self):
return str(self.__dict__)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
def distance_from(self, other):
dy = other.y - self.y
dx = other.x - self.x
return math.sqrt(dy**2 + dx**2)
def has_same_y(self, other):
return self.y == other.y
def has_same_x(self, other):
return self.x == other.x
def is_to_the_left_of(self, other):
return self.x <= other.x
def is_to_the_right_of(self, other):
return self.x >= other.x
def is_above(self, other):
return self.y <= other.y
def is_below(self, other):
return self.y >= other.y
def move(self, direction):
"""Move 1 unit in given direction"""
if direction == Direction.north:
self.y -= 1
elif direction == Direction.west:
self.x -= 1
elif direction == Direction.south:
self.y += 1
elif direction == Direction.east:
self.x += 1
class LineSegment:
"""A line segment represented by a head and tail point"""
def __init__(self, head, tail):
self.head = head
self.tail = tail
def __str__(self):
return str(self.__dict__)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
def length(self):
return self.head.distance_from(self.tail)
def is_vertical(self):
return self.head.has_same_x(self.tail)
def is_horizontal(self):
return self.head.has_same_y(self.tail)
def increment(self):
"""Increase the line segment length by 1 from the head"""
if self.is_horizontal():
if self.head.x < self.tail.x:
self.head.move(Direction.west)
else:
self.head.move(Direction.east)
elif self.is_vertical():
if self.head.y < self.tail.y:
self.head.move(Direction.north)
else:
self.head.move(Direction.south)
def decrement(self):
"""Decrease the line segment length by 1 from the tail"""
if self.is_horizontal():
if self.head.x < self.tail.x:
self.tail.move(Direction.west)
else:
self.tail.move(Direction.east)
elif self.is_vertical():
if self.head.y < self.tail.y:
self.tail.move(Direction.north)
else:
self.tail.move(Direction.south)
def draw(self, window):
"""Draw the line if it is horizontal or vertical"""
length = math.floor(self.length())
start_point = Point(min(self.head.y, self.tail.y),
min(self.head.x, self.tail.x))
if self.is_vertical():
window.vline(start_point.y, start_point.x, 0, length)
elif self.is_horizontal():
window.hline(start_point.y, start_point.x, 0, length)
    def lies_on(self, point):
        """Return whether this axis-aligned segment passes through point."""
        if self.is_horizontal():
            return (min(self.head.x, self.tail.x) <= point.x
                    <= max(self.head.x, self.tail.x)
                    and point.y == self.head.y)
        if self.is_vertical():
            return (min(self.head.y, self.tail.y) <= point.y
                    <= max(self.head.y, self.tail.y)
                    and point.x == self.head.x)
def intersection_point(self, other):
if isinstance(other, self.__class__):
if self.head == other.head or self.head == other.tail:
return self.head
elif self.tail == other.head or self.tail == other.tail:
return self.tail
else:
return None
def join(self, other, window):
def join_char(hline, vline):
ch = None
if (ipoint.is_to_the_left_of(hline.head) and
ipoint.is_to_the_left_of(hline.tail)):
if (ipoint.is_above(vline.head) and
ipoint.is_above(vline.tail)):
ch = curses.ACS_ULCORNER
elif (ipoint.is_below(vline.head) and
ipoint.is_below(vline.tail)):
ch = curses.ACS_LLCORNER
elif (ipoint.is_to_the_right_of(hline.head) and
ipoint.is_to_the_right_of(hline.tail)):
if (ipoint.is_above(vline.head) and
ipoint.is_above(vline.tail)):
ch = curses.ACS_URCORNER
elif (ipoint.is_below(vline.head) and
ipoint.is_below(vline.tail)):
ch = curses.ACS_LRCORNER
return ch
if isinstance(other, self.__class__):
hline = None
vline = None
if self.is_vertical():
vline = self
elif self.is_horizontal():
hline = self
if other.is_vertical():
vline = other
elif other.is_horizontal():
hline = other
if hline is not None and vline is not None and hline != vline:
ipoint = hline.intersection_point(vline)
if ipoint is not None:
ch = join_char(hline, vline)
if ch is not None:
window.addch(ipoint.y, ipoint.x, ch)
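# Note (added): join() only draws a glyph when one segment is horizontal,
# the other vertical, and they share an endpoint; the shared point's
# position relative to both segments picks one of the four ACS corner
# characters (e.g. curses.ACS_ULCORNER for an upper-left turn).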
class Snake:
def __init__(self, dimensions):
maxy, maxx = dimensions
self.points = [Point(math.floor(0.49 * maxy), math.floor(0.59 * maxx)),
Point(math.floor(0.49 * maxy), math.floor(0.40 * maxx))]
self.direction = Direction.east
def __str__(self):
return str(self.__dict__)
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
def draw(self, window):
prev_seg = None
for prev_point, curr_point in zip(self.points[:-1], self.points[1:]):
curr_seg = LineSegment(prev_point, curr_point)
curr_seg.draw(window)
curr_seg.join(prev_seg, window)
prev_seg = curr_seg
def detect_collision(self):
head = self.points[0]
prev_seg = None
for prev_point, curr_point in zip(self.points[1:-1], self.points[2:]):
curr_seg = LineSegment(prev_point, curr_point)
if curr_seg.lies_on(head):
curses.endwin()
print("Collision Detected!")
sys.exit(0)
def move(self, window, direction=None):
"""Move 1 unit in given direction"""
first_seg = LineSegment(self.points[0], self.points[1])
last_seg = LineSegment(self.points[-2], self.points[-1])
if (direction is None or
direction == self.direction or
direction.is_opp(self.direction)):
first_seg.increment()
else:
new_head = copy.deepcopy(first_seg.head)
new_head.move(direction)
self.points.insert(0, new_head)
self.direction = direction
self.detect_collision()
last_seg.decrement()
if last_seg.length() == 0:
del self.points[-1]
def signal_handler(signum, frame):
curses.endwin()
print("Thanks for playing pambu!")
sys.exit(0)
def main():
signal.signal(signal.SIGINT, signal_handler)
locale.setlocale(locale.LC_ALL, "") # Use system's default encoding
stdscr = curses.initscr() # Initialize
curses.cbreak() # Enter cbreak mode
curses.noecho() # Don't echo any characters
curses.curs_set(0) # Make cursor invisible
stdscr.nodelay(1) # Make getch() non-blocking
stdscr.keypad(1) # Interpret escape sequences
snk = Snake(stdscr.getmaxyx()) # Initialize our Snake!!
ch = None
while ch != curses.ascii.ESC:
stdscr.clear()
direction = None
if ch == curses.KEY_UP:
direction = Direction.north
elif ch == curses.KEY_DOWN:
direction = Direction.south
elif ch == curses.KEY_LEFT:
direction = Direction.west
elif ch == curses.KEY_RIGHT:
direction = Direction.east
snk.move(stdscr, direction)
snk.draw(stdscr)
stdscr.refresh()
curses.napms(200)
ch = stdscr.getch()
curses.flushinp()
curses.endwin()
if __name__ == "__main__":
main()
| isc |
theflofly/tensorflow | tensorflow/tools/docs/doc_generator_visitor.py | 9 | 10118 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `traverse` visitor for processing documentation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.util import tf_export
from tensorflow.python.util import tf_inspect
class DocGeneratorVisitor(object):
"""A visitor that generates docs for a python object when __call__ed."""
def __init__(self, root_name=''):
"""Make a visitor.
As this visitor is starting its traversal at a module or class, it will not
be told the name of that object during traversal. `root_name` is the name it
should use for that object, effectively prefixing all names with
"root_name.".
Args:
root_name: The name of the root module/class.
"""
self.set_root_name(root_name)
self._index = {}
self._tree = {}
self._reverse_index = None
self._duplicates = None
self._duplicate_of = None
def set_root_name(self, root_name):
"""Sets the root name for subsequent __call__s."""
self._root_name = root_name or ''
self._prefix = (root_name + '.') if root_name else ''
@property
def index(self):
"""A map from fully qualified names to objects to be documented.
The index is filled when the visitor is passed to `traverse`.
Returns:
The index filled by traversal.
"""
return self._index
@property
def tree(self):
"""A map from fully qualified names to all its child names for traversal.
The full name to member names map is filled when the visitor is passed to
`traverse`.
Returns:
The full name to member name map filled by traversal.
"""
return self._tree
@property
def reverse_index(self):
"""A map from `id(object)` to the preferred fully qualified name.
This map only contains non-primitive objects (no numbers or strings) present
in `index` (for primitive objects, `id()` doesn't quite do the right thing).
It is computed when it, `duplicate_of`, or `duplicates` are first accessed.
Returns:
The `id(object)` to full name map.
"""
self._maybe_find_duplicates()
return self._reverse_index
@property
def duplicate_of(self):
"""A map from duplicate full names to a preferred fully qualified name.
This map only contains names that are not themself a preferred name.
It is computed when it, `reverse_index`, or `duplicates` are first accessed.
Returns:
The map from duplicate name to preferred name.
"""
self._maybe_find_duplicates()
return self._duplicate_of
@property
def duplicates(self):
"""A map from preferred full names to a list of all names for this symbol.
This function returns a map from preferred (master) name for a symbol to a
lexicographically sorted list of all aliases for that name (incl. the master
name). Symbols without duplicate names do not appear in this map.
It is computed when it, `reverse_index`, or `duplicate_of` are first
accessed.
Returns:
The map from master name to list of all duplicate names.
"""
self._maybe_find_duplicates()
return self._duplicates
def _add_prefix(self, name):
"""Adds the root name to a name."""
return self._prefix + name if name else self._root_name
def __call__(self, parent_name, parent, children):
"""Visitor interface, see `tensorflow/tools/common:traverse` for details.
This method is called for each symbol found in a traversal using
`tensorflow/tools/common:traverse`. It should not be called directly in
user code.
Args:
parent_name: The fully qualified name of a symbol found during traversal.
parent: The Python object referenced by `parent_name`.
children: A list of `(name, py_object)` pairs enumerating, in alphabetical
order, the children (as determined by `tf_inspect.getmembers`) of
`parent`. `name` is the local name of `py_object` in `parent`.
Raises:
RuntimeError: If this visitor is called with a `parent` that is not a
class or module.
"""
parent_name = self._add_prefix(parent_name)
self._index[parent_name] = parent
self._tree[parent_name] = []
if not (tf_inspect.ismodule(parent) or tf_inspect.isclass(parent)):
raise RuntimeError('Unexpected type in visitor -- %s: %r' % (parent_name,
parent))
for i, (name, child) in enumerate(list(children)):
# Don't document __metaclass__
if name in ['__metaclass__']:
del children[i]
continue
full_name = '.'.join([parent_name, name]) if parent_name else name
self._index[full_name] = child
self._tree[parent_name].append(name)
def _score_name(self, name):
"""Return a tuple of scores indicating how to sort for the best name.
This function is meant to be used as the `key` to the `sorted` function.
This sorting in order:
Prefers names refering to the defining class, over a subclass.
Prefers names that are not in "contrib".
prefers submodules to the root namespace.
Prefers short names `tf.thing` over `tf.a.b.c.thing`
Sorts lexicographically on name parts.
Args:
name: the full name to score, for example `tf.estimator.Estimator`
Returns:
A tuple of scores. When sorted the preferred name will have the lowest
value.
"""
parts = name.split('.')
short_name = parts[-1]
container = self._index['.'.join(parts[:-1])]
defining_class_score = 1
if tf_inspect.isclass(container):
if short_name in container.__dict__:
# prefer the defining class
defining_class_score = -1
contrib_score = -1
if 'contrib' in parts:
contrib_score = 1
while parts:
container = self._index['.'.join(parts)]
if tf_inspect.ismodule(container):
break
parts.pop()
module_length = len(parts)
if len(parts) == 2:
# `tf.submodule.thing` is better than `tf.thing`
module_length_score = -1
else:
# shorter is better
module_length_score = module_length
return (defining_class_score, contrib_score, module_length_score, name)
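  # Example of the resulting order (illustrative; assumes these names are
  # all present in self._index):
  #   sorted(['tf.contrib.foo.Thing', 'tf.Thing', 'tf.estimator.Thing'],
  #          key=self._score_name)
  # yields ['tf.estimator.Thing', 'tf.Thing', 'tf.contrib.foo.Thing']: a
  # short submodule path is preferred, the root namespace comes next, and
  # the contrib alias ranks last.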
def _maybe_find_duplicates(self):
"""Compute data structures containing information about duplicates.
Find duplicates in `index` and decide on one to be the "master" name.
Computes a reverse_index mapping each object id to its master name.
Also computes a map `duplicate_of` from aliases to their master name (the
master name itself has no entry in this map), and a map `duplicates` from
master names to a lexicographically sorted list of all aliases for that name
(incl. the master name).
All these are computed and set as fields if they haven't already.
"""
if self._reverse_index is not None:
return
# Maps the id of a symbol to its fully qualified name. For symbols that have
# several aliases, this map contains the first one found.
# We use id(py_object) to get a hashable value for py_object. Note all
# objects in _index are in memory at the same time so this is safe.
reverse_index = {}
# Make a preliminary duplicates map. For all sets of duplicate names, it
# maps the first name found to a list of all duplicate names.
raw_duplicates = {}
for full_name, py_object in six.iteritems(self._index):
# We cannot use the duplicate mechanism for some constants, since e.g.,
# id(c1) == id(c2) with c1=1, c2=1. This is unproblematic since constants
# have no usable docstring and won't be documented automatically.
if (py_object is not None and
not isinstance(py_object, six.integer_types + six.string_types +
(six.binary_type, six.text_type, float, complex, bool))
and py_object is not ()): # pylint: disable=literal-comparison
object_id = id(py_object)
if object_id in reverse_index:
master_name = reverse_index[object_id]
if master_name in raw_duplicates:
raw_duplicates[master_name].append(full_name)
else:
raw_duplicates[master_name] = [master_name, full_name]
else:
reverse_index[object_id] = full_name
# Decide on master names, rewire duplicates and make a duplicate_of map
# mapping all non-master duplicates to the master name. The master symbol
# does not have an entry in this map.
duplicate_of = {}
# Duplicates maps the main symbols to the set of all duplicates of that
# symbol (incl. itself).
duplicates = {}
for names in raw_duplicates.values():
names = sorted(names)
master_name = (
tf_export.get_canonical_name_for_symbol(self._index[names[0]])
if names else None)
if master_name:
master_name = 'tf.%s' % master_name
else:
# Choose the master name with a lexical sort on the tuples returned by
# by _score_name.
master_name = min(names, key=self._score_name)
duplicates[master_name] = names
for name in names:
if name != master_name:
duplicate_of[name] = master_name
# Set the reverse index to the canonical name.
reverse_index[id(self._index[master_name])] = master_name
self._duplicate_of = duplicate_of
self._duplicates = duplicates
self._reverse_index = reverse_index
| apache-2.0 |
wartman4404/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/standalone.py | 410 | 46785 | #!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Standalone WebSocket server.
Use this file to launch pywebsocket without Apache HTTP Server.
BASIC USAGE
===========
Go to the src directory and run
$ python mod_pywebsocket/standalone.py [-p <ws_port>]
[-w <websock_handlers>]
[-d <document_root>]
<ws_port> is the port number to use for ws:// connection.
<document_root> is the path to the root directory of HTML files.
<websock_handlers> is the path to the root directory of WebSocket handlers.
If not specified, <document_root> will be used. See __init__.py (or
run $ pydoc mod_pywebsocket) for how to write WebSocket handlers.
For more detail and other options, run
$ python mod_pywebsocket/standalone.py --help
or see _build_option_parser method below.
For troubleshooting, adding "--log_level debug" might help.
TRY DEMO
========
Go to the src directory and run standalone.py with -d option to set the
document root to the directory containing example HTMLs and handlers like this:
$ cd src
$ PYTHONPATH=. python mod_pywebsocket/standalone.py -d example
to launch pywebsocket with the sample handler and html on port 80. Open
http://localhost/console.html, click the connect button, type something into
the text box next to the send button and click the send button. If everything
is working, you'll see the message you typed echoed by the server.
USING TLS
=========
To run the standalone server with TLS support, run it with -t, -k, and -c
options. When TLS is enabled, the standalone server accepts only TLS connections.
Note that when ssl module is used and the key/cert location is incorrect,
TLS connection silently fails while pyOpenSSL fails on startup.
Example:
$ PYTHONPATH=. python mod_pywebsocket/standalone.py \
-d example \
-p 10443 \
-t \
-c ../test/cert/cert.pem \
-k ../test/cert/key.pem \
Note that when passing a relative path to -c and -k option, it will be resolved
using the document root directory as the base.
USING CLIENT AUTHENTICATION
===========================
To run the standalone server with TLS client authentication support, run it with
--tls-client-auth and --tls-client-ca options in addition to ones required for
TLS support.
Example:
$ PYTHONPATH=. python mod_pywebsocket/standalone.py -d example -p 10443 -t \
-c ../test/cert/cert.pem -k ../test/cert/key.pem \
--tls-client-auth \
--tls-client-ca=../test/cert/cacert.pem
Note that when passing a relative path to --tls-client-ca option, it will be
resolved using the document root directory as the base.
CONFIGURATION FILE
==================
You can also write a configuration file and use it by specifying the path to
the configuration file by --config option. Please write a configuration file
following the documentation of the Python ConfigParser library. Name of each
entry must be the long version argument name. E.g. to set log level to debug,
add the following line:
log_level=debug
For options which doesn't take value, please add some fake value. E.g. for
--tls option, add the following line:
tls=True
Note that tls will be enabled even if you write tls=False as the value part is
fake.
When both a command line argument and a configuration file entry are set for
the same configuration item, the command line value will override one in the
configuration file.
THREADING
=========
This server is derived from SocketServer.ThreadingMixIn. Hence a thread is
used for each request.
SECURITY WARNING
================
This uses CGIHTTPServer and CGIHTTPServer is not secure.
It may execute arbitrary Python code or external programs. It should not be
used outside a firewall.
"""
import BaseHTTPServer
import CGIHTTPServer
import SimpleHTTPServer
import SocketServer
import ConfigParser
import base64
import httplib
import logging
import logging.handlers
import optparse
import os
import re
import select
import socket
import sys
import threading
import time
from mod_pywebsocket import common
from mod_pywebsocket import dispatch
from mod_pywebsocket import handshake
from mod_pywebsocket import http_header_util
from mod_pywebsocket import memorizingfile
from mod_pywebsocket import util
from mod_pywebsocket.xhr_benchmark_handler import XHRBenchmarkHandler
_DEFAULT_LOG_MAX_BYTES = 1024 * 256
_DEFAULT_LOG_BACKUP_COUNT = 5
_DEFAULT_REQUEST_QUEUE_SIZE = 128
# 1024 is practically large enough to contain WebSocket handshake lines.
_MAX_MEMORIZED_LINES = 1024
# Constants for the --tls_module flag.
_TLS_BY_STANDARD_MODULE = 'ssl'
_TLS_BY_PYOPENSSL = 'pyopenssl'
class _StandaloneConnection(object):
"""Mimic mod_python mp_conn."""
def __init__(self, request_handler):
"""Construct an instance.
Args:
request_handler: A WebSocketRequestHandler instance.
"""
self._request_handler = request_handler
def get_local_addr(self):
"""Getter to mimic mp_conn.local_addr."""
return (self._request_handler.server.server_name,
self._request_handler.server.server_port)
local_addr = property(get_local_addr)
def get_remote_addr(self):
"""Getter to mimic mp_conn.remote_addr.
Setting the property in __init__ won't work because the request
handler is not initialized yet there."""
return self._request_handler.client_address
remote_addr = property(get_remote_addr)
def write(self, data):
"""Mimic mp_conn.write()."""
return self._request_handler.wfile.write(data)
def read(self, length):
"""Mimic mp_conn.read()."""
return self._request_handler.rfile.read(length)
def get_memorized_lines(self):
"""Get memorized lines."""
return self._request_handler.rfile.get_memorized_lines()
class _StandaloneRequest(object):
"""Mimic mod_python request."""
def __init__(self, request_handler, use_tls):
"""Construct an instance.
Args:
request_handler: A WebSocketRequestHandler instance.
"""
self._logger = util.get_class_logger(self)
self._request_handler = request_handler
self.connection = _StandaloneConnection(request_handler)
self._use_tls = use_tls
self.headers_in = request_handler.headers
def get_uri(self):
"""Getter to mimic request.uri.
This method returns the raw data at the Request-URI part of the
Request-Line, while the uri method on the request object of mod_python
returns the path portion after parsing the raw data. This behavior is
kept for compatibility.
"""
return self._request_handler.path
uri = property(get_uri)
def get_unparsed_uri(self):
"""Getter to mimic request.unparsed_uri."""
return self._request_handler.path
unparsed_uri = property(get_unparsed_uri)
def get_method(self):
"""Getter to mimic request.method."""
return self._request_handler.command
method = property(get_method)
def get_protocol(self):
"""Getter to mimic request.protocol."""
return self._request_handler.request_version
protocol = property(get_protocol)
def is_https(self):
"""Mimic request.is_https()."""
return self._use_tls
def _import_ssl():
global ssl
try:
import ssl
return True
except ImportError:
return False
def _import_pyopenssl():
global OpenSSL
try:
import OpenSSL.SSL
return True
except ImportError:
return False
class _StandaloneSSLConnection(object):
"""A wrapper class for OpenSSL.SSL.Connection to
    - provide a makefile method, which the class itself does not support
    - tweak the shutdown method, since OpenSSL.SSL.Connection.shutdown
      doesn't accept the "how" argument.
- convert SysCallError exceptions that its recv method may raise into a
return value of '', meaning EOF. We cannot overwrite the recv method on
self._connection since it's immutable.
"""
_OVERRIDDEN_ATTRIBUTES = ['_connection', 'makefile', 'shutdown', 'recv']
def __init__(self, connection):
self._connection = connection
def __getattribute__(self, name):
if name in _StandaloneSSLConnection._OVERRIDDEN_ATTRIBUTES:
return object.__getattribute__(self, name)
return self._connection.__getattribute__(name)
def __setattr__(self, name, value):
if name in _StandaloneSSLConnection._OVERRIDDEN_ATTRIBUTES:
return object.__setattr__(self, name, value)
return self._connection.__setattr__(name, value)
def makefile(self, mode='r', bufsize=-1):
return socket._fileobject(self, mode, bufsize)
def shutdown(self, unused_how):
self._connection.shutdown()
def recv(self, bufsize, flags=0):
if flags != 0:
raise ValueError('Non-zero flags not allowed')
try:
return self._connection.recv(bufsize)
except OpenSSL.SSL.SysCallError, (err, message):
if err == -1:
# Suppress "unexpected EOF" exception. See the OpenSSL document
# for SSL_get_error.
return ''
raise
def _alias_handlers(dispatcher, websock_handlers_map_file):
"""Set aliases specified in websock_handler_map_file in dispatcher.
Args:
dispatcher: dispatch.Dispatcher instance
websock_handler_map_file: alias map file
"""
fp = open(websock_handlers_map_file)
try:
for line in fp:
if line[0] == '#' or line.isspace():
continue
m = re.match('(\S+)\s+(\S+)', line)
if not m:
                logging.warning('Wrong format in map file: ' + line)
continue
try:
dispatcher.add_resource_path_alias(
m.group(1), m.group(2))
except dispatch.DispatchException, e:
logging.error(str(e))
finally:
fp.close()
class WebSocketServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
"""HTTPServer specialized for WebSocket."""
# Overrides SocketServer.ThreadingMixIn.daemon_threads
daemon_threads = True
# Overrides BaseHTTPServer.HTTPServer.allow_reuse_address
allow_reuse_address = True
def __init__(self, options):
"""Override SocketServer.TCPServer.__init__ to set SSL enabled
socket object to self.socket before server_bind and server_activate,
if necessary.
"""
# Share a Dispatcher among request handlers to save time for
# instantiation. Dispatcher can be shared because it is thread-safe.
options.dispatcher = dispatch.Dispatcher(
options.websock_handlers,
options.scan_dir,
options.allow_handlers_outside_root_dir)
if options.websock_handlers_map_file:
_alias_handlers(options.dispatcher,
options.websock_handlers_map_file)
warnings = options.dispatcher.source_warnings()
if warnings:
for warning in warnings:
logging.warning('Warning in source loading: %s' % warning)
self._logger = util.get_class_logger(self)
self.request_queue_size = options.request_queue_size
self.__ws_is_shut_down = threading.Event()
self.__ws_serving = False
SocketServer.BaseServer.__init__(
self, (options.server_host, options.port), WebSocketRequestHandler)
        # Expose the options object so that handler objects can access it.
        # We name it with a websocket_ prefix to avoid conflicts.
self.websocket_server_options = options
self._create_sockets()
self.server_bind()
self.server_activate()
def _create_sockets(self):
self.server_name, self.server_port = self.server_address
self._sockets = []
if not self.server_name:
            # On platforms that don't support IPv6, the first bind fails.
            # On platforms that support IPv6:
            # - If the call with AF_INET6 binds both IPv4 and IPv6, the
            #   first bind succeeds and the second fails (with an 'Address
            #   already in use' error).
            # - If the call with AF_INET6 binds only IPv6, both calls are
            #   expected to succeed, so the server listens on both protocols.
addrinfo_array = [
(socket.AF_INET6, socket.SOCK_STREAM, '', '', ''),
(socket.AF_INET, socket.SOCK_STREAM, '', '', '')]
else:
addrinfo_array = socket.getaddrinfo(self.server_name,
self.server_port,
socket.AF_UNSPEC,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
for addrinfo in addrinfo_array:
self._logger.info('Create socket on: %r', addrinfo)
family, socktype, proto, canonname, sockaddr = addrinfo
try:
socket_ = socket.socket(family, socktype)
except Exception, e:
self._logger.info('Skip by failure: %r', e)
continue
server_options = self.websocket_server_options
if server_options.use_tls:
# For the case of _HAS_OPEN_SSL, we do wrapper setup after
# accept.
if server_options.tls_module == _TLS_BY_STANDARD_MODULE:
if server_options.tls_client_auth:
if server_options.tls_client_cert_optional:
client_cert_ = ssl.CERT_OPTIONAL
else:
client_cert_ = ssl.CERT_REQUIRED
else:
client_cert_ = ssl.CERT_NONE
socket_ = ssl.wrap_socket(socket_,
keyfile=server_options.private_key,
certfile=server_options.certificate,
ssl_version=ssl.PROTOCOL_SSLv23,
ca_certs=server_options.tls_client_ca,
cert_reqs=client_cert_,
do_handshake_on_connect=False)
self._sockets.append((socket_, addrinfo))
def server_bind(self):
"""Override SocketServer.TCPServer.server_bind to enable multiple
sockets bind.
"""
failed_sockets = []
for socketinfo in self._sockets:
socket_, addrinfo = socketinfo
self._logger.info('Bind on: %r', addrinfo)
if self.allow_reuse_address:
socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
socket_.bind(self.server_address)
except Exception, e:
self._logger.info('Skip by failure: %r', e)
socket_.close()
failed_sockets.append(socketinfo)
if self.server_address[1] == 0:
                # The operating system assigns the actual port number when
                # port number 0 is requested. In this case, the second and
                # later sockets should use the same port number. Also,
                # self.server_port is rewritten because it is exported and
                # will be used by external code.
self.server_address = (
self.server_name, socket_.getsockname()[1])
self.server_port = self.server_address[1]
self._logger.info('Port %r is assigned', self.server_port)
for socketinfo in failed_sockets:
self._sockets.remove(socketinfo)
def server_activate(self):
"""Override SocketServer.TCPServer.server_activate to enable multiple
sockets listen.
"""
failed_sockets = []
for socketinfo in self._sockets:
socket_, addrinfo = socketinfo
self._logger.info('Listen on: %r', addrinfo)
try:
socket_.listen(self.request_queue_size)
except Exception, e:
self._logger.info('Skip by failure: %r', e)
socket_.close()
failed_sockets.append(socketinfo)
for socketinfo in failed_sockets:
self._sockets.remove(socketinfo)
if len(self._sockets) == 0:
self._logger.critical(
'No sockets activated. Use info log level to see the reason.')
def server_close(self):
"""Override SocketServer.TCPServer.server_close to enable multiple
sockets close.
"""
for socketinfo in self._sockets:
socket_, addrinfo = socketinfo
self._logger.info('Close on: %r', addrinfo)
socket_.close()
def fileno(self):
"""Override SocketServer.TCPServer.fileno."""
self._logger.critical('Not supported: fileno')
return self._sockets[0][0].fileno()
def handle_error(self, request, client_address):
"""Override SocketServer.handle_error."""
self._logger.error(
'Exception in processing request from: %r\n%s',
client_address,
util.get_stack_trace())
# Note: client_address is a tuple.
def get_request(self):
"""Override TCPServer.get_request to wrap OpenSSL.SSL.Connection
object with _StandaloneSSLConnection to provide makefile method. We
cannot substitute OpenSSL.SSL.Connection.makefile since it's readonly
attribute.
"""
accepted_socket, client_address = self.socket.accept()
server_options = self.websocket_server_options
if server_options.use_tls:
if server_options.tls_module == _TLS_BY_STANDARD_MODULE:
try:
accepted_socket.do_handshake()
except ssl.SSLError, e:
self._logger.debug('%r', e)
raise
# Print cipher in use. Handshake is done on accept.
self._logger.debug('Cipher: %s', accepted_socket.cipher())
self._logger.debug('Client cert: %r',
accepted_socket.getpeercert())
elif server_options.tls_module == _TLS_BY_PYOPENSSL:
# We cannot print the cipher in use. pyOpenSSL doesn't provide
# any method to fetch that.
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
ctx.use_privatekey_file(server_options.private_key)
ctx.use_certificate_file(server_options.certificate)
def default_callback(conn, cert, errnum, errdepth, ok):
return ok == 1
# See the OpenSSL document for SSL_CTX_set_verify.
if server_options.tls_client_auth:
verify_mode = OpenSSL.SSL.VERIFY_PEER
if not server_options.tls_client_cert_optional:
verify_mode |= OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT
ctx.set_verify(verify_mode, default_callback)
ctx.load_verify_locations(server_options.tls_client_ca,
None)
else:
ctx.set_verify(OpenSSL.SSL.VERIFY_NONE, default_callback)
accepted_socket = OpenSSL.SSL.Connection(ctx, accepted_socket)
accepted_socket.set_accept_state()
                # Convert SSL-related errors into socket.error so that
                # SocketServer ignores them and keeps running.
#
# TODO(tyoshino): Convert all kinds of errors.
try:
accepted_socket.do_handshake()
except OpenSSL.SSL.Error, e:
# Set errno part to 1 (SSL_ERROR_SSL) like the ssl module
# does.
self._logger.debug('%r', e)
raise socket.error(1, '%r' % e)
cert = accepted_socket.get_peer_certificate()
if cert is not None:
self._logger.debug('Client cert subject: %r',
cert.get_subject().get_components())
accepted_socket = _StandaloneSSLConnection(accepted_socket)
else:
raise ValueError('No TLS support module is available')
return accepted_socket, client_address
def serve_forever(self, poll_interval=0.5):
"""Override SocketServer.BaseServer.serve_forever."""
self.__ws_serving = True
self.__ws_is_shut_down.clear()
handle_request = self.handle_request
if hasattr(self, '_handle_request_noblock'):
handle_request = self._handle_request_noblock
else:
self._logger.warning('Fallback to blocking request handler')
try:
while self.__ws_serving:
r, w, e = select.select(
[socket_[0] for socket_ in self._sockets],
[], [], poll_interval)
for socket_ in r:
self.socket = socket_
handle_request()
self.socket = None
finally:
self.__ws_is_shut_down.set()
def shutdown(self):
"""Override SocketServer.BaseServer.shutdown."""
self.__ws_serving = False
self.__ws_is_shut_down.wait()
class WebSocketRequestHandler(CGIHTTPServer.CGIHTTPRequestHandler):
"""CGIHTTPRequestHandler specialized for WebSocket."""
# Use httplib.HTTPMessage instead of mimetools.Message.
MessageClass = httplib.HTTPMessage
protocol_version = "HTTP/1.1"
def setup(self):
"""Override SocketServer.StreamRequestHandler.setup to wrap rfile
with MemorizingFile.
This method will be called by BaseRequestHandler's constructor
before calling BaseHTTPRequestHandler.handle.
BaseHTTPRequestHandler.handle will call
BaseHTTPRequestHandler.handle_one_request and it will call
WebSocketRequestHandler.parse_request.
"""
# Call superclass's setup to prepare rfile, wfile, etc. See setup
# definition on the root class SocketServer.StreamRequestHandler to
# understand what this does.
CGIHTTPServer.CGIHTTPRequestHandler.setup(self)
self.rfile = memorizingfile.MemorizingFile(
self.rfile,
max_memorized_lines=_MAX_MEMORIZED_LINES)
def __init__(self, request, client_address, server):
self._logger = util.get_class_logger(self)
self._options = server.websocket_server_options
        # Overrides CGIHTTPRequestHandler.cgi_directories.
self.cgi_directories = self._options.cgi_directories
# Replace CGIHTTPRequestHandler.is_executable method.
if self._options.is_executable_method is not None:
self.is_executable = self._options.is_executable_method
# This actually calls BaseRequestHandler.__init__.
CGIHTTPServer.CGIHTTPRequestHandler.__init__(
self, request, client_address, server)
def parse_request(self):
"""Override BaseHTTPServer.BaseHTTPRequestHandler.parse_request.
Return True to continue processing for HTTP(S), False otherwise.
See BaseHTTPRequestHandler.handle_one_request method which calls
this method to understand how the return value will be handled.
"""
# We hook parse_request method, but also call the original
# CGIHTTPRequestHandler.parse_request since when we return False,
# CGIHTTPRequestHandler.handle_one_request continues processing and
# it needs variables set by CGIHTTPRequestHandler.parse_request.
#
# Variables set by this method will be also used by WebSocket request
# handling (self.path, self.command, self.requestline, etc. See also
# how _StandaloneRequest's members are implemented using these
# attributes).
if not CGIHTTPServer.CGIHTTPRequestHandler.parse_request(self):
return False
if self.command == "CONNECT":
self.send_response(200, "Connected")
self.send_header("Connection", "keep-alive")
self.end_headers()
return False
if self._options.use_basic_auth:
auth = self.headers.getheader('Authorization')
if auth != self._options.basic_auth_credential:
self.send_response(401)
self.send_header('WWW-Authenticate',
'Basic realm="Pywebsocket"')
self.end_headers()
self._logger.info('Request basic authentication')
return False
host, port, resource = http_header_util.parse_uri(self.path)
# Special paths for XMLHttpRequest benchmark
xhr_benchmark_helper_prefix = '/073be001e10950692ccbf3a2ad21c245'
if resource == (xhr_benchmark_helper_prefix + '_send'):
xhr_benchmark_handler = XHRBenchmarkHandler(
self.headers, self.rfile, self.wfile)
xhr_benchmark_handler.do_send()
return False
if resource == (xhr_benchmark_helper_prefix + '_receive'):
xhr_benchmark_handler = XHRBenchmarkHandler(
self.headers, self.rfile, self.wfile)
xhr_benchmark_handler.do_receive()
return False
if resource is None:
self._logger.info('Invalid URI: %r', self.path)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
server_options = self.server.websocket_server_options
if host is not None:
validation_host = server_options.validation_host
if validation_host is not None and host != validation_host:
self._logger.info('Invalid host: %r (expected: %r)',
host,
validation_host)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
if port is not None:
validation_port = server_options.validation_port
if validation_port is not None and port != validation_port:
self._logger.info('Invalid port: %r (expected: %r)',
port,
validation_port)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
self.path = resource
request = _StandaloneRequest(self, self._options.use_tls)
try:
# Fallback to default http handler for request paths for which
# we don't have request handlers.
if not self._options.dispatcher.get_handler_suite(self.path):
self._logger.info('No handler for resource: %r',
self.path)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
except dispatch.DispatchException, e:
self._logger.info('Dispatch failed for error: %s', e)
self.send_error(e.status)
return False
        # If any exception without a matching except clause (including
        # DispatchException) is raised below this point, it will be caught
        # and logged by WebSocketServer.
try:
try:
handshake.do_handshake(
request,
self._options.dispatcher,
allowDraft75=self._options.allow_draft75,
strict=self._options.strict)
except handshake.VersionException, e:
self._logger.info('Handshake failed for version error: %s', e)
self.send_response(common.HTTP_STATUS_BAD_REQUEST)
self.send_header(common.SEC_WEBSOCKET_VERSION_HEADER,
e.supported_versions)
self.end_headers()
return False
except handshake.HandshakeException, e:
# Handshake for ws(s) failed.
self._logger.info('Handshake failed for error: %s', e)
self.send_error(e.status)
return False
request._dispatcher = self._options.dispatcher
self._options.dispatcher.transfer_data(request)
except handshake.AbortedByUserException, e:
self._logger.info('Aborted: %s', e)
return False
def log_request(self, code='-', size='-'):
"""Override BaseHTTPServer.log_request."""
self._logger.info('"%s" %s %s',
self.requestline, str(code), str(size))
def log_error(self, *args):
"""Override BaseHTTPServer.log_error."""
        # Despite the name, this method is for warnings rather than errors.
        # For example, HTTP status codes are logged by this method.
self._logger.warning('%s - %s',
self.address_string(),
args[0] % args[1:])
def is_cgi(self):
"""Test whether self.path corresponds to a CGI script.
Add extra check that self.path doesn't contains ..
Also check if the file is a executable file or not.
If the file is not executable, it is handled as static file or dir
rather than a CGI script.
"""
if CGIHTTPServer.CGIHTTPRequestHandler.is_cgi(self):
if '..' in self.path:
return False
# strip query parameter from request path
resource_name = self.path.split('?', 2)[0]
# convert resource_name into real path name in filesystem.
scriptfile = self.translate_path(resource_name)
if not os.path.isfile(scriptfile):
return False
if not self.is_executable(scriptfile):
return False
return True
return False
def _get_logger_from_class(c):
return logging.getLogger('%s.%s' % (c.__module__, c.__name__))
def _configure_logging(options):
logging.addLevelName(common.LOGLEVEL_FINE, 'FINE')
logger = logging.getLogger()
logger.setLevel(logging.getLevelName(options.log_level.upper()))
if options.log_file:
handler = logging.handlers.RotatingFileHandler(
options.log_file, 'a', options.log_max, options.log_count)
else:
handler = logging.StreamHandler()
formatter = logging.Formatter(
'[%(asctime)s] [%(levelname)s] %(name)s: %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
deflate_log_level_name = logging.getLevelName(
options.deflate_log_level.upper())
_get_logger_from_class(util._Deflater).setLevel(
deflate_log_level_name)
_get_logger_from_class(util._Inflater).setLevel(
deflate_log_level_name)
def _build_option_parser():
parser = optparse.OptionParser()
parser.add_option('--config', dest='config_file', type='string',
default=None,
help=('Path to configuration file. See the file comment '
'at the top of this file for the configuration '
'file format'))
parser.add_option('-H', '--server-host', '--server_host',
dest='server_host',
default='',
help='server hostname to listen to')
parser.add_option('-V', '--validation-host', '--validation_host',
dest='validation_host',
default=None,
help='server hostname to validate in absolute path.')
parser.add_option('-p', '--port', dest='port', type='int',
default=common.DEFAULT_WEB_SOCKET_PORT,
help='port to listen to')
parser.add_option('-P', '--validation-port', '--validation_port',
dest='validation_port', type='int',
default=None,
help='server port to validate in absolute path.')
parser.add_option('-w', '--websock-handlers', '--websock_handlers',
dest='websock_handlers',
default='.',
help=('The root directory of WebSocket handler files. '
'If the path is relative, --document-root is used '
'as the base.'))
parser.add_option('-m', '--websock-handlers-map-file',
'--websock_handlers_map_file',
dest='websock_handlers_map_file',
default=None,
help=('WebSocket handlers map file. '
'Each line consists of alias_resource_path and '
'existing_resource_path, separated by spaces.'))
parser.add_option('-s', '--scan-dir', '--scan_dir', dest='scan_dir',
default=None,
help=('Must be a directory under --websock-handlers. '
'Only handlers under this directory are scanned '
'and registered to the server. '
                            'Useful for saving scan time when the handler '
                            'root directory contains lots of files that are '
                            'not handler files, or are handler files that '
                            'you don\'t want to be registered.'))
parser.add_option('--allow-handlers-outside-root-dir',
'--allow_handlers_outside_root_dir',
dest='allow_handlers_outside_root_dir',
action='store_true',
default=False,
help=('Scans WebSocket handlers even if their canonical '
'path is not under --websock-handlers.'))
parser.add_option('-d', '--document-root', '--document_root',
dest='document_root', default='.',
help='Document root directory.')
parser.add_option('-x', '--cgi-paths', '--cgi_paths', dest='cgi_paths',
default=None,
                      help=('CGI paths relative to document_root. '
                            'Comma-separated. (e.g. -x /cgi,/htbin) '
                            'Files under document_root/cgi_path are handled '
                            'as CGI programs. Must be executable.'))
parser.add_option('-t', '--tls', dest='use_tls', action='store_true',
default=False, help='use TLS (wss://)')
parser.add_option('--tls-module', '--tls_module', dest='tls_module',
type='choice',
choices = [_TLS_BY_STANDARD_MODULE, _TLS_BY_PYOPENSSL],
help='Use ssl module if "%s" is specified. '
'Use pyOpenSSL module if "%s" is specified' %
(_TLS_BY_STANDARD_MODULE, _TLS_BY_PYOPENSSL))
parser.add_option('-k', '--private-key', '--private_key',
dest='private_key',
default='', help='TLS private key file.')
parser.add_option('-c', '--certificate', dest='certificate',
default='', help='TLS certificate file.')
parser.add_option('--tls-client-auth', dest='tls_client_auth',
action='store_true', default=False,
help='Requests TLS client auth on every connection.')
parser.add_option('--tls-client-cert-optional',
dest='tls_client_cert_optional',
action='store_true', default=False,
help=('Makes client certificate optional even though '
'TLS client auth is enabled.'))
parser.add_option('--tls-client-ca', dest='tls_client_ca', default='',
help=('Specifies a pem file which contains a set of '
'concatenated CA certificates which are used to '
'validate certificates passed from clients'))
parser.add_option('--basic-auth', dest='use_basic_auth',
action='store_true', default=False,
help='Requires Basic authentication.')
parser.add_option('--basic-auth-credential',
dest='basic_auth_credential', default='test:test',
help='Specifies the credential of basic authentication '
'by username:password pair (e.g. test:test).')
parser.add_option('-l', '--log-file', '--log_file', dest='log_file',
default='', help='Log file.')
# Custom log level:
# - FINE: Prints status of each frame processing step
parser.add_option('--log-level', '--log_level', type='choice',
dest='log_level', default='warn',
choices=['fine',
'debug', 'info', 'warning', 'warn', 'error',
'critical'],
help='Log level.')
parser.add_option('--deflate-log-level', '--deflate_log_level',
type='choice',
dest='deflate_log_level', default='warn',
choices=['debug', 'info', 'warning', 'warn', 'error',
'critical'],
help='Log level for _Deflater and _Inflater.')
parser.add_option('--thread-monitor-interval-in-sec',
'--thread_monitor_interval_in_sec',
dest='thread_monitor_interval_in_sec',
type='int', default=-1,
                      help=('If a positive integer is specified, run a '
                            'thread monitor to show the status of server '
                            'threads periodically at the specified interval '
                            'in seconds. If a non-positive integer is '
                            'specified, disable the thread monitor.'))
parser.add_option('--log-max', '--log_max', dest='log_max', type='int',
default=_DEFAULT_LOG_MAX_BYTES,
help='Log maximum bytes')
parser.add_option('--log-count', '--log_count', dest='log_count',
type='int', default=_DEFAULT_LOG_BACKUP_COUNT,
help='Log backup count')
parser.add_option('--allow-draft75', dest='allow_draft75',
action='store_true', default=False,
help='Obsolete option. Ignored.')
parser.add_option('--strict', dest='strict', action='store_true',
default=False, help='Obsolete option. Ignored.')
parser.add_option('-q', '--queue', dest='request_queue_size', type='int',
default=_DEFAULT_REQUEST_QUEUE_SIZE,
help='request queue size')
return parser
class ThreadMonitor(threading.Thread):
daemon = True
def __init__(self, interval_in_sec):
threading.Thread.__init__(self, name='ThreadMonitor')
self._logger = util.get_class_logger(self)
self._interval_in_sec = interval_in_sec
def run(self):
while True:
thread_name_list = []
for thread in threading.enumerate():
thread_name_list.append(thread.name)
self._logger.info(
"%d active threads: %s",
threading.active_count(),
', '.join(thread_name_list))
time.sleep(self._interval_in_sec)
def _parse_args_and_config(args):
parser = _build_option_parser()
# First, parse options without configuration file.
temporary_options, temporary_args = parser.parse_args(args=args)
if temporary_args:
logging.critical(
'Unrecognized positional arguments: %r', temporary_args)
sys.exit(1)
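    # Example configuration file (hypothetical values); each "name = value"
    # pair in the [pywebsocket] section is translated into "--name value"
    # below:
    #
    #   [pywebsocket]
    #   port = 8880
    #   log_level = debug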
if temporary_options.config_file:
try:
config_fp = open(temporary_options.config_file, 'r')
except IOError, e:
logging.critical(
'Failed to open configuration file %r: %r',
temporary_options.config_file,
e)
sys.exit(1)
config_parser = ConfigParser.SafeConfigParser()
config_parser.readfp(config_fp)
config_fp.close()
args_from_config = []
for name, value in config_parser.items('pywebsocket'):
args_from_config.append('--' + name)
args_from_config.append(value)
if args is None:
args = args_from_config
else:
args = args_from_config + args
return parser.parse_args(args=args)
else:
return temporary_options, temporary_args
def _main(args=None):
"""You can call this function from your own program, but please note that
this function has some side-effects that might affect your program. For
example, util.wrap_popen3_for_win use in this method replaces implementation
of os.popen3.
"""
options, args = _parse_args_and_config(args=args)
os.chdir(options.document_root)
_configure_logging(options)
if options.allow_draft75:
logging.warning('--allow_draft75 option is obsolete.')
if options.strict:
logging.warning('--strict option is obsolete.')
# TODO(tyoshino): Clean up initialization of CGI related values. Move some
# of code here to WebSocketRequestHandler class if it's better.
options.cgi_directories = []
options.is_executable_method = None
if options.cgi_paths:
options.cgi_directories = options.cgi_paths.split(',')
if sys.platform in ('cygwin', 'win32'):
cygwin_path = None
# For Win32 Python, it is expected that CYGWIN_PATH
# is set to a directory of cygwin binaries.
# For example, websocket_server.py in Chromium sets CYGWIN_PATH to
# full path of third_party/cygwin/bin.
if 'CYGWIN_PATH' in os.environ:
cygwin_path = os.environ['CYGWIN_PATH']
util.wrap_popen3_for_win(cygwin_path)
def __check_script(scriptpath):
return util.get_script_interp(scriptpath, cygwin_path)
options.is_executable_method = __check_script
if options.use_tls:
if options.tls_module is None:
if _import_ssl():
options.tls_module = _TLS_BY_STANDARD_MODULE
logging.debug('Using ssl module')
elif _import_pyopenssl():
options.tls_module = _TLS_BY_PYOPENSSL
logging.debug('Using pyOpenSSL module')
else:
logging.critical(
'TLS support requires ssl or pyOpenSSL module.')
sys.exit(1)
elif options.tls_module == _TLS_BY_STANDARD_MODULE:
if not _import_ssl():
logging.critical('ssl module is not available')
sys.exit(1)
elif options.tls_module == _TLS_BY_PYOPENSSL:
if not _import_pyopenssl():
logging.critical('pyOpenSSL module is not available')
sys.exit(1)
else:
logging.critical('Invalid --tls-module option: %r',
options.tls_module)
sys.exit(1)
if not options.private_key or not options.certificate:
logging.critical(
'To use TLS, specify private_key and certificate.')
sys.exit(1)
if (options.tls_client_cert_optional and
not options.tls_client_auth):
logging.critical('Client authentication must be enabled to '
'specify tls_client_cert_optional')
sys.exit(1)
else:
if options.tls_module is not None:
            logging.critical('Use the --tls-module option only together '
                             'with the --tls option.')
sys.exit(1)
if options.tls_client_auth:
logging.critical('TLS must be enabled for client authentication.')
sys.exit(1)
        if options.tls_client_cert_optional:
            logging.critical('TLS must be enabled to specify '
                             'tls_client_cert_optional.')
            sys.exit(1)
if not options.scan_dir:
options.scan_dir = options.websock_handlers
if options.use_basic_auth:
options.basic_auth_credential = 'Basic ' + base64.b64encode(
options.basic_auth_credential)
try:
if options.thread_monitor_interval_in_sec > 0:
# Run a thread monitor to show the status of server threads for
# debugging.
ThreadMonitor(options.thread_monitor_interval_in_sec).start()
server = WebSocketServer(options)
server.serve_forever()
except Exception, e:
logging.critical('mod_pywebsocket: %s' % e)
logging.critical('mod_pywebsocket: %s' % util.get_stack_trace())
sys.exit(1)
if __name__ == '__main__':
_main(sys.argv[1:])
# vi:sts=4 sw=4 et
| mpl-2.0 |
philoniare/horizon | openstack_dashboard/test/integration_tests/tests/test_user_create_delete.py | 16 | 1186 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests import helpers
class TestUser(helpers.AdminTestCase):
USER_NAME = helpers.gen_random_resource_name("user")
def test_create_delete_user(self):
users_page = self.home_pg.go_to_identity_userspage()
password = self.TEST_PASSWORD
users_page.create_user(self.USER_NAME, password=password,
project='admin', role='admin')
self.assertTrue(users_page.is_user_present(self.USER_NAME))
users_page.delete_user(self.USER_NAME)
self.assertFalse(users_page.is_user_present(self.USER_NAME))
| apache-2.0 |
wevote/WeVoteServer | elected_office/urls.py | 1 | 1066 | # elected_office/urls.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from django.conf.urls import re_path
from . import views_admin
urlpatterns = [
# views_admin
re_path(r'^$', views_admin.elected_office_list_view, name='elected_office_list', ),
re_path(r'^delete/$', views_admin.elected_office_delete_process_view, name='elected_office_delete_process'),
re_path(r'^edit_process/$', views_admin.elected_office_edit_process_view, name='elected_office_edit_process'),
re_path(r'^new/$', views_admin.elected_office_new_view, name='elected_office_new'),
re_path(r'^update/$', views_admin.elected_office_update_view, name='elected_office_update'),
re_path(r'^real_time_status/$', views_admin.elected_office_update_status, name='elected_office_update_status'),
re_path(r'^(?P<elected_office_id>[0-9]+)/edit/$', views_admin.elected_office_edit_view, name='elected_office_edit'),
re_path(r'^(?P<elected_office_id>[0-9]+)/summary/$', views_admin.elected_office_summary_view,
name='elected_office_summary'),
]
| mit |
vincepandolfo/django | django/contrib/auth/management/commands/changepassword.py | 4 | 2639 | from __future__ import unicode_literals
import getpass
from django.contrib.auth import get_user_model
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.utils.encoding import force_str
class Command(BaseCommand):
help = "Change a user's password for django.contrib.auth."
requires_migrations_checks = True
requires_system_checks = False
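    # Typical invocation sketch (hypothetical username):
    #
    #   python manage.py changepassword alice --database default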
def _get_pass(self, prompt="Password: "):
p = getpass.getpass(prompt=force_str(prompt))
if not p:
raise CommandError("aborted")
return p
def add_arguments(self, parser):
parser.add_argument('username', nargs='?',
help='Username to change password for; by default, it\'s the current username.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Specifies the database to use. Default is "default".')
def handle(self, *args, **options):
if options['username']:
username = options['username']
else:
username = getpass.getuser()
UserModel = get_user_model()
try:
u = UserModel._default_manager.using(options['database']).get(**{
UserModel.USERNAME_FIELD: username
})
except UserModel.DoesNotExist:
raise CommandError("user '%s' does not exist" % username)
self.stdout.write("Changing password for user '%s'\n" % u)
MAX_TRIES = 3
count = 0
p1, p2 = 1, 2 # To make them initially mismatch.
password_validated = False
while (p1 != p2 or not password_validated) and count < MAX_TRIES:
p1 = self._get_pass()
p2 = self._get_pass("Password (again): ")
if p1 != p2:
self.stdout.write("Passwords do not match. Please try again.\n")
count += 1
# Don't validate passwords that don't match.
continue
try:
validate_password(p2, u)
except ValidationError as err:
self.stderr.write('\n'.join(err.messages))
count += 1
else:
password_validated = True
if count == MAX_TRIES:
raise CommandError("Aborting password change for user '%s' after %s attempts" % (u, count))
u.set_password(p1)
u.save()
return "Password changed successfully for user '%s'" % u
| bsd-3-clause |
PoonKang/Kernel_GT-N8013_ICS | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
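# Example of accumulating per-key statistics (hypothetical values):
#
#   stats = {}
#   for ns in (1500, 2500, 2000):
#       add_stats(stats, 'read', ns)
#   # stats['read'] == (1500, 2500, 2000, 3)
#
# Note that the third slot is a running midpoint of successive values, not
# an exact arithmetic mean over all samples.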
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
| gpl-2.0 |
asm-products/movie-database-service | ani/lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/escprober.py | 2936 | 3187 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord
class EscCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = [
CodingStateMachine(HZSMModel),
CodingStateMachine(ISO2022CNSMModel),
CodingStateMachine(ISO2022JPSMModel),
CodingStateMachine(ISO2022KRSMModel)
]
self.reset()
def reset(self):
CharSetProber.reset(self)
for codingSM in self._mCodingSM:
if not codingSM:
continue
codingSM.active = True
codingSM.reset()
self._mActiveSM = len(self._mCodingSM)
self._mDetectedCharset = None
def get_charset_name(self):
return self._mDetectedCharset
def get_confidence(self):
if self._mDetectedCharset:
return 0.99
else:
return 0.00
def feed(self, aBuf):
for c in aBuf:
# PY3K: aBuf is a byte array, so c is an int, not a byte
for codingSM in self._mCodingSM:
if not codingSM:
continue
if not codingSM.active:
continue
codingState = codingSM.next_state(wrap_ord(c))
if codingState == constants.eError:
codingSM.active = False
self._mActiveSM -= 1
if self._mActiveSM <= 0:
self._mState = constants.eNotMe
return self.get_state()
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
return self.get_state()
return self.get_state()
| agpl-3.0 |
tensorflow/lingvo | lingvo/tools/beam_utils.py | 1 | 3573 | # Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools for car beam pipelines."""
import apache_beam as beam
def BeamInit():
"""Initialize the beam program.
  Typically the first thing to run in main(). This call is needed before
  FLAGS are accessed, for example.
"""
pass
def GetPipelineRoot(options=None):
"""Return the root of the beam pipeline.
Typical usage looks like:
with GetPipelineRoot() as root:
_ = (root | beam.ParDo() | ...)
In this example, the pipeline is automatically executed when the context is
exited, though one can manually run the pipeline built from the root object as
well.
Args:
options: A beam.options.pipeline_options.PipelineOptions object.
Returns:
A beam.Pipeline root object.
"""
return beam.Pipeline(options=options)
def GetReader(record_format, file_pattern, value_coder, **kwargs):
"""Returns a beam Reader based on record_format and file_pattern.
Args:
record_format: String record format, e.g., 'tfrecord'.
file_pattern: String path describing files to be read.
value_coder: Coder to use for the values of each record.
**kwargs: arguments to pass to the corresponding Reader object constructor.
Returns:
A beam reader object.
Raises:
ValueError: If an unsupported record_format is provided.
"""
if record_format == "tfrecord":
return beam.io.ReadFromTFRecord(file_pattern, coder=value_coder, **kwargs)
raise ValueError("Unsupported record format: {}".format(record_format))
def GetWriter(record_format, file_pattern, value_coder, **kwargs):
"""Returns a beam Writer.
Args:
record_format: String record format, e.g., 'tfrecord' to write as.
file_pattern: String path describing files to be written to.
value_coder: Coder to use for the values of each written record.
**kwargs: arguments to pass to the corresponding Writer object constructor.
Returns:
A beam writer object.
Raises:
ValueError: If an unsupported record_format is provided.
"""
if record_format == "tfrecord":
return beam.io.WriteToTFRecord(file_pattern, coder=value_coder, **kwargs)
raise ValueError("Unsupported record format: {}".format(record_format))
def GetEmitterFn(record_format):
"""Returns an Emitter function for the given record_format.
An Emitter function takes in a key and value as arguments and returns
a structure that is compatible with the Beam Writer associated with
the corresponding record_format.
Args:
record_format: String record format, e.g., 'tfrecord' to write as.
Returns:
An emitter function of (key, value) -> Writer's input type.
Raises:
ValueError: If an unsupported record_format is provided.
"""
def _ValueEmitter(key, value):
del key
return [value]
if record_format == "tfrecord":
return _ValueEmitter
raise ValueError("Unsupported record format: {}".format(record_format))
| apache-2.0 |
zofuthan/edx-platform | common/lib/xmodule/xmodule/library_root_xblock.py | 47 | 5037 | """
'library' XBlock (LibraryRoot)
"""
import logging
from xmodule.studio_editable import StudioEditableModule
from xblock.fields import Scope, String, List, Boolean
from xblock.fragment import Fragment
from xblock.core import XBlock
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
class LibraryRoot(XBlock):
"""
The LibraryRoot is the root XBlock of a content library. All other blocks in
the library are its children. It contains metadata such as the library's
display_name.
"""
display_name = String(
help=_("Enter the name of the library as it should appear in Studio."),
default="Library",
display_name=_("Library Display Name"),
scope=Scope.settings
)
advanced_modules = List(
display_name=_("Advanced Module List"),
help=_("Enter the names of the advanced components to use in your library."),
scope=Scope.settings,
xml_node=True,
)
show_children_previews = Boolean(
display_name="Hide children preview",
help="Choose if preview of library contents is shown",
scope=Scope.user_state,
default=True
)
has_children = True
has_author_view = True
def __unicode__(self):
return u"Library: {}".format(self.display_name)
def __str__(self):
return unicode(self).encode('utf-8')
def author_view(self, context):
"""
Renders the Studio preview view.
"""
fragment = Fragment()
self.render_children(context, fragment, can_reorder=False, can_add=True)
return fragment
def render_children(self, context, fragment, can_reorder=False, can_add=False): # pylint: disable=unused-argument
"""
Renders the children of the module with HTML appropriate for Studio. Reordering is not supported.
"""
contents = []
paging = context.get('paging', None)
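        # When present, 'paging' is expected to carry the slice parameters
        # used below, e.g. (hypothetical values):
        #   {'page_number': 0, 'page_size': 10}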
children_count = len(self.children) # pylint: disable=no-member
item_start, item_end = 0, children_count
# TODO sort children
if paging:
page_number = paging.get('page_number', 0)
raw_page_size = paging.get('page_size', None)
page_size = raw_page_size if raw_page_size is not None else children_count
item_start, item_end = page_size * page_number, page_size * (page_number + 1)
children_to_show = self.children[item_start:item_end] # pylint: disable=no-member
force_render = context.get('force_render', None)
for child_key in children_to_show:
# Children must have a separate context from the library itself. Make a copy.
child_context = context.copy()
child_context['show_preview'] = self.show_children_previews
child_context['can_edit_visibility'] = False
child = self.runtime.get_block(child_key)
child_view_name = StudioEditableModule.get_preview_view_name(child)
if unicode(child.location) == force_render:
child_context['show_preview'] = True
if child_context['show_preview']:
rendered_child = self.runtime.render_child(child, child_view_name, child_context)
else:
rendered_child = self.runtime.render_child_placeholder(child, child_view_name, child_context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': unicode(child.location),
'content': rendered_child.content,
})
fragment.add_content(
self.runtime.render_template("studio_render_paged_children_view.html", {
'items': contents,
'xblock_context': context,
'can_add': can_add,
'first_displayed': item_start,
'total_children': children_count,
'displayed_children': len(children_to_show),
'previews': self.show_children_previews
})
)
@property
def display_org_with_default(self):
"""
Org display names are not implemented. This just provides API compatibility with CourseDescriptor.
Always returns the raw 'org' field from the key.
"""
return self.scope_ids.usage_id.course_key.org
@property
def display_number_with_default(self):
"""
Display numbers are not implemented. This just provides API compatibility with CourseDescriptor.
Always returns the raw 'library' field from the key.
"""
return self.scope_ids.usage_id.course_key.library
@XBlock.json_handler
def trigger_previews(self, request_body, suffix): # pylint: disable=unused-argument
""" Enable or disable previews in studio for library children. """
self.show_children_previews = request_body.get('showChildrenPreviews', self.show_children_previews)
return {'showChildrenPreviews': self.show_children_previews}
| agpl-3.0 |
mnunberg/couchbase-python-client | examples/search_keywords.py | 1 | 3016 | #!/usr/bin/env python
#
# Copyright 2013, Couchbase, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file demonstrates some of the functionality available with view
# queries. It creates a number of key-value pairs whose values are numbers,
# defines a view that indexes the key-value pairs by the number itself, and
# finally queries the view to return the ten items with the highest values.
from argparse import ArgumentParser
import random
import pprint
from couchbase.bucket import Bucket
ap = ArgumentParser()
ap.add_argument('-D', '--create-design', default=False,
action='store_true',
help='whether to create the design')
ap.add_argument('-n', '--number-of-terms', default=10,
type=int, help="How many terms to generate")
options = ap.parse_args()
c = Bucket('couchbase://localhost/default')
DESIGN = {
'_id': '_design/search_keywords',
'language': 'javascript',
'views': {
'top_keywords': {
'map':
"""
function(doc) {
if (typeof doc === 'number') {
emit(doc, null);
}
}
"""
}
}
}
if options.create_design:
bm = c.bucket_manager()
bm.design_create('search_keywords', DESIGN, use_devmode=False, syncwait=5)
NOUNS = ['cow', 'cat', 'dog', 'computer', 'WMD']
ADJECTIVES = ['happy', 'sad', 'thoughtful', 'extroverted']
kv = {}
for x in range(options.number_of_terms):
n = random.choice(NOUNS)
a = random.choice(ADJECTIVES)
kv[" ".join([a, n])] = random.randint(1, 100000)
c.upsert_multi(kv)
vret = c.query('search_keywords',
'top_keywords',
limit=10,
descending=True)
for row in vret:
pprint.pprint(row, indent=4)
# Sample output:
#[ { u'id': u'WMD sad', u'key': 92772, u'value': None},
# { u'id': u'WMD thoughtful', u'key': 76222, u'value': None},
# { u'id': u'cow happy', u'key': 71984, u'value': None},
# { u'id': u'computer sad', u'key': 68849, u'value': None},
# { u'id': u'cat thoughtful', u'key': 68417, u'value': None},
# { u'id': u'computer thoughtful', u'key': 67518, u'value': None},
# { u'id': u'dog thoughtful', u'key': 67350, u'value': None},
# { u'id': u'computer extroverted', u'key': 63279, u'value': None},
# { u'id': u'cow thoughtful', u'key': 60962, u'value': None},
# { u'id': u'cow sad', u'key': 49510, u'value': None}]
| apache-2.0 |
jackru/pybrain | pybrain/rl/environments/simplerace/simpleracetcp.py | 25 | 4101 | from __future__ import print_function
__author__ = 'Julian Togelius, [email protected]'
from pybrain.rl.environments import Environment
from math import sqrt
import socket
import string
from scipy import zeros
class SimpleraceEnvironment(Environment):
firstCarScore = 0
secondCarScore = 0
lastStepCurrentWp = [0, 0]
lastStepNextWp = [0, 0]
indim = 2
outdim = 7
def __init__(self, host="127.0.0.1", port=6524):
self.theSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.theSocket.connect((host, port))
self.step = 0
print("Connected to a simplerace server")
self.reset()
self.serverIsReady = False
def getSensors(self):
return self.sensors
def performAction(self, action):
        # There is a nicer way of doing the following, but I'll wait with
        # that until I'm a bit more fluent in Python.
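        # The two continuous action components are discretized into a 3x3
        # grid of nine server commands, effectively command = 3 * row + col,
        # where row (from action[0]) and col (from action[1]) are 0, 1 or 2
        # for values below -0.3, within [-0.3, 0.3], and above 0.3.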
if (action[0] > 0.3):
if(action[1]) > 0.3:
command = 8
elif(action[1]) < -0.3:
command = 6
else:
command = 7
elif (action[0] < -0.3):
if(action[1]) > 0.3:
command = 2
elif(action[1]) < -0.3:
command = 0
else:
command = 1
else:
if(action[1]) > 0.3:
command = 5
elif(action[1]) < -0.3:
command = 3
else:
command = 4
if self.waitOne:
print('Waiting one step')
self.waitOne = False
elif self.serverIsReady:
self.theSocket.send (str(command) + "\n")
else:
print("not sending")
# get and process the answer
data = ""
while len (data) < 2:
data = self.theSocket.recv(1000)
#print("received", data)
inputs = string.split(str(data), " ")
if (inputs[0][:5] == "reset"):
print("Should we reset the scores here?")
self.reset ()
self.serverIsReady = True
self.waitOne = True
elif (inputs[0] == "data"):
inputs[2:20] = list(map(float, inputs[2:20]))
self.sensors = inputs[2:9]
currentWp = [inputs[18], inputs[19]]
# check that this is not the first step of an episode
if (self.lastStepCurrentWp[0] != 0):
# check if a way point position has changed
if (currentWp[0] != self.lastStepCurrentWp[0]):
# check that we don't have a server side change of episode
if (currentWp[0] != self.lastStepNextWp[0]):
print(("%.3f %.3f %.3f %.3f " % (currentWp[0], currentWp[1], self.lastStepNextWp[0], self.lastStepNextWp[1])))
raise Exception("Unexpected episode change")
else:
# all is fine, increase score. but for who?
ownPosition = [inputs[9], inputs[10]]
otherPosition = [inputs[14], inputs[15]]
if (self.euclideanDistance(ownPosition, self.lastStepCurrentWp) < self.euclideanDistance(otherPosition, self.lastStepCurrentWp)):
self.firstCarScore += 1
else:
self.secondCarScore += 1
# store old way point positions
self.lastStepCurrentWp = currentWp
self.step += 1
elif (len (inputs[0]) < 2):
print("impossible!")
else:
print(("incomprehensible and thus roundly ignored", data))
def reset(self):
self.step = 0
self.firstCarScore = 0
self.secondCarScore = 0
self.lastStepCurrentWp = [0, 0]
self.lastStepNextWp = [0, 0]
self.sensors = zeros(self.outdim)
self.waitOne = False
def euclideanDistance(self, firstPoint, secondPoint):
return sqrt ((firstPoint[0] - secondPoint[0]) ** 2 + (firstPoint[1] - secondPoint[1]) ** 2)
| bsd-3-clause |
arkatebi/DynamicalSystems | toggleSwitch/tSwitch-det-pSet-3.py | 1 | 9567 | #/usr/bin/env python
import auxiliary_functions as aux
import PyDSTool as dst
from PyDSTool import common as cmn
import numpy as np
from matplotlib import pyplot as plt
import sys
#------------------------------------------------------------------------------#
def defineSystem():
'''
Create an object that defines the desired ODE system.
'''
# Create an object of args class from common module
DSargs = cmn.args(name='Toggle switch of two genes X and Y')
# Set the parameters:
DSargs.pars = aux.parameter_set_3()
# Set the variables:
DSargs.varspecs = aux.equations()
# Set the auxiliary functions:
DSargs.fnspecs = aux.functions()
# Set initial conditions:
DSargs.ics = {'X': 10, 'Y': 10}
DSargs.xdomain = {'X': [0, 1.0e+4], 'Y':[0, 1.0e+4]}
# Set the range of integration:
DSargs.tdomain = [0,100]
return DSargs
#------------------------------------------------------------------------------#
def t_dynamics_X(pts):
# PyPlot commands
plt.plot(pts['t'], pts['X'])
plt.xlabel('t') # Axes labels
plt.ylabel('X') # ...
#plt.xlim([0,7000])
plt.ylim([0,200]) # Range of the y axis
plt.title(ode.name) # Figure title from model name
plt.show()
plt.figure()
#------------------------------------------------------------------------------#
def t_dynamics_Y(pts):
# PyPlot commands
plt.plot(pts['t'], pts['Y'])
plt.xlabel('t') # Axes labels
plt.ylabel('Y') # ...
#plt.xlim([0,7000])
plt.ylim([0,200]) # Range of the y axis
plt.title(ode.name) # Figure title from model name
plt.show()
plt.figure()
#------------------------------------------------------------------------------#
def t_dynamics_XY(pts):
# PyPlot commands
plt.plot(pts['X'], pts['Y'])
plt.xlabel('X') # Axes labels
plt.ylabel('Y') # ...
#plt.xlim([0,7000])
plt.ylim([0,800]) # Range of the y axis
plt.title(ode.name) # Figure title from model name
plt.show()
#------------------------------------------------------------------------------#
def t_dynamics_multi_ICs_X(ode):
plt.ylim([0,200])
plt.hold(True) # Sequences of plot commands will not clear existing figures
for i, x0 in enumerate(np.linspace(-20,10,30)):
ode.set(ics = { 'X': x0 } ) # Initial condition
# Trajectories are called pol0, pol1, ...
# sample them on the fly to create Pointset tmp
tmp = ode.compute('pol%3i' % i).sample()
plt.plot(tmp['t'], tmp['X'])
plt.xlabel('time')
plt.ylabel('X')
plt.title(ode.name + ' multi ICs')
plt.show()
#------------------------------------------------------------------------------#
def t_dynamics_multi_ICs_Y(ode):
plt.ylim([0,200])
plt.hold(True) # Sequences of plot commands will not clear existing figures
for i, y0 in enumerate(np.linspace(-20,10,30)):
ode.set(ics = { 'Y': y0 } ) # Initial condition
# Trajectories are called pol0, pol1, ...
# sample them on the fly to create Pointset tmp
tmp = ode.compute('pol%3i' % i).sample()
plt.plot(tmp['t'], tmp['Y'])
plt.xlabel('time')
plt.ylabel('Y')
plt.title(ode.name + ' multi ICs')
plt.show()
#------------------------------------------------------------------------------#
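# NOTE: the two definitions below redefine (and therefore shadow) the two
# functions of the same names defined above; only these later versions,
# which open a fresh figure and use a wider y-range, take effect at runtime.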
def t_dynamics_multi_ICs_X(ode):
plt.figure()
plt.ylim([0,900])
plt.hold(True) # Sequences of plot commands will not clear existing figures
for i, x0 in enumerate(np.linspace(-20,10,30)):
ode.set(ics = { 'X': x0 } ) # Initial condition
# Trajectories are called pol0, pol1, ...
# sample them on the fly to create Pointset tmp
tmp = ode.compute('pol%3i' % i).sample()
plt.plot(tmp['t'], tmp['X'])
plt.xlabel('time')
plt.ylabel('X')
plt.title(ode.name + ' multi ICs X')
plt.show()
#------------------------------------------------------------------------------#
def t_dynamics_multi_ICs_Y(ode):
plt.figure()
plt.ylim([0,900])
plt.hold(True) # Sequences of plot commands will not clear existing figures
for i, y0 in enumerate(np.linspace(-20,10,30)):
ode.set(ics = { 'Y': y0 } ) # Initial condition
# Trajectories are called pol0, pol1, ...
# sample them on the fly to create Pointset tmp
tmp = ode.compute('pol%3i' % i).sample()
plt.plot(tmp['t'], tmp['Y'])
plt.xlabel('time')
plt.ylabel('Y')
plt.title(ode.name + ' multi ICs Y')
plt.show()
#------------------------------------------------------------------------------#
def t_dynamics_multi_ICs_XY(ode):
plt.figure()
plt.ylim([0,900])
# Sequences of plot commands will not clear existing figures:
plt.hold(True)
for i, x0 in enumerate(np.linspace(1,1000,4)):
for i, y0 in enumerate(np.linspace(1,1000,4)):
# Reset the initial conditions in the Vode_ODEsystem object ode:
ode.set(ics = { 'X': x0, 'Y': y0 } )
# Trajectories are called pol0, pol1, ...
# Sample them on the fly to create tmp, a Pointset object:
tmp = ode.compute('pol%3i' % i).sample()
plt.plot(tmp['X'], tmp['Y'])
plt.xlabel('X')
plt.ylabel('Y')
#plt.title(ode.name + ': multi ICs for both')
plt.show()
#plt.savefig('./figures/parSet-1_tdynamics.pdf')
#------------------------------------------------------------------------------#
def getBifDiagrams(ode):
freepar='gX'
fp=aux.fast_fixedpoint(ode)
print(fp.values())
aux.plot_continuation(ode, freepar, keys=['X','Y'], ncol=2, nrow=1,
LocBifPoints=['LP','B'], bif_startpoint=50,
maxstep=1e+1, minstep=0.01, step=0.1,
silence=True, fs=[4,4], ics=[fp],
xlim=[0,200], ylim=[0,700], fontsize=10)
freepar='gY'
fp=aux.fast_fixedpoint(ode)
aux.plot_continuation(ode, freepar, keys=['X','Y'], ncol=2, nrow=1,
LocBifPoints=['LP','B'], bif_startpoint=50,
maxstep=1e+1, minstep=1e-2, step=1e-1,
silence=True, fs=[4,4], ics=[fp],
xlim=[0,200], ylim=[0,700], fontsize=10)
sys.exit(0)
freepar='kX'
fp=aux.fast_fixedpoint(ode)
print(fp.values())
aux.plot_continuation(ode, freepar, keys=['X','Y'], ncol=2, nrow=1,
LocBifPoints=['LP','B'], bif_startpoint=0.1,
maxstep=1e+1, minstep=0.01, step=0.1,
silence=True, fs=[4,4], ics=[fp],
xlim=[0,1], ylim=[0,700], fontsize=10)
freepar='kY'
fp=aux.fast_fixedpoint(ode)
print(fp.values())
aux.plot_continuation(ode, freepar, keys=['X','Y'], ncol=2, nrow=1,
LocBifPoints=['LP','B'], bif_startpoint=0.1,
maxstep=1e+1, minstep=0.01, step=0.1,
silence=True, fs=[4,4], ics=[fp],
xlim=[0,1], ylim=[0,700], fontsize=10)
sys.exit(0)
freepar='lX'
fp=aux.fast_fixedpoint(ode)
print(fp.values())
aux.plot_continuation(ode, freepar, keys=['X','Y'], ncol=2, nrow=1,
LocBifPoints=['LP','B'], bif_startpoint=0.1,
maxstep=1e+1, minstep=0.01, step=0.1,
silence=True, fs=[4,4], ics=[fp],
xlim=[0,1], ylim=[0,700], fontsize=10)
freepar='lY'
fp=aux.fast_fixedpoint(ode)
print(fp.values())
aux.plot_continuation(ode, freepar, keys=['X','Y'], ncol=2, nrow=1,
LocBifPoints=['LP','B'], bif_startpoint=0.1,
maxstep=1e+1, minstep=0.01, step=0.1,
silence=True, fs=[4,4], ics=[fp],
xlim=[0,1], ylim=[0,700], fontsize=10)
sys.exit(0)
#------------------------------------------------------------------------------#
def getNullClines(DSargs, ode):
from PyDSTool.Toolbox import phaseplane as pp
vlim = {'X': [1, 700], 'Y': [1, 700]}
fp = aux.eliminate_redundants(pp.find_fixedpoints(ode, n=2, maxsearch=1e+4,
eps=1e-12),
4)
stab = aux.stability(fp, ode)
for i in range(len(fp)):
print(stab[i], fp[i])
nfp=0
aux.nullclines(['X','Y'], DSargs, stab, fp, nfp=nfp, vlim=vlim,
maxpoints=[800,800],
xticks=[0, 100, 200, 300, 400, 500, 600, 700],
yticks=[0, 100, 200, 300, 400, 500, 600, 700],
step=0.01, minstep=0.001, maxstep=10, fs=[3,3],
fontsize=8, silence=False)
#------------------------------------------------------------------------------#
if __name__ == '__main__':
DSargs = defineSystem()
# Obtain a Vode_ODEsystem object:
# (similar to VODE from SciPy)
ode = dst.Generator.Vode_ODEsystem(DSargs)
# Obtain a Trajectory object (integrate ODE):
traj = ode.compute('polarization')
# Collect data points as a Pointset object:
pts = traj.sample(dt=0.01)
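    # The Pointset supports name-based access, e.g. pts['t'], pts['X'],
    # pts['Y'] (variable names here assume those defined in defineSystem())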
#t_dynamics_X(pts)
#t_dynamics_Y(pts)
#t_dynamics_XY(pts)
#t_dynamics_multi_ICs_X(ode)
#t_dynamics_multi_ICs_Y(ode)
#t_dynamics_multi_ICs_XY(ode)
#getBifDiagrams(ode)
getNullClines(DSargs, ode)
| gpl-3.0 |
thomasgilgenast/spqr-nonrel | django/contrib/localflavor/uy/forms.py | 310 | 2083 | # -*- coding: utf-8 -*-
"""
UY-specific form helpers.
"""
import re
from django.core.validators import EMPTY_VALUES
from django.forms.fields import Select, RegexField
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.contrib.localflavor.uy.util import get_validation_digit
class UYDepartamentSelect(Select):
"""
A Select widget that uses a list of Uruguayan departaments as its choices.
"""
def __init__(self, attrs=None):
from uy_departaments import DEPARTAMENT_CHOICES
super(UYDepartamentSelect, self).__init__(attrs, choices=DEPARTAMENT_CHOICES)
class UYCIField(RegexField):
"""
A field that validates Uruguayan 'Cedula de identidad' (CI) numbers.
"""
default_error_messages = {
        'invalid': _("Enter a valid CI number in X.XXX.XXX-X, "
                     "XXXXXXX-X or XXXXXXXX format."),
'invalid_validation_digit': _("Enter a valid CI number."),
}
def __init__(self, *args, **kwargs):
super(UYCIField, self).__init__(r'(?P<num>(\d{6,7}|(\d\.)?\d{3}\.\d{3}))-?(?P<val>\d)',
*args, **kwargs)
def clean(self, value):
"""
Validates format and validation digit.
        The official format is [X.]XXX.XXX-X but usually dots and/or the dash
        are omitted so, when validating, those characters are ignored if found
        in the correct place. The three typically used formats are supported:
        [X]XXXXXXX, [X]XXXXXX-X and [X.]XXX.XXX-X.
"""
value = super(UYCIField, self).clean(value)
if value in EMPTY_VALUES:
return u''
match = self.regex.match(value)
if not match:
raise ValidationError(self.error_messages['invalid'])
number = int(match.group('num').replace('.', ''))
validation_digit = int(match.group('val'))
if not validation_digit == get_validation_digit(number):
raise ValidationError(self.error_messages['invalid_validation_digit'])
return value
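# Illustrative sketch (not part of the original module): validating a raw CI
# string outside a form. The sample value below is made up and may not carry
# a correct validation digit, in which case None is returned.
def _demo_validate_ci(raw_value='1.234.567-2'):
    field = UYCIField(required=False)
    try:
        return field.clean(raw_value)
    except ValidationError:
        return None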
| bsd-3-clause |
kizniche/Mycodo | mycodo/functions/backup_rsync.py | 1 | 15933 | # coding=utf-8
#
# backup_rsync.py - Periodically perform backup of Mycodo assets to remote system using rsync
#
# Copyright (C) 2015-2020 Kyle T. Gabriel <[email protected]>
#
# This file is part of Mycodo
#
# Mycodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycodo. If not, see <http://www.gnu.org/licenses/>.
#
# Contact at kylegabriel.com
#
import datetime
import os
import socket
import time
from flask_babel import lazy_gettext
from mycodo.config import ALEMBIC_VERSION
from mycodo.config import MYCODO_VERSION
from mycodo.config import PATH_CAMERAS
from mycodo.config import PATH_MEASUREMENTS_BACKUP
from mycodo.config import PATH_SETTINGS_BACKUP
from mycodo.databases.models import CustomController
from mycodo.functions.base_function import AbstractFunction
from mycodo.mycodo_client import DaemonControl
from mycodo.utils.constraints_pass import constraints_pass_positive_value
from mycodo.utils.database import db_retrieve_table_daemon
from mycodo.utils.system_pi import assure_path_exists
from mycodo.utils.system_pi import cmd_output
from mycodo.utils.tools import create_measurements_export
from mycodo.utils.tools import create_settings_export
try:
host_name = socket.gethostname().replace(' ', '_')
except:
host_name = 'MY_HOST_NAME'
FUNCTION_INFORMATION = {
'function_name_unique': 'BACKUP_REMOTE_RSYNC',
'function_name': 'Backup to Remote Host (rsync)',
'options_disabled': [
'measurements_select',
'measurements_configure'
],
    'message': 'This function will use rsync to back up assets on this system '
               'to a remote system. Your remote system needs to have an SSH '
               'server running and rsync installed. This system will need '
               'rsync installed and be able to access your remote system via '
               'SSH keyfile (login without a password). You can do this by '
               'creating an SSH key on this system running Mycodo with '
               '"ssh-keygen" (leave the password field empty), then run '
               '"ssh-copy-id -i ~/.ssh/id_rsa.pub pi@REMOTE_HOST_IP" to '
               'transfer your public SSH key to your remote system (changing '
               'pi and REMOTE_HOST_IP to the appropriate user and host of '
               'your remote system). You can test if this worked by trying to '
               'connect to your remote system with "ssh pi@REMOTE_HOST_IP" '
               'and you should log in without being asked for a password. Be '
               'careful not to set the Period too low, which could cause the '
               'function to begin running before the previous operation(s) '
               'complete. Therefore, it is recommended to set a relatively '
               'long Period (greater than 10 minutes). The default Period is '
               '15 days. Note that the Period will reset if the system or the '
               'Mycodo daemon restarts and the Function will run, generating '
               'new settings and measurement archives that will be synced. '
               'There are two common ways to use this Function: 1) A short '
               'period (1 hour), only have Backup Camera Directories enabled, '
               'and use the Backup Settings Now and Backup Measurements Now '
               'buttons manually to perform a backup, and 2) A long period '
               '(15 days), only have Backup Settings and Backup Measurements '
               'enabled. You can even create two of these Functions with one '
               'set up to perform long-Period settings and measurement '
               'backups and the other set up to perform short-Period camera '
               'backups.',
'dependencies_module': [
('apt', 'rsync', 'rsync')
],
'custom_options': [
{
'id': 'period',
'type': 'float',
'default_value': 1296000,
'required': True,
'constraints_pass': constraints_pass_positive_value,
'name': lazy_gettext('Period (seconds)'),
'phrase': lazy_gettext('The duration (seconds) between measurements or actions')
},
{
'id': 'start_offset',
'type': 'integer',
'default_value': 300,
'required': True,
'name': 'Start Offset',
'phrase': 'The duration (seconds) to wait before the first operation'
},
{
'id': 'local_user',
'type': 'text',
'default_value': 'pi',
'required': True,
'name': 'Local User',
'phrase': 'The user on this system that will run rsync'
},
{
'id': 'remote_user',
'type': 'text',
'default_value': 'pi',
'required': True,
'name': 'Remote User',
'phrase': 'The user to log in to the remote host'
},
{
'id': 'remote_host',
'type': 'text',
'default_value': '192.168.0.50',
'required': True,
'name': 'Remote Host',
'phrase': 'The IP or host address to send the backup to'
},
{
'id': 'remote_backup_path',
'type': 'text',
'default_value': '/home/pi/backup_mycodo_{}'.format(host_name),
'required': True,
'name': 'Remote Backup Path',
'phrase': 'The path to backup to on the remote host'
},
{
'id': 'rsync_timeout',
'type': 'integer',
'default_value': 3600,
'required': True,
'constraints_pass': constraints_pass_positive_value,
'name': 'Rsync Timeout',
'phrase': 'How long to allow rsync to complete (seconds)'
},
{
'id': 'do_backup_settings',
'type': 'bool',
'default_value': True,
'required': True,
'name': 'Backup Settings Export File',
'phrase': 'Create and backup exported settings file'
},
{
'id': 'backup_remove_settings_archives',
'type': 'bool',
'default_value': False,
'required': True,
'name': 'Remove Local Settings Backups',
'phrase': 'Remove local settings backups after successful transfer to remote host'
},
{
'id': 'do_backup_measurements',
'type': 'bool',
'default_value': True,
'required': True,
'name': 'Backup Measurements',
'phrase': 'Backup all influxdb measurements'
},
{
'id': 'backup_remove_measurements_archives',
'type': 'bool',
'default_value': False,
'required': True,
'name': 'Remove Local Measurements Backups',
'phrase': 'Remove local measurements backups after successful transfer to remote host'
},
{
'id': 'do_backup_cameras',
'type': 'bool',
'default_value': True,
'required': True,
'name': 'Backup Camera Directories',
'phrase': 'Backup all camera directories'
},
{
'id': 'backup_remove_camera_images',
'type': 'bool',
'default_value': False,
'required': True,
'name': 'Remove Local Camera Images',
'phrase': 'Remove local camera images after successful transfer to remote host'
},
{
'id': 'ssh_port',
'type': 'integer',
'default_value': 22,
'required': False,
'name': 'SSH Port',
'phrase': 'Specify a nonstandard SSH port'
}
],
'custom_actions': [
{
'type': 'message',
            'default_value': 'Backups of settings are only created if the '
                             'Mycodo version or database versions change. '
                             'This is because this Function runs '
                             'periodically - if it created a new backup every '
                             'Period, there would soon be many identical '
                             'backups. Therefore, if you want to induce the '
                             'backup of settings, measurements, or camera '
                             'directories and sync them to your remote '
                             'system, use the buttons below.',
},
{
'id': 'create_new_settings_backup',
'type': 'button',
'wait_for_return': False,
'name': 'Backup Settings Now',
'phrase': 'Backup settings via rsync'
},
{
'id': 'create_new_measurements_backup',
'type': 'button',
'wait_for_return': False,
'name': 'Backup Measurements Now',
'phrase': 'Backup measurements via rsync'
},
{
'id': 'create_new_camera_backup',
'type': 'button',
'wait_for_return': False,
'name': 'Backup Camera Directories Now',
'phrase': 'Backup camera directories via rsync'
}
]
}
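# A minimal sketch (not part of Mycodo) of the rsync invocation the class
# below assembles; every default value here is a hypothetical example.
def _example_rsync_command(local_path, remote_user='pi',
                           remote_host='192.168.0.50',
                           remote_path='/home/pi/backup_mycodo',
                           ssh_port=22, remove_source=False):
    remove = "--remove-source-files " if remove_source else ""
    return ("rsync {rem}-avz -e 'ssh -p {port}' "
            "{path_local} {user}@{host}:{remote_path}".format(
                rem=remove, port=ssh_port, path_local=local_path,
                user=remote_user, host=remote_host, remote_path=remote_path))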
class CustomModule(AbstractFunction):
"""
Class to operate custom controller
"""
def __init__(self, function, testing=False):
super(CustomModule, self).__init__(function, testing=testing, name=__name__)
self.is_setup = False
self.timer_loop = time.time()
self.control = DaemonControl()
# Initialize custom options
self.period = None
self.start_offset = None
self.local_user = None
self.remote_user = None
self.remote_host = None
self.remote_backup_path = None
self.rsync_timeout = None
self.do_backup_settings = None
self.backup_remove_settings_archives = None
self.do_backup_measurements = None
self.backup_remove_measurements_archives = None
self.do_backup_cameras = None
self.backup_remove_camera_images = None
self.ssh_port = None
# Set custom options
custom_function = db_retrieve_table_daemon(
CustomController, unique_id=self.unique_id)
self.setup_custom_options(
FUNCTION_INFORMATION['custom_options'], custom_function)
if not testing:
self.initialize_variables()
def initialize_variables(self):
self.timer_loop = time.time() + self.start_offset
self.logger.debug(
"Custom controller started with options: {}, {}, {}, {}, {}".format(
self.remote_host,
self.remote_user,
self.ssh_port,
self.remote_backup_path,
                self.do_backup_settings))
if self.remote_host and self.remote_user and self.remote_backup_path and self.ssh_port:
self.is_setup = True
def loop(self):
if self.timer_loop > time.time():
return
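        # Catch up in whole periods so a slow iteration doesn't shift the schedule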
while self.timer_loop < time.time():
self.timer_loop += self.period
if not self.is_setup:
self.logger.error("Cannot run: Not all options are set")
return
if self.do_backup_settings:
self.backup_settings()
if self.do_backup_measurements:
self.backup_measurements()
if self.do_backup_cameras:
self.backup_camera()
def backup_settings(self):
filename = 'Mycodo_{mver}_Settings_{aver}_{host}_{dt}.zip'.format(
mver=MYCODO_VERSION, aver=ALEMBIC_VERSION,
host=socket.gethostname().replace(' ', ''),
dt=datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
path_save = os.path.join(PATH_SETTINGS_BACKUP, filename)
assure_path_exists(PATH_SETTINGS_BACKUP)
if os.path.exists(path_save):
self.logger.debug(
"Skipping backup of settings: "
"File already exists: {}".format(path_save))
else:
status, saved_path = create_settings_export(save_path=path_save)
if not status:
self.logger.debug("Saved settings file: "
"{}".format(saved_path))
else:
self.logger.debug("Could not create settings file: "
"{}".format(saved_path))
if self.backup_remove_settings_archives:
remove_files = "--remove-source-files "
else:
remove_files = ""
rsync_cmd = "rsync {rem}-avz -e 'ssh -p {port}' {path_local} {user}@{host}:{remote_path}".format(
rem=remove_files,
port=self.ssh_port,
path_local=PATH_SETTINGS_BACKUP,
user=self.remote_user,
host=self.remote_host,
remote_path=self.remote_backup_path)
self.logger.debug("rsync command: {}".format(rsync_cmd))
cmd_out, cmd_err, cmd_status = cmd_output(
rsync_cmd, timeout=self.rsync_timeout, user=self.local_user)
self.logger.debug("rsync returned:\nOut: {}\nError: {}\nStatus: {}".format(
cmd_out.decode(), cmd_err.decode(), cmd_status))
def backup_measurements(self):
influxd_version_out, _, _ = cmd_output(
'/usr/bin/influxd version')
if influxd_version_out:
influxd_version = influxd_version_out.decode('utf-8').split(' ')[1]
else:
influxd_version = "UNKNOWN"
filename = 'Mycodo_{mv}_Influxdb_{iv}_{host}_{dt}.zip'.format(
mv=MYCODO_VERSION, iv=influxd_version,
host=socket.gethostname().replace(' ', ''),
dt=datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
path_save = os.path.join(PATH_MEASUREMENTS_BACKUP, filename)
assure_path_exists(PATH_MEASUREMENTS_BACKUP)
status, saved_path = create_measurements_export(save_path=path_save)
if not status:
self.logger.debug("Saved measurements file: "
"{}".format(saved_path))
else:
self.logger.debug("Could not create measurements file: "
"{}".format(saved_path))
if self.backup_remove_measurements_archives:
remove_files = "--remove-source-files "
else:
remove_files = ""
rsync_cmd = "rsync {rem}-avz -e 'ssh -p {port}' {path_local} {user}@{host}:{remote_path}".format(
rem=remove_files,
port=self.ssh_port,
path_local=PATH_MEASUREMENTS_BACKUP,
user=self.remote_user,
host=self.remote_host,
remote_path=self.remote_backup_path)
self.logger.debug("rsync command: {}".format(rsync_cmd))
cmd_out, cmd_err, cmd_status = cmd_output(
rsync_cmd, timeout=self.rsync_timeout, user=self.local_user)
self.logger.debug("rsync returned:\nOut: {}\nError: {}\nStatus: {}".format(
cmd_out.decode(), cmd_err.decode(), cmd_status))
def backup_camera(self):
if self.backup_remove_camera_images:
remove_files = "--remove-source-files "
else:
remove_files = ""
rsync_cmd = "rsync {rem}-avz -e 'ssh -p {port}' {path_local} {user}@{host}:{remote_path}".format(
rem=remove_files,
port=self.ssh_port,
path_local=PATH_CAMERAS,
user=self.remote_user,
host=self.remote_host,
remote_path=self.remote_backup_path)
self.logger.debug("rsync command: {}".format(rsync_cmd))
cmd_out, cmd_err, cmd_status = cmd_output(
rsync_cmd, timeout=self.rsync_timeout, user=self.local_user)
self.logger.debug("rsync returned:\nOut: {}\nError: {}\nStatus: {}".format(
cmd_out.decode(), cmd_err.decode(), cmd_status))
def create_new_settings_backup(self, args_dict):
self.backup_settings()
def create_new_measurements_backup(self, args_dict):
self.backup_measurements()
def create_new_camera_backup(self, args_dict):
self.backup_camera()
| gpl-3.0 |
leonardowolf/bookfree | flask/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py | 515 | 2165 | from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
def is_fp_closed(obj):
"""
Checks whether a given file-like object is closed.
:param obj:
The file-like object to check.
"""
try:
# Check via the official file-like-object way.
return obj.closed
except AttributeError:
pass
try:
# Check if the object is a container for another file-like object that
# gets released on exhaustion (e.g. HTTPResponse).
return obj.fp is None
except AttributeError:
pass
raise ValueError("Unable to determine whether fp is closed.")
def assert_header_parsing(headers):
"""
Asserts whether all headers have been successfully parsed.
Extracts encountered errors from the result of parsing headers.
Only works on Python 3.
:param headers: Headers to verify.
:type headers: `httplib.HTTPMessage`.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
"""
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
raise TypeError('expected httplib.Message, got {0}.'.format(
type(headers)))
defects = getattr(headers, 'defects', None)
get_payload = getattr(headers, 'get_payload', None)
unparsed_data = None
if get_payload: # Platform-specific: Python 3.
unparsed_data = get_payload()
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
def is_response_to_head(response):
"""
Checks whether the request of a response has been a HEAD-request.
Handles the quirks of AppEngine.
:param conn:
:type conn: :class:`httplib.HTTPResponse`
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
if isinstance(method, int): # Platform-specific: Appengine
return method == 3
return method.upper() == 'HEAD'
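# Hypothetical usage sketch (not part of urllib3): applying the helpers above
# to a response object obtained from httplib.HTTPConnection.getresponse().
def _demo_response_checks(resp):
    try:
        # resp.msg is an httplib.HTTPMessage on Python 3; on Python 2 the
        # isinstance check raises TypeError instead
        assert_header_parsing(resp.msg)
        parsed_ok = True
    except (HeaderParsingError, TypeError):
        parsed_ok = False
    return parsed_ok, is_fp_closed(resp), is_response_to_head(resp)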
| mit |
catapult-project/catapult | dashboard/dashboard/update_test_suites_test.py | 3 | 13778 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import unittest
import webapp2
import webtest
from google.appengine.ext import ndb
from dashboard import update_test_suites
from dashboard.common import descriptor
from dashboard.common import namespaced_stored_object
from dashboard.common import stored_object
from dashboard.common import testing_common
from dashboard.common import utils
from dashboard.models import graph_data
class ListTestSuitesTest(testing_common.TestCase):
def setUp(self):
super(ListTestSuitesTest, self).setUp()
app = webapp2.WSGIApplication([
('/update_test_suites', update_test_suites.UpdateTestSuitesHandler)
])
self.testapp = webtest.TestApp(app)
testing_common.SetIsInternalUser('[email protected]', True)
self.UnsetCurrentUser()
stored_object.Set(descriptor.PARTIAL_TEST_SUITES_KEY, [
'TEST_PARTIAL_TEST_SUITE',
])
stored_object.Set(descriptor.GROUPABLE_TEST_SUITE_PREFIXES_KEY, [
'TEST_GROUPABLE%',
])
descriptor.Descriptor.ResetMemoizedConfigurationForTesting()
def testFetchCachedTestSuites_NotEmpty(self):
# If the cache is set, then whatever's there is returned.
key = namespaced_stored_object.NamespaceKey(
update_test_suites._LIST_SUITES_CACHE_KEY)
stored_object.Set(key, {'foo': 'bar'})
self.assertEqual({'foo': 'bar'}, update_test_suites.FetchCachedTestSuites())
def _AddSampleData(self):
testing_common.AddTests(
['Chromium'], ['win7', 'mac'], {
'dromaeo': {
'dom': {},
'jslib': {},
},
'scrolling': {
'commit_time': {
'www.yahoo.com': {},
'www.cnn.com': {},
},
'commit_time_ref': {},
},
'really': {
'nested': {
'very': {
'deeply': {
'subtest': {}
}
},
'very_very': {}
}
},
})
def testPost_ForcesCacheUpdate(self):
key = namespaced_stored_object.NamespaceKey(
update_test_suites._LIST_SUITES_CACHE_KEY)
stored_object.Set(key, {'foo': 'bar'})
self.assertEqual({'foo': 'bar'}, update_test_suites.FetchCachedTestSuites())
self._AddSampleData()
# Because there is something cached, the cache is
# not automatically updated when new data is added.
self.assertEqual({'foo': 'bar'}, update_test_suites.FetchCachedTestSuites())
stored_object.Set(
namespaced_stored_object.NamespaceKey(
update_test_suites.TEST_SUITES_2_CACHE_KEY), ['foo'])
self.assertEqual(['foo'], update_test_suites.FetchCachedTestSuites2())
    # Making a request to /update_test_suites forces an update.
self.testapp.post('/update_test_suites')
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
}, update_test_suites.FetchCachedTestSuites())
self.assertEqual(['dromaeo', 'really', 'scrolling'],
update_test_suites.FetchCachedTestSuites2())
def testPost_InternalOnly(self):
self.SetCurrentUser('[email protected]')
self._AddSampleData()
master_key = ndb.Key('Master', 'Chromium')
graph_data.Bot(
id='internal_mac', parent=master_key, internal_only=True).put()
t = graph_data.TestMetadata(
id='Chromium/internal_mac/internal_test', internal_only=True)
t.UpdateSheriff()
t.put()
self.testapp.post('/update_test_suites?internal_only=true')
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'internal_test': {
'mas': {
'Chromium': {
'internal_mac': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
}, update_test_suites.FetchCachedTestSuites())
def testFetchCachedTestSuites_Empty_UpdatesWhenFetching(self):
# If the cache is not set at all, then FetchCachedTestSuites
# just updates the cache before returning the list.
self._AddSampleData()
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
}, update_test_suites.FetchCachedTestSuites())
def testFetchSuites_BasicDescription(self):
self._AddSampleData()
for test_path in ['Chromium/win7/scrolling', 'Chromium/mac/scrolling']:
test = utils.TestKey(test_path).get()
test.description = 'Description string.'
test.UpdateSheriff()
test.put()
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
'des': 'Description string.'
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
}, update_test_suites.FetchCachedTestSuites())
def testFetchSuites_DifferentMasters(self):
# If the cache is not set at all, then FetchCachedTestSuites
# just updates the cache before returning the list.
self._AddSampleData()
testing_common.AddTests(['ChromiumFYI'], ['linux'], {
'sunspider': {
'Total': {},
},
})
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'sunspider': {
'mas': {
'ChromiumFYI': {
'linux': False
}
}
},
}, update_test_suites._CreateTestSuiteDict())
def testFetchSuites_SingleDeprecatedBot(self):
self._AddSampleData()
# For another test suite, set it as deprecated on both bots -- it should
# be marked as deprecated in the response dict.
for bot in ['win7']:
test = utils.TestKey('Chromium/%s/really' % bot).get()
test.deprecated = True
test.UpdateSheriff()
test.put()
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': True
}
}
},
}, update_test_suites._CreateTestSuiteDict())
def testFetchSuites_AllDeprecatedBots(self):
self._AddSampleData()
# For another test suite, set it as deprecated on both bots -- it should
# be marked as deprecated in the response dict.
for bot in ['win7', 'mac']:
test = utils.TestKey('Chromium/%s/really' % bot).get()
test.deprecated = True
test.UpdateSheriff()
test.put()
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'dep': True,
'mas': {
'Chromium': {
'mac': True,
'win7': True
}
}
},
}, update_test_suites._CreateTestSuiteDict())
def testFetchSuites_BasicMonitored(self):
self._AddSampleData()
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
}
},
}, update_test_suites._CreateTestSuiteDict())
def testFetchSuites_MultipleMonitored(self):
self._AddSampleData()
testing_common.AddTests(['ChromiumFYI'], ['linux'], {
'dromaeo': {
'foo': {},
},
})
self.assertEqual(
{
'dromaeo': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
},
'ChromiumFYI': {
'linux': False
}
},
},
'scrolling': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
},
},
'really': {
'mas': {
'Chromium': {
'mac': False,
'win7': False
}
}
},
}, update_test_suites._CreateTestSuiteDict())
def testFetchSuites(self):
self._AddSampleData()
suites = update_test_suites._FetchSuites()
suite_keys = [s.key for s in suites]
self.assertEqual(
list(
map(utils.TestKey, [
'Chromium/mac/dromaeo',
'Chromium/mac/really',
'Chromium/mac/scrolling',
'Chromium/win7/dromaeo',
'Chromium/win7/really',
'Chromium/win7/scrolling',
])), suite_keys)
def testGetSubTestPath(self):
key = utils.TestKey('Chromium/mac/my_suite/foo/bar')
self.assertEqual('foo/bar', update_test_suites._GetTestSubPath(key))
def testPartialTestSuites(self):
testing_common.AddTests(['master'], ['bot'], {
'TEST_PARTIAL_TEST_SUITE': {
'COMPOSITE': {
'measurement': {},
},
},
})
self.testapp.post('/update_test_suites')
self.assertEqual(['TEST_PARTIAL_TEST_SUITE:COMPOSITE'],
update_test_suites.FetchCachedTestSuites2())
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
Odingod/mne-python | mne/gui/tests/test_kit2fiff_gui.py | 13 | 3412 | # Authors: Christian Brodbeck <[email protected]>
#
# License: BSD (3-clause)
import os
import warnings
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
from nose.tools import assert_true, assert_false, assert_equal
import mne
from mne.io.kit.tests import data_dir as kit_data_dir
from mne.io import Raw
from mne.utils import _TempDir, requires_traits, run_tests_if_main
mrk_pre_path = os.path.join(kit_data_dir, 'test_mrk_pre.sqd')
mrk_post_path = os.path.join(kit_data_dir, 'test_mrk_post.sqd')
sqd_path = os.path.join(kit_data_dir, 'test.sqd')
hsp_path = os.path.join(kit_data_dir, 'test_hsp.txt')
fid_path = os.path.join(kit_data_dir, 'test_elp.txt')
fif_path = os.path.join(kit_data_dir, 'test_bin_raw.fif')
warnings.simplefilter('always')
@requires_traits
def test_kit2fiff_model():
"""Test CombineMarkersModel Traits Model"""
from mne.gui._kit2fiff_gui import Kit2FiffModel, Kit2FiffPanel
tempdir = _TempDir()
tgt_fname = os.path.join(tempdir, 'test-raw.fif')
model = Kit2FiffModel()
assert_false(model.can_save)
model.markers.mrk1.file = mrk_pre_path
model.markers.mrk2.file = mrk_post_path
model.sqd_file = sqd_path
model.hsp_file = hsp_path
assert_false(model.can_save)
model.fid_file = fid_path
# export raw
assert_true(model.can_save)
raw_out = model.get_raw()
raw_out.save(tgt_fname)
raw = Raw(tgt_fname)
# Compare exported raw with the original binary conversion
raw_bin = Raw(fif_path)
trans_bin = raw.info['dev_head_t']['trans']
want_keys = list(raw_bin.info.keys())
assert_equal(sorted(want_keys), sorted(list(raw.info.keys())))
trans_transform = raw_bin.info['dev_head_t']['trans']
assert_allclose(trans_transform, trans_bin, 0.1)
# Averaging markers
model.markers.mrk3.method = "Average"
trans_avg = model.dev_head_trans
assert_false(np.all(trans_avg == trans_transform))
assert_allclose(trans_avg, trans_bin, 0.1)
# Test exclusion of one marker
model.markers.mrk3.method = "Transform"
model.use_mrk = [1, 2, 3, 4]
assert_false(np.all(model.dev_head_trans == trans_transform))
assert_false(np.all(model.dev_head_trans == trans_avg))
assert_false(np.all(model.dev_head_trans == np.eye(4)))
# test setting stim channels
model.stim_slope = '+'
events_bin = mne.find_events(raw_bin, stim_channel='STI 014')
model.stim_chs = '<'
raw = model.get_raw()
events = mne.find_events(raw, stim_channel='STI 014')
assert_array_equal(events, events_bin)
events_rev = events_bin.copy()
events_rev[:, 2] = 1
model.stim_chs = '>'
raw = model.get_raw()
events = mne.find_events(raw, stim_channel='STI 014')
assert_array_equal(events, events_rev)
model.stim_chs = 'man'
model.stim_chs_manual = list(range(167, 159, -1))
raw = model.get_raw()
events = mne.find_events(raw, stim_channel='STI 014')
assert_array_equal(events, events_bin)
# test reset
model.clear_all()
assert_equal(model.use_mrk, [0, 1, 2, 3, 4])
assert_equal(model.sqd_file, "")
os.environ['_MNE_GUI_TESTING_MODE'] = 'true'
try:
with warnings.catch_warnings(record=True): # traits warnings
warnings.simplefilter('always')
Kit2FiffPanel()
finally:
del os.environ['_MNE_GUI_TESTING_MODE']
run_tests_if_main()
| bsd-3-clause |
t-artistik/browserscope | categories/richtext2/tests/forwarddelete.py | 14 | 15254 |
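# Hedged note on the pad/selection notation used throughout this file (a
# summary of the richtext2 suite conventions, not authoritative): '^' marks a
# collapsed caret, '[' and ']' delimit a text selection, '{' and '}' delimit
# a selection anchored at element boundaries, and '|' in an expected string
# marks an acceptable resulting caret position.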
FORWARDDELETE_TESTS = {
'id': 'FD',
'caption': 'Forward-Delete Tests',
'command': 'forwardDelete',
'checkAttrs': True,
'checkStyle': False,
'Proposed': [
{ 'desc': '',
'tests': [
]
},
{ 'desc': 'forward-delete single characters',
'tests': [
{ 'id': 'CHAR-1_SC',
'desc': 'Delete 1 character',
'pad': 'foo^barbaz',
'expected': 'foo^arbaz' },
{ 'id': 'CHAR-2_SC',
'desc': 'Delete 1 pre-composed character o with diaeresis',
'pad': 'fo^öbarbaz',
'expected': 'fo^barbaz' },
{ 'id': 'CHAR-3_SC',
'desc': 'Delete 1 character with combining diaeresis above',
'pad': 'fo^öbarbaz',
'expected': 'fo^barbaz' },
{ 'id': 'CHAR-4_SC',
'desc': 'Delete 1 character with combining diaeresis below',
'pad': 'fo^o̤barbaz',
'expected': 'fo^barbaz' },
{ 'id': 'CHAR-5_SC',
'desc': 'Delete 1 character with combining diaeresis above and below',
'pad': 'fo^ö̤barbaz',
'expected': 'fo^barbaz' },
{ 'id': 'CHAR-6_SC',
'desc': 'Delete 1 character with enclosing square',
'pad': 'fo^o⃞barbaz',
'expected': 'fo^barbaz' },
{ 'id': 'CHAR-7_SC',
'desc': 'Delete 1 character with combining long solidus overlay',
'pad': 'fo^o̸barbaz',
'expected': 'fo^barbaz' }
]
},
{ 'desc': 'forward-delete text selections',
'tests': [
{ 'id': 'TEXT-1_SI',
'desc': 'Delete text selection',
'pad': 'foo[bar]baz',
'expected': 'foo^baz' },
{ 'id': 'B-1_SE',
'desc': 'Forward-delete at end of span',
'pad': 'foo<b>bar^</b>baz',
'expected': 'foo<b>bar^</b>az' },
{ 'id': 'B-1_SB',
'desc': 'Forward-delete from position before span',
'pad': 'foo^<b>bar</b>baz',
'expected': 'foo^<b>ar</b>baz' },
{ 'id': 'B-1_SW',
'desc': 'Delete selection that wraps the whole span content',
'pad': 'foo<b>[bar]</b>baz',
'expected': 'foo^baz' },
{ 'id': 'B-1_SO',
'desc': 'Delete selection that wraps the whole span',
'pad': 'foo[<b>bar</b>]baz',
'expected': 'foo^baz' },
{ 'id': 'B-1_SL',
'desc': 'Delete oblique selection that starts before span',
'pad': 'foo[bar<b>baz]quoz</b>quuz',
'expected': 'foo^<b>quoz</b>quuz' },
{ 'id': 'B-1_SR',
'desc': 'Delete oblique selection that ends after span',
'pad': 'foo<b>bar[baz</b>quoz]quuz',
'expected': 'foo<b>bar^</b>quuz' },
{ 'id': 'B.I-1_SM',
'desc': 'Delete oblique selection that starts and ends in different spans',
'pad': 'foo<b>bar[baz</b><i>qoz]quuz</i>quuuz',
'expected': 'foo<b>bar^</b><i>quuz</i>quuuz' },
{ 'id': 'GEN-1_SE',
'desc': 'Delete at end of span with generated content',
'pad': 'foo<gen>bar^</gen>baz',
'expected': 'foo<gen>bar^</gen>az' },
{ 'id': 'GEN-1_SB',
'desc': 'Delete from position before span with generated content',
'pad': 'foo^<gen>bar</gen>baz',
'expected': 'foo^<gen>ar</gen>baz' }
]
},
{ 'desc': 'forward-delete paragraphs',
'tests': [
{ 'id': 'P2-1_SE1',
'desc': 'Delete from collapsed selection at end of paragraph - should merge with next',
'pad': '<p>foobar^</p><p>bazqoz</p>',
'expected': '<p>foobar^bazqoz</p>' },
{ 'id': 'P2-1_SI1',
'desc': 'Delete non-collapsed selection at end of paragraph - should not merge with next',
'pad': '<p>foo[bar]</p><p>bazqoz</p>',
'expected': '<p>foo^</p><p>bazqoz</p>' },
{ 'id': 'P2-1_SM',
'desc': 'Delete non-collapsed selection spanning 2 paragraphs - should merge them',
'pad': '<p>foo[bar</p><p>baz]qoz</p>',
'expected': '<p>foo^qoz</p>' }
]
},
{ 'desc': 'forward-delete lists and list items',
'tests': [
{ 'id': 'OL-LI2-1_SO1',
'desc': 'Delete fully wrapped list item',
'pad': 'foo<ol>{<li>bar</li>}<li>baz</li></ol>qoz',
'expected': ['foo<ol>|<li>baz</li></ol>qoz',
'foo<ol><li>^baz</li></ol>qoz'] },
{ 'id': 'OL-LI2-1_SM',
'desc': 'Delete oblique range between list items within same list',
'pad': 'foo<ol><li>ba[r</li><li>b]az</li></ol>qoz',
'expected': 'foo<ol><li>ba^az</li></ol>qoz' },
{ 'id': 'OL-LI-1_SW',
'desc': 'Delete contents of last list item (list should remain)',
'pad': 'foo<ol><li>[foo]</li></ol>qoz',
'expected': ['foo<ol><li>|</li></ol>qoz',
'foo<ol><li>^</li></ol>qoz'] },
{ 'id': 'OL-LI-1_SO',
'desc': 'Delete last list item of list (should remove entire list)',
'pad': 'foo<ol>{<li>foo</li>}</ol>qoz',
'expected': 'foo^qoz' }
]
},
{ 'desc': 'forward-delete with strange selections',
'tests': [
{ 'id': 'HR.BR-1_SM',
'desc': 'Delete selection that starts and ends within nodes that don\'t have children',
'pad': 'foo<hr {>bar<br }>baz',
'expected': 'foo<hr>|<br>baz' }
]
},
{ 'desc': 'forward-delete from immediately before a table',
'tests': [
{ 'id': 'TABLE-1_SB',
'desc': 'Delete from position immediately before table (should have no effect)',
'pad': 'foo^<table><tbody><tr><td>bar</td></tr></tbody></table>baz',
'expected': 'foo^<table><tbody><tr><td>bar</td></tr></tbody></table>baz' }
]
},
{ 'desc': 'forward-delete within table cells',
'tests': [
{ 'id': 'TD-1_SE',
'desc': 'Delete from end of last cell (should have no effect)',
'pad': 'foo<table><tbody><tr><td>bar^</td></tr></tbody></table>baz',
'expected': 'foo<table><tbody><tr><td>bar^</td></tr></tbody></table>baz' },
{ 'id': 'TD2-1_SE1',
'desc': 'Delete from end of inner cell (should have no effect)',
'pad': 'foo<table><tbody><tr><td>bar^</td><td>baz</td></tr></tbody></table>quoz',
'expected': 'foo<table><tbody><tr><td>bar^</td><td>baz</td></tr></tbody></table>quoz' },
{ 'id': 'TD2-1_SM',
'desc': 'Delete with selection spanning 2 cells',
'pad': 'foo<table><tbody><tr><td>ba[r</td><td>b]az</td></tr></tbody></table>quoz',
'expected': 'foo<table><tbody><tr><td>ba^</td><td>az</td></tr></tbody></table>quoz' }
]
},
{ 'desc': 'forward-delete table rows',
'tests': [
{ 'id': 'TR3-1_SO1',
'desc': 'Delete first table row',
'pad': '<table><tbody>{<tr><td>A</td></tr>}<tr><td>B</td></tr><tr><td>C</td></tr></tbody></table>',
'expected': ['<table><tbody>|<tr><td>B</td></tr><tr><td>C</td></tr></tbody></table>',
'<table><tbody><tr><td>^B</td></tr><tr><td>C</td></tr></tbody></table>'] },
{ 'id': 'TR3-1_SO2',
'desc': 'Delete middle table row',
'pad': '<table><tbody><tr><td>A</td></tr>{<tr><td>B</td></tr>}<tr><td>C</td></tr></tbody></table>',
'expected': ['<table><tbody><tr><td>A</td></tr>|<tr><td>C</td></tr></tbody></table>',
'<table><tbody><tr><td>A</td></tr><tr><td>^C</td></tr></tbody></table>'] },
{ 'id': 'TR3-1_SO3',
'desc': 'Delete last table row',
'pad': '<table><tbody><tr><td>A</td></tr><tr><td>B</td></tr>{<tr><td>C</td></tr>}</tbody></table>',
'expected': ['<table><tbody><tr><td>A</td></tr><tr><td>B</td></tr>|</tbody></table>',
'<table><tbody><tr><td>A</td></tr><tr><td>B^</td></tr></tbody></table>'] },
{ 'id': 'TR2rs:2-1_SO1',
'desc': 'Delete first table row where a cell has rowspan 2',
'pad': '<table><tbody>{<tr><td>A</td><td rowspan=2>R</td></tr>}<tr><td>B</td></tr></tbody></table>',
'expected': ['<table><tbody>|<tr><td>B</td><td>R</td></tr></tbody></table>',
'<table><tbody><tr><td>^B</td><td>R</td></tr></tbody></table>'] },
{ 'id': 'TR2rs:2-1_SO2',
'desc': 'Delete second table row where a cell has rowspan 2',
'pad': '<table><tbody><tr><td>A</td><td rowspan=2>R</td></tr>{<tr><td>B</td></tr>}</tbody></table>',
'expected': ['<table><tbody><tr><td>A</td><td>R</td></tr>|</tbody></table>',
'<table><tbody><tr><td>A</td><td>R^</td></tr></tbody></table>'] },
{ 'id': 'TR3rs:3-1_SO1',
'desc': 'Delete first table row where a cell has rowspan 3',
'pad': '<table><tbody>{<tr><td>A</td><td rowspan=3>R</td></tr>}<tr><td>B</td></tr><tr><td>C</td></tr></tbody></table>',
'expected': ['<table><tbody>|<tr><td>A</td><td rowspan="2">R</td></tr><tr><td>C</td></tr></tbody></table>',
'<table><tbody><tr><td>^A</td><td rowspan="2">R</td></tr><tr><td>C</td></tr></tbody></table>'] },
{ 'id': 'TR3rs:3-1_SO2',
'desc': 'Delete middle table row where a cell has rowspan 3',
'pad': '<table><tbody><tr><td>A</td><td rowspan=3>R</td></tr>{<tr><td>B</td></tr>}<tr><td>C</td></tr></tbody></table>',
'expected': ['<table><tbody><tr><td>B</td><td rowspan="2">R</td></tr>|<tr><td>C</td></tr></tbody></table>',
'<table><tbody><tr><td>B</td><td rowspan="2">R</td></tr><tr><td>^C</td></tr></tbody></table>'] },
{ 'id': 'TR3rs:3-1_SO3',
'desc': 'Delete last table row where a cell has rowspan 3',
'pad': '<table><tbody><tr><td>A</td><td rowspan=3>R</td></tr><tr><td>B</td></tr>{<tr><td>C</td></tr>}</tbody></table>',
'expected': ['<table><tbody><tr><td>A</td><td rowspan="2">R</td></tr><tr><td>B</td></tr>|</tbody></table>',
'<table><tbody><tr><td>A</td><td rowspan="2">R</td></tr><tr><td>B^</td></tr></tbody></table>'] }
]
},
{ 'desc': 'delete with non-editable nested content',
'tests': [
{ 'id': 'DIV:ce:false-1_SO',
'desc': 'Delete nested non-editable <div>',
'pad': 'foo[bar<div contenteditable="false">NESTED</div>baz]qoz',
'expected': 'foo^qoz' },
{ 'id': 'DIV:ce:false-1_SB',
'desc': 'Delete from immediately before a nested non-editable <div> (should be no-op)',
'pad': 'foobar^<div contenteditable="false">NESTED</div>bazqoz',
'expected': 'foobar^<div contenteditable="false">NESTED</div>bazqoz' },
{ 'id': 'DIV:ce:false-1_SL',
'desc': 'Delete nested non-editable <div> with oblique selection',
'pad': 'foo[bar<div contenteditable="false">NES]TED</div>bazqoz',
'expected': [ 'foo^<div contenteditable="false">NESTED</div>bazqoz',
'foo<div contenteditable="false">[NES]TED</div>bazqoz' ] },
{ 'id': 'DIV:ce:false-1_SR',
'desc': 'Delete nested non-editable <div> with oblique selection',
'pad': 'foobar<div contenteditable="false">NES[TED</div>baz]qoz',
'expected': [ 'foobar<div contenteditable="false">NESTED</div>^qoz',
'foobar<div contenteditable="false">NES[TED]</div>qoz' ] },
{ 'id': 'DIV:ce:false-1_SI',
'desc': 'Delete inside nested non-editable <div> (should be no-op)',
'pad': 'foobar<div contenteditable="false">NE[ST]ED</div>bazqoz',
'expected': 'foobar<div contenteditable="false">NE[ST]ED</div>bazqoz' }
]
},
{ 'desc': 'Delete with display:inline-block',
'checkStyle': True,
'tests': [
{ 'id': 'SPAN:d:ib-1_SC',
'desc': 'Delete inside an inline-block <span>',
'pad': 'foo<span style="display: inline-block">bar^baz</span>qoz',
'expected': 'foo<span style="display: inline-block">bar^az</span>qoz' },
{ 'id': 'SPAN:d:ib-1_SA',
'desc': 'Delete from immediately before an inline-block <span>',
'pad': 'foo^<span style="display: inline-block">barbaz</span>qoz',
'expected': 'foo^<span style="display: inline-block">arbaz</span>qoz' },
{ 'id': 'SPAN:d:ib-2_SL',
'desc': 'Delete with nested inline-block <span>, oblique selection',
'pad': 'foo[DEL<span style="display: inline-block">ETE]bar</span>baz',
'expected': 'foo^<span style="display: inline-block">bar</span>baz' },
{ 'id': 'SPAN:d:ib-3_SR',
'desc': 'Delete with nested inline-block <span>, oblique selection',
'pad': 'foo<span style="display: inline-block">bar[DEL</span>ETE]baz',
'expected': 'foo<span style="display: inline-block">bar^</span>baz' },
{ 'id': 'SPAN:d:ib-4i_SI',
'desc': 'Delete with nested inline-block <span>, oblique selection',
'pad': 'foo<span style="display: inline-block">bar[DELETE]baz</span>qoz',
'expected': 'foo<span style="display: inline-block">bar^baz</span>qoz' },
{ 'id': 'SPAN:d:ib-4l_SI',
'desc': 'Delete with nested inline-block <span>, oblique selection',
'pad': 'foo<span style="display: inline-block">[DELETE]barbaz</span>qoz',
'expected': 'foo<span style="display: inline-block">^barbaz</span>qoz' },
{ 'id': 'SPAN:d:ib-4r_SI',
'desc': 'Delete with nested inline-block <span>, oblique selection',
'pad': 'foo<span style="display: inline-block">barbaz[DELETE]</span>qoz',
'expected': 'foo<span style="display: inline-block">barbaz^</span>qoz' }
]
}
]
}
| apache-2.0 |
slightstone/SickRage | lib/hachoir_parser/image/tga.py | 90 | 2927 | """
Truevision Targa Graphic (TGA) picture parser.
Author: Victor Stinner
Creation: 18 december 2006
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import FieldSet, UInt8, UInt16, Enum, RawBytes
from lib.hachoir_core.endian import LITTLE_ENDIAN
from lib.hachoir_parser.image.common import PaletteRGB
class Line(FieldSet):
def __init__(self, *args):
FieldSet.__init__(self, *args)
self._size = self["/width"].value * self["/bpp"].value
def createFields(self):
for x in xrange(self["/width"].value):
yield UInt8(self, "pixel[]")
class Pixels(FieldSet):
def __init__(self, *args):
FieldSet.__init__(self, *args)
self._size = self["/width"].value * self["/height"].value * self["/bpp"].value
def createFields(self):
if self["/options"].value == 0:
RANGE = xrange(self["/height"].value-1,-1,-1)
else:
RANGE = xrange(self["/height"].value)
for y in RANGE:
yield Line(self, "line[%u]" % y)
class TargaFile(Parser):
PARSER_TAGS = {
"id": "targa",
"category": "image",
"file_ext": ("tga",),
"mime": (u"image/targa", u"image/tga", u"image/x-tga"),
"min_size": 18*8,
"description": u"Truevision Targa Graphic (TGA)"
}
CODEC_NAME = {
1: u"8-bit uncompressed",
2: u"24-bit uncompressed",
9: u"8-bit RLE",
10: u"24-bit RLE",
}
endian = LITTLE_ENDIAN
def validate(self):
if self["version"].value != 1:
return "Unknown version"
if self["codec"].value not in self.CODEC_NAME:
return "Unknown codec"
if self["x_min"].value != 0 or self["y_min"].value != 0:
return "(x_min, y_min) is not (0,0)"
if self["bpp"].value not in (8, 24):
return "Unknown bits/pixel value"
return True
def createFields(self):
yield UInt8(self, "hdr_size", "Header size in bytes")
yield UInt8(self, "version", "Targa version (always one)")
yield Enum(UInt8(self, "codec", "Pixels encoding"), self.CODEC_NAME)
yield UInt16(self, "palette_ofs", "Palette absolute file offset")
yield UInt16(self, "nb_color", "Number of color")
yield UInt8(self, "color_map_size", "Color map entry size")
yield UInt16(self, "x_min")
yield UInt16(self, "y_min")
yield UInt16(self, "width")
yield UInt16(self, "height")
yield UInt8(self, "bpp", "Bits per pixel")
yield UInt8(self, "options", "Options (0: vertical mirror)")
if self["bpp"].value == 8:
yield PaletteRGB(self, "palette", 256)
if self["codec"].value == 1:
yield Pixels(self, "pixels")
else:
size = (self.size - self.current_size) // 8
if size:
yield RawBytes(self, "raw_pixels", size)
| gpl-3.0 |
nacc/autotest | mirror/config-sample.py | 6 | 4369 | """
Sample configuration file for the "mirror" script that will use
rsync://rsync.kernel.org to fetch a kernel file list and schedule jobs on new
kernel releases.
This file has to be valid python code executed by the "mirror" script. The file
may define and do anything but the following "names" are special:
- a global name "source" is expected to implement get_new_files() method which
will be used by "mirror" to fetch the list of new files
- an optional global iteratable of regular expression strings named
"filter_exprs" where for each regular expression if there is a match group
named "arg" then the original kernel filename will be replaced with the
contents of that group; if no such match group is defined then all the filename
will be considered (if there is at least one regular expression that matches
the filename, otherwise the filename is just filtered out); if "filter_exprs"
is not defined (or defined to be empty) then no filtering is performed
- an optional "trigger" instance of a trigger class; by default this is
initialized with trigger.trigger() but you can set it to another instance
(of your own site specific trigger class); even if you don't set it you
most certainly want to add a couple of actions to the trigger instance to
be executed for the new kernels (by default the list is empty and nothing
will happen with the new kernels other than being included in the known
kernels database so future lookups will not consider them new again)
"""
from autotest.mirror import database, source as source_module
from autotest.mirror import trigger as trigger_module
# create a database object where to store information about known files
db = database.dict_database('rsync.kernel.org.db')
# create a source object that will be used to fetch the list of new kernel
# files (this example uses rsync_source)
source = source_module.rsync_source(db,
'rsync://rsync.kernel.org/pub/linux/kernel',
excludes=('2.6.0-test*/', 'broken-out/', '*.sign', '*.gz'))
source.add_path('v2.6/patch-2.6.*.bz2', 'v2.6')
source.add_path('v2.6/linux-2.6.[0-9].tar.bz2', 'v2.6')
source.add_path('v2.6/linux-2.6.[0-9][0-9].tar.bz2', 'v2.6')
source.add_path('v2.6/testing/patch*.bz2', 'v2.6/testing')
source.add_path('v2.6/snapshots/*.bz2', 'v2.6/snapshots')
source.add_path('people/akpm/patches/2.6/*', 'akpm')
# Given a list of files filter and transform it for entries that look like
# legitimate releases (may be empty in which case no filtering/transformation
# is done). If you want to replace the matched filename to only a part of it
# put the part you want extracted in a match group named "arg".
filter_exprs = (
# The major tarballs
r'^(.*/)?linux-(?P<arg>2\.6\.\d+)\.tar\.bz2$',
# Stable releases
r'^(.*/)?patch-(?P<arg>2\.6\.\d+\.\d+)\.bz2$',
# -rc releases
r'^(.*/)?patch-(?P<arg>2\.6\.\d+-rc\d+)\.bz2$',
# -git releases
r'^(.*/)?patch-(?P<arg>2\.6\.\d+(-rc\d+)?-git\d+)\.bz2$',
# -mm tree
r'^(.*/)?(?P<arg>2\.6\.\d+(-rc\d+)?-mm\d+)\.bz2$',
)
# associate kernel versions with kernel config files
# all machines have the same hardware configuration so they will all
# use the same mapping for kernel version -> kernel config file
_common_kernel_config = {
'2.6.20': '/path/to/2.6.20.config',
'2.6.25': '~/kernel-2.6.25.config',
'2.6.29': 'http://somesite/configs/2.6.29.conf',
}
# a mapping of machine -> machine_info (containing list a of test names as
# they are named in the frontend database and kernel version association to
# kernel config filenames)
_tests_map = {
'mach1': trigger_module.map_action.machine_info(
('test1', 'server test2'), _common_kernel_config),
'mach2': trigger_module.map_action.machine_info(
('test1',), _common_kernel_config),
'mach3': trigger_module.map_action.machine_info(
('test3',), _common_kernel_config),
'mach4': trigger_module.map_action.machine_info(
('test4',), _common_kernel_config),
}
# instantiate the default trigger (a site-specific trigger subclass instance
# could be used here instead)
# now register some trigger actions otherwise nothing will be done for the new
# kernel versions
trigger = trigger_module.trigger()
trigger.add_action(trigger_module.map_action(_tests_map, 'kerntest-%s'))
trigger.add_action(trigger_module.email_action('[email protected]'))
| gpl-2.0 |
wenxer/fbone | fbone/settings/views.py | 8 | 2765 | # -*- coding: utf-8 -*-
import os
import hashlib
from datetime import datetime
from flask import Blueprint, render_template, current_app, request, flash
from flask.ext.login import login_required, current_user
from ..extensions import db
from ..user import User
from ..utils import allowed_file, make_dir
from .forms import ProfileForm, PasswordForm
settings = Blueprint('settings', __name__, url_prefix='/settings')
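# Hypothetical registration sketch: an application factory would typically
# mount this blueprint with app.register_blueprint(settings), exposing
# /settings/profile and /settings/password.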
@settings.route('/profile', methods=['GET', 'POST'])
@login_required
def profile():
user = User.query.filter_by(name=current_user.name).first_or_404()
form = ProfileForm(obj=user.user_detail,
email=current_user.email,
role_code=current_user.role_code,
status_code=current_user.status_code,
next=request.args.get('next'))
if form.validate_on_submit():
if form.avatar_file.data:
upload_file = request.files[form.avatar_file.name]
if upload_file and allowed_file(upload_file.filename):
                # Don't trust user input: the stored filename is derived from
                # a hash of the file content rather than the uploaded name.
                # Alternatively, use secure_filename:
                # http://flask.pocoo.org/docs/patterns/fileuploads/
user_upload_dir = os.path.join(current_app.config['UPLOAD_FOLDER'], "user_%s" % user.id)
current_app.logger.debug(user_upload_dir)
make_dir(user_upload_dir)
root, ext = os.path.splitext(upload_file.filename)
today = datetime.now().strftime('_%Y-%m-%d')
# Hash file content as filename.
hash_filename = hashlib.sha1(upload_file.read()).hexdigest() + "_" + today + ext
user.avatar = hash_filename
avatar_ab_path = os.path.join(user_upload_dir, user.avatar)
                # Reset file cursor since we used read()
upload_file.seek(0)
upload_file.save(avatar_ab_path)
form.populate_obj(user)
form.populate_obj(user.user_detail)
db.session.add(user)
db.session.commit()
flash('Public profile updated.', 'success')
return render_template('settings/profile.html', user=user,
active="profile", form=form)
@settings.route('/password', methods=['GET', 'POST'])
@login_required
def password():
user = User.query.filter_by(name=current_user.name).first_or_404()
form = PasswordForm(next=request.args.get('next'))
if form.validate_on_submit():
form.populate_obj(user)
user.password = form.new_password.data
db.session.add(user)
db.session.commit()
flash('Password updated.', 'success')
return render_template('settings/password.html', user=user,
active="password", form=form)
| bsd-3-clause |
newmediamedicine/indivo_server_1_0 | indivo/tests/integration/test_modules/messaging.py | 1 | 2631 | import data
PRD = 'prd'
from utils import *
def test_messaging(IndivoClient):
try:
BODY = 'body'
SUBJECT = 'subject'
MSG_ID = 'message_id'
SEVERITY = 'severity'
admin_client = IndivoClient(data.machine_app_email, data.machine_app_secret)
admin_client.set_app_id(data.app_email)
account_id = admin_client.create_account(data.account03)[PRD]['Account'][0]
admin_client.add_auth_system(account_id=account_id, data={'system':'password', 'username':data.account03['username'], 'password':data.account03['user_pass']})
record_id = admin_client.create_record(data=data.contact).response['prd']['Record'][0]
admin_client.set_record_owner(data=account_id)
admin_client.setup_app(record_id=record_id, app_id=data.app_email)
admin_client.message_record(data={SUBJECT : data.message01[SUBJECT],
BODY : data.message01[BODY],
SEVERITY: data.message01[SEVERITY]},
message_id = data.message01[MSG_ID])
admin_client.message_account(account_id = account_id,
data= { SUBJECT : data.message02[SUBJECT],
BODY : data.message02[BODY],
MSG_ID : data.message02[MSG_ID],
SEVERITY : data.message02[SEVERITY]})
token = admin_client.setup_app( record_id = record_id,
app_id = data.app_email).response[PRD]
user_client = IndivoClient(data.app_email, data.app_secret)
user_client.update_token(token)
user_client.set_app_id(data.app_email)
user_client.get_messages(record_id = record_id)
chrome_client = IndivoClient(data.chrome_consumer_key, data.chrome_consumer_secret)
chrome_client.create_session(data.account03)
#
# check that archival removes one of the messages
#
def num_messages():
messages = xpath(parse_xml(chrome_client.account_inbox(account_id = data.account03['account_id'])), "/Messages/Message")
return len(messages)
num_messages_before = num_messages()
message_id = xpath(parse_xml(chrome_client.account_inbox(account_id = data.account03['account_id'])), "/Messages/Message/@id")[0]
chrome_client.account_message_archive(account_id = data.account03['account_id'], message_id = message_id)
num_messages_after = num_messages()
assert num_messages_before - num_messages_after == 1, "message didn't get archived"
except Exception, e:
return False, e
return True
| gpl-3.0 |
anaruse/chainer | tests/chainer_tests/utils_tests/test_type_check.py | 2 | 11918 | import sys
import unittest
import numpy
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainer.utils import type_check as T
class TestConstant(unittest.TestCase):
def setUp(self):
self.x = T.Constant(10)
def test_str(self):
self.assertEqual('10', str(self.x))
def test_eval(self):
self.assertEqual(10, self.x.eval())
class TestVariable(unittest.TestCase):
def setUp(self):
self.x = T.Variable(10, 'x')
def test_str(self):
self.assertEqual('x', str(self.x))
def test_eval(self):
self.assertEqual(10, self.x.eval())
class Object(object):
def __init__(self):
self.value = 10
class TestGetAttr(unittest.TestCase):
def setUp(self):
x = Object()
self.value = T.GetAttr(T.Variable(x, 'x'), 'value')
self.value2 = T.GetAttr(T.Variable(x, 'x'), T.Constant('value'))
self.value3 = T.GetAttr(T.Variable(x, 'x'), 3)
def test_str(self):
self.assertEqual('x.value', str(self.value))
self.assertEqual('x.value', str(self.value2))
self.assertEqual('getattr(x, 3)', str(self.value3))
def test_eval(self):
self.assertEqual(10, self.value.eval())
class TestGetItem(unittest.TestCase):
def setUp(self):
x = T.Variable([1, 2, 3], 'x')
y = T.Variable({'a': 1, 'b': 2}, 'y')
self.x = x
self.v1 = T.GetItem(x, 1)
self.v2 = T.GetItem(y, 'a')
def test_str(self):
self.assertEqual('x[1]', str(self.v1))
self.assertEqual("y['a']", str(self.v2))
x = self.x
self.assertEqual('x[:]', str(x[:]))
self.assertEqual('x[:]', str(x[::]))
self.assertEqual('x[1:]', str(x[1:]))
self.assertEqual('x[:2]', str(x[:2]))
self.assertEqual('x[1:2]', str(x[1:2]))
self.assertEqual('x[1::1]', str(x[1::1]))
self.assertEqual('x[:2:1]', str(x[:2:1]))
self.assertEqual('x[1:2:1]', str(x[1:2:1]))
self.assertEqual('x[...]', str(x[...]))
self.assertEqual('x[0, 1]', str(x[0, 1]))
self.assertEqual('x[1:2, ...]', str(x[1:2:, ...]))
def test_eval(self):
self.assertEqual(2, self.v1.eval())
self.assertEqual(1, self.v2.eval())
class TestCall(unittest.TestCase):
def setUp(self):
f = T.Variable(sum, 'sum')
self.c1 = T.Call(f, ([1, 2, 3],))
self.c2 = f([1, 2, 3])
self.c3 = T.Call(f, (['', 1],))
def test_str(self):
self.assertEqual('sum([1, 2, 3])', str(self.c1))
self.assertEqual('sum([1, 2, 3])', str(self.c2))
self.assertEqual('sum([\'\', 1])', str(self.c3))
def test_eval(self):
self.assertEqual(6, self.c1.eval())
self.assertEqual(6, self.c2.eval())
        # an error occurs in `eval`
with self.assertRaises(TypeError):
self.assertEqual(6, self.c3.eval())
class TestBinaryOperator(unittest.TestCase):
def setUp(self):
x = T.Variable(1, 'x')
y = T.Variable(1, 'y')
def f(x, y):
return x, y
self.op1 = T.BinaryOperator(7, x, y, '+', f)
self.op2 = T.BinaryOperator(8, x, y, '+', f)
self.op3 = T.BinaryOperator(9, x, y, '+', f)
self.op4 = T.BinaryOperator(7, x, y, '+', f, True)
self.op5 = T.BinaryOperator(8, x, y, '+', f, True)
self.op6 = T.BinaryOperator(9, x, y, '+', f, True)
def test_str(self):
self.assertEqual('x + y', str(self.op1))
self.assertEqual('x + (y)', str(self.op2))
self.assertEqual('(x) + (y)', str(self.op3))
self.assertEqual('x + y', str(self.op4))
self.assertEqual('(x) + y', str(self.op5))
self.assertEqual('(x) + (y)', str(self.op6))
def test_eval(self):
self.assertEqual((1, 1), self.op1.eval())
class TestUnaryOperator(unittest.TestCase):
def setUp(self):
x = T.Variable(1, 'x')
def f(x):
return x,
self.op1 = T.UnaryOperator(8, x, '-', f)
self.op2 = T.UnaryOperator(9, x, '-', f)
def test_str(self):
self.assertEqual('-x', str(self.op1))
self.assertEqual('-(x)', str(self.op2))
def test_eval(self):
self.assertEqual((1, ), self.op1.eval())
class TestOperators(unittest.TestCase):
def setUp(self):
self.x = T.Variable(1, 'x')
self.y = T.Variable(1, 'y')
def test_str(self):
x = self.x
y = self.y
self.assertEqual('x + y', str(x + y))
self.assertEqual('1 + x', str(1 + x))
self.assertEqual('x - y', str(x - y))
self.assertEqual('1 - x', str(1 - x))
self.assertEqual('x * y', str(x * y))
self.assertEqual('1 * x', str(1 * x))
self.assertEqual('x / y', str(x / y))
self.assertEqual('1 / x', str(1 / x))
self.assertEqual('x // y', str(x // y))
self.assertEqual('1 // x', str(1 // x))
self.assertEqual('x % y', str(x % y))
self.assertEqual('1 % x', str(1 % x))
self.assertEqual('x ** y', str(x ** y))
self.assertEqual('x ** y', str(pow(x, y)))
self.assertEqual('x << y', str(x << y))
self.assertEqual('1 << x', str(1 << x))
self.assertEqual('x >> y', str(x >> y))
self.assertEqual('1 >> x', str(1 >> x))
self.assertEqual('x & y', str(x & y))
self.assertEqual('1 & x', str(1 & x))
self.assertEqual('x ^ y', str(x ^ y))
self.assertEqual('1 ^ x', str(1 ^ x))
self.assertEqual('x | y', str(x | y))
self.assertEqual('1 | x', str(1 | x))
self.assertEqual('-x', str(-x))
self.assertEqual('+x', str(+x))
self.assertEqual('~x', str(~x))
# left-associative
self.assertEqual('x + x - x', str(x + x - x))
self.assertEqual('x + (x - x)', str(x + (x - x)))
self.assertEqual('x << (x << x)', str(x << (x << x)))
# right-associative
self.assertEqual('x ** x ** x', str(x ** x ** x))
self.assertEqual('x ** x ** x', str(x ** (x ** x)))
self.assertEqual('(x ** x) ** x', str((x ** x) ** x))
self.assertEqual('-(x + x)', str(-(x + x)))
# pow has higher priority than unary operators
self.assertEqual('-x ** x', str(-x ** x))
self.assertEqual('(-x) ** x', str((-x) ** x))
def test_priority(self):
x = self.x
y = self.y
self.assertTrue((x << y).priority == (x >> y).priority)
self.assertTrue((x + y).priority == (x - y).priority)
self.assertTrue((x * y).priority ==
(x / y).priority ==
(x // y).priority ==
(x % y).priority)
self.assertTrue((-x).priority == (+x).priority == (~x).priority)
self.assertTrue((x | y).priority <
(x ^ y).priority <
(x & y).priority <
(x << y).priority <
(x + y).priority <
(x * y).priority <
(-x).priority <
(x ** y).priority <
x.priority)
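# Reading the chain above (a summary of these assertions only): '|' binds
# loosest and '**' binds tightest among the binary operators, and a bare
# variable (an atom) outranks them all -- mirroring Python's own
# operator-precedence table.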
class TestDivOperator(unittest.TestCase):
def setUp(self):
self.x = T.Variable(1, 'x')
self.y = T.Variable(2, 'y')
def test_div(self):
# Behavior of '/' operator for int depends on the version of Python
if sys.version_info < (3, 0, 0):
self.assertEqual(0, (self.x / self.y).eval())
else:
self.assertEqual(0.5, (self.x / self.y).eval())
class TestGetType(unittest.TestCase):
def test_empty(self):
ts = T.get_types((), 'name', False)
self.assertIsInstance(ts, T.TypeInfoTuple)
self.assertEqual(0, len(ts))
self.assertEqual('name', ts.name)
def test_simple(self):
data = (numpy.zeros((1, 2, 3)).astype(numpy.float32),)
ts = T.get_types(data, 'name', False)
self.assertIsInstance(ts, T.TypeInfoTuple)
self.assertEqual(1, len(ts))
self.assertEqual('name', ts.name)
t = ts[0]
self.assertIsInstance(t, T.Expr)
self.assertEqual(1, t.shape[0].eval())
self.assertEqual(2, t.shape[1].eval())
self.assertEqual(3, t.shape[2].eval())
self.assertEqual(3, t.ndim.eval())
self.assertEqual(numpy.float32, t.dtype.eval())
def test_invalid_arg(self):
with self.assertRaises(AssertionError):
T.get_types(1, 'name', False)
class TestBoolBinaryOperator(unittest.TestCase):
def setUp(self):
x = T.Variable(1, 'x')
y = T.Variable(1, 'y')
z = T.Variable(2, 'z')
def f(x, y):
return x == y
self.op1 = T.BoolBinaryOperator(x, y, '==', '!=', f)
self.op2 = T.BoolBinaryOperator(x, z, '==', '!=', f)
def test_eval(self):
self.assertTrue(self.op1.eval())
def test_expect(self):
with self.assertRaises(T.InvalidType):
self.op2.expect()
def test_bool(self):
with self.assertRaises(RuntimeError):
bool(self.op1)
def test_bool_operator(self):
with self.assertRaises(RuntimeError):
not self.op1
class TestLazyGetItem(unittest.TestCase):
def setUp(self):
self.t = T.Constant(0)
def test_evaluate_size(self):
# __getitem__, __getattr__ and __call__ only build syntax trees; they
# are not evaluated yet
self.assertIsInstance(self.t[1], T.Expr)
self.assertIsInstance(self.t.x, T.Expr)
self.assertIsInstance(self.t(), T.Expr)
# an error is raised at evaluation time
with self.assertRaises(TypeError):
self.t[1].eval()
with self.assertRaises(AttributeError):
self.t.x.eval()
with self.assertRaises(TypeError):
self.t().eval()
class TestListItem(unittest.TestCase):
def test_eval_list_items(self):
self.assertTrue((T.Constant([0]) == [T.Constant(0)]).eval())
def test_list_str(self):
self.assertEqual('[0]', T._repr([T.Constant(0)]))
def test_eval_tuple_items(self):
self.assertTrue((T.Constant((0,)) == (T.Constant(0),)).eval())
def test_tuple_str(self):
self.assertEqual('()', T._repr(()))
self.assertEqual('(0,)', T._repr((T.Constant(0),)))
self.assertEqual('(0, 0)', T._repr((T.Constant(0), T.Constant(0))))
def test_eval_nest_list(self):
self.assertTrue((T.Constant([[0]]) == [[T.Constant(0)]]).eval())
def test_nest_list_str(self):
self.assertEqual('[[0]]', T._repr([[T.Constant(0)]]))
class TestProd(unittest.TestCase):
def test_name(self):
p = T.prod([])
self.assertEqual(str(p), 'prod([])')
def test_value(self):
value = T.prod([2, 3]).eval()
self.assertEqual(value, 6)
class TestSameTypes(unittest.TestCase):
def test_all_numpy_array(self):
x = numpy.array([0])
y = numpy.array([1])
z = numpy.array([2])
self.assertTrue(T.same_types(x, y, z))
def test_all_numpy_subclasses(self):
x = numpy.array([0])
y = numpy.array([[1], [2]])
z = numpy.matrix("3,4; 5,6")
self.assertTrue(T.same_types(x, y, z))
@attr.gpu
def test_all_cupy_array(self):
x = cuda.cupy.array([0])
y = cuda.cupy.array([1])
z = cuda.cupy.array([2])
self.assertTrue(T.same_types(x, y, z))
@attr.gpu
def test_numpy_cupy_mixed_1(self):
x = numpy.array([0])
y = cuda.cupy.array([1])
z = numpy.array([2])
self.assertFalse(T.same_types(x, y, z))
@attr.gpu
def test_numpy_cupy_mixed_2(self):
x = cuda.cupy.array([0])
y = numpy.array([1])
z = cuda.cupy.array([2])
self.assertFalse(T.same_types(x, y, z))
testing.run_module(__name__, __file__)
| mit |
JamesTFarrington/flask | flask/debughelpers.py | 318 | 6024 | # -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string, text_type
from .app import Flask
from .blueprints import Blueprint
from .globals import _request_ctx_stack
class UnexpectedUnicodeError(AssertionError, UnicodeError):
"""Raised in places where we want some better error reporting for
unexpected unicode or binary data.
"""
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
"""Raised from request.files during debugging. The idea is that it can
provide a better error message than just a generic KeyError/BadRequest.
"""
def __init__(self, request, key):
form_matches = request.form.getlist(key)
buf = ['You tried to access the file "%s" in the request.files '
'dictionary but it does not exist. The mimetype for the request '
'is "%s" instead of "multipart/form-data" which means that no '
'file contents were transmitted. To fix this error you should '
'provide enctype="multipart/form-data" in your form.' %
(key, request.mimetype)]
if form_matches:
buf.append('\n\nThe browser instead transmitted some file names. '
'This was submitted: %s' % ', '.join('"%s"' % x
for x in form_matches))
self.msg = ''.join(buf)
def __str__(self):
return self.msg
class FormDataRoutingRedirect(AssertionError):
"""This exception is raised by Flask in debug mode if it detects a
redirect caused by the routing system when the request method is not
GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
"""
def __init__(self, request):
exc = request.routing_exception
buf = ['A request was sent to this URL (%s) but a redirect was '
'issued automatically by the routing system to "%s".'
% (request.url, exc.new_url)]
# In case just a slash was appended we can be extra helpful
if request.base_url + '/' == exc.new_url.split('?')[0]:
buf.append(' The URL was defined with a trailing slash so '
'Flask will automatically redirect to the URL '
'with the trailing slash if it was accessed '
'without one.')
buf.append(' Make sure to directly send your %s-request to this URL '
'since we can\'t make browsers or HTTP clients redirect '
'with form data reliably or without user interaction.' %
request.method)
buf.append('\n\nNote: this exception is only raised in debug mode')
AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
def attach_enctype_error_multidict(request):
"""Since Flask 0.8 we're monkeypatching the files object in case a
request is detected that does not use multipart form data but the files
object is accessed.
"""
oldcls = request.files.__class__
class newcls(oldcls):
def __getitem__(self, key):
try:
return oldcls.__getitem__(self, key)
except KeyError:
if key not in request.form:
raise
raise DebugFilesKeyError(request, key)
newcls.__name__ = oldcls.__name__
newcls.__module__ = oldcls.__module__
request.files.__class__ = newcls
def _dump_loader_info(loader):
yield 'class: %s.%s' % (type(loader).__module__, type(loader).__name__)
for key, value in sorted(loader.__dict__.items()):
if key.startswith('_'):
continue
if isinstance(value, (tuple, list)):
if not all(isinstance(x, (str, text_type)) for x in value):
continue
yield '%s:' % key
for item in value:
yield ' - %s' % item
continue
elif not isinstance(value, (str, text_type, int, float, bool)):
continue
yield '%s: %r' % (key, value)
def explain_template_loading_attempts(app, template, attempts):
"""This should help developers understand what failed"""
info = ['Locating template "%s":' % template]
total_found = 0
blueprint = None
reqctx = _request_ctx_stack.top
if reqctx is not None and reqctx.request.blueprint is not None:
blueprint = reqctx.request.blueprint
for idx, (loader, srcobj, triple) in enumerate(attempts):
if isinstance(srcobj, Flask):
src_info = 'application "%s"' % srcobj.import_name
elif isinstance(srcobj, Blueprint):
src_info = 'blueprint "%s" (%s)' % (srcobj.name,
srcobj.import_name)
else:
src_info = repr(srcobj)
info.append('% 5d: trying loader of %s' % (
idx + 1, src_info))
for line in _dump_loader_info(loader):
info.append(' %s' % line)
if triple is None:
detail = 'no match'
else:
detail = 'found (%r)' % (triple[1] or '<string>')
total_found += 1
info.append(' -> %s' % detail)
seems_fishy = False
if total_found == 0:
info.append('Error: the template could not be found.')
seems_fishy = True
elif total_found > 1:
info.append('Warning: multiple loaders returned a match for the template.')
seems_fishy = True
if blueprint is not None and seems_fishy:
info.append(' The template was looked up from an endpoint that '
'belongs to the blueprint "%s".' % blueprint)
info.append(' Maybe you did not place a template in the right folder?')
info.append(' See http://flask.pocoo.org/docs/blueprints/#templates')
app.logger.info('\n'.join(info))
| bsd-3-clause |
PeterDaveHello/eden | modules/templates/CRMT/controllers.py | 20 | 3102 | # -*- coding: utf-8 -*-
from gluon import current
#from gluon.html import *
from gluon.storage import Storage
from s3 import S3CustomController
THEME = "CRMT"
# =============================================================================
class index(S3CustomController):
""" Custom Home Page """
def __call__(self):
output = {}
# Latest Activities
db = current.db
s3db = current.s3db
atable = s3db.project_activity
query = (atable.deleted == False)
output["total_activities"] = db(query).count()
#gtable = s3db.gis_location
#query &= (atable.location_id == gtable.id)
ogtable = s3db.org_group
ltable = s3db.project_activity_group
query &= (atable.id == ltable.activity_id) & \
(ogtable.id == ltable.group_id)
rows = db(query).select(atable.id,
atable.name,
atable.date,
#gtable.L3,
ogtable.name,
limitby = (0, 3),
orderby = ~atable.date
)
latest_activities = []
current.deployment_settings.L10n.date_format = "%d %b %y"
drepresent = atable.date.represent
for row in rows:
date = row["project_activity.date"]
if date:
nice_date = drepresent(date)
else:
nice_date = ""
latest_activities.append(Storage(id = row["project_activity.id"],
name = row["project_activity.name"],
date = nice_date,
date_iso = date or "",
org_group = row["org_group.name"],
#location = row["gis_location.L3"],
))
output["latest_activities"] = latest_activities
# Which Map should we link to in "Know your community"?
auth = current.auth
table = s3db.gis_config
if auth.is_logged_in() and auth.user.org_group_id:
# Coalition Map
ogtable = s3db.org_group
og = db(ogtable.id == auth.user.org_group_id).select(ogtable.pe_id,
limitby=(0, 1)
).first()
query = (table.pe_id == og.pe_id)
else:
# Default Map
query = (table.uuid == "SITE_DEFAULT")
config = db(query).select(table.id,
limitby=(0, 1)
).first()
try:
output["config_id"] = config.id
except:
output["config_id"] = None
self._view(THEME, "index.html")
return output
# END =========================================================================
| mit |
2014c2g12/c2g12 | c2wp/w2/static/Brython2.0.0-20140209-164925/Lib/ui/slider.py | 111 | 2275 | import widget
from browser import doc,html
class Slider(widget.Widget):
def __init__(self, id=None, label=False):
self._div_shell=html.DIV(Class="ui-slider ui-slider-horizontal ui-widget ui-widget-content ui-corner-all")
widget.Widget.__init__(self, self._div_shell, 'slider', id)
self._handle=html.A(Class="ui-slider-handle ui-state-default ui-corner-all",
Href='#', style={'left': '0px'})
self._value=0
self._isMouseDown=False
def startSlide(e):
self._isMouseDown=True
self._upperBound = self._div_shell.offsetWidth - self._handle.offsetWidth
pos = widget.getMousePosition(e)
self._startMouseX=pos['x']
self._lastElementLeft = int(self._handle.style.left.replace('px', ''))  # parseInt is not a Python builtin; strip the 'px' suffix instead
updatePosition(e)
def updatePosition(e):
pos = widget.getMousePosition(e)
#print('mouse pos', pos)
_newPos = self._lastElementLeft + pos['x'] - self._startMouseX
_newPos = max(0, _newPos)
_newPos = min(_newPos, self._upperBound)
self._handle.style.left = '%spx' % _newPos
#print('new position',self._handle.style.left)
self._lastElementLeft = _newPos
def moving(e):
if self._isMouseDown:
updatePosition(e)
def dropCallback(e):
self._isMouseDown=False
self._handle.unbind('mousemove', moving)
self._handle.bind('mousemove', moving)
self._handle.bind('mouseup', dropCallback)
#self._handle.bind('mouseout', dropCallback)
self._handle.bind('mousedown', startSlide)
def mouseover(e):
_class=self._handle.getAttribute('class')
self._handle.setAttribute('class', '%s %s' % (_class, 'ui-state-hover'))
def mouseout(e):
self._isMouseDown=False
_class=self._handle.getAttribute('class')
self._handle.setAttribute('class', _class.replace('ui-state-hover', ''))
self._handle.bind('mouseover', mouseover)
self._handle.bind('mouseout', mouseout)
self._div_shell <= self._handle
def get_value(self):
return self._value
#def set_value(self, value):
# self._value=value
# self._handle.style.left='%spx' % value
| gpl-2.0 |
uclaros/QGIS | tests/src/python/test_selective_masking.py | 22 | 29190 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsVirtualLayerDefinition
From build dir, run: ctest -R PyQgsSelectiveMasking -V
QGIS_PREFIX_PATH=/home/hme/src/QGIS/build_ninja/output PYTHONPATH=/home/hme/src/QGIS/build_ninja/output/python/:/home/hme/src/QGIS/build_ninja/output/python/plugins:/home/hme/src/QGIS/tests/src/python python3 ~/src/QGIS/tests/src/python/test_selective_masking.py
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Hugo Mercier / Oslandia'
__date__ = '28/06/2019'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import (
QSize,
QRectF,
QDir
)
from qgis.PyQt.QtGui import (
QColor,
QImage,
QPainter
)
from qgis.testing import unittest, start_app
from utilities import (
unitTestDataPath,
getTempfilePath,
renderMapToImage,
loadTestFonts,
getTestFont,
openInBrowserTab
)
from qgis.core import (
QgsMapSettings,
QgsCoordinateReferenceSystem,
QgsRectangle,
QgsProject,
QgsSymbolLayerReference,
QgsMapRendererParallelJob,
QgsMapRendererSequentialJob,
QgsRenderChecker,
QgsSimpleMarkerSymbolLayer,
QgsSimpleMarkerSymbolLayerBase,
QgsMarkerSymbol,
QgsMaskMarkerSymbolLayer,
QgsSingleSymbolRenderer,
QgsSymbolLayerId,
QgsSymbolLayerUtils,
QgsMapRendererCache,
QgsUnitTypes,
QgsOuterGlowEffect,
QgsPalLayerSettings,
QgsRuleBasedLabeling,
QgsProperty,
QgsRenderContext,
QgsVectorLayerSimpleLabeling,
QgsLayout,
QgsLayoutItemPage,
QgsLayoutSize,
QgsLayoutItemMap,
QgsLayoutExporter,
QgsWkbTypes,
)
def renderMapToImageWithTime(mapsettings, parallel=False, cache=None):
"""
Render current map to an image, via multi-threaded renderer
:param QgsMapSettings mapsettings:
:param bool parallel: Do parallel or sequential render job
:rtype: QImage
"""
if parallel:
job = QgsMapRendererParallelJob(mapsettings)
else:
job = QgsMapRendererSequentialJob(mapsettings)
if cache:
job.setCache(cache)
job.start()
job.waitForFinished()
return (job.renderedImage(), job.renderingTime())
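# Illustrative usage (hypothetical `settings` object, mirroring how the
# tests below call this helper):
#     cache = QgsMapRendererCache()
#     img, elapsed_ms = renderMapToImageWithTime(settings, parallel=True, cache=cache)
#     img.save('/tmp/render.png')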
class TestSelectiveMasking(unittest.TestCase):
def setUp(self):
self.checker = QgsRenderChecker()
self.checker.setControlPathPrefix("selective_masking")
self.report = "<h1>Python Selective Masking Tests</h1>\n"
self.map_settings = QgsMapSettings()
crs = QgsCoordinateReferenceSystem('epsg:4326')
extent = QgsRectangle(-123.0, 22.7, -76.4, 46.9)
self.map_settings.setBackgroundColor(QColor(152, 219, 249))
self.map_settings.setOutputSize(QSize(420, 280))
self.map_settings.setOutputDpi(72)
self.map_settings.setFlag(QgsMapSettings.Antialiasing, True)
self.map_settings.setFlag(QgsMapSettings.UseAdvancedEffects, False)
self.map_settings.setDestinationCrs(crs)
self.map_settings.setExtent(extent)
# load a predefined QGIS project
self.assertTrue(QgsProject.instance().read(os.path.join(unitTestDataPath(), "selective_masking.qgs")))
self.points_layer = QgsProject.instance().mapLayersByName('points')[0]
self.lines_layer = QgsProject.instance().mapLayersByName('lines')[0]
# line layer with subsymbols
self.lines_layer2 = QgsProject.instance().mapLayersByName('lines2')[0]
# line layer with labels
self.lines_with_labels = QgsProject.instance().mapLayersByName('lines_with_labels')[0]
self.polys_layer = QgsProject.instance().mapLayersByName('polys')[0]
# polygon layer with a rule based labeling
self.polys_layer2 = QgsProject.instance().mapLayersByName('polys2')[0]
# try to pin the font wherever labels are defined,
# in order to make the image comparison tests more stable
for layer in [self.polys_layer, self.lines_with_labels, self.polys_layer2]:
for provider in layer.labeling().subProviders():
settings = layer.labeling().settings(provider)
font = getTestFont()
font.setPointSize(32)
fmt = settings.format()
fmt.setFont(font)
fmt.setNamedStyle('Roman')
fmt.setSize(32)
fmt.setSizeUnit(QgsUnitTypes.RenderPoints)
settings.setFormat(fmt)
if (layer.geometryType() == QgsWkbTypes.PolygonGeometry):
settings.placement = QgsPalLayerSettings.OverPoint
layer.labeling().setSettings(settings, provider)
# order layers for rendering
self.map_settings.setLayers([self.points_layer, self.lines_layer, self.polys_layer])
def tearDown(self):
report_file_path = "%s/qgistest.html" % QDir.tempPath()
with open(report_file_path, 'a') as report_file:
report_file.write(self.report)
def check_renderings(self, map_settings, control_name):
"""Test a rendering with different configurations:
- parallel rendering, no cache
- sequential rendering, no cache
- parallel rendering, with cache (rendered two times)
- sequential rendering, with cache (rendered two times)
"""
for do_parallel in [False, True]:
for use_cache in [False, True]:
print("=== parallel", do_parallel, "cache", use_cache)
tmp = getTempfilePath('png')
cache = None
if use_cache:
cache = QgsMapRendererCache()
# render a first time to fill the cache
renderMapToImageWithTime(self.map_settings, parallel=do_parallel, cache=cache)
img, t = renderMapToImageWithTime(self.map_settings, parallel=do_parallel, cache=cache)
img.save(tmp)
print("Image rendered in {}".format(tmp))
self.checker.setControlName(control_name)
self.checker.setRenderedImage(tmp)
suffix = "_parallel" if do_parallel else "_sequential"
res = self.checker.compareImages(control_name + suffix)
self.report += self.checker.report()
self.assertTrue(res)
print("=== Rendering took {}s".format(float(t) / 1000.0))
def test_label_mask(self):
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
format = self.polys_layer.labeling().settings().format()
self.assertTrue(format.mask().enabled())
self.check_renderings(self.map_settings, "label_mask")
def test_multiple_label_masks_different_sets(self):
# modify labeling settings of the polys layer
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_with_labels.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
format = self.polys_layer.labeling().settings().format()
self.assertTrue(format.mask().enabled())
# modify labeling settings of the lines layer
label_settings = self.lines_with_labels.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# polygons
QgsSymbolLayerReference(self.polys_layer.id(), QgsSymbolLayerId("", 0)),
])
label_settings.setFormat(fmt)
self.lines_with_labels.labeling().setSettings(label_settings)
# new map settings with a line symbology that has labels
self.map_settings.setLayers([self.points_layer, self.lines_with_labels, self.polys_layer])
self.check_renderings(self.map_settings, "multiple_label_masks_different_sets")
# restore map settings
self.map_settings.setLayers([self.points_layer, self.lines_layer, self.polys_layer])
def test_multiple_label_masks_same_set(self):
# modify labeling settings of the polys layer
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_with_labels.id(), QgsSymbolLayerId("", 0)),
])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
format = self.polys_layer.labeling().settings().format()
self.assertTrue(format.mask().enabled())
# modify labeling settings of the lines layer
label_settings = self.lines_with_labels.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_with_labels.id(), QgsSymbolLayerId("", 0)),
])
label_settings.setFormat(fmt)
self.lines_with_labels.labeling().setSettings(label_settings)
# new map settings with a line symbology that has labels
self.map_settings.setLayers([self.points_layer, self.lines_with_labels, self.polys_layer])
self.check_renderings(self.map_settings, "multiple_label_masks_same_set")
# restore map settings
self.map_settings.setLayers([self.points_layer, self.lines_layer, self.polys_layer])
def test_label_mask_subsymbol(self):
# new map settings with a line symbology that has sub symbols
self.map_settings.setLayers([self.points_layer, self.lines_layer2, self.polys_layer])
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# mask only vertical segments of "roads"
QgsSymbolLayerReference(self.lines_layer2.id(), QgsSymbolLayerId("", [1, 0])),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
format = self.polys_layer.labeling().settings().format()
self.assertTrue(format.mask().enabled())
self.check_renderings(self.map_settings, "label_mask_subsymbol")
# restore original map settings
self.map_settings.setLayers([self.points_layer, self.lines_layer, self.polys_layer])
def test_label_mask_dd(self):
""" test label mask with data defined properties """
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
fmt.mask().setEnabled(False)
fmt.mask().setSize(1.0)
fmt.mask().setOpacity(0.42)
# mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
# overwrite with data-defined properties
fmt.dataDefinedProperties().setProperty(QgsPalLayerSettings.MaskEnabled, QgsProperty.fromExpression('1'))
fmt.dataDefinedProperties().setProperty(QgsPalLayerSettings.MaskBufferSize, QgsProperty.fromExpression('4.0'))
fmt.dataDefinedProperties().setProperty(QgsPalLayerSettings.MaskOpacity, QgsProperty.fromExpression('100.0'))
context = QgsRenderContext()
fmt.updateDataDefinedProperties(context)
self.assertEqual(fmt.mask().enabled(), True)
self.assertEqual(fmt.mask().size(), 4.0)
self.assertEqual(fmt.mask().opacity(), 1.0)
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
self.check_renderings(self.map_settings, "label_mask")
def test_label_mask_rule_labeling(self):
# new map settings with a rule based labeling
self.map_settings.setLayers([self.points_layer, self.lines_layer, self.polys_layer2])
# modify labeling settings of one rule
for child in self.polys_layer2.labeling().rootRule().children():
if child.description() == 'Tadam':
break
label_settings = child.settings()
label_settings.priority = 3
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
label_settings.setFormat(fmt)
child.setSettings(label_settings)
# modify labeling settings of another rule
for child in self.polys_layer2.labeling().rootRule().children():
if child.description() != 'Tadam':
break
label_settings = child.settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the polygons
QgsSymbolLayerReference(self.polys_layer2.id(), QgsSymbolLayerId("", 0)),
])
label_settings.setFormat(fmt)
child.setSettings(label_settings)
self.check_renderings(self.map_settings, "rule_label_mask")
# restore map settings
self.map_settings.setLayers([self.points_layer, self.lines_layer, self.polys_layer])
def test_label_mask_symbol_levels(self):
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
format = self.polys_layer.labeling().settings().format()
self.assertTrue(format.mask().enabled())
# enable symbol levels
self.lines_layer.renderer().setUsingSymbolLevels(True)
self.check_renderings(self.map_settings, "label_mask_symbol_levels")
def test_symbol_layer_mask(self):
p = QgsMarkerSymbol.createSimple({'color': '#fdbf6f', 'size': "7"})
self.points_layer.setRenderer(QgsSingleSymbolRenderer(p))
circle_symbol = QgsMarkerSymbol.createSimple({'size': '10'})
mask_layer = QgsMaskMarkerSymbolLayer()
mask_layer.setSubSymbol(circle_symbol)
mask_layer.setMasks([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
])
# add this mask layer to the point layer
self.points_layer.renderer().symbol().appendSymbolLayer(mask_layer)
self.check_renderings(self.map_settings, "sl_mask")
def test_multiple_masks_same_symbol_layer(self):
"""Test multiple masks that occlude the same symbol layer"""
#
# 1. a symbol layer mask
#
p = QgsMarkerSymbol.createSimple({'color': '#fdbf6f', 'size': "7"})
self.points_layer.setRenderer(QgsSingleSymbolRenderer(p))
circle_symbol = QgsMarkerSymbol.createSimple({'size': '10'})
mask_layer = QgsMaskMarkerSymbolLayer()
mask_layer.setSubSymbol(circle_symbol)
mask_layer.setMasks([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
])
# add this mask layer to the point layer
self.points_layer.renderer().symbol().appendSymbolLayer(mask_layer)
#
# 2. a label mask
#
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0))
])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
self.check_renderings(self.map_settings, "multiple_masks_same_sl")
def test_multiple_masks_different_symbol_layers_same_layer(self):
"""Test multiple masks that occlude different symbol layers of the same layer.
The UI should disallow these settings. We test here that only one mask is retained"""
#
# 1. a symbol layer mask
#
p = QgsMarkerSymbol.createSimple({'color': '#fdbf6f', 'size': "7"})
self.points_layer.setRenderer(QgsSingleSymbolRenderer(p))
circle_symbol = QgsMarkerSymbol.createSimple({'size': '10'})
mask_layer = QgsMaskMarkerSymbolLayer()
mask_layer.setSubSymbol(circle_symbol)
mask_layer.setMasks([
# the yellow part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 1)),
])
# add this mask layer to the point layer
self.points_layer.renderer().symbol().appendSymbolLayer(mask_layer)
#
# 2. a label mask
#
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0))
])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
self.check_renderings(self.map_settings, "multiple_masks_different_sl")
def test_multiple_masks_different_symbol_layers_same_layer2(self):
"""Test multiple masks that occlude different symbol layers of the same layer - 2nd possible order
The UI should disallow these settings. We test here that only one mask is retained"""
#
# 1. a symbol layer mask
#
p = QgsMarkerSymbol.createSimple({'color': '#fdbf6f', 'size': "7"})
self.points_layer.setRenderer(QgsSingleSymbolRenderer(p))
circle_symbol = QgsMarkerSymbol.createSimple({'size': '10'})
mask_layer = QgsMaskMarkerSymbolLayer()
mask_layer.setSubSymbol(circle_symbol)
mask_layer.setMasks([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
])
# add this mask layer to the point layer
self.points_layer.renderer().symbol().appendSymbolLayer(mask_layer)
#
# 2. a label mask
#
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the yellow part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 1))
])
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
self.check_renderings(self.map_settings, "multiple_masks_different_sl2")
def test_mask_symbollayer_preview(self):
#
# Masks should be visible in previews
#
p = QgsMarkerSymbol.createSimple({'color': '#fdbf6f', 'size': "7"})
circle_symbol = QgsMarkerSymbol.createSimple({'size': '10'})
mask_layer = QgsMaskMarkerSymbolLayer()
mask_layer.setSubSymbol(circle_symbol)
p.insertSymbolLayer(0, mask_layer)
for control_name, render_function in [
("as_image", lambda: p.asImage(QSize(64, 64)).save(tmp)),
("as_big_preview", lambda: p.bigSymbolPreviewImage().save(tmp)),
("sl_preview", lambda:
QgsSymbolLayerUtils.symbolLayerPreviewIcon(mask_layer,
QgsUnitTypes.RenderPixels,
QSize(64, 64)).pixmap(QSize(64, 64)).save(tmp))
]:
tmp = getTempfilePath('png')
render_function()
self.checker.setControlName(control_name)
self.checker.setRenderedImage(tmp)
res = self.checker.compareImages(control_name, 90)
self.report += self.checker.report()
self.assertTrue(res)
def test_mask_with_effect(self):
p = QgsMarkerSymbol.createSimple({'color': '#fdbf6f', 'size': "7"})
self.points_layer.setRenderer(QgsSingleSymbolRenderer(p))
circle_symbol = QgsMarkerSymbol.createSimple({'size': '12'})
mask_layer = QgsMaskMarkerSymbolLayer()
mask_layer.setSubSymbol(circle_symbol)
mask_layer.setMasks([
# the yellow part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 1)),
])
# add an outer glow effect to the mask layer
blur = QgsOuterGlowEffect.create({"enabled": "1",
"blur_level": "6.445",
"blur_unit": "MM",
"opacity": "1",
"spread": "0.6",
"spread_unit": "MM",
"color1": "0,0,255,255",
"draw_mode": "2"
})
mask_layer.setPaintEffect(blur)
# add this mask layer to the point layer
self.points_layer.renderer().symbol().appendSymbolLayer(mask_layer)
self.check_renderings(self.map_settings, "mask_with_effect")
def test_label_mask_with_effect(self):
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
# add an outer glow effect to the mask
blur = QgsOuterGlowEffect.create({"enabled": "1",
"blur_level": "6.445",
"blur_unit": "MM",
"opacity": "1",
"spread": "0.6",
"spread_unit": "MM",
"color1": "0,0,255,255",
"draw_mode": "2"
})
fmt.mask().setPaintEffect(blur)
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
format = self.polys_layer.labeling().settings().format()
self.assertTrue(format.mask().enabled())
self.check_renderings(self.map_settings, "label_mask_with_effect")
def test_layout_exports(self):
"""Test mask effects in a layout export at 300 dpi"""
# modify labeling settings
label_settings = self.polys_layer.labeling().settings()
fmt = label_settings.format()
# enable a mask
fmt.mask().setEnabled(True)
fmt.mask().setSize(4.0)
# and mask other symbol layers underneath
fmt.mask().setMaskedSymbolLayers([
# the black part of roads
QgsSymbolLayerReference(self.lines_layer.id(), QgsSymbolLayerId("", 0)),
# the black jets
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("B52", 0)),
QgsSymbolLayerReference(self.points_layer.id(), QgsSymbolLayerId("Jet", 0))])
# add an outer glow effect to the mask
blur = QgsOuterGlowEffect.create({"enabled": "1",
"blur_level": "6.445",
"blur_unit": "MM",
"opacity": "1",
"spread": "0.6",
"spread_unit": "MM",
"color1": "0,0,255,255",
"draw_mode": "2"
})
fmt.mask().setPaintEffect(blur)
label_settings.setFormat(fmt)
self.polys_layer.labeling().setSettings(label_settings)
layout = QgsLayout(QgsProject.instance())
page = QgsLayoutItemPage(layout)
page.setPageSize(QgsLayoutSize(50, 33))
layout.pageCollection().addPage(page)
map = QgsLayoutItemMap(layout)
map.attemptSetSceneRect(QRectF(1, 1, 48, 32))
map.setFrameEnabled(True)
layout.addLayoutItem(map)
map.setExtent(self.lines_layer.extent())
map.setLayers([self.points_layer, self.lines_layer, self.polys_layer])
image = QImage(591, 591, QImage.Format_RGB32)
image.setDotsPerMeterX(300 / 25.3 * 1000)
image.setDotsPerMeterY(300 / 25.3 * 1000)
image.fill(0)
p = QPainter(image)
exporter = QgsLayoutExporter(layout)
exporter.renderPage(p, 0)
p.end()
tmp = getTempfilePath('png')
image.save(tmp)
control_name = "layout_export"
self.checker.setControlName(control_name)
self.checker.setRenderedImage(tmp)
res = self.checker.compareImages(control_name)
self.report += self.checker.report()
self.assertTrue(res)
if __name__ == '__main__':
start_app()
unittest.main()
| gpl-2.0 |
CapOM/ChromiumGStreamerBackend | chrome/test/chromedriver/embed_js_in_cpp.py | 165 | 1485 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Embeds standalone JavaScript snippets in C++ code.
Each argument to the script must be a file containing an associated JavaScript
function (e.g., evaluate_script.js should contain an evaluateScript function).
This is called the exported function of the script. The entire script will be
put into a C-style string in the form of an anonymous function which invokes
the exported function when called.
"""
import optparse
import os
import sys
import cpp_source
def main():
parser = optparse.OptionParser()
parser.add_option(
'', '--directory', type='string', default='.',
help='Path to directory where the cc/h js file should be created')
options, args = parser.parse_args()
global_string_map = {}
for js_file in args:
base_name = os.path.basename(js_file)[:-3].title().replace('_', '')
func_name = base_name[0].lower() + base_name[1:]
script_name = 'k%sScript' % base_name
with open(js_file, 'r') as f:
contents = f.read()
script = 'function() { %s; return %s.apply(null, arguments) }' % (
contents, func_name)
global_string_map[script_name] = script
cpp_source.WriteSource('js', 'chrome/test/chromedriver/chrome',
options.directory, global_string_map)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
peremen/gzone_ics_kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
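# Note: the running average above is a repeated pairwise mean, not a true
# arithmetic mean over all samples, so it weights recent values more heavily.
# Illustrative usage (hypothetical key and values):
#   stats = {}
#   add_stats(stats, "read", 120)
#   add_stats(stats, "read", 80)   # stats["read"] -> (80, 120, 100, 2)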
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
| gpl-2.0 |
JoaquimPatriarca/senpy-for-gis | gasp/gdal/manage/general.py | 1 | 3528 | """
General tools for data management
"""
def copy_features(inLyr, outLyr, outDefn, only_geom=True):
"""
Copy the features of one layer to another layer.
If the layers share the same fields, this method can also copy
the tabular data.
TODO: Check whether the input really is a layer and handle it accordingly
"""
from osgeo import ogr
for f in inLyr:
geom = f.GetGeometryRef()
new = ogr.Feature(outDefn)
new.SetGeometry(geom)
# Copy tabular data
if not only_geom:
for i in range(0, outDefn.GetFieldCount()):
new.SetField(outDefn.GetFieldDefn(i).GetNameRef(), f.GetField(i))
outLyr.CreateFeature(new)
new.Destroy()
f.Destroy()
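# Illustrative usage (hypothetical layer objects opened elsewhere with OGR):
#   out_defn = outLyr.GetLayerDefn()
#   copy_features(inLyr, outLyr, out_defn, only_geom=False)
# copies every geometry and, field by field, the matching attribute values.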
def ogr_merge(shp_to_merge, merged_shp, srs=None, fields_to_copy=None):
"""
Merge all listed datasets into a single dataset
"""
import os
from osgeo import ogr
from gasp import goToList
from gasp.oss.info import get_filename
from gasp.gdal import get_driver_name
from gasp.gdal import get_geom_attr
from gasp.gdal.manage.fields import ogr_list_fields_defn
from gasp.gdal.proj import ogr_def_proj
# Create output
o = ogr.GetDriverByName(
get_driver_name(merged_shp)).CreateDataSource(merged_shp)
# Get SRS
if not srs:
from gasp.gdal.proj import get_shp_sref
srsObj = get_shp_sref(shp_to_merge[0])
else:
from gasp.gdal.proj import get_sref_from_epsg
srsObj = get_sref_from_epsg(srs)
olyr = o.CreateLayer(
get_filename(merged_shp, forceLower=True),
srsObj,
geom_type=get_geom_attr(
shp_to_merge[0], name=None, py_cls=True)
)
fields_to_copy = goToList(fields_to_copy)
# Add all fields existing in the inputs
fields_defn = {}
fields_shp = {}
for shp in shp_to_merge:
flds = ogr_list_fields_defn(shp)
fields_shp[shp] = flds.keys()
if not fields_to_copy:
for fld in flds:
if fld not in fields_defn:
fields_defn[fld] = flds[fld].keys()[0]
olyr.CreateField(ogr.FieldDefn(fld, flds[fld].keys()[0]))
else:
for fld in flds:
if fld not in fields_defn and fld in fields_to_copy:
fields_defn[fld] = flds[fld].keys()[0]
olyr.CreateField(ogr.FieldDefn(fld, flds[fld].keys()[0]))
# Join all features together on the same dataset
featDefn = olyr.GetLayerDefn()
for i in range(len(shp_to_merge)):
dt = ogr.GetDriverByName(
get_driver_name(shp_to_merge[i])).Open(shp_to_merge[i], 0)
lyr = dt.GetLayer()
for feat in lyr:
geom = feat.GetGeometryRef()
new = ogr.Feature(featDefn)
new.SetGeometry(geom)
for e in range(0, featDefn.GetFieldCount()):
name = featDefn.GetFieldDefn(e).GetNameRef()
if name in fields_shp[shp_to_merge[i]]:
new.SetField(name, feat.GetField(name))
olyr.CreateFeature(new)
new.Destroy()
feat.Destroy()
dt.Destroy()
o.Destroy()
return merged_shp
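# Illustrative usage (hypothetical paths and EPSG code):
#   ogr_merge(['a.shp', 'b.shp'], 'merged.shp', srs=4326,
#             fields_to_copy=['name'])
# merges both inputs into merged.shp, keeping only the 'name' attribute.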
| gpl-3.0 |
hkariti/ansible | test/runner/lib/sanity/ansible_doc.py | 16 | 3498 | """Sanity test for ansible-doc."""
from __future__ import absolute_import, print_function
import re
from lib.sanity import (
SanityMultipleVersion,
SanityFailure,
SanitySuccess,
SanitySkipped,
SanityMessage,
)
from lib.util import (
SubprocessError,
display,
intercept_command,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.config import (
SanityConfig,
)
class AnsibleDocTest(SanityMultipleVersion):
"""Sanity test for ansible-doc."""
def test(self, args, targets, python_version):
"""
:type args: SanityConfig
:type targets: SanityTargets
:type python_version: str
:rtype: SanityResult
"""
with open('test/sanity/ansible-doc/skip.txt', 'r') as skip_fd:
skip_modules = set(skip_fd.read().splitlines())
modules = sorted(set(m for i in targets.include_external for m in i.modules) -
set(m for i in targets.exclude_external for m in i.modules) -
skip_modules)
if not modules:
return SanitySkipped(self.name, python_version=python_version)
module_paths = dict((t.module, t.path) for t in targets.targets if t.module)
env = ansible_environment(args, color=False)
cmd = ['ansible-doc'] + modules
try:
stdout, stderr = intercept_command(args, cmd, target_name='ansible-doc', env=env, capture=True, python_version=python_version)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
if stderr:
errors = stderr.strip().splitlines()
messages = [self.parse_error(e, module_paths) for e in errors]
if messages and all(messages):
return SanityFailure(self.name, messages=messages, python_version=python_version)
if status:
summary = u'%s' % SubprocessError(cmd=cmd, status=status, stderr=stderr)
return SanityFailure(self.name, summary=summary, python_version=python_version)
if stdout:
display.info(stdout.strip(), verbosity=3)
if stderr:
summary = u'Output on stderr from ansible-doc is considered an error.\n\n%s' % SubprocessError(cmd, stderr=stderr)
return SanityFailure(self.name, summary=summary, python_version=python_version)
return SanitySuccess(self.name, python_version=python_version)
@staticmethod
def parse_error(error, module_paths):
"""
:type error: str
:type module_paths: dict[str, str]
:rtype: SanityMessage | None
"""
# example error messages from lib/ansible/cli/doc.py:
# ERROR! module ping missing documentation (or could not parse documentation): expected string or buffer
# [ERROR]: module ping has a documentation error formatting or is missing documentation.
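# e.g. (module path is an assumption): the second line above parses as
# type='module', name='ping', text='has a documentation error ...', and
# 'ping' is then resolved through module_paths to its source file.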
match = re.search(r'^[^ ]*ERROR[^ ]* (?P<type>[^ ]+) (?P<name>[^ ]+) (?P<text>.*)$', error)
if match:
groups = match.groupdict()
error_type = groups['type']
error_name = groups['name']
error_text = groups['text']
if error_type == 'module' and error_name in module_paths:
return SanityMessage(
message=error_text,
path=module_paths[error_name],
)
return None
| gpl-3.0 |
ryfeus/lambda-packs | Tensorflow/source/tensorboard/plugins/distribution/distributions_plugin.py | 5 | 3411 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The TensorBoard Distributions (a.k.a. compressed histograms) plugin.
See `http_api.md` in this directory for specifications of the routes for
this plugin.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from werkzeug import wrappers
from tensorboard.backend import http_util
from tensorboard.plugins import base_plugin
from tensorboard.plugins.distribution import compressor
from tensorboard.plugins.histogram import histograms_plugin
class DistributionsPlugin(base_plugin.TBPlugin):
"""Distributions Plugin for TensorBoard.
This supports both old-style summaries (created with TensorFlow ops
that output directly to the `histo` field of the proto) and new-style
summaries (as created by the `tensorboard.plugins.histogram.summary`
module).
"""
plugin_name = 'distributions'
def __init__(self, context):
"""Instantiates DistributionsPlugin via TensorBoard core.
Args:
context: A base_plugin.TBContext instance.
"""
self._histograms_plugin = histograms_plugin.HistogramsPlugin(context)
self._multiplexer = context.multiplexer
def get_plugin_apps(self):
return {
'/distributions': self.distributions_route,
'/tags': self.tags_route,
}
def is_active(self):
"""This plugin is active iff any run has at least one histogram tag.
(The distributions plugin uses the same data source as the histogram
plugin.)
"""
return self._histograms_plugin.is_active()
def distributions_impl(self, tag, run):
"""Result of the form `(body, mime_type)`, or `ValueError`."""
(histograms, mime_type) = self._histograms_plugin.histograms_impl(
tag, run, downsample_to=None)
return ([self._compress(histogram) for histogram in histograms],
mime_type)
def _compress(self, histogram):
(wall_time, step, buckets) = histogram
converted_buckets = compressor.compress_histogram(buckets)
return [wall_time, step, converted_buckets]
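# Assumption about the helper used above: compressor.compress_histogram
# condenses the raw bucket list to values at a fixed set of normalized
# basis points, so each event becomes [wall_time, step, compressed_buckets].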
def index_impl(self):
return self._histograms_plugin.index_impl()
@wrappers.Request.application
def tags_route(self, request):
index = self.index_impl()
return http_util.Respond(request, index, 'application/json')
@wrappers.Request.application
def distributions_route(self, request):
"""Given a tag and single run, return an array of compressed histograms."""
tag = request.args.get('tag')
run = request.args.get('run')
try:
(body, mime_type) = self.distributions_impl(tag, run)
code = 200
except ValueError as e:
(body, mime_type) = (str(e), 'text/plain')
code = 400
return http_util.Respond(request, body, mime_type, code=code)
| mit |
telwertowski/QGIS | tests/src/python/test_qgscolorbutton.py | 35 | 3786 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsColorButton.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '25/05/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA
from qgis.gui import QgsColorButton
from qgis.core import QgsApplication, QgsProjectColorScheme
from qgis.testing import start_app, unittest
from qgis.PyQt.QtGui import QColor
from qgis.PyQt.QtTest import QSignalSpy
start_app()
class TestQgsColorButton(unittest.TestCase):
def testClearingColors(self):
"""
Test setting colors to transparent
"""
# start with a valid color
button = QgsColorButton()
button.setAllowOpacity(True)
button.setColor(QColor(255, 100, 200, 255))
self.assertEqual(button.color(), QColor(255, 100, 200, 255))
# now set to no color
button.setToNoColor()
# ensure that only the alpha channel has changed - not the other color components
self.assertEqual(button.color(), QColor(255, 100, 200, 0))
def testNulling(self):
"""
Test clearing colors to null
"""
# start with a valid color
button = QgsColorButton()
button.setAllowOpacity(True)
button.setColor(QColor(255, 100, 200, 255))
self.assertEqual(button.color(), QColor(255, 100, 200, 255))
spy_changed = QSignalSpy(button.colorChanged)
spy_cleared = QSignalSpy(button.cleared)
button.setColor(QColor(50, 100, 200, 255))
self.assertEqual(button.color(), QColor(50, 100, 200, 255))
self.assertEqual(len(spy_changed), 1)
self.assertEqual(len(spy_cleared), 0)
# now set to null
button.setToNull()
self.assertEqual(button.color(), QColor())
self.assertEqual(len(spy_changed), 2)
self.assertEqual(len(spy_cleared), 1)
button.setToNull()
self.assertEqual(button.color(), QColor())
# should not be refired, the color wasn't changed
self.assertEqual(len(spy_changed), 2)
# SHOULD be refired
self.assertEqual(len(spy_cleared), 2)
def testLinkProjectColor(self):
"""
Test linking to a project color
"""
project_scheme = [s for s in QgsApplication.colorSchemeRegistry().schemes() if isinstance(s, QgsProjectColorScheme)][0]
project_scheme.setColors([[QColor(255, 0, 0), 'col1'], [QColor(0, 255, 0), 'col2']])
button = QgsColorButton()
spy = QSignalSpy(button.unlinked)
button.setColor(QColor(0, 0, 255))
self.assertFalse(button.linkedProjectColorName())
button.linkToProjectColor('col1')
self.assertEqual(button.linkedProjectColorName(), 'col1')
self.assertEqual(button.color().name(), '#ff0000')
self.assertEqual(len(spy), 0)
button.unlink()
self.assertFalse(button.linkedProjectColorName())
self.assertEqual(button.color().name(), '#0000ff')
self.assertEqual(len(spy), 1)
button.linkToProjectColor('col2')
self.assertEqual(button.linkedProjectColorName(), 'col2')
self.assertEqual(button.color().name(), '#00ff00')
self.assertEqual(len(spy), 1)
project_scheme.setColors([[QColor(255, 0, 0), 'xcol1'], [QColor(0, 255, 0), 'xcol2']])
# linked color no longer exists
self.assertFalse(button.linkedProjectColorName())
self.assertEqual(button.color().name(), '#0000ff')
self.assertEqual(len(spy), 2)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
tingletech/solr_api | testindex/grab_random_solr_docs.py | 1 | 2229 | import solr
import random
import json
import datetime
SOLR_NEW = solr.Solr('http://127.0.0.1:8983/solr/dc-collection')
SOLR=solr.SearchHandler(solr.Solr('https://registry.cdlib.org/solr',
post_headers = { 'X-Authentication-Token':'xxxyyyzzz'}), "/query")
def get_collection_urls():
q_collections=SOLR(q="*:*", rows=0, facet_field="collection_url",
facet="true", facet_limit=20000)
facets = q_collections.facet_counts
f_fields = facets['facet_fields']
return f_fields['collection_url']
def get_random_docs(collection_urls):
docs = []
for u in collection_urls:
recs_in_coll = SOLR(q="collection_url:{}".format(u))
num = recs_in_coll.numFound
sample_size = num / 100 if num / 100 else 1
print "NUMBER:{} SAMPLE:{}".format(num, sample_size)
for i in range(sample_size):
rand_index = random.randrange(num)
q_rec = SOLR(q="collection_url:{}".format(u), rows=1, start=rand_index)
#save locally
doc_new = {}
for key, val in q_rec.results[0].items():
if '_ss' in key:
continue
if key in ['score', '_version_', 'timestamp',]:
continue
doc_new[key] = val
docs.append(doc_new)
return docs
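# Sampling note (restating the logic above): roughly 1% of each collection
# is sampled, with a floor of one document per collection; '_ss' copy-fields
# and the score/_version_/timestamp bookkeeping fields are dropped.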
def serialize_datetime(obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y%m%d-%H:%M:%S")
return obj
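# e.g. json.dumps({'d': datetime.datetime(2016, 1, 2, 3, 4, 5)},
#                 default=serialize_datetime)
# yields '{"d": "20160102-03:04:05"}'.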
def save_docs_to_file(docs, fname=None):
if not fname:
fname = 'random_docs-{}.json'.format(
datetime.datetime.now().strftime('%Y%m%d-%H%M'))
with open(fname, 'w') as foo:
foo.write(json.dumps(docs, default=serialize_datetime))
def save_to_local_solr(docs):
#put into new index: start a docker index with mapped volume for data and
#upload docs to it
for doc in docs:
print "DOC:{}".format(doc['id'])
SOLR_NEW.add(doc)  # module-level handle defined above; lowercase solr_new was undefined
SOLR_NEW.commit()
def create_new_random_test_index():
collection_urls = get_collection_urls()
docs_selected = get_random_docs(collection_urls)
save_docs_to_file(docs_selected)
save_to_local_solr(docs_selected)
if __name__=="__main__":
print 'Generate new test data set'
| bsd-3-clause |
matteo88/gasistafelice | gasistafelice/rest/views/blocks/transactions.py | 2 | 6543 | from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
from django.core import urlresolvers
from django.http import HttpResponse, HttpResponseServerError
from flexi_auth.models import ObjectWithContext
from gasistafelice.rest.views.blocks.base import BlockSSDataTables, ResourceBlockAction, CREATE_CSV
from gasistafelice.consts import VIEW_CONFIDENTIAL, CONFIDENTIAL_VERBOSE_HTML, CASH
from gasistafelice.base.templatetags.accounting_tags import human_readable_account_csv,human_readable_kind, signed_ledger_entry_amount
from django.template.loader import render_to_string
import datetime, csv
import cStringIO as StringIO
#from simple_accounting.models import economic_subject, AccountingDescriptor
#from simple_accounting.models import account_type
#from simple_accounting.exceptions import MalformedTransaction
#from simple_accounting.models import AccountingProxy
#from simple_accounting.utils import register_transaction, register_simple_transaction
#from gasistafelice.base.accounting import PersonAccountingProxy
from gasistafelice.lib.shortcuts import render_to_xml_response, render_to_context_response
#------------------------------------------------------------------------------#
# #
#------------------------------------------------------------------------------#
#OLD: ENCODING = "iso-8859-1"
class Block(BlockSSDataTables):
BLOCK_NAME = "transactions"
BLOCK_DESCRIPTION = _("Economic transactions")
BLOCK_VALID_RESOURCE_TYPES = ["gas", "supplier", "pact"]
COLUMN_INDEX_NAME_MAP = {
0: 'id',
1: 'transaction__date',
2: '',
3: '',
4: 'amount',
5: 'transaction__description',
}
#WAS 2: 'transaction__issuer',
#WAS 3: 'transaction__source',
#WAS 3: 'transaction__kind', --> FIXME: In case of translation the search does not operate correctly
def __init__(self, *args, **kw):
super(Block, self).__init__(*args, **kw)
# Default start closed. Mainly for GAS -> Accounting tab ("Conto")
self.start_open = False
def _check_permission(self, request):
if request.resource.gas:
return request.user.has_perm(
CASH, obj=ObjectWithContext(request.resource.gas)
)
else:
return True
def _get_resource_list(self, request):
#Accounting.LedgerEntry or Transactions
return request.resource.economic_movements
def get_response(self, request, resource_type, resource_id, args):
"""Check for confidential access permission and call superclass if needed"""
if not self._check_permission(request):
return render_to_xml_response(
"blocks/table_html_message.xml",
{ 'msg' : CONFIDENTIAL_VERBOSE_HTML }
)
if args == CREATE_CSV:
return self._create_csv(request)
return super(Block, self).get_response(request, resource_type, resource_id, args)
#TODO: Filter grid by
# Date From --> To
# Kind of transactions: could be a multiselect checkbox list
# Subject: radio or multiple checkboxes over the values [GAS purse (borselino), GAS members, Suppliers]
# def options_response(self, request, resource_type, resource_id):
# """Get options for transaction block.
# WARNING: call to this method doesn't pass through get_response
# so you have to reset self.request and self.resource attribute if you want
# """
# self.request = request
# self.resource = request.resource
# fields = []
# #DATE FROM
# fields.append({
# 'field_type' : 'datetime',
# 'field_label' : 'from date',
# 'field_name' : 'from',
# 'field_values' : [{ 'value' : '22/09/2012', 'selected' : ''}]
# })
# #DATE TO
# fields.append({
# 'field_type' : 'datetime',
# 'field_label' : 'to date',
# 'field_name' : 'to',
# 'field_values' : [{ 'value' : '28/09/2012', 'label' : 'labelvalue', 'selected' : 'sel'}]
# })
# ctx = {
# 'block_name' : self.description,
# 'fields': fields,
# }
# #Can use html template loader
# return render_to_xml_response('eco-options.xml', ctx)
def _get_user_actions(self, request):
user_actions = []
resource_type = request.resource.resource_type
if self._check_permission(request):
user_actions += [
ResourceBlockAction(
block_name = self.BLOCK_NAME,
resource = request.resource,
name=CREATE_CSV, verbose_name=_("Create CSV"),
popup_form=False,
method="OPENURL",
),
]
return user_actions
def _create_csv(self, request):
""" Create CSV of this block transactions
#MATTEO TOREMOVE: lascio la prima implementazione (da levare
ovviamente dall'integrazione) come monito a me stesso -->
kiss, kiss e ancora kiss !!
#NOTA: eliminare nell'integrazione tutte le righe commentate con #OLD:
"""
headers = [_(u'Id'), _(u'Data'), _(u'Account'), _(u'Kind'), _(u'Cash amount'), _(u'Description')]
records = self._get_resource_list(request)
csvfile = StringIO.StringIO()
writer = csv.writer(csvfile, delimiter=';',quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(headers)
for res in records:  # reuse the list fetched above instead of querying again
writer.writerow([res.pk,
'{0:%a %d %b %Y %H:%M}'.format(res.date),
human_readable_account_csv(res.account),
human_readable_kind(res.transaction.kind),
signed_ledger_entry_amount(res),
res.transaction.description.encode("utf-8", "ignore")
])
csv_data = csvfile.getvalue()
if not csv_data:
rv = HttpResponseServerError(_('Report not generated'))
else:
response = HttpResponse(csv_data, content_type='text/csv')
filename = "%(res)s_%(date)s.csv" % {
'res': request.resource,
'date' : '{0:%Y%m%d_%H%M}'.format(datetime.datetime.now())
}
response['Content-Disposition'] = "attachment; filename=" + filename
rv = response
return rv
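# Illustrative sketch (hypothetical values, not from the app): a generated
# row, using the ';' delimiter configured above, would look like
#
#   42;Thu 01 Jan 2015 10:30;GAS cash account;Deposit;+10.00;January fee
#
# with the exact account/kind strings depending on the accounting helpers.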
| agpl-3.0 |
Nihhaar/android_kernel_xiaomi_mocha | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
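# Worked example (hypothetical status value): analyse("42", ["M", "eq", 4], "1")
# extracts the tens digit of 42 (10 ** 1 = 10, so 42 / 10 % 10 == 4), compares
# it for equality with the expected mutex state 4 ("locked") and returns 1.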
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
lostdj/Jaklin-OpenJFX | modules/web/src/main/native/Tools/gtk/gtkdoc.py | 2 | 18084 | # Copyright (C) 2011 Igalia S.L.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import errno
import logging
import os
import os.path
import subprocess
import sys
class GTKDoc(object):
"""Class that controls a gtkdoc run.
Each instance of this class represents one gtkdoc configuration
and set of documentation. The gtkdoc package is a series of tools
run consecutively which convert inline C/C++ documentation into
docbook files and then into HTML. This class is suitable for
generating documentation or simply verifying correctness.
Keyword arguments:
output_dir -- The path where gtkdoc output should be placed. Generation
may overwrite files in this directory. Required.
module_name -- The name of the documentation module. For libraries this
is typically the library name. Required if no library path
is given.
source_dirs -- A list of paths to the source code to be scanned. Required.
ignored_files -- A list of filenames to ignore in the source directory. It is
only necessary to provide the basenames of these files.
Typically it is important to provide an updated list of
ignored files to prevent warnings about undocumented symbols.
namespace -- The library namespace.
decorator -- If a decorator is used to unhide certain symbols in header
files this parameter is required for successful scanning.
(default '')
deprecation_guard -- gtkdoc tries to ensure that symbols marked as deprecated
are encased in this C preprocessor define. This is required
to avoid gtkdoc warnings. (default '')
cflags -- This parameter specifies any preprocessor flags necessary for
building the scanner binary during gtkdoc-scangobj. Typically
this includes all absolute include paths necessary to resolve
all header dependencies. (default '')
ldflags -- This parameter specifies any linker flags necessary for
building the scanner binary during gtkdoc-scangobj. Typically
this includes "-lyourlibraryname". (default '')
library_path -- This parameter specifies the path to the directory where your
library resides, used for building the scanner binary during
gtkdoc-scangobj. (default '')
doc_dir -- The path to other documentation files necessary to build
the documentation. The files in this directory as well as
the files in the 'html' subdirectory will be copied
recursively into the output directory. (default '')
main_sgml_file -- The path or name (if a doc_dir is given) of the SGML file
that is considered the main page of your documentation.
(default: <module_name>-docs.sgml)
version -- The version number of the module. If this is provided,
a version.xml file containing the version will be created
in the output directory during documentation generation.
interactive -- Whether errors or warnings should prompt the user
to continue. When this value is false, generation
will continue despite warnings. (default False)
virtual_root -- A temporary installation directory which is used as the root
where the actual installation prefix lives; this is mostly
useful for packagers, and should be set to what is given to
make install as DESTDIR.
"""
def __init__(self, args):
# Parameters specific to scanning.
self.module_name = ''
self.source_dirs = []
self.ignored_files = []
self.namespace = ''
self.decorator = ''
self.deprecation_guard = ''
# Parameters specific to gtkdoc-scanobj.
self.cflags = ''
self.ldflags = ''
self.library_path = ''
# Parameters specific to generation.
self.output_dir = ''
self.doc_dir = ''
self.main_sgml_file = ''
# Parameters specific to gtkdoc-fixxref.
self.cross_reference_deps = []
self.interactive = False
self.logger = logging.getLogger('gtkdoc')
for key, value in iter(args.items()):
setattr(self, key, value)
def raise_error_if_not_specified(key):
if not getattr(self, key):
raise Exception('%s not specified.' % key)
raise_error_if_not_specified('output_dir')
raise_error_if_not_specified('source_dirs')
raise_error_if_not_specified('module_name')
# Make all paths absolute in case we were passed relative paths, since
# we change the current working directory when executing subcommands.
self.output_dir = os.path.abspath(self.output_dir)
self.source_dirs = [os.path.abspath(x) for x in self.source_dirs]
if self.library_path:
self.library_path = os.path.abspath(self.library_path)
if not self.main_sgml_file:
self.main_sgml_file = self.module_name + "-docs.sgml"
def generate(self, html=True):
self.saw_warnings = False
self._copy_doc_files_to_output_dir(html)
self._write_version_xml()
self._run_gtkdoc_scan()
self._run_gtkdoc_scangobj()
self._run_gtkdoc_mktmpl()
self._run_gtkdoc_mkdb()
if not html:
return
self._run_gtkdoc_mkhtml()
self._run_gtkdoc_fixxref()
def _delete_file_if_exists(self, path):
if not os.access(path, os.F_OK | os.R_OK):
return
self.logger.debug('deleting %s', path)
os.unlink(path)
def _create_directory_if_nonexistent(self, path):
try:
os.makedirs(path)
except OSError as error:
if error.errno != errno.EEXIST:
raise
def _raise_exception_if_file_inaccessible(self, path):
if not os.path.exists(path) or not os.access(path, os.R_OK):
raise Exception("Could not access file at: %s" % path)
def _output_has_warnings(self, outputs):
for output in outputs:
# str.find() returns -1 (which is truthy) when absent, so compare explicitly.
if output and output.find('warning') != -1:
return True
return False
def _ask_yes_or_no_question(self, question):
if not self.interactive:
return True
question += ' [y/N] '
answer = None
while answer != 'y' and answer != 'n' and answer != '':
answer = raw_input(question).lower()
return answer == 'y'
def _run_command(self, args, env=None, cwd=None, print_output=True, ignore_warnings=False):
if print_output:
self.logger.info("Running %s", args[0])
self.logger.debug("Full command args: %s", str(args))
process = subprocess.Popen(args, env=env, cwd=cwd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = [b.decode("utf-8") for b in process.communicate()]
if print_output:
if stdout:
sys.stdout.write(stdout)
if stderr:
sys.stderr.write(stderr)
if process.returncode != 0:
raise Exception('%s produced a non-zero return code %i'
% (args[0], process.returncode))
if not ignore_warnings and ('warning' in stderr or 'warning' in stdout):
self.saw_warnings = True
if not self._ask_yes_or_no_question('%s produced warnings, '
'try to continue?' % args[0]):
raise Exception('%s step failed' % args[0])
return stdout.strip()
def _copy_doc_files_to_output_dir(self, html=True):
if not self.doc_dir:
self.logger.info('Not copying any files from doc directory,'
' because no doc directory given.')
return
def copy_file_replacing_existing(src, dest):
if os.path.isdir(src):
self.logger.debug('skipped directory %s', src)
return
if not os.access(src, os.F_OK | os.R_OK):
self.logger.debug('skipped unreadable %s', src)
return
self._delete_file_if_exists(dest)
self.logger.debug('created %s', dest)
try:
os.link(src, dest)
except OSError:
os.symlink(src, dest)
def copy_all_files_in_directory(src, dest):
for path in os.listdir(src):
copy_file_replacing_existing(os.path.join(src, path),
os.path.join(dest, path))
self.logger.info('Copying template files to output directory...')
self._create_directory_if_nonexistent(self.output_dir)
copy_all_files_in_directory(self.doc_dir, self.output_dir)
if not html:
return
self.logger.info('Copying HTML files to output directory...')
html_src_dir = os.path.join(self.doc_dir, 'html')
html_dest_dir = os.path.join(self.output_dir, 'html')
self._create_directory_if_nonexistent(html_dest_dir)
if os.path.exists(html_src_dir):
copy_all_files_in_directory(html_src_dir, html_dest_dir)
def _write_version_xml(self):
# version is only set when it appears in args, so guard against AttributeError.
if not getattr(self, 'version', None):
self.logger.info('No version specified, so not writing version.xml')
return
version_xml_path = os.path.join(self.output_dir, 'version.xml')
src_version_xml_path = os.path.join(self.doc_dir, 'version.xml')
# Don't overwrite version.xml if it was in the doc directory.
if os.path.exists(version_xml_path) and \
os.path.exists(src_version_xml_path):
return
output_file = open(version_xml_path, 'w')
output_file.write(self.version)
output_file.close()
def _ignored_files_basenames(self):
return ' '.join([os.path.basename(x) for x in self.ignored_files])
def _run_gtkdoc_scan(self):
args = ['gtkdoc-scan',
'--module=%s' % self.module_name,
'--rebuild-types']
# Each source directory should have its own "--source-dir=" prefix.
args.extend(['--source-dir=%s' % path for path in self.source_dirs])
if self.decorator:
args.append('--ignore-decorators=%s' % self.decorator)
if self.deprecation_guard:
args.append('--deprecated-guards=%s' % self.deprecation_guard)
if self.output_dir:
args.append('--output-dir=%s' % self.output_dir)
# gtkdoc-scan wants the basenames of ignored headers, so strip the
# dirname. Different from "--source-dir", the headers should be
# specified as one long string.
ignored_files_basenames = self._ignored_files_basenames()
if ignored_files_basenames:
args.append('--ignore-headers=%s' % ignored_files_basenames)
self._run_command(args)
def _run_gtkdoc_scangobj(self):
env = os.environ
ldflags = self.ldflags
if self.library_path:
ldflags = ' "-L%s" ' % self.library_path + ldflags
current_ld_library_path = env.get('LD_LIBRARY_PATH')
if current_ld_library_path:
env['RUN'] = 'LD_LIBRARY_PATH="%s:%s" ' % (self.library_path, current_ld_library_path)
else:
env['RUN'] = 'LD_LIBRARY_PATH="%s" ' % self.library_path
if ldflags:
env['LDFLAGS'] = '%s %s' % (ldflags, env.get('LDFLAGS', ''))
if self.cflags:
env['CFLAGS'] = '%s %s' % (self.cflags, env.get('CFLAGS', ''))
if 'CFLAGS' in env:
self.logger.debug('CFLAGS=%s', env['CFLAGS'])
if 'LDFLAGS' in env:
self.logger.debug('LDFLAGS %s', env['LDFLAGS'])
if 'RUN' in env:
self.logger.debug('RUN=%s', env['RUN'])
self._run_command(['gtkdoc-scangobj', '--module=%s' % self.module_name],
env=env, cwd=self.output_dir)
def _run_gtkdoc_mktmpl(self):
args = ['gtkdoc-mktmpl', '--module=%s' % self.module_name]
self._run_command(args, cwd=self.output_dir)
def _run_gtkdoc_mkdb(self):
sgml_file = os.path.join(self.output_dir, self.main_sgml_file)
self._raise_exception_if_file_inaccessible(sgml_file)
args = ['gtkdoc-mkdb',
'--module=%s' % self.module_name,
'--main-sgml-file=%s' % sgml_file,
'--source-suffixes=h,c,cpp,cc',
'--output-format=xml',
'--sgml-mode']
if self.namespace:
args.append('--name-space=%s' % self.namespace)
ignored_files_basenames = self._ignored_files_basenames()
if ignored_files_basenames:
args.append('--ignore-files=%s' % ignored_files_basenames)
# Each directory should have its own "--source-dir=" prefix.
args.extend(['--source-dir=%s' % path for path in self.source_dirs])
self._run_command(args, cwd=self.output_dir)
def _run_gtkdoc_mkhtml(self):
html_dest_dir = os.path.join(self.output_dir, 'html')
if not os.path.isdir(html_dest_dir):
raise Exception("%s is not a directory, could not generate HTML"
% html_dest_dir)
elif not os.access(html_dest_dir, os.X_OK | os.R_OK | os.W_OK):
raise Exception("Could not access %s to generate HTML"
% html_dest_dir)
# gtkdoc-mkhtml expects the SGML path to be absolute.
sgml_file = os.path.join(os.path.abspath(self.output_dir),
self.main_sgml_file)
self._raise_exception_if_file_inaccessible(sgml_file)
self._run_command(['gtkdoc-mkhtml', self.module_name, sgml_file],
cwd=html_dest_dir)
def _run_gtkdoc_fixxref(self):
args = ['gtkdoc-fixxref',
'--module-dir=html',
'--html-dir=html']
args.extend(['--extra-dir=%s' % extra_dir for extra_dir in self.cross_reference_deps])
self._run_command(args, cwd=self.output_dir, ignore_warnings=True)
def rebase_installed_docs(self):
if not os.path.isdir(self.output_dir):
raise Exception("Tried to rebase documentation before generating it.")
html_dir = os.path.join(self.virtual_root + self.prefix, 'share', 'gtk-doc', 'html', self.module_name)
if not os.path.isdir(html_dir):
return
args = ['gtkdoc-rebase',
'--relative',
'--html-dir=%s' % html_dir]
args.extend(['--other-dir=%s' % extra_dir for extra_dir in self.cross_reference_deps])
if self.virtual_root:
args.extend(['--dest-dir=%s' % self.virtual_root])
self._run_command(args, cwd=self.output_dir)
def api_missing_documentation(self):
unused_doc_file = os.path.join(self.output_dir, self.module_name + "-unused.txt")
if not os.path.exists(unused_doc_file) or not os.access(unused_doc_file, os.R_OK):
return []
return open(unused_doc_file).read().splitlines()
class PkgConfigGTKDoc(GTKDoc):
"""Class reads a library's pkgconfig file to guess gtkdoc parameters.
Some gtkdoc parameters can be guessed by reading a library's pkgconfig
file, including the cflags, ldflags and version parameters. If you
provide these parameters as well, they will be appended to the ones
guessed via the pkgconfig file.
Keyword arguments:
pkg_config_path -- Path to the pkgconfig file for the library. Required.
"""
def __init__(self, pkg_config_path, args):
super(PkgConfigGTKDoc, self).__init__(args)
pkg_config = os.environ.get('PKG_CONFIG', 'pkg-config')
if not os.path.exists(pkg_config_path):
raise Exception('Could not find pkg-config file at: %s'
% pkg_config_path)
self.cflags += " " + self._run_command([pkg_config,
pkg_config_path,
'--cflags'], print_output=False)
self.ldflags += " " + self._run_command([pkg_config,
pkg_config_path,
'--libs'], print_output=False)
self.version = self._run_command([pkg_config,
pkg_config_path,
'--modversion'], print_output=False)
self.prefix = self._run_command([pkg_config,
pkg_config_path,
'--variable=prefix'], print_output=False)
| gpl-2.0 |
40223135/40223135w17 | static/Brython3.1.1-20150328-091302/Lib/pydoc.py | 637 | 102017 | #!/usr/bin/env python3
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on the given port on the
local machine. Port number 0 can be used to get an arbitrary unused port.
Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/X.Y/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__all__ = ['help']
__author__ = "Ka-Ping Yee <[email protected]>"
__date__ = "26 February 2001"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import builtins
import imp
import importlib.machinery
#brython fix me
import inspect
import io
import os
#brython fix me
#import pkgutil
import platform
import re
import sys
import time
import tokenize
import warnings
from collections import deque
from reprlib import Repr
#fix me brython
#from traceback import extract_tb, format_exception_only
# --------------------------------------------------------- common routines
def pathdirs():
"""Convert sys.path into a list of absolute, existing, unique paths."""
dirs = []
normdirs = []
for dir in sys.path:
dir = os.path.abspath(dir or '.')
normdir = os.path.normcase(dir)
if normdir not in normdirs and os.path.isdir(dir):
dirs.append(dir)
normdirs.append(normdir)
return dirs
def getdoc(object):
"""Get the doc string or comments for an object."""
result = inspect.getdoc(object) or inspect.getcomments(object)
return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
"""Split a doc string into a synopsis line (if any) and the rest."""
lines = doc.strip().split('\n')
if len(lines) == 1:
return lines[0], ''
elif len(lines) >= 2 and not lines[1].rstrip():
return lines[0], '\n'.join(lines[2:])
return '', '\n'.join(lines)
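# Worked example: splitdoc('Frobnicate widgets.\n\nLong description here.')
# returns ('Frobnicate widgets.', 'Long description here.'), while a
# single-line docstring comes back as (line, '').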
def classname(object, modname):
"""Get a class name and qualify it with a module name if necessary."""
name = object.__name__
if object.__module__ != modname:
name = object.__module__ + '.' + name
return name
def isdata(object):
"""Check if an object is of a type that probably means it's data."""
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
def replace(text, *pairs):
"""Do a series of global replacements on a string."""
while pairs:
text = pairs[1].join(text.split(pairs[0]))
pairs = pairs[2:]
return text
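# Worked example: pairs are consumed left to right, so
# replace('a&b<c', '&', '&amp;', '<', '&lt;') returns 'a&amp;b&lt;c'.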
def cram(text, maxlen):
"""Omit part of a string if needed to make it fit in a maximum length."""
if len(text) > maxlen:
pre = max(0, (maxlen-3)//2)
post = max(0, maxlen-3-pre)
return text[:pre] + '...' + text[len(text)-post:]
return text
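# Worked example: cram('abcdefghij', 8) keeps 2 leading and 3 trailing
# characters around the ellipsis, returning 'ab...hij'.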
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
"""Remove the hexadecimal id from a Python object representation."""
# The behaviour of %p is implementation-dependent in terms of case.
#fix me brython
#return _re_stripid.sub(r'\1', text)
return text
def _is_some_method(obj):
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
inspect.isbuiltin(obj) or
inspect.ismethoddescriptor(obj))
def allmethods(cl):
methods = {}
for key, value in inspect.getmembers(cl, _is_some_method):
methods[key] = 1
for base in cl.__bases__:
methods.update(allmethods(base)) # all your base are belong to us
for key in methods.keys():
methods[key] = getattr(cl, key)
return methods
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant or internal.
if name in {'__author__', '__builtins__', '__cached__', '__credits__',
'__date__', '__doc__', '__file__', '__initializing__',
'__loader__', '__module__', '__name__', '__package__',
'__path__', '__qualname__', '__slots__', '__version__'}:
return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
if name.startswith('_') and hasattr(obj, '_fields'):
return True
if all is not None:
# only document that which the programmer exported in __all__
return name in all
else:
return not name.startswith('_')
def classify_class_attrs(object):
"""Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
results = []
for (name, kind, cls, value) in inspect.classify_class_attrs(object):
if inspect.isdatadescriptor(value):
kind = 'data descriptor'
results.append((name, kind, cls, value))
return results
# ----------------------------------------------------- module manipulation
def ispackage(path):
"""Guess whether a path refers to a package directory."""
if os.path.isdir(path):
for ext in ('.py', '.pyc', '.pyo'):
if os.path.isfile(os.path.join(path, '__init__' + ext)):
return True
return False
def source_synopsis(file):
line = file.readline()
while line[:1] == '#' or not line.strip():
line = file.readline()
if not line: break
line = line.strip()
if line[:4] == 'r"""': line = line[1:]
if line[:3] == '"""':
line = line[3:]
if line[-1:] == '\\': line = line[:-1]
while not line.strip():
line = file.readline()
if not line: break
result = line.split('"""')[0].strip()
else: result = None
return result
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
mtime = os.stat(filename).st_mtime
lastupdate, result = cache.get(filename, (None, None))
if lastupdate is None or lastupdate < mtime:
try:
file = tokenize.open(filename)
except IOError:
# module can't be opened, so skip it
return None
binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
binary_suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
if any(filename.endswith(x) for x in binary_suffixes):
# binary modules have to be imported
file.close()
if any(filename.endswith(x) for x in
importlib.machinery.BYTECODE_SUFFIXES):
loader = importlib.machinery.SourcelessFileLoader('__temp__',
filename)
else:
loader = importlib.machinery.ExtensionFileLoader('__temp__',
filename)
try:
module = loader.load_module('__temp__')
except:
return None
result = (module.__doc__ or '').splitlines()[0]
del sys.modules['__temp__']
else:
# text modules can be directly examined
result = source_synopsis(file)
file.close()
cache[filename] = (mtime, result)
return result
class ErrorDuringImport(Exception):
"""Errors that occurred while trying to import something to document it."""
def __init__(self, filename, exc_info):
self.filename = filename
self.exc, self.value, self.tb = exc_info
def __str__(self):
exc = self.exc.__name__
return 'problem in %s - %s: %s' % (self.filename, exc, self.value)
def importfile(path):
"""Import a Python source file or compiled file given its path."""
magic = imp.get_magic()
with open(path, 'rb') as file:
if file.read(len(magic)) == magic:
kind = imp.PY_COMPILED
else:
kind = imp.PY_SOURCE
file.seek(0)
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
try:
module = imp.load_module(name, file, path, (ext, 'r', kind))
except:
raise ErrorDuringImport(path, sys.exc_info())
return module
def safeimport(path, forceload=0, cache={}):
"""Import a module; handle errors; return None if the module isn't found.
If the module *is* found but an exception occurs, it's wrapped in an
ErrorDuringImport exception and reraised. Unlike __import__, if a
package path is specified, the module at the end of the path is returned,
not the package at the beginning. If the optional 'forceload' argument
is 1, we reload the module from disk (unless it's a dynamic extension)."""
try:
# If forceload is 1 and the module has been previously loaded from
# disk, we always have to reload the module. Checking the file's
# mtime isn't good enough (e.g. the module could contain a class
# that inherits from another module that has changed).
if forceload and path in sys.modules:
if path not in sys.builtin_module_names:
# Remove the module from sys.modules and re-import to try
# and avoid problems with partially loaded modules.
# Also remove any submodules because they won't appear
# in the newly loaded module's namespace if they're already
# in sys.modules.
subs = [m for m in sys.modules if m.startswith(path + '.')]
for key in [path] + subs:
# Prevent garbage collection.
cache[key] = sys.modules[key]
del sys.modules[key]
module = __import__(path)
except:
# Did the error occur before or after the module was found?
(exc, value, tb) = info = sys.exc_info()
if path in sys.modules:
# An error occurred while executing the imported module.
raise ErrorDuringImport(sys.modules[path].__file__, info)
elif exc is SyntaxError:
# A SyntaxError occurred before we could execute the module.
raise ErrorDuringImport(value.filename, info)
#fix me brython
#elif exc is ImportError and value.name == path:
elif exc is ImportError and str(value) == str(path):
# No such module in the path.
return None
else:
# Some other error occurred during the importing process.
raise ErrorDuringImport(path, sys.exc_info())
for part in path.split('.')[1:]:
try: module = getattr(module, part)
except AttributeError: return None
return module
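# Worked example: safeimport('os.path') walks the dotted path and returns the
# os.path module itself (not the os package); errors raised while importing
# are re-raised wrapped in ErrorDuringImport.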
# ---------------------------------------------------- formatter base class
class Doc:
PYTHONDOCS = os.environ.get("PYTHONDOCS",
"http://docs.python.org/%d.%d/library"
% sys.version_info[:2])
def document(self, object, name=None, *args):
"""Generate documentation for an object."""
args = (object, name) + args
# 'try' clause is to attempt to handle the possibility that inspect
# identifies something in a way that pydoc itself has issues handling;
# think 'super' and how it is a descriptor (which raises the exception
# by lacking a __name__ attribute) and an instance.
if inspect.isgetsetdescriptor(object): return self.docdata(*args)
if inspect.ismemberdescriptor(object): return self.docdata(*args)
try:
if inspect.ismodule(object): return self.docmodule(*args)
if inspect.isclass(object): return self.docclass(*args)
if inspect.isroutine(object): return self.docroutine(*args)
except AttributeError:
pass
if isinstance(object, property): return self.docproperty(*args)
return self.docother(*args)
def fail(self, object, name=None, *args):
"""Raise an exception for unimplemented types."""
message = "don't know how to document object%s of type %s" % (
name and ' ' + repr(name), type(object).__name__)
raise TypeError(message)
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
def getdocloc(self, object):
"""Return the location of module docs or None"""
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)
basedir = os.path.join(sys.base_exec_prefix, "lib",
"python%d.%d" % sys.version_info[:2])
if (isinstance(object, type(os)) and
(object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
'marshal', 'posix', 'signal', 'sys',
'_thread', 'zipimport') or
(file.startswith(basedir) and
not file.startswith(os.path.join(basedir, 'site-packages')))) and
object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
if docloc.startswith("http://"):
docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__)
else:
docloc = os.path.join(docloc, object.__name__ + ".html")
else:
docloc = None
return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
"""Class for safely making an HTML representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
def escape(self, text):
return replace(text, '&', '&amp;', '<', '&lt;', '>', '&gt;')
def repr(self, object):
return Repr.repr(self, object)
def repr1(self, x, level):
if hasattr(type(x), '__name__'):
methodname = 'repr_' + '_'.join(type(x).__name__.split())
if hasattr(self, methodname):
return getattr(self, methodname)(x, level)
return self.escape(cram(stripid(repr(x)), self.maxother))
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
r'<font color="#c040c0">\1</font>',
self.escape(testrepr))
repr_str = repr_string
def repr_instance(self, x, level):
try:
return self.escape(cram(stripid(repr(x)), self.maxstring))
except:
return self.escape('<%s instance>' % x.__class__.__name__)
repr_unicode = repr_string
class HTMLDoc(Doc):
"""Formatter class for HTML documentation."""
# ------------------------------------------- HTML formatting utilities
_repr_instance = HTMLRepr()
repr = _repr_instance.repr
escape = _repr_instance.escape
def page(self, title, contents):
"""Format an HTML page."""
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
def heading(self, title, fgcol, bgcol, extras=''):
"""Format a page heading."""
return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom>&nbsp;<br>
<font color="%s" face="helvetica, arial">&nbsp;<br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
''' % (bgcol, fgcol, title, fgcol, extras or '&nbsp;')
def section(self, title, fgcol, bgcol, contents, width=6,
prelude='', marginalia=None, gap='&nbsp;'):
"""Format a section with a heading."""
if marginalia is None:
marginalia = '<tt>' + '&nbsp;' * width + '</tt>'
result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom>&nbsp;<br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
''' % (bgcol, fgcol, title)
if prelude:
result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
else:
result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(text.expandtabs())
return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
' ', '&nbsp;', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)//cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100//cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
def modpkglink(self, modpkginfo):
"""Make a link for a module or package to display in an index."""
name, path, ispackage, shadowed = modpkginfo
if shadowed:
return self.grey(name)
if path:
url = '%s.%s.html' % (path, name)
else:
url = '%s.html' % name
if ispackage:
text = '<strong>%s</strong> (package)' % name
else:
text = name
return '<a href="%s">%s</a>' % (url, text)
def filelink(self, url, path):
"""Make a link to source file."""
return '<a href="file:%s">%s</a>' % (url, path)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
url = escape(all).replace('"', '&quot;')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None):
"""Produce HTML for a class tree as given by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + '<dt><font face="helvetica, arial">'
result = result + self.classlink(c, modname)
if bases and bases != (parent,):
parents = []
for base in bases:
parents.append(self.classlink(base, modname))
result = result + '(' + ', '.join(parents) + ')'
result = result + '\n</font></dt>'
elif type(entry) is type([]):
result = result + '<dd>\n%s</dd>\n' % self.formattree(
entry, modname, c)
return '<dl>\n%s</dl>\n' % result
def docmodule(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a module object."""
name = object.__name__ # ignore the passed-in name
try:
all = object.__all__
except AttributeError:
all = None
parts = name.split('.')
links = []
for i in range(len(parts)-1):
links.append(
'<a href="%s.html"><font color="#ffffff">%s</font></a>' %
('.'.join(parts[:i+1]), parts[i]))
linkedname = '.'.join(links + parts[-1:])
head = '<big><big><strong>%s</strong></big></big>' % linkedname
try:
path = inspect.getabsfile(object)
url = path
if sys.platform == 'win32':
import nturl2path
url = nturl2path.pathname2url(path)
filelink = self.filelink(url, path)
except TypeError:
filelink = '(built-in)'
info = []
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
info.append('version %s' % self.escape(version))
if hasattr(object, '__date__'):
info.append(self.escape(str(object.__date__)))
if info:
head = head + ' (%s)' % ', '.join(info)
docloc = self.getdocloc(object)
if docloc is not None:
docloc = '<br><a href="%(docloc)s">Module Reference</a>' % locals()
else:
docloc = ''
result = self.heading(
head, '#ffffff', '#7799ee',
'<a href=".">index</a><br>' + filelink + docloc)
modules = inspect.getmembers(object, inspect.ismodule)
classes, cdict = [], {}
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
(inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
cdict[key] = cdict[value] = '#' + key
for key, value in classes:
for base in value.__bases__:
key, modname = base.__name__, base.__module__
module = sys.modules.get(modname)
if modname != name and module and hasattr(module, key):
if getattr(module, key) is base:
if not key in cdict:
cdict[key] = cdict[base] = modname + '.html#' + key
funcs, fdict = [], {}
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
fdict[key] = '#-' + key
if inspect.isfunction(value): fdict[value] = fdict[key]
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
if hasattr(object, '__path__'):
modpkgs = []
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs.append((modname, name, ispkg, 0))
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
result = result + self.bigsection(
'Package Contents', '#ffffff', '#aa55cc', contents)
elif modules:
contents = self.multicolumn(
modules, lambda t: self.modulelink(t[1]))
result = result + self.bigsection(
'Modules', '#ffffff', '#aa55cc', contents)
if classes:
classlist = [value for (key, value) in classes]
contents = [
self.formattree(inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Classes', '#ffffff', '#ee77aa', ' '.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Functions', '#ffffff', '#eeaa77', ' '.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.document(value, key))
result = result + self.bigsection(
'Data', '#ffffff', '#55aa55', '<br>\n'.join(contents))
if hasattr(object, '__author__'):
contents = self.markup(str(object.__author__), self.preformat)
result = result + self.bigsection(
'Author', '#ffffff', '#7799ee', contents)
if hasattr(object, '__credits__'):
contents = self.markup(str(object.__credits__), self.preformat)
result = result + self.bigsection(
'Credits', '#ffffff', '#7799ee', contents)
return result
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
*ignored):
"""Produce HTML documentation for a class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
contents = []
push = contents.append
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('<hr>\n')
self.needone = 1
hr = HorizontalRule()
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
hr.maybe()
push('<dl><dt>Method resolution order:</dt>\n')
for base in mro:
push('<dd>%s</dd>\n' % self.classlink(base,
object.__module__))
push('</dl>\n')
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value, name, mod,
funcs, classes, mdict, object))
push('\n')
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
base = self.docother(getattr(object, name), name, mod)
if callable(value) or inspect.isdatadescriptor(value):
doc = getattr(value, "__doc__", None)
else:
doc = None
if doc is None:
push('<dl><dt>%s</dl>\n' % base)
else:
doc = self.markup(getdoc(value), self.preformat,
funcs, classes, mdict)
doc = '<dd><tt>%s</tt>' % doc
push('<dl><dt>%s%s</dl>\n' % (base, doc))
push('\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
mdict = {}
for key, kind, homecls, value in attrs:
mdict[key] = anchor = '#' + name + '-' + key
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
pass
try:
# The value may not be hashable (e.g., a data attr with
# a dict or list value).
mdict[value] = anchor
except TypeError:
pass
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = 'defined here'
else:
tag = 'inherited from %s' % self.classlink(thisclass,
object.__module__)
tag += ':<br>\n'
# Sort attrs by name.
attrs.sort(key=lambda t: t[0])
# Pump out the attrs, segregated by kind.
attrs = spill('Methods %s' % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill('Class methods %s' % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill('Static methods %s' % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata('Data and other attributes %s' % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = ''.join(contents)
if name == realname:
title = '<a name="%s">class <strong>%s</strong></a>' % (
name, realname)
else:
title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
name, name, realname)
if bases:
parents = []
for base in bases:
parents.append(self.classlink(base, object.__module__))
title = title + '(%s)' % ', '.join(parents)
doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
doc = doc and '<tt>%s<br>&nbsp;</tt>' % doc
return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object))
def docroutine(self, object, name=None, mod=None,
funcs={}, classes={}, methods={}, cl=None):
"""Produce HTML documentation for a function or method object."""
realname = object.__name__
name = name or realname
anchor = (cl and cl.__name__ or '') + '-' + name
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + self.classlink(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % self.classlink(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % self.classlink(imclass,mod)
object = object.__func__
if name == realname:
title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
reallink = '<a href="#%s">%s</a>' % (
cl.__name__ + '-' + realname, realname)
skipdocs = 1
else:
reallink = realname
title = '<a name="%s"><strong>%s</strong></a> = %s' % (
anchor, name, reallink)
if inspect.isfunction(object):
args, varargs, kwonlyargs, kwdefaults, varkw, defaults, ann = \
inspect.getfullargspec(object)
argspec = inspect.formatargspec(
args, varargs, kwonlyargs, kwdefaults, varkw, defaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = '<strong>%s</strong> <em>lambda</em> ' % name
# XXX lambdas won't usually have func_annotations['return']
# since the syntax doesn't support it, but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + (note and self.grey(
'<font face="helvetica, arial">%s</font>' % note))
if skipdocs:
return '<dl><dt>%s</dt></dl>\n' % decl
else:
doc = self.markup(
getdoc(object), self.preformat, funcs, classes, methods)
doc = doc and '<dd><tt>%s</tt></dd>' % doc
return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if value.__doc__ is not None:
doc = self.markup(getdoc(value), self.preformat)
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a property."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def index(self, dir, shadowed=None):
"""Generate an HTML index for a directory of modules."""
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
# ignore a module if its name contains a surrogate character
continue
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
"""Class for safely making a text representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
#def repr1(self, x, level):
# if hasattr(type(x), '__name__'):
# methodname = 'repr_' + '_'.join(type(x).__name__.split())
# if hasattr(self, methodname):
# return getattr(self, methodname)(x, level)
# return cram(stripid(repr(x)), self.maxother)
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + test + testrepr[0]
return testrepr
repr_str = repr_string
def repr_instance(self, x, level):
try:
return cram(stripid(repr(x)), self.maxstring)
except:
return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
"""Formatter class for text documentation."""
# ------------------------------------------- text formatting utilities
_repr_instance = TextRepr()
repr = _repr_instance.repr
def bold(self, text):
"""Format a string in bold by overstriking."""
return ''.join(ch + '\b' + ch for ch in text)
def indent(self, text, prefix=' '):
"""Indent text by prepending a given prefix to each line."""
if not text: return ''
lines = [prefix + line for line in text.split('\n')]
if lines: lines[-1] = lines[-1].rstrip()
return '\n'.join(lines)
def section(self, title, contents):
"""Format a section with a given heading."""
clean_contents = self.indent(contents).rstrip()
return self.bold(title) + '\n' + clean_contents + '\n\n'
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None, prefix=''):
"""Render in text a class tree as returned by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + prefix + classname(c, modname)
if bases and bases != (parent,):
parents = (classname(c, modname) for c in bases)
result = result + '(%s)' % ', '.join(parents)
result = result + '\n'
elif type(entry) is type([]):
result = result + self.formattree(
entry, modname, c, prefix + ' ')
return result
def docmodule(self, object, name=None, mod=None):
"""Produce text documentation for a given module object."""
name = object.__name__ # ignore the passed-in name
synop, desc = splitdoc(getdoc(object))
result = self.section('NAME', name + (synop and ' - ' + synop))
all = getattr(object, '__all__', None)
docloc = self.getdocloc(object)
if docloc is not None:
result = result + self.section('MODULE REFERENCE', docloc + """
The following documentation is automatically generated from the Python
source files. It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations. When in doubt, consult the module reference at the
location listed above.
""")
if desc:
result = result + self.section('DESCRIPTION', desc)
classes = []
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None
or (inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
funcs = []
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
modpkgs = []
modpkgs_names = set()
if hasattr(object, '__path__'):
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs_names.add(modname)
if ispkg:
modpkgs.append(modname + ' (package)')
else:
modpkgs.append(modname)
modpkgs.sort()
result = result + self.section(
'PACKAGE CONTENTS', '\n'.join(modpkgs))
# Detect submodules as sometimes created by C extensions
submodules = []
for key, value in inspect.getmembers(object, inspect.ismodule):
if value.__name__.startswith(name + '.') and key not in modpkgs_names:
submodules.append(key)
if submodules:
submodules.sort()
result = result + self.section(
'SUBMODULES', '\n'.join(submodules))
if classes:
classlist = [value for key, value in classes]
contents = [self.formattree(
inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name))
result = result + self.section('CLASSES', '\n'.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name))
result = result + self.section('FUNCTIONS', '\n'.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.docother(value, key, name, maxlen=70))
result = result + self.section('DATA', '\n'.join(contents))
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
result = result + self.section('VERSION', version)
if hasattr(object, '__date__'):
result = result + self.section('DATE', str(object.__date__))
if hasattr(object, '__author__'):
result = result + self.section('AUTHOR', str(object.__author__))
if hasattr(object, '__credits__'):
result = result + self.section('CREDITS', str(object.__credits__))
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
result = result + self.section('FILE', file)
return result
def docclass(self, object, name=None, mod=None, *ignored):
"""Produce text documentation for a given class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
def makename(c, m=object.__module__):
return classname(c, m)
if name == realname:
title = 'class ' + self.bold(realname)
else:
title = self.bold(name) + ' = class ' + realname
if bases:
parents = map(makename, bases)
title = title + '(%s)' % ', '.join(parents)
doc = getdoc(object)
contents = doc and [doc + '\n'] or []
push = contents.append
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
push("Method resolution order:")
for base in mro:
push(' ' + makename(base))
push('')
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('-' * 70)
self.needone = 1
hr = HorizontalRule()
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value,
name, mod, object))
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
if callable(value) or inspect.isdatadescriptor(value):
doc = getdoc(value)
else:
doc = None
push(self.docother(getattr(object, name),
name, mod, maxlen=70, doc=doc) + '\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = "defined here"
else:
tag = "inherited from %s" % classname(thisclass,
object.__module__)
# Sort attrs by name.
attrs.sort()
# Pump out the attrs, segregated by kind.
attrs = spill("Methods %s:\n" % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill("Class methods %s:\n" % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill("Static methods %s:\n" % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = '\n'.join(contents)
if not contents:
return title + '\n'
return title + '\n' + self.indent(contents.rstrip(), ' | ') + '\n'
def formatvalue(self, object):
"""Format an argument default value as text."""
return '=' + self.repr(object)
def docroutine(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a function or method object."""
realname = object.__name__
name = name or realname
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + classname(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % classname(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % classname(imclass,mod)
object = object.__func__
if name == realname:
title = self.bold(realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
skipdocs = 1
title = self.bold(name) + ' = ' + realname
if inspect.isfunction(object):
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
inspect.getfullargspec(object)
argspec = inspect.formatargspec(
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = self.bold(name) + ' lambda '
                # XXX lambdas won't usually have func_annotations['return']
                # since the syntax doesn't support it, but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + note
if skipdocs:
return decl + '\n'
else:
doc = getdoc(object) or ''
return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push(self.bold(name))
push('\n')
doc = getdoc(value) or ''
if doc:
push(self.indent(doc))
push('\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a property."""
return self._docdescriptor(name, object, mod)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
"""Produce text documentation for a data object."""
repr = self.repr(object)
if maxlen:
line = (name and name + ' = ' or '') + repr
chop = maxlen - len(line)
if chop < 0: repr = repr[:chop] + '...'
line = (name and self.bold(name) + ' = ' or '') + repr
if doc is not None:
line += '\n' + self.indent(str(doc))
return line
class _PlainTextDoc(TextDoc):
"""Subclass of TextDoc which overrides string styling"""
def bold(self, text):
return text
# --------------------------------------------------------- user interfaces
def pager(text):
"""The first time this is called, determine what kind of pager to use."""
global pager
pager = getpager()
pager(text)
def getpager():
"""Decide what method to use for paging through text."""
if not hasattr(sys.stdout, "isatty"):
return plainpager
if not sys.stdin.isatty() or not sys.stdout.isatty():
return plainpager
if 'PAGER' in os.environ:
if sys.platform == 'win32': # pipes completely broken in Windows
return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
elif os.environ.get('TERM') in ('dumb', 'emacs'):
return lambda text: pipepager(plain(text), os.environ['PAGER'])
else:
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
if sys.platform == 'win32' or sys.platform.startswith('os2'):
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
import tempfile
(fd, filename) = tempfile.mkstemp()
os.close(fd)
try:
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
return lambda text: pipepager(text, 'more')
else:
return ttypager
finally:
os.unlink(filename)
def plain(text):
"""Remove boldface formatting from text."""
return re.sub('.\b', '', text)
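# Sketch of the round trip (example values assumed, not from the source):
# plain() strips the overstriking produced by TextDoc.bold, so
#   plain('N\bNA\bAM\bME\bE')  ->  'NAME'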
def pipepager(text, cmd):
"""Page through text by feeding it to another program."""
pipe = os.popen(cmd, 'w')
try:
pipe.write(text)
pipe.close()
except IOError:
pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
file = open(filename, 'w')
file.write(text)
file.close()
try:
os.system(cmd + ' "' + filename + '"')
finally:
os.unlink(filename)
def ttypager(text):
"""Page through text on a text terminal."""
lines = plain(text).split('\n')
try:
import tty
fd = sys.stdin.fileno()
old = tty.tcgetattr(fd)
tty.setcbreak(fd)
getchar = lambda: sys.stdin.read(1)
except (ImportError, AttributeError):
tty = None
getchar = lambda: sys.stdin.readline()[:-1][:1]
try:
        r = inc = int(os.environ.get('LINES', 25)) - 1  # env value may be a str
sys.stdout.write('\n'.join(lines[:inc]) + '\n')
while lines[r:]:
sys.stdout.write('-- more --')
sys.stdout.flush()
c = getchar()
if c in ('q', 'Q'):
                sys.stdout.write('\r          \r')
break
elif c in ('\r', '\n'):
                sys.stdout.write('\r          \r' + lines[r] + '\n')
r = r + 1
continue
if c in ('b', 'B', '\x1b'):
r = r - inc - inc
if r < 0: r = 0
sys.stdout.write('\n' + '\n'.join(lines[r:r+inc]) + '\n')
r = r + inc
finally:
if tty:
tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
"""Simply print unformatted text. This is the ultimate fallback."""
sys.stdout.write(plain(text))
def describe(thing):
"""Produce a short description of the given thing."""
if inspect.ismodule(thing):
if thing.__name__ in sys.builtin_module_names:
return 'built-in module ' + thing.__name__
if hasattr(thing, '__path__'):
return 'package ' + thing.__name__
else:
return 'module ' + thing.__name__
if inspect.isbuiltin(thing):
return 'built-in function ' + thing.__name__
if inspect.isgetsetdescriptor(thing):
return 'getset descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.ismemberdescriptor(thing):
return 'member descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.isclass(thing):
return 'class ' + thing.__name__
if inspect.isfunction(thing):
return 'function ' + thing.__name__
if inspect.ismethod(thing):
return 'method ' + thing.__name__
return type(thing).__name__
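# Hypothetical doctest-style examples for describe() (not in the source;
# results assume a standard CPython environment):
#   >>> describe(len)
#   'built-in function len'
#   >>> describe(int)
#   'class int'
#   >>> import io; describe(io)
#   'module io'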
def locate(path, forceload=0):
"""Locate an object by name or dotted path, importing as necessary."""
parts = [part for part in path.split('.') if part]
module, n = None, 0
while n < len(parts):
nextmodule = safeimport('.'.join(parts[:n+1]), forceload)
if nextmodule: module, n = nextmodule, n + 1
else: break
if module:
object = module
else:
object = builtins
for part in parts[n:]:
try:
object = getattr(object, part)
except AttributeError:
return None
return object
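# Hypothetical usage (not in the source): locate() imports along the
# dotted path as far as possible, then descends with getattr():
#   >>> import os; locate('os.path.join') is os.path.join
#   True
#   >>> locate('no.such.module') is None
#   True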
# --------------------------------------- interactive interpreter interface
text = TextDoc()
plaintext = _PlainTextDoc()
html = HTMLDoc()
def resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, str):
object = locate(thing, forceload)
if not object:
raise ImportError('no Python documentation found for %r' % thing)
return object, thing
else:
name = getattr(thing, '__name__', None)
return thing, name if isinstance(name, str) else None
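# Hypothetical examples for resolve() (not in the source): strings are
# passed through locate(), other objects pass straight through with
# their __name__:
#   >>> resolve('io')   # -> (<module 'io' ...>, 'io')
#   >>> resolve(len)    # -> (<built-in function len>, 'len')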
def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
renderer=None):
"""Render text documentation, given an object or a path to an object."""
if renderer is None:
renderer = text
object, name = resolve(thing, forceload)
desc = describe(object)
module = inspect.getmodule(object)
if name and '.' in name:
desc += ' in ' + name[:name.rfind('.')]
elif module and module is not object:
desc += ' in module ' + module.__name__
if not (inspect.ismodule(object) or
inspect.isclass(object) or
inspect.isroutine(object) or
inspect.isgetsetdescriptor(object) or
inspect.ismemberdescriptor(object) or
isinstance(object, property)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
object = type(object)
desc += ' object'
return title % desc + '\n\n' + renderer.document(object, name)
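# Hypothetical example (not in the source): render_doc('len') returns a
# string that begins 'Python Library Documentation: built-in function len
# in module builtins', followed by the formatted documentation body.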
def doc(thing, title='Python Library Documentation: %s', forceload=0,
output=None):
"""Display text documentation, given an object or a path to an object."""
try:
if output is None:
pager(render_doc(thing, title, forceload))
else:
output.write(render_doc(thing, title, forceload, plaintext))
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedoc(thing, forceload=0):
"""Write HTML documentation to a file in the current directory."""
try:
object, name = resolve(thing, forceload)
page = html.page(describe(object), html.document(object, name))
file = open(name + '.html', 'w', encoding='utf-8')
file.write(page)
file.close()
print('wrote', name + '.html')
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedocs(dir, pkgpath='', done=None):
"""Write out HTML documentation for all modules in a directory tree."""
if done is None: done = {}
for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
writedoc(modname)
return
class Helper:
# These dictionaries map a topic name to either an alias, or a tuple
# (label, seealso-items). The "label" is the label of the corresponding
# section in the .rst file under Doc/ and an index into the dictionary
# in pydoc_data/topics.py.
#
# CAUTION: if you change one of these dictionaries, be sure to adapt the
# list of needed labels in Doc/tools/sphinxext/pyspecific.py and
# regenerate the pydoc_data/topics.py file by running
# make pydoc-topics
# in Doc/ and copying the output file into the Lib/ directory.
keywords = {
'False': '',
'None': '',
'True': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
'break': ('break', 'while for'),
'class': ('class', 'CLASSES SPECIALMETHODS'),
'continue': ('continue', 'while for'),
'def': ('function', ''),
'del': ('del', 'BASICMETHODS'),
'elif': 'if',
'else': ('else', 'while for'),
'except': 'try',
'finally': 'try',
'for': ('for', 'break continue while'),
'from': 'import',
'global': ('global', 'nonlocal NAMESPACES'),
'if': ('if', 'TRUTHVALUE'),
'import': ('import', 'MODULES'),
'in': ('in', 'SEQUENCEMETHODS'),
'is': 'COMPARISON',
'lambda': ('lambda', 'FUNCTIONS'),
'nonlocal': ('nonlocal', 'global NAMESPACES'),
'not': 'BOOLEAN',
'or': 'BOOLEAN',
'pass': ('pass', ''),
'raise': ('raise', 'EXCEPTIONS'),
'return': ('return', 'FUNCTIONS'),
'try': ('try', 'EXCEPTIONS'),
'while': ('while', 'break continue if TRUTHVALUE'),
'with': ('with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
'yield': ('yield', ''),
}
# Either add symbols to this dictionary or to the symbols dictionary
# directly: Whichever is easier. They are merged later.
_symbols_inverse = {
'STRINGS' : ("'", "'''", "r'", "b'", '"""', '"', 'r"', 'b"'),
'OPERATORS' : ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&',
'|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>'),
'COMPARISON' : ('<', '>', '<=', '>=', '==', '!=', '<>'),
'UNARY' : ('-', '~'),
'AUGMENTEDASSIGNMENT' : ('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//='),
'BITWISE' : ('<<', '>>', '&', '|', '^', '~'),
'COMPLEX' : ('j', 'J')
}
symbols = {
'%': 'OPERATORS FORMATTING',
'**': 'POWER',
',': 'TUPLES LISTS FUNCTIONS',
'.': 'ATTRIBUTES FLOAT MODULES OBJECTS',
'...': 'ELLIPSIS',
':': 'SLICINGS DICTIONARYLITERALS',
'@': 'def class',
'\\': 'STRINGS',
'_': 'PRIVATENAMES',
'__': 'PRIVATENAMES SPECIALMETHODS',
'`': 'BACKQUOTES',
'(': 'TUPLES FUNCTIONS CALLS',
')': 'TUPLES FUNCTIONS CALLS',
'[': 'LISTS SUBSCRIPTS SLICINGS',
']': 'LISTS SUBSCRIPTS SLICINGS'
}
for topic, symbols_ in _symbols_inverse.items():
for symbol in symbols_:
topics = symbols.get(symbol, topic)
if topic not in topics:
topics = topics + ' ' + topic
symbols[symbol] = topics
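    # Worked example of the merge above (values assumed, not from the
    # source): '**' starts as 'POWER' in symbols and also appears under
    # 'OPERATORS' in _symbols_inverse, so it ends up mapped to
    # 'POWER OPERATORS'.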
topics = {
'TYPES': ('types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '
'FUNCTIONS CLASSES MODULES FILES inspect'),
'STRINGS': ('strings', 'str UNICODE SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'STRINGMETHODS': ('string-methods', 'STRINGS FORMATTING'),
'FORMATTING': ('formatstrings', 'OPERATORS'),
'UNICODE': ('strings', 'encodings unicode SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'NUMBERS': ('numbers', 'INTEGER FLOAT COMPLEX TYPES'),
'INTEGER': ('integers', 'int range'),
'FLOAT': ('floating', 'float math'),
'COMPLEX': ('imaginary', 'complex cmath'),
'SEQUENCES': ('typesseq', 'STRINGMETHODS FORMATTING range LISTS'),
'MAPPINGS': 'DICTIONARIES',
'FUNCTIONS': ('typesfunctions', 'def TYPES'),
'METHODS': ('typesmethods', 'class def CLASSES TYPES'),
'CODEOBJECTS': ('bltin-code-objects', 'compile FUNCTIONS TYPES'),
'TYPEOBJECTS': ('bltin-type-objects', 'types TYPES'),
'FRAMEOBJECTS': 'TYPES',
'TRACEBACKS': 'TYPES',
'NONE': ('bltin-null-object', ''),
'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'),
'FILES': ('bltin-file-objects', ''),
'SPECIALATTRIBUTES': ('specialattrs', ''),
'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'),
'MODULES': ('typesmodules', 'import'),
'PACKAGES': 'import',
'EXPRESSIONS': ('operator-summary', 'lambda or and not in is BOOLEAN '
'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '
'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '
'LISTS DICTIONARIES'),
'OPERATORS': 'EXPRESSIONS',
'PRECEDENCE': 'EXPRESSIONS',
'OBJECTS': ('objects', 'TYPES'),
'SPECIALMETHODS': ('specialnames', 'BASICMETHODS ATTRIBUTEMETHODS '
'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '
'NUMBERMETHODS CLASSES'),
'BASICMETHODS': ('customization', 'hash repr str SPECIALMETHODS'),
'ATTRIBUTEMETHODS': ('attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
'CALLABLEMETHODS': ('callable-types', 'CALLS SPECIALMETHODS'),
'SEQUENCEMETHODS': ('sequence-types', 'SEQUENCES SEQUENCEMETHODS '
'SPECIALMETHODS'),
'MAPPINGMETHODS': ('sequence-types', 'MAPPINGS SPECIALMETHODS'),
'NUMBERMETHODS': ('numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT '
'SPECIALMETHODS'),
'EXECUTION': ('execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
'NAMESPACES': ('naming', 'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),
'DYNAMICFEATURES': ('dynamic-features', ''),
'SCOPING': 'NAMESPACES',
'FRAMES': 'NAMESPACES',
'EXCEPTIONS': ('exceptions', 'try except finally raise'),
'CONVERSIONS': ('conversions', ''),
'IDENTIFIERS': ('identifiers', 'keywords SPECIALIDENTIFIERS'),
'SPECIALIDENTIFIERS': ('id-classes', ''),
'PRIVATENAMES': ('atom-identifiers', ''),
'LITERALS': ('atom-literals', 'STRINGS NUMBERS TUPLELITERALS '
'LISTLITERALS DICTIONARYLITERALS'),
'TUPLES': 'SEQUENCES',
'TUPLELITERALS': ('exprlists', 'TUPLES LITERALS'),
'LISTS': ('typesseq-mutable', 'LISTLITERALS'),
'LISTLITERALS': ('lists', 'LISTS LITERALS'),
'DICTIONARIES': ('typesmapping', 'DICTIONARYLITERALS'),
'DICTIONARYLITERALS': ('dict', 'DICTIONARIES LITERALS'),
'ATTRIBUTES': ('attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
'SUBSCRIPTS': ('subscriptions', 'SEQUENCEMETHODS'),
'SLICINGS': ('slicings', 'SEQUENCEMETHODS'),
'CALLS': ('calls', 'EXPRESSIONS'),
'POWER': ('power', 'EXPRESSIONS'),
'UNARY': ('unary', 'EXPRESSIONS'),
'BINARY': ('binary', 'EXPRESSIONS'),
'SHIFTING': ('shifting', 'EXPRESSIONS'),
'BITWISE': ('bitwise', 'EXPRESSIONS'),
'COMPARISON': ('comparisons', 'EXPRESSIONS BASICMETHODS'),
'BOOLEAN': ('booleans', 'EXPRESSIONS TRUTHVALUE'),
'ASSERTION': 'assert',
'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'),
'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'),
'DELETION': 'del',
'RETURNING': 'return',
'IMPORTING': 'import',
'CONDITIONAL': 'if',
'LOOPING': ('compound', 'for while break continue'),
'TRUTHVALUE': ('truth', 'if while and or not BASICMETHODS'),
'DEBUGGING': ('debugger', 'pdb'),
'CONTEXTMANAGERS': ('context-managers', 'with'),
}
def __init__(self, input=None, output=None):
self._input = input
self._output = output
#fix me brython
self.input = self._input or sys.stdin
self.output = self._output or sys.stdout
#fix me brython
#input = property(lambda self: self._input or sys.stdin)
#output = property(lambda self: self._output or sys.stdout)
def __repr__(self):
if inspect.stack()[1][3] == '?':
self()
return ''
return '<pydoc.Helper instance>'
_GoInteractive = object()
def __call__(self, request=_GoInteractive):
if request is not self._GoInteractive:
self.help(request)
else:
self.intro()
self.interact()
self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')
def interact(self):
self.output.write('\n')
while True:
try:
request = self.getline('help> ')
if not request: break
except (KeyboardInterrupt, EOFError):
break
request = replace(request, '"', '', "'", '').strip()
if request.lower() in ('q', 'quit'): break
self.help(request)
def getline(self, prompt):
"""Read one line, using input() when appropriate."""
if self.input is sys.stdin:
return input(prompt)
else:
self.output.write(prompt)
self.output.flush()
return self.input.readline()
def help(self, request):
if type(request) is type(''):
request = request.strip()
if request == 'help': self.intro()
elif request == 'keywords': self.listkeywords()
elif request == 'symbols': self.listsymbols()
elif request == 'topics': self.listtopics()
elif request == 'modules': self.listmodules()
elif request[:8] == 'modules ':
self.listmodules(request.split()[1])
elif request in self.symbols: self.showsymbol(request)
elif request in ['True', 'False', 'None']:
# special case these keywords since they are objects too
doc(eval(request), 'Help on %s:')
elif request in self.keywords: self.showtopic(request)
elif request in self.topics: self.showtopic(request)
elif request: doc(request, 'Help on %s:', output=self._output)
elif isinstance(request, Helper): self()
else: doc(request, 'Help on %s:', output=self._output)
self.output.write('\n')
def intro(self):
self.output.write('''
Welcome to Python %s! This is the interactive help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % tuple([sys.version[:3]]*2))
def list(self, items, columns=4, width=80):
items = list(sorted(items))
colw = width // columns
rows = (len(items) + columns - 1) // columns
for row in range(rows):
for col in range(columns):
i = col * rows + row
if i < len(items):
self.output.write(items[i])
if col < columns - 1:
self.output.write(' ' + ' ' * (colw - 1 - len(items[i])))
self.output.write('\n')
def listkeywords(self):
self.output.write('''
Here is a list of the Python keywords. Enter any keyword to get more help.
''')
self.list(self.keywords.keys())
def listsymbols(self):
self.output.write('''
Here is a list of the punctuation symbols which Python assigns special meaning
to. Enter any symbol to get more help.
''')
self.list(self.symbols.keys())
def listtopics(self):
self.output.write('''
Here is a list of available topics. Enter any topic name to get more help.
''')
self.list(self.topics.keys())
def showtopic(self, topic, more_xrefs=''):
try:
import pydoc_data.topics
except ImportError:
self.output.write('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''')
return
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
self.output.write('no documentation found for %s\n' % repr(topic))
return
if type(target) is type(''):
return self.showtopic(target, more_xrefs)
label, xrefs = target
try:
doc = pydoc_data.topics.topics[label]
except KeyError:
self.output.write('no documentation found for %s\n' % repr(topic))
return
pager(doc.strip() + '\n')
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
if xrefs:
import formatter
buffer = io.StringIO()
formatter.DumbWriter(buffer).send_flowing_data(
'Related help topics: ' + ', '.join(xrefs.split()) + '\n')
self.output.write('\n%s\n' % buffer.getvalue())
def _gettopic(self, topic, more_xrefs=''):
"""Return unbuffered tuple of (topic, xrefs).
If an error occurs here, the exception is caught and displayed by
the url handler.
This function duplicates the showtopic method but returns its
result directly so it can be formatted for display in an html page.
"""
try:
import pydoc_data.topics
except ImportError:
return('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''' , '')
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
raise ValueError('could not find topic')
if isinstance(target, str):
return self._gettopic(target, more_xrefs)
label, xrefs = target
doc = pydoc_data.topics.topics[label]
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
return doc, xrefs
def showsymbol(self, symbol):
target = self.symbols[symbol]
topic, _, xrefs = target.partition(' ')
self.showtopic(topic, xrefs)
def listmodules(self, key=''):
if key:
self.output.write('''
Here is a list of matching modules. Enter any module name to get more help.
''')
apropos(key)
else:
self.output.write('''
Please wait a moment while I gather a list of all available modules...
''')
modules = {}
def callback(path, modname, desc, modules=modules):
if modname and modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
if modname.find('.') < 0:
modules[modname] = 1
def onerror(modname):
callback(None, modname, None)
ModuleScanner().run(callback, onerror=onerror)
self.list(modules.keys())
self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
help = Helper()
class Scanner:
"""A generic tree iterator."""
def __init__(self, roots, children, descendp):
self.roots = roots[:]
self.state = []
self.children = children
self.descendp = descendp
def next(self):
if not self.state:
if not self.roots:
return None
root = self.roots.pop(0)
self.state = [(root, self.children(root))]
node, children = self.state[-1]
if not children:
self.state.pop()
return self.next()
child = children.pop(0)
if self.descendp(child):
self.state.append((child, self.children(child)))
return child
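# Hypothetical walk (not in the source): Scanner yields the children of
# each root depth-first, descending only while descendp() holds; the
# roots themselves are never yielded:
#   >>> s = Scanner([1], lambda n: [2*n, 2*n+1] if n < 4 else [], lambda n: n < 4)
#   >>> [s.next() for _ in range(6)]
#   [2, 4, 5, 3, 6, 7]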
class ModuleScanner:
"""An interruptible scanner that searches module synopses."""
def run(self, callback, key=None, completer=None, onerror=None):
if key: key = key.lower()
self.quit = False
seen = {}
for modname in sys.builtin_module_names:
if modname != '__main__':
seen[modname] = 1
if key is None:
callback(None, modname, '')
else:
name = __import__(modname).__doc__ or ''
desc = name.split('\n')[0]
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(None, modname, desc)
for importer, modname, ispkg in pkgutil.walk_packages(onerror=onerror):
if self.quit:
break
if key is None:
callback(None, modname, '')
else:
try:
loader = importer.find_module(modname)
except SyntaxError:
# raised by tests for bad coding cookies or BOM
continue
if hasattr(loader, 'get_source'):
try:
source = loader.get_source(modname)
except Exception:
if onerror:
onerror(modname)
continue
desc = source_synopsis(io.StringIO(source)) or ''
if hasattr(loader, 'get_filename'):
path = loader.get_filename(modname)
else:
path = None
else:
try:
module = loader.load_module(modname)
except ImportError:
if onerror:
onerror(modname)
continue
                    desc = module.__doc__.splitlines()[0] if module.__doc__ else ''
path = getattr(module,'__file__',None)
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(path, modname, desc)
if completer:
completer()
def apropos(key):
"""Print all the one-line module summaries that contain a substring."""
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
print(modname, desc and '- ' + desc)
def onerror(modname):
pass
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key, onerror=onerror)
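# Hypothetical usage (not in the source): apropos('pickle') would print a
# line per matching module, e.g. 'pickle - Create portable serialized
# representations of Python objects.' and a similar line for pickletools.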
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
"""Start an HTTP server thread on a specific port.
Start an HTML/text server thread, so HTML or text documents can be
browsed dynamically and interactively with a Web browser. Example use:
>>> import time
>>> import pydoc
Define a URL handler. To determine what the client is asking
for, check the URL and content_type.
Then get or generate some text or HTML code and return it.
>>> def my_url_handler(url, content_type):
... text = 'the URL sent was: (%s, %s)' % (url, content_type)
... return text
Start server thread on port 0.
If you use port 0, the server will pick a random port number.
You can then use serverthread.port to get the port number.
>>> port = 0
>>> serverthread = pydoc._start_server(my_url_handler, port)
Check that the server is really started. If it is, open browser
and get first page. Use serverthread.url as the starting page.
>>> if serverthread.serving:
... import webbrowser
The next two lines are commented out so a browser doesn't open if
doctest is run on this module.
#... webbrowser.open(serverthread.url)
#True
Let the server do its thing. We just need to monitor its status.
Use time.sleep so the loop doesn't hog the CPU.
>>> starttime = time.time()
>>> timeout = 1 #seconds
This is a short timeout for testing purposes.
>>> while serverthread.serving:
... time.sleep(.01)
... if serverthread.serving and time.time() - starttime > timeout:
... serverthread.stop()
... break
Print any errors that may have occurred.
>>> print(serverthread.error)
None
"""
import http.server
import email.message
import select
import threading
class DocHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
"""Process a request from an HTML browser.
The URL received is in self.path.
Get an HTML page from self.urlhandler and send it.
"""
if self.path.endswith('.css'):
content_type = 'text/css'
else:
content_type = 'text/html'
self.send_response(200)
self.send_header('Content-Type', '%s; charset=UTF-8' % content_type)
self.end_headers()
self.wfile.write(self.urlhandler(
self.path, content_type).encode('utf-8'))
def log_message(self, *args):
# Don't log messages.
pass
class DocServer(http.server.HTTPServer):
def __init__(self, port, callback):
self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
self.address = ('', port)
self.callback = callback
self.base.__init__(self, self.address, self.handler)
self.quit = False
def serve_until_quit(self):
while not self.quit:
rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
if rd:
self.handle_request()
self.server_close()
def server_activate(self):
self.base.server_activate(self)
if self.callback:
self.callback(self)
class ServerThread(threading.Thread):
def __init__(self, urlhandler, port):
self.urlhandler = urlhandler
self.port = int(port)
threading.Thread.__init__(self)
self.serving = False
self.error = None
def run(self):
"""Start the server."""
try:
DocServer.base = http.server.HTTPServer
DocServer.handler = DocHandler
DocHandler.MessageClass = email.message.Message
DocHandler.urlhandler = staticmethod(self.urlhandler)
docsvr = DocServer(self.port, self.ready)
self.docserver = docsvr
docsvr.serve_until_quit()
except Exception as e:
self.error = e
def ready(self, server):
self.serving = True
self.host = server.host
self.port = server.server_port
self.url = 'http://%s:%d/' % (self.host, self.port)
def stop(self):
"""Stop the server and this thread nicely"""
self.docserver.quit = True
self.serving = False
self.url = None
thread = ServerThread(urlhandler, port)
thread.start()
# Wait until thread.serving is True to make sure we are
# really up before returning.
while not thread.error and not thread.serving:
time.sleep(.01)
return thread
def _url_handler(url, content_type="text/html"):
"""The pydoc url handler for use with the pydoc server.
If the content_type is 'text/css', the _pydoc.css style
    sheet is read and returned if it exists.
If the content_type is 'text/html', then the result of
get_html_page(url) is returned.
"""
class _HTMLDoc(HTMLDoc):
def page(self, title, contents):
"""Format an HTML page."""
css_path = "pydoc_data/_pydoc.css"
css_link = (
'<link rel="stylesheet" type="text/css" href="%s">' %
css_path)
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Pydoc: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)
def filelink(self, url, path):
return '<a href="getfile?key=%s">%s</a>' % (url, path)
html = _HTMLDoc()
def html_navbar():
version = html.escape("%s [%s, %s]" % (platform.python_version(),
platform.python_build()[0],
platform.python_compiler()))
return """
<div style='float:left'>
Python %s<br>%s
</div>
<div style='float:right'>
<div style='text-align:center'>
<a href="index.html">Module Index</a>
: <a href="topics.html">Topics</a>
: <a href="keywords.html">Keywords</a>
</div>
<div>
<form action="get" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Get">
</form>
<form action="search" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Search">
</form>
</div>
</div>
""" % (version, html.escape(platform.platform(terse=True)))
def html_index():
"""Module Index page."""
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>Index of Modules</strong></big></big>',
'#ffffff', '#7799ee')
names = [name for name in sys.builtin_module_names
if name != '__main__']
contents = html.multicolumn(names, bltinlink)
contents = [heading, '<p>' + html.bigsection(
'Built-in Modules', '#ffffff', '#ee77aa', contents)]
seen = {}
for dir in sys.path:
contents.append(html.index(dir, seen))
contents.append(
'<p align=right><font color="#909090" face="helvetica,'
'arial"><strong>pydoc</strong> by Ka-Ping Yee'
            '&lt;ping@lfw.org&gt;</font>')
return 'Index of Modules', ''.join(contents)
def html_search(key):
"""Search results page."""
# scan for modules
search_result = []
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
search_result.append((modname, desc and '- ' + desc))
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key)
# format page
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
results = []
heading = html.heading(
'<big><big><strong>Search Results</strong></big></big>',
'#ffffff', '#7799ee')
for name, desc in search_result:
results.append(bltinlink(name) + desc)
contents = heading + html.bigsection(
'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
return 'Search Results', contents
def html_getfile(path):
"""Get and display a source file listing safely."""
path = path.replace('%20', ' ')
with tokenize.open(path) as fp:
lines = html.escape(fp.read())
body = '<pre>%s</pre>' % lines
heading = html.heading(
'<big><big><strong>File Listing</strong></big></big>',
'#ffffff', '#7799ee')
contents = heading + html.bigsection(
'File: %s' % path, '#ffffff', '#ee77aa', body)
return 'getfile %s' % path, contents
def html_topics():
"""Index of topic texts available."""
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.topics.keys())
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Topics', '#ffffff', '#ee77aa', contents)
return 'Topics', contents
def html_keywords():
"""Index of keywords."""
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.keywords.keys())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Keywords', '#ffffff', '#ee77aa', contents)
return 'Keywords', contents
def html_topicpage(topic):
"""Topic or keyword help page."""
buf = io.StringIO()
htmlhelp = Helper(buf, buf)
contents, xrefs = htmlhelp._gettopic(topic)
if topic in htmlhelp.keywords:
title = 'KEYWORD'
else:
title = 'TOPIC'
heading = html.heading(
'<big><big><strong>%s</strong></big></big>' % title,
'#ffffff', '#7799ee')
contents = '<pre>%s</pre>' % html.markup(contents)
contents = html.bigsection(topic , '#ffffff','#ee77aa', contents)
if xrefs:
xrefs = sorted(xrefs.split())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
xrefs = html.multicolumn(xrefs, bltinlink)
xrefs = html.section('Related help topics: ',
'#ffffff', '#ee77aa', xrefs)
return ('%s %s' % (title, topic),
''.join((heading, contents, xrefs)))
def html_getobj(url):
obj = locate(url, forceload=1)
if obj is None and url != 'None':
raise ValueError('could not find object')
title = describe(obj)
content = html.document(obj, url)
return title, content
def html_error(url, exc):
heading = html.heading(
'<big><big><strong>Error</strong></big></big>',
'#ffffff', '#7799ee')
contents = '<br>'.join(html.escape(line) for line in
format_exception_only(type(exc), exc))
contents = heading + html.bigsection(url, '#ffffff', '#bb0000',
contents)
return "Error - %s" % url, contents
def get_html_page(url):
"""Generate an HTML page for url."""
complete_url = url
if url.endswith('.html'):
url = url[:-5]
try:
if url in ("", "index"):
title, content = html_index()
elif url == "topics":
title, content = html_topics()
elif url == "keywords":
title, content = html_keywords()
elif '=' in url:
op, _, url = url.partition('=')
if op == "search?key":
title, content = html_search(url)
elif op == "getfile?key":
title, content = html_getfile(url)
elif op == "topic?key":
# try topics first, then objects.
try:
title, content = html_topicpage(url)
except ValueError:
title, content = html_getobj(url)
elif op == "get?key":
# try objects first, then topics.
if url in ("", "index"):
title, content = html_index()
else:
try:
title, content = html_getobj(url)
except ValueError:
title, content = html_topicpage(url)
else:
raise ValueError('bad pydoc url')
else:
title, content = html_getobj(url)
except Exception as exc:
# Catch any errors and display them in an error page.
title, content = html_error(complete_url, exc)
return html.page(title, content)
if url.startswith('/'):
url = url[1:]
if content_type == 'text/css':
path_here = os.path.dirname(os.path.realpath(__file__))
css_path = os.path.join(path_here, url)
with open(css_path) as fp:
return ''.join(fp.readlines())
elif content_type == 'text/html':
return get_html_page(url)
# Errors outside the url handler are caught by the server.
raise TypeError('unknown content type %r for url %s' % (content_type, url))
def browse(port=0, *, open_browser=True):
"""Start the enhanced pydoc Web server and open a Web browser.
Use port '0' to start the server on an arbitrary port.
Set open_browser to False to suppress opening a browser.
"""
import webbrowser
serverthread = _start_server(_url_handler, port)
if serverthread.error:
print(serverthread.error)
return
if serverthread.serving:
server_help_msg = 'Server commands: [b]rowser, [q]uit'
if open_browser:
webbrowser.open(serverthread.url)
try:
print('Server ready at', serverthread.url)
print(server_help_msg)
while serverthread.serving:
cmd = input('server> ')
cmd = cmd.lower()
if cmd == 'q':
break
elif cmd == 'b':
webbrowser.open(serverthread.url)
else:
print(server_help_msg)
except (KeyboardInterrupt, EOFError):
print()
finally:
if serverthread.serving:
serverthread.stop()
print('Server stopped')
# -------------------------------------------------- command-line interface
def ispath(x):
return isinstance(x, str) and x.find(os.sep) >= 0
def cli():
"""Command-line interface (looks at sys.argv to decide what to do)."""
import getopt
class BadUsage(Exception): pass
# Scripts don't get the current directory in their path by default
# unless they are run with the '-m' switch
if '' not in sys.path:
scriptdir = os.path.dirname(sys.argv[0])
if scriptdir in sys.path:
sys.path.remove(scriptdir)
sys.path.insert(0, '.')
try:
opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
writing = False
start_server = False
open_browser = False
port = None
for opt, val in opts:
if opt == '-b':
start_server = True
open_browser = True
if opt == '-k':
apropos(val)
return
if opt == '-p':
start_server = True
port = val
if opt == '-w':
writing = True
if start_server:
if port is None:
port = 0
browse(port, open_browser=open_browser)
return
if not args: raise BadUsage
for arg in args:
if ispath(arg) and not os.path.exists(arg):
print('file %r does not exist' % arg)
break
try:
if ispath(arg) and os.path.isfile(arg):
arg = importfile(arg)
if writing:
if ispath(arg) and os.path.isdir(arg):
writedocs(arg)
else:
writedoc(arg)
else:
help.help(arg)
except ErrorDuringImport as value:
print(value)
except (getopt.error, BadUsage):
cmd = os.path.splitext(os.path.basename(sys.argv[0]))[0]
print("""pydoc - the Python documentation tool
{cmd} <name> ...
Show text documentation on something. <name> may be the name of a
Python keyword, topic, function, module, or package, or a dotted
reference to a class or function within a module or module in a
package. If <name> contains a '{sep}', it is used as the path to a
Python source file to document. If name is 'keywords', 'topics',
or 'modules', a listing of these things is displayed.
{cmd} -k <keyword>
Search for a keyword in the synopsis lines of all available modules.
{cmd} -p <port>
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
{cmd} -b
Start an HTTP server on an arbitrary unused port and open a Web browser
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
""".format(cmd=cmd, sep=os.sep))
if __name__ == '__main__':
cli()
| gpl-3.0 |
jumpstarter-io/cinder | cinder/tests/brick/fake_lvm.py | 9 | 2074 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class FakeBrickLVM(object):
"""Logs and records calls, for unit tests."""
def __init__(self, vg_name, create, pv_list, vtype, execute=None):
super(FakeBrickLVM, self).__init__()
self.vg_size = '5.00'
self.vg_free_space = '5.00'
self.vg_name = vg_name
    def supports_thin_provisioning(self):
return False
    def get_all_volumes(self, vg_name=None):
if vg_name is not None:
return [vg_name]
return ['cinder-volumes', 'fake-vg-1']
def get_volumes(self):
return ['fake-volume']
def get_volume(self, name):
return ['name']
    def get_all_physical_volumes(self, vg_name=None):
return []
def get_physical_volumes(self):
return []
    def get_all_volume_groups(self, vg_name=None):
return ['cinder-volumes', 'fake-vg']
def update_volume_group_info(self):
pass
def create_thin_pool(self, name=None, size_str=0):
pass
def create_volume(self, name, size_str, lv_type='default', mirror_count=0):
pass
def create_lv_snapshot(self, name, source_lv_name, lv_type='default'):
pass
def delete(self, name):
pass
def revert(self, snapshot_name):
pass
def lv_has_snapshot(self, name):
return False
def activate_lv(self, lv, is_snapshot=False):
pass
def rename_volume(self, lv_name, new_name):
pass
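    # Illustrative use as a test double (example values assumed, not part
    # of the original module):
    #   vg = FakeBrickLVM('cinder-volumes', False, [], 'default')
    #   vg.get_volumes()          # -> ['fake-volume']
    #   vg.lv_has_snapshot('lv0') # -> False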
| apache-2.0 |
puneetgkaur/backup_sugar_shell_for_cordova | src/jarabe/model/screen.py | 4 | 1532 | # Copyright (C) 2006-2008 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import dbus
_HARDWARE_MANAGER_INTERFACE = 'org.freedesktop.ohm.Keystore'
_HARDWARE_MANAGER_SERVICE = 'org.freedesktop.ohm'
_HARDWARE_MANAGER_OBJECT_PATH = '/org/freedesktop/ohm/Keystore'
_ohm_service = None
def _get_ohm():
global _ohm_service
if _ohm_service is None:
bus = dbus.SystemBus()
proxy = bus.get_object(_HARDWARE_MANAGER_SERVICE,
_HARDWARE_MANAGER_OBJECT_PATH,
follow_name_owner_changes=True)
_ohm_service = dbus.Interface(proxy, _HARDWARE_MANAGER_INTERFACE)
return _ohm_service
def set_dcon_freeze(frozen):
try:
_get_ohm().SetKey('display.dcon_freeze', frozen)
except dbus.DBusException:
logging.error('Cannot unfreeze the DCON')
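# Illustrative calls (assumed values, not part of the original module):
#   set_dcon_freeze(1)  # freeze the DCON before redrawing the screen
#   set_dcon_freeze(0)  # unfreeze it afterwards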
| gpl-2.0 |
onecue/pybbm | test/test_project/test_app/south_migrations/0001_initial.py | 6 | 8667 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from pybb.compat import get_image_field_full_name, get_user_model_path, get_user_frozen_models
AUTH_USER = get_user_model_path()
class Migration(SchemaMigration):
def forwards(self, orm):
if AUTH_USER == 'test_app.CustomUser':
# Adding model 'CustomUser'
db.create_table('test_app_customuser', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
('username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal('test_app', ['CustomUser'])
# Adding M2M table for field groups on 'CustomUser'
m2m_table_name = db.shorten_name('test_app_customuser_groups')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('customuser', models.ForeignKey(orm['test_app.customuser'], null=False)),
('group', models.ForeignKey(orm['auth.group'], null=False))
))
db.create_unique(m2m_table_name, ['customuser_id', 'group_id'])
# Adding M2M table for field user_permissions on 'CustomUser'
m2m_table_name = db.shorten_name('test_app_customuser_user_permissions')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('customuser', models.ForeignKey(orm['test_app.customuser'], null=False)),
('permission', models.ForeignKey(orm['auth.permission'], null=False))
))
db.create_unique(m2m_table_name, ['customuser_id', 'permission_id'])
# Adding model 'CustomProfile'
db.create_table('test_app_customprofile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('signature', self.gf('django.db.models.fields.TextField')(max_length=1024, blank=True)),
('signature_html', self.gf('django.db.models.fields.TextField')(max_length=1054, blank=True)),
('time_zone', self.gf('django.db.models.fields.FloatField')(default=3.0)),
('language', self.gf('django.db.models.fields.CharField')(default='en-us', max_length=10, blank=True)),
('show_signatures', self.gf('django.db.models.fields.BooleanField')(default=True)),
('post_count', self.gf('django.db.models.fields.IntegerField')(default=0, blank=True)),
('avatar', self.gf(get_image_field_full_name())(max_length=100, null=True, blank=True)),
('autosubscribe', self.gf('django.db.models.fields.BooleanField')(default=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='pybb_customprofile', unique=True, to=orm[AUTH_USER])),
))
db.send_create_signal('test_app', ['CustomProfile'])
def backwards(self, orm):
if AUTH_USER == 'test_app.CustomUser':
# Deleting model 'CustomUser'
db.delete_table('test_app_customuser')
# Removing M2M table for field groups on 'CustomUser'
db.delete_table(db.shorten_name('test_app_customuser_groups'))
# Removing M2M table for field user_permissions on 'CustomUser'
db.delete_table(db.shorten_name('test_app_customuser_user_permissions'))
# Deleting model 'CustomProfile'
db.delete_table('test_app_customprofile')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'test_app.customprofile': {
'Meta': {'object_name': 'CustomProfile'},
'autosubscribe': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'avatar': (get_image_field_full_name(), [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en-us'", 'max_length': '10', 'blank': 'True'}),
'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'show_signatures': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'signature': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'blank': 'True'}),
'signature_html': ('django.db.models.fields.TextField', [], {'max_length': '1054', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '3.0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'pybb_customprofile'", 'unique': 'True', 'to': "orm['%s']" % AUTH_USER})
},
}
if AUTH_USER == 'test_app.CustomUser':
models['test_app.customuser'] = {
'Meta': {'object_name': 'CustomUser'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
}
else:
models.update(get_user_frozen_models(AUTH_USER))
complete_apps = ['test_app'] | bsd-2-clause |
chouseknecht/galaxy | galaxy/wsgi.py | 1 | 1085 | # (c) 2012-2018, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
"""
WSGI config for Galaxy project.
"""
import os
from django.core.wsgi import get_wsgi_application
from galaxy import prepare_env
# For public Galaxy, we need to default /etc/galaxy/settings.py
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings.custom')
# Prepare the galaxy environment.
prepare_env()
# Return the default Django WSGI application.
application = get_wsgi_application()
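# Illustrative deployment (assumed command, not part of the original file):
# any WSGI server can serve this module's `application` object, e.g.
#   gunicorn galaxy.wsgi:application --bind 0.0.0.0:8000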
| apache-2.0 |
dcalacci/Interactive_estimation | game/interactive/migrations/0001_squashed_0011_interactive_channel.py | 2 | 3024 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-25 06:31
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
replaces = [('interactive', '0001_initial'), ('interactive', '0002_auto_20160924_2245'), ('interactive', '0003_auto_20160924_2245'), ('interactive', '0004_auto_20160925_0129'), ('interactive', '0005_auto_20160925_0132'), ('interactive', '0006_auto_20160925_0201'), ('interactive', '0007_auto_20160925_0204'), ('interactive', '0008_auto_20160925_0207'), ('interactive', '0009_auto_20160925_0210'), ('interactive', '0010_auto_20160925_0213'), ('interactive', '0011_interactive_channel')]
initial = True
dependencies = [
('round', '0002_round_round_order'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Interactive',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('end_time', models.DateTimeField(null=True)),
('start_time', models.DateTimeField(auto_now_add=True, null=True)),
],
),
migrations.CreateModel(
name='InteractiveRound',
fields=[
('round_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='round.Round')),
('influencers', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
],
bases=('round.round',),
),
migrations.AddField(
model_name='interactive',
name='users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='interactive',
name='started',
field=models.BooleanField(default=False),
),
migrations.CreateModel(
name='Settings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('max_users', models.PositiveSmallIntegerField()),
('min_users', models.PositiveSmallIntegerField()),
('max_influencers', models.PositiveSmallIntegerField()),
('min_influencers', models.PositiveSmallIntegerField()),
],
),
migrations.AddField(
model_name='interactive',
name='constraints',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='interactive.Settings'),
),
migrations.AddField(
model_name='interactive',
name='channel',
field=models.CharField(default='Helloworld', max_length=100, unique=True),
preserve_default=False,
),
]
| mit |
interactomix/iis | iis/tests/test_daemonize.py | 1 | 1182 | import time
from iis.extensions import db
from test_utils.base import BaseTestCase
from iis.util.daemons import daemonize
import iis.models
class TestDaemonize(BaseTestCase):
DAEMON_PID_PATH = "/tmp"
def test_no_exception_raised_and_returns_pid(self):
self.app.logger.debug("Testing daemonize")
def test_worker(uuid):
self.app.logger.debug("Executing test process")
comp = iis.models.Computation(process_uid=uuid, input_data="",
output_data="", status="finished",
progress=100)
db.session.add(comp)
db.session.commit()
self.app.logger.debug("Commited Computation model.")
uid = daemonize(test_worker,
pid_base=TestDaemonize.DAEMON_PID_PATH) # type: str
self.assertTrue(isinstance(uid, str))
time.sleep(5)
self.app.logger.debug("Accessing Computation model.")
comp = iis.models.Computation.query.filter_by(process_uid=uid).first()
self.assertEqual(comp.status, "finished")
self.app.logger.debug("Testing daemonize successful")
| agpl-3.0 |
SRabbelier/Melange | app/soc/modules/gci/logic/models/timeline.py | 1 | 1347 | #!/usr/bin/env python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GCITimeline (Model) query functions.
"""
__authors__ = [
'"Madhusudan.C.S" <[email protected]>'
]
from soc.logic.models import timeline
from soc.logic.models import sponsor as sponsor_logic
import soc.models.timeline
import soc.modules.gci.models.timeline
class Logic(timeline.Logic):
"""Logic methods for the GCITimeline model.
"""
def __init__(self, model=soc.modules.gci.models.timeline.GCITimeline,
base_model=soc.models.timeline.Timeline,
scope_logic=sponsor_logic):
"""Defines the name, key_name and model for this entity.
"""
super(Logic, self).__init__(model, base_model=base_model,
scope_logic=scope_logic)
logic = Logic()
| apache-2.0 |
hojel/calibre | src/chardet/escprober.py | 215 | 3029 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants, sys
from escsm import HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, ISO2022KRSMModel
from charsetprober import CharSetProber
from codingstatemachine import CodingStateMachine
class EscCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = [ \
CodingStateMachine(HZSMModel),
CodingStateMachine(ISO2022CNSMModel),
CodingStateMachine(ISO2022JPSMModel),
CodingStateMachine(ISO2022KRSMModel)
]
self.reset()
def reset(self):
CharSetProber.reset(self)
for codingSM in self._mCodingSM:
if not codingSM: continue
codingSM.active = constants.True
codingSM.reset()
self._mActiveSM = len(self._mCodingSM)
self._mDetectedCharset = None
def get_charset_name(self):
return self._mDetectedCharset
def get_confidence(self):
if self._mDetectedCharset:
return 0.99
else:
return 0.00
def feed(self, aBuf):
for c in aBuf:
for codingSM in self._mCodingSM:
if not codingSM: continue
if not codingSM.active: continue
codingState = codingSM.next_state(c)
if codingState == constants.eError:
codingSM.active = constants.False
self._mActiveSM -= 1
if self._mActiveSM <= 0:
self._mState = constants.eNotMe
return self.get_state()
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
self._mDetectedCharset = codingSM.get_coding_state_machine()
return self.get_state()
return self.get_state()
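# Minimal usage sketch (editor addition, hedged): exercises the prober on an
# ISO-2022 escape sequence. Assumes Python 2 and execution inside the chardet
# package (e.g. ``python -m chardet.escprober``) so the relative imports above
# resolve; the input bytes are an illustrative assumption.
if __name__ == '__main__':
    _prober = EscCharSetProber()
    _prober.feed('\x1b$B')  # ESC $ B: switch to JIS X 0208 in ISO-2022-JP
    print _prober.get_charset_name(), _prober.get_confidence()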
| gpl-3.0 |
Vimos/scikit-learn | sklearn/metrics/__init__.py | 28 | 3604 | """
The :mod:`sklearn.metrics` module includes score functions, performance
metrics, pairwise metrics, and distance computations.
"""
from .ranking import auc
from .ranking import average_precision_score
from .ranking import coverage_error
from .ranking import label_ranking_average_precision_score
from .ranking import label_ranking_loss
from .ranking import precision_recall_curve
from .ranking import roc_auc_score
from .ranking import roc_curve
from .classification import accuracy_score
from .classification import classification_report
from .classification import cohen_kappa_score
from .classification import confusion_matrix
from .classification import f1_score
from .classification import fbeta_score
from .classification import hamming_loss
from .classification import hinge_loss
from .classification import jaccard_similarity_score
from .classification import log_loss
from .classification import matthews_corrcoef
from .classification import precision_recall_fscore_support
from .classification import precision_score
from .classification import recall_score
from .classification import zero_one_loss
from .classification import brier_score_loss
from . import cluster
from .cluster import adjusted_mutual_info_score
from .cluster import adjusted_rand_score
from .cluster import completeness_score
from .cluster import consensus_score
from .cluster import homogeneity_completeness_v_measure
from .cluster import homogeneity_score
from .cluster import mutual_info_score
from .cluster import normalized_mutual_info_score
from .cluster import fowlkes_mallows_score
from .cluster import silhouette_samples
from .cluster import silhouette_score
from .cluster import calinski_harabaz_score
from .cluster import v_measure_score
from .pairwise import euclidean_distances
from .pairwise import pairwise_distances
from .pairwise import pairwise_distances_argmin
from .pairwise import pairwise_distances_argmin_min
from .pairwise import pairwise_kernels
from .regression import explained_variance_score
from .regression import mean_absolute_error
from .regression import mean_squared_error
from .regression import mean_squared_log_error
from .regression import median_absolute_error
from .regression import r2_score
from .scorer import make_scorer
from .scorer import SCORERS
from .scorer import get_scorer
__all__ = [
'accuracy_score',
'adjusted_mutual_info_score',
'adjusted_rand_score',
'auc',
'average_precision_score',
'classification_report',
'cluster',
'completeness_score',
'confusion_matrix',
'consensus_score',
'coverage_error',
'euclidean_distances',
'explained_variance_score',
'f1_score',
'fbeta_score',
'get_scorer',
'hamming_loss',
'hinge_loss',
'homogeneity_completeness_v_measure',
'homogeneity_score',
'jaccard_similarity_score',
'label_ranking_average_precision_score',
'label_ranking_loss',
'log_loss',
'make_scorer',
'matthews_corrcoef',
'mean_absolute_error',
'mean_squared_error',
'mean_squared_log_error',
'median_absolute_error',
'mutual_info_score',
'normalized_mutual_info_score',
'pairwise_distances',
'pairwise_distances_argmin',
'pairwise_distances_argmin_min',
'pairwise_kernels',
'precision_recall_curve',
'precision_recall_fscore_support',
'precision_score',
'r2_score',
'recall_score',
'roc_auc_score',
'roc_curve',
'SCORERS',
'silhouette_samples',
'silhouette_score',
'v_measure_score',
'zero_one_loss',
'brier_score_loss',
]
| bsd-3-clause |
nikitasingh981/scikit-learn | sklearn/decomposition/fastica_.py | 9 | 18717 | """
Python implementation of the fast ICA algorithms.
Reference: Tables 8.3 and 8.4 page 196 in the book:
Independent Component Analysis, by Hyvarinen et al.
"""
# Authors: Pierre Lafaye de Micheaux, Stefan van der Walt, Gael Varoquaux,
# Bertrand Thirion, Alexandre Gramfort, Denis A. Engemann
# License: BSD 3 clause
import warnings
import numpy as np
from scipy import linalg
from ..base import BaseEstimator, TransformerMixin
from ..externals import six
from ..externals.six import moves
from ..utils import check_array, as_float_array, check_random_state
from ..utils.extmath import fast_dot
from ..utils.validation import check_is_fitted
from ..utils.validation import FLOAT_DTYPES
__all__ = ['fastica', 'FastICA']
def _gs_decorrelation(w, W, j):
"""
Orthonormalize w wrt the first j rows of W
Parameters
----------
w : ndarray of shape(n)
Array to be orthogonalized
W : ndarray of shape(p, n)
Null space definition
    j : int < p
        The number of rows of W (counted from the first) with respect to which
        w is orthogonalized.
Notes
-----
Assumes that W is orthogonal
w changed in place
"""
w -= np.dot(np.dot(w, W[:j].T), W[:j])
return w
def _sym_decorrelation(W):
""" Symmetric decorrelation
i.e. W <- (W * W.T) ^{-1/2} * W
"""
s, u = linalg.eigh(np.dot(W, W.T))
# u (resp. s) contains the eigenvectors (resp. square roots of
# the eigenvalues) of W * W.T
return np.dot(np.dot(u * (1. / np.sqrt(s)), u.T), W)
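# Sanity sketch (editor addition): after symmetric decorrelation the rows of W
# are orthonormal, i.e. W W.T ~= I. Runs only when this module is executed
# directly (e.g. ``python -m sklearn.decomposition.fastica_`` so the relative
# imports above resolve).
if __name__ == '__main__':
    _W = _sym_decorrelation(np.random.RandomState(0).normal(size=(3, 3)))
    assert np.allclose(np.dot(_W, _W.T), np.eye(3))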
def _ica_def(X, tol, g, fun_args, max_iter, w_init):
"""Deflationary FastICA using fun approx to neg-entropy function
Used internally by FastICA.
"""
n_components = w_init.shape[0]
W = np.zeros((n_components, n_components), dtype=X.dtype)
n_iter = []
# j is the index of the extracted component
for j in range(n_components):
w = w_init[j, :].copy()
w /= np.sqrt((w ** 2).sum())
for i in moves.xrange(max_iter):
gwtx, g_wtx = g(fast_dot(w.T, X), fun_args)
w1 = (X * gwtx).mean(axis=1) - g_wtx.mean() * w
_gs_decorrelation(w1, W, j)
w1 /= np.sqrt((w1 ** 2).sum())
lim = np.abs(np.abs((w1 * w).sum()) - 1)
w = w1
if lim < tol:
break
n_iter.append(i + 1)
W[j, :] = w
return W, max(n_iter)
def _ica_par(X, tol, g, fun_args, max_iter, w_init):
"""Parallel FastICA.
Used internally by FastICA --main loop
"""
W = _sym_decorrelation(w_init)
del w_init
p_ = float(X.shape[1])
for ii in moves.xrange(max_iter):
gwtx, g_wtx = g(fast_dot(W, X), fun_args)
W1 = _sym_decorrelation(fast_dot(gwtx, X.T) / p_
- g_wtx[:, np.newaxis] * W)
del gwtx, g_wtx
# builtin max, abs are faster than numpy counter parts.
lim = max(abs(abs(np.diag(fast_dot(W1, W.T))) - 1))
W = W1
if lim < tol:
break
else:
warnings.warn('FastICA did not converge. Consider increasing '
'tolerance or the maximum number of iterations.')
return W, ii + 1
# Some standard non-linear functions.
# XXX: these should be optimized, as they can be a bottleneck.
def _logcosh(x, fun_args=None):
alpha = fun_args.get('alpha', 1.0) # comment it out?
x *= alpha
gx = np.tanh(x, x) # apply the tanh inplace
g_x = np.empty(x.shape[0])
# XXX compute in chunks to avoid extra allocation
for i, gx_i in enumerate(gx): # please don't vectorize.
g_x[i] = (alpha * (1 - gx_i ** 2)).mean()
return gx, g_x
def _exp(x, fun_args):
exp = np.exp(-(x ** 2) / 2)
gx = x * exp
g_x = (1 - x ** 2) * exp
return gx, g_x.mean(axis=-1)
def _cube(x, fun_args):
return x ** 3, (3 * x ** 2).mean(axis=-1)
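# Shape check (editor addition, same execution caveat as above): each G
# function returns the element-wise nonlinearity g(x) plus the row-wise mean
# of its derivative, which is exactly what the fixed-point updates consume.
if __name__ == '__main__':
    _x = np.random.RandomState(0).normal(size=(2, 5))
    _gx, _g_x = _cube(_x, None)
    assert _gx.shape == (2, 5) and _g_x.shape == (2,)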
def fastica(X, n_components=None, algorithm="parallel", whiten=True,
fun="logcosh", fun_args=None, max_iter=200, tol=1e-04, w_init=None,
random_state=None, return_X_mean=False, compute_sources=True,
return_n_iter=False):
"""Perform Fast Independent Component Analysis.
Read more in the :ref:`User Guide <ICA>`.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
n_components : int, optional
Number of components to extract. If None no dimension reduction
is performed.
algorithm : {'parallel', 'deflation'}, optional
Apply a parallel or deflational FASTICA algorithm.
whiten : boolean, optional
If True perform an initial whitening of the data.
If False, the data is assumed to have already been
preprocessed: it should be centered, normed and white.
Otherwise you will get incorrect results.
In this case the parameter n_components will be ignored.
fun : string or function, optional. Default: 'logcosh'
The functional form of the G function used in the
approximation to neg-entropy. Could be either 'logcosh', 'exp',
or 'cube'.
You can also provide your own function. It should return a tuple
containing the value of the function, and of its derivative, in the
point. Example:
def my_g(x):
return x ** 3, 3 * x ** 2
fun_args : dictionary, optional
Arguments to send to the functional form.
If empty or None and if fun='logcosh', fun_args will take value
{'alpha' : 1.0}
max_iter : int, optional
Maximum number of iterations to perform.
tol : float, optional
A positive scalar giving the tolerance at which the
un-mixing matrix is considered to have converged.
w_init : (n_components, n_components) array, optional
Initial un-mixing array of dimension (n.comp,n.comp).
If None (default) then an array of normal r.v.'s is used.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
return_X_mean : bool, optional
If True, X_mean is returned too.
compute_sources : bool, optional
If False, sources are not computed, but only the rotation matrix.
This can save memory when working with big data. Defaults to True.
return_n_iter : bool, optional
Whether or not to return the number of iterations.
Returns
-------
K : array, shape (n_components, n_features) | None.
If whiten is 'True', K is the pre-whitening matrix that projects data
onto the first n_components principal components. If whiten is 'False',
K is 'None'.
W : array, shape (n_components, n_components)
Estimated un-mixing matrix.
The mixing matrix can be obtained by::
w = np.dot(W, K.T)
A = w.T * (w * w.T).I
S : array, shape (n_samples, n_components) | None
Estimated source matrix
X_mean : array, shape (n_features, )
The mean over features. Returned only if return_X_mean is True.
n_iter : int
If the algorithm is "deflation", n_iter is the
maximum number of iterations run across all components. Else
they are just the number of iterations taken to converge. This is
returned only when return_n_iter is set to `True`.
Notes
-----
The data matrix X is considered to be a linear combination of
non-Gaussian (independent) components i.e. X = AS where columns of S
contain the independent components and A is a linear mixing
matrix. In short ICA attempts to `un-mix' the data by estimating an
un-mixing matrix W where ``S = W K X.``
This implementation was originally made for data of shape
[n_features, n_samples]. Now the input is transposed
before the algorithm is applied. This makes it slightly
faster for Fortran-ordered input.
Implemented using FastICA:
`A. Hyvarinen and E. Oja, Independent Component Analysis:
Algorithms and Applications, Neural Networks, 13(4-5), 2000,
pp. 411-430`
"""
random_state = check_random_state(random_state)
fun_args = {} if fun_args is None else fun_args
# make interface compatible with other decompositions
# a copy is required only for non whitened data
X = check_array(X, copy=whiten, dtype=FLOAT_DTYPES).T
alpha = fun_args.get('alpha', 1.0)
if not 1 <= alpha <= 2:
raise ValueError('alpha must be in [1,2]')
if fun == 'logcosh':
g = _logcosh
elif fun == 'exp':
g = _exp
elif fun == 'cube':
g = _cube
elif callable(fun):
def g(x, fun_args):
return fun(x, **fun_args)
else:
exc = ValueError if isinstance(fun, six.string_types) else TypeError
raise exc("Unknown function %r;"
" should be one of 'logcosh', 'exp', 'cube' or callable"
% fun)
n, p = X.shape
if not whiten and n_components is not None:
n_components = None
warnings.warn('Ignoring n_components with whiten=False.')
if n_components is None:
n_components = min(n, p)
if (n_components > min(n, p)):
n_components = min(n, p)
warnings.warn('n_components is too large: it will be set to %s' % n_components)
if whiten:
# Centering the columns (ie the variables)
X_mean = X.mean(axis=-1)
X -= X_mean[:, np.newaxis]
# Whitening and preprocessing by PCA
u, d, _ = linalg.svd(X, full_matrices=False)
del _
K = (u / d).T[:n_components] # see (6.33) p.140
del u, d
X1 = np.dot(K, X)
# see (13.6) p.267 Here X1 is white and data
# in X has been projected onto a subspace by PCA
X1 *= np.sqrt(p)
else:
# X must be casted to floats to avoid typing issues with numpy
# 2.0 and the line below
X1 = as_float_array(X, copy=False) # copy has been taken care of
if w_init is None:
w_init = np.asarray(random_state.normal(size=(n_components,
n_components)), dtype=X1.dtype)
else:
w_init = np.asarray(w_init)
if w_init.shape != (n_components, n_components):
raise ValueError('w_init has invalid shape -- should be %(shape)s'
% {'shape': (n_components, n_components)})
kwargs = {'tol': tol,
'g': g,
'fun_args': fun_args,
'max_iter': max_iter,
'w_init': w_init}
if algorithm == 'parallel':
W, n_iter = _ica_par(X1, **kwargs)
elif algorithm == 'deflation':
W, n_iter = _ica_def(X1, **kwargs)
else:
raise ValueError('Invalid algorithm: must be either `parallel` or'
' `deflation`.')
del X1
if whiten:
if compute_sources:
S = fast_dot(fast_dot(W, K), X).T
else:
S = None
if return_X_mean:
if return_n_iter:
return K, W, S, X_mean, n_iter
else:
return K, W, S, X_mean
else:
if return_n_iter:
return K, W, S, n_iter
else:
return K, W, S
else:
if compute_sources:
S = fast_dot(W, X).T
else:
S = None
if return_X_mean:
if return_n_iter:
return None, W, S, None, n_iter
else:
return None, W, S, None
else:
if return_n_iter:
return None, W, S, n_iter
else:
return None, W, S
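# End-to-end sketch (editor addition, same execution caveat as above): mix two
# toy sources with a random 2x2 matrix and recover them; the sizes and signals
# are illustrative assumptions.
if __name__ == '__main__':
    _rng = np.random.RandomState(42)
    _S = np.c_[np.sin(np.linspace(0, 8, 200)), _rng.laplace(size=200)]
    _X = np.dot(_S, _rng.normal(size=(2, 2)))  # observed mixtures
    _K, _W, _S_est = fastica(_X, n_components=2, random_state=0)
    assert _S_est.shape == (200, 2)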
class FastICA(BaseEstimator, TransformerMixin):
"""FastICA: a fast algorithm for Independent Component Analysis.
Read more in the :ref:`User Guide <ICA>`.
Parameters
----------
n_components : int, optional
Number of components to use. If none is passed, all are used.
algorithm : {'parallel', 'deflation'}
Apply parallel or deflational algorithm for FastICA.
whiten : boolean, optional
If whiten is false, the data is already considered to be
whitened, and no whitening is performed.
fun : string or function, optional. Default: 'logcosh'
The functional form of the G function used in the
approximation to neg-entropy. Could be either 'logcosh', 'exp',
or 'cube'.
You can also provide your own function. It should return a tuple
containing the value of the function, and of its derivative, in the
point. Example:
def my_g(x):
return x ** 3, 3 * x ** 2
fun_args : dictionary, optional
Arguments to send to the functional form.
If empty and if fun='logcosh', fun_args will take value
{'alpha' : 1.0}.
max_iter : int, optional
Maximum number of iterations during fit.
tol : float, optional
Tolerance on update at each iteration.
w_init : None of an (n_components, n_components) ndarray
The mixing matrix to be used to initialize the algorithm.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
components_ : 2D array, shape (n_components, n_features)
The unmixing matrix.
mixing_ : array, shape (n_features, n_components)
The mixing matrix.
n_iter_ : int
If the algorithm is "deflation", n_iter is the
maximum number of iterations run across all components. Else
they are just the number of iterations taken to converge.
Notes
-----
Implementation based on
`A. Hyvarinen and E. Oja, Independent Component Analysis:
Algorithms and Applications, Neural Networks, 13(4-5), 2000,
pp. 411-430`
"""
def __init__(self, n_components=None, algorithm='parallel', whiten=True,
fun='logcosh', fun_args=None, max_iter=200, tol=1e-4,
w_init=None, random_state=None):
super(FastICA, self).__init__()
self.n_components = n_components
self.algorithm = algorithm
self.whiten = whiten
self.fun = fun
self.fun_args = fun_args
self.max_iter = max_iter
self.tol = tol
self.w_init = w_init
self.random_state = random_state
def _fit(self, X, compute_sources=False):
"""Fit the model
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
compute_sources : bool
        If False, sources are not computed, but only the rotation matrix.
This can save memory when working with big data. Defaults to False.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
fun_args = {} if self.fun_args is None else self.fun_args
whitening, unmixing, sources, X_mean, self.n_iter_ = fastica(
X=X, n_components=self.n_components, algorithm=self.algorithm,
whiten=self.whiten, fun=self.fun, fun_args=fun_args,
max_iter=self.max_iter, tol=self.tol, w_init=self.w_init,
random_state=self.random_state, return_X_mean=True,
compute_sources=compute_sources, return_n_iter=True)
if self.whiten:
self.components_ = np.dot(unmixing, whitening)
self.mean_ = X_mean
self.whitening_ = whitening
else:
self.components_ = unmixing
self.mixing_ = linalg.pinv(self.components_)
if compute_sources:
self.__sources = sources
return sources
def fit_transform(self, X, y=None):
"""Fit the model and recover the sources from X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
return self._fit(X, compute_sources=True)
def fit(self, X, y=None):
"""Fit the model to X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
self
"""
self._fit(X, compute_sources=False)
return self
def transform(self, X, y=None, copy=True):
"""Recover the sources from X (apply the unmixing matrix).
Parameters
----------
X : array-like, shape (n_samples, n_features)
Data to transform, where n_samples is the number of samples
and n_features is the number of features.
copy : bool (optional)
If False, data passed to fit are overwritten. Defaults to True.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
check_is_fitted(self, 'mixing_')
X = check_array(X, copy=copy, dtype=FLOAT_DTYPES)
if self.whiten:
X -= self.mean_
return fast_dot(X, self.components_.T)
def inverse_transform(self, X, copy=True):
"""Transform the sources back to the mixed data (apply mixing matrix).
Parameters
----------
X : array-like, shape (n_samples, n_components)
Sources, where n_samples is the number of samples
and n_components is the number of components.
copy : bool (optional)
If False, data passed to fit are overwritten. Defaults to True.
Returns
-------
X_new : array-like, shape (n_samples, n_features)
"""
check_is_fitted(self, 'mixing_')
X = check_array(X, copy=(copy and self.whiten), dtype=FLOAT_DTYPES)
X = fast_dot(X, self.mixing_.T)
if self.whiten:
X += self.mean_
return X
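# Estimator-style sketch (editor addition, same execution caveat as above):
# with as many components as features, fit_transform() followed by
# inverse_transform() reconstructs the input up to numerical error.
if __name__ == '__main__':
    _X = np.random.RandomState(0).normal(size=(100, 3))
    _ica = FastICA(n_components=3, random_state=0)
    _S = _ica.fit_transform(_X)
    assert np.allclose(_X, _ica.inverse_transform(_S), atol=1e-6)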
| bsd-3-clause |
sysalexis/kbengine | kbe/src/lib/python/Lib/tokenize.py | 78 | 24876 | """Tokenization help for Python programs.
tokenize(readline) is a generator that breaks a stream of bytes into
Python tokens. It decodes the bytes according to PEP-0263 for
determining source file encoding.
It accepts a readline-like method which is called repeatedly to get the
next line of input (or b"" for EOF). It generates 5-tuples with these
members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators. Additionally, all token lists start with an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
__author__ = 'Ka-Ping Yee <[email protected]>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
'Michael Foord')
import builtins
from codecs import lookup, BOM_UTF8
import collections
from io import TextIOWrapper
from itertools import chain
import re
import sys
from token import *
cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII)
blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
import token
__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
"NL", "untokenize", "ENCODING", "TokenInfo"]
del token
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
ENCODING = N_TOKENS + 2
tok_name[ENCODING] = 'ENCODING'
N_TOKENS += 3
EXACT_TOKEN_TYPES = {
'(': LPAR,
')': RPAR,
'[': LSQB,
']': RSQB,
':': COLON,
',': COMMA,
';': SEMI,
'+': PLUS,
'-': MINUS,
'*': STAR,
'/': SLASH,
'|': VBAR,
'&': AMPER,
'<': LESS,
'>': GREATER,
'=': EQUAL,
'.': DOT,
'%': PERCENT,
'{': LBRACE,
'}': RBRACE,
'==': EQEQUAL,
'!=': NOTEQUAL,
'<=': LESSEQUAL,
'>=': GREATEREQUAL,
'~': TILDE,
'^': CIRCUMFLEX,
'<<': LEFTSHIFT,
'>>': RIGHTSHIFT,
'**': DOUBLESTAR,
'+=': PLUSEQUAL,
'-=': MINEQUAL,
'*=': STAREQUAL,
'/=': SLASHEQUAL,
'%=': PERCENTEQUAL,
'&=': AMPEREQUAL,
'|=': VBAREQUAL,
'^=': CIRCUMFLEXEQUAL,
'<<=': LEFTSHIFTEQUAL,
'>>=': RIGHTSHIFTEQUAL,
'**=': DOUBLESTAREQUAL,
'//': DOUBLESLASH,
'//=': DOUBLESLASHEQUAL,
'@': AT
}
class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
def __repr__(self):
annotated_type = '%d (%s)' % (self.type, tok_name[self.type])
return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
self._replace(type=annotated_type))
@property
def exact_type(self):
if self.type == OP and self.string in EXACT_TOKEN_TYPES:
return EXACT_TOKEN_TYPES[self.string]
else:
return self.type
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
# Note: we use unicode matching for names ("\w") but ascii matching for
# number literals.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'\w+'
Hexnumber = r'0[xX][0-9a-fA-F]+'
Binnumber = r'0[bB][01]+'
Octnumber = r'0[oO][0-7]+'
Decnumber = r'(?:0+|[1-9][0-9]*)'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?[0-9]+'
Pointfloat = group(r'[0-9]+\.[0-9]*', r'\.[0-9]+') + maybe(Exponent)
Expfloat = r'[0-9]+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?'
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group(StringPrefix + "'''", StringPrefix + '"""')
# Single-line ' or " string.
String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
r"//=?", r"->",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
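# Spot check (editor addition): the Number pattern above accepts the usual
# integer, float and imaginary literal forms. Runs only when this module is
# executed directly (the CLI main() below still runs afterwards).
if __name__ == '__main__':
    for _lit in ('0x1F', '0b101', '42', '3.14e-2', '2j'):
        assert re.match(Number + r'$', _lit), _lit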
def _compile(expr):
return re.compile(expr, re.UNICODE)
endpats = {"'": Single, '"': Double,
"'''": Single3, '"""': Double3,
"r'''": Single3, 'r"""': Double3,
"b'''": Single3, 'b"""': Double3,
"R'''": Single3, 'R"""': Double3,
"B'''": Single3, 'B"""': Double3,
"br'''": Single3, 'br"""': Double3,
"bR'''": Single3, 'bR"""': Double3,
"Br'''": Single3, 'Br"""': Double3,
"BR'''": Single3, 'BR"""': Double3,
"rb'''": Single3, 'rb"""': Double3,
"Rb'''": Single3, 'Rb"""': Double3,
"rB'''": Single3, 'rB"""': Double3,
"RB'''": Single3, 'RB"""': Double3,
"u'''": Single3, 'u"""': Double3,
"R'''": Single3, 'R"""': Double3,
"U'''": Single3, 'U"""': Double3,
'r': None, 'R': None, 'b': None, 'B': None,
'u': None, 'U': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"b'''", 'b"""', "B'''", 'B"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""',
"rb'''", 'rb"""', "rB'''", 'rB"""',
"Rb'''", 'Rb"""', "RB'''", 'RB"""',
"u'''", 'u"""', "U'''", 'U"""',
):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"b'", 'b"', "B'", 'B"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"' ,
"rb'", 'rb"', "rB'", 'rB"',
"Rb'", 'Rb"', "RB'", 'RB"' ,
"u'", 'u"', "U'", 'U"',
):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
self.encoding = None
def add_whitespace(self, start):
row, col = start
if row < self.prev_row or row == self.prev_row and col < self.prev_col:
raise ValueError("start ({},{}) precedes previous end ({},{})"
.format(row, col, self.prev_row, self.prev_col))
row_offset = row - self.prev_row
if row_offset:
self.tokens.append("\\\n" * row_offset)
self.prev_col = 0
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
it = iter(iterable)
for t in it:
if len(t) == 2:
self.compat(t, it)
break
tok_type, token, start, end, line = t
if tok_type == ENCODING:
self.encoding = token
continue
if tok_type == ENDMARKER:
break
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
indents = []
toks_append = self.tokens.append
startline = token[0] in (NEWLINE, NL)
prevstring = False
for tok in chain([token], iterable):
toknum, tokval = tok[:2]
if toknum == ENCODING:
self.encoding = tokval
continue
if toknum in (NAME, NUMBER):
tokval += ' '
# Insert a space between two consecutive strings
if toknum == STRING:
if prevstring:
tokval = ' ' + tokval
prevstring = True
else:
prevstring = False
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
def untokenize(iterable):
"""Transform tokens back into Python source code.
It returns a bytes object, encoded using the ENCODING
token, which is the first token sequence output by tokenize.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
    Round-trip invariant for limited input:
        # Output bytes will tokenize back to the input
t1 = [tok[:2] for tok in tokenize(f.readline)]
newcode = untokenize(t1)
readline = BytesIO(newcode).readline
t2 = [tok[:2] for tok in tokenize(readline)]
assert t1 == t2
"""
ut = Untokenizer()
out = ut.untokenize(iterable)
if ut.encoding is not None:
out = out.encode(ut.encoding)
return out
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if enc == "utf-8" or enc.startswith("utf-8-"):
return "utf-8"
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are present,
but disagree, a SyntaxError will be raised. If the encoding cookie is an
invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
'utf-8-sig' is returned.
If no encoding is specified, then the default of 'utf-8' will be returned.
"""
try:
filename = readline.__self__.name
except AttributeError:
filename = None
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
except StopIteration:
return b''
def find_cookie(line):
try:
# Decode as UTF-8. Either the line is an encoding declaration,
# in which case it should be pure ASCII, or it must be UTF-8
# per default encoding.
line_string = line.decode('utf-8')
except UnicodeDecodeError:
msg = "invalid or missing encoding declaration"
if filename is not None:
msg = '{} for {!r}'.format(msg, filename)
raise SyntaxError(msg)
match = cookie_re.match(line_string)
if not match:
return None
encoding = _get_normal_name(match.group(1))
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
if filename is None:
msg = "unknown encoding: " + encoding
else:
msg = "unknown encoding for {!r}: {}".format(filename,
encoding)
raise SyntaxError(msg)
if bom_found:
if encoding != 'utf-8':
# This behaviour mimics the Python interpreter
if filename is None:
msg = 'encoding problem: utf-8'
else:
msg = 'encoding problem for {!r}: utf-8'.format(filename)
raise SyntaxError(msg)
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default, []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
if not blank_re.match(first):
return default, [first]
second = read_or_stop()
if not second:
return default, [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return default, [first, second]
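# Usage sketch (editor addition): detect_encoding() accepts any bytes-producing
# readline; io.BytesIO gives a convenient stand-in for a real file here.
if __name__ == '__main__':
    from io import BytesIO
    _enc, _read = detect_encoding(BytesIO(b'# -*- coding: latin-1 -*-\n').readline)
    assert _enc == 'iso-8859-1', _enc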
def open(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
buffer = builtins.open(filename, 'rb')
encoding, lines = detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
text.mode = 'r'
return text
def tokenize(readline):
"""
    The tokenize() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as bytes. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile, 'rb').__next__ # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
The first token sequence will always be an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
# This import is here to avoid problems when the itertools module is not
# built yet and tokenize is imported.
from itertools import chain, repeat
encoding, consumed = detect_encoding(readline)
rl_gen = iter(readline, b"")
empty = repeat(b"")
return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
def _tokenize(readline, encoding):
lnum = parenlev = continued = 0
numchars = '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
if encoding is not None:
if encoding == "utf-8-sig":
# BOM will already have been stripped.
encoding = "utf-8"
yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
while True: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = b''
if encoding is not None:
line = line.decode(encoding)
lnum += 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield TokenInfo(STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield TokenInfo(ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ':
column += 1
elif line[pos] == '\t':
column = (column//tabsize + 1)*tabsize
elif line[pos] == '\f':
column = 0
else:
break
pos += 1
if pos == max:
break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield TokenInfo(COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield TokenInfo(NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield TokenInfo(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = _compile(PseudoToken).match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
if start == end:
continue
token, initial = line[start:end], line[start]
if (initial in numchars or # ordinary number
(initial == '.' and token != '.' and token != '...')):
yield TokenInfo(NUMBER, token, spos, epos, line)
elif initial in '\r\n':
yield TokenInfo(NL if parenlev > 0 else NEWLINE,
token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield TokenInfo(COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = _compile(endpats[token])
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield TokenInfo(STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = _compile(endpats[initial] or
endpats[token[1]] or
endpats[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield TokenInfo(STRING, token, spos, epos, line)
elif initial.isidentifier(): # ordinary name
yield TokenInfo(NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
continued = 1
else:
if initial in '([{':
parenlev += 1
elif initial in ')]}':
parenlev -= 1
yield TokenInfo(OP, token, spos, epos, line)
else:
yield TokenInfo(ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos += 1
for indent in indents[1:]: # pop remaining indent levels
yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
# An undocumented, backwards compatible, API for all the places in the standard
# library that expect to be able to use tokenize with strings
def generate_tokens(readline):
return _tokenize(readline, None)
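# Round-trip sketch (editor addition): tokenize a small byte string and
# untokenize it back; full 5-tuples reproduce the source exactly. Runs only
# when this module is executed directly.
if __name__ == '__main__':
    from io import BytesIO
    _src = b'x = 1 + 2\n'
    _toks = list(tokenize(BytesIO(_src).readline))
    assert untokenize(_toks) == _src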
def main():
import argparse
# Helper error handling routines
def perror(message):
print(message, file=sys.stderr)
def error(message, filename=None, location=None):
if location:
args = (filename,) + location + (message,)
perror("%s:%d:%d: error: %s" % args)
elif filename:
perror("%s: error: %s" % (filename, message))
else:
perror("error: %s" % message)
sys.exit(1)
# Parse the arguments and options
parser = argparse.ArgumentParser(prog='python -m tokenize')
parser.add_argument(dest='filename', nargs='?',
metavar='filename.py',
help='the file to tokenize; defaults to stdin')
parser.add_argument('-e', '--exact', dest='exact', action='store_true',
help='display token names using the exact type')
args = parser.parse_args()
try:
# Tokenize the input
if args.filename:
filename = args.filename
with builtins.open(filename, 'rb') as f:
tokens = list(tokenize(f.readline))
else:
filename = "<stdin>"
tokens = _tokenize(sys.stdin.readline, None)
# Output the tokenization
for token in tokens:
token_type = token.type
if args.exact:
token_type = token.exact_type
token_range = "%d,%d-%d,%d:" % (token.start + token.end)
print("%-20s%-15s%-15r" %
(token_range, tok_name[token_type], token.string))
except IndentationError as err:
line, column = err.args[1][1:3]
error(err.args[0], filename, (line, column))
except TokenError as err:
line, column = err.args[1]
error(err.args[0], filename, (line, column))
except SyntaxError as err:
error(err, filename)
except OSError as err:
error(err)
except KeyboardInterrupt:
print("interrupted\n")
except Exception as err:
perror("unexpected error: %s" % err)
raise
if __name__ == "__main__":
main()
| lgpl-3.0 |
pattisdr/osf.io | osf/migrations/0086_pre_migrate_collections.py | 17 | 4590 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-03-05 16:30
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import osf.models.validators
import osf.utils.fields
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('osf', '0085_merge_20180316_1625'),
]
operations = [
migrations.CreateModel(
name='CollectedGuidMetadata',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('collected_type', models.CharField(blank=True, max_length=31)),
('status', models.CharField(blank=True, max_length=31)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('guid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.Guid')),
],
options={
'abstract': False,
},
),
migrations.DeleteModel(
name='Collection',
),
migrations.CreateModel(
name='CollectionProvider',
fields=[
],
options={
'proxy': True,
'indexes': [],
},
bases=('osf.abstractprovider',),
),
migrations.AlterField(
model_name='abstractprovider',
name='type',
field=models.CharField(choices=[('osf.collectionprovider', 'collection provider'), ('osf.preprintprovider', 'preprint provider')], db_index=True, max_length=255),
),
migrations.CreateModel(
name='Collection',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('content_type_pk', models.PositiveIntegerField(blank=True, null=True)),
('title', models.CharField(max_length=200, validators=[osf.models.validators.validate_title])),
('collected_type_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=31), blank=True, default=list, size=None)),
('status_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=31), blank=True, default=list, size=None)),
('is_public', models.BooleanField(db_index=True, default=False)),
('is_promoted', models.BooleanField(db_index=True, default=False)),
('is_bookmark_collection', models.BooleanField(db_index=True, default=False)),
('collected_types', models.ManyToManyField(related_name='_collection_collected_types_+', to='contenttypes.ContentType')),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('guid_links', models.ManyToManyField(related_name='collections', through='osf.CollectedGuidMetadata', to='osf.Guid')),
('provider', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='osf.AbstractProvider')),
('deleted', osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='collectedguidmetadata',
name='collection',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='osf.Collection'),
),
migrations.AlterOrderWithRespectTo(
name='collectedguidmetadata',
order_with_respect_to='collection',
),
migrations.AlterUniqueTogether(
name='collectedguidmetadata',
unique_together=set([('collection', 'guid')]),
),
]
| apache-2.0 |
DarkSand/Sasila | sasila/system_normal/downloader/http/selenium_response.py | 1 | 1112 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import re
from posixpath import normpath
if sys.version_info < (3, 0):
reload(sys)
sys.setdefaultencoding('utf-8')
from urlparse import urljoin, urlparse, urlunparse
else:
from urllib.parse import urljoin, urlparse, urlunparse
class SeleniumResponse(object):
def __init__(self, m_response=None, request=None):
self.request = request
self.m_response = m_response
def __str__(self):
if self.m_response:
return "<SeleniumResponse [%s] [%.2f KB]>" % (self.request.url, (float(len(self.m_response.content)) / 1000))
else:
return "<SeleniumResponse failed: %s>" % self.request.url
def nice_join(self, url):
url1 = urljoin(self.request.url, url)
arr = urlparse(url1)
path = normpath(arr[2])
return urlunparse((arr.scheme, arr.netloc, path, arr.params, arr.query, arr.fragment))
def is_url(self, url):
if re.match(r'^https?:/{2}\w.+$', url):
return True
else:
return False
__repr__ = __str__
| apache-2.0 |
apixandru/intellij-community | python/lib/Lib/site-packages/django/db/backends/sqlite3/base.py | 93 | 12206 | """
SQLite3 backend for django.
Python 2.4 requires pysqlite2 (http://pysqlite.org/).
Python 2.5 and later can use a pysqlite2 module or the sqlite3 module in the
standard library.
"""
import re
import sys
import datetime
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.sqlite3.client import DatabaseClient
from django.db.backends.sqlite3.creation import DatabaseCreation
from django.db.backends.sqlite3.introspection import DatabaseIntrospection
from django.utils.safestring import SafeString
try:
try:
from pysqlite2 import dbapi2 as Database
except ImportError, e1:
from sqlite3 import dbapi2 as Database
except ImportError, exc:
import sys
from django.core.exceptions import ImproperlyConfigured
if sys.version_info < (2, 5, 0):
module = 'pysqlite2 module'
exc = e1
else:
module = 'either pysqlite2 or sqlite3 modules (tried in that order)'
raise ImproperlyConfigured("Error loading %s: %s" % (module, exc))
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
Database.register_converter("bool", lambda s: str(s) == '1')
Database.register_converter("time", util.typecast_time)
Database.register_converter("date", util.typecast_date)
Database.register_converter("datetime", util.typecast_timestamp)
Database.register_converter("timestamp", util.typecast_timestamp)
Database.register_converter("TIMESTAMP", util.typecast_timestamp)
Database.register_converter("decimal", util.typecast_decimal)
Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal)
if Database.version_info >= (2,4,1):
# Starting in 2.4.1, the str type is not accepted anymore, therefore,
# we convert all str objects to Unicode
# As registering a adapter for a primitive type causes a small
# slow-down, this adapter is only registered for sqlite3 versions
# needing it.
Database.register_adapter(str, lambda s:s.decode('utf-8'))
Database.register_adapter(SafeString, lambda s:s.decode('utf-8'))
class DatabaseFeatures(BaseDatabaseFeatures):
# SQLite cannot handle us only partially reading from a cursor's result set
# and then writing the same rows to the database in another cursor. This
# setting ensures we always read result sets fully into memory all in one
# go.
can_use_chunked_reads = False
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_1000_query_parameters = False
supports_mixed_date_datetime_comparisons = False
def _supports_stddev(self):
"""Confirm support for STDDEV and related stats functions
SQLite supports STDDEV as an extension package; so
connection.ops.check_aggregate_support() can't unilaterally
rule out support for STDDEV. We need to manually check
whether the call works.
"""
cursor = self.connection.cursor()
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
try:
cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
has_support = True
except utils.DatabaseError:
has_support = False
cursor.execute('DROP TABLE STDDEV_TEST')
return has_support
class DatabaseOperations(BaseDatabaseOperations):
def date_extract_sql(self, lookup_type, field_name):
# sqlite doesn't support extract, so we fake it with the user-defined
# function django_extract that's registered in connect(). Note that
# single quotes are used because this is a string (and could otherwise
# cause a collision with a field name).
return "django_extract('%s', %s)" % (lookup_type.lower(), field_name)
def date_interval_sql(self, sql, connector, timedelta):
# It would be more straightforward if we could use the sqlite strftime
# function, but it does not allow for keeping six digits of fractional
# second information, nor does it allow for formatting date and datetime
# values differently. So instead we register our own function that
# formats the datetime combined with the delta in a manner suitable
# for comparisons.
return u'django_format_dtdelta(%s, "%s", "%d", "%d", "%d")' % (sql,
connector, timedelta.days, timedelta.seconds, timedelta.microseconds)
def date_trunc_sql(self, lookup_type, field_name):
# sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
# function django_date_trunc that's registered in connect(). Note that
# single quotes are used because this is a string (and could otherwise
# cause a collision with a field name).
return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
def drop_foreignkey_sql(self):
return ""
def pk_default_value(self):
return 'NULL'
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def no_limit_value(self):
return -1
def sql_flush(self, style, tables, sequences):
# NB: The generated SQL below is specific to SQLite
# Note: The DELETE FROM... SQL generated below works for SQLite databases
# because constraints don't exist
sql = ['%s %s %s;' % \
(style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))
) for table in tables]
# Note: No requirement for reset of auto-incremented indices (cf. other
# sql_flush() implementations). Just return SQL at this point
return sql
def year_lookup_bounds(self, value):
first = '%s-01-01'
second = '%s-12-31 23:59:59.999999'
return [first % value, second % value]
def convert_values(self, value, field):
"""SQLite returns floats when it should be returning decimals,
and gets dates and datetimes wrong.
For consistency with other backends, coerce when required.
"""
internal_type = field.get_internal_type()
if internal_type == 'DecimalField':
return util.typecast_decimal(field.format_number(value))
elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField':
return int(value)
elif internal_type == 'DateField':
return util.typecast_date(value)
elif internal_type == 'DateTimeField':
return util.typecast_timestamp(value)
elif internal_type == 'TimeField':
return util.typecast_time(value)
# No field, or the field isn't known to be a decimal or integer
return value
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'sqlite'
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See http://www.sqlite.org/lang_expr.html for an explanation.
operators = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",
'contains': "LIKE %s ESCAPE '\\'",
'icontains': "LIKE %s ESCAPE '\\'",
'regex': 'REGEXP %s',
'iregex': "REGEXP '(?i)' || %s",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE %s ESCAPE '\\'",
'endswith': "LIKE %s ESCAPE '\\'",
'istartswith': "LIKE %s ESCAPE '\\'",
'iendswith': "LIKE %s ESCAPE '\\'",
}
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations()
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def _cursor(self):
if self.connection is None:
settings_dict = self.settings_dict
if not settings_dict['NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Please fill out the database NAME in the settings module before using the database.")
kwargs = {
'database': settings_dict['NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['OPTIONS'])
self.connection = Database.connect(**kwargs)
# Register extract, date_trunc, and regexp functions.
self.connection.create_function("django_extract", 2, _sqlite_extract)
self.connection.create_function("django_date_trunc", 2, _sqlite_date_trunc)
self.connection.create_function("regexp", 2, _sqlite_regexp)
self.connection.create_function("django_format_dtdelta", 5, _sqlite_format_dtdelta)
connection_created.send(sender=self.__class__, connection=self)
return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if self.settings_dict['NAME'] != ":memory:":
BaseDatabaseWrapper.close(self)
FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')
class SQLiteCursorWrapper(Database.Cursor):
"""
Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
This fixes it -- but note that if you want to use a literal "%s" in a query,
you'll need to use "%%s".
"""
def execute(self, query, params=()):
query = self.convert_query(query)
try:
return Database.Cursor.execute(self, query, params)
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
except Database.DatabaseError, e:
raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
def executemany(self, query, param_list):
query = self.convert_query(query)
try:
return Database.Cursor.executemany(self, query, param_list)
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
except Database.DatabaseError, e:
raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
def convert_query(self, query):
return FORMAT_QMARK_REGEX.sub('?', query).replace('%%','%')
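# Editor's illustration (not part of upstream Django):
#   convert_query("SELECT x FROM t WHERE a = %s AND b LIKE '10%%'")
# yields
#   "SELECT x FROM t WHERE a = ? AND b LIKE '10%'"
# i.e. placeholders become qmarks while escaped literal percents survive.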
def _sqlite_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
else:
return getattr(dt, lookup_type)
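# Editor's note (illustrative): 'week_day' follows Django's convention of
# Sunday == 1 .. Saturday == 7; e.g. for a Sunday isoweekday() == 7 and
# (7 % 7) + 1 == 1.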
def _sqlite_date_trunc(lookup_type, dt):
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'year':
return "%i-01-01 00:00:00" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
def _sqlite_format_dtdelta(dt, conn, days, secs, usecs):
try:
dt = util.typecast_timestamp(dt)
delta = datetime.timedelta(int(days), int(secs), int(usecs))
if conn.strip() == '+':
dt = dt + delta
else:
dt = dt - delta
except (ValueError, TypeError):
return None
if isinstance(dt, datetime.datetime):
rv = dt.strftime("%Y-%m-%d %H:%M:%S")
if dt.microsecond:
rv = "%s.%0.6d" % (rv, dt.microsecond)
else:
rv = dt.strftime("%Y-%m-%d")
return rv
def _sqlite_regexp(re_pattern, re_string):
import re
try:
return bool(re.search(re_pattern, re_string))
    except Exception:
return False
| apache-2.0 |
sxjscience/tvm | python/tvm/relay/frontend/qnn_torch.py | 2 | 27334 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, import-outside-toplevel
""" Functions to convert quantized torch models to QNN """
import logging
import numpy as np
import tvm
from tvm import relay
from tvm.relay import expr as _expr
from tvm.relay import op as _op
from tvm.relay.frontend.common import infer_shape
class QNNParam:
""" A placeholder for weight quantization parameters """
def __init__(self, weight, bias, scale, zero_point, param_key):
param_prefix = param_key[: -len("._packed_params")]
self.weight_var = _expr.var(param_prefix + "_weight", shape=weight.shape)
self.weight = weight
if bias is not None:
self.bias_var = _expr.var(param_prefix + "_bias", shape=bias.shape)
self.bias = bias.detach().numpy()
else:
self.bias_var = None
self.bias = None
self.scale = _expr.const(scale)
self.zero_point = _expr.const(zero_point, dtype="int32")
def _unpack_quant_params(param_name, packed_params, unpack_func):
# Torch stores quantized params in a custom packed format,
    # so we need to unpack and retrieve them as numpy arrays
qweight, bias = unpack_func(packed_params)
weight_np = qweight.dequantize().numpy()
import torch
if qweight.qscheme() == torch.per_tensor_affine:
param = QNNParam(
weight_np, bias, qweight.q_scale(), int(qweight.q_zero_point()), param_name
)
else:
scales = qweight.q_per_channel_scales().numpy()
zero_points = qweight.q_per_channel_zero_points().numpy()
# This is an assumption posed by QNN
msg = "The values of zero points should be all zero for per channel"
assert np.all(zero_points == 0), msg
param = QNNParam(weight_np, bias, scales, 0, param_name)
return param
def get_weight_quant_params(script_module):
""" Retrive and unpack weight parameters from quantized modules """
conv_packed_params = []
linear_packed_params = []
import torch
# conv and linear requires different unpacking function
# extract all conv and linear parameters separately to distinguish them
for name, m in script_module.named_modules():
if isinstance(m, torch.jit.RecursiveScriptModule):
if "Conv" in m.original_name:
conv_packed_params.append((name, m.state_dict()))
elif m.original_name == "LinearPackedParams":
linear_packed_params.append((name, m.state_dict()))
pairs = [
(torch.ops.quantized.conv2d_unpack, conv_packed_params),
(torch.ops.quantized.linear_unpack, linear_packed_params),
]
quant_params = {}
param_name = "_packed_params"
for unpack_func, params in pairs:
for name, state_dict in params:
assert len(state_dict) == 1
assert param_name in state_dict
key = name + "." + param_name
packed_param = state_dict[param_name]
quant_params[key] = _unpack_quant_params(key, packed_param, unpack_func)
return quant_params
def add_quant_params_to_outputs(outputs, packed_param_map, quant_params):
"""
Add quant params to outputs so that they can be referenced by other
ops later. Weights are quantized here.
"""
for node_name, packed_param_name in packed_param_map.items():
qparam = quant_params[packed_param_name]
qweight = relay.qnn.op.quantize(
qparam.weight_var, qparam.scale, qparam.zero_point, out_dtype="int8", axis=0
)
param_tup = (qweight, qparam.scale, qparam.zero_point, qparam.bias_var)
outputs[node_name] = param_tup
def _get_quant_param_for_input(input_value):
"""
We want to know the input scale and zp of this input_value, since
input quant params are not explicitly passed around in torch (they
    are embedded in a QTensor data structure, not visible statically).
We know that it is quantized using output scale and zp
of some previous quantized op. The purpose of this function
is to find that pair of parameters.
"""
# Indices for output scale and zp
# For example, in quantized::conv2d(%input, %1, %2, %3, %4, %5, %6, %7),
# 6th and 7th arg are output scale and zp respectively.
output_quant_param_indices = {
"aten::quantize_per_tensor": (1, 2),
"quantized::conv2d": (6, 7),
"quantized::conv2d_relu": (6, 7),
"quantized::linear": (2, 3),
"quantized::linear_relu": (2, 3),
"quantized::add_relu": (2, 3),
"quantized::add": (2, 3),
"quantized::mul_relu": (2, 3),
"quantized::mul": (2, 3),
"quantized::cat": (2, 3),
"quantized::mul_scalar": (2, 3),
"quantized::add_scalar": (2, 3),
}
def dfs(current_node):
# trace back to find the producer of this input value
current_op = current_node.kind()
if current_op in output_quant_param_indices:
indices = output_quant_param_indices[current_op]
scale = current_node.inputsAt(indices[0])
zp = current_node.inputsAt(indices[1])
return scale, zp
        # Trace back earlier nodes, dfs order
# Assume quantized tensor comes earlier in the args
for arg in current_node.inputs():
return dfs(arg.node())
# shouldn't happen
assert False, "No producer for %s" % (str(current_node))
return dfs(input_value.node())
def _get_add_scalar_output_quant_param(input_scale, input_zero_point, scalar):
"""
Determine the output scale and zp of quantized::add_scalar op
This is used for mobilenet v3
Refer to aten/src/ATen/native/quantized/cpu/qadd.cpp
The names of variables are the same as torch impl
"""
q_min = 0
q_max = 255
s = input_scale
z = input_zero_point
c = scalar
c_q = round(c / s)
if q_min > z - c_q:
s_prime = (float(q_max) - (z - c_q)) / (float(q_max) - q_min) * s
z_prime = q_min
elif q_max < z - c_q:
s_prime = (float(z - c_q) - q_min) / (float(q_max) - q_min) * s
z_prime = q_max
else:
s_prime = s
z_prime = z - c_q
return s_prime, z_prime
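# Worked example (editor's illustration, values made up): with input scale
# s = 0.05, zero point z = 0 and scalar c = 3.0, c_q = round(3.0 / 0.05) = 60
# and q_min (0) > z - c_q (-60), so s_prime = (255.0 + 60) / 255.0 * 0.05
# ~= 0.0618 and z_prime = q_min = 0.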
def _get_mul_scalar_output_quant_param(input_scale, input_zero_point, scalar):
"""
Determine the output scale and zp of quantized::mul_scalar op
This is used for mobilenet v3
Refer to aten/src/ATen/native/quantized/cpu/qmul.cpp
The names of variables are the same as torch impl
"""
q_min = 0
q_max = 255
self_scale = input_scale
self_zero_point = input_zero_point
other_val = scalar
if other_val > 0.0:
s_prime = other_val * self_scale
z_prime = self_zero_point
elif other_val == 0.0:
s_prime = 1.0
z_prime = 0
else:
s_prime = abs(other_val) * self_scale
z_prime = q_max - (self_zero_point - q_min)
return s_prime, z_prime
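# Worked example (editor's illustration): multiplying by scalar -1.0 with
# input scale 0.1 and zero point 10 gives s_prime = abs(-1.0) * 0.1 = 0.1 and
# z_prime = 255 - (10 - 0) = 245, i.e. the sign flip is absorbed into the
# output zero point.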
def _add_output_quant_params_to_scalar_op(node, graph, input_scale, input_zero_point, scalar):
"""
The output scale and zp of {add,mul}_scalar op are not explicit in the IR
They are required for _get_quant_param_for_input above to work correctly
    So calculate these params the same way torch does, and make new
constant nodes in the input IR. Also add these params to the inputs of
scalar op.
For example,
%6 : float = prim::Constant[value=3.]()
%input : QUInt8(1, 3, 224, 224) = quantized::add_scalar(%x.1, %6)
becomes
%6 : float = prim::Constant[value=3.]()
%7 : float = prim::Constant[value=0.015686161816120148]()
%8 : int = prim::Constant[value=0]()
%input : UInt8(1, 3, 224, 224) = quantized::add_scalar(%x.1, %6, %7, %8)
%7 and %8 are newly created output scale and zp constant nodes
"""
import torch
operator = node.kind()
if operator == "quantized::mul_scalar":
out_scale, out_zero_point = _get_mul_scalar_output_quant_param(
input_scale, input_zero_point, scalar
)
elif operator == "quantized::add_scalar":
out_scale, out_zero_point = _get_add_scalar_output_quant_param(
input_scale, input_zero_point, scalar
)
else:
raise NotImplementedError("unsupported scalar op: %s" % operator)
# create new constant nodes and add them to graph
out_scale_node = graph.create("prim::Constant")
out_zero_point_node = graph.create("prim::Constant")
out_scale_node.insertBefore(node)
out_zero_point_node.insertBefore(node)
out_scale_node.f_("value", out_scale)
out_zero_point_node.i_("value", out_zero_point)
out_scale_node.output().setType(torch._C.FloatType.get())
out_zero_point_node.output().setType(torch._C.IntType.get())
node.addInput(out_scale_node.output())
node.addInput(out_zero_point_node.output())
def add_input_quant_params_to_op_inputs(graph):
"""
In Torch, input quant params are not explicitly passed around
Instead, they are stored in QTensor data structure, and retrieved
at runtime by each quantized ops.
However, they need to be known statically for QNN translation.
    To work around this and simplify the translation of inputs, we manually add
input quant params to inputs of Torch quantized operators listed below.
See _quantized_conv2d() below for example of why this is helpful.
For example,
%input : QUInt8(1, 512, 7, 7) = quantized::add(%x.8, %x.9, %434, %435)
becomes
%395 : float = prim::Constant[value=0.036212071776390076]()
%396 : int = prim::Constant[value=0]()
%430 : float = prim::Constant[value=0.16080744564533234]()
%431 : int = prim::Constant[value=42]()
%input : QUInt8(1, 512, 7, 7) = quantized::add(%x.8, %x.9, %434, %435,
%430, %431, %395, %396)
%434, %435 are output scale and zp of quantized::add op
%430, %431, %395, %396 are two pairs of input (scale, zp) for two tensors
added by this function
"""
    # How many quantized tensors does each op take as inputs?
# A pair of (scale, zp) for each input quantized tensor will be added
# to the input nodes
num_quantized_inputs = {
"quantized::conv2d": 1,
"quantized::conv2d_relu": 1,
"quantized::linear": 1,
"quantized::linear_relu": 1,
"quantized::add_relu": 2,
"quantized::add": 2,
"quantized::mul_relu": 2,
"quantized::mul": 2,
"aten::dequantize": 1,
"aten::mean": 1,
"aten::upsample_bilinear2d": 1,
"aten::relu_": 1,
"aten::relu": 1,
"quantized::add_scalar": 1,
"quantized::mul_scalar": 1,
"quantized::relu6": 1,
}
need_input_quant_param = set(num_quantized_inputs.keys())
need_input_quant_param.add("quantized::cat")
for node in graph.nodes():
operator = node.kind()
if operator not in need_input_quant_param:
continue
input_scales = []
input_zero_points = []
if operator == "quantized::cat":
# the number of inputs to concat is not constant
# so handle it separately
inputs = node.inputsAt(0).node().inputs()
for inp in inputs:
scale, zp = _get_quant_param_for_input(inp)
input_scales.append(scale)
input_zero_points.append(zp)
else:
for i in range(num_quantized_inputs[operator]):
scale, zp = _get_quant_param_for_input(node.inputsAt(i))
input_scales.append(scale)
input_zero_points.append(zp)
if operator in ["quantized::add_scalar", "quantized::mul_scalar"]:
scalar = node.inputsAt(1).node().f("value")
inp_scale = input_scales[0].node().f("value")
inp_zero_point = input_zero_points[0].node().i("value")
# see the comments in this function above
_add_output_quant_params_to_scalar_op(node, graph, inp_scale, inp_zero_point, scalar)
for scale, zp in zip(input_scales, input_zero_points):
node.addInput(scale)
node.addInput(zp)
def add_quant_params(params, quant_params):
""" Add quant parameters to TVM param map """
for qparam in quant_params.values():
params[qparam.weight_var.name_hint] = tvm.nd.array(qparam.weight)
if qparam.bias is not None:
params[qparam.bias_var.name_hint] = tvm.nd.array(qparam.bias)
def apply_with_upcast(data, func):
inp = _op.cast(data, dtype="int32")
out = func(inp)
return _op.cast(out, "uint8")
def quantized_mean(data, input_scale, input_zero_point, func_fp32):
# refer to aten/src/ATen/native/quantized/cpu/qreduction.cpp
dequantized = relay.qnn.op.dequantize(data, input_scale, input_zero_point)
out = func_fp32(dequantized)
return relay.qnn.op.quantize(out, input_scale, input_zero_point, out_dtype="uint8", axis=1)
def quantized_upsample(data, input_scale, input_zero_point, func_fp32):
    # currently piggybacks on fp32; it gets output identical to torch
data = relay.qnn.op.dequantize(data, input_scale, input_zero_point)
out = func_fp32(data)
return relay.qnn.op.quantize(out, input_scale, input_zero_point, out_dtype="uint8", axis=1)
def quantized_relu(data, input_zero_point):
# refer to aten/src/ATen/native/quantized/cpu/qrelu.cpp
zp = _op.cast(input_zero_point, dtype="uint8")
return _op.tensor.maximum(data, zp)
def _quantize_per_tensor():
def _impl(inputs, _):
return relay.qnn.op.quantize(
inputs[0], _expr.const(inputs[1]), _expr.const(inputs[2]), out_dtype="uint8", axis=1
)
return _impl
def _dequantize():
def _impl(inputs, _):
assert len(inputs) == 3, "Input quant params not found in op inputs"
inp_scale = _expr.const(inputs[1])
inp_zero_point = _expr.const(inputs[2])
return relay.qnn.op.dequantize(inputs[0], inp_scale, inp_zero_point)
return _impl
def _get_numpy(relay_const_scalar):
return relay_const_scalar.data.asnumpy()
def _get_scalar(relay_const_scalar):
return np.asscalar(_get_numpy(relay_const_scalar))
def _do_bias_and_requantize(
output, bias, input_scale, weight_scale, output_scale, output_zero_point, with_relu
):
""" Output processing for conv and linear """
# this is a vector for per channel case
requant_input_scale = _expr.const(_get_numpy(input_scale) * _get_numpy(weight_scale))
    # Torch does the bias add and requantize scale in fp32; refer to
    # third_party/fbgemm/include/fbgemm/OutputProcessing-inl.h
    # Instead, we do the bias add in int32 and use qnn requantize, which
    # needs integer input.
    # We observed no loss in accuracy doing it this way, and it is better
    # for tvm because bias quantization can be done at compile time,
    # whereas the torch way requires rounding of activation at runtime.
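    # Editor's illustration (made-up numbers, not from upstream): with
    # input_scale 0.02 and weight_scale 0.5 the requantize scale is
    # 0.02 * 0.5 = 0.01, so a float bias of 1.23 becomes
    # round(1.23 / 0.01) = 123 in int32 before the bias_add below.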
if bias is not None:
qbias = relay.qnn.op.quantize(
bias, requant_input_scale, _expr.const(0, "int32"), out_dtype="int32", axis=0
)
requantize_input = _op.nn.bias_add(output, qbias)
else:
requantize_input = output
requantized = relay.qnn.op.requantize(
requantize_input,
requant_input_scale,
relay.const(0, "int32"),
output_scale,
output_zero_point,
out_dtype="int32",
axis=1,
)
clip_min = 0
if with_relu:
clip_min = _get_scalar(output_zero_point)
clip = _op.tensor.clip(requantized, clip_min, 255.0)
return _op.cast(clip, dtype="uint8")
def _quantized_conv2d(with_relu=False):
def _impl(inputs, _):
# refer to src/ATen/native/quantized/cpu/qconv.cpp
# inputs[0]: input tensor
# inputs[1]: (weight, scale, zero_point, bias)
# inputs[2-5]: stride, padding, dilation, groups
# inputs[6]: output_scale
# inputs[7]: output_zero_point
# inputs[8]: input_scale (added manually by frontend)
# inputs[9]: input_zero_point (added manually by frontend)
weight = inputs[1][0]
weight_scale = inputs[1][1]
weight_zero_point = inputs[1][2]
output_scale = _expr.const(inputs[6])
output_zero_point = _expr.const(inputs[7])
assert len(inputs) == 10, "Input quant params not found in op inputs"
# These are manually added by add_input_quant_params_to_op_inputs above
# In torch, they are retrieved from QTensor data structure at runtime
input_scale = _expr.const(inputs[8])
input_zero_point = _expr.const(inputs[9])
        strides = inputs[2]
        padding = inputs[3]
        dilation = inputs[4]
groups = inputs[5]
weight_shape = infer_shape(weight)
kernel_size = (weight_shape[2], weight_shape[3])
out_channels = weight_shape[0]
if padding[0] != 0 or padding[1] != 0:
pad_val = _get_scalar(input_zero_point)
inp = _op.nn.pad(
inputs[0],
pad_width=((0, 0), (0, 0), (padding[0], padding[0]), (padding[1], padding[1])),
pad_value=float(pad_val),
)
else:
inp = inputs[0]
# padding is (0, 0) because we did explicit pad op with
# pad value being zero point above
conv_out = relay.qnn.op.conv2d(
inp,
weight,
input_zero_point,
weight_zero_point,
input_scale,
weight_scale,
kernel_size=kernel_size,
dilation=dilation,
strides=strides,
padding=(0, 0),
groups=groups,
channels=out_channels,
)
bias_var = inputs[1][3]
return _do_bias_and_requantize(
conv_out,
bias_var,
input_scale,
weight_scale,
output_scale,
output_zero_point,
with_relu,
)
return _impl
def _linear(with_relu=False):
# similar to conv
def _impl(inputs, _):
weight = inputs[1][0]
weight_scale = inputs[1][1]
weight_zero_point = inputs[1][2]
output_scale = _expr.const(inputs[2])
output_zero_point = _expr.const(inputs[3])
assert len(inputs) == 6, "Input quant params not found in op inputs"
# Manually added by add_input_quant_params_to_op_inputs above
input_scale = _expr.const(inputs[4])
input_zero_point = _expr.const(inputs[5])
weight_shape = infer_shape(weight)
dense = relay.qnn.op.dense(
inputs[0],
weight,
input_zero_point,
weight_zero_point,
input_scale,
weight_scale,
units=weight_shape[0],
)
bias_var = inputs[1][3]
return _do_bias_and_requantize(
dense, bias_var, input_scale, weight_scale, output_scale, output_zero_point, with_relu
)
return _impl
def _binop(relay_op, with_relu=False, fp32_piggy_back=False):
def qnn_impl(
lhs,
rhs,
input_scale_lhs,
input_zero_point_lhs,
input_scale_rhs,
input_zero_point_rhs,
output_scale,
output_zero_point,
):
qnn_out = relay_op(
lhs,
rhs,
input_scale_lhs,
input_zero_point_lhs,
input_scale_rhs,
input_zero_point_rhs,
output_scale,
output_zero_point,
)
if with_relu:
clip_min = _get_scalar(output_zero_point)
return _op.tensor.clip(qnn_out, clip_min, 255)
return qnn_out
# refer to aten/src/ATen/native/quantized/cpu/{qadd, qmul}.cpp
    # they piggyback on fp32 math: dequantize -> fp32 math -> quantize
def torch_impl(
lhs,
rhs,
input_scale_lhs,
input_zero_point_lhs,
input_scale_rhs,
input_zero_point_rhs,
output_scale,
output_zero_point,
):
if isinstance(lhs, _expr.Call) and lhs.op.name == "qnn.quantize":
lhs = lhs.args[0]
else:
lhs = relay.qnn.op.dequantize(lhs, input_scale_lhs, input_zero_point_lhs)
if isinstance(rhs, _expr.Call) and rhs.op.name == "qnn.quantize":
rhs = rhs.args[0]
else:
rhs = relay.qnn.op.dequantize(rhs, input_scale_rhs, input_zero_point_rhs)
fp32_out = relay_op(lhs, rhs)
if with_relu:
fp32_out = _op.nn.relu(fp32_out)
return relay.qnn.op.quantize(
fp32_out, output_scale, output_zero_point, axis=-1, out_dtype="uint8"
)
def _impl(inputs, _):
lhs = inputs[0]
rhs = inputs[1]
output_scale = _expr.const(inputs[2])
output_zero_point = _expr.const(inputs[3])
assert len(inputs) == 8, "Input quant params not found in op inputs"
# Manually added by add_input_quant_params_to_op_inputs above
input_scale_lhs = _expr.const(inputs[4])
input_zero_point_lhs = _expr.const(inputs[5])
input_scale_rhs = _expr.const(inputs[6])
input_zero_point_rhs = _expr.const(inputs[7])
if fp32_piggy_back:
logging.info("Piggy backing to FP32 op (PyTorch way)")
return torch_impl(
lhs,
rhs,
input_scale_lhs,
input_zero_point_lhs,
input_scale_rhs,
input_zero_point_rhs,
output_scale,
output_zero_point,
)
return qnn_impl(
lhs,
rhs,
input_scale_lhs,
input_zero_point_lhs,
input_scale_rhs,
input_zero_point_rhs,
output_scale,
output_zero_point,
)
return _impl
def _cat(fp32_piggy_back=False):
# refer to aten/src/ATen/native/quantized/cpu/qconcat.cpp
    # for concat they also piggyback on fp32(!)
# dequantize -> fp32 math -> quantize
def torch_impl(inputs, input_scales, input_zero_points, output_scale, output_zero_point, axis):
dequantized = []
for inp, inp_scale, inp_zp in zip(inputs, input_scales, input_zero_points):
dequantized.append(relay.qnn.op.dequantize(inp, inp_scale, inp_zp))
concat = _op.tensor.concatenate(dequantized, axis=axis)
return relay.qnn.op.quantize(
concat, output_scale, output_zero_point, axis=axis, out_dtype="uint8"
)
def _impl(inputs, _):
axis = inputs[1]
output_scale = _expr.const(inputs[2])
output_zero_point = _expr.const(inputs[3])
num_inputs = (len(inputs) - 4) // 2
input_scales = []
input_zero_points = []
for i in range(0, num_inputs):
input_scales.append(_expr.const(inputs[4 + i * 2]))
input_zero_points.append(_expr.const(inputs[4 + i * 2 + 1]))
if fp32_piggy_back:
return torch_impl(
inputs[0], input_scales, input_zero_points, output_scale, output_zero_point, axis
)
return relay.qnn.op.concatenate(
inputs[0], input_scales, input_zero_points, output_scale, output_zero_point, axis
)
return _impl
def _add_scalar():
# this is used for mobilenet v3
def _impl(inputs, _):
# refer to aten/src/ATen/native/quantized/cpu/qadd.cpp
assert len(inputs) == 6, "Input quant params not found in op inputs"
s = inputs[4]
z = inputs[5]
c = inputs[1]
c_q = round(c / s)
q_min = 0
q_max = 255
        # the math for calculating output scale and zp is already done
# during _add_output_quant_params_to_scalar_op above
out_scale = _expr.const(inputs[2])
out_zp = _expr.const(inputs[3])
if q_min > z - c_q or q_max < z - c_q:
dequant = relay.qnn.op.dequantize(inputs[0], _expr.const(s), _expr.const(z))
dequantized_add = _op.tensor.add(dequant, _expr.const(c_q * s))
return relay.qnn.op.quantize(
dequantized_add, out_scale, out_zp, axis=1, out_dtype="uint8"
)
# only scale change
return inputs[0]
return _impl
def quantize_scalar(data, scale, zero_point):
# used to quantize 6., in mobilenet v3
transformed = zero_point + data / scale
return max(0, min(round(transformed), 255))
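# Editor's illustration: quantize_scalar(6.0, 0.1, 0) == max(0, min(round(60.0), 255))
# == 60, which becomes the clip ceiling used by _relu6() below.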
def _relu6():
# refer to src/ATen/native/quantized/cpu/qrelu.cpp
def _impl(inputs, _):
assert len(inputs) == 4, "Input quant params not found in op inputs"
input_scale = inputs[2]
input_zero_point = inputs[3]
six = quantize_scalar(6.0, input_scale, input_zero_point)
return _op.tensor.clip(inputs[0], input_zero_point, six)
return _impl
def _mul_scalar():
# this is used for mobilenet v3
def _impl(inputs, _):
# refer to aten/src/ATen/native/quantized/cpu/qmul.cpp
        # the math for calculating output scale and zp is already done
# during _add_output_quant_params_to_scalar_op above
assert len(inputs) == 6, "Input quant params not found in op inputs"
other_val = inputs[1] # scalar
if other_val > 0.0:
# only scale change
return inputs[0]
if other_val == 0.0:
shape = infer_shape(inputs[0])
return _op.full(_expr.const(0), shape, dtype="uint8")
# negative scale case
q_min = 0
q_max = 255
bias = _expr.const(q_max + q_min, dtype="int8")
int8 = bias - _op.cast(inputs[0], "int8")
return _op.cast(int8, "uint8")
return _impl
convert_map = {
"aten::quantize_per_tensor": _quantize_per_tensor(),
"quantized::conv2d_relu": _quantized_conv2d(with_relu=True),
"aten::dequantize": _dequantize(),
"quantized::conv2d": _quantized_conv2d(),
"quantized::add_relu": _binop(relay.qnn.op.add, with_relu=True),
"quantized::add": _binop(relay.qnn.op.add),
"quantized::mul_relu": _binop(relay.qnn.op.mul, with_relu=True),
"quantized::mul": _binop(relay.qnn.op.mul),
"quantized::linear": _linear(),
"quantized::linear_relu": _linear(with_relu=True),
"quantized::cat": _cat(),
"quantized::add_scalar": _add_scalar(),
"quantized::mul_scalar": _mul_scalar(),
"quantized::relu6": _relu6(),
}
| apache-2.0 |
lamby/jenkins.debian.net | bin/reproducible_scheduler.py | 1 | 31885 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright © 2015 Mattia Rizzolo <[email protected]>
# Copyright © 2015-2017 Holger Levsen <[email protected]>
# Based on reproducible_scheduler.sh © 2014-2015 Holger Levsen <[email protected]>
# Licensed under GPL-2
#
# Depends: python3 python3-debian
#
# Schedule packages to be built.
import sys
import lzma
import deb822
import aptsources.sourceslist
import smtplib
from subprocess import call
from apt_pkg import version_compare
from urllib.request import urlopen
from sqlalchemy import sql
from email.mime.text import MIMEText
from reproducible_common import *
from reproducible_html_live_status import generate_schedule
from reproducible_html_packages import gen_packages_html
from reproducible_html_packages import purge_old_pages
"""
How the scheduler chooses which limit to apply, based on the MAXIMA
and LIMIT arrays:
First, the scheduler is only started for an architecture if the number of
currently scheduled packages is lower than MAXIMA*3. Then if the number of
scheduled packages is higher than MAXIMA, only new versions are scheduled...
Then, for each category (totally _untested_ packages, _new_ versions,
_ftbfs_ and _depwait_ packages and _old_ versions) it depends on how many
packages are already scheduled in that category, in a 3 steps process.
Only when scheduling old versions MINIMUM_AGE is respected.
Let's go by an example:
'unstable': {1: (250, 40), 2: (350, 20), '*': 5},
is translated to:
    if total <= 250:
        40
    elif total <= 350:
        20
    else:
        5
* 1st step, if there are 250 or fewer packages in the queue, schedule 40
* 2nd step, if there are 350 or fewer packages in the queue, schedule 20
* 3rd step, schedule 5
So, the 3rd step happens only when there are more than 350 packages queued up.
LIMIT_404 defines how many packages with status 404 are rescheduled at max.
"""
# only packages whose last test is older than this (in days) will be rescheduled
MINIMUM_AGE = {'amd64': 30, 'i386': 33, 'arm64': 21, 'armhf': 30}
# maximum queue size, see explanations above
MAXIMA = {'amd64': 750, 'i386': 750, 'arm64': 1000, 'armhf': 750}
# limits, see explanations above
LIMITS = {
'untested': {
'amd64': {
'testing': {'*': 440},
'unstable': {'*': 440},
'experimental': {'*': 440},
},
'i386': {
'testing': {'*': 440},
'unstable': {'*': 440},
'experimental': {'*': 440},
},
'arm64': {
'testing': {'*': 660},
'unstable': {'*': 660},
'experimental': {'*': 440},
},
'armhf': {
'testing': {'*': 440},
'unstable': {'*': 440},
'experimental': {'*': 440},
},
},
'new': {
'amd64': {
'testing': {1: (100, 250), 2: (200, 200), '*': 100},
'unstable': {1: (100, 250), 2: (200, 200), '*': 150},
'experimental': {1: (100, 250), 2: (200, 200), '*': 50},
},
'i386': {
'testing': {1: (100, 250), 2: (200, 200), '*': 100},
'unstable': {1: (100, 250), 2: (200, 200), '*': 150},
'experimental': {1: (100, 250), 2: (200, 200), '*': 50},
},
'arm64': {
'testing': {1: (100, 250), 2: (200, 200), '*': 50},
'unstable': {1: (100, 250), 2: (200, 200), '*': 75},
'experimental': {1: (100, 200), 2: (200, 200), '*': 25},
},
'armhf': {
'testing': {1: (100, 200), 2: (200, 200), '*': 50},
'unstable': {1: (100, 200), 2: (200, 200), '*': 75},
'experimental': {1: (100, 200), 2: (200, 200), '*': 25},
},
},
'ftbfs': {
'amd64': {
'testing': {1: (700, 40), 2: (500, 20), '*': 5},
'unstable': {1: (700, 40), 2: (500, 20), '*': 5},
'experimental': {1: (700, 40), 2: (500, 20), '*': 2},
},
'i386': {
'testing': {1: (700, 40), 2: (500, 20), '*': 5},
'unstable': {1: (700, 40), 2: (500, 20), '*': 5},
'experimental': {1: (700, 40), 2: (500, 20), '*': 2},
},
'arm64': {
'testing': {1: (700, 40), 2: (500, 20), '*': 5},
'unstable': {1: (700, 40), 2: (500, 20), '*': 5},
'experimental': {1: (700, 40), 2: (500, 20), '*': 2},
},
'armhf': {
'testing': {1: (575, 20), 2: (450, 10), '*': 5},
'unstable': {1: (575, 20), 2: (450, 10), '*': 5},
'experimental': {1: (575, 20), 2: (450, 10), '*': 2},
}
},
'depwait': {
'amd64': {
'testing': {1: (700, 400), 2: (500, 200), '*': 50},
'unstable': {1: (700, 400), 2: (500, 200), '*': 50},
'experimental': {1: (700, 400), 2: (500, 200), '*': 20},
},
'i386': {
'testing': {1: (700, 400), 2: (500, 200), '*': 50},
'unstable': {1: (700, 400), 2: (500, 200), '*': 50},
'experimental': {1: (700, 400), 2: (500, 200), '*': 20},
},
'arm64': {
'testing': {1: (700, 400), 2: (500, 200), '*': 50},
'unstable': {1: (700, 400), 2: (500, 200), '*': 50},
'experimental': {1: (700, 400), 2: (500, 200), '*': 20},
},
'armhf': {
'testing': {1: (575, 200), 2: (450, 100), '*': 50},
'unstable': {1: (575, 200), 2: (450, 100), '*': 50},
'experimental': {1: (575, 200), 2: (450, 100), '*': 20},
}
},
'old': {
'amd64': {
'testing': {1: (300, 800), 2: (500, 500), '*': 0},
'unstable': {1: (300, 800), 2: (500, 500), '*': 0},
'experimental': {1: (300, 70), 2: (500, 50), '*': 0},
},
'i386': {
'testing': {1: (300, 800), 2: (500, 500), '*': 0},
'unstable': {1: (300, 800), 2: (500, 500), '*': 0},
'experimental': {1: (300, 70), 2: (500, 50), '*': 0},
},
'arm64': {
'testing': {1: (300, 800), 2: (500, 500), '*': 0},
'unstable': {1: (300, 800), 2: (500, 500), '*': 0},
'experimental': {1: (300, 70), 2: (500, 50), '*': 0},
},
'armhf': {
'testing': {1: (300, 800), 2: (500, 500), '*': 0},
'unstable': {1: (300, 800), 2: (500, 500), '*': 0},
'experimental': {1: (300, 70), 2: (500, 50), '*': 0},
}
}
}
# maximum number of packages with status 404 which will be rescheduled
LIMIT_404 = 255
class Limit:
def __init__(self, arch, queue):
self.arch = arch
self.queue = queue
def get_level(self, stage):
try:
return int(LIMITS[self.queue][self.arch][self.suite][stage][0])
except KeyError:
log.error('No limit defined for the %s queue on %s/%s stage %s. '
'Returning 1', self.queue, self.suite, self.arch, stage)
return 1
except IndexError:
log.critical('The limit is not in the format "(level, limit)". '
'I can\'t guess what you want, giving up')
sys.exit(1)
def get_limit(self, stage):
try:
limit = LIMITS[self.queue][self.arch][self.suite][stage]
limit = limit[1]
except KeyError:
log.error('No limit defined for the %s queue on %s/%s stage %s. '
'Returning 1', self.queue, self.suite, self.arch, stage)
return 1
except IndexError:
log.critical('The limit is not in the format "(level, limit)". '
'I can\'t guess what you want, giving up')
sys.exit(1)
except TypeError:
# this is the case of the default target
if isinstance(limit, int):
pass
else:
raise
return int(limit)
def get_staged_limit(self, current_total):
if current_total <= self.get_level(1):
return self.get_limit(1)
elif current_total <= self.get_level(2):
return self.get_limit(2)
else:
return self.get_limit('*')
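# Illustrative usage of Limit (editor's sketch; the numbers below follow
# from LIMITS['new']['amd64']['unstable'] in the table above):
#   limit = Limit('amd64', 'new')
#   limit.suite = 'unstable'
#   limit.get_staged_limit(90)   # -> 250 (stage 1, since 90 <= 100)
#   limit.get_staged_limit(150)  # -> 200 (stage 2, since 150 <= 200)
#   limit.get_staged_limit(500)  # -> 150 (the default '*' stage)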
def update_sources(suite):
# download the sources file for this suite
mirror = 'http://ftp.de.debian.org/debian'
remotefile = mirror + '/dists/' + suite + '/main/source/Sources.xz'
log.info('Downloading sources file for %s: %s', suite, remotefile)
sources = lzma.decompress(urlopen(remotefile).read()).decode('utf8')
log.debug('\tdownloaded')
for arch in ARCHS:
log.info('Updating sources db for %s/%s...', suite, arch)
update_sources_db(suite, arch, sources)
log.info('DB update done for %s/%s done at %s.', suite, arch, datetime.now())
def update_sources_db(suite, arch, sources):
# extract relevant info (package name and version) from the sources file
new_pkgs = set()
newest_version = {}
for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
pkg = (src['Package'], src['Version'], suite, arch)
# only keep the most recent version of a src for each package/suite/arch
key = src['Package'] + suite + arch
if key in newest_version:
oldversion = newest_version[key]
oldpackage = (src['Package'], oldversion, suite, arch)
new_pkgs.remove(oldpackage)
newest_version[key] = src['Version']
new_pkgs.add(pkg)
# get the current packages in the database
query = "SELECT name, version, suite, architecture FROM sources " + \
"WHERE suite='{}' AND architecture='{}'".format(suite, arch)
cur_pkgs = set([(p.name, p.version, p.suite, p.architecture) for p in query_db(query)])
pkgs_to_add = []
updated_pkgs = []
different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
log.debug('Packages different in the archive and in the db: %s',
different_pkgs)
for pkg in different_pkgs:
# pkg: (name, version, suite, arch)
query = "SELECT id, version, notify_maintainer FROM sources " + \
"WHERE name='{}' AND suite='{}' AND architecture='{}'"
query = query.format(pkg[0], pkg[2], pkg[3])
try:
result = query_db(query)[0]
except IndexError: # new package
pkgs_to_add.append({
'name': pkg[0],
'version': pkg[1],
'suite': pkg[2],
'architecture': pkg[3],
})
continue
pkg_id = result[0]
old_version = result[1]
notify_maint = int(result[2])
if version_compare(pkg[1], old_version) > 0:
log.debug('New version: ' + str(pkg) + ' (we had ' +
old_version + ')')
updated_pkgs.append({
'update_id': pkg_id,
'name': pkg[0],
'version': pkg[1],
'suite': pkg[2],
'architecture': pkg[3],
'notify_maintainer': notify_maint,
})
# Now actually update the database:
sources_table = db_table('sources')
# updated packages
log.info('Pushing ' + str(len(updated_pkgs)) +
' updated packages to the database...')
if updated_pkgs:
transaction = conn_db.begin()
update_query = sources_table.update().\
where(sources_table.c.id == sql.bindparam('update_id'))
conn_db.execute(update_query, updated_pkgs)
transaction.commit()
# new packages
if pkgs_to_add:
log.info('Now inserting %i new sources in the database: %s',
len(pkgs_to_add), pkgs_to_add)
transaction = conn_db.begin()
conn_db.execute(sources_table.insert(), pkgs_to_add)
transaction.commit()
# RM'ed packages
cur_pkgs_name = [x[0] for x in cur_pkgs]
new_pkgs_name = [x[0] for x in new_pkgs]
rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
log.info('Now deleting %i removed packages: %s', len(rmed_pkgs),
rmed_pkgs)
rmed_pkgs_id = []
pkgs_to_rm = []
query = "SELECT id FROM sources WHERE name='{}' AND suite='{}' " + \
"AND architecture='{}'"
for pkg in rmed_pkgs:
result = query_db(query.format(pkg, suite, arch))
rmed_pkgs_id.append({'deleteid': result[0][0]})
pkgs_to_rm.append({'name': pkg, 'suite': suite, 'architecture': arch})
log.debug('removed packages ID: %s',
[str(x['deleteid']) for x in rmed_pkgs_id])
log.debug('removed packages: %s', pkgs_to_rm)
if rmed_pkgs_id:
transaction = conn_db.begin()
results_table = db_table('results')
schedule_table = db_table('schedule')
notes_table = db_table('notes')
removed_packages_table = db_table('removed_packages')
delete_results_query = results_table.delete().\
where(results_table.c.package_id == sql.bindparam('deleteid'))
delete_schedule_query = schedule_table.delete().\
where(schedule_table.c.package_id == sql.bindparam('deleteid'))
delete_notes_query = notes_table.delete().\
where(notes_table.c.package_id == sql.bindparam('deleteid'))
delete_sources_query = sources_table.delete().\
where(sources_table.c.id == sql.bindparam('deleteid'))
conn_db.execute(delete_results_query, rmed_pkgs_id)
conn_db.execute(delete_schedule_query, rmed_pkgs_id)
conn_db.execute(delete_notes_query, rmed_pkgs_id)
conn_db.execute(delete_sources_query, rmed_pkgs_id)
conn_db.execute(removed_packages_table.insert(), pkgs_to_rm)
transaction.commit()
# finally check whether the db has the correct number of packages
query = "SELECT count(*) FROM sources WHERE suite='{}' " + \
"AND architecture='{}'"
pkgs_end = query_db(query.format(suite, arch))
count_new_pkgs = len(set([x[0] for x in new_pkgs]))
if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of sources in the Sources file' +
                               ' is different from the one in the DB!')
        log.critical('sources in the debian archive for the %s suite: %s',
                     suite, str(count_new_pkgs))
        log.critical('sources in the reproducible db for the %s suite: %s',
                     suite, str(pkgs_end[0][0]))
sys.exit(1)
if pkgs_to_add:
log.info('Building pages for the new packages')
gen_packages_html([Package(x['name']) for x in pkgs_to_add], no_clean=True)
def print_schedule_result(suite, arch, criteria, packages):
'''
`packages` is the usual list-of-tuples returned by SQL queries,
where the first item is the id and the second one the package name
'''
log.info('Criteria: ' + criteria)
log.info('Suite/Arch: ' + suite + '/' + arch)
log.info('Amount: ' + str(len(packages)))
log.info('Packages: ' + ' '.join([x[1] for x in packages]))
def queue_packages(all_pkgs, packages, date):
date = date.strftime('%Y-%m-%d %H:%M')
pkgs = [x for x in packages if x[0] not in all_pkgs]
if len(pkgs) > 0:
log.info('The following ' + str(len(pkgs)) + ' source packages have ' +
'been queued up for scheduling at ' + date + ': ' +
' '.join([str(x[1]) for x in pkgs]))
all_pkgs.update({x[0]: date for x in pkgs})
return all_pkgs
def schedule_packages(packages):
pkgs = [{'package_id': x, 'date_scheduled': packages[x]} for x in packages.keys()]
log.debug('IDs about to be scheduled: %s', packages.keys())
if pkgs:
conn_db.execute(db_table('schedule').insert(), pkgs)
def add_up_numbers(packages, arch):
packages_sum = '+'.join([str(len(packages[x])) for x in SUITES])
if packages_sum == '0+0+0':
packages_sum = '0'
return packages_sum
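# Editor's illustration: with SUITES = ['testing', 'unstable', 'experimental']
# and 12, 3 and 0 packages scheduled respectively, this returns '12+3+0';
# an all-empty result collapses to plain '0'.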
def query_untested_packages(suite, arch, limit):
criteria = 'not tested before, randomly sorted'
query = """SELECT DISTINCT *
FROM (
SELECT sources.id, sources.name FROM sources
WHERE sources.suite='{suite}' AND sources.architecture='{arch}'
AND sources.id NOT IN
(SELECT schedule.package_id FROM schedule)
AND sources.id NOT IN
(SELECT results.package_id FROM results)
ORDER BY random()
) AS tmp
LIMIT {limit}""".format(suite=suite, arch=arch, limit=limit)
packages = query_db(query)
print_schedule_result(suite, arch, criteria, packages)
return packages
def query_new_versions(suite, arch, limit):
criteria = 'tested before, new version available, sorted by last build date'
query = """SELECT s.id, s.name, s.version, r.version, max(r.build_date) max_date
FROM sources AS s JOIN results AS r ON s.id = r.package_id
WHERE s.suite='{suite}' AND s.architecture='{arch}'
AND s.version != r.version
AND r.status != 'blacklisted'
AND s.id IN (SELECT package_id FROM results)
AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
GROUP BY s.id, s.name, s.version, r.version
ORDER BY max_date
LIMIT {limit}""".format(suite=suite, arch=arch, limit=limit)
pkgs = query_db(query)
    # the next line avoids constant rescheduling of packages:
    # packages in our repository have versions != sid or testing,
    # so they would always be selected by the query above;
    # we only accept them if their version is greater than the one already tested
packages = [(x[0], x[1]) for x in pkgs if version_compare(x[2], x[3]) > 0]
print_schedule_result(suite, arch, criteria, packages)
return packages
def query_old_ftbfs_versions(suite, arch, limit):
criteria = 'status ftbfs, no bug filed, tested at least 3 days ago, ' + \
'no new version available, sorted by last build date'
date = (datetime.now()-timedelta(days=3)).strftime('%Y-%m-%d %H:%M')
query = """SELECT s.id, s.name, max(r.build_date) max_date
FROM sources AS s JOIN results AS r ON s.id = r.package_id
JOIN notes AS n ON n.package_id=s.id
WHERE s.suite='{suite}' AND s.architecture='{arch}'
AND r.status='FTBFS'
AND ( n.bugs = '[]' OR n.bugs IS NULL )
AND r.build_date < '{date}'
AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
GROUP BY s.id, s.name
ORDER BY max_date
LIMIT {limit}""".format(suite=suite, arch=arch, limit=limit,
date=date)
packages = query_db(query)
print_schedule_result(suite, arch, criteria, packages)
return packages
def query_old_depwait_versions(suite, arch, limit):
criteria = 'status depwait, no bug filed, tested at least 2 days ago, ' + \
'no new version available, sorted by last build date'
date = (datetime.now()-timedelta(days=2)).strftime('%Y-%m-%d %H:%M')
query = """SELECT s.id, s.name, max(r.build_date) max_date
FROM sources AS s JOIN results AS r ON s.id = r.package_id
WHERE s.suite='{suite}' AND s.architecture='{arch}'
AND r.status='depwait'
AND r.build_date < '{date}'
AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
GROUP BY s.id, s.name
ORDER BY max_date
LIMIT {limit}""".format(suite=suite, arch=arch, limit=limit,
date=date)
packages = query_db(query)
print_schedule_result(suite, arch, criteria, packages)
return packages
def query_old_versions(suite, arch, limit):
criteria = """tested at least {minimum_age} days ago, no new version available,
sorted by last build date""".format(minimum_age=MINIMUM_AGE[arch])
date = (datetime.now()-timedelta(days=MINIMUM_AGE[arch]))\
.strftime('%Y-%m-%d %H:%M')
query = """SELECT s.id, s.name, max(r.build_date) max_date
FROM sources AS s JOIN results AS r ON s.id = r.package_id
WHERE s.suite='{suite}' AND s.architecture='{arch}'
AND r.status != 'blacklisted'
AND r.build_date < '{date}'
AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
GROUP BY s.id, s.name
ORDER BY max_date
LIMIT {limit}""".format(suite=suite, arch=arch,
date=date, limit=limit)
packages = query_db(query)
print_schedule_result(suite, arch, criteria, packages)
return packages
def query_404_versions(suite, arch, limit):
criteria = """tested at least a day ago, status 404,
sorted by last build date"""
date = (datetime.now()-timedelta(days=1)).strftime('%Y-%m-%d %H:%M')
query = """SELECT s.id, s.name, max(r.build_date) max_date
FROM sources AS s JOIN results AS r ON s.id = r.package_id
WHERE s.suite='{suite}' AND s.architecture='{arch}'
AND r.status = '404'
AND r.build_date < '{date}'
AND s.id NOT IN (SELECT schedule.package_id FROM schedule)
GROUP BY s.id, s.name
ORDER BY max_date
LIMIT {limit}""".format(suite=suite, arch=arch, limit=limit,
date=date)
packages = query_db(query)
print_schedule_result(suite, arch, criteria, packages)
return packages
def schedule_untested_packages(arch, total):
packages = {}
limit = Limit(arch, 'untested')
for suite in SUITES:
limit.suite = suite
many_untested = limit.get_limit('*')
log.info('Requesting %s untested packages in %s/%s...',
many_untested, suite, arch)
packages[suite] = query_untested_packages(suite, arch, many_untested)
log.info('Received ' + str(len(packages[suite])) +
                 ' untested packages in ' + suite + '/' + arch + ' to schedule.')
log.info('--------------------------------------------------------------')
msg = add_up_numbers(packages, arch)
if msg != '0':
msg += ' new packages'
else:
msg = ''
return packages, msg
def schedule_new_versions(arch, total):
packages = {}
limit = Limit(arch, 'new')
for suite in SUITES:
limit.suite = suite
many_new = limit.get_staged_limit(total)
log.info('Requesting %s new versions in %s/%s...',
many_new, suite, arch)
packages[suite] = query_new_versions(suite, arch, many_new)
log.info('Received ' + str(len(packages[suite])) +
' new packages in ' + suite + '/' + arch + ' to schedule.')
log.info('--------------------------------------------------------------')
msg = add_up_numbers(packages, arch)
if msg != '0':
msg += ' new versions'
else:
msg = ''
return packages, msg
def schedule_old_ftbfs_versions(arch, total):
packages = {}
limit = Limit(arch, 'ftbfs')
for suite in SUITES:
limit.suite = suite
old_ftbfs = limit.get_staged_limit(total)
log.info('Requesting %s old ftbfs packages in %s/%s...', old_ftbfs,
suite, arch)
packages[suite] = query_old_ftbfs_versions(suite, arch, old_ftbfs)
log.info('Received ' + str(len(packages[suite])) +
' old ftbfs packages in ' + suite + '/' + arch + ' to schedule.')
log.info('--------------------------------------------------------------')
msg = add_up_numbers(packages, arch)
if msg != '0':
msg += ' ftbfs without bugs filed'
else:
msg = ''
return packages, msg
def schedule_old_depwait_versions(arch, total):
packages = {}
limit = Limit(arch, 'depwait')
for suite in SUITES:
limit.suite = suite
old_depwait = limit.get_staged_limit(total)
log.info('Requesting %s old depwait packages in %s/%s...', old_depwait,
suite, arch)
packages[suite] = query_old_depwait_versions(suite, arch, old_depwait)
log.info('Received ' + str(len(packages[suite])) +
' old depwait packages in ' + suite + '/' + arch + ' to schedule.')
log.info('--------------------------------------------------------------')
msg = add_up_numbers(packages, arch)
if msg != '0':
msg += ' in depwait state'
else:
msg = ''
return packages, msg
def schedule_old_versions(arch, total):
packages = {}
limit = Limit(arch, 'old')
for suite in SUITES:
limit.suite = suite
many_old = limit.get_staged_limit(total)
log.info('Requesting %s old packages in %s/%s...', many_old,
suite, arch)
packages[suite] = query_old_versions(suite, arch, many_old)
log.info('Received ' + str(len(packages[suite])) +
' old packages in ' + suite + '/' + arch + ' to schedule.')
log.info('--------------------------------------------------------------')
msg = add_up_numbers(packages, arch)
if msg != '0':
msg += ' known versions'
else:
msg = ''
return packages, msg
def schedule_404_versions(arch, total):
packages = {}
for suite in SUITES:
log.info('Requesting 404 packages in %s/%s...',
suite, arch)
packages[suite] = query_404_versions(suite, arch, LIMIT_404)
log.info('Received ' + str(len(packages[suite])) +
' 404 packages in ' + suite + '/' + arch + ' to schedule.')
log.info('--------------------------------------------------------------')
msg = add_up_numbers(packages, arch)
if msg != '0':
msg += ' with status \'404\''
else:
msg = ''
return packages, msg
def scheduler(arch):
query = "SELECT count(*) " + \
"FROM schedule AS p JOIN sources AS s ON p.package_id=s.id " + \
"WHERE s.architecture='{arch}'"
total = int(query_db(query.format(arch=arch))[0][0])
log.info('==============================================================')
log.info('Currently scheduled packages in all suites on ' + arch + ': ' + str(total))
if total > MAXIMA[arch]:
log.info(str(total) + ' packages already scheduled' +
', only scheduling new versions.')
empty_pkgs = {}
for suite in SUITES:
empty_pkgs[suite] = []
untested, msg_untested = empty_pkgs, ''
new, msg_new = schedule_new_versions(arch, total)
old_ftbfs, msg_old_ftbfs = empty_pkgs, ''
old_depwait, msg_old_depwait = empty_pkgs, ''
old, msg_old = empty_pkgs, ''
four04, msg_404 = empty_pkgs, ''
else:
log.info(str(total) + ' packages already scheduled' +
', scheduling some more...')
untested, msg_untested = schedule_untested_packages(arch, total)
new, msg_new = schedule_new_versions(arch, total+len(untested))
old_ftbfs, msg_old_ftbfs = schedule_old_ftbfs_versions(arch, total+len(untested)+len(new))
old_depwait, msg_old_depwait = schedule_old_depwait_versions(arch, total+len(untested)+len(new)+len(old_ftbfs))
four04, msg_404 = schedule_404_versions(arch, total+len(untested)+len(new)+len(old_ftbfs)+len(old_depwait))
old, msg_old = schedule_old_versions(arch, total+len(untested)+len(new)+len(old_ftbfs)+len(old_depwait)+len(four04))
now_queued_here = {}
# make sure to schedule packages in unstable first
# (but keep the view ordering everywhere else)
priotized_suite_order = ['unstable']
for suite in SUITES:
if suite not in priotized_suite_order:
priotized_suite_order.append(suite)
for suite in priotized_suite_order:
query = "SELECT count(*) " \
"FROM schedule AS p JOIN sources AS s ON p.package_id=s.id " \
"WHERE s.suite='{suite}' AND s.architecture='{arch}'"
query = query.format(suite=suite, arch=arch)
now_queued_here[suite] = int(query_db(query)[0][0]) + \
len(untested[suite]+new[suite]+old[suite])
# schedule packages differently in the queue...
to_be_scheduled = queue_packages({}, untested[suite], datetime.now()+timedelta(minutes=-720))
assert(isinstance(to_be_scheduled, dict))
to_be_scheduled = queue_packages(to_be_scheduled, new[suite], datetime.now()+timedelta(minutes=-1440))
to_be_scheduled = queue_packages(to_be_scheduled, old_ftbfs[suite], datetime.now()+timedelta(minutes=360))
to_be_scheduled = queue_packages(to_be_scheduled, old_depwait[suite], datetime.now()+timedelta(minutes=-360))
to_be_scheduled = queue_packages(to_be_scheduled, old[suite], datetime.now()+timedelta(minutes=720))
to_be_scheduled = queue_packages(to_be_scheduled, four04[suite], datetime.now())
schedule_packages(to_be_scheduled)
# update the scheduled page
generate_schedule(arch) # from reproducible_html_indexes
# build the message text for this arch
message = ' - ' + arch + ': '
if msg_untested:
message += msg_untested + ', '
if msg_new:
message += msg_new + ', '
if msg_404:
message += msg_404 + ', '
if msg_old_ftbfs:
message += msg_old_ftbfs + ', '
if msg_old_depwait:
message += msg_old_depwait + ', '
if msg_old:
message += msg_old + ', '
total = [now_queued_here[x] for x in SUITES]
message += 'for ' + str(sum(total))
message += ' or ' + '+'.join([str(now_queued_here[x]) for x in SUITES])
message += ' in total.'
log.info('Scheduling for architecture ' + arch + ' done.')
log.info('--------------------------------------------------------------')
    # only notify irc if there were packages scheduled in any suite
for x in SUITES:
if len(untested[x])+len(new[x])+len(old[x])+len(old_ftbfs[x])+len(old_depwait[x]) > 0:
return message
return ''
if __name__ == '__main__':
log.info('Updating sources tables for all suites.')
for suite in SUITES:
update_sources(suite)
log.info('Sources for suite %s done at %s.', suite, datetime.now())
purge_old_pages()
query = "SELECT count(*) " + \
"FROM schedule AS p JOIN sources AS s ON s.id=p.package_id " + \
"WHERE s.architecture='{}'"
message = ''
for arch in ARCHS:
log.info('Scheduling for %s...', arch)
overall = int(query_db(query.format(arch))[0][0])
if overall > (MAXIMA[arch]*3):
log.info('%s packages already scheduled for %s, nothing to do.', overall, arch)
continue
log.info('%s packages already scheduled for %s, probably scheduling some '
'more...', overall, arch)
message += scheduler(arch) + '\n'
log.info('Arch %s scheduled at %s.', arch, datetime.now())
if message != '':
# build the kgb message text
message = 'Scheduled in ' + '+'.join(SUITES) + ':\n' + message
log.info(message)
# irc_msg(message, channel='debian-reproducible-changes')
# send mail instead of notifying via irc, less intrusive
msg = MIMEText(message)
mail_from = '[email protected]'
mail_to = '[email protected]'
msg['From'] = mail_from
msg['To'] = mail_to
msg['Subject'] = 'packages scheduled for reproducible Debian'
s = smtplib.SMTP('localhost')
s.sendmail(mail_from, [mail_to], msg.as_string())
s.quit()
| gpl-2.0 |
sdague/home-assistant | homeassistant/components/lifx_cloud/scene.py | 15 | 2756 | """Support for LIFX Cloud scenes."""
import asyncio
import logging
from typing import Any
import aiohttp
from aiohttp.hdrs import AUTHORIZATION
import async_timeout
import voluptuous as vol
from homeassistant.components.scene import Scene
from homeassistant.const import (
CONF_PLATFORM,
CONF_TIMEOUT,
CONF_TOKEN,
HTTP_OK,
HTTP_UNAUTHORIZED,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 10
PLATFORM_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "lifx_cloud",
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
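# Example configuration.yaml entry matching the schema above (illustrative
# sketch only; the token value is a placeholder):
#
# scene:
#   - platform: lifx_cloud
#     token: YOUR_LIFX_API_TOKEN
#     timeout: 10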
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the scenes stored in the LIFX Cloud."""
token = config.get(CONF_TOKEN)
timeout = config.get(CONF_TIMEOUT)
headers = {AUTHORIZATION: f"Bearer {token}"}
url = "https://api.lifx.com/v1/scenes"
try:
httpsession = async_get_clientsession(hass)
with async_timeout.timeout(timeout):
scenes_resp = await httpsession.get(url, headers=headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.exception("Error on %s", url)
return False
status = scenes_resp.status
if status == HTTP_OK:
data = await scenes_resp.json()
devices = [LifxCloudScene(hass, headers, timeout, scene) for scene in data]
async_add_entities(devices)
return True
if status == HTTP_UNAUTHORIZED:
_LOGGER.error("Unauthorized (bad token?) on %s", url)
return False
_LOGGER.error("HTTP error %d on %s", scenes_resp.status, url)
return False
class LifxCloudScene(Scene):
"""Representation of a LIFX Cloud scene."""
def __init__(self, hass, headers, timeout, scene_data):
"""Initialize the scene."""
self.hass = hass
self._headers = headers
self._timeout = timeout
self._name = scene_data["name"]
self._uuid = scene_data["uuid"]
@property
def name(self):
"""Return the name of the scene."""
return self._name
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
url = f"https://api.lifx.com/v1/scenes/scene_id:{self._uuid}/activate"
try:
httpsession = async_get_clientsession(self.hass)
with async_timeout.timeout(self._timeout):
await httpsession.put(url, headers=self._headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.exception("Error on %s", url)
| apache-2.0 |
tiexinliu/odoo_addons | smile_model_methods/wizard/wizard_ir_model_method.py | 5 | 1527 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Smile (<http://www.smile.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api, fields, models
class WizardIrModelMethods(models.TransientModel):
_name = 'wizard.ir.model.methods'
_description = 'Wizard Model Method'
_rec_name = ''
models_id = fields.Many2many('ir.model', 'ir_model_methotds_rel', 'wizard_model_id', 'model_id', string="Model list")
to_update = fields.Boolean("Update lines ?")
@api.multi
def button_call(self):
self.ensure_one()
self.env['ir.model.methods'].with_context(to_update=self.to_update).update_list(self.models_id.mapped('model'))
| agpl-3.0 |
alfredodeza/boto | boto/sqs/regioninfo.py | 167 | 1524 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo
class SQSRegionInfo(RegionInfo):
def __init__(self, connection=None, name=None, endpoint=None,
connection_cls=None):
from boto.sqs.connection import SQSConnection
super(SQSRegionInfo, self).__init__(connection, name, endpoint,
SQSConnection)
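# Hedged usage sketch (endpoint value is illustrative): RegionInfo.connect()
# instantiates the stored connection_cls, which this subclass pins to SQSConnection.
# region = SQSRegionInfo(name='us-east-1', endpoint='queue.amazonaws.com')
# conn = region.connect(aws_access_key_id='...', aws_secret_access_key='...')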
| mit |
shenghaozou/PythonGrader | P6/gradeSettings.py | 1 | 8808 | import re
ASSIGNMENT_NAME = 'P6'
ASSIGNMENT_TEST_NUM = 7
EMAIL_SEND = 0
EMAIL_SEND_UPPER_BOUND = 0
def toset(x):
if x is not None:
return set(x)
else:
return None
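# Illustrative: toset([1, 5, 3]) -> set([1, 3, 5]); toset(None) -> None.
# It backs the 'ret_mapper': 'toset' entries in TEST_FUNC below, letting
# get_walk results be compared order-insensitively.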
OUTPUT_RESULT_REG_EXP = []
SCRIPT_REG_EXP = []
SCRIPT_EXISTENCE_REG_EXP = []
FUNCTION_ORDER = ['get_level', 'get_walk', 'update', 'run_simulation']
TEST_FUNC = {'get_level':[ {'input_args':[0], 'return_val':0},
{'input_args':[6], 'return_val':2},
{'input_args':[15], 'return_val':4},
{'input_args':[12], 'return_val':4},
{'input_args':[11], 'return_val':3}],
'get_walk':[ {'prerun':'get_walk.py', 'input_args':[3], 'return_val':[1,5,3]},
{'prerun':'get_walk.py', 'input_args':[2], 'return_val':[2,5,3]},
{'prerun':'get_walk2.py', 'input_args':[1], 'return_val':[3,4,3]},
{'prerun':'get_walk2.py', 'input_args':[4], 'return_val':[1,4,3]},
{'prerun':'get_walk.py', 'input_args':[3], 'return_val':set([1,5,3]), 'ret_mapper':'toset'},
{'prerun':'get_walk.py', 'input_args':[2], 'return_val':set([2,5,3]), 'ret_mapper':'toset'},
{'prerun':'get_walk2.py', 'input_args':[1], 'return_val':set([3,4]), 'ret_mapper':'toset'},
{'prerun':'get_walk2.py', 'input_args':[4], 'return_val':set([1,4,3]), 'ret_mapper':'toset'}],
'update':[ {'input_args':[12, -1, 3],'return_val': 9},
{'input_args':[0, 1, 2],'return_val': 2},
{'input_args':[5, -1, 3],'return_val': 2},
{'input_args':[8, 1, 5],'return_val': 13}],
'run_simulation':[ {'input_file': 'run_simulation_1.txt',
'return_val': [0, 2, 4, 6, 8, 10, 8, 6, 4, 2, 4, 6, 8, 10, 12, 10, 8, 6, 4, 6, 8, 10, 12, 14, 12, 10, 8, 6],
'stdout_pat_setting': re.I,
'stdout_pat_file': 'run_simulation_1_out.txt'},
{'input_file': 'run_simulation_2.txt',
'return_val': [0, 4, 8, 12, 16, 20, 24, 20, 16, 12, 8, 4, 8, 12, 16, 20, 24, 28, 24, 20, 16, 12, 8, 12, 16, 20, 24, 28, 32, 28, 24, 20, 16, 12],
'stdout_pat_setting': re.I,
'stdout_pat_file': 'run_simulation_2_out.txt'},
{'input_file': 'run_simulation_1.txt',
'stdout_pat_setting': re.I,
'stdout_pat_file': 'run_simulation_prompt_out.txt'},
{'input_file': 'run_simulation_1.txt',
'stdout_pat_setting': re.I,
'stdout_pat_file': 'run_simulation_value_out.txt'},
{'input_file': 'run_simulation_3.txt',
'return_val': [0, 2, 4, 6, 8, 10, 12, 10, 8, 6, 4, 6, 8, 10, 12, 14, 16, 14, 12, 10, 8, 10, 12, 14, 16, 18, 20, 18, 16, 14, 12, 14,
16, 18, 20, 22, 24, 22, 20, 18, 16, 18, 20, 22, 24, 26, 28, 26, 24, 22, 20, 22, 24, 26, 28, 30, 32, 30, 28, 26, 24,
26, 28, 30, 32, 34, 36, 34, 32, 30, 28, 30, 32, 34, 36, 38, 40, 38, 36, 34, 32, 34, 36, 38, 40, 42, 44, 42, 40, 38, 36],
'stdout_pat_setting': re.I,
'stdout_pat_file': 'run_simulation_3_out.txt'}]}
TEST_SCRIPT = {'get_level':[{'script_pat':'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
'get_walk':[{'script_pat':'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
'update':[{'script_pat':'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
'run_simulation':[{'script_pat':'"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}]
}
GRADING_RULES_ORDER = ['get_level',
'get_walk',
'update',
'run_simulation_prompt',
'run_simulation_value',
'run_simulation_termination',
'docstring']
GRADING_RULES = {'get_level':{'rules':'and','order':0,'points':1,
'test':[{'type':'func','func_name':'get_level', 'index':0,'check':'return_val','error':'test 1 of get_level'},
{'type':'func','func_name':'get_level', 'index':1,'check':'return_val','error':'test 2 of get_level'},
{'type':'func','func_name':'get_level', 'index':2,'check':'return_val','error':'test 3 of get_level'},
{'type':'func','func_name':'get_level', 'index':3,'check':'return_val','error':'test 4 of get_level'},
{'type':'func','func_name':'get_level', 'index':4,'check':'return_val','error':'test 5 of get_level'}]},
'get_walk':{'rules':'groupadd','order':1,'points':2,
'groups':[(1,[{'type':'func','func_name':'get_walk', 'index':0,'check':'return_val','error':'test 1 of get_walk'},
{'type':'func','func_name':'get_walk', 'index':1,'check':'return_val','error':'test 2 of get_walk'},
{'type':'func','func_name':'get_walk', 'index':2,'check':'return_val','error':'test 3 of get_walk'},
{'type':'func','func_name':'get_walk', 'index':3,'check':'return_val','error':'test 4 of get_walk'}]),
(1,[{'type':'func','func_name':'get_walk', 'index':4,'check':'return_val','error':'test 1 of get_walk set'},
{'type':'func','func_name':'get_walk', 'index':5,'check':'return_val','error':'test 2 of get_walk set'},
{'type':'func','func_name':'get_walk', 'index':6,'check':'return_val','error':'test 3 of get_walk set'},
{'type':'func','func_name':'get_walk', 'index':7,'check':'return_val','error':'test 4 of get_walk set'}])]},
'update': {'rules':'and','order':2,'points':1,
'test':[{'type':'func','func_name':'update', 'index':0,'check':'return_val','error':'test 1 of update'},
{'type':'func','func_name':'update', 'index':1,'check':'return_val','error':'test 2 of update'},
{'type':'func','func_name':'update', 'index':2,'check':'return_val','error':'test 3 of update'},
{'type':'func','func_name':'update', 'index':3,'check':'return_val','error':'test 4 of update'}]},
'run_simulation_prompt':{'rules':'and','order':3,'points':2,
'test':[{'type':'func','func_name':'run_simulation', 'index':2, 'check':'stdout_pat','points':2, 'error':'test of run_simulation prompt correctly'}]},
'run_simulation_value': {'rules':'and','order':4,'points':1,
'test':[{'type':'func','func_name':'run_simulation', 'index':3, 'check':'stdout_pat', 'error':'test of run_simulation values correctly'}]},
'run_simulation_termination':{'rules':'add','order':5,'points':2,
'test':[{'type':'func','func_name':'run_simulation', 'index':0,'check':'return_val','error':'test 1 of run_simulation termination'},
{'type':'func','func_name':'run_simulation', 'index':4,'check':'return_val','error':'test 2 of run_simulation termination'}]},
'docstring':{'rules':'and','order':6,'points':1,
'test':[{'type':'script','func_name':'get_level', 'index':0, 'check':'script_pat', 'error':'docstring in {func_name}'},
{'type':'script','func_name':'get_walk', 'index':0, 'check':'script_pat', 'error':'docstring in {func_name}'},
{'type':'script','func_name':'update', 'index':0, 'check':'script_pat', 'error':'docstring in {func_name}'},
{'type':'script','func_name':'run_simulation', 'index':0, 'check':'script_pat', 'error':'docstring in {func_name}'}]}
}
SCRIPT_TEST = True
| apache-2.0 |
CyanogenMod/android_kernel_htc_m7 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
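# midpoint smoothing: weights recent samples more heavily than a true running mean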
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
| gpl-2.0 |
style95/openwhisk | tools/actionProxy/invoke.py | 3 | 5428 | #!/usr/bin/env python
"""Executable Python script for testing the action proxy.
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
This script is useful for testing the action proxy (or its derivatives)
by simulating invoker interactions. Use it in combination with
docker run <image> which starts up the action proxy.
Example:
docker run -i -t -p 8080:8080 dockerskeleton # locally built images may be referenced without a tag
./invoke.py init <action source file>
./invoke.py run '{"some":"json object as a string"}'
For additional help, try ./invoke.py -h
"""
import os
import re
import sys
import json
import base64
import requests
import codecs
import argparse
try:
import argcomplete
except ImportError:
argcomplete = False
def main():
try:
args = parseArgs()
exitCode = {
'init' : init,
'run' : run
}[args.cmd](args)
except Exception as e:
print(e)
exitCode = 1
sys.exit(exitCode)
def dockerHost():
dockerHost = 'localhost'
if 'DOCKER_HOST' in os.environ:
try:
dockerHost = re.compile('tcp://(.*):[\d]+').findall(os.environ['DOCKER_HOST'])[0]
except Exception:
print('cannot determine docker host from %s' % os.environ['DOCKER_HOST'])
sys.exit(-1)
return dockerHost
def containerRoute(args, path):
return 'http://%s:%s/%s' % (args.host, args.port, path)
class objectify(object):
def __init__(self, d):
self.__dict__ = d
def parseArgs():
parser = argparse.ArgumentParser(description='initialize and run an OpenWhisk action container')
parser.add_argument('-v', '--verbose', help='verbose output', action='store_true')
parser.add_argument('--host', help='action container host', default=dockerHost())
parser.add_argument('-p', '--port', help='action container port number', default=8080, type=int)
subparsers = parser.add_subparsers(title='available commands', dest='cmd')
initmenu = subparsers.add_parser('init', help='initialize container with src or zip/tgz file')
initmenu.add_argument('-b', '--binary', help='treat artifact as binary', action='store_true')
initmenu.add_argument('-r', '--run', nargs='?', default=None, help='run after init')
initmenu.add_argument('main', nargs='?', default='main', help='name of the "main" entry method for the action')
initmenu.add_argument('artifact', help='a source file or zip/tgz archive')
initmenu.add_argument('env', nargs='?', help='the environment variables to export to the action, either a reference to a file or an inline JSON object', default=None)
runmenu = subparsers.add_parser('run', help='send arguments to container to run action')
runmenu.add_argument('payload', nargs='?', help='the arguments to send to the action, either a reference to a file or an inline JSON object', default=None)
if argcomplete:
argcomplete.autocomplete(parser)
return parser.parse_args()
def init(args):
main = args.main
artifact = args.artifact
if artifact and (args.binary or artifact.endswith('.zip') or artifact.endswith('tgz') or artifact.endswith('jar')):
with open(artifact, 'rb') as fp:
contents = fp.read()
contents = str(base64.b64encode(contents), 'utf-8')
binary = True
elif artifact != '':
with(codecs.open(artifact, 'r', 'utf-8')) as fp:
contents = fp.read()
binary = False
else:
contents = None
binary = False
r = requests.post(
containerRoute(args, 'init'),
json = {
"value": {
"code": contents,
"binary": binary,
"main": main,
"env": processPayload(args.env)
}
})
print(r.text)
if r.status_code == 200 and args.run != None:
runArgs = objectify({})
runArgs.__dict__ = args.__dict__.copy()
runArgs.payload = args.run
run(runArgs)
def run(args):
value = processPayload(args.payload)
if args.verbose:
print('Sending value: %s...' % json.dumps(value)[0:40])
r = requests.post(containerRoute(args, 'run'), json = {"value": value})
print(str(r.content, 'utf-8'))
def processPayload(payload):
if payload and os.path.exists(payload):
with open(payload) as fp:
return json.load(fp)
try:
    d = json.loads(payload if payload else '{}')
    if isinstance(d, dict):
        return d
    raise ValueError('not a JSON object')
except Exception:
    print('payload must be a JSON object.')
    sys.exit(-1)
if __name__ == '__main__':
main()
| apache-2.0 |
manassolanki/erpnext | erpnext/setup/setup_wizard/operations/company_setup.py | 6 | 4042 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cstr, getdate
from frappe.utils.file_manager import save_file
from .default_website import website_maker
from erpnext.accounts.doctype.account.account import RootNotEditable
def create_fiscal_year_and_company(args):
if (args.get('fy_start_date')):
curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
frappe.get_doc({
"doctype":"Fiscal Year",
'year': curr_fiscal_year,
'year_start_date': args.get('fy_start_date'),
'year_end_date': args.get('fy_end_date'),
}).insert()
if (args.get('company_name')):
frappe.get_doc({
"doctype":"Company",
'company_name':args.get('company_name'),
'enable_perpetual_inventory': 1,
'abbr':args.get('company_abbr'),
'default_currency':args.get('currency'),
'country': args.get('country'),
'create_chart_of_accounts_based_on': 'Standard Template',
'chart_of_accounts': args.get('chart_of_accounts'),
'domain': args.get('domains')[0]
}).insert()
def enable_shopping_cart(args):
# Needs price_lists
frappe.get_doc({
"doctype": "Shopping Cart Settings",
"enabled": 1,
'company': args.get('company_name') ,
'price_list': frappe.db.get_value("Price List", {"selling": 1}),
'default_customer_group': _("Individual"),
'quotation_series': "QTN-",
}).insert()
def create_bank_account(args):
if args.get("bank_account"):
company_name = args.get('company_name')
bank_account_group = frappe.db.get_value("Account",
{"account_type": "Bank", "is_group": 1, "root_type": "Asset",
"company": company_name})
if bank_account_group:
bank_account = frappe.get_doc({
"doctype": "Account",
'account_name': args.get("bank_account"),
'parent_account': bank_account_group,
'is_group':0,
'company': company_name,
"account_type": "Bank",
})
try:
return bank_account.insert()
except RootNotEditable:
frappe.throw(_("Bank account cannot be named as {0}").format(args.get("bank_account")))
except frappe.DuplicateEntryError:
# bank account same as a CoA entry
pass
def create_email_digest():
from frappe.utils.user import get_system_managers
system_managers = get_system_managers(only_name=True)
if not system_managers:
return
companies = frappe.db.sql_list("select name FROM `tabCompany`")
for company in companies:
if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
edigest = frappe.get_doc({
"doctype": "Email Digest",
"name": "Default Weekly Digest - " + company,
"company": company,
"frequency": "Weekly",
"recipient_list": "\n".join(system_managers)
})
for df in edigest.meta.get("fields", {"fieldtype": "Check"}):
if df.fieldname != "scheduler_errors":
edigest.set(df.fieldname, 1)
edigest.insert()
# scheduler errors digest
if companies:
edigest = frappe.new_doc("Email Digest")
edigest.update({
"name": "Scheduler Errors",
"company": companies[0],
"frequency": "Daily",
"recipient_list": "\n".join(system_managers),
"scheduler_errors": 1,
"enabled": 1
})
edigest.insert()
def create_logo(args):
if args.get("attach_logo"):
attach_logo = args.get("attach_logo").split(",")
if len(attach_logo)==3:
filename, filetype, content = attach_logo
fileurl = save_file(filename, content, "Website Settings", "Website Settings",
decode=True).file_url
frappe.db.set_value("Website Settings", "Website Settings", "brand_html",
"<img src='{0}' style='max-width: 40px; max-height: 25px;'> {1}".format(fileurl, args.get("company_name") ))
def create_website(args):
website_maker(args)
def get_fy_details(fy_start_date, fy_end_date):
start_year = getdate(fy_start_date).year
if start_year == getdate(fy_end_date).year:
fy = cstr(start_year)
else:
fy = cstr(start_year) + '-' + cstr(start_year + 1)
return fy | gpl-3.0 |
ticosax/django | tests/middleware/tests.py | 5 | 32482 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gzip
import random
import re
from io import BytesIO
from unittest import skipIf
from django.conf import settings
from django.core import mail
from django.http import (
FileResponse, HttpRequest, HttpResponse, HttpResponsePermanentRedirect,
HttpResponseRedirect, StreamingHttpResponse,
)
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import (
BrokenLinkEmailsMiddleware, CommonMiddleware,
)
from django.middleware.gzip import GZipMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.test import RequestFactory, TestCase, override_settings
from django.test.utils import patch_logger
from django.utils import six
from django.utils.encoding import force_str
from django.utils.six.moves import range
from django.utils.six.moves.urllib.parse import quote
@override_settings(ROOT_URLCONF='middleware.urls')
class CommonMiddlewareTest(TestCase):
rf = RequestFactory()
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get('/slash/')
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
request = self.rf.get('/noslash')
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get('/unknown')
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get('/slash')
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/slash/')
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring(self):
"""
APPEND_SLASH should preserve querystrings when redirecting.
"""
request = self.rf.get('/slash?test=1')
r = CommonMiddleware().process_request(request)
self.assertEqual(r.url, '/slash/?test=1')
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG(self):
"""
Tests that while in debug mode, an exception is raised with a warning
when a failed attempt is made to POST, PUT, or PATCH to an URL which
would normally be redirected to a slashed version.
"""
msg = "maintaining %s data. Change your form to point to testserver/slash/"
request = self.rf.get('/slash')
request.method = 'POST'
with six.assertRaisesRegex(self, RuntimeError, msg % request.method):
CommonMiddleware().process_request(request)
request = self.rf.get('/slash')
request.method = 'PUT'
with six.assertRaisesRegex(self, RuntimeError, msg % request.method):
CommonMiddleware().process_request(request)
request = self.rf.get('/slash')
request.method = 'PATCH'
with six.assertRaisesRegex(self, RuntimeError, msg % request.method):
CommonMiddleware().process_request(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get('/slash')
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted(self):
"""
URLs which require quoting should be redirected to their slash version ok.
"""
request = self.rf.get(quote('/needsquoting#'))
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(
r.url,
'/needsquoting%23/')
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www(self):
request = self.rf.get('/path/')
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(
r.url,
'http://www.testserver/path/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash(self):
request = self.rf.get('/slash/')
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url,
'http://www.testserver/slash/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless(self):
request = self.rf.get('/slash')
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url,
'http://www.testserver/slash/')
# The following tests examine expected behavior given a custom urlconf that
# overrides the default one through the request object.
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash_custom_urlconf(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get('/customurlconf/slash/')
request.urlconf = 'middleware.extra_urls'
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource_custom_urlconf(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
request = self.rf.get('/customurlconf/noslash')
request.urlconf = 'middleware.extra_urls'
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown_custom_urlconf(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get('/customurlconf/unknown')
request.urlconf = 'middleware.extra_urls'
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_custom_urlconf(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware().process_request(request)
self.assertIsNotNone(r,
"CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf")
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/customurlconf/slash/')
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self):
"""
Tests that while in debug mode, an exception is raised with a warning
when a failed attempt is made to POST to an URL which would normally be
redirected to a slashed version.
"""
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
request.method = 'POST'
with six.assertRaisesRegex(self, RuntimeError, 'end in a slash'):
CommonMiddleware().process_request(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled_custom_urlconf(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
self.assertEqual(CommonMiddleware().process_request(request), None)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted_custom_urlconf(self):
"""
URLs which require quoting should be redirected to their slash version ok.
"""
request = self.rf.get(quote('/customurlconf/needsquoting#'))
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware().process_request(request)
self.assertIsNotNone(r,
"CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf")
self.assertEqual(r.status_code, 301)
self.assertEqual(
r.url,
'/customurlconf/needsquoting%23/')
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www_custom_urlconf(self):
request = self.rf.get('/customurlconf/path/')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(
r.url,
'http://www.testserver/customurlconf/path/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash_custom_urlconf(self):
request = self.rf.get('/customurlconf/slash/')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url,
'http://www.testserver/customurlconf/slash/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless_custom_urlconf(self):
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url,
'http://www.testserver/customurlconf/slash/')
# Other tests
@override_settings(DISALLOWED_USER_AGENTS=[re.compile(r'foo')])
def test_disallowed_user_agents(self):
with patch_logger('django.request', 'warning') as log_messages:
request = self.rf.get('/slash')
request.META['HTTP_USER_AGENT'] = 'foo'
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 403)
self.assertEqual(log_messages, ['Forbidden (User agent): /slash'])
def test_non_ascii_query_string_does_not_crash(self):
"""Regression test for #15152"""
request = self.rf.get('/slash')
request.META['QUERY_STRING'] = force_str('drink=café')
response = CommonMiddleware().process_request(request)
self.assertEqual(response.status_code, 301)
def test_response_redirect_class(self):
request = self.rf.get('/slash')
r = CommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/slash/')
self.assertIsInstance(r, HttpResponsePermanentRedirect)
def test_response_redirect_class_subclass(self):
class MyCommonMiddleware(CommonMiddleware):
response_redirect_class = HttpResponseRedirect
request = self.rf.get('/slash')
r = MyCommonMiddleware().process_request(request)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, '/slash/')
self.assertIsInstance(r, HttpResponseRedirect)
@override_settings(
IGNORABLE_404_URLS=[re.compile(r'foo')],
MANAGERS=['[email protected]'],
)
class BrokenLinkEmailsMiddlewareTest(TestCase):
rf = RequestFactory()
def setUp(self):
self.req = self.rf.get('/regular_url/that/does/not/exist')
self.resp = self.client.get(self.req.path)
def test_404_error_reporting(self):
self.req.META['HTTP_REFERER'] = '/another/url/'
BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Broken', mail.outbox[0].subject)
def test_404_error_reporting_no_referer(self):
BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 0)
def test_404_error_reporting_ignored_url(self):
self.req.path = self.req.path_info = 'foo_url/that/does/not/exist'
BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 0)
@skipIf(six.PY3, "HTTP_REFERER is str type on Python 3")
def test_404_error_nonascii_referrer(self):
# Such referer strings should not happen, but anyway, if it happens,
# let's not crash
self.req.META['HTTP_REFERER'] = b'http://testserver/c/\xd0\xbb\xd0\xb8/'
BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 1)
@skipIf(six.PY3, "HTTP_USER_AGENT is str type on Python 3")
def test_404_error_nonascii_user_agent(self):
# Such user agent strings should not happen, but anyway, if it happens,
# let's not crash
self.req.META['HTTP_REFERER'] = '/another/url/'
self.req.META['HTTP_USER_AGENT'] = b'\xd0\xbb\xd0\xb8\xff\xff'
BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('User agent: \u043b\u0438\ufffd\ufffd\n', mail.outbox[0].body)
def test_custom_request_checker(self):
class SubclassedMiddleware(BrokenLinkEmailsMiddleware):
ignored_user_agent_patterns = (re.compile(r'Spider.*'),
re.compile(r'Robot.*'))
def is_ignorable_request(self, request, uri, domain, referer):
'''Check user-agent in addition to normal checks.'''
if super(SubclassedMiddleware, self).is_ignorable_request(request, uri, domain, referer):
return True
user_agent = request.META['HTTP_USER_AGENT']
return any(pattern.search(user_agent) for pattern in
self.ignored_user_agent_patterns)
self.req.META['HTTP_REFERER'] = '/another/url/'
self.req.META['HTTP_USER_AGENT'] = 'Spider machine 3.4'
SubclassedMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 0)
self.req.META['HTTP_USER_AGENT'] = 'My user agent'
SubclassedMiddleware().process_response(self.req, self.resp)
self.assertEqual(len(mail.outbox), 1)
@override_settings(ROOT_URLCONF='middleware.cond_get_urls')
class ConditionalGetMiddlewareTest(TestCase):
def setUp(self):
self.req = RequestFactory().get('/')
self.resp = self.client.get(self.req.path_info)
# Tests for the Date header
def test_date_header_added(self):
self.assertNotIn('Date', self.resp)
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertIn('Date', self.resp)
# Tests for the Content-Length header
def test_content_length_header_added(self):
content_length = len(self.resp.content)
self.assertNotIn('Content-Length', self.resp)
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertIn('Content-Length', self.resp)
self.assertEqual(int(self.resp['Content-Length']), content_length)
def test_content_length_header_not_added(self):
resp = StreamingHttpResponse('content')
self.assertNotIn('Content-Length', resp)
resp = ConditionalGetMiddleware().process_response(self.req, resp)
self.assertNotIn('Content-Length', resp)
def test_content_length_header_not_changed(self):
bad_content_length = len(self.resp.content) + 10
self.resp['Content-Length'] = bad_content_length
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(int(self.resp['Content-Length']), bad_content_length)
# Tests for the ETag header
def test_if_none_match_and_no_etag(self):
self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 200)
def test_no_if_none_match_and_etag(self):
self.resp['ETag'] = 'eggs'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 200)
def test_if_none_match_and_same_etag(self):
self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 304)
def test_if_none_match_and_different_etag(self):
self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
self.resp['ETag'] = 'eggs'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 200)
def test_if_none_match_and_redirect(self):
self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
self.resp['Location'] = '/'
self.resp.status_code = 301
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 301)
def test_if_none_match_and_client_error(self):
self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
self.resp.status_code = 400
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 400)
@override_settings(USE_ETAGS=True)
def test_etag(self):
req = HttpRequest()
res = HttpResponse('content')
self.assertTrue(
CommonMiddleware().process_response(req, res).has_header('ETag'))
@override_settings(USE_ETAGS=True)
def test_etag_streaming_response(self):
req = HttpRequest()
res = StreamingHttpResponse(['content'])
res['ETag'] = 'tomatoes'
self.assertEqual(
CommonMiddleware().process_response(req, res).get('ETag'),
'tomatoes')
@override_settings(USE_ETAGS=True)
def test_no_etag_streaming_response(self):
req = HttpRequest()
res = StreamingHttpResponse(['content'])
self.assertFalse(
CommonMiddleware().process_response(req, res).has_header('ETag'))
# Tests for the Last-Modified header
def test_if_modified_since_and_no_last_modified(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 200)
def test_no_if_modified_since_and_last_modified(self):
self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 200)
def test_if_modified_since_and_same_last_modified(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_past(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_future(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:41:44 GMT'
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 200)
def test_if_modified_since_and_redirect(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
self.resp['Location'] = '/'
self.resp.status_code = 301
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 301)
def test_if_modified_since_and_client_error(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
self.resp.status_code = 400
self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.resp.status_code, 400)
class XFrameOptionsMiddlewareTest(TestCase):
"""
Tests for the X-Frame-Options clickjacking prevention middleware.
"""
def test_same_origin(self):
"""
Tests that the X_FRAME_OPTIONS setting can be set to SAMEORIGIN to
have the middleware use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
with override_settings(X_FRAME_OPTIONS='sameorigin'):
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
def test_deny(self):
"""
Tests that the X_FRAME_OPTIONS setting can be set to DENY to
have the middleware use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS='DENY'):
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'DENY')
with override_settings(X_FRAME_OPTIONS='deny'):
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'DENY')
def test_defaults_sameorigin(self):
"""
Tests that if the X_FRAME_OPTIONS setting is not set then it defaults
to SAMEORIGIN.
"""
with override_settings(X_FRAME_OPTIONS=None):
del settings.X_FRAME_OPTIONS # restored by override_settings
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
def test_dont_set_if_set(self):
"""
Tests that if the X-Frame-Options header is already set then the
middleware does not attempt to override it.
"""
with override_settings(X_FRAME_OPTIONS='DENY'):
response = HttpResponse()
response['X-Frame-Options'] = 'SAMEORIGIN'
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
response)
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
response = HttpResponse()
response['X-Frame-Options'] = 'DENY'
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
response)
self.assertEqual(r['X-Frame-Options'], 'DENY')
def test_response_exempt(self):
"""
Tests that if the response has a xframe_options_exempt attribute set
to False then it still sets the header, but if it's set to True then
it does not.
"""
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
response = HttpResponse()
response.xframe_options_exempt = False
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
response)
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
response = HttpResponse()
response.xframe_options_exempt = True
r = XFrameOptionsMiddleware().process_response(HttpRequest(),
response)
self.assertEqual(r.get('X-Frame-Options', None), None)
def test_is_extendable(self):
"""
Tests that the XFrameOptionsMiddleware method that determines the
X-Frame-Options header value can be overridden based on something in
the request or response.
"""
class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware):
# This is just an example for testing purposes...
def get_xframe_options_value(self, request, response):
if getattr(request, 'sameorigin', False):
return 'SAMEORIGIN'
if getattr(response, 'sameorigin', False):
return 'SAMEORIGIN'
return 'DENY'
with override_settings(X_FRAME_OPTIONS='DENY'):
response = HttpResponse()
response.sameorigin = True
r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(),
response)
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
request = HttpRequest()
request.sameorigin = True
r = OtherXFrameOptionsMiddleware().process_response(request,
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(),
HttpResponse())
self.assertEqual(r['X-Frame-Options'], 'DENY')
class GZipMiddlewareTest(TestCase):
"""
Tests the GZip middleware.
"""
short_string = b"This string is too short to be worth compressing."
compressible_string = b'a' * 500
uncompressible_string = b''.join(six.int2byte(random.randint(0, 255)) for _ in range(500))
sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300]
def setUp(self):
self.req = RequestFactory().get('/')
self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
self.req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
self.resp = HttpResponse()
self.resp.status_code = 200
self.resp.content = self.compressible_string
self.resp['Content-Type'] = 'text/html; charset=UTF-8'
self.stream_resp = StreamingHttpResponse(self.sequence)
self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'
self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode)
self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8'
@staticmethod
def decompress(gzipped_string):
return gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)).read()
def test_compress_response(self):
"""
Tests that compression is performed on responses with compressible content.
"""
r = GZipMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertEqual(r.get('Content-Length'), str(len(r.content)))
def test_compress_streaming_response(self):
"""
Tests that compression is performed on responses with streaming content.
"""
r = GZipMiddleware().process_response(self.req, self.stream_resp)
self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertFalse(r.has_header('Content-Length'))
def test_compress_streaming_response_unicode(self):
"""
Tests that compression is performed on responses with streaming Unicode content.
"""
r = GZipMiddleware().process_response(self.req, self.stream_resp_unicode)
self.assertEqual(self.decompress(b''.join(r)), b''.join(x.encode('utf-8') for x in self.sequence_unicode))
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertFalse(r.has_header('Content-Length'))
def test_compress_file_response(self):
"""
Tests that compression is performed on FileResponse.
"""
open_file = lambda: open(__file__, 'rb')
with open_file() as file1:
file_resp = FileResponse(file1)
file_resp['Content-Type'] = 'text/html; charset=UTF-8'
r = GZipMiddleware().process_response(self.req, file_resp)
with open_file() as file2:
self.assertEqual(self.decompress(b''.join(r)), file2.read())
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertIsNot(r.file_to_stream, file1)
def test_compress_non_200_response(self):
"""
Tests that compression is performed on responses with a status other than 200.
See #10762.
"""
self.resp.status_code = 404
r = GZipMiddleware().process_response(self.req, self.resp)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get('Content-Encoding'), 'gzip')
def test_no_compress_short_response(self):
"""
Tests that compression isn't performed on responses with short content.
"""
self.resp.content = self.short_string
r = GZipMiddleware().process_response(self.req, self.resp)
self.assertEqual(r.content, self.short_string)
self.assertEqual(r.get('Content-Encoding'), None)
def test_no_compress_compressed_response(self):
"""
Tests that compression isn't performed on responses that are already compressed.
"""
self.resp['Content-Encoding'] = 'deflate'
r = GZipMiddleware().process_response(self.req, self.resp)
self.assertEqual(r.content, self.compressible_string)
self.assertEqual(r.get('Content-Encoding'), 'deflate')
def test_no_compress_uncompressible_response(self):
"""
Tests that compression isn't performed on responses with uncompressible content.
"""
self.resp.content = self.uncompressible_string
r = GZipMiddleware().process_response(self.req, self.resp)
self.assertEqual(r.content, self.uncompressible_string)
self.assertEqual(r.get('Content-Encoding'), None)
@override_settings(USE_ETAGS=True)
class ETagGZipMiddlewareTest(TestCase):
"""
Tests if the ETag middleware behaves correctly with GZip middleware.
"""
rf = RequestFactory()
compressible_string = b'a' * 500
def test_compress_response(self):
"""
Tests that ETag is changed after gzip compression is performed.
"""
request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
response = GZipMiddleware().process_response(request,
CommonMiddleware().process_response(request,
HttpResponse(self.compressible_string)))
gzip_etag = response.get('ETag')
request = self.rf.get('/', HTTP_ACCEPT_ENCODING='')
response = GZipMiddleware().process_response(request,
CommonMiddleware().process_response(request,
HttpResponse(self.compressible_string)))
nogzip_etag = response.get('ETag')
self.assertNotEqual(gzip_etag, nogzip_etag)
| bsd-3-clause |
edusegzy/pychemqt | lib/reaction.py | 1 | 9477 | #!/usr/bin/python
# -*- coding: utf-8 -*-
###############################################################################
# Module to define chemical reaction functionality
###############################################################################
from math import exp, log
import sqlite3
from numpy import polyval
from scipy.optimize import fsolve
from PyQt4.QtGui import QApplication
from lib import unidades
from lib.sql import databank_name
class Reaction(object):
"""Chemical reaction object"""
status = 0
msg = QApplication.translate("pychemqt", "undefined")
error = 0
kwargs = {"comp": [],
"coef": [],
"tipo": 0,
"fase": 0,
"key": 0,
"base": 0,
"customHr": False,
"Hr": 0.0,
"formula": False,
"conversion": None,
"keq": None}
kwargsValue = ("Hr",)
kwargsList = ("tipo", "fase", "key", "base")
kwargsCheck = ("customHr", "formula")
calculateValue = ("DeltaP", "DeltaP_f", "DeltaP_ac", "DeltaP_h",
"DeltaP_v", "DeltaP_100ft", "V", "f", "Re", "Tout")
TEXT_TYPE = [QApplication.translate("pychemqt", "Stoichiometric"),
QApplication.translate("pychemqt", "Equilibrium"),
QApplication.translate("pychemqt", "Kinetic"),
QApplication.translate("pychemqt", "Catalytic")]
TEXT_PHASE = [QApplication.translate("pychemqt", "Global"),
QApplication.translate("pychemqt", "Liquid"),
QApplication.translate("pychemqt", "Gas")]
TEXT_BASE = [QApplication.translate("pychemqt", "Mole"),
QApplication.translate("pychemqt", "Mass"),
QApplication.translate("pychemqt", "Partial pressure")]
def __init__(self, **kwargs):
"""constructor, kwargs keys can be:
comp: array with index of reaction components
coef: array with stequiometric coefficient for each component
fase: Phase where reaction work
0 - Global
1 - Liquid
2 - Gas
key: Index of key component
base
0 - Mol
1 - Mass
2 - Partial pressure
Hr: Heat of reaction, calculate from heat of formation if no input
formula: boolean to show compound names in formules
tipo: Kind of reaction
0 - Stequiometric, without equilibrium or kinetic calculations
1 - Equilibrium, without kinetic calculation
2 - Equilibrium by minimization of Gibbs free energy
3 - Kinetic
4 - Catalytic
conversion: conversion value for reaction with tipo=0
keq: equilibrium constant for reation with tipo=1
-it is float if it don't depend with temperature
-it is array if it depends with temperature
"""
self.kwargs = Reaction.kwargs.copy()
if kwargs:
self.__call__(**kwargs)
def __call__(self, **kwargs):
oldkwargs = self.kwargs.copy()
self.kwargs.update(kwargs)
if oldkwargs != self.kwargs and self.isCalculable:
self.calculo()
@property
def isCalculable(self):
self.msg = ""
self.status = 1
if not self.kwargs["comp"]:
self.msg = QApplication.translate("pychemqt", "undefined components")
self.status = 0
return
if not self.kwargs["coef"]:
self.msg = QApplication.translate("pychemqt", "undefined stequiometric")
self.status = 0
return
if self.kwargs["tipo"] == 0:
if self.kwargs["conversion"] is None:
self.msg = QApplication.translate("pychemqt", "undefined conversion")
self.status = 3
elif self.kwargs["tipo"] == 1:
if self.kwargs["keq"] is None:
self.msg = QApplication.translate("pychemqt", "undefined equilibrium constants")
self.status = 3
elif self.kwargs["tipo"] == 2:
pass
elif self.kwargs["tipo"] == 3:
pass
return True
def calculo(self):
self.componentes = self.kwargs["comp"]
self.coef = self.kwargs["coef"]
self.tipo = self.kwargs["tipo"]
self.base = self.kwargs["base"]
self.fase = self.kwargs["fase"]
self.calor = self.kwargs["Hr"]
self.formulas = self.kwargs["formula"]
self.keq = self.kwargs["keq"]
databank = sqlite3.connect(databank_name).cursor()
databank.execute("select nombre, peso_molecular, formula, \
calor_formacion_gas from compuestos where id IN \
%s" % str(tuple(self.componentes)))
nombre = []
peso_molecular = []
formula = []
calor_reaccion = 0
check_estequiometria = 0
for i, compuesto in enumerate(databank):
nombre.append(compuesto[0])
peso_molecular.append(compuesto[1])
formula.append(compuesto[2])
calor_reaccion += compuesto[3]*self.coef[i]
check_estequiometria += self.coef[i]*compuesto[1]
self.nombre = nombre
self.peso_molecular = peso_molecular
self.formula = formula
if self.calor:
self.Hr = self.kwargs.get("Hr", 0)
else:
self.Hr = unidades.MolarEnthalpy(calor_reaccion/abs(
self.coef[self.base]), "Jkmol")
self.error = round(check_estequiometria, 1)
self.state = self.error == 0
self.text = self._txt(self.formulas)
def conversion(self, corriente, T):
"""Calculate reaction conversion
corriente: Corriente instance for reaction
T: Temperature of reaction"""
if self.tipo == 0:
# Material balance without equilibrium or kinetics considerations
alfa = self.kwargs["conversion"]
elif self.tipo == 1:
# Chemical equilibrium without kinetics
if isinstance(self.keq, list):
A, B, C, D, E, F, G, H = self.keq
keq = exp(A+B/T+C*log(T)+D*T+E*T**2+F*T**3+G*T**4+H*T**5)
else:
keq = self.keq
def f(alfa):
conc_out = [
(corriente.caudalunitariomolar[i]+alfa*self.coef[i])
/ corriente.Q.m3h for i in range(len(self.componentes))]
productorio = 1
for i in range(len(self.componentes)):
productorio *= conc_out[i]**self.coef[i]
return keq-productorio
alfa = fsolve(f, 0.5)
print alfa, f(alfa)
avance = alfa*self.coef[self.base]*corriente.caudalunitariomolar[self.base]
Q_out = [corriente.caudalunitariomolar[i]+avance*self.coef[i] /
self.coef[self.base] for i in range(len(self.componentes))]
minimo = min(Q_out)
if minimo < 0:
# The key component is not correct, redo the result
indice = Q_out.index(minimo)
avance = self.coef[indice]*corriente.caudalunitariomolar[indice]
Q_out = [corriente.caudalunitariomolar[i]+avance*self.coef[i] /
self.coef[indice] for i in range(len(self.componentes))]
h = unidades.Power(self.Hr*self.coef[self.base] /
self.coef[indice]*avance, "Jh")
else:
h = unidades.Power(self.Hr*avance, "Jh")
print alfa, avance
caudal = sum(Q_out)
fraccion = [caudal_i/caudal for caudal_i in Q_out]
return fraccion, h
# def cinetica(self, tipo, Ko, Ei):
# """Método que define la velocidad de reacción"""
#
#
def _txt(self, nombre=False):
"""Function to get text representation for reaction"""
if nombre:
txt = self.nombre
else:
txt = self.formula
reactivos = []
productos = []
for i in range(len(self.componentes)):
if self.coef[i] == int(self.coef[i]):
self.coef[i] = int(self.coef[i])
if self.coef[i] < -1:
reactivos.append(str(-self.coef[i])+txt[i])
elif self.coef[i] == -1:
reactivos.append(txt[i])
elif -1 < self.coef[i] < 0:
reactivos.append(str(-self.coef[i])+txt[i])
elif 0 < self.coef[i] < 1:
productos.append(str(self.coef[i])+txt[i])
elif self.coef[i] == 1:
productos.append(txt[i])
elif self.coef[i] > 1:
productos.append(str(self.coef[i])+txt[i])
return " + ".join(reactivos)+" ---> "+" + ".join(productos)
def __repr__(self):
if self.status:
eq = self._txt()
return eq + " " + "Hr= %0.4e Jkmol" % self.Hr
else:
return str(self.msg)
if __name__ == "__main__":
# from lib.corriente import Corriente, Mezcla
# mezcla=Corriente(300, 1, 1000, Mezcla([1, 46, 47, 62], [0.03, 0.01, 0.96, 0]))
# reaccion=Reaction([1, 46, 47, 62], [-2, 0, -1, 2], base=2)
# reaccion.conversion(mezcla)
# print reaccion
reaccion = Reaction(comp=[1, 47, 62], coef=[-2, -1, 2])
print reaccion
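# Hedged sketch of an equilibrium reaction (tipo=1) with a constant keq;
# the component ids, keq and temperature below are illustrative only.
# r2 = Reaction(comp=[1, 47, 62], coef=[-2, -1, 2], tipo=1, keq=0.05)
# fraccion, h = r2.conversion(mezcla, 450)  # 'mezcla' must be a Corriente instance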
| gpl-3.0 |
jqug/microscopy-object-detection | readdata.py | 1 | 10627 | import skimage
from lxml import etree
import os
import glob
from sklearn.cross_validation import train_test_split
import numpy as np
from progress_bar import ProgressBar
from skimage import io
from scipy import misc
def create_sets(img_dir, train_set_proportion=.6, test_set_proportion=.2, val_set_proportion=.2):
'''Split a list of image files up into training, testing and validation sets.'''
imgfilenames = glob.glob(img_dir + '*.jpg')
baseimgfilenames = [os.path.basename(f) for f in imgfilenames]
if train_set_proportion + test_set_proportion < 1:
train,val = train_test_split(np.arange(len(baseimgfilenames)),
train_size=train_set_proportion+test_set_proportion,
test_size=val_set_proportion,
random_state=1)
else:
train = np.arange(len(baseimgfilenames))
val = []
train_test_prop = train_set_proportion + test_set_proportion
train,test = train_test_split(train,
train_size=train_set_proportion/train_test_prop,
test_size=test_set_proportion/train_test_prop,
random_state=1)
trainfiles = [baseimgfilenames[i] for i in train]
testfiles = [baseimgfilenames[i] for i in test]
valfiles = [baseimgfilenames[i] for i in val]
return trainfiles, valfiles, testfiles
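# Hedged usage sketch ('images/' is an assumed directory of annotated JPEGs):
# trainfiles, valfiles, testfiles = create_sets('images/', 0.6, 0.2, 0.2)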
def get_patch_labels_for_single_image(img_filename, image_dir,annotation_dir, size, step,width, height, objectclass=None):
'''
Read the XML annotation files to get the labels of each patch for a
given image. The labels are 0 if there is no object in the corresponding
patch, and 1 if an object is present.
'''
annotation_filename = annotation_dir + img_filename[:-3] + 'xml'
boundingboxes = get_bounding_boxes_for_single_image(annotation_filename, objectclass=objectclass)
# Scan through patch locations in the image
labels = []
y = (height-(height/step)*step)/2
while y+(size) < height:
#rows
x = (width-(width/step)*step)/2
while (x+(size) < width):
objecthere=0
for bb in boundingboxes:
margin = 0
xmin = bb[0] + margin
xmax = bb[1] - margin
ymin = bb[2] + margin
ymax = bb[3] - margin
cx = x + size/2
cy = y + size/2
if (cx>xmin and cx<xmax and cy>ymin and cy<ymax):
objecthere = 1
break
# Output the details for this patch
labels.append(objecthere)
x+=step
y += step
return np.array(labels)
#http://codereview.stackexchange.com/questions/31352/overlapping-rectangles
def range_overlap(a_min, a_max, b_min, b_max):
'''Two 1-D ranges overlap when neither lies entirely past the other.
'''
return (a_min <= b_max) and (b_min <= a_max)
def overlap(r1, r2):
'''Overlapping rectangles overlap both horizontally & vertically
'''
return range_overlap(r1[0], r1[1], r2[0], r2[1]) and range_overlap(r1[2], r1[3], r2[2], r2[3])
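# Sanity check with illustrative boxes in (xmin, xmax, ymin, ymax) form:
# overlap((0, 10, 0, 10), (5, 15, 5, 15))  -> True  (regions share a corner area)
# overlap((0, 10, 0, 10), (20, 30, 0, 10)) -> False (no horizontal overlap)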
def get_image_negatives(img, boundingboxes, size, step, grayscale=False, downsample=1, discard_rate=0.9):
'''Negative-labelled patches, taken at random from any part of the image
not overlapping an annotated bounding box.
Since there are typically many potential negative patches in each image, only
a proportion (1 - discard_rate) of the negative patches is stored.'''
c,height, width = img.shape
patches_per_img = 0
#lazy way to count how many patches we can take
max_y=0
while max_y+(size) < height:
max_x = 0
while max_x+(size) < width:
patches_per_img += 1
max_x += step
max_y += step
max_x /= step
max_y /= step
neg = []
y = (height-(max_y * step))/2
while y+(size) < height:
#rows
x = (width-(max_x * step))/2
while (x+(size) < width):
if np.random.rand()>discard_rate:
left = x
right = x+(size)
top = y
bottom = y+(size)
is_pos=False
for bb in boundingboxes:
if overlap([left,right,top,bottom], bb):
is_pos=True
break
if not is_pos:
patch = img[:, top:bottom:downsample, left:right:downsample]
neg.append(patch.copy()) # without copy seems to leak memory
x += step
y += step
return neg
def get_image_positives(img, boundingboxes, size, downsample=1):
'''Positive-labelled patches, centred on annotated bounding boxes.'''
pos = []
for bb in boundingboxes:
cy = (bb[0] + (bb[1]-bb[0])/2)
cx = (bb[2] + (bb[3]-bb[2])/2)
patch = img[..., cx-size/2:cx+size/2,cy-size/2:cy+size/2]
s= patch.shape
if s[1]<size or s[2]<size:
continue
patch = patch[:,::downsample,::downsample]
pos.append(patch.copy())
return pos
def create_patches(img_basenames, annotation_dir, image_dir, size, step, grayscale=True, progressbar=True, downsample=1, objectclass=None, negative_discard_rate=.9):
'''Extract a set of image patches with labels, from the supplied list of
annotated images. Positive-labelled patches are extracted centered on the
annotated bounding box; negative-labelled patches are extracted at random
from any part of the image which does not overlap an annotated bounding box.'''
if progressbar:
pb = ProgressBar(len(img_basenames))
if not annotation_dir[-1] == os.path.sep:
annotation_dir = annotation_dir + os.path.sep
if not image_dir[-1] == os.path.sep:
image_dir = image_dir + os.path.sep
color_type = 0
if grayscale:
channels=1
else:
channels=3
pos = []
neg = []
s = 1
for img_filename in img_basenames:
if progressbar:
pb.step(s)
s +=1
annotation_filename = annotation_dir + img_filename[:-3] + 'xml'
boundingboxes = get_bounding_boxes_for_single_image(annotation_filename, objectclass)
#colortype = cv2.IMREAD_COLOR
#img = cv2.imread(image_dir + img_filename, colortype)
img = misc.imread(image_dir + img_filename)
height,width,channels=img.shape
img = img.reshape((height, width,channels))
img = np.rollaxis(img,2)
image_pos = get_image_positives(img,boundingboxes,size,downsample=downsample)
pos.append(image_pos)
image_neg = get_image_negatives(img,boundingboxes,size,step,downsample=downsample,discard_rate=negative_discard_rate)
neg.append(image_neg)
pos = [item for sublist in pos for item in sublist]
neg = [item for sublist in neg for item in sublist]
patches = pos+neg
index = np.arange(len(patches))
np.random.seed(0)
np.random.shuffle(index)
np_patches = np.empty((len(patches),channels,size/downsample,size/downsample),dtype=np.uint8)
np_labels = np.empty(len(patches),dtype=int)
max_pos=len(pos)
for i,j in zip(index,xrange(len(index))):
if i < max_pos:
np_patches[j,] = pos[i]
np_labels[j] = 1
else:
np_patches[j,] = neg[i-max_pos]
np_labels[j] = 0
np_labels = np_labels.astype(np.uint8)
return np_labels,np_patches
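# Minimal usage sketch (directory names and the basename list are hypothetical;
# create_patches expects image filenames whose matching annotation file is
# '<basename-minus-extension>xml' inside annotation_dir):
#   basenames = ['img001.jpg', 'img002.jpg']
#   labels, patches = create_patches(basenames, 'annotations/', 'images/',
#                                    size=64, step=32, downsample=1)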
def balance(X,y,mult_neg=10):
'''Returns an array with all the positive samples and as many negatives as
mult_neg*npos'''
np.random.seed(0)
neg = np.where(y==0)[0]
neg_count = len(neg)
pos = np.where(y==1)[0]
pos_count = len(pos)
np.random.shuffle(neg)
neg = neg[0:pos_count*mult_neg]
index = np.concatenate((pos, neg))
np.random.shuffle(index)
y = y.take(index)
X = X.take(index,axis=0)
return X,y
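# Usage sketch: keep every positive sample and at most mult_neg times as many
# negatives, e.g.
#   X_bal, y_bal = balance(patches, labels, mult_neg=10)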
def augment(X,y):
'''Create rotated and flipped versions of all patches.'''
shape = X.shape
num_org=shape[0]
shape = (shape[0]*8, shape[1], shape[2], shape[3])
aug_X = np.empty(shape,dtype=np.uint8)
aug_y = np.empty(shape[0],dtype=int)
new_patch_order = np.arange(shape[0])
np.random.shuffle(new_patch_order)
for i,j in zip(new_patch_order,xrange(shape[0])):
orig_patch = i/8
rot_n = i%4
do_flip = i%8>3
x = np.rollaxis(X[orig_patch],0,3 )
if do_flip:
x = np.flipud(x)
x = np.rot90(x,rot_n)
rot_X = np.rollaxis(x,2)
aug_X[j,] = (rot_X)
aug_y[j]=(y[orig_patch])
aug_y = aug_y.astype('uint8')
return aug_X,aug_y
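# Augmentation indexing sketch: source patch k expands to indices 8*k .. 8*k+7,
# where i % 4 selects the rotation (0, 90, 180 or 270 degrees) and i % 8 > 3
# selects the vertically flipped variant.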
def augment_positives(X,y):
'''Create rotated and flipped versions of only the positive-labelled
patches.'''
pos_indices = np.where(y)[0]
neg_indices = np.where(y==0)[0]
aug_X_pos, aug_y_pos = augment(X[pos_indices,], y[pos_indices])
aug_X = np.vstack((aug_X_pos, X[neg_indices,]))
aug_y = np.hstack((aug_y_pos, y[neg_indices]))
new_order = np.random.permutation(aug_y.shape[0])
aug_X = aug_X[new_order,]
aug_y = aug_y[new_order]
aug_y = aug_y.astype('uint8')
return aug_X, aug_y
def get_bounding_boxes_for_single_image(filename, objectclass=None):
'''
Given an annotation XML filename, get a list of the bounding boxes around
each object (the ground truth object locations).
'''
# Callers already pass the .xml annotation filename, so no extension rewriting is needed here.
file_exists = os.path.exists(filename)
boundingboxes = []
if (file_exists):
# Read the bounding boxes from xml annotation
tree = etree.parse(filename)
r = tree.xpath('//bndbox')
if (len(r) != 0):
for i in range(len(r)):
if (objectclass is None) or (objectclass in r[i].getparent().xpath('label')[0].text.lower()):
xmin = round(float(r[i].xpath('xmin')[0].text))
xmin = max(xmin,1)
xmax = round(float(r[i].xpath('xmax')[0].text))
ymin = round(float(r[i].xpath('ymin')[0].text))
ymin = max(ymin,1)
ymax = round(float(r[i].xpath('ymax')[0].text))
xmin, xmax, ymin, ymax = int(xmin),int(xmax),int(ymin),int(ymax)
boundingboxes.append((xmin,xmax,ymin,ymax))
if len(boundingboxes) == 0:
return np.array([])
return np.vstack(boundingboxes)
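# The XPath queries above assume annotation XML shaped roughly like this
# (hypothetical example; only 'bndbox' children and a sibling 'label' element
# are actually required):
#   <annotation>
#     <object>
#       <label>car</label>
#       <bndbox>
#         <xmin>12</xmin><xmax>48</xmax><ymin>30</ymin><ymax>80</ymax>
#       </bndbox>
#     </object>
#   </annotation>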
| mit |
keszybz/gnuplot-py | gp_unix.py | 1 | 8276 | # $Id$
# Copyright (C) 1998-2003 Michael Haggerty <[email protected]>
#
# This file is licensed under the GNU Lesser General Public License
# (LGPL). See LICENSE.txt for details.
"""gp_unix -- an interface to gnuplot used for unix platforms.
This file implements a low-level interface to a gnuplot program for a
unix platform (actually it is used for any non-Windows, non-Mac
system). This file should be imported through gp.py, which in turn
should be imported via 'import Gnuplot' rather than these low-level
interfaces.
"""
# ############ Configuration variables: ################################
class GnuplotOpts:
"""The configuration options for gnuplot on generic platforms.
Store the options in a class to make them easy to import and
modify en masse. If you want to modify the options from the
command line or within a running program, do something like the
following::
import Gnuplot
Gnuplot.GnuplotOpts.gnuplot_command = '/bin/mygnuplot'
"""
# Command to start up the gnuplot program. If your version of
# gnuplot is run otherwise, specify the correct command here. You
# could also specify a full path or append command-line options
# here if you wish.
gnuplot_command = 'gnuplot'
# Recent versions of gnuplot (at least for Xwindows) allow a
# `-persist' command-line option when starting up gnuplot. When
# this option is specified, graph windows remain on the screen
# even after you quit gnuplot (type `q' in the window to close
# it). This can be handy but unfortunately it is not supported by
# older versions of gnuplot. The following configuration variable
# specifies whether the user's version of gnuplot recognizes this
# option or not. You can set this variable to 1 (supports
# -persist) or 0 (doesn't support) yourself; if you leave it with
# the value None then the first time you create a Gnuplot object
# it will try to detect automatically whether your version accepts
# this option.
recognizes_persist = None # test automatically on first use
# What should be the default if the persist option is not
# specified explicitly?
prefer_persist = 0
# Recent versions of gnuplot allow you to specify a `binary'
# option to the splot command for grid data, which means that the
# data file is to be read in binary format. This option saves
# substantial time writing and reading the file, and can also save
# substantial disk space and therefore it is the default for that
# type of plot. But if you have an older version of gnuplot (or
# you prefer text format) you can disable the binary option in
# either of two ways: (a) set the following variable to 0; or (b)
# pass `binary=0' to the GridData constructor. (Note that the
# demo uses binary=0 to maximize portability.)
recognizes_binary_splot = 1
# Data can be passed to gnuplot through a temporary file or as
# inline data (i.e., the filename is set to '-' and the data is
# entered into the gnuplot interpreter followed by 'e'). If
# prefer_inline_data is true, then use the inline method as
# default whenever it is supported. This should be fast but will
# use more memory since currently the inline data is put into a
# big string when the PlotItem is created.
prefer_inline_data = 0
# Does Python implement the threading module and os.mkfifo on this
# operating system? If so, the _FIFOFileItem class will be
# defined in PlotItem.py.
support_fifo = 1
# Should FIFOs be used to send data to gnuplot by default?
prefer_fifo_data = 1
# After a hardcopy is produced, we have to set the terminal type
# back to `on screen' using gnuplot's `set terminal' command. The
# following is the usual setting for Xwindows. If it is wrong,
# change the following line to select the terminal type you prefer
# to use for on-screen work.
default_term = 'x11'
# Gnuplot can plot to a printer by using "set output '| ...'"
# where ... is the name of a program that sends its stdin to a
# printer, or by "set output 'printer_device'", where
# 'printer_device' is the name of a file-like interface to the
# printer. On my machine the appropriate program is `lpr', as set
# below. On your computer it may be something different (like
# `lp'); you can set that by changing the variable below. You can
# also add options to the print command if needed.
default_lpr = '| lpr'
# Enhanced postscript is an option to the postscript terminal
# driver that requests enhanced treatment of strings (for example,
# font changes, superscripts, and subscripts). Set to 1 to enable
# or 0 to disable. If you have a version of gnuplot earlier than
# 3.7, you should set this to None (*not* 0!) so that the option
# is not used at all.
prefer_enhanced_postscript = 1
# ############ End of configuration options ############################
from os import popen
def test_persist():
"""Determine whether gnuplot recognizes the option '-persist'.
If the configuration variable 'recognizes_persist' is set (i.e.,
to something other than 'None'), return that value. Otherwise,
try to determine whether the installed version of gnuplot
recognizes the -persist option. (If it doesn't, it should emit an
error message with '-persist' in the first line.) Then set
'recognizes_persist' accordingly for future reference.
"""
if GnuplotOpts.recognizes_persist is None:
g = popen('echo | %s -persist 2>&1' % GnuplotOpts.gnuplot_command, 'r')
response = g.readlines()
g.close()
GnuplotOpts.recognizes_persist = not (response and
'-persist' in response[0])
return GnuplotOpts.recognizes_persist
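# Usage sketch: call once before relying on '-persist'; the result is cached
# in GnuplotOpts.recognizes_persist for subsequent calls.
#   if test_persist():
#       g = GnuplotProcess(persist=1)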
class GnuplotProcess:
"""Unsophisticated interface to a running gnuplot program.
This represents a running gnuplot program and the means to
communicate with it at a primitive level (i.e., pass it commands
or data). When the object is destroyed, the gnuplot program exits
(unless the 'persist' option was set). The communication is
one-way; gnuplot's text output just goes to stdout with no attempt
to check it for error messages.
Members:
'gnuplot' -- the pipe to the gnuplot command.
Methods:
'__init__' -- start up the program.
'__call__' -- pass an arbitrary string to the gnuplot program,
followed by a newline.
'write' -- pass an arbitrary string to the gnuplot program.
'flush' -- cause pending output to be written immediately.
'close' -- close the connection to gnuplot.
"""
def __init__(self, persist=None):
"""Start a gnuplot process.
Create a 'GnuplotProcess' object. This starts a gnuplot
program and prepares to write commands to it.
Keyword arguments:
'persist=1' -- start gnuplot with the '-persist' option,
(which leaves the plot window on the screen even after
the gnuplot program ends, and creates a new plot window
each time the terminal type is set to 'x11'). This
option is not available on older versions of gnuplot.
"""
if persist is None:
persist = GnuplotOpts.prefer_persist
if persist:
if not test_persist():
raise Exception('-persist does not seem to be supported '
                'by your version of gnuplot!')
self.gnuplot = popen('%s -persist' % GnuplotOpts.gnuplot_command,
'w')
else:
self.gnuplot = popen(GnuplotOpts.gnuplot_command, 'w')
# forward write and flush methods:
self.write = self.gnuplot.write
self.flush = self.gnuplot.flush
def close(self):
if self.gnuplot is not None:
self.gnuplot.close()
self.gnuplot = None
def __del__(self):
self.close()
def __call__(self, s):
"""Send a command string to gnuplot, followed by newline."""
self.write(s + '\n')
self.flush()
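# Minimal usage sketch (assumes a gnuplot binary on the PATH):
#   gp = GnuplotProcess(persist=1)
#   gp('set terminal x11')
#   gp('plot sin(x)')
#   gp.close()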
| lgpl-2.1 |
cherusk/ansible | lib/ansible/modules/cloud/vmware/vmware_guest.py | 10 | 58143 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_guest
short_description: Manages virtual machines in vcenter
description:
- Create new virtual machines (from templates or not)
- Power on/power off/restart a virtual machine
- Modify, rename or remove a virtual machine
version_added: 2.2
author:
- James Tanner (@jctanner) <[email protected]>
- Loic Blot (@nerzhul) <[email protected]>
notes:
- Tested on vSphere 5.5 and 6.0
requirements:
- "python >= 2.6"
- PyVmomi
options:
state:
description:
- What state should the virtual machine be in?
- If C(state) is set to C(present) and VM exists, ensure the VM configuration conforms to task arguments
required: True
choices: ['present', 'absent', 'poweredon', 'poweredoff', 'restarted', 'suspended', 'shutdownguest', 'rebootguest']
name:
description:
- Name of the VM to work with
required: True
name_match:
description:
- If multiple VMs matching the name, use the first or last found
default: 'first'
choices: ['first', 'last']
uuid:
description:
- UUID of the instance to manage if known, this is VMware's unique identifier.
- This is required if name is not supplied.
template:
description:
- Template used to create VM.
- If this value is not set, VM is created without using a template.
- If the VM exists already this setting will be ignored.
is_template:
description:
- Flag the instance as a template
default: False
version_added: "2.3"
folder:
description:
- Destination folder, absolute path to find an existing guest or create the new guest
hardware:
description:
- "Manage some VM hardware attributes."
- "Valid attributes are: memory_mb, num_cpus and scsi"
- "scsi: Valid values are buslogic, lsilogic, lsilogicsas and paravirtual (default)"
guest_id:
description:
- "Set the guest ID (Debian, RHEL, Windows...)"
- "This field is required when creating a VM"
- >
Valid values are referenced here:
https://www.vmware.com/support/developer/converter-sdk/conv55_apireference/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html
version_added: "2.3"
disk:
description:
- "A list of disks to add"
- "Valid attributes are: size_[tb,gb,mb,kb], type, datastore and autoselect_datastore"
- "type: Valid value is thin (default: None)"
- "datastore: Datastore to use for the disk. If autoselect_datastore is True, filter datastore selection."
- "autoselect_datastore (bool): select the less used datastore."
resource_pool:
description:
- Affect machine to the given resource pool
- Resource pool should be child of the selected host parent
default: None
version_added: "2.3"
wait_for_ip_address:
description:
- Wait until vCenter detects an IP address for the VM
- This requires vmware-tools (vmtoolsd) to properly work after creation
default: False
force:
description:
- Ignore warnings and complete the actions
datacenter:
description:
- Destination datacenter for the deploy operation
default: ha-datacenter
cluster:
description:
- The cluster name where the VM will run.
version_added: "2.3"
esxi_hostname:
description:
- The esxi hostname where the VM will run.
annotation:
description:
- A note or annotation to include in the VM
version_added: "2.3"
customvalues:
description:
- Define a list of customvalues to set on VM.
- "A customvalue object takes 2 fields 'key' and 'value'."
version_added: "2.3"
networks:
description:
- Network to use should include C(name) or C(vlan) entry
- Add an optional C(ip) and C(netmask) for network configuration
- Add an optional C(gateway) entry to configure a gateway
- Add an optional C(mac) entry to customize mac address
- Add an optional C(dns_servers) or C(domain) entry per interface (Windows)
    - Add an optional C(device_type) to configure the virtual NIC (pcnet32, vmxnet2, vmxnet3, e1000, e1000e, sriov)
version_added: "2.3"
customization:
description:
- "Parameters to customize template"
- "Common parameters (Linux/Windows):"
- " C(dns_servers) (list): List of DNS servers to configure"
- " C(dns_suffix) (list): List of domain suffixes, aka DNS search path (default: C(domain) parameter)"
- " C(domain) (string): DNS domain name to use"
- " C(hostname) (string): Computer hostname (default: C(name) parameter)"
- "Parameters related to windows customization:"
- " C(autologon) (bool): Auto logon after VM customization (default: False)"
- " C(autologoncount) (int): Number of autologon after reboot (default: 1)"
- " C(domainadmin) (string): User used to join in AD domain (mandatory with joindomain)"
- " C(domainadminpassword) (string): Password used to join in AD domain (mandatory with joindomain)"
- " C(fullname) (string): Server owner name (default: Administrator)"
- " C(joindomain) (string): AD domain to join (Not compatible with C(joinworkgroup))"
- " C(joinworkgroup) (string): Workgroup to join (Not compatible with C(joindomain), default: WORKGROUP)"
- " C(orgname) (string): Organisation name (default: ACME)"
- " C(password) (string): Local administrator password (mandatory)"
- " C(productid) (string): Product ID"
- " C(runonce) (list): List of commands to run at first user logon"
- " C(timezone) (int): Timezone (default: 85) See U(https://msdn.microsoft.com/en-us/library/ms912391(v=winembedded.11).aspx)"
version_added: "2.3"
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Create a VM from a template
- name: create the VM
vmware_guest:
hostname: 192.0.2.44
username: [email protected]
password: vmware
validate_certs: no
esxi_hostname: 192.0.2.117
datacenter: datacenter1
folder: testvms
name: testvm_2
state: poweredon
guest_id: centos64guest
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: paravirtual
networks:
- name: VM Network
ip: 192.168.1.100
netmask: 255.255.255.0
mac: 'aa:bb:dd:aa:00:14'
template: template_el7
wait_for_ip_address: yes
delegate_to: localhost
register: deploy
# Clone a VM from Template and customize
- name: Clone template and customize
vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: no
datacenter: datacenter1
cluster: cluster
name: testvm-2
template: template_windows
networks:
- name: VM Network
ip: 192.168.1.100
netmask: 255.255.255.0
gateway: 192.168.1.1
mac: 'aa:bb:dd:aa:00:14'
domain: my_domain
dns_servers:
- 192.168.1.1
- 192.168.1.2
customization:
autologon: True
dns_servers:
- 192.168.1.1
- 192.168.1.2
domain: my_domain
password: new_vm_password
runonce:
- powershell.exe -ExecutionPolicy Unrestricted -File C:\Windows\Temp\Enable-WinRM.ps1 -ForceNewSSLCert
delegate_to: localhost
# Create a VM template
- name: create a VM template
vmware_guest:
hostname: 192.0.2.88
username: [email protected]
password: vmware
validate_certs: no
datacenter: datacenter1
cluster: vmware_cluster_esx
resource_pool: highperformance_pool
folder: testvms
name: testvm_6
is_template: yes
guest_id: debian6_64Guest
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: lsilogic
wait_for_ip_address: yes
delegate_to: localhost
register: deploy
# Rename a VM (requires the VM's uuid)
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
uuid: 421e4592-c069-924d-ce20-7e7533fab926
name: new_name
state: present
delegate_to: localhost
# Remove a VM by uuid
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
uuid: 421e4592-c069-924d-ce20-7e7533fab926
state: absent
delegate_to: localhost
'''
RETURN = """
instance:
description: metadata about the new virtual machine
returned: always
type: dict
sample: None
"""
import os
import time
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.six import iteritems
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.vmware import get_all_objs, connect_to_api, gather_vm_facts
try:
import json
except ImportError:
import simplejson as json
HAS_PYVMOMI = False
try:
import pyVmomi
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
pass
class PyVmomiDeviceHelper(object):
""" This class is a helper to create easily VMWare Objects for PyVmomiHelper """
def __init__(self, module):
self.module = module
self.next_disk_unit_number = 0
@staticmethod
def create_scsi_controller(scsi_type):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
if scsi_type == 'lsilogic':
scsi_ctl.device = vim.vm.device.VirtualLsiLogicController()
elif scsi_type == 'paravirtual':
scsi_ctl.device = vim.vm.device.ParaVirtualSCSIController()
elif scsi_type == 'buslogic':
scsi_ctl.device = vim.vm.device.VirtualBusLogicController()
elif scsi_type == 'lsilogicsas':
scsi_ctl.device = vim.vm.device.VirtualLsiLogicSASController()
scsi_ctl.device.deviceInfo = vim.Description()
scsi_ctl.device.slotInfo = vim.vm.device.VirtualDevice.PciBusSlotInfo()
scsi_ctl.device.slotInfo.pciSlotNumber = 16
scsi_ctl.device.controllerKey = 100
scsi_ctl.device.unitNumber = 3
scsi_ctl.device.busNumber = 0
scsi_ctl.device.hotAddRemove = True
scsi_ctl.device.sharedBus = 'noSharing'
scsi_ctl.device.scsiCtlrUnitNumber = 7
return scsi_ctl
@staticmethod
def is_scsi_controller(device):
return isinstance(device, vim.vm.device.VirtualLsiLogicController) or \
isinstance(device, vim.vm.device.ParaVirtualSCSIController) or \
isinstance(device, vim.vm.device.VirtualBusLogicController) or \
isinstance(device, vim.vm.device.VirtualLsiLogicSASController)
def create_scsi_disk(self, scsi_ctl, disk_index=None):
diskspec = vim.vm.device.VirtualDeviceSpec()
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
diskspec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.create
diskspec.device = vim.vm.device.VirtualDisk()
diskspec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
diskspec.device.backing.diskMode = 'persistent'
diskspec.device.controllerKey = scsi_ctl.device.key
assert self.next_disk_unit_number != 7
assert disk_index != 7
"""
Configure disk unit number.
"""
if disk_index is not None:
diskspec.device.unitNumber = disk_index
self.next_disk_unit_number = disk_index + 1
else:
diskspec.device.unitNumber = self.next_disk_unit_number
self.next_disk_unit_number += 1
# unit number 7 is reserved to SCSI controller, increase next index
if self.next_disk_unit_number == 7:
self.next_disk_unit_number += 1
return diskspec
def create_nic(self, device_type, device_label, device_infos):
nic = vim.vm.device.VirtualDeviceSpec()
if device_type == 'pcnet32':
nic.device = vim.vm.device.VirtualPCNet32()
elif device_type == 'vmxnet2':
nic.device = vim.vm.device.VirtualVmxnet2()
elif device_type == 'vmxnet3':
nic.device = vim.vm.device.VirtualVmxnet3()
elif device_type == 'e1000':
nic.device = vim.vm.device.VirtualE1000()
elif device_type == 'e1000e':
nic.device = vim.vm.device.VirtualE1000e()
elif device_type == 'sriov':
nic.device = vim.vm.device.VirtualSriovEthernetCard()
else:
self.module.fail_json(msg="Invalid device_type '%s' for network %s" %
(device_type, device_infos['name']))
nic.device.wakeOnLanEnabled = True
nic.device.addressType = 'assigned'
nic.device.deviceInfo = vim.Description()
nic.device.deviceInfo.label = device_label
nic.device.deviceInfo.summary = device_infos['name']
nic.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
nic.device.connectable.startConnected = True
nic.device.connectable.allowGuestControl = True
nic.device.connectable.connected = True
if 'mac' in device_infos:
nic.device.macAddress = device_infos['mac']
return nic
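# Sketch of the device_infos mapping create_nic expects (values hypothetical;
# 'helper' stands for a PyVmomiDeviceHelper instance):
#   nic_spec = helper.create_nic('vmxnet3', 'Network Adapter 1',
#                                {'name': 'VM Network',
#                                 'mac': 'aa:bb:cc:dd:ee:ff'})
# Only 'name' is required; 'mac' is optional and sets a static MAC address.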
class PyVmomiCache(object):
""" This class caches references to objects which are requested multiples times but not modified """
def __init__(self, content):
self.content = content
self.networks = {}
self.clusters = {}
self.esx_hosts = {}
def get_network(self, network):
if network not in self.networks:
self.networks[network] = get_obj(self.content, [vim.Network], network)
return self.networks[network]
def get_cluster(self, cluster):
if cluster not in self.clusters:
self.clusters[cluster] = get_obj(self.content, [vim.ClusterComputeResource], cluster)
return self.clusters[cluster]
def get_esx_host(self, host):
if host not in self.esx_hosts:
self.esx_hosts[host] = get_obj(self.content, [vim.HostSystem], host)
return self.esx_hosts[host]
class PyVmomiHelper(object):
def __init__(self, module):
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi module required')
self.module = module
self.device_helper = PyVmomiDeviceHelper(self.module)
self.params = module.params
self.si = None
self.content = connect_to_api(self.module)
self.configspec = None
self.change_detected = False
self.customspec = None
self.current_vm_obj = None
self.cache = PyVmomiCache(self.content)
def should_deploy_from_template(self):
return self.params.get('template') is not None
def getvm(self, name=None, uuid=None, folder=None):
# https://www.vmware.com/support/developer/vc-sdk/visdk2xpubs/ReferenceGuide/vim.SearchIndex.html
# self.si.content.searchIndex.FindByInventoryPath('DC1/vm/test_folder')
vm = None
searchpath = None
if uuid:
vm = self.content.searchIndex.FindByUuid(uuid=uuid, vmSearch=True)
elif folder:
# Build the absolute folder path to pass into the search method
if not self.params['folder'].startswith('/'):
self.module.fail_json(msg="Folder %(folder)s needs to be an absolute path, starting with '/'." % self.params)
searchpath = '%(datacenter)s%(folder)s' % self.params
# get all objects for this path ...
f_obj = self.content.searchIndex.FindByInventoryPath(searchpath)
if f_obj:
if isinstance(f_obj, vim.Datacenter):
f_obj = f_obj.vmFolder
for c_obj in f_obj.childEntity:
if not isinstance(c_obj, vim.VirtualMachine):
continue
if c_obj.name == name:
vm = c_obj
if self.params['name_match'] == 'first':
break
if vm:
self.current_vm_obj = vm
return vm
def set_powerstate(self, vm, state, force):
"""
Set the power status for a VM determined by the current and
requested states. force is forceful
"""
facts = self.gather_facts(vm)
expected_state = state.replace('_', '').lower()
current_state = facts['hw_power_status'].lower()
result = dict(
changed=False,
failed=False,
)
# Need Force
if not force and current_state not in ['poweredon', 'poweredoff']:
result['failed'] = True
result['msg'] = "VM is in %s power state. Force is required!" % current_state
return result
# State is not already true
if current_state != expected_state:
task = None
try:
if expected_state == 'poweredoff':
task = vm.PowerOff()
elif expected_state == 'poweredon':
task = vm.PowerOn()
elif expected_state == 'restarted':
if current_state in ('poweredon', 'poweringon', 'resetting', 'poweredoff'):
task = vm.Reset()
else:
result['failed'] = True
result['msg'] = "Cannot restart VM in the current state %s" % current_state
elif expected_state == 'suspended':
if current_state in ('poweredon', 'poweringon'):
task = vm.Suspend()
else:
result['failed'] = True
result['msg'] = 'Cannot suspend VM in the current state %s' % current_state
elif expected_state in ['shutdownguest', 'rebootguest']:
if current_state == 'poweredon' and vm.guest.toolsRunningStatus == 'guestToolsRunning':
if expected_state == 'shutdownguest':
task = vm.ShutdownGuest()
else:
task = vm.RebootGuest()
else:
result['failed'] = True
result['msg'] = "VM %s must be in poweredon state & tools should be installed for guest shutdown/reboot" % vm.name
except Exception:
e = get_exception()
result['failed'] = True
result['msg'] = str(e)
if task:
self.wait_for_task(task)
if task.info.state == 'error':
result['failed'] = True
result['msg'] = str(task.info.error.msg)
else:
result['changed'] = True
# need to get new metadata if changed
if result['changed']:
newvm = self.getvm(uuid=vm.config.uuid)
facts = self.gather_facts(newvm)
result['instance'] = facts
return result
def gather_facts(self, vm):
return gather_vm_facts(self.content, vm)
def remove_vm(self, vm):
# https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.ManagedEntity.html#destroy
task = vm.Destroy()
self.wait_for_task(task)
if task.info.state == 'error':
return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
return {'changed': True, 'failed': False}
def configure_guestid(self, vm_obj, vm_creation=False):
# guest_id is not required when using templates
if self.should_deploy_from_template() and self.params.get('guest_id') is None:
return
# guest_id is only mandatory on VM creation
if vm_creation and self.params['guest_id'] is None:
self.module.fail_json(msg="guest_id attribute is mandatory for VM creation")
if vm_obj is None or self.params['guest_id'] != vm_obj.summary.config.guestId:
self.change_detected = True
self.configspec.guestId = self.params['guest_id']
def configure_cpu_and_memory(self, vm_obj, vm_creation=False):
# set cpu/memory/etc
if 'hardware' in self.params:
if 'num_cpus' in self.params['hardware']:
self.configspec.numCPUs = int(self.params['hardware']['num_cpus'])
if vm_obj is None or self.configspec.numCPUs != vm_obj.config.hardware.numCPU:
self.change_detected = True
# num_cpu is mandatory for VM creation
elif vm_creation and not self.should_deploy_from_template():
self.module.fail_json(msg="hardware.num_cpus attribute is mandatory for VM creation")
if 'memory_mb' in self.params['hardware']:
self.configspec.memoryMB = int(self.params['hardware']['memory_mb'])
if vm_obj is None or self.configspec.memoryMB != vm_obj.config.hardware.memoryMB:
self.change_detected = True
# memory_mb is mandatory for VM creation
elif vm_creation and not self.should_deploy_from_template():
self.module.fail_json(msg="hardware.memory_mb attribute is mandatory for VM creation")
def get_vm_network_interfaces(self, vm=None):
if vm is None:
return []
device_list = []
for device in vm.config.hardware.device:
if isinstance(device, vim.vm.device.VirtualPCNet32) or \
isinstance(device, vim.vm.device.VirtualVmxnet2) or \
isinstance(device, vim.vm.device.VirtualVmxnet3) or \
isinstance(device, vim.vm.device.VirtualE1000) or \
isinstance(device, vim.vm.device.VirtualE1000e) or \
isinstance(device, vim.vm.device.VirtualSriovEthernetCard):
device_list.append(device)
return device_list
def configure_network(self, vm_obj):
# Ignore empty networks, this permits to keep networks when deploying a template/cloning a VM
if len(self.params['networks']) == 0:
return
network_devices = list()
for network in self.params['networks']:
if 'ip' in network or 'netmask' in network:
if 'ip' not in network or 'netmask' not in network:
self.module.fail_json(msg="Both 'ip' and 'netmask' are required together.")
if 'name' in network:
if get_obj(self.content, [vim.Network], network['name']) is None:
self.module.fail_json(msg="Network '%(name)s' does not exists" % network)
elif 'vlan' in network:
dvps = get_all_objs(self.content, [vim.dvs.DistributedVirtualPortgroup])
for dvp in dvps:
if hasattr(dvp.config.defaultPortConfig, 'vlan') and dvp.config.defaultPortConfig.vlan.vlanId == network['vlan']:
network['name'] = dvp.config.name
break
if dvp.config.name == network['vlan']:
network['name'] = dvp.config.name
break
else:
self.module.fail_json(msg="VLAN '%(vlan)s' does not exist" % network)
else:
self.module.fail_json(msg="You need to define a network name or a vlan")
network_devices.append(network)
# List current device for Clone or Idempotency
current_net_devices = self.get_vm_network_interfaces(vm=vm_obj)
if len(network_devices) < len(current_net_devices):
self.module.fail_json(msg="given network device list is lesser than current VM device list (%d < %d). "
"Removing interfaces is not allowed"
% (len(network_devices), len(current_net_devices)))
for key in range(0, len(network_devices)):
# Default device type is vmxnet3, VMWare best practice
device_type = network_devices[key].get('device_type', 'vmxnet3')
nic = self.device_helper.create_nic(device_type,
'Network Adapter %s' % (key + 1),
network_devices[key])
nic_change_detected = False
if key < len(current_net_devices) and (vm_obj or self.should_deploy_from_template()):
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
# Changing mac address has no effect when editing interface
if 'mac' in network_devices[key] and nic.device.macAddress != current_net_devices[key].macAddress:
self.module.fail_json(msg="Changing MAC address has not effect when interface is already present. "
"The failing new MAC address is %s" % nic.device.macAddress)
nic.device = current_net_devices[key]
nic.device.deviceInfo = vim.Description()
else:
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
nic_change_detected = True
if hasattr(self.cache.get_network(network_devices[key]['name']), 'portKeys'):
# VDS switch
pg_obj = get_obj(self.content, [vim.dvs.DistributedVirtualPortgroup], network_devices[key]['name'])
if (nic.device.backing and
(nic.device.backing.port.portgroupKey != pg_obj.key or
nic.device.backing.port.switchUuid != pg_obj.config.distributedVirtualSwitch.uuid)):
nic_change_detected = True
dvs_port_connection = vim.dvs.PortConnection()
dvs_port_connection.portgroupKey = pg_obj.key
dvs_port_connection.switchUuid = pg_obj.config.distributedVirtualSwitch.uuid
nic.device.backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo()
nic.device.backing.port = dvs_port_connection
nic_change_detected = True
else:
# vSwitch
if not isinstance(nic.device.backing, vim.vm.device.VirtualEthernetCard.NetworkBackingInfo):
nic.device.backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
nic_change_detected = True
net_obj = self.cache.get_network(network_devices[key]['name'])
if nic.device.backing.network != net_obj:
nic.device.backing.network = net_obj
nic_change_detected = True
if nic.device.backing.deviceName != network_devices[key]['name']:
nic.device.backing.deviceName = network_devices[key]['name']
nic_change_detected = True
if nic_change_detected:
self.configspec.deviceChange.append(nic)
self.change_detected = True
def customize_customvalues(self, vm_obj):
if len(self.params['customvalues']) == 0:
return
facts = self.gather_facts(vm_obj)
for kv in self.params['customvalues']:
if 'key' not in kv or 'value' not in kv:
self.module.exit_json(msg="customvalues items required both 'key' and 'value fields.")
# If kv is not kv fetched from facts, change it
if kv['key'] not in facts['customvalues'] or facts['customvalues'][kv['key']] != kv['value']:
try:
vm_obj.setCustomValue(key=kv['key'], value=kv['value'])
self.change_detected = True
except Exception:
e = get_exception()
self.module.fail_json(msg="Failed to set custom value for key='%s' and value='%s'. Error was: %s"
% (kv['key'], kv['value'], e))
def customize_vm(self, vm_obj):
# Network settings
adaptermaps = []
for network in self.params['networks']:
if 'ip' in network and 'netmask' in network:
guest_map = vim.vm.customization.AdapterMapping()
guest_map.adapter = vim.vm.customization.IPSettings()
guest_map.adapter.ip = vim.vm.customization.FixedIp()
guest_map.adapter.ip.ipAddress = str(network['ip'])
guest_map.adapter.subnetMask = str(network['netmask'])
if 'gateway' in network:
guest_map.adapter.gateway = network['gateway']
# On Windows, DNS domain and DNS servers can be set by network interface
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.IPSettings.html
if 'domain' in network:
guest_map.adapter.dnsDomain = network['domain']
elif self.params['customization'].get('domain'):
guest_map.adapter.dnsDomain = self.params['customization']['domain']
if 'dns_servers' in network:
guest_map.adapter.dnsServerList = network['dns_servers']
elif self.params['customization'].get('dns_servers'):
guest_map.adapter.dnsServerList = self.params['customization']['dns_servers']
adaptermaps.append(guest_map)
# Global DNS settings
globalip = vim.vm.customization.GlobalIPSettings()
if 'dns_servers' in self.params['customization']:
globalip.dnsServerList = self.params['customization'].get('dns_servers')
# TODO: Maybe list the different domains from the interfaces here by default ?
if 'dns_suffix' in self.params['customization'] or 'domain' in self.params['customization']:
globalip.dnsSuffixList = self.params['customization'].get('dns_suffix', self.params['customization']['domain'])
if self.params['guest_id']:
guest_id = self.params['guest_id']
else:
guest_id = vm_obj.summary.config.guestId
# If I install a Windows use Sysprep
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.Sysprep.html#field_detail
if 'win' in guest_id:
ident = vim.vm.customization.Sysprep()
ident.userData = vim.vm.customization.UserData()
ident.userData.computerName = vim.vm.customization.FixedName()
ident.userData.computerName.name = str(self.params['customization'].get('hostname', self.params['name']))
ident.userData.fullName = str(self.params['customization'].get('fullname', 'Administrator'))
ident.userData.orgName = str(self.params['customization'].get('orgname', 'ACME'))
ident.guiUnattended = vim.vm.customization.GuiUnattended()
ident.guiUnattended.autoLogon = self.params['customization'].get('autologon', False)
ident.guiUnattended.autoLogonCount = self.params['customization'].get('autologoncount', 1)
ident.guiUnattended.timeZone = self.params['customization'].get('timezone', 85)
ident.identification = vim.vm.customization.Identification()
if self.params['customization'].get('password', '') != '':
ident.guiUnattended.password = vim.vm.customization.Password()
ident.guiUnattended.password.value = str(self.params['customization']['password'])
ident.guiUnattended.password.plainText = True
else:
self.module.fail_json(msg="The 'customization' section requires a 'password' entry, which cannot be empty.")
if 'productid' in self.params['customization']:
ident.userData.orgName = str(self.params['customization']['productid'])
if 'joindomain' in self.params['customization']:
if 'domainadmin' not in self.params['customization'] or 'domainadminpassword' not in self.params['customization']:
self.module.fail_json(msg="'domainadmin' and 'domainadminpassword' entries are mandatory in 'customization' section to use "
"joindomain feature")
ident.identification.domainAdmin = str(self.params['customization'].get('domainadmin'))
ident.identification.joinDomain = str(self.params['customization'].get('joindomain'))
ident.identification.domainAdminPassword = vim.vm.customization.Password()
ident.identification.domainAdminPassword.value = str(self.params['customization'].get('domainadminpassword'))
ident.identification.domainAdminPassword.plainText = True
elif 'joinworkgroup' in self.params['customization']:
ident.identification.joinWorkgroup = str(self.params['customization'].get('joinworkgroup'))
if 'runonce' in self.params['customization']:
ident.guiRunOnce = vim.vm.customization.GuiRunOnce()
ident.guiRunOnce.commandList = self.params['customization']['runonce']
else:
# Else use LinuxPrep
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.LinuxPrep.html
ident = vim.vm.customization.LinuxPrep()
# TODO: Maybe add domain from interface if missing ?
if 'domain' in self.params['customization']:
ident.domain = str(self.params['customization'].get('domain'))
ident.hostName = vim.vm.customization.FixedName()
ident.hostName.name = str(self.params['customization'].get('hostname', self.params['name']))
self.customspec = vim.vm.customization.Specification()
self.customspec.nicSettingMap = adaptermaps
self.customspec.globalIPSettings = globalip
self.customspec.identity = ident
def get_vm_scsi_controller(self, vm_obj):
# If vm_obj doesn't exists no SCSI controller to find
if vm_obj is None:
return None
for device in vm_obj.config.hardware.device:
if self.device_helper.is_scsi_controller(device):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.device = device
return scsi_ctl
return None
def get_configured_disk_size(self, expected_disk_spec):
# what size is it?
if [x for x in expected_disk_spec.keys() if x.startswith('size_') or x == 'size']:
# size_tb, size_gb, size_mb, size_kb, size_b ...?
if 'size' in expected_disk_spec:
expected = ''.join(c for c in expected_disk_spec['size'] if c.isdigit())
unit = expected_disk_spec['size'].replace(expected, '').lower()
expected = int(expected)
else:
param = [x for x in expected_disk_spec.keys() if x.startswith('size_')][0]
unit = param.split('_')[-1].lower()
expected = [x[1] for x in expected_disk_spec.items() if x[0].startswith('size_')][0]
expected = int(expected)
if unit == 'tb':
return expected * 1024 * 1024 * 1024
elif unit == 'gb':
return expected * 1024 * 1024
elif unit == 'mb':
return expected * 1024
elif unit == 'kb':
return expected
self.module.fail_json(
msg='%s is not a supported unit for disk size. Supported units are kb, mb, gb or tb' % unit)
# A disk entry was provided without any size attribute; fail
self.module.fail_json(
msg="No size, size_kb, size_mb, size_gb or size_tb attribute found into disk configuration")
def configure_disks(self, vm_obj):
# Ignore empty disk list, this permits to keep disks when deploying a template/cloning a VM
if len(self.params['disk']) == 0:
return
scsi_ctl = self.get_vm_scsi_controller(vm_obj)
# Create scsi controller only if we are deploying a new VM, not a template or reconfiguring
if vm_obj is None or scsi_ctl is None:
scsi_ctl = self.device_helper.create_scsi_controller(self.get_scsi_type())
self.change_detected = True
self.configspec.deviceChange.append(scsi_ctl)
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)] \
if vm_obj is not None else None
if disks is not None and self.params.get('disk') and len(self.params.get('disk')) < len(disks):
self.module.fail_json(msg="Provided disks configuration has less disks than "
"the target object (%d vs %d)" % (len(self.params.get('disk')), len(disks)))
disk_index = 0
for expected_disk_spec in self.params.get('disk'):
disk_modified = False
# If we are manipulating an existing object which has disks and disk_index is in disks
if vm_obj is not None and disks is not None and disk_index < len(disks):
diskspec = vim.vm.device.VirtualDeviceSpec()
# set the operation to edit so that it knows to keep other settings
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
diskspec.device = disks[disk_index]
else:
diskspec = self.device_helper.create_scsi_disk(scsi_ctl, disk_index)
disk_modified = True
# is it thin?
if 'type' in expected_disk_spec:
if expected_disk_spec.get('type', '').lower() == 'thin':
diskspec.device.backing.thinProvisioned = True
# which datastore?
if expected_disk_spec.get('datastore'):
# TODO: This is already handled by the relocation spec,
# but it needs to eventually be handled for all the
# other disks defined
pass
# increment index for next disk search
disk_index += 1
# index 7 is reserved to SCSI controller
if disk_index == 7:
disk_index += 1
kb = self.get_configured_disk_size(expected_disk_spec)
# VMWare doesn't allow to reduce disk sizes
if kb < diskspec.device.capacityInKB:
self.module.fail_json(
msg="Given disk size is lesser than found (%d < %d). Reducing disks is not allowed." %
(kb, diskspec.device.capacityInKB))
if kb != diskspec.device.capacityInKB or disk_modified:
diskspec.device.capacityInKB = kb
self.configspec.deviceChange.append(diskspec)
self.change_detected = True
def select_host(self):
# if the user wants a cluster, get the list of hosts for the cluster and use the first one
if self.params['cluster']:
cluster = self.cache.get_cluster(self.params['cluster'])
if not cluster:
self.module.fail_json(msg="Failed to find a cluster named %(cluster)s" % self.params)
hostsystems = [x for x in cluster.host]
# TODO: add a policy to select host
hostsystem = hostsystems[0]
else:
hostsystem = self.cache.get_esx_host(self.params['esxi_hostname'])
if not hostsystem:
self.module.fail_json(msg="Failed to find a host named %(esxi_hostname)s" % self.params)
return hostsystem
def select_datastore(self, vm_obj=None):
datastore = None
datastore_name = None
if len(self.params['disk']) != 0:
# TODO: really use the datastore for newly created disks
if 'autoselect_datastore' in self.params['disk'][0] and self.params['disk'][0]['autoselect_datastore']:
datastores = get_all_objs(self.content, [vim.Datastore])
if datastores is None or len(datastores) == 0:
self.module.fail_json(msg="Unable to find a datastore list when autoselecting")
datastore_freespace = 0
for ds in datastores:
if ds.summary.freeSpace > datastore_freespace:
# If datastore field is provided, filter destination datastores
if 'datastore' in self.params['disk'][0] and \
isinstance(self.params['disk'][0]['datastore'], str) and \
ds.name.find(self.params['disk'][0]['datastore']) < 0:
continue
datastore = ds
datastore_name = datastore.name
datastore_freespace = ds.summary.freeSpace
elif 'datastore' in self.params['disk'][0]:
datastore_name = self.params['disk'][0]['datastore']
datastore = get_obj(self.content, [vim.Datastore], datastore_name)
else:
self.module.fail_json(msg="Either datastore or autoselect_datastore "
"should be provided to select datastore")
if not datastore and self.should_deploy_from_template():
# use the template's existing DS
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)]
datastore = disks[0].backing.datastore
datastore_name = datastore.name
if not datastore:
self.module.fail_json(msg="Failed to find a matching datastore")
return datastore, datastore_name
def obj_has_parent(self, obj, parent):
assert obj is not None and parent is not None
current_parent = obj
while True:
if current_parent.name == parent.name:
return True
current_parent = current_parent.parent
if current_parent is None:
return False
def select_resource_pool(self, host):
resource_pools = get_all_objs(self.content, [vim.ResourcePool])
for rp in resource_pools.items():
if not rp[0]:
continue
if not hasattr(rp[0], 'parent'):
continue
# Find resource pool on host
if self.obj_has_parent(rp[0].parent, host.parent):
# If no resource_pool selected or it's the selected pool, return it
if self.module.params['resource_pool'] is None or rp[0].name == self.module.params['resource_pool']:
return rp[0]
if self.module.params['resource_pool'] is not None:
self.module.fail_json(msg="Could not find resource_pool %s for selected host %s"
% (self.module.params['resource_pool'], host.name))
else:
self.module.fail_json(msg="Failed to find a resource group for %s" % host.name)
def get_scsi_type(self):
disk_controller_type = "paravirtual"
# set cpu/memory/etc
if 'hardware' in self.params:
if 'scsi' in self.params['hardware']:
if self.params['hardware']['scsi'] in ['buslogic', 'paravirtual', 'lsilogic', 'lsilogicsas']:
disk_controller_type = self.params['hardware']['scsi']
else:
self.module.fail_json(msg="hardware.scsi attribute should be 'paravirtual' or 'lsilogic'")
return disk_controller_type
def deploy_vm(self):
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/clone_vm.py
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.CloneSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.ConfigSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
# FIXME:
# - multiple datacenters
# - multiple templates by the same name
# - static IPs
#datacenters = get_all_objs(self.content, [vim.Datacenter])
datacenter = get_obj(self.content, [vim.Datacenter], self.params['datacenter'])
if not datacenter:
self.module.fail_json(msg='No datacenter named %(datacenter)s was found' % self.params)
destfolder = None
if not self.params['folder'].startswith('/'):
self.module.fail_json(msg="Folder %(folder)s needs to be an absolute path, starting with '/'." % self.params)
f_obj = self.content.searchIndex.FindByInventoryPath('/%(datacenter)s%(folder)s' % self.params)
if f_obj is None:
self.module.fail_json(msg='No folder matched the path: %(folder)s' % self.params)
destfolder = f_obj
hostsystem = self.select_host()
if self.should_deploy_from_template():
# FIXME: need to search for this in the same way as guests to ensure accuracy
vm_obj = get_obj(self.content, [vim.VirtualMachine], self.params['template'])
if not vm_obj:
self.module.fail_json(msg="Could not find a template named %(template)s" % self.params)
else:
vm_obj = None
# set the destination datastore for VM & disks
(datastore, datastore_name) = self.select_datastore(vm_obj)
resource_pool = self.select_resource_pool(hostsystem)
self.configspec = vim.vm.ConfigSpec(cpuHotAddEnabled=True, memoryHotAddEnabled=True)
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=vm_obj, vm_creation=True)
self.configure_cpu_and_memory(vm_obj=vm_obj, vm_creation=True)
self.configure_disks(vm_obj=vm_obj)
self.configure_network(vm_obj=vm_obj)
if len(self.params['customization']) > 0 or len(self.params['networks']) > 0:
self.customize_vm(vm_obj=vm_obj)
try:
if self.should_deploy_from_template():
# create the relocation spec
relospec = vim.vm.RelocateSpec()
relospec.host = hostsystem
relospec.datastore = datastore
relospec.pool = resource_pool
clonespec = vim.vm.CloneSpec(template=self.params['is_template'], location=relospec)
if self.customspec:
clonespec.customization = self.customspec
clonespec.config = self.configspec
task = vm_obj.Clone(folder=destfolder, name=self.params['name'], spec=clonespec)
self.change_detected = True
else:
# ConfigSpec require name for VM creation
self.configspec.name = self.params['name']
self.configspec.files = vim.vm.FileInfo(logDirectory=None,
snapshotDirectory=None,
suspendDirectory=None,
vmPathName="[" + datastore_name + "] " + self.params["name"])
task = destfolder.CreateVM_Task(config=self.configspec, pool=resource_pool)
self.change_detected = True
self.wait_for_task(task)
except TypeError:
self.module.fail_json(msg="TypeError was returned, please ensure to give correct inputs.")
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': self.change_detected, 'failed': True, 'msg': task.info.error.msg}
else:
# set annotation
vm = task.info.result
if self.params['annotation']:
annotation_spec = vim.vm.ConfigSpec()
annotation_spec.annotation = str(self.params['annotation'])
task = vm.ReconfigVM_Task(annotation_spec)
self.wait_for_task(task)
self.customize_customvalues(vm_obj=vm)
if self.params['wait_for_ip_address'] or self.params['state'] in ['poweredon', 'restarted']:
self.set_powerstate(vm, 'poweredon', force=False)
if self.params['wait_for_ip_address']:
self.wait_for_vm_ip(vm)
vm_facts = self.gather_facts(vm)
return {'changed': self.change_detected, 'failed': False, 'instance': vm_facts}
def reconfigure_vm(self):
self.configspec = vim.vm.ConfigSpec()
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=self.current_vm_obj)
self.configure_cpu_and_memory(vm_obj=self.current_vm_obj)
self.configure_disks(vm_obj=self.current_vm_obj)
self.configure_network(vm_obj=self.current_vm_obj)
self.customize_customvalues(vm_obj=self.current_vm_obj)
if self.params['annotation'] and self.current_vm_obj.config.annotation != self.params['annotation']:
self.configspec.annotation = str(self.params['annotation'])
self.change_detected = True
relospec = vim.vm.RelocateSpec()
hostsystem = self.select_host()
relospec.pool = self.select_resource_pool(hostsystem)
change_applied = False
if relospec.pool != self.current_vm_obj.resourcePool:
task = self.current_vm_obj.RelocateVM_Task(spec=relospec)
self.wait_for_task(task)
change_applied = True
# Only send VMWare task if we see a modification
if self.change_detected:
task = self.current_vm_obj.ReconfigVM_Task(spec=self.configspec)
self.wait_for_task(task)
change_applied = True
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': change_applied, 'failed': True, 'msg': task.info.error.msg}
# Rename VM
if self.params['uuid'] and self.params['name'] and self.params['name'] != self.current_vm_obj.config.name:
task = self.current_vm_obj.Rename_Task(self.params['name'])
self.wait_for_task(task)
change_applied = True
if task.info.state == 'error':
return {'changed': change_applied, 'failed': True, 'msg': task.info.error.msg}
# Mark VM as Template
if self.params['is_template']:
self.current_vm_obj.MarkAsTemplate()
change_applied = True
vm_facts = self.gather_facts(self.current_vm_obj)
return {'changed': change_applied, 'failed': False, 'instance': vm_facts}
@staticmethod
def wait_for_task(task):
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.Task.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.TaskInfo.html
# https://github.com/virtdevninja/pyvmomi-community-samples/blob/master/samples/tools/tasks.py
while task.info.state not in ['success', 'error']:
time.sleep(1)
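# Usage sketch: wait_for_task polls once per second until the vSphere task
# reaches a terminal state, e.g.
#   task = vm.PowerOff()
#   PyVmomiHelper.wait_for_task(task)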
def wait_for_vm_ip(self, vm, poll=100, sleep=5):
ips = None
facts = {}
thispoll = 0
while not ips and thispoll <= poll:
newvm = self.getvm(uuid=vm.config.uuid)
facts = self.gather_facts(newvm)
if facts['ipv4'] or facts['ipv6']:
ips = True
else:
time.sleep(sleep)
thispoll += 1
return facts
def get_obj(content, vimtype, name):
"""
Return an object by name, if name is None the
first found object is returned
"""
obj = None
container = content.viewManager.CreateContainerView(
content.rootFolder, vimtype, True)
for c in container.view:
if name:
if c.name == name:
obj = c
break
else:
obj = c
break
container.Destroy()
return obj
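# Usage sketch (hypothetical names):
#   dc = get_obj(content, [vim.Datacenter], 'datacenter1')
#   ds = get_obj(content, [vim.Datastore], None)  # first datastore found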
def main():
module = AnsibleModule(
argument_spec=dict(
hostname=dict(
type='str',
default=os.environ.get('VMWARE_HOST')
),
username=dict(
type='str',
default=os.environ.get('VMWARE_USER')
),
password=dict(
type='str', no_log=True,
default=os.environ.get('VMWARE_PASSWORD')
),
state=dict(
required=False,
choices=[
'poweredon',
'poweredoff',
'present',
'absent',
'restarted',
'suspended',
'shutdownguest',
'rebootguest'
],
default='present'),
validate_certs=dict(type='bool', default=True),
template_src=dict(type='str', aliases=['template']),
is_template=dict(type='bool', default=False),
annotation=dict(type='str', aliases=['notes']),
customvalues=dict(type='list', default=[]),
name=dict(required=True, type='str'),
name_match=dict(type='str', default='first'),
uuid=dict(type='str'),
folder=dict(type='str', default='/vm'),
guest_id=dict(type='str'),
disk=dict(type='list', default=[]),
hardware=dict(type='dict', default={}),
force=dict(type='bool', default=False),
datacenter=dict(type='str', default='ha-datacenter'),
esxi_hostname=dict(type='str'),
cluster=dict(type='str'),
wait_for_ip_address=dict(type='bool', default=False),
networks=dict(type='list', default=[]),
resource_pool=dict(type='str'),
customization=dict(type='dict', no_log=True, default={}),
),
supports_check_mode=True,
mutually_exclusive=[
['esxi_hostname', 'cluster'],
],
required_together=[
['state', 'force'],
['template'],
],
)
result = {'failed': False, 'changed': False}
# Prepend /vm if it was missing from the folder path, also strip trailing slashes
if not module.params['folder'].startswith('/vm') and module.params['folder'].startswith('/'):
module.params['folder'] = '/vm%(folder)s' % module.params
module.params['folder'] = module.params['folder'].rstrip('/')
pyv = PyVmomiHelper(module)
# Check if the VM exists before continuing
vm = pyv.getvm(name=module.params['name'],
folder=module.params['folder'],
uuid=module.params['uuid'])
# VM already exists
if vm:
if module.params['state'] == 'absent':
# destroy it
if module.params['force']:
# has to be poweredoff first
pyv.set_powerstate(vm, 'poweredoff', module.params['force'])
result = pyv.remove_vm(vm)
elif module.params['state'] == 'present':
result = pyv.reconfigure_vm()
elif module.params['state'] in ['poweredon', 'poweredoff', 'restarted', 'suspended', 'shutdownguest', 'rebootguest']:
# set powerstate
tmp_result = pyv.set_powerstate(vm, module.params['state'], module.params['force'])
if tmp_result['changed']:
result["changed"] = True
if not tmp_result["failed"]:
result["failed"] = False
else:
# This should not happen
assert False
# VM doesn't exist
else:
if module.params['state'] in ['poweredon', 'poweredoff', 'present', 'restarted', 'suspended']:
# Create it ...
result = pyv.deploy_vm()
if 'failed' not in result:
result['failed'] = False
if result['failed']:
module.fail_json(**result)
else:
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
jasonbot/django | tests/template_tests/filter_tests/test_add.py | 503 | 1688 | from datetime import date, timedelta
from django.template.defaultfilters import add
from django.test import SimpleTestCase
from ..utils import setup
class AddTests(SimpleTestCase):
"""
Tests for #11687 and #16676
"""
@setup({'add01': '{{ i|add:"5" }}'})
def test_add01(self):
output = self.engine.render_to_string('add01', {'i': 2000})
self.assertEqual(output, '2005')
@setup({'add02': '{{ i|add:"napis" }}'})
def test_add02(self):
output = self.engine.render_to_string('add02', {'i': 2000})
self.assertEqual(output, '')
@setup({'add03': '{{ i|add:16 }}'})
def test_add03(self):
output = self.engine.render_to_string('add03', {'i': 'not_an_int'})
self.assertEqual(output, '')
@setup({'add04': '{{ i|add:"16" }}'})
def test_add04(self):
output = self.engine.render_to_string('add04', {'i': 'not_an_int'})
self.assertEqual(output, 'not_an_int16')
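    # Taken together, add01-add04 sketch the filter's fallback chain: it
    # first tries int() coercion of both operands, then falls back to the
    # operands' own ``+`` (string concatenation above), and only renders
    # '' when both attempts fail.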
@setup({'add05': '{{ l1|add:l2 }}'})
def test_add05(self):
output = self.engine.render_to_string('add05', {'l1': [1, 2], 'l2': [3, 4]})
self.assertEqual(output, '[1, 2, 3, 4]')
@setup({'add06': '{{ t1|add:t2 }}'})
def test_add06(self):
output = self.engine.render_to_string('add06', {'t1': (3, 4), 't2': (1, 2)})
self.assertEqual(output, '(3, 4, 1, 2)')
@setup({'add07': '{{ d|add:t }}'})
def test_add07(self):
output = self.engine.render_to_string('add07', {'d': date(2000, 1, 1), 't': timedelta(10)})
self.assertEqual(output, 'Jan. 11, 2000')
class FunctionTests(SimpleTestCase):
def test_add(self):
self.assertEqual(add('1', '2'), 3)
| bsd-3-clause |
redhat-openstack/manila | manila_tempest_tests/tests/api/admin/test_quotas.py | 2 | 13084 | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config # noqa
from tempest import test # noqa
from manila_tempest_tests import clients_share as clients
from manila_tempest_tests.tests.api import base
CONF = config.CONF
class SharesAdminQuotasTest(base.BaseSharesAdminTest):
@classmethod
def resource_setup(cls):
cls.os = clients.AdminManager()
super(SharesAdminQuotasTest, cls).resource_setup()
cls.user_id = cls.shares_client.user_id
cls.tenant_id = cls.shares_client.tenant_id
@test.attr(type=["gate", "smoke", ])
def test_default_quotas(self):
quotas = self.shares_client.default_quotas(self.tenant_id)
self.assertGreater(int(quotas["gigabytes"]), -2)
self.assertGreater(int(quotas["snapshot_gigabytes"]), -2)
self.assertGreater(int(quotas["shares"]), -2)
self.assertGreater(int(quotas["snapshots"]), -2)
self.assertGreater(int(quotas["share_networks"]), -2)
@test.attr(type=["gate", "smoke", ])
def test_show_quotas(self):
quotas = self.shares_client.show_quotas(self.tenant_id)
self.assertGreater(int(quotas["gigabytes"]), -2)
self.assertGreater(int(quotas["snapshot_gigabytes"]), -2)
self.assertGreater(int(quotas["shares"]), -2)
self.assertGreater(int(quotas["snapshots"]), -2)
self.assertGreater(int(quotas["share_networks"]), -2)
@test.attr(type=["gate", "smoke", ])
def test_show_quotas_for_user(self):
quotas = self.shares_client.show_quotas(self.tenant_id, self.user_id)
self.assertGreater(int(quotas["gigabytes"]), -2)
self.assertGreater(int(quotas["snapshot_gigabytes"]), -2)
self.assertGreater(int(quotas["shares"]), -2)
self.assertGreater(int(quotas["snapshots"]), -2)
self.assertGreater(int(quotas["share_networks"]), -2)
class SharesAdminQuotasUpdateTest(base.BaseSharesAdminTest):
force_tenant_isolation = True
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_shares(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id)
new_quota = int(quotas["shares"]) + 2
# set new quota for shares
updated = client.update_quotas(client.tenant_id, shares=new_quota)
self.assertEqual(int(updated["shares"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_shares(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id, client.user_id)
new_quota = int(quotas["shares"]) - 1
# set new quota for shares
updated = client.update_quotas(
client.tenant_id, client.user_id, shares=new_quota)
self.assertEqual(int(updated["shares"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_snapshots(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id)
new_quota = int(quotas["snapshots"]) + 2
# set new quota for snapshots
updated = client.update_quotas(client.tenant_id, snapshots=new_quota)
self.assertEqual(int(updated["snapshots"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_snapshots(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id, client.user_id)
new_quota = int(quotas["snapshots"]) - 1
# set new quota for snapshots
updated = client.update_quotas(
client.tenant_id, client.user_id, snapshots=new_quota)
self.assertEqual(int(updated["snapshots"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id)
# make quotas for update
gigabytes = int(custom["gigabytes"]) + 2
        # set new quota for gigabytes
updated = client.update_quotas(
client.tenant_id, gigabytes=gigabytes)
self.assertEqual(int(updated["gigabytes"]), gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id)
# make quotas for update
snapshot_gigabytes = int(custom["snapshot_gigabytes"]) + 2
        # set new quota for snapshot gigabytes
updated = client.update_quotas(
client.tenant_id,
snapshot_gigabytes=snapshot_gigabytes)
self.assertEqual(
int(updated["snapshot_gigabytes"]), snapshot_gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id, client.user_id)
# make quotas for update
gigabytes = int(custom["gigabytes"]) - 1
        # set new quota for gigabytes
updated = client.update_quotas(
client.tenant_id, client.user_id,
gigabytes=gigabytes)
self.assertEqual(int(updated["gigabytes"]), gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
# get current quotas
custom = client.show_quotas(client.tenant_id, client.user_id)
# make quotas for update
snapshot_gigabytes = int(custom["snapshot_gigabytes"]) - 1
        # set new quota for snapshot gigabytes
updated = client.update_quotas(
client.tenant_id, client.user_id,
snapshot_gigabytes=snapshot_gigabytes)
self.assertEqual(
int(updated["snapshot_gigabytes"]), snapshot_gigabytes)
@test.attr(type=["gate", "smoke", ])
def test_update_tenant_quota_share_networks(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(client.tenant_id)
new_quota = int(quotas["share_networks"]) + 2
# set new quota for share-networks
updated = client.update_quotas(
client.tenant_id, share_networks=new_quota)
self.assertEqual(int(updated["share_networks"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_update_user_quota_share_networks(self):
client = self.get_client_with_isolated_creds()
# get current quotas
quotas = client.show_quotas(
client.tenant_id, client.user_id)
new_quota = int(quotas["share_networks"]) - 1
# set new quota for share-networks
updated = client.update_quotas(
client.tenant_id, client.user_id,
share_networks=new_quota)
self.assertEqual(int(updated["share_networks"]), new_quota)
@test.attr(type=["gate", "smoke", ])
def test_reset_tenant_quotas(self):
client = self.get_client_with_isolated_creds()
# get default_quotas
default = client.default_quotas(client.tenant_id)
# get current quotas
custom = client.show_quotas(client.tenant_id)
# make quotas for update
shares = int(custom["shares"]) + 2
snapshots = int(custom["snapshots"]) + 2
gigabytes = int(custom["gigabytes"]) + 2
snapshot_gigabytes = int(custom["snapshot_gigabytes"]) + 2
share_networks = int(custom["share_networks"]) + 2
# set new quota
updated = client.update_quotas(
client.tenant_id,
shares=shares,
snapshots=snapshots,
gigabytes=gigabytes,
snapshot_gigabytes=snapshot_gigabytes,
share_networks=share_networks)
self.assertEqual(int(updated["shares"]), shares)
self.assertEqual(int(updated["snapshots"]), snapshots)
self.assertEqual(int(updated["gigabytes"]), gigabytes)
self.assertEqual(
int(updated["snapshot_gigabytes"]), snapshot_gigabytes)
self.assertEqual(int(updated["share_networks"]), share_networks)
# reset customized quotas
client.reset_quotas(client.tenant_id)
# verify quotas
        reset = client.show_quotas(client.tenant_id)
        self.assertEqual(int(reset["shares"]), int(default["shares"]))
        self.assertEqual(int(reset["snapshots"]), int(default["snapshots"]))
        self.assertEqual(int(reset["gigabytes"]), int(default["gigabytes"]))
        self.assertEqual(int(reset["share_networks"]),
                         int(default["share_networks"]))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_shares(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, shares=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('shares'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_shares(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
shares=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('shares'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_snapshots(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, snapshots=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('snapshots'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_snapshots(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
snapshots=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('snapshots'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, gigabytes=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, snapshot_gigabytes=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('snapshot_gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
gigabytes=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_snapshot_gigabytes(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
snapshot_gigabytes=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('snapshot_gigabytes'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_quota_for_share_networks(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(client.tenant_id, share_networks=-1)
quotas = client.show_quotas(client.tenant_id)
self.assertEqual(-1, quotas.get('share_networks'))
@test.attr(type=["gate", "smoke", ])
def test_unlimited_user_quota_for_share_networks(self):
client = self.get_client_with_isolated_creds()
client.update_quotas(
client.tenant_id, client.user_id,
share_networks=-1)
quotas = client.show_quotas(client.tenant_id, client.user_id)
self.assertEqual(-1, quotas.get('share_networks'))
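    # The unlimited-quota tests above all share one shape; a hypothetical
    # parametrized helper (illustrative only, not part of the suite) could
    # collapse them:
    #
    #     def _assert_unlimited(self, key, per_user=False):
    #         client = self.get_client_with_isolated_creds()
    #         args = [client.tenant_id] + ([client.user_id] if per_user else [])
    #         client.update_quotas(*args, **{key: -1})
    #         self.assertEqual(-1, client.show_quotas(*args).get(key))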
| apache-2.0 |
vnsofthe/odoo-dev | openerp/addons/test_impex/tests/test_import.py | 231 | 30712 | # -*- coding: utf-8 -*-
import openerp.modules.registry
import openerp
from openerp.tests import common
from openerp.tools.misc import mute_logger
def ok(n):
""" Successful import of ``n`` records
:param int n: number of records which should have been imported
"""
return n, 0, 0, 0
def error(row, message, record=None, **kwargs):
""" Failed import of the record ``record`` at line ``row``, with the error
message ``message``
:param str message:
:param dict record:
"""
return (
-1, dict(record or {}, **kwargs),
"Line %d : %s" % (row, message),
'')
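# The shapes above mirror import_data's 4-tuple result as these tests
# consume it: roughly (rows_imported, 0, 0, 0) on success versus
# (-1, failing_record, error_message, '') on failure.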
def values(seq, field='value'):
return [item[field] for item in seq]
class ImporterCase(common.TransactionCase):
model_name = False
def __init__(self, *args, **kwargs):
super(ImporterCase, self).__init__(*args, **kwargs)
self.model = None
def setUp(self):
super(ImporterCase, self).setUp()
self.model = self.registry(self.model_name)
def import_(self, fields, rows, context=None):
return self.model.import_data(
self.cr, openerp.SUPERUSER_ID, fields, rows, context=context)
def read(self, fields=('value',), domain=(), context=None):
return self.model.read(
self.cr, openerp.SUPERUSER_ID,
self.model.search(self.cr, openerp.SUPERUSER_ID, domain, context=context),
fields=fields, context=context)
def browse(self, domain=(), context=None):
return self.model.browse(
self.cr, openerp.SUPERUSER_ID,
self.model.search(self.cr, openerp.SUPERUSER_ID, domain, context=context),
context=context)
def xid(self, record):
ModelData = self.registry('ir.model.data')
ids = ModelData.search(
self.cr, openerp.SUPERUSER_ID,
[('model', '=', record._name), ('res_id', '=', record.id)])
if ids:
d = ModelData.read(
self.cr, openerp.SUPERUSER_ID, ids, ['name', 'module'])[0]
if d['module']:
return '%s.%s' % (d['module'], d['name'])
return d['name']
name = record.name_get()[0][1]
# fix dotted name_get results, otherwise xid lookups blow up
name = name.replace('.', '-')
ModelData.create(self.cr, openerp.SUPERUSER_ID, {
'name': name,
'model': record._name,
'res_id': record.id,
'module': '__test__'
})
return '__test__.' + name
class test_ids_stuff(ImporterCase):
model_name = 'export.integer'
def test_create_with_id(self):
self.assertEqual(
self.import_(['.id', 'value'], [['42', '36']]),
error(1, u"Unknown database identifier '42'"))
def test_create_with_xid(self):
self.assertEqual(
self.import_(['id', 'value'], [['somexmlid', '42']]),
ok(1))
self.assertEqual(
'somexmlid',
self.xid(self.browse()[0]))
def test_update_with_id(self):
id = self.model.create(self.cr, openerp.SUPERUSER_ID, {'value': 36})
self.assertEqual(
36,
self.model.browse(self.cr, openerp.SUPERUSER_ID, id).value)
self.assertEqual(
self.import_(['.id', 'value'], [[str(id), '42']]),
ok(1))
self.assertEqual(
[42], # updated value to imported
values(self.read()))
def test_update_with_xid(self):
self.import_(['id', 'value'], [['somexmlid', '36']])
self.assertEqual([36], values(self.read()))
self.import_(['id', 'value'], [['somexmlid', '1234567']])
self.assertEqual([1234567], values(self.read()))
def test_wrong_format(self):
self.assertEqual(
self.import_(['value'], [['50%']]),
error(1, u"'50%' does not seem to be an integer for field 'unknown'"))
class test_boolean_field(ImporterCase):
model_name = 'export.boolean'
def test_empty(self):
self.assertEqual(
self.import_(['value'], []),
ok(0))
def test_exported(self):
self.assertEqual(
self.import_(['value'], [
['False'],
['True'],
]),
ok(2))
records = self.read()
self.assertEqual([
False,
True,
], values(records))
def test_falses(self):
self.assertEqual(
self.import_(['value'], [
[u'0'],
[u'no'],
[u'false'],
[u'FALSE'],
[u''],
]),
ok(5))
self.assertEqual([
False,
False,
False,
False,
False,
],
values(self.read()))
def test_trues(self):
self.assertEqual(
self.import_(['value'], [
['off'],
['None'],
['nil'],
['()'],
['f'],
['#f'],
# Problem: OpenOffice (and probably excel) output localized booleans
['VRAI'],
[u'OFF'],
[u'是的'],
['!&%#${}'],
['%(field)s'],
]),
ok(11))
self.assertEqual(
[True] * 11,
values(self.read()))
class test_integer_field(ImporterCase):
model_name = 'export.integer'
def test_none(self):
self.assertEqual(
self.import_(['value'], []),
ok(0))
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual(
[False],
values(self.read()))
def test_zero(self):
self.assertEqual(
self.import_(['value'], [['0']]),
ok(1))
self.assertEqual(
self.import_(['value'], [['-0']]),
ok(1))
self.assertEqual([False, False], values(self.read()))
def test_positives(self):
self.assertEqual(
self.import_(['value'], [
['1'],
['42'],
[str(2**31-1)],
['12345678']
]),
ok(4))
self.assertEqual([
1, 42, 2**31-1, 12345678
], values(self.read()))
def test_negatives(self):
self.assertEqual(
self.import_(['value'], [
['-1'],
['-42'],
[str(-(2**31 - 1))],
[str(-(2**31))],
['-12345678']
]),
ok(5))
self.assertEqual([
-1, -42, -(2**31 - 1), -(2**31), -12345678
], values(self.read()))
@mute_logger('openerp.sql_db')
def test_out_of_range(self):
self.assertEqual(
self.import_(['value'], [[str(2**31)]]),
error(1, "integer out of range\n"))
        # auto-rollbacks if the error happens in process_lines, but not
        # during the ir.model.data write. Can differentiate because the
        # former ends error lines with "!"
self.cr.rollback()
self.assertEqual(
self.import_(['value'], [[str(-2**32)]]),
error(1, "integer out of range\n"))
def test_nonsense(self):
self.assertEqual(
self.import_(['value'], [['zorglub']]),
error(1, u"'zorglub' does not seem to be an integer for field 'unknown'"))
class test_float_field(ImporterCase):
model_name = 'export.float'
def test_none(self):
self.assertEqual(
self.import_(['value'], []),
ok(0))
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual(
[False],
values(self.read()))
def test_zero(self):
self.assertEqual(
self.import_(['value'], [['0']]),
ok(1))
self.assertEqual(
self.import_(['value'], [['-0']]),
ok(1))
self.assertEqual([False, False], values(self.read()))
def test_positives(self):
self.assertEqual(
self.import_(['value'], [
['1'],
['42'],
[str(2**31-1)],
['12345678'],
[str(2**33)],
['0.000001'],
]),
ok(6))
self.assertEqual([
1, 42, 2**31-1, 12345678, 2.0**33, .000001
], values(self.read()))
def test_negatives(self):
self.assertEqual(
self.import_(['value'], [
['-1'],
['-42'],
[str(-2**31 + 1)],
[str(-2**31)],
['-12345678'],
[str(-2**33)],
['-0.000001'],
]),
ok(7))
self.assertEqual([
-1, -42, -(2**31 - 1), -(2**31), -12345678, -2.0**33, -.000001
], values(self.read()))
def test_nonsense(self):
self.assertEqual(
self.import_(['value'], [['foobar']]),
error(1, u"'foobar' does not seem to be a number for field 'unknown'"))
class test_string_field(ImporterCase):
model_name = 'export.string.bounded'
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual([False], values(self.read()))
def test_imported(self):
self.assertEqual(
self.import_(['value'], [
[u'foobar'],
[u'foobarbaz'],
[u'Með suð í eyrum við spilum endalaust'],
[u"People 'get' types. They use them all the time. Telling "
u"someone he can't pound a nail with a banana doesn't much "
u"surprise him."]
]),
ok(4))
self.assertEqual([
u"foobar",
u"foobarbaz",
u"Með suð í eyrum ",
u"People 'get' typ",
], values(self.read()))
class test_unbound_string_field(ImporterCase):
model_name = 'export.string'
def test_imported(self):
self.assertEqual(
self.import_(['value'], [
[u'í dag viðrar vel til loftárása'],
# ackbar.jpg
[u"If they ask you about fun, you tell them – fun is a filthy"
u" parasite"]
]),
ok(2))
self.assertEqual([
u"í dag viðrar vel til loftárása",
u"If they ask you about fun, you tell them – fun is a filthy parasite"
], values(self.read()))
class test_text(ImporterCase):
model_name = 'export.text'
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual([False], values(self.read()))
def test_imported(self):
s = (u"Breiðskífa er notað um útgefna hljómplötu sem inniheldur "
u"stúdíóupptökur frá einum flytjanda. Breiðskífur eru oftast "
u"milli 25-80 mínútur og er lengd þeirra oft miðuð við 33⅓ "
u"snúninga 12 tommu vínylplötur (sem geta verið allt að 30 mín "
u"hvor hlið).\n\nBreiðskífur eru stundum tvöfaldar og eru þær þá"
u" gefnar út á tveimur geisladiskum eða tveimur vínylplötum.")
self.assertEqual(
self.import_(['value'], [[s]]),
ok(1))
self.assertEqual([s], values(self.read()))
class test_selection(ImporterCase):
model_name = 'export.selection'
translations_fr = [
("Qux", "toto"),
("Bar", "titi"),
("Foo", "tete"),
]
def test_imported(self):
self.assertEqual(
self.import_(['value'], [
['Qux'],
['Bar'],
['Foo'],
['2'],
]),
ok(4))
self.assertEqual([3, 2, 1, 2], values(self.read()))
def test_imported_translated(self):
self.registry('res.lang').create(self.cr, openerp.SUPERUSER_ID, {
'name': u'Français',
'code': 'fr_FR',
'translatable': True,
'date_format': '%d.%m.%Y',
'decimal_point': ',',
'thousands_sep': ' ',
})
Translations = self.registry('ir.translation')
for source, value in self.translations_fr:
Translations.create(self.cr, openerp.SUPERUSER_ID, {
'name': 'export.selection,value',
'lang': 'fr_FR',
'type': 'selection',
'src': source,
'value': value
})
self.assertEqual(
self.import_(['value'], [
['toto'],
['tete'],
['titi'],
], context={'lang': 'fr_FR'}),
ok(3))
self.assertEqual([3, 1, 2], values(self.read()))
self.assertEqual(
self.import_(['value'], [['Foo']], context={'lang': 'fr_FR'}),
ok(1))
def test_invalid(self):
self.assertEqual(
self.import_(['value'], [['Baz']]),
error(1, u"Value 'Baz' not found in selection field 'unknown'"))
self.cr.rollback()
self.assertEqual(
self.import_(['value'], [[42]]),
error(1, u"Value '42' not found in selection field 'unknown'"))
class test_selection_function(ImporterCase):
model_name = 'export.selection.function'
translations_fr = [
("Corge", "toto"),
("Grault", "titi"),
("Wheee", "tete"),
("Moog", "tutu"),
]
def test_imported(self):
""" import uses fields_get, so translates import label (may or may not
be good news) *and* serializes the selection function to reverse it:
import does not actually know that the selection field uses a function
"""
# NOTE: conflict between a value and a label => ?
self.assertEqual(
self.import_(['value'], [
['3'],
["Grault"],
]),
ok(2))
self.assertEqual(
[3, 1],
values(self.read()))
def test_translated(self):
""" Expects output of selection function returns translated labels
"""
self.registry('res.lang').create(self.cr, openerp.SUPERUSER_ID, {
'name': u'Français',
'code': 'fr_FR',
'translatable': True,
'date_format': '%d.%m.%Y',
'decimal_point': ',',
'thousands_sep': ' ',
})
Translations = self.registry('ir.translation')
for source, value in self.translations_fr:
Translations.create(self.cr, openerp.SUPERUSER_ID, {
'name': 'export.selection,value',
'lang': 'fr_FR',
'type': 'selection',
'src': source,
'value': value
})
self.assertEqual(
self.import_(['value'], [
['toto'],
['tete'],
], context={'lang': 'fr_FR'}),
ok(2))
self.assertEqual(
self.import_(['value'], [['Wheee']], context={'lang': 'fr_FR'}),
ok(1))
class test_m2o(ImporterCase):
model_name = 'export.many2one'
def test_by_name(self):
# create integer objects
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 36})
# get its name
name1 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id1]))[integer_id1]
name2 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id2]))[integer_id2]
self.assertEqual(
self.import_(['value'], [
# import by name_get
[name1],
[name1],
[name2],
]),
ok(3))
# correct ids assigned to corresponding records
self.assertEqual([
(integer_id1, name1),
(integer_id1, name1),
(integer_id2, name2),],
values(self.read()))
def test_by_xid(self):
ExportInteger = self.registry('export.integer')
integer_id = ExportInteger.create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
xid = self.xid(ExportInteger.browse(
self.cr, openerp.SUPERUSER_ID, [integer_id])[0])
self.assertEqual(
self.import_(['value/id'], [[xid]]),
ok(1))
b = self.browse()
self.assertEqual(42, b[0].value.value)
def test_by_id(self):
integer_id = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
self.assertEqual(
self.import_(['value/.id'], [[integer_id]]),
ok(1))
b = self.browse()
self.assertEqual(42, b[0].value.value)
def test_by_names(self):
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
name1 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id1]))[integer_id1]
name2 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id2]))[integer_id2]
# names should be the same
self.assertEqual(name1, name2)
self.assertEqual(
self.import_(['value'], [[name2]]),
ok(1))
self.assertEqual([
(integer_id1, name1)
], values(self.read()))
def test_fail_by_implicit_id(self):
""" Can't implicitly import records by id
"""
# create integer objects
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 36})
self.assertEqual(
self.import_(['value'], [
# import by id, without specifying it
[integer_id1],
[integer_id2],
[integer_id1],
]),
error(1, u"No matching record found for name '%s' in field 'unknown'" % integer_id1))
def test_sub_field(self):
""" Does not implicitly create the record, does not warn that you can't
import m2o subfields (at all)...
"""
self.assertEqual(
self.import_(['value/value'], [['42']]),
error(1, u"Can not create Many-To-One records indirectly, import the field separately"))
def test_fail_noids(self):
self.assertEqual(
self.import_(['value'], [['nameisnoexist:3']]),
error(1, u"No matching record found for name 'nameisnoexist:3' in field 'unknown'"))
self.cr.rollback()
self.assertEqual(
self.import_(['value/id'], [['noxidhere']]),
error(1, u"No matching record found for external id 'noxidhere' in field 'unknown'"))
self.cr.rollback()
self.assertEqual(
self.import_(['value/.id'], [[66]]),
error(1, u"No matching record found for database id '66' in field 'unknown'"))
class test_m2m(ImporterCase):
model_name = 'export.many2many'
    # Apparently the one and only format that works is a
    # csv_internal_sep-separated list of ids, xids, or names (depending on
    # whether the column is m2m/.id, m2m/id, or plain m2m).
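    # For example (values illustrative):
    #   value/.id -> '3,7'                          (database ids)
    #   value/id  -> '__test__.rec0,__test__.rec1'  (external/xml ids)
    #   value     -> 'record0,record1'              (name_get display names)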
def test_ids(self):
id1 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
id5 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 99, 'str': 'record4'})
self.assertEqual(
self.import_(['value/.id'], [
['%d,%d' % (id1, id2)],
['%d,%d,%d' % (id1, id3, id4)],
['%d,%d,%d' % (id1, id2, id3)],
['%d' % id5]
]),
ok(4))
ids = lambda records: [record.id for record in records]
b = self.browse()
self.assertEqual(ids(b[0].value), [id1, id2])
self.assertEqual(values(b[0].value), [3, 44])
self.assertEqual(ids(b[2].value), [id1, id2, id3])
self.assertEqual(values(b[2].value), [3, 44, 84])
def test_noids(self):
self.assertEqual(
self.import_(['value/.id'], [['42']]),
error(1, u"No matching record found for database id '42' in field 'unknown'"))
def test_xids(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4])
self.assertEqual(
self.import_(['value/id'], [
['%s,%s' % (self.xid(records[0]), self.xid(records[1]))],
['%s' % self.xid(records[3])],
['%s,%s' % (self.xid(records[2]), self.xid(records[1]))],
]),
ok(3))
b = self.browse()
self.assertEqual(values(b[0].value), [3, 44])
self.assertEqual(values(b[2].value), [44, 84])
def test_noxids(self):
self.assertEqual(
self.import_(['value/id'], [['noxidforthat']]),
error(1, u"No matching record found for external id 'noxidforthat' in field 'unknown'"))
def test_names(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4])
name = lambda record: record.name_get()[0][1]
self.assertEqual(
self.import_(['value'], [
['%s,%s' % (name(records[1]), name(records[2]))],
['%s,%s,%s' % (name(records[0]), name(records[1]), name(records[2]))],
['%s,%s' % (name(records[0]), name(records[3]))],
]),
ok(3))
b = self.browse()
self.assertEqual(values(b[1].value), [3, 44, 84])
self.assertEqual(values(b[2].value), [3, 9])
def test_nonames(self):
self.assertEqual(
self.import_(['value'], [['wherethem2mhavenonames']]),
error(1, u"No matching record found for name 'wherethem2mhavenonames' in field 'unknown'"))
def test_import_to_existing(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
xid = 'myxid'
self.assertEqual(
self.import_(['id', 'value/.id'], [[xid, '%d,%d' % (id1, id2)]]),
ok(1))
self.assertEqual(
self.import_(['id', 'value/.id'], [[xid, '%d,%d' % (id3, id4)]]),
ok(1))
b = self.browse()
self.assertEqual(len(b), 1)
# TODO: replacement of existing m2m values is correct?
self.assertEqual(values(b[0].value), [84, 9])
class test_o2m(ImporterCase):
model_name = 'export.one2many'
def test_name_get(self):
s = u'Java is a DSL for taking large XML files and converting them to' \
u' stack traces'
self.assertEqual(
self.import_(
['const', 'value'],
[['5', s]]),
error(1, u"No matching record found for name '%s' in field 'unknown'" % s))
def test_single(self):
self.assertEqual(
self.import_(['const', 'value/value'], [
['5', '63']
]),
ok(1))
(b,) = self.browse()
self.assertEqual(b.const, 5)
self.assertEqual(values(b.value), [63])
def test_multicore(self):
self.assertEqual(
self.import_(['const', 'value/value'], [
['5', '63'],
['6', '64'],
]),
ok(2))
b1, b2 = self.browse()
self.assertEqual(b1.const, 5)
self.assertEqual(values(b1.value), [63])
self.assertEqual(b2.const, 6)
self.assertEqual(values(b2.value), [64])
def test_multisub(self):
self.assertEqual(
self.import_(['const', 'value/value'], [
['5', '63'],
['', '64'],
['', '65'],
['', '66'],
]),
ok(4))
(b,) = self.browse()
self.assertEqual(values(b.value), [63, 64, 65, 66])
def test_multi_subfields(self):
self.assertEqual(
self.import_(['value/str', 'const', 'value/value'], [
['this', '5', '63'],
['is', '', '64'],
['the', '', '65'],
['rhythm', '', '66'],
]),
ok(4))
(b,) = self.browse()
self.assertEqual(values(b.value), [63, 64, 65, 66])
self.assertEqual(
values(b.value, 'str'),
'this is the rhythm'.split())
def test_link_inline(self):
id1 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
try:
self.import_(['const', 'value/.id'], [
['42', '%d,%d' % (id1, id2)]
])
except ValueError, e:
# should be Exception(Database ID doesn't exist: export.one2many.child : $id1,$id2)
self.assertIs(type(e), ValueError)
self.assertEqual(
e.args[0],
"invalid literal for int() with base 10: '%d,%d'" % (id1, id2))
def test_link(self):
id1 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
self.assertEqual(
self.import_(['const', 'value/.id'], [
['42', str(id1)],
['', str(id2)],
]),
ok(2))
[b] = self.browse()
self.assertEqual(b.const, 42)
# automatically forces link between core record and o2ms
self.assertEqual(values(b.value), [109, 262])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
def test_link_2(self):
O2M_c = self.registry('export.one2many.child')
id1 = O2M_c.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = O2M_c.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
self.assertEqual(
self.import_(['const', 'value/.id', 'value/value'], [
['42', str(id1), '1'],
['', str(id2), '2'],
]),
ok(2))
[b] = self.browse()
self.assertEqual(b.const, 42)
self.assertEqual(values(b.value), [1, 2])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
class test_o2m_multiple(ImporterCase):
model_name = 'export.one2many.multiple'
def test_multi_mixed(self):
self.assertEqual(
self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', '21'],
['', '12', '22'],
['', '13', '23'],
['', '14', ''],
]),
ok(4))
[b] = self.browse()
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
def test_multi(self):
self.assertEqual(
self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', '21'],
['', '12', ''],
['', '13', ''],
['', '14', ''],
['', '', '22'],
['', '', '23'],
]),
ok(6))
[b] = self.browse()
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
def test_multi_fullsplit(self):
self.assertEqual(
self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', ''],
['', '12', ''],
['', '13', ''],
['', '14', ''],
['', '', '21'],
['', '', '22'],
['', '', '23'],
]),
ok(7))
[b] = self.browse()
self.assertEqual(b.const, 5)
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
# function, related, reference: written to db as-is...
# => function uses @type for value coercion/conversion
| agpl-3.0 |
MIPS/external-chromium_org | chrome/test/chromedriver/embed_extension_in_cpp.py | 158 | 1124 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Embeds Chrome user data files in C++ code."""
import base64
import optparse
import os
import StringIO
import sys
import zipfile
import cpp_source
def main():
parser = optparse.OptionParser()
parser.add_option(
'', '--directory', type='string', default='.',
help='Path to directory where the cc/h file should be created')
options, args = parser.parse_args()
global_string_map = {}
string_buffer = StringIO.StringIO()
zipper = zipfile.ZipFile(string_buffer, 'w')
for f in args:
zipper.write(f, os.path.basename(f), zipfile.ZIP_STORED)
zipper.close()
global_string_map['kAutomationExtension'] = base64.b64encode(
string_buffer.getvalue())
string_buffer.close()
cpp_source.WriteSource('embedded_automation_extension',
'chrome/test/chromedriver/chrome',
options.directory, global_string_map)
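  # Assumed shape of the generated embedded_automation_extension.h/.cc pair,
  # inferred from the string map above rather than from verified output:
  #   extern const char kAutomationExtension[];  // base64 of the zipped files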
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
cloudera/hue | desktop/core/ext-py/pytest-django-3.10.0/tests/test_db_setup.py | 2 | 16810 | import pytest
from pytest_django.lazy_django import get_django_version
from pytest_django_test.db_helpers import (
db_exists,
drop_database,
mark_database,
mark_exists,
skip_if_sqlite_in_memory,
)
def test_db_reuse_simple(django_testdir):
"A test for all backends to check that `--reuse-db` works."
django_testdir.create_test_module(
"""
import pytest
from .app.models import Item
@pytest.mark.django_db
def test_db_can_be_accessed():
assert Item.objects.count() == 0
"""
)
result = django_testdir.runpytest_subprocess("-v", "--reuse-db")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
def test_db_order(django_testdir):
"""Test order in which tests are being executed."""
django_testdir.create_test_module('''
from unittest import TestCase
import pytest
from django.test import SimpleTestCase, TestCase as DjangoTestCase, TransactionTestCase
from .app.models import Item
@pytest.mark.django_db(transaction=True)
def test_run_second_decorator():
pass
def test_run_second_fixture(transactional_db):
pass
def test_run_first_fixture(db):
pass
@pytest.mark.django_db
def test_run_first_decorator():
pass
class MyTestCase(TestCase):
def test_run_last_test_case(self):
pass
class MySimpleTestCase(SimpleTestCase):
def test_run_last_simple_test_case(self):
pass
class MyDjangoTestCase(DjangoTestCase):
def test_run_first_django_test_case(self):
pass
class MyTransactionTestCase(TransactionTestCase):
def test_run_second_transaction_test_case(self):
pass
''')
result = django_testdir.runpytest_subprocess('-v', '-s')
assert result.ret == 0
result.stdout.fnmatch_lines([
"*test_run_first_fixture*",
"*test_run_first_decorator*",
"*test_run_first_django_test_case*",
"*test_run_second_decorator*",
"*test_run_second_fixture*",
"*test_run_second_transaction_test_case*",
"*test_run_last_test_case*",
"*test_run_last_simple_test_case*",
])
def test_db_reuse(django_testdir):
"""
Test the re-use db functionality.
"""
skip_if_sqlite_in_memory()
django_testdir.create_test_module(
"""
import pytest
from .app.models import Item
@pytest.mark.django_db
def test_db_can_be_accessed():
assert Item.objects.count() == 0
"""
)
# Use --create-db on the first run to make sure we are not just re-using a
# database from another test run
drop_database()
assert not db_exists()
# Do not pass in --create-db to make sure it is created when it
# does not exist
result_first = django_testdir.runpytest_subprocess("-v", "--reuse-db")
assert result_first.ret == 0
result_first.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
assert not mark_exists()
mark_database()
assert mark_exists()
result_second = django_testdir.runpytest_subprocess("-v", "--reuse-db")
assert result_second.ret == 0
result_second.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
# Make sure the database has not been re-created
assert mark_exists()
result_third = django_testdir.runpytest_subprocess(
"-v", "--reuse-db", "--create-db"
)
assert result_third.ret == 0
result_third.stdout.fnmatch_lines(["*test_db_can_be_accessed PASSED*"])
# Make sure the database has been re-created and the mark is gone
assert db_exists()
assert not mark_exists()
class TestSqlite:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_sqlite_test_name_used(self, django_testdir):
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
from django import VERSION
@pytest.mark.django_db
def test_a():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
print(conn.settings_dict)
assert conn.settings_dict['NAME'] == 'test_custom_db_name'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-v")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_a*PASSED*"])
def test_xdist_with_reuse(django_testdir):
pytest.importorskip("xdist")
skip_if_sqlite_in_memory()
drop_database("gw0")
drop_database("gw1")
assert not db_exists("gw0")
assert not db_exists("gw1")
django_testdir.create_test_module(
"""
import pytest
from .app.models import Item
def _check(settings):
# Make sure that the database name looks correct
db_name = settings.DATABASES['default']['NAME']
assert db_name.endswith('_gw0') or db_name.endswith('_gw1')
assert Item.objects.count() == 0
Item.objects.create(name='foo')
assert Item.objects.count() == 1
@pytest.mark.django_db
def test_a(settings):
_check(settings)
@pytest.mark.django_db
def test_b(settings):
_check(settings)
@pytest.mark.django_db
def test_c(settings):
_check(settings)
@pytest.mark.django_db
def test_d(settings):
_check(settings)
"""
)
result = django_testdir.runpytest_subprocess("-vv", "-n2", "-s", "--reuse-db")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
result.stdout.fnmatch_lines(["*PASSED*test_b*"])
result.stdout.fnmatch_lines(["*PASSED*test_c*"])
result.stdout.fnmatch_lines(["*PASSED*test_d*"])
assert db_exists("gw0")
assert db_exists("gw1")
result = django_testdir.runpytest_subprocess("-vv", "-n2", "-s", "--reuse-db")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
result.stdout.fnmatch_lines(["*PASSED*test_b*"])
result.stdout.fnmatch_lines(["*PASSED*test_c*"])
result.stdout.fnmatch_lines(["*PASSED*test_d*"])
result = django_testdir.runpytest_subprocess(
"-vv", "-n2", "-s", "--reuse-db", "--create-db"
)
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
result.stdout.fnmatch_lines(["*PASSED*test_b*"])
result.stdout.fnmatch_lines(["*PASSED*test_c*"])
result.stdout.fnmatch_lines(["*PASSED*test_d*"])
# Cleanup.
drop_database("gw0")
drop_database("gw1")
class TestSqliteWithXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "/tmp/should-not-be-used",
}
}
def test_sqlite_in_memory_used(self, django_testdir):
pytest.importorskip("xdist")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_a():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert 'file:memorydb' in db_name or db_name == ':memory:'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
class TestSqliteWithMultipleDbsAndXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "/tmp/should-not-be-used",
},
"db2": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_sqlite_database_renamed(self, django_testdir):
pytest.importorskip("xdist")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_a():
(conn_db2, conn_default) = sorted(
connections.all(),
key=lambda conn: conn.alias,
)
assert conn_default.vendor == 'sqlite'
db_name = conn_default.creation._get_test_db_name()
# can_share_in_memory_db was removed in Django 2.1, and
# used in _get_test_db_name before.
if getattr(conn_default.features, "can_share_in_memory_db", True):
assert 'file:memorydb' in db_name
else:
assert db_name == ":memory:"
assert conn_db2.vendor == 'sqlite'
db_name = conn_db2.creation._get_test_db_name()
assert db_name.startswith('test_custom_db_name_gw')
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
class TestSqliteWithTox:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_db_with_tox_suffix(self, django_testdir, monkeypatch):
"A test to check that Tox DB suffix works when running in parallel."
monkeypatch.setenv("TOX_PARALLEL_ENV", "py37-django22")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_inner():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert db_name == 'test_custom_db_name_py37-django22'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_inner*PASSED*"])
def test_db_with_empty_tox_suffix(self, django_testdir, monkeypatch):
"A test to check that Tox DB suffix is not used when suffix would be empty."
monkeypatch.setenv("TOX_PARALLEL_ENV", "")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_inner():
(conn,) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert db_name == 'test_custom_db_name'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_inner*PASSED*"])
class TestSqliteWithToxAndXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": "db_name",
"TEST": {"NAME": "test_custom_db_name"},
}
}
def test_db_with_tox_suffix(self, django_testdir, monkeypatch):
"A test to check that both Tox and xdist suffixes work together."
pytest.importorskip("xdist")
monkeypatch.setenv("TOX_PARALLEL_ENV", "py37-django22")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_inner():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert db_name.startswith('test_custom_db_name_py37-django22_gw')
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_inner*"])
class TestSqliteInMemoryWithXdist:
db_settings = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"TEST": {"NAME": ":memory:"},
}
}
def test_sqlite_in_memory_used(self, django_testdir):
pytest.importorskip("xdist")
django_testdir.create_test_module(
"""
import pytest
from django.db import connections
@pytest.mark.django_db
def test_a():
(conn, ) = connections.all()
assert conn.vendor == 'sqlite'
db_name = conn.creation._get_test_db_name()
assert 'file:memorydb' in db_name or db_name == ':memory:'
"""
)
result = django_testdir.runpytest_subprocess("--tb=short", "-vv", "-n1")
assert result.ret == 0
result.stdout.fnmatch_lines(["*PASSED*test_a*"])
@pytest.mark.skipif(
get_django_version() >= (1, 9),
reason=(
"Django 1.9 requires migration and has no concept of initial data fixtures"
),
)
def test_initial_data(django_testdir_initial):
"""Test that initial data gets loaded."""
django_testdir_initial.create_test_module(
"""
import pytest
from .app.models import Item
@pytest.mark.django_db
def test_inner():
assert [x.name for x in Item.objects.all()] \
== ["mark_initial_data"]
"""
)
result = django_testdir_initial.runpytest_subprocess("--tb=short", "-v")
assert result.ret == 0
result.stdout.fnmatch_lines(["*test_inner*PASSED*"])
class TestNativeMigrations(object):
""" Tests for Django Migrations """
def test_no_migrations(self, django_testdir):
django_testdir.create_test_module(
"""
import pytest
@pytest.mark.django_db
def test_inner_migrations():
from .app.models import Item
Item.objects.create()
"""
)
migration_file = django_testdir.project_root.join(
"tpkg/app/migrations/0001_initial.py"
)
assert migration_file.isfile()
migration_file.write(
'raise Exception("This should not get imported.")', ensure=True
)
result = django_testdir.runpytest_subprocess(
"--nomigrations", "--tb=short", "-vv", "-s",
)
assert result.ret == 0
assert "Operations to perform:" not in result.stdout.str()
result.stdout.fnmatch_lines(["*= 1 passed in *"])
def test_migrations_run(self, django_testdir):
testdir = django_testdir
testdir.create_test_module(
"""
import pytest
@pytest.mark.django_db
def test_inner_migrations():
from .app.models import Item
Item.objects.create()
"""
)
testdir.create_app_file(
"""
from django.db import migrations, models
def print_it(apps, schema_editor):
print("mark_migrations_run")
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(serialize=False,
auto_created=True,
primary_key=True)),
('name', models.CharField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
migrations.RunPython(
print_it,
),
]
""",
"migrations/0001_initial.py",
)
result = testdir.runpytest_subprocess("--tb=short", "-v", "-s")
assert result.ret == 0
result.stdout.fnmatch_lines(["*mark_migrations_run*"])
result = testdir.runpytest_subprocess(
"--no-migrations", "--migrations", "--tb=short", "-v", "-s"
)
assert result.ret == 0
result.stdout.fnmatch_lines(["*mark_migrations_run*"])
| apache-2.0 |
lfairchild/PmagPy | programs/di_eq.py | 3 | 1144 | #!/usr/bin/env python
from __future__ import print_function
import sys
import numpy
import pmagpy.pmag as pmag
def main():
"""
NAME
di_eq.py
DESCRIPTION
converts dec, inc pairs to x,y pairs using equal area projection
        NB: process only one hemisphere (upper or lower) at a time; the projection does not distinguish up from down.
SYNTAX
di_eq.py [command line options] [< filename]
OPTIONS
-h prints help message and quits
-f FILE, input file
"""
out=""
UP=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
DI=numpy.loadtxt(file,dtype=numpy.float)
else:
DI = numpy.loadtxt(sys.stdin,dtype=numpy.float) # read from standard input
Ds=DI.transpose()[0]
Is=DI.transpose()[1]
if len(DI)>1: #array of data
XY=pmag.dimap_V(Ds,Is)
for xy in XY:
print('%f %f'%(xy[0],xy[1]))
else: # single data point
XY=pmag.dimap(Ds,Is)
print('%f %f'%(XY[0],XY[1]))
if __name__ == "__main__":
main()
| bsd-3-clause |
haroldl/homeworklog | django/contrib/formtools/tests/__init__.py | 151 | 15220 | import os
from django import forms, http
from django.conf import settings
from django.contrib.formtools import preview, wizard, utils
from django.test import TestCase
from django.utils import unittest
success_string = "Done was called!"
class TestFormPreview(preview.FormPreview):
def get_context(self, request, form):
context = super(TestFormPreview, self).get_context(request, form)
context.update({'custom_context': True})
return context
def get_initial(self, request):
return {'field1': 'Works!'}
def done(self, request, cleaned_data):
return http.HttpResponse(success_string)
class TestForm(forms.Form):
field1 = forms.CharField()
field1_ = forms.CharField()
bool1 = forms.BooleanField(required=False)
class UserSecuredFormPreview(TestFormPreview):
"""
    FormPreview with a custom security_hash method
"""
def security_hash(self, request, form):
return "123"
class PreviewTests(TestCase):
urls = 'django.contrib.formtools.tests.urls'
def setUp(self):
# Create a FormPreview instance to share between tests
self.preview = preview.FormPreview(TestForm)
input_template = '<input type="hidden" name="%s" value="%s" />'
self.input = input_template % (self.preview.unused_name('stage'), "%d")
self.test_data = {'field1':u'foo', 'field1_':u'asdf'}
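        # TestForm has no 'stage' field, so unused_name leaves the name
        # as-is and self.input % 1 renders:
        #   '<input type="hidden" name="stage" value="1" />'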
def test_unused_name(self):
"""
        Verifies name mangling to get a unique field name.
"""
self.assertEqual(self.preview.unused_name('field1'), 'field1__')
def test_form_get(self):
"""
Test contrib.formtools.preview form retrieval.
        Use the client library to see if we can successfully retrieve
        the form (mostly testing the ROOT_URLCONF setup
        process). Verify that an additional hidden input field
is created to manage the stage.
"""
response = self.client.get('/test1/')
stage = self.input % 1
self.assertContains(response, stage, 1)
self.assertEqual(response.context['custom_context'], True)
self.assertEqual(response.context['form'].initial, {'field1': 'Works!'})
def test_form_preview(self):
"""
Test contrib.formtools.preview form preview rendering.
Use the client library to POST to the form to see if a preview
is returned. If we do get a form back check that the hidden
value is correctly managing the state of the form.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage': 1})
response = self.client.post('/test1/', self.test_data)
# Check to confirm stage is set to 2 in output form.
stage = self.input % 2
self.assertContains(response, stage, 1)
def test_form_submit(self):
"""
Test contrib.formtools.preview form submittal.
Use the client library to POST to the form with stage set to 3
to see if our forms done() method is called. Check first
without the security hash, verify failure, retry with security
        hash and verify success.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage':2})
response = self.client.post('/test1/', self.test_data)
self.assertNotEqual(response.content, success_string)
hash = self.preview.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/test1/', self.test_data)
self.assertEqual(response.content, success_string)
def test_bool_submit(self):
"""
Test contrib.formtools.preview form submittal when form contains:
BooleanField(required=False)
Ticket: #6209 - When an unchecked BooleanField is previewed, the preview
form's hash would be computed with no value for ``bool1``. However, when
the preview form is rendered, the unchecked hidden BooleanField would be
rendered with the string value 'False'. So when the preview form is
resubmitted, the hash would be computed with the value 'False' for
``bool1``. We need to make sure the hashes are the same in both cases.
"""
self.test_data.update({'stage':2})
hash = self.preview.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash':hash, 'bool1':u'False'})
response = self.client.post('/test1/', self.test_data)
self.assertEqual(response.content, success_string)
def test_form_submit_django12_hash(self):
"""
Test contrib.formtools.preview form submittal, using the hash function
used in Django 1.2
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage':2})
response = self.client.post('/test1/', self.test_data)
self.assertNotEqual(response.content, success_string)
hash = utils.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/test1/', self.test_data)
self.assertEqual(response.content, success_string)
def test_form_submit_django12_hash_custom_hash(self):
"""
Test contrib.formtools.preview form submittal, using the hash function
used in Django 1.2 and a custom security_hash method.
"""
# Pass strings for form submittal and add stage variable to
# show we previously saw first stage of the form.
self.test_data.update({'stage':2})
response = self.client.post('/test2/', self.test_data)
self.assertEqual(response.status_code, 200)
self.assertNotEqual(response.content, success_string)
hash = utils.security_hash(None, TestForm(self.test_data))
self.test_data.update({'hash': hash})
response = self.client.post('/test2/', self.test_data)
self.assertNotEqual(response.content, success_string)
class SecurityHashTests(unittest.TestCase):
def test_textfield_hash(self):
"""
Regression test for #10034: the hash generation function should ignore
leading/trailing whitespace so as to be friendly to broken browsers that
submit it (usually in textareas).
"""
f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
hash1 = utils.security_hash(None, f1)
hash2 = utils.security_hash(None, f2)
self.assertEqual(hash1, hash2)
def test_empty_permitted(self):
"""
Regression test for #10643: the security hash should allow forms with
empty_permitted = True, or forms where data has not changed.
"""
f1 = HashTestBlankForm({})
f2 = HashTestForm({}, empty_permitted=True)
hash1 = utils.security_hash(None, f1)
hash2 = utils.security_hash(None, f2)
self.assertEqual(hash1, hash2)
class FormHmacTests(unittest.TestCase):
"""
Same as SecurityHashTests, but with form_hmac
"""
def test_textfield_hash(self):
"""
Regression test for #10034: the hash generation function should ignore
leading/trailing whitespace so as to be friendly to broken browsers that
submit it (usually in textareas).
"""
f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
hash1 = utils.form_hmac(f1)
hash2 = utils.form_hmac(f2)
self.assertEqual(hash1, hash2)
def test_empty_permitted(self):
"""
Regression test for #10643: the security hash should allow forms with
empty_permitted = True, or forms where data has not changed.
"""
f1 = HashTestBlankForm({})
f2 = HashTestForm({}, empty_permitted=True)
hash1 = utils.form_hmac(f1)
hash2 = utils.form_hmac(f2)
self.assertEqual(hash1, hash2)
class HashTestForm(forms.Form):
name = forms.CharField()
bio = forms.CharField()
class HashTestBlankForm(forms.Form):
name = forms.CharField(required=False)
bio = forms.CharField(required=False)
#
# FormWizard tests
#
class WizardPageOneForm(forms.Form):
field = forms.CharField()
class WizardPageTwoForm(forms.Form):
field = forms.CharField()
class WizardPageTwoAlternativeForm(forms.Form):
field = forms.CharField()
class WizardPageThreeForm(forms.Form):
field = forms.CharField()
class WizardClass(wizard.FormWizard):
def get_template(self, step):
return 'formwizard/wizard.html'
def done(self, request, cleaned_data):
return http.HttpResponse(success_string)
class UserSecuredWizardClass(WizardClass):
"""
Wizard with a custom security_hash method
"""
def security_hash(self, request, form):
return "123"
class DummyRequest(http.HttpRequest):
def __init__(self, POST=None):
super(DummyRequest, self).__init__()
self.method = POST and "POST" or "GET"
if POST is not None:
self.POST.update(POST)
self._dont_enforce_csrf_checks = True
class WizardTests(TestCase):
urls = 'django.contrib.formtools.tests.urls'
def setUp(self):
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
),
)
# Use a known SECRET_KEY to make security_hash tests deterministic
self.old_SECRET_KEY = settings.SECRET_KEY
settings.SECRET_KEY = "123"
def tearDown(self):
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
settings.SECRET_KEY = self.old_SECRET_KEY
def test_step_starts_at_zero(self):
"""
step should be zero for the first form
"""
response = self.client.get('/wizard/')
self.assertEqual(0, response.context['step0'])
def test_step_increments(self):
"""
step should be incremented when we go to the next page
"""
response = self.client.post('/wizard/', {"0-field":"test", "wizard_step":"0"})
self.assertEqual(1, response.context['step0'])
def test_bad_hash(self):
"""
Form should not advance if the hash is missing or bad
"""
response = self.client.post('/wizard/',
{"0-field":"test",
"1-field":"test2",
"wizard_step": "1"})
self.assertEqual(0, response.context['step0'])
def test_good_hash_django12(self):
"""
Form should advance if the hash is present and good, as calculated using
django 1.2 method.
"""
# We are hard-coding a hash value here, but that is OK, since we want to
# ensure that we don't accidentally change the algorithm.
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
"wizard_step": "1"}
response = self.client.post('/wizard/', data)
self.assertEqual(2, response.context['step0'])
def test_good_hash_django12_subclass(self):
"""
The Django 1.2 method of calculating hashes should *not* be used as a
fallback if the FormWizard subclass has provided its own method
of calculating a hash.
"""
# We are hard-coding a hash value here, but that is OK, since we want to
# ensure that we don't accidentally change the algorithm.
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "2fdbefd4c0cad51509478fbacddf8b13",
"wizard_step": "1"}
response = self.client.post('/wizard2/', data)
self.assertEqual(0, response.context['step0'])
def test_good_hash_current(self):
"""
Form should advance if the hash is present and good, as calculated using
current method.
"""
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
response = self.client.post('/wizard/', data)
self.assertEqual(2, response.context['step0'])
def test_14498(self):
"""
Regression test for ticket #14498. All previous steps' forms should be
validated.
"""
reached = [False]
that = self
class WizardWithProcessStep(WizardClass):
def process_step(self, request, form, step):
that.assertTrue(hasattr(form, 'cleaned_data'))
reached[0] = True
wizard = WizardWithProcessStep([WizardPageOneForm,
WizardPageTwoForm,
WizardPageThreeForm])
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
wizard(DummyRequest(POST=data))
self.assertTrue(reached[0])
def test_14576(self):
"""
Regression test for ticket #14576: previously, the form of the last
step was not passed to the done method.
"""
reached = [False]
that = self
class Wizard(WizardClass):
def done(self, request, form_list):
reached[0] = True
that.assertTrue(len(form_list) == 2)
wizard = Wizard([WizardPageOneForm,
WizardPageTwoForm])
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
wizard(DummyRequest(POST=data))
self.assertTrue(reached[0])
def test_15075(self):
"""
Regression test for ticket #15075. Allow modifying wizard's form_list
in process_step.
"""
reached = [False]
that = self
class WizardWithProcessStep(WizardClass):
def process_step(self, request, form, step):
if step == 0:
self.form_list[1] = WizardPageTwoAlternativeForm
if step == 1:
that.assertTrue(isinstance(form, WizardPageTwoAlternativeForm))
reached[0] = True
wizard = WizardWithProcessStep([WizardPageOneForm,
WizardPageTwoForm,
WizardPageThreeForm])
data = {"0-field": "test",
"1-field": "test2",
"hash_0": "7e9cea465f6a10a6fb47fcea65cb9a76350c9a5c",
"wizard_step": "1"}
wizard(DummyRequest(POST=data))
self.assertTrue(reached[0])
| bsd-3-clause |
VitalPet/account-financial-tools | account_journal_period_close/model/account_period.py | 39 | 2628 | # -*- coding: utf-8 -*-
#
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
# All Rights Reserved
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import orm, fields
class AccountPeriod(orm.Model):
_inherit = 'account.period'
_columns = {
'journal_period_ids': fields.one2many('account.journal.period',
'period_id', 'Journal states'),
}
def add_all_journals(self, cr, uid, ids, context=None):
this = self.browse(cr, uid, ids, context=context)[0]
journal_period_obj = self.pool.get('account.journal.period')
journal_period_ids = journal_period_obj\
.search(cr, uid, [('period_id', '=', this.id)], context=context)
journal_list = []
for journal_period in journal_period_obj.browse(cr,
uid,
journal_period_ids,
context=context):
journal_list.append(journal_period.journal_id.id)
journal_ids = self.pool.get('account.journal')\
.search(cr, uid, [('id', 'not in', journal_list)], context=context)
for journal_id in journal_ids:
journal_period_obj.create(cr,
uid,
{'period_id': this.id,
'journal_id': journal_id,
'state': this.state})
| agpl-3.0 |
aliaspider/RetroArch | tools/vulkan_loader_generator.py | 17 | 6901 | #!/usr/bin/env python3
import sys
import re
def entry_is_device(entry):
first_arg_type = entry[1][1:].split(' ')[0]
device_types = ['VkDevice', 'VkCommandBuffer', 'VkQueue']
return (first_arg_type in device_types) and (entry[0] != 'vkGetDeviceProcAddr')
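# Illustrative check (signatures abridged, not read from a real header):
#   entry_is_device(('vkCmdDraw', '(VkCommandBuffer commandBuffer, ...)'))          # True
#   entry_is_device(('vkCreateDevice', '(VkPhysicalDevice physicalDevice, ...)'))   # False
# vkGetDeviceProcAddr is deliberately excluded from the device-level list.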
def main():
pure_entrypoints = []
entrypoints = []
extensions = []
pure_list = ['vkCreateInstance', 'vkEnumerateInstanceExtensionProperties', 'vkEnumerateInstanceLayerProperties']
with open(sys.argv[1], 'r') as f:
header = f.readlines()
for line in header:
m = re.search('typedef \S+.*PFN_([^\)]+)\)(.*);$', line)
if m and m.group(1)[-3:] != 'KHR' and m.group(1)[-3:] != 'EXT' and m.group(2) != '(void)':
entry = m.group(1)
if entry == 'vkGetInstanceProcAddr':
continue
if entry in pure_list:
pure_entrypoints.append((m.group(1), m.group(2)))
else:
entrypoints.append((m.group(1), m.group(2)))
elif m and (m.group(1)[-3:] == 'KHR' or m.group(1)[-3:] == 'EXT') and m.group(2) != '(void)':
entry = m.group(1)
if 'Android' in entry:
continue
if 'Xlib' in entry:
continue
if 'Xcb' in entry:
continue
if 'Win32' in entry:
continue
if 'Wayland' in entry:
continue
if 'Mir' in entry:
continue
extensions.append((m.group(1), m.group(2)))
with open(sys.argv[2], 'w') as f:
print('''
/* This header is autogenerated by vulkan_loader_generator.py */
#ifndef VULKAN_SYMBOL_WRAPPER_H
#define VULKAN_SYMBOL_WRAPPER_H
#define VK_NO_PROTOTYPES
#include <vulkan/vulkan.h>
#ifdef __cplusplus
extern "C" {
#endif
''', file = f)
for entry in pure_entrypoints:
s = entry[0]
print('extern PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('#define {} vulkan_symbol_wrapper_{}'.format(s, s), file = f)
for entry in entrypoints:
s = entry[0]
print('extern PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('#define {} vulkan_symbol_wrapper_{}'.format(s, s), file = f)
for entry in extensions:
s = entry[0]
print('extern PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('#define {} vulkan_symbol_wrapper_{}'.format(s, s), file = f)
print('''
void vulkan_symbol_wrapper_init(PFN_vkGetInstanceProcAddr get_instance_proc_addr);
PFN_vkGetInstanceProcAddr vulkan_symbol_wrapper_instance_proc_addr(void);
VkBool32 vulkan_symbol_wrapper_load_global_symbols(void);
VkBool32 vulkan_symbol_wrapper_load_core_instance_symbols(VkInstance instance);
VkBool32 vulkan_symbol_wrapper_load_core_symbols(VkInstance instance);
VkBool32 vulkan_symbol_wrapper_load_core_device_symbols(VkDevice device);
VkBool32 vulkan_symbol_wrapper_load_instance_symbol(VkInstance instance, const char *name, PFN_vkVoidFunction *ppSymbol);
VkBool32 vulkan_symbol_wrapper_load_device_symbol(VkDevice device, const char *name, PFN_vkVoidFunction *ppSymbol);
#define VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(instance, name, pfn) vulkan_symbol_wrapper_load_instance_symbol(instance, name, (PFN_vkVoidFunction*) &(pfn))
#define VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_EXTENSION_SYMBOL(instance, name) vulkan_symbol_wrapper_load_instance_symbol(instance, #name, (PFN_vkVoidFunction*) & name)
#define VULKAN_SYMBOL_WRAPPER_LOAD_DEVICE_SYMBOL(device, name, pfn) vulkan_symbol_wrapper_load_device_symbol(device, name, (PFN_vkVoidFunction*) &(pfn))
#define VULKAN_SYMBOL_WRAPPER_LOAD_DEVICE_EXTENSION_SYMBOL(device, name) vulkan_symbol_wrapper_load_device_symbol(device, #name, (PFN_vkVoidFunction*) & name)
''', file = f)
print('''
#ifdef __cplusplus
}
#endif
#endif
''', file = f)
with open(sys.argv[3], 'w') as f:
print('''
/* This header is autogenerated by vulkan_loader_generator.py */
#include "vulkan_symbol_wrapper.h"
''', file = f)
for entry in pure_entrypoints:
s = entry[0]
print('PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
for entry in entrypoints:
s = entry[0]
print('PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
for entry in extensions:
s = entry[0]
print('PFN_{} vulkan_symbol_wrapper_{};'.format(s, s), file = f)
print('''
static PFN_vkGetInstanceProcAddr GetInstanceProcAddr;
void vulkan_symbol_wrapper_init(PFN_vkGetInstanceProcAddr get_instance_proc_addr)
{
GetInstanceProcAddr = get_instance_proc_addr;
}
PFN_vkGetInstanceProcAddr vulkan_symbol_wrapper_instance_proc_addr(void)
{
return GetInstanceProcAddr;
}
''', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_instance_symbol(VkInstance instance, const char *name, PFN_vkVoidFunction *ppSymbol)
{
*ppSymbol = GetInstanceProcAddr(instance, name);
return *ppSymbol != NULL;
}''', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_device_symbol(VkDevice device, const char *name, PFN_vkVoidFunction *ppSymbol)
{
*ppSymbol = vkGetDeviceProcAddr(device, name);
return *ppSymbol != NULL;
}''', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_global_symbols(void)
{''', file = f)
for pure in pure_entrypoints:
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(NULL, "{}", {})) return VK_FALSE;'.format(pure[0], pure[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_core_symbols(VkInstance instance)
{''', file = f)
for entry in entrypoints:
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(instance, "{}", {})) return VK_FALSE;'.format(entry[0], entry[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_core_instance_symbols(VkInstance instance)
{''', file = f)
for entry in entrypoints:
if not entry_is_device(entry):
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_INSTANCE_SYMBOL(instance, "{}", {})) return VK_FALSE;'.format(entry[0], entry[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
print('''
VkBool32 vulkan_symbol_wrapper_load_core_device_symbols(VkDevice device)
{''', file = f)
for entry in entrypoints:
if entry_is_device(entry):
print(' if (!VULKAN_SYMBOL_WRAPPER_LOAD_DEVICE_SYMBOL(device, "{}", {})) return VK_FALSE;'.format(entry[0], entry[0]), file = f)
print(' return VK_TRUE;', file = f)
print('}', file = f)
if __name__ == '__main__':
main()
| gpl-3.0 |
mattclay/ansible | test/units/modules/test_unarchive.py | 11 | 2342 | from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.modules.unarchive import ZipArchive, TgzArchive
class AnsibleModuleExit(Exception):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class ExitJson(AnsibleModuleExit):
pass
class FailJson(AnsibleModuleExit):
pass
@pytest.fixture
def fake_ansible_module():
return FakeAnsibleModule()
class FakeAnsibleModule:
def __init__(self):
self.params = {}
self.tmpdir = None
def exit_json(self, *args, **kwargs):
raise ExitJson(*args, **kwargs)
def fail_json(self, *args, **kwargs):
raise FailJson(*args, **kwargs)
class TestCaseZipArchive:
@pytest.mark.parametrize(
'side_effect, expected_reason', (
([ValueError, '/bin/zipinfo'], "Unable to find required 'unzip'"),
(ValueError, "Unable to find required 'unzip' or 'zipinfo'"),
)
)
def test_no_zip_zipinfo_binary(self, mocker, fake_ansible_module, side_effect, expected_reason):
mocker.patch("ansible.modules.unarchive.get_bin_path", side_effect=side_effect)
fake_ansible_module.params = {
"extra_opts": "",
"exclude": "",
"include": "",
}
z = ZipArchive(
src="",
b_dest="",
file_args="",
module=fake_ansible_module,
)
can_handle, reason = z.can_handle_archive()
assert can_handle is False
assert expected_reason in reason
assert z.cmd_path is None
class TestCaseTgzArchive:
def test_no_tar_binary(self, mocker, fake_ansible_module):
mocker.patch("ansible.modules.unarchive.get_bin_path", side_effect=ValueError)
fake_ansible_module.params = {
"extra_opts": "",
"exclude": "",
"include": "",
}
fake_ansible_module.check_mode = False
t = TgzArchive(
src="",
b_dest="",
file_args="",
module=fake_ansible_module,
)
can_handle, reason = t.can_handle_archive()
assert can_handle is False
assert 'Unable to find required' in reason
assert t.cmd_path is None
assert t.tar_type is None
| gpl-3.0 |
abelkhan/websearch | websearch/chardet/langbulgarianmodel.py | 2965 | 12784 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbols (punctuation) that do not belong to words
# 252: 0 - 9
# Character Mapping Table:
# this table is modified based on win1251BulgarianCharToOrderMap, so
# only order numbers < 64 are certainly valid
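# For example, in both tables below byte 0x41 ('A') maps to order 77
# (row "# 40"), while the digit bytes 0x30-0x39 (row "# 30") all share the
# generic digit order 252.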
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences: 3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
Latin5BulgarianModel = {
'charToOrderMap': Latin5_BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
Win1251BulgarianModel = {
'charToOrderMap': win1251BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
# flake8: noqa
| gpl-3.0 |
steventimberman/masterDebater | venv/lib/python2.7/site-packages/django/utils/dateparse.py | 44 | 4235 | """Functions to parse datetime objects."""
# We're using regular expressions rather than time.strptime because:
# - They provide both validation and parsing.
# - They're more flexible for datetimes.
# - The date/datetime/time constructors produce friendlier error messages.
import datetime
import re
from django.utils import six
from django.utils.timezone import get_fixed_timezone, utc
date_re = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$'
)
time_re = re.compile(
r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
)
datetime_re = re.compile(
r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)
standard_duration_re = re.compile(
r'^'
r'(?:(?P<days>-?\d+) (days?, )?)?'
r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?'
r'(?:(?P<minutes>-?\d+):)?'
r'(?P<seconds>-?\d+)'
r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
r'$'
)
# Support the sections of ISO 8601 date representation that are accepted by
# timedelta
iso8601_duration_re = re.compile(
r'^(?P<sign>[-+]?)'
r'P'
r'(?:(?P<days>\d+(.\d+)?)D)?'
r'(?:T'
r'(?:(?P<hours>\d+(.\d+)?)H)?'
r'(?:(?P<minutes>\d+(.\d+)?)M)?'
r'(?:(?P<seconds>\d+(.\d+)?)S)?'
r')?'
r'$'
)
def parse_date(value):
"""Parses a string and return a datetime.date.
Raises ValueError if the input is well formatted but not a valid date.
Returns None if the input isn't well formatted.
"""
match = date_re.match(value)
if match:
kw = {k: int(v) for k, v in six.iteritems(match.groupdict())}
return datetime.date(**kw)
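# Example (sketch): parse_date('2016-05-03') -> datetime.date(2016, 5, 3);
# parse_date('2016-05-0x') returns None (no regex match), while a well-formed
# but invalid date such as '2016-13-03' raises ValueError.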
def parse_time(value):
"""Parses a string and return a datetime.time.
This function doesn't support time zone offsets.
Raises ValueError if the input is well formatted but not a valid time.
Returns None if the input isn't well formatted, in particular if it
contains an offset.
"""
match = time_re.match(value)
if match:
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
kw = {k: int(v) for k, v in six.iteritems(kw) if v is not None}
return datetime.time(**kw)
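# Example (sketch): parse_time('10:20:30.400') -> datetime.time(10, 20, 30, 400000);
# the fraction is right-padded with zeros, so '.4' and '.400000' parse identically.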
def parse_datetime(value):
"""Parses a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raises ValueError if the input is well formatted but not a valid datetime.
Returns None if the input isn't well formatted.
"""
match = datetime_re.match(value)
if match:
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
tzinfo = kw.pop('tzinfo')
if tzinfo == 'Z':
tzinfo = utc
elif tzinfo is not None:
offset_mins = int(tzinfo[-2:]) if len(tzinfo) > 3 else 0
offset = 60 * int(tzinfo[1:3]) + offset_mins
if tzinfo[0] == '-':
offset = -offset
tzinfo = get_fixed_timezone(offset)
kw = {k: int(v) for k, v in six.iteritems(kw) if v is not None}
kw['tzinfo'] = tzinfo
return datetime.datetime(**kw)
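# Example (sketch): parse_datetime('2016-05-03 10:20:30+02:30') returns a
# datetime carrying a fixed +150 minute offset, and a trailing 'Z' maps to UTC.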
def parse_duration(value):
"""Parses a duration string and returns a datetime.timedelta.
The preferred format for durations in Django is '%d %H:%M:%S.%f'.
Also supports ISO 8601 representation.
"""
match = standard_duration_re.match(value)
if not match:
match = iso8601_duration_re.match(value)
if match:
kw = match.groupdict()
sign = -1 if kw.pop('sign', '+') == '-' else 1
if kw.get('microseconds'):
kw['microseconds'] = kw['microseconds'].ljust(6, '0')
if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
kw['microseconds'] = '-' + kw['microseconds']
kw = {k: float(v) for k, v in six.iteritems(kw) if v is not None}
return sign * datetime.timedelta(**kw)
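# Example (sketch): parse_duration('3 10:11:12') and the ISO 8601 form
# parse_duration('P3DT10H11M12S') both yield
# datetime.timedelta(days=3, hours=10, minutes=11, seconds=12).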
| mit |
booto/dolphin | Externals/fmt/support/manage.py | 10 | 8973 | #!/usr/bin/env python
"""Manage site and releases.
Usage:
manage.py release [<branch>]
manage.py site
"""
from __future__ import print_function
import datetime, docopt, errno, fileinput, json, os
import re, requests, shutil, sys, tempfile
from contextlib import contextmanager
from distutils.version import LooseVersion
from subprocess import check_call
class Git:
def __init__(self, dir):
self.dir = dir
def call(self, method, args, **kwargs):
return check_call(['git', method] + list(args), **kwargs)
def add(self, *args):
return self.call('add', args, cwd=self.dir)
def checkout(self, *args):
return self.call('checkout', args, cwd=self.dir)
def clean(self, *args):
return self.call('clean', args, cwd=self.dir)
def clone(self, *args):
return self.call('clone', list(args) + [self.dir])
def commit(self, *args):
return self.call('commit', args, cwd=self.dir)
def pull(self, *args):
return self.call('pull', args, cwd=self.dir)
def push(self, *args):
return self.call('push', args, cwd=self.dir)
def reset(self, *args):
return self.call('reset', args, cwd=self.dir)
def update(self, *args):
clone = not os.path.exists(self.dir)
if clone:
self.clone(*args)
return clone
def clean_checkout(repo, branch):
repo.clean('-f', '-d')
repo.reset('--hard')
repo.checkout(branch)
class Runner:
def __init__(self, cwd):
self.cwd = cwd
def __call__(self, *args, **kwargs):
kwargs['cwd'] = kwargs.get('cwd', self.cwd)
check_call(args, **kwargs)
def create_build_env():
"""Create a build environment."""
class Env:
pass
env = Env()
# Import the documentation build module.
env.fmt_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(env.fmt_dir, 'doc'))
import build
env.build_dir = 'build'
env.versions = build.versions
# Virtualenv and repos are cached to speed up builds.
build.create_build_env(os.path.join(env.build_dir, 'virtualenv'))
env.fmt_repo = Git(os.path.join(env.build_dir, 'fmt'))
return env
@contextmanager
def rewrite(filename):
class Buffer:
pass
buffer = Buffer()
if not os.path.exists(filename):
buffer.data = ''
yield buffer
return
with open(filename) as f:
buffer.data = f.read()
yield buffer
with open(filename, 'w') as f:
f.write(buffer.data)
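# Usage sketch (file name illustrative): the buffer is written back when the
# block exits, and a missing file reads as empty without being created:
#   with rewrite('ChangeLog.rst') as b:
#       b.data = b.data.replace('TBD', '2016-01-01')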
fmt_repo_url = 'git@github.com:fmtlib/fmt'
def update_site(env):
env.fmt_repo.update(fmt_repo_url)
doc_repo = Git(os.path.join(env.build_dir, 'fmtlib.github.io'))
doc_repo.update('git@github.com:fmtlib/fmtlib.github.io')
for version in env.versions:
clean_checkout(env.fmt_repo, version)
target_doc_dir = os.path.join(env.fmt_repo.dir, 'doc')
# Remove the old theme.
for entry in os.listdir(target_doc_dir):
path = os.path.join(target_doc_dir, entry)
if os.path.isdir(path):
shutil.rmtree(path)
# Copy the new theme.
for entry in ['_static', '_templates', 'basic-bootstrap', 'bootstrap',
'conf.py', 'fmt.less']:
src = os.path.join(env.fmt_dir, 'doc', entry)
dst = os.path.join(target_doc_dir, entry)
copy = shutil.copytree if os.path.isdir(src) else shutil.copyfile
copy(src, dst)
# Rename index to contents.
contents = os.path.join(target_doc_dir, 'contents.rst')
if not os.path.exists(contents):
os.rename(os.path.join(target_doc_dir, 'index.rst'), contents)
# Fix issues in reference.rst/api.rst.
for filename in ['reference.rst', 'api.rst']:
pattern = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.M)
with rewrite(os.path.join(target_doc_dir, filename)) as b:
b.data = b.data.replace('std::ostream &', 'std::ostream&')
b.data = re.sub(pattern, r'doxygenfunction:: \1(int)', b.data)
b.data = b.data.replace('std::FILE*', 'std::FILE *')
b.data = b.data.replace('unsigned int', 'unsigned')
b.data = b.data.replace('operator""_', 'operator"" _')
# Fix a broken link in index.rst.
index = os.path.join(target_doc_dir, 'index.rst')
with rewrite(index) as b:
b.data = b.data.replace(
'doc/latest/index.html#format-string-syntax', 'syntax.html')
# Build the docs.
html_dir = os.path.join(env.build_dir, 'html')
if os.path.exists(html_dir):
shutil.rmtree(html_dir)
include_dir = env.fmt_repo.dir
if LooseVersion(version) >= LooseVersion('5.0.0'):
include_dir = os.path.join(include_dir, 'include', 'fmt')
elif LooseVersion(version) >= LooseVersion('3.0.0'):
include_dir = os.path.join(include_dir, 'fmt')
import build
build.build_docs(version, doc_dir=target_doc_dir,
include_dir=include_dir, work_dir=env.build_dir)
shutil.rmtree(os.path.join(html_dir, '.doctrees'))
# Create symlinks for older versions.
for link, target in {'index': 'contents', 'api': 'reference'}.items():
link = os.path.join(html_dir, link) + '.html'
target += '.html'
if os.path.exists(os.path.join(html_dir, target)) and \
not os.path.exists(link):
os.symlink(target, link)
# Copy docs to the website.
version_doc_dir = os.path.join(doc_repo.dir, version)
try:
shutil.rmtree(version_doc_dir)
except OSError as e:
if e.errno != errno.ENOENT:
raise
shutil.move(html_dir, version_doc_dir)
def release(args):
env = create_build_env()
fmt_repo = env.fmt_repo
branch = args.get('<branch>')
if branch is None:
branch = 'master'
if not fmt_repo.update('-b', branch, fmt_repo_url):
clean_checkout(fmt_repo, branch)
# Convert changelog from RST to GitHub-flavored Markdown and get the
# version.
changelog = 'ChangeLog.rst'
changelog_path = os.path.join(fmt_repo.dir, changelog)
import rst2md
changes, version = rst2md.convert(changelog_path)
cmakelists = 'CMakeLists.txt'
for line in fileinput.input(os.path.join(fmt_repo.dir, cmakelists),
inplace=True):
prefix = 'set(FMT_VERSION '
if line.startswith(prefix):
line = prefix + version + ')\n'
sys.stdout.write(line)
# Update the version in the changelog.
title_len = 0
for line in fileinput.input(changelog_path, inplace=True):
if line.decode('utf-8').startswith(version + ' - TBD'):
line = version + ' - ' + datetime.date.today().isoformat()
title_len = len(line)
line += '\n'
elif title_len:
line = '-' * title_len + '\n'
title_len = 0
sys.stdout.write(line)
# Add the version to the build script.
script = os.path.join('doc', 'build.py')
script_path = os.path.join(fmt_repo.dir, script)
for line in fileinput.input(script_path, inplace=True):
m = re.match(r'( *versions = )\[(.+)\]', line)
if m:
line = '{}[{}, \'{}\']\n'.format(m.group(1), m.group(2), version)
sys.stdout.write(line)
fmt_repo.checkout('-B', 'release')
fmt_repo.add(changelog, cmakelists, script)
fmt_repo.commit('-m', 'Update version')
# Build the docs and package.
run = Runner(fmt_repo.dir)
run('cmake', '.')
run('make', 'doc', 'package_source')
update_site(env)
# Create a release on GitHub.
fmt_repo.push('origin', 'release')
params = {'access_token': os.getenv('FMT_TOKEN')}
r = requests.post('https://api.github.com/repos/fmtlib/fmt/releases',
params=params,
data=json.dumps({'tag_name': version,
'target_commitish': 'release',
'body': changes, 'draft': True}))
if r.status_code != 201:
raise Exception('Failed to create a release ' + str(r))
id = r.json()['id']
uploads_url = 'https://uploads.github.com/repos/fmtlib/fmt/releases'
package = 'fmt-{}.zip'.format(version)
r = requests.post(
'{}/{}/assets?name={}'.format(uploads_url, id, package),
headers={'Content-Type': 'application/zip'},
params=params, data=open('build/fmt/' + package, 'rb'))
if r.status_code != 201:
raise Exception('Failed to upload an asset ' + str(r))
if __name__ == '__main__':
args = docopt.docopt(__doc__)
if args.get('release'):
release(args)
elif args.get('site'):
update_site(create_build_env())
| gpl-2.0 |
ahb0327/intellij-community | python/lib/Lib/site-packages/django/contrib/contenttypes/models.py | 307 | 4052 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
class ContentTypeManager(models.Manager):
# Cache to avoid re-looking up ContentType objects all over the place.
# This cache is shared by all the get_for_* methods.
_cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self.__class__._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
return ct
def get_for_model(self, model):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = model._meta
while opts.proxy:
model = opts.proxy_for_model
opts = model._meta
key = (opts.app_label, opts.object_name.lower())
try:
ct = self.__class__._cache[self.db][key]
except KeyError:
# Load or create the ContentType entry. The smart_unicode() is
# needed around opts.verbose_name_raw because name_raw might be a
# django.utils.functional.__proxy__ object.
ct, created = self.get_or_create(
app_label = opts.app_label,
model = opts.object_name.lower(),
defaults = {'name': smart_unicode(opts.verbose_name_raw)},
)
self._add_to_cache(self.db, ct)
return ct
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
(though ContentTypes are obviously not created on-the-fly by get_by_id).
"""
try:
ct = self.__class__._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self.__class__._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
model = ct.model_class()
key = (model._meta.app_label, model._meta.object_name.lower())
self.__class__._cache.setdefault(using, {})[key] = ct
self.__class__._cache.setdefault(using, {})[ct.id] = ct
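# Usage sketch (model name illustrative): the first call may fetch or create
# the ContentType row; later calls are served from the class-level cache:
#   ct = ContentType.objects.get_for_model(Article)
#   ct.model_class()  # returns the Article class via models.get_model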
class ContentType(models.Model):
name = models.CharField(max_length=100)
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
ordering = ('name',)
unique_together = (('app_label', 'model'),)
def __unicode__(self):
return self.name
def model_class(self):
"Returns the Python model class for this type of content."
from django.db import models
return models.get_model(self.app_label, self.model)
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object_type's get_object() model
method. The ObjectNotExist exception, if thrown, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._default_manager.using(self._state.db).get(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
| apache-2.0 |
RhodriM/rugby_rankings_py | tests/test_ratings_input.py | 1 | 1207 | import unittest
import rugby_rankings.ratings_input
class TestRatingsInput(unittest.TestCase):
def test_construct(self):
inputObj = rugby_rankings.ratings_input.RatingsInput(0.0, 0.0, 0, 0)
self.assertTrue(
isinstance(inputObj, rugby_rankings.ratings_input.RatingsInput)
)
inputObj = rugby_rankings.ratings_input.RatingsInput(
0.0, 0.0, 0, 0, True, True
)
self.assertTrue(
isinstance(inputObj, rugby_rankings.ratings_input.RatingsInput)
)
def test_types(self):
inputObj = rugby_rankings.ratings_input.RatingsInput(
1.111, 90.199, 3, 2.2
)
self.assertTrue(
isinstance(inputObj.get_rounded_team_a_rating(), float)
)
self.assertEqual(inputObj.get_rounded_team_a_rating(), 1.11)
self.assertEqual(inputObj.get_rounded_team_b_rating(), 90.20)
inputObj = rugby_rankings.ratings_input.RatingsInput(
1.111, 90.199, 3, 2.2, True, True
)
self.assertEqual(inputObj.is_rugby_world_cup, True)
self.assertEqual(inputObj.is_neutral_venue, True)
if __name__ == "__main__":
unittest.main()
| mit |
bverburg/CouchPotatoServer | couchpotato/core/notifications/pushbullet.py | 32 | 3088 | import base64
import json
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
log = CPLog(__name__)
autoload = 'Pushbullet'
class Pushbullet(Notification):
url = 'https://api.pushbullet.com/v2/%s'
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
devices = self.getDevices()
if devices is None:
return False
# No device IDs configured: a None device_iden pushes to all devices
# linked to this user
if not len(devices):
devices = [None]
successful = 0
for device in devices:
response = self.request(
'pushes',
cache = False,
device_iden = device,
type = 'note',
title = self.default_title,
body = toUnicode(message)
)
if response:
successful += 1
else:
log.error('Unable to push notification to Pushbullet device with ID %s' % device)
return successful == len(devices)
def getDevices(self):
return splitString(self.conf('devices'))
def request(self, method, cache = True, **kwargs):
try:
base64string = base64.encodestring('%s:' % self.conf('api_key'))[:-1]
headers = {
"Authorization": "Basic %s" % base64string
}
if cache:
return self.getJsonData(self.url % method, headers = headers, data = kwargs)
else:
data = self.urlopen(self.url % method, headers = headers, data = kwargs)
return json.loads(data)
except Exception as ex:
log.error('Pushbullet request failed')
log.debug(ex)
return None
config = [{
'name': 'pushbullet',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'pushbullet',
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
},
{
'name': 'api_key',
'label': 'Access Token',
'description': 'Can be found on <a href="https://www.pushbullet.com/account" target="_blank">Account Settings</a>',
},
{
'name': 'devices',
'default': '',
'advanced': True,
'description': 'IDs of devices to send notifications to, empty = all devices'
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}
],
}]
| gpl-3.0 |
lhfei/spark-in-action | spark-2.x/src/main/python/wordcount.py | 1 | 1501 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
from operator import add
from pyspark.sql import SparkSession
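# Usage sketch (file name illustrative): run through spark-submit with the
# input text as the single argument, e.g.
#   spark-submit wordcount.py README.md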
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: wordcount <file>", file=sys.stderr)
sys.exit(-1)
spark = SparkSession\
.builder\
.appName("PythonWordCount")\
.getOrCreate()
lines = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
counts = lines.flatMap(lambda x: x.split(' ')) \
.map(lambda x: (x, 1)) \
.reduceByKey(add)
output = counts.collect()
for (word, count) in output:
print("%s: %i" % (word, count))
spark.stop()
| apache-2.0 |
ismailsunni/inasafe | safe/common/parameters/test/example.py | 6 | 1728 | # coding=utf-8
"""Example usage of custom parameters."""
import sys
from safe.definitions.constants import INASAFE_TEST
from safe.test.utilities import get_qgis_app
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app(qsetting=INASAFE_TEST)
from qgis.PyQt.QtWidgets import QApplication, QWidget, QGridLayout # NOQA
from parameters.qt_widgets.parameter_container import (
ParameterContainer) # NOQA
from safe.common.parameters.default_value_parameter import (
DefaultValueParameter) # NOQA
from safe.common.parameters.default_value_parameter_widget import (
DefaultValueParameterWidget) # NOQA
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "[email protected]"
__revision__ = '$Format:%H$'
def main():
"""Main function to run the example."""
app = QApplication([])
default_value_parameter = DefaultValueParameter()
default_value_parameter.name = 'Value parameter'
default_value_parameter.help_text = 'Help text'
default_value_parameter.description = 'Description'
default_value_parameter.labels = [
'Setting', 'Do not report', 'Custom']
default_value_parameter.options = [0, 1, None]
parameters = [
default_value_parameter
]
extra_parameters = [
(DefaultValueParameter, DefaultValueParameterWidget)
]
parameter_container = ParameterContainer(
parameters, extra_parameters=extra_parameters)
parameter_container.setup_ui()
widget = QWidget()
layout = QGridLayout()
layout.addWidget(parameter_container)
widget.setLayout(layout)
widget.setGeometry(0, 0, 500, 500)
widget.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| gpl-3.0 |
liam2/liam2 | tools/simulation_txt2yaml.py | 1 | 31398 | from __future__ import print_function  # needed: multi-argument print() calls below in this Python 2 module
import csv
import itertools
from itertools import izip
import operator
import os
from os import path
import sys
import yaml
from expr import *
from align_txt2csv import convert_txt_align
# TODO
# - filter fields: output only those which are actually used (comment out
# the rest)
# - convert "leaf" expression literals to the type of the variable being
# defined (only absolutely needed for bool)
# - use "abfrage" to determine fields
# ? remove useless bounds (eg age)
# ? implement choose for top-level filter
# ? build variable dependency tree and enclose any field which is used before it
# is computed in a lag function
# ? generic if -> choose transformation:
# if(c1, v1, if(c2, v2, if(c3, v3, if(c4, v4, 0))))
# ->
# choose(c1, v1,
# c2, v2,
# c3, v3,
# c4, v4)
# ? include original comments
# ? extract common condition parts in a filter to the choose function?
# ? implement between
# TODO manually:
# - if(p_yob=2003-60, MINR[2003], ...
# ->
# if((yob >= 1943) & (yob <= 2000), MINR[yob + 60], 0)
# - divorce function
# - KillPerson: what is not handled by normal "kill" function
def load_renames(fpath):
if fpath is not None:
with open(fpath) as f:
return yaml.load(f)
else:
return {}
def load_txt_def(input_path, name_idx):
with open(input_path, "rb") as f:
lines = list(csv.reader(f, delimiter='\t'))
firstline = lines[0]
colnames = firstline[:name_idx] + firstline[name_idx+1:]
current_obj = None
data = {}
for line in lines[1:]:
if not line:
continue
if all(not cell for cell in line):
continue
name, line = line[name_idx], line[:name_idx] + line[name_idx+1:]
if name.startswith('first_'):
current_obj = name[6:]
data[current_obj] = {}
print("reading '%s' variables" % current_obj)
elif name.startswith('end_'):
current_obj = None
print("done")
elif current_obj is not None:
data[current_obj][name] = dict(zip(colnames, line))
return data
def load_links(input_path):
return load_txt_def(input_path, 0)['linkage']
def load_fields(input_path):
data = load_txt_def(input_path, 1)
typemap = {
'char': float, # should be int but "char" is used all over the place for
# anything
'int': int,
'int1000': float
}
print("determining field types...")
for obj_type, obj_fields in data.iteritems():
print(" *", obj_type)
for name, fdef in obj_fields.iteritems():
real_dtype = typemap.get(fdef['Type'])
if real_dtype is None:
print("Warning: unknown type '%s', using int" % fdef['Type'])
real_dtype = int
ncateg = int(fdef['nCategory'])
if ncateg == 2:
assert fdef['Categories'] == "[0,1]", \
"field %s has 2 categories that are != from [0, 1]" \
% name
real_dtype = bool
elif ncateg > 2:
# TODO: import the list of possible values
real_dtype = int
obj_fields[name] = {'type': real_dtype}
print(" done")
return data
def transpose_table(data):
numrows = len(data)
numcols = len(data[0])
for rownum, row in enumerate(data, 1):
if len(row) != numcols:
raise Exception('line %d has %d columns instead of %d !'
% (rownum, len(row), numcols))
return [[data[rownum][colnum] for rownum in range(numrows)]
for colnum in range(numcols)]
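# Example: transpose_table([[1, 2, 3], [4, 5, 6]]) -> [[1, 4], [2, 5], [3, 6]];
# a ragged row triggers the "line N has X columns instead of Y" exception.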
def transpose_and_convert(lines):
transposed = transpose_table(lines)
names = transposed.pop(0)
funcs = [float for _ in range(len(lines))]
funcs[0] = int
converted = [tuple([func(cell.replace('--', 'NaN'))
for cell, func in izip(row, funcs)])
for row in transposed]
return names, converted
def load_av_globals(input_path):
# macro.av is a csv with tabs OR spaces as separator and a header of 1 line
with open(input_path, "rb") as f:
lines = [line.split() for line in f.read().splitlines()]
# eg: "sample 1955Y1 2060Y1"
firstline = lines.pop(0)
assert firstline[0] == "sample"
def year_str2int(s):
return int(s.replace('Y1', ''))
start, stop = year_str2int(firstline[1]), year_str2int(firstline[2])
num_periods = stop - start + 1
names, data = transpose_and_convert(lines)
assert names[0] == 'YEAR'
# rename YEAR to period
names[0] = 'period'
assert len(data) == num_periods
return (start, stop), names, data
def load_agespine(input_path):
# read process names until "end_spine"
with open(input_path, "rb") as f:
lines = [line.strip() for line in f.read().splitlines() if line]
# lines are of the form "regr_p_xxx" or "tran_p_xxx"
return list(itertools.takewhile(lambda l: l != 'end_spine', lines))
# ================================
class TextImporter(object):
keywords = None
def __init__(self, input_path, fields, obj_type, renames):
self.input_path = input_path
self.fields = fields
self.obj_type = obj_type
self.renames = renames
self.current_condition = None
self.conditions = None
def unimplemented(self, pos, line, lines):
print("unimplemented keyword: %s" % line[0])
return pos + 1, None
def skipline(self, pos, line, lines):
return pos + 1, None
def skipifzero(self, pos, line, lines):
if len(line) > 1 and line[1] and float(line[1]):
print("unimplemented keyword:", line[0])
return pos + 1, None
def readvalues(self, *args):
def f(pos, line, lines):
values = [func(str_value)
for func, str_value in zip(args, line[1:])
if func is not None]
if len(args) == 1:
empty = {str: '', int: 0}
values = values[0] if values else empty[args[0]]
return pos + 1, values
return f
def end(self, *args):
raise StopIteration
def numorcond(self, pos, line, lines):
# int=m, + skip line + m * (skipword, int=numand, +
# numand * (str, float=min, float=max))
num_or = int(line[1])
pos += 2
or_conds = []
for i in range(num_or):
line = lines[pos]
num_and = int(line[1]) if len(line) >= 1 else 0
and_conds = [(line[2 + j * 3],
float(line[3 + j * 3]),
float(line[4 + j * 3]))
for j in range(num_and)]
or_conds.append(and_conds)
pos += 1
self.conditions[self.current_condition] = {'condition': or_conds}
# We return self.conditions for each condition. It will be overwritten
# by later conditions if any, but this ensures they are stored even
# if there are actually less conditions than declared.
return pos, self.conditions
# return pos, res
def condition(self, pos, line, lines):
self.current_condition = int(line[1]) - 1
return pos + 1, None
def numconditions(self, pos, line, lines):
self.conditions = [None] * int(line[1])
return pos + 1, None
def load_txt_file(self):
# regr_p_alive_f.txt -> alive_f:
fpath, fname = path.split(self.input_path)
basename, ext = path.splitext(fname)
chunks = basename.split('_', 2)
assert len(chunks[1]) == 1
del chunks[1]
name = '_'.join(chunks)
with open(self.input_path, "rb") as f:
lines = list(csv.reader(f, delimiter='\t'))
values = {'name': name}
pos = 0
while pos < len(lines):
line = lines[pos]
if not line:
pos += 1
continue
keyword = line[0].lower()
if not keyword or keyword.isspace():
pos += 1
continue
f = self.keywords.get(keyword)
if f is None:
print("unknown keyword: '%s'" % keyword)
pos += 1
continue
try:
pos, value = f(pos, line, lines)
if value is not None:
values[keyword] = value
except StopIteration:
break
return values
# ------------------------
# transform to expression
# ------------------------
def var_type(self, name):
var_def = self.fields.get(name)
if var_def is None:
print("Warning: field '%s' not found (assuming int) !" % name)
return int
else:
return var_def['type']
def var_name(self, name):
assert name[1] == '_'
name = name[2:]
return self.renames.get(self.obj_type, {}).get(name, name)
def simplecond2expr(self, cond):
name, minvalue, maxvalue = cond
v = Variable(self.var_name(name), self.var_type(name))
return (v >= minvalue) & (v <= maxvalue)
def andcond2expr(self, andconditions):
if andconditions:
expr = self.simplecond2expr(andconditions[0])
for andcond in andconditions[1:]:
expr = expr & self.simplecond2expr(andcond)
return expr
else:
return True
def condition2expr(self, condition):
assert condition
expr = self.andcond2expr(condition[0])
for orcond in condition[1:]:
if orcond:
expr = expr | self.andcond2expr(orcond)
return expr
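# Hedged example (renamed field names are illustrative): the parsed
# condition [[('p_age', 18.0, 65.0)], [('p_work', 1.0, 1.0)]] becomes
# ((age >= 18.0) & (age <= 65.0)) | ((work >= 1.0) & (work <= 1.0))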
def import_file(self):
data = self.load_txt_file()
predictor, expr = self.data2expr(data)
return data['name'], predictor, expr
class RegressionImporter(TextImporter):
def __init__(self, input_path, fields, obj_type, renames):
TextImporter.__init__(self, input_path, fields, obj_type, renames)
# cf. readdyparam.cpp
self.keywords = {
'file description:': self.readvalues(str),
# Time is always 1, except in trap_p_coeduach.txt
'time': self.skipline, #readvalues(int),
'align': self.readvalues(int),
'predictor': self.readvalues(str, int, int, int),
'numconditions': self.numconditions,
'macro_align_multiple': self.unimplemented, #float,
'mmkt_cond_var': self.unimplemented, #str,
'mmkt_gender_var': self.unimplemented, #str,
'macro_align': self.unimplemented, #float,
'macro_align_relate': self.unimplemented, #str,
'macro_align_type': self.unimplemented, #str,
'ntransformations': self.unimplemented, #int + n * (str, int)
'marrmkt': self.unimplemented, #int + n * (str, str -- which is then parsed)
'condition': self.condition,
'endoffile': self.end,
'numorcond': self.numorcond,
'indepentvar': self.indepentvar,  # (sic) keyword is misspelled in the liam1 files themselves
'interactionterms': self.interactionterms,
'u_varname': self.readvalues(str),
's_u': self.skipifzero, # float, skipword, skipword, str (unused?)
's_v': self.skipifzero, # float (unused in MIDAS?)
'r': self.skipifzero, # float (unused?)
# ignore common junk
'conditions': self.skipline,
'distribution': self.skipline,
'coefficients and structure': self.skipline,
'errorstructure': self.skipline
}
def indepentvar(self, pos, line, lines):
# int = m + skip line +
# m * (skipword, str=name, skipword, float=min,
# float=max, float=coef)
# name="constant" is a special case
num_vars = int(line[1])
pos += 2
vars = []
def floatorempty(s):
return float(s) if s else 0.0
readvariable = self.readvalues(str, None,
floatorempty, floatorempty, floatorempty)
for i in range(num_vars):
line = lines[pos]
pos, values = readvariable(pos, line, lines)
vars.append(values)
self.conditions[self.current_condition]['vars'] = vars
return pos, None
def interactionterms(self, pos, line, lines):
numterms = int(line[1]) if line[1] else 0
if numterms:
print("unimplemented keyword: interactionterms")
return pos + 1, None
# ------------------------
# transform to expression
# ------------------------
def var2expr(self, var):
name, minvalue, maxvalue, coef = var
if name == 'constant':
return coef
else:
v = Variable(self.var_name(name), self.var_type(name))
return v * coef
# return ZeroClip(v, minvalue, maxvalue) * coef
def vars2expr(self, vars):
assert vars
expr = self.var2expr(vars[0])
for var in vars[1:]:
expr = expr + self.var2expr(var)
return expr
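# e.g. (hypothetical coefficients) [('constant', 0.0, 0.0, 1.5),
# ('p_inc', 0.0, 1e9, 0.2)] -> 1.5 + inc * 0.2; the [min, max]
# bounds are ignored since clipping is disabled above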
def data2expr(self, data):
conditions = data['numorcond']
assert conditions
if len(conditions) == 1:
condition = conditions[0]
expr = self.vars2expr(condition['vars'])
filter_expr = self.condition2expr(condition['condition'])
else:
lastcond = conditions[-1]
cond_expr = self.condition2expr(lastcond['condition'])
expr = Where(cond_expr, self.vars2expr(lastcond['vars']), 0)
filter_expr = cond_expr
for cond in conditions[-2::-1]:
cond_expr = self.condition2expr(cond['condition'])
expr = Where(cond_expr, self.vars2expr(cond['vars']), expr)
filter_expr |= cond_expr
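# Folding from the last condition inwards yields
# Where(C1, V1, Where(C2, V2, ... Where(Cn, Vn, 0))), so earlier
# conditions take precedence; the filter is C1 | C2 | ... | Cn.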
kwargs = {'filter': filter_expr}
predictor, pred_type, _, _ = data['predictor']
predictor = self.var_name(predictor)
if data.get('u_varname'):
# another option would be to do:
#expr += Variable(self.var_name(data['u_varname']))
kwargs['error_var'] = self.var_name(data['u_varname'])
if bool(data['align']):
kwargs['align'] = 'al_p_%s.csv' % data['name']
if pred_type != 2:
print("unimplemented align for pred_type:", pred_type)
if pred_type == 0: # continuous
expr = ContRegr(expr, **kwargs)
elif pred_type == 1: # clipped continuous
expr = ClipRegr(expr, **kwargs)
elif pred_type == 2: # logit
expr = LogitRegr(expr, **kwargs)
elif pred_type == 3: # logged continuous
expr = LogRegr(expr, **kwargs)
elif pred_type == 4: # clipped logged continuous
print("Converting clipped logged continuous to logged continuous")
expr = LogRegr(expr, **kwargs)
else:
print("unimplemented predictor type:", pred_type)
return predictor, expr
class TransitionImporter(TextImporter):
def __init__(self, input_path, fields, constants, links, obj_type, renames):
TextImporter.__init__(self, input_path, fields, obj_type, renames)
self.constants = constants
self.links = links
# cf. readdyparam.cpp
self.keywords = {
'file description:': self.readvalues(str),
# Time is always 1, except in trap_p_coeduach.txt
'time': self.skipline, #readvalues(int),
'align': self.readvalues(int),
'predictor': self.readvalues(str, int),
'numconditions': self.numconditions,
'condition': self.condition,
'endoffile': self.end,
'numorcond': self.numorcond,
'gen': self.gen, # str
'fpbcalc': self.fpbcalc, # str
'fgen': self.fgen, # str
'zero': self.skipifzero,
'first': self.skipifzero,
'second': self.skipifzero,
'third': self.skipifzero,
# ignore common junk
'conditions': self.skipline,
'type': self.skipline,
}
def gen(self, pos, line, lines):
# min(arg1, arg2)
# max(arg1, arg2)
# setto[value]
# expression with "( ) + - * / ^ , min max"
s = line[1]
# add spaces around operators
s = re.sub(r'([+\-*/^])', r' \1 ', s)
s = s.replace('^', '**')
s = re.sub(r'setto\[([^\]]+)\]', r'\1', s)
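# e.g. the hypothetical action "setto[p_inc^2]" is rewritten by the
# three steps above into "p_inc ** 2"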
self.conditions[self.current_condition]['action'] = s
return pos + 1, None
def fgen(self, pos, line, lines):
# function(args)
# - KillPerson(varname1=amount1;varname2=amount2;...)
# -> also updates marital status of spouse
# - duration(variable,const)
# -> const is 1 char
# ... (see smile p.13, 14 and 15)
s = line[1]
s = s.replace('CreatePerson(', "new('person', ")
s = s.replace('newbirth(', "new('person', ")
s = s.replace('newhousehold(', "new('household', ")
s = re.sub(r'duration\((\w+),(\d+)\)', r'duration(\1 == \2)', s)
# remove extra , inserted by above replacements
s = s.replace(', )', ')')
s = s.replace(';', ', ')
# getlink(ps,p_inc) -> ps.p_inc
if "getlink" in s:
s = re.sub(r'getlink\((\w{2}),(\w+)\)', r'\1.\2', s)
link, var = s.split('.')
assert var[1] == '_'
var = var[2:]
s = "%s.%s" % (link, var)
s = s.replace('mean(', 'tavg(')
s = s.replace('prev(', 'lag(')
# prevent name collision
s = s.replace('divorce(', 'do_divorce(')
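# Hedged examples: "getlink(ps,p_inc)" -> "ps.inc", and
# "duration(p_work,1)" -> "duration(p_work == 1)"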
self.conditions[self.current_condition]['action'] = s
return pos + 1, None
def fpbcalc(self, pos, line, lines):
s = line[1]
s = s.replace('grandom(', 'normal(')
# add spaces around +, -, *, / and ^ operators, if not present
s = re.sub(r'(\S)([+\-*/^])(\S)', r'\1 \2 \3', s)
# idem for < and >
s = re.sub(r'([<>]=?)', r' \1 ', s)
# = -> ==
s = re.sub(r'([^<>])=', r'\1 == ', s)
# CONST[ddddY1] -> CONST[dddd]
s = re.sub(r'([A-Z_][A-Z0-9_]*)\[(\d{4})Y1\]', r'\1[\2]', s)
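# e.g. the hypothetical input "p_inc=MINWAGE[2003Y1]*2" comes out as
# "p_inc == MINWAGE[2003] * 2" (assuming MINWAGE is a declared constant)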
self.conditions[self.current_condition]['action'] = s
return pos + 1, None
# def zero(self, pos, line, lines):
# if line[1] != "0":
# 1) find line with "predict" keyword
# 2) for each pred_cat, cond.value[n] = float(word[n+1])
# 3) find line with "mean" keyword
# 4) cond.nZero = int(word[1])
# 5) for each pred_cat, cond.mean[n] = float(word[n+1])
# ------------------------
# transform to expression
# ------------------------
def action2expr(self, data):
const_sample, const_names = self.constants
globals = dict((name, SubscriptableVariable(name))
for name in const_names)
globals.update((name, Variable(self.var_name(name),
self.var_type(name)))
for name in self.fields.keys())
links = [(name, Link(name, link_def['keyorig'], link_def['desttype'],
self.renames.get(link_def['desttype'], {})))
for name, link_def in self.links.iteritems()]
globals.update(links)
return parse(data, globals)
def data2expr(self, data):
# pred_type seems to be ignored for transitions
predictor, pred_type = data['predictor']
local_name = self.var_name(predictor)
conditions = data['numorcond']
assert conditions
# this is a hack to work around useless conditions in liam 1
for cond in conditions:
for orcond in cond['condition']:
if ('p_co_alive', 1.0, 1.0) in orcond:
print(" Warning: removed 'p_co_alive == 1' condition")
orcond.remove(('p_co_alive', 1.0, 1.0))
lastcond = conditions[-1]
if lastcond is None:
raise Exception('Actual number of conditions does not match '
'the number of conditions declared!')
cond_expr = self.condition2expr(lastcond['condition'])
v = Variable(local_name, self.var_type(predictor))
expr = Where(cond_expr, self.action2expr(lastcond['action']), v)
for cond in conditions[-2::-1]:
cond_expr = self.condition2expr(cond['condition'])
expr = Where(cond_expr, self.action2expr(cond['action']), expr)
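# As in RegressionImporter.data2expr, folding from the last condition
# inwards gives Where(C1, A1, Where(C2, A2, ..., v)), with v the
# unmodified variable, so earlier conditions take precedence.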
return local_name, expr
class TrapImporter(TextImporter):
pass
# =====================
def load_processes(input_path, fnames,
fields, constants, links, obj_type, renames):
print("=" * 40)
data = []
predictor_seen = {}
parsed = []
obj_renames = renames.get(obj_type, {})
print("pass 1: parsing files...")
for fname in fnames:
print(" - %s" % fname)
fpath = path.join(input_path, fname)
if fname.startswith('regr_'):
importer = RegressionImporter(fpath, fields, obj_type, renames)
elif fname.startswith('tran_'):
importer = TransitionImporter(fpath, fields, constants, links,
obj_type, renames)
else:
importer = None
if importer is not None:
fullname, predictor, expr = importer.import_file()
type_, name = fullname.split('_', 1)
name = obj_renames.get(name, name)
fullname = '%s_%s' % (type_, name)
parsed.append((fname, fullname, predictor, expr))
predictor_seen.setdefault(predictor, []).append(fullname)
print("-" * 40)
print("pass 2: simplifying...")
other_types = {
'regr': ('tran', 'trap'),
'tran': ('regr', 'trap'),
'trap': ('tran', 'regr')
}
proc_name_per_file = {}
proc_names = {}
for fname, fullname, predictor, expr in parsed:
print(" - %s (%s)" % (fname, predictor))
type_, name = fullname.split('_', 1)
expr_str = str(simplify(expr))
if len(predictor_seen[predictor]) == 1:
if name != predictor:
print(" renaming '%s' process to '%s'" % (name, predictor))
name = predictor
res = expr_str
else:
conflicting_names = predictor_seen[predictor]
assert len(conflicting_names) > 1
names_to_check = ['%s_%s' % (other_type, name)
for other_type in other_types[type_]]
if any(name in conflicting_names for name in names_to_check):
name = fullname
while name in proc_names:
name += '_dupe'
print(" renaming process to '%s'" % name)
res = {'predictor': predictor,
'expr': expr_str}
proc_names[name] = True
data.append((name, res))
proc_name_per_file[fname] = name
print("=" * 40)
return proc_name_per_file, data
def convert_all_align(input_path):
import glob
for fpath in glob.glob(path.join(input_path, 'al_regr_*.txt')):
convert_txt_align(fpath)
# =====================
# OUTPUT
# =====================
def orderedmap2yaml(items, indent):
sep = '\n' + ' ' * indent
return sep.join("- %s: %s" % f for f in items)
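# e.g. orderedmap2yaml([('age', 'int'), ('inc', 'float')], indent=2)
# returns "- age: int\n  - inc: float"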
def links2yaml(links):
if links:
# ('hp', {'desttype': 'p', 'prefix': 'p',
# 'origintype': 'h', 'keyorig': 'pid'})]
sep = '\n '
return """
links:
%s""" % sep.join("%s: {type: many2one, target: %s, field: %s}" %
(name, l['desttype'], l['keyorig'])
for name, l in links)
else:
return ''
def process2yaml(processes):
if processes:
sep = '\n '
processes_str = []
for name, expr in processes:
if isinstance(expr, dict):
expr_lines = expr['expr'].splitlines()
# + 2 is for ": "
indent = '\n' + ' ' * (16 + len(expr['predictor']) + 2)
expr_str = indent.join(expr_lines)
process_str = """%s:
%s: %s""" % (name, expr['predictor'], expr_str)
else:
expr_lines = expr.splitlines()
indent = '\n' + ' ' * (12 + len(name) + 2) # + 2 is for ": "
expr = indent.join(expr_lines)
process_str = '%s: %s' % (name, expr)
processes_str.append(process_str)
return """
processes:
%s""" % sep.join(processes_str)
else:
return ''
def constants2yaml(constants):
const_defs = [(name, 'float') for name in constants[1]]
return orderedmap2yaml(const_defs, indent=2)
def entities2yaml(entities):
entity_tmpl = " %s:%s%s%s\n"
e_strings = []
for ent_name, entity in entities.iteritems():
fields = entity['fields']
if fields:
fields = sorted([(fname, f['type'].__name__)
for fname, f in fields.iteritems()])
fields_str = '\n fields:\n %s' \
% orderedmap2yaml(fields, 3)
else:
fields_str = ''
links_str = links2yaml(entity['links'])
process_str = process2yaml(entity['processes'])
e_strings.append(entity_tmpl % (ent_name, fields_str, links_str,
process_str))
return '\n'.join(e_strings)
def process_list2yaml(processes):
s = []
for ent_name, ent_processes in itertools.groupby(processes,
operator.itemgetter(0)):
p_str = ',\n '.join(pname
for ent_name, pname in ent_processes)
s.append(' - %s: [%s]' % (ent_name, p_str))
return '\n'.join(s)
def simulation2yaml(constants, entities, process_list):
constants_str = constants2yaml(constants)
entities_str = entities2yaml(entities)
process_list_str = process_list2yaml(process_list)
return """globals:
periodic:
# period is implicit
%s
entities:
%s
simulation:
processes:
%s
input:
file: base.h5
output:
file: simulation.h5
start_period: 2003 # first simulated period
periods: 20
""" % (constants_str, entities_str, process_list_str)
# =====================
if __name__ == '__main__':
argv = sys.argv
if len(argv) < 3:
print(
"Usage: %s input_path output_path [rename_file] [filtered]" % argv[0]
)
sys.exit()
else:
input_path = argv[1]
output_path = argv[2]
rename_path = None if len(argv) < 4 else argv[3]
filtered = True if len(argv) < 5 else argv[4] == "filtered"
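# Hypothetical invocation (script and file names illustrative only):
#   python convert_liam1.py ./liam1_model/ simulation.yml renames.txt filtered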
if not path.isdir(input_path):
input_path, fname = path.split(input_path)
else:
fname = None
renames = load_renames(rename_path)
fields_per_obj = load_fields(path.join(input_path, 'dyvardesc.txt'))
constants = load_av_globals(path.join(input_path, 'macro.av'))[:2]
links = load_links(path.join(input_path, 'linkage.txt'))
process_list = load_agespine(path.join(input_path, 'agespine.txt'))
fields = {}
for obj_type, obj_fields in fields_per_obj.iteritems():
for name, fdef in obj_fields.iteritems():
fields['%s_%s' % (obj_type, name)] = fdef
if fname is None:
raw_names = os.listdir(input_path)
else:
raw_names = [fname]
filtered = False
if filtered:
base_names = process_list
else:
base_names = []
for raw_name in raw_names:
basename, ext = path.splitext(raw_name)
if ext == '.txt':
base_names.append(basename)
process_files = []
proc_per_obj = {}
for basename in base_names:
chunks = basename.split('_', 2)
if len(chunks) < 3: # not of the form proc_o_name, e.g. tran_p_x
continue
proc_type, obj_type, name = chunks
if proc_type == 'al':
continue
if len(obj_type) != 1:
continue
file_name = basename + '.txt'
process_files.append((obj_type, file_name))
proc_per_obj.setdefault(obj_type, []).append(file_name)
proc_name_per_file = {}
entities = {}
for obj_type, obj_fields in fields_per_obj.iteritems():
obj_links = [(k, v) for k, v in links.items()
if v['origintype'] == obj_type]
obj_fields.update([(v['keyorig'], {'type': int}) for k, v in obj_links])
obj_proc_files = proc_per_obj.get(obj_type, [])
print("loading processes for %s" % obj_type)
obj_proc_names, obj_processes = load_processes(input_path,
obj_proc_files,
fields, constants, links,
obj_type,
renames)
proc_name_per_file.update(obj_proc_names)
obj_renames = renames.get(obj_type, {})
for old_name in obj_fields.keys():
new_name = obj_renames.get(old_name)
if new_name is not None:
obj_fields[new_name] = obj_fields.pop(old_name)
entities[obj_type] = {
'fields': obj_fields,
'links': obj_links,
'processes': obj_processes
}
process_names = []
for obj, file_name in process_files:
proc_name = proc_name_per_file.get(file_name)
if proc_name is not None:
process_names.append((obj, proc_name))
print("exporting to '%s'" % output_path)
with open(output_path, 'w') as f_out:
# default YAML serialization is ugly, so we produce the string ourselves
f_out.write(simulation2yaml(constants, entities, process_names))
# yaml.dump(yamldata, f_out, default_flow_style=False,
# default_style='"', indent=4)
if fname is None:
convert_all_align(input_path)
print("done.")
| gpl-3.0 |
scalable-networks/gnuradio-3.7.0.1 | gr-filter/python/filter/qa_fractional_interpolator.py | 10 | 3149 | #!/usr/bin/env python
#
# Copyright 2007,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, filter, blocks
import math
def sig_source_f(samp_rate, freq, amp, N):
t = map(lambda x: float(x)/samp_rate, xrange(N))
y = map(lambda x: math.sin(2.*math.pi*freq*x), t)
return y
def sig_source_c(samp_rate, freq, amp, N):
t = map(lambda x: float(x)/samp_rate, xrange(N))
y = map(lambda x: math.cos(2.*math.pi*freq*x) + \
1j*math.sin(2.*math.pi*freq*x), t)
return y
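# NB: the amp argument is accepted but unused; both helpers generate
# unit-amplitude sinusoids of the given frequency sampled at samp_rate.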
class test_fractional_resampler(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_001_ff(self):
N = 10000 # number of samples to use
fs = 1000 # baseband sampling rate
rrate = 1.123 # resampling rate
freq = 10
data = sig_source_f(fs, freq, 1, N)
signal = blocks.vector_source_f(data)
op = filter.fractional_interpolator_ff(0, rrate)
snk = blocks.vector_sink_f()
self.tb.connect(signal, op, snk)
self.tb.run()
Ntest = 5000
L = len(snk.data())
t = map(lambda x: float(x)/(fs/rrate), xrange(L))
phase = 0.1884
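# fixed phase offset compensating the interpolating filter's group
# delay (value presumably determined empirically)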
expected_data = map(lambda x: math.sin(2.*math.pi*freq*x+phase), t)
dst_data = snk.data()
self.assertFloatTuplesAlmostEqual(expected_data[-Ntest:], dst_data[-Ntest:], 3)
def test_002_cc(self):
N = 10000 # number of samples to use
fs = 1000 # baseband sampling rate
rrate = 1.123 # resampling rate
freq = 10
data = sig_source_c(fs, freq, 1, N)
signal = blocks.vector_source_c(data)
op = filter.fractional_interpolator_cc(0.0, rrate)
snk = blocks.vector_sink_c()
self.tb.connect(signal, op, snk)
self.tb.run()
Ntest = 5000
L = len(snk.data())
t = map(lambda x: float(x)/(fs/rrate), xrange(L))
phase = 0.1884
expected_data = map(lambda x: math.cos(2.*math.pi*freq*x+phase) + \
1j*math.sin(2.*math.pi*freq*x+phase), t)
dst_data = snk.data()
self.assertComplexTuplesAlmostEqual(expected_data[-Ntest:], dst_data[-Ntest:], 3)
if __name__ == '__main__':
gr_unittest.run(test_fractional_resampler, "test_fractional_resampler.xml")
| gpl-3.0 |
mvanderkolff/xhtml2pdf | xhtml2pdf/pdf.py | 41 | 1949 | # -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from xhtml2pdf.util import pisaTempFile, getFile
import logging
log = logging.getLogger("xhtml2pdf")
class pisaPDF:
def __init__(self, capacity=-1):
self.capacity = capacity
self.files = []
def addFromURI(self, url, basepath=None):
obj = getFile(url, basepath)
if obj and (not obj.notFound()):
self.files.append(obj.getFile())
addFromFileName = addFromURI
def addFromFile(self, f):
if hasattr(f, "read"):
self.files.append(f)
else:
self.addFromURI(f)
def addFromString(self, data):
self.files.append(pisaTempFile(data, capacity=self.capacity))
def addDocument(self, doc):
if hasattr(doc.dest, "read"):
self.files.append(doc.dest)
def join(self, file=None):
import pyPdf # TODO: Why is this in the middle of everything?
output = pyPdf.PdfFileWriter()
for pdffile in self.files:
input = pyPdf.PdfFileReader(pdffile)
for pageNumber in xrange(input.getNumPages()):
output.addPage(input.getPage(pageNumber))
if file is not None:
output.write(file)
return file
out = pisaTempFile(capacity=self.capacity)
output.write(out)
return out.getvalue()
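# Minimal usage sketch (hypothetical file names):
#   pdf = pisaPDF()
#   pdf.addFromFileName("a.pdf")
#   pdf.addFromFileName("b.pdf")
#   open("merged.pdf", "wb").write(pdf.join())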
getvalue = join
__str__ = join
| apache-2.0 |