# This module implements PEP 386
# 2010-04-20: hg clone http://bitbucket.org/tarek/distutilsversion/
"""
"Rational" version definition and parsing for DistutilsVersionFight
discussion at PyCon 2009.
"""
import sys
import re
class IrrationalVersionError(Exception):
"""This is an irrational version."""
pass
class HugeMajorVersionNumError(IrrationalVersionError):
"""An irrational version because the major version number is huge
(often because a year or date was used).
See `error_on_huge_major_num` option in `NormalizedVersion` for details.
This guard can be disabled by setting that option to False.
"""
pass
# A marker used in the second and third parts of the `parts` tuple, for
# versions that don't have those segments, to sort properly. An example
# of versions in sort order ('highest' last):
# 1.0b1 ((1,0), ('b',1), ('f',))
# 1.0.dev345 ((1,0), ('f',), ('dev', 345))
# 1.0 ((1,0), ('f',), ('f',))
# 1.0.post256.dev345 ((1,0), ('f',), ('f', 'post', 256, 'dev', 345))
# 1.0.post345 ((1,0), ('f',), ('f', 'post', 345, 'f'))
# The orderings that matter in those columns are:
#   - second element: 'b' < 'f' (prereleases sort before the final marker)
#   - third element: 'dev' < 'f' < 'post'
#   - trailing element of a 'post' tuple: 'dev' < 'f'
# Other letters would do, but 'f' for 'final' is kind of nice.
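# For illustration (a sketch, assuming the parsing implemented below):
#   NormalizedVersion("1.0b1").parts == ((1, 0), ('b', 1), ('f',))
#   NormalizedVersion("1.0").parts   == ((1, 0), ('f',), ('f',))
# Plain tuple comparison then gives NormalizedVersion("1.0b1") < NormalizedVersion("1.0"),
# because ('b', 1) < ('f',).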
FINAL_MARKER = ('f',)
VERSION_RE = re.compile(r'''
^
(?P<version>\d+\.\d+) # minimum 'N.N'
(?P<extraversion>(?:\.\d+)*) # any number of extra '.N' segments
(?:
(?P<prerel>[abc]|rc) # 'a'=alpha, 'b'=beta, 'c'=release candidate
# 'rc'= alias for release candidate
(?P<prerelversion>\d+(?:\.\d+)*)
)?
(?P<postdev>(\.post(?P<post>\d+))?(\.dev(?P<dev>\d+))?)?
$''', re.VERBOSE)
class NormalizedVersion(object):
"""A rational version.
Good:
1.2 # equivalent to "1.2.0"
1.2.0
1.2a1
1.2.3a2
1.2.3b1
1.2.3c1
1.2.3.4
TODO: fill this out
Bad:
1 # minimum two numbers
1.2a # release level must have a release serial
1.2.3b
"""
def __init__(self, s, error_on_huge_major_num=True):
"""Create a NormalizedVersion instance from a version string.
@param s {str} The version string.
@param error_on_huge_major_num {bool} Whether to consider an
apparent use of a year or full date as the major version number
an error. Default True. One of the observed patterns on PyPI before
the introduction of `NormalizedVersion` was version numbers like this:
2009.01.03
20040603
2005.01
This guard is here to strongly encourage the package author to
use an alternate version, because a release deployed into PyPI
and, e.g. downstream Linux package managers, will forever remove
the possibility of using a version number like "1.0" (i.e.
where the major number is less than that huge major number).
"""
self._parse(s, error_on_huge_major_num)
@classmethod
def from_parts(cls, version, prerelease=FINAL_MARKER,
devpost=FINAL_MARKER):
return cls(cls.parts_to_str((version, prerelease, devpost)))
def _parse(self, s, error_on_huge_major_num=True):
"""Parses a string version into parts."""
match = VERSION_RE.search(s)
if not match:
raise IrrationalVersionError(s)
groups = match.groupdict()
parts = []
# main version
block = self._parse_numdots(groups['version'], s, False, 2)
extraversion = groups.get('extraversion')
if extraversion not in ('', None):
block += self._parse_numdots(extraversion[1:], s)
parts.append(tuple(block))
# prerelease
prerel = groups.get('prerel')
if prerel is not None:
block = [prerel]
block += self._parse_numdots(groups.get('prerelversion'), s,
pad_zeros_length=1)
parts.append(tuple(block))
else:
parts.append(FINAL_MARKER)
# postdev
if groups.get('postdev'):
post = groups.get('post')
dev = groups.get('dev')
postdev = []
if post is not None:
postdev.extend([FINAL_MARKER[0], 'post', int(post)])
if dev is None:
postdev.append(FINAL_MARKER[0])
if dev is not None:
postdev.extend(['dev', int(dev)])
parts.append(tuple(postdev))
else:
parts.append(FINAL_MARKER)
self.parts = tuple(parts)
if error_on_huge_major_num and self.parts[0][0] > 1980:
raise HugeMajorVersionNumError("huge major version number, %r, "
"which might cause future problems: %r" % (self.parts[0][0], s))
def _parse_numdots(self, s, full_ver_str, drop_trailing_zeros=True,
pad_zeros_length=0):
"""Parse 'N.N.N' sequences, return a list of ints.
@param s {str} 'N.N.N..." sequence to be parsed
@param full_ver_str {str} The full version string from which this
comes. Used for error strings.
@param drop_trailing_zeros {bool} Whether to drop trailing zeros
from the returned list. Default True.
@param pad_zeros_length {int} The length to which to pad the
returned list with zeros, if necessary. Default 0.
"""
nums = []
for n in s.split("."):
if len(n) > 1 and n[0] == '0':
raise IrrationalVersionError("cannot have leading zero in "
"version number segment: '%s' in %r" % (n, full_ver_str))
nums.append(int(n))
if drop_trailing_zeros:
while nums and nums[-1] == 0:
nums.pop()
while len(nums) < pad_zeros_length:
nums.append(0)
return nums
def __str__(self):
return self.parts_to_str(self.parts)
@classmethod
def parts_to_str(cls, parts):
"""Transforms a version expressed in tuple into its string
representation."""
# XXX This doesn't check for invalid tuples
main, prerel, postdev = parts
s = '.'.join(str(v) for v in main)
if prerel is not FINAL_MARKER:
s += prerel[0]
s += '.'.join(str(v) for v in prerel[1:])
if postdev and postdev is not FINAL_MARKER:
if postdev[0] == 'f':
postdev = postdev[1:]
i = 0
while i < len(postdev):
if i % 2 == 0:
s += '.'
s += str(postdev[i])
i += 1
return s
def __repr__(self):
return "%s('%s')" % (self.__class__.__name__, self)
def _cannot_compare(self, other):
raise TypeError("cannot compare %s and %s"
% (type(self).__name__, type(other).__name__))
def __eq__(self, other):
if not isinstance(other, NormalizedVersion):
self._cannot_compare(other)
return self.parts == other.parts
def __lt__(self, other):
if not isinstance(other, NormalizedVersion):
self._cannot_compare(other)
return self.parts < other.parts
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__eq__(other) or self.__lt__(other)
def __ge__(self, other):
return self.__eq__(other) or self.__gt__(other)
def suggest_normalized_version(s):
"""Suggest a normalized version close to the given version string.
If you have a version string that isn't rational (i.e. NormalizedVersion
doesn't like it) then you might be able to get an equivalent (or close)
rational version from this function.
This does a number of simple normalizations to the given string, based
on observation of versions currently in use on PyPI. Given a dump of
those versions during PyCon 2009 (4287 in total):
- 2312 (53.93%) matched NormalizedVersion without change
- 3474 (81.04%) matched once the automatic suggestion from this method was applied
@param s {str} An irrational version string.
@returns A rational version string, or None if one couldn't be determined.
"""
try:
NormalizedVersion(s)
return s # already rational
except IrrationalVersionError:
pass
rs = s.lower()
# part of this could use maketrans
for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
('beta', 'b'), ('rc', 'c'), ('-final', ''),
('-pre', 'c'),
('-release', ''), ('.release', ''), ('-stable', ''),
('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
('final', '')):
rs = rs.replace(orig, repl)
# if something ends with dev or pre, we add a 0
rs = re.sub(r"pre$", r"pre0", rs)
rs = re.sub(r"dev$", r"dev0", rs)
# if we have something like "b-2" or "a.2" at the end of the
# version, that is probably a beta, alpha, etc. marker,
# so remove the dash or dot
rs = re.sub(r"([abc|rc])[\-\.](\d+)$", r"\1\2", rs)
# 1.0-dev-r371 -> 1.0.dev371
# 0.1-dev-r79 -> 0.1.dev79
rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
# Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
# Clean: v0.3, v1.0
if rs.startswith('v'):
rs = rs[1:]
# Clean leading '0's on numbers.
#TODO: unintended side-effect on, e.g., "2003.05.09"
# PyPI stats: 77 (~2%) better
rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
# Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
# zero.
# PyPI stats: 245 (7.56%) better
rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
# the 'dev-rNNN' tag is a dev tag
rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
# clean the - when used as a pre delimiter
rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
# a terminal "dev" or "devel" can be changed into ".dev0"
rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
# a terminal "dev" can be changed into ".dev0"
rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
# a terminal "final" or "stable" can be removed
rs = re.sub(r"(final|stable)$", "", rs)
# The 'r' and the '-' tags are post release tags
# 0.4a1.r10 -> 0.4a1.post10
# 0.9.33-17222 -> 0.9.33.post17222
# 0.9.33-r17222 -> 0.9.33.post17222
rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
# Clean 'r' instead of 'dev' usage:
# 0.9.33+r17222 -> 0.9.33.dev17222
# 1.0dev123 -> 1.0.dev123
# 1.0.git123 -> 1.0.dev123
# 1.0.bzr123 -> 1.0.dev123
# 0.1a0dev.123 -> 0.1a0.dev123
# PyPI stats: ~150 (~4%) better
rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
# Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
# 0.2.pre1 -> 0.2c1
# 0.2-c1 -> 0.2c1
# 1.0preview123 -> 1.0c123
# PyPI stats: ~21 (0.62%) better
rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
# Tcl/Tk uses "px" for their post release markers
rs = re.sub(r"p(\d+)$", r".post\1", rs)
try:
NormalizedVersion(rs)
return rs # already rational
except IrrationalVersionError:
pass
return None
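# A minimal usage sketch (illustrative only; the version strings below are
# arbitrary examples, not taken from any particular project).
if __name__ == '__main__':
    assert NormalizedVersion("1.0a1") < NormalizedVersion("1.0")  # prereleases sort first
    assert str(NormalizedVersion("1.2")) == "1.2"
    assert suggest_normalized_version("1.0-rc1") == "1.0c1"
    assert suggest_normalized_version("hello world") is None  # nothing rational to suggest
    print("verlib sanity checks passed")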
# Copyright 2018 The AdaNet Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TensorFlow major version compatibility code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from distutils.version import LooseVersion
import tensorflow.compat.v1 as tf
import tensorflow.compat.v2 as tf_v2
# pylint: disable=unused-import
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python import tf2
from tensorflow.python.keras.metrics import Metric
from tensorflow.python.tpu import tpu_function
from tensorflow_estimator.python.estimator.head import regression_head
# pylint: enable=g-direct-tensorflow-import
# pylint: enable=unused-import
DatasetV1 = tf.compat.v1.data.Dataset
DatasetV2 = tf.compat.v2.data.Dataset
v1 = tf.compat.v1
v2 = tf.compat.v2
try:
SessionRunHook = tf.estimator.SessionRunHook
except AttributeError:
SessionRunHook = tf.train.SessionRunHook
try:
SessionRunArgs = tf.estimator.SessionRunArgs
except AttributeError:
SessionRunArgs = tf.train.SessionRunArgs
try:
SummarySaverHook = tf.estimator.SummarySaverHook
except AttributeError:
SummarySaverHook = tf.train.SummarySaverHook
try:
CheckpointSaverHook = tf.estimator.CheckpointSaverHook
except AttributeError:
CheckpointSaverHook = tf.train.CheckpointSaverHook
try:
# Loss reduction strings change between TF 1.13 and TF 1.14, which causes
# Heads to raise errors.
regression_head.RegressionHead(
loss_reduction=tf.losses.Reduction.SUM_OVER_BATCH_SIZE)
SUM_OVER_BATCH_SIZE = tf.losses.Reduction.SUM_OVER_BATCH_SIZE
SUM = tf.losses.Reduction.SUM
except ValueError:
SUM_OVER_BATCH_SIZE = "sum_over_batch_size"
SUM = "sum"
def tensor_name(tensor):
"""Returns the Tensor's name.
Tensor names always have the structure <op_name>:<int>. This method
returns the portion before the ':'.
Args:
tensor: Tensor.
Returns:
String name of the Tensor.
"""
return tensor.name.split(":")[-2]
def version_greater_or_equal(semver):
"""Returns whether the current TF version is >= to semver string."""
try:
tf_version = tf.version.VERSION
except AttributeError:
tf_version = tf.VERSION
return LooseVersion(tf_version) >= LooseVersion(semver)
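# Illustrative usage (the version string is an arbitrary example):
#   if version_greater_or_equal("1.14.0"):
#     ...  # take the code path that relies on newer TF APIs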
def make_one_shot_iterator(dataset):
"""Returns a dataset's one-shot iterator."""
try:
return v1.data.make_one_shot_iterator(dataset)
except AttributeError:
return dataset.make_one_shot_iterator()
def random_normal(*args, **kwargs):
"""Returns a random normal distribution Tensor."""
try:
return tf.random.normal(*args, **kwargs)
except AttributeError:
return tf.random_normal(*args, **kwargs)
def metric_op(metric):
"""Converts Keras metrics into a metric op tuple.
NOTE: If this method is called in a for loop, the runtime is O(n^2). However,
the number of eval metrics at any given time should be small enough that
this does not affect performance. Any impact is only during graph construction
time, and therefore has no effect on steps/s.
Args:
metric: Either a `tf.keras.metric.Metric` instance or a tuple of Tensor
value and update op.
Returns:
A tuple of metric Tensor value and update op.
"""
if not isinstance(metric, tf.keras.metrics.Metric):
return metric
vars_to_add = {}
for var in metric.variables:
vars_to_add[_hashable_var_key(var)] = var
metric = (metric.result(), metric.updates[0])
_update_variable_collection(v1.GraphKeys.LOCAL_VARIABLES, vars_to_add)
_update_variable_collection(v1.GraphKeys.METRIC_VARIABLES, vars_to_add)
return metric
def _hashable_var_key(var):
"""Returns a hashable key to identify the given Variable."""
# In TF 2, Variables themselves are not hashable, so cannot be dict keys.
# Error is "Tensor is unhashable if Tensor equality is enabled. Instead, use
# tensor.experimental_ref() as the key". For a related issue, see:
# https://github.com/tensorflow/tensorflow/issues/32139
ref_op = getattr(var, "experimental_ref", None)
if callable(ref_op):
return ref_op()
return var
def _update_variable_collection(collection_name, vars_to_add):
"""Add variables to collection."""
collection = {}
for var in v1.get_collection(collection_name):
collection[_hashable_var_key(var)] = var
# Skip variables that are in the collection already: O(n) runtime.
for var_ref in vars_to_add:
if var_ref in collection:
continue
v1.add_to_collection(collection_name, vars_to_add[var_ref])
def skip_for_tf2(f):
"""Decorator that skips tests when using TensorFlow 2."""
def test_wrapper(*args, **kwargs):
"""Wraps the decorated function to determine whether to skip."""
# Extract test case instance from args.
self = args[0]
try:
# If tf.contrib doesn't exist, we are in TF 2.0.
_ = tf.contrib
_ = tf.contrib.estimator.regression_head(
loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_BATCH_SIZE)
except (AttributeError, ImportError):
self.skipTest("Skipping test in TF 2.0.")
return f(*args, **kwargs)
return test_wrapper
def skip_for_tf1(f):
"""Decorator that skips tests when using TensorFlow 1."""
def test_wrapper(*args, **kwargs):
"""Wraps the decorated function to determine whether to skip."""
# Extract test case instance from args.
self = args[0]
try:
# If tf.contrib doesn't exist, we are in TF 2.0.
_ = tf_v2.contrib
except (AttributeError, ImportError):
return f(*args, **kwargs)
self.skipTest("Skipping test in TF 1.0.")
return f(*args, **kwargs)
return test_wrapper
def is_v2_behavior_enabled():
"""Returns if user called tf.enable_v2_behavior."""
# Since there is no actual tf.is_v2_behavior enabled, check that the
# settings were enabled.
return tf2.enabled()
def load_variable(checkpoint_path, var_name, shape, dtype):
"""Loads a variable from a given checkpoint."""
with tf.Graph().as_default():
variable = v1.get_variable(
var_name,
shape=shape,
dtype=dtype,
initializer=v1.zeros_initializer(),
trainable=False)
trackable_vars = {var_name: variable}
checkpoint = v2.train.Checkpoint(**trackable_vars)
status = checkpoint.restore(checkpoint_path)
status.expect_partial()
with v1.Session() as session:
status.initialize_or_restore(session)
return session.run(variable)
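# Usage sketch (the checkpoint path and variable name are hypothetical):
#   kernel = load_variable("/tmp/model/ckpt-10", "dense/kernel",
#                          shape=[10, 1], dtype=tf.float32)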
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Starwels developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the SegWit changeover logic."""
from test_framework.address import (
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
program_to_witness,
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
from test_framework.blocktools import witness_script, send_to_witness
from test_framework.test_framework import StarwelsTestFramework
from test_framework.util import *
from test_framework.mininode import sha256, CTransaction, CTxIn, COutPoint, CTxOut, COIN, ToHex, FromHex
from test_framework.address import script_to_p2sh, key_to_p2pkh
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE, OP_DROP
from io import BytesIO
NODE_0 = 0
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
def getutxo(txid):
utxo = {}
utxo["vout"] = 0
utxo["txid"] = txid
return utxo
def find_unspent(node, min_value):
for utxo in node.listunspent():
if utxo['amount'] >= min_value:
return utxo
class SegWitTest(StarwelsTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
# This test tests SegWit both pre and post-activation, so use the normal BIP9 activation.
self.extra_args = [["-walletprematurewitness", "-rpcserialversion=0", "-vbparams=segwit:0:999999999999", "-addresstype=legacy", "-deprecatedrpc=addwitnessaddress"],
["-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-rpcserialversion=1", "-vbparams=segwit:0:999999999999", "-addresstype=legacy", "-deprecatedrpc=addwitnessaddress"],
["-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-vbparams=segwit:0:999999999999", "-addresstype=legacy", "-deprecatedrpc=addwitnessaddress"]]
def setup_network(self):
super().setup_network()
connect_nodes(self.nodes[0], 2)
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 2)
sync_blocks(self.nodes)
def skip_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
block = node.generate(1)
assert_equal(len(node.getblock(block[0])["tx"]), 1)
sync_blocks(self.nodes)
def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
assert_raises_rpc_error(-26, error_msg, send_to_witness, 1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
def fail_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
assert_raises_rpc_error(-1, "CreateNewBlock: TestBlockValidity failed", node.generate, 1)
sync_blocks(self.nodes)
def run_test(self):
self.nodes[0].generate(161) #block 161
self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
assert(tmpl['transactions'][0]['hash'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 2)
self.nodes[0].generate(1) #block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that pay to a witness version VER pkscript for NODE's key, embedded in P2SH
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that pay to a witness version VER pkscript for NODE's key via a bare witness output
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
p2sh_addr = self.nodes[i].addwitnessaddress(newaddress)
bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False)
p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address']
bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address']
assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1]))
assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1]))
assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript))
assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript))
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
p2sh_ids[i].append([])
wit_ids[i].append([])
for i in range(5):
for n in range(3):
for v in range(2):
wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999")))
p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999")))
self.nodes[0].generate(1) #block 163
sync_blocks(self.nodes)
# Make sure all nodes recognize the transactions as theirs
assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50)
assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999"))
assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999"))
self.nodes[0].generate(260) #block 423
sync_blocks(self.nodes)
self.log.info("Verify default node can't accept any witness format txs before fork")
# unsigned, no scriptsig
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False)
# unsigned with redeem script
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0]))
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0]))
# signed
self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V1][0], True)
self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V0][0], True)
self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V1][0], True)
self.log.info("Verify witness txs are skipped for mining before the fork")
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424
self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427
# TODO: An old node would see these txs without witnesses and be able to mine them
self.log.info("Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork")
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) #block 428
self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) #block 429
self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)
self.log.info("Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork")
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, witness_script(False, self.pubkey[2])) #block 430
self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, witness_script(True, self.pubkey[2])) #block 431
self.log.info("Verify previous witness txs skipped for mining can now be mined")
assert_equal(len(self.nodes[2].getrawmempool()), 4)
block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3)
sync_blocks(self.nodes)
assert_equal(len(self.nodes[2].getrawmempool()), 0)
segwit_tx_list = self.nodes[2].getblock(block[0])["tx"]
assert_equal(len(segwit_tx_list), 5)
self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False))
assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False))
for i in range(len(segwit_tx_list)):
tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i]))
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"])
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness()))
self.log.info("Verify witness txs without witness data are invalid after the fork")
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][2], False)
self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][2], False)
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][2], False, witness_script(False, self.pubkey[2]))
self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], False, witness_script(True, self.pubkey[2]))
self.log.info("Verify default node can now use witness txs")
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 433
self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 434
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 435
self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 436
self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']})
assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data
assert(tmpl['weightlimit'] == 4000000)
assert(tmpl['sigoplimit'] == 80000)
assert(tmpl['transactions'][0]['txid'] == txid)
assert(tmpl['transactions'][0]['sigops'] == 8)
self.nodes[0].generate(1) # Mine a block to clear the gbt cache
self.log.info("Non-segwit miners are able to use GBT response after activation.")
# Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) ->
# tx2 (segwit input, paying to a non-segwit output) ->
# tx3 (non-segwit input, paying to a non-segwit output).
# tx1 is allowed to appear in the block, but no others.
txid1 = send_to_witness(1, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996"))
hex_tx = self.nodes[0].gettransaction(txid1)['hex']
tx = FromHex(CTransaction(), hex_tx)
assert(tx.wit.is_null()) # This should not be a segwit input
assert(txid1 in self.nodes[0].getrawmempool())
# Now create tx2, which will spend from txid1.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx2_hex = self.nodes[0].signrawtransaction(ToHex(tx))['hex']
txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
tx = FromHex(CTransaction(), tx2_hex)
assert(not tx.wit.is_null())
# Now create tx3, which will spend from txid2
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee
tx.calc_sha256()
txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
assert(tx.wit.is_null())
assert(txid3 in self.nodes[0].getrawmempool())
# Now try calling getblocktemplate() without segwit support.
template = self.nodes[0].getblocktemplate()
# Check that tx1 is the only transaction of the 3 in the template.
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid2 not in template_txids and txid3 not in template_txids)
assert(txid1 in template_txids)
# Check that running with segwit support results in all 3 being included.
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
template_txids = [ t['txid'] for t in template['transactions'] ]
assert(txid1 in template_txids)
assert(txid2 in template_txids)
assert(txid3 in template_txids)
# Check that wtxid is properly reported in mempool entry
assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True))
# Mine a block to clear the gbt cache again.
self.nodes[0].generate(1)
self.log.info("Verify behaviour of importaddress, addwitnessaddress and listunspent")
# Some public keys to be used later
pubkeys = [
"0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb
"02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97
"04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV
"02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd
"036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66
"0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K
"0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ
]
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn")
uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
assert not self.nodes[0].validateaddress(uncompressed_spendable_address[0])['iscompressed']
assert self.nodes[0].validateaddress(compressed_spendable_address[0])['iscompressed']
self.nodes[0].importpubkey(pubkeys[0])
compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
self.nodes[0].importpubkey(pubkeys[1])
compressed_solvable_address.append(key_to_p2pkh(pubkeys[1]))
self.nodes[0].importpubkey(pubkeys[2])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])]
spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress
spendable_after_importaddress = [] # These outputs should be seen after importaddress
solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable
unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress
solvable_anytime = [] # These outputs should be solvable after importpubkey
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address'])
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], compressed_solvable_address[1]])['address'])
unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"]
# Test multisig_without_privkey
# We have 2 public keys without private keys, use addmultisigaddress to add to wallet.
# Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address.
multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address']
script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG])
solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))
for i in compressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with compressed keys should always be spendable
spendable_anytime.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with compressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
# P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in uncompressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with uncompressed keys should always be spendable
spendable_anytime.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be spendable
spendable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress
spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# Witness output types with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in compressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
# Multisig without private is not seen after addmultisigaddress, but seen after importaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh])
# P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress
solvable_after_importaddress.extend([bare, p2sh])
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# normal P2PKH and P2PK with uncompressed keys should always be seen
solvable_anytime.extend([p2pkh, p2pk])
# P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh])
# Witness output types with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
op1 = CScript([OP_1])
op0 = CScript([OP_0])
# 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V
unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)]
unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D")
unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG])
unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)])
p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL])
p2wshop1 = CScript([OP_0, sha256(op1)])
unsolvable_after_importaddress.append(unsolvablep2pkh)
unsolvable_after_importaddress.append(unsolvablep2wshp2pkh)
unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script
unsolvable_after_importaddress.append(p2wshop1)
unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided
unsolvable_after_importaddress.append(p2shop0)
spendable_txid = []
solvable_txid = []
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1))
self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0)
importlist = []
for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
bare = hex_str_to_bytes(v['hex'])
importlist.append(bytes_to_hex_str(bare))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)])))
else:
pubkey = hex_str_to_bytes(v['pubkey'])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG])
importlist.append(bytes_to_hex_str(p2pk))
importlist.append(bytes_to_hex_str(p2pkh))
importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)])))
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)])))
importlist.append(bytes_to_hex_str(unsolvablep2pkh))
importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh))
importlist.append(bytes_to_hex_str(op1))
importlist.append(bytes_to_hex_str(p2wshop1))
for i in importlist:
# import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
# exceptions and continue.
try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True)
self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that no witness address should be returned by unsolvable addresses
for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address:
assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# addwitnessaddress should return a witness addresses even if keys are not in the wallet
self.nodes[0].addwitnessaddress(multisig_without_privkey_address)
for i in compressed_spendable_address + compressed_solvable_address:
witaddress = self.nodes[0].addwitnessaddress(i)
# addwitnessaddress should return the same address if it is a known P2SH-witness address
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1))
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
# Repeat some tests. This time we don't add witness scripts with importaddress
# Import a compressed key and an uncompressed key, generate some multisig addresses
self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]
self.nodes[0].importpubkey(pubkeys[5])
compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
self.nodes[0].importpubkey(pubkeys[6])
uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]
spendable_after_addwitnessaddress = [] # These outputs should be spendable after addwitnessaddress
solvable_after_addwitnessaddress = [] # These outputs should be seen after addwitnessaddress but not spendable
solvable_anytime = [] # These outputs should be solvable after importpubkey
unseen_anytime = [] # These outputs should never be seen
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address'])
compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
premature_witaddress = []
for i in compressed_spendable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH are always spendable
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in uncompressed_spendable_address + uncompressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
unseen_anytime.extend([p2wsh, p2sh_p2wsh])
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH, P2SH_P2WPKH with uncompressed keys are never seen
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in compressed_solvable_address:
v = self.nodes[0].validateaddress(i)
if (v['isscript']):
# P2WSH multisig without private key are seen after addwitnessaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
premature_witaddress.append(script_to_p2sh(p2wsh))
else:
[p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
# P2WPKH and P2SH_P2WPKH with compressed keys are always solvable
solvable_anytime.extend([p2wpkh, p2sh_p2wpkh])
self.mine_and_test_listunspent(spendable_anytime, 2)
self.mine_and_test_listunspent(solvable_anytime, 1)
self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0)
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
# premature_witaddress are not accepted until the script is added with addwitnessaddress first
for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
# This will raise an exception
assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# after importaddress it should pass addwitnessaddress
v = self.nodes[0].validateaddress(compressed_solvable_address[1])
self.nodes[0].importaddress(v['hex'],"",False,True)
for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
witaddress = self.nodes[0].addwitnessaddress(i)
assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress + spendable_anytime, 2))
solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress + solvable_anytime, 1))
self.mine_and_test_listunspent(unseen_anytime, 0)
# Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
v1_addr = program_to_witness(1, [3,5])
v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])],{v1_addr: 1})
v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr)
assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")
# Check that spendable outputs are really spendable
self.create_and_mine_tx_from_txids(spendable_txid)
# import all the private keys so solvable addresses become spendable
self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
self.create_and_mine_tx_from_txids(solvable_txid)
# Test that importing native P2WPKH/P2WSH scripts works
for use_p2wsh in [False, True]:
if use_p2wsh:
scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
else:
scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"
self.nodes[1].importaddress(scriptPubKey, "", False)
rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
rawtxfund = self.nodes[1].signrawtransaction(rawtxfund)["hex"]
txid = self.nodes[1].sendrawtransaction(rawtxfund)
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
# Assert it is properly saved
self.stop_node(1)
self.start_node(1)
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
def mine_and_test_listunspent(self, script_list, ismine):
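# `ismine` encodes the expectation checked below: 2 = every script is
# spendable by node0, 1 = watch-only (seen but not spendable), 0 = not seen.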
utxo = find_unspent(self.nodes[0], 50)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int('0x'+utxo['txid'],0), utxo['vout'])))
for i in script_list:
tx.vout.append(CTxOut(10000000, i))
tx.rehash()
signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
txid = self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
watchcount = 0
spendcount = 0
for i in self.nodes[0].listunspent():
if (i['txid'] == txid):
watchcount += 1
if (i['spendable'] == True):
spendcount += 1
if (ismine == 2):
assert_equal(spendcount, len(script_list))
elif (ismine == 1):
assert_equal(watchcount, len(script_list))
assert_equal(spendcount, 0)
else:
assert_equal(watchcount, 0)
return txid
def p2sh_address_to_script(self,v):
bare = CScript(hex_str_to_bytes(v['hex']))
p2sh = CScript(hex_str_to_bytes(v['scriptPubKey']))
p2wsh = CScript([OP_0, sha256(bare)])
p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])
return([bare, p2sh, p2wsh, p2sh_p2wsh])
def p2pkh_address_to_script(self,v):
pubkey = hex_str_to_bytes(v['pubkey'])
p2wpkh = CScript([OP_0, hash160(pubkey)])
p2sh_p2wpkh = CScript([OP_HASH160, hash160(p2wpkh), OP_EQUAL])
p2pk = CScript([pubkey, OP_CHECKSIG])
p2pkh = CScript(hex_str_to_bytes(v['scriptPubKey']))
p2sh_p2pk = CScript([OP_HASH160, hash160(p2pk), OP_EQUAL])
p2sh_p2pkh = CScript([OP_HASH160, hash160(p2pkh), OP_EQUAL])
p2wsh_p2pk = CScript([OP_0, sha256(p2pk)])
p2wsh_p2pkh = CScript([OP_0, sha256(p2pkh)])
p2sh_p2wsh_p2pk = CScript([OP_HASH160, hash160(p2wsh_p2pk), OP_EQUAL])
p2sh_p2wsh_p2pkh = CScript([OP_HASH160, hash160(p2wsh_p2pkh), OP_EQUAL])
return [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]
def create_and_mine_tx_from_txids(self, txids, success = True):
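# Builds a single transaction that spends every output of every listed txid,
# signs it with node0's wallet, broadcasts it, and mines a block. Used to
# prove that outputs previously reported as spendable really are spendable.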
tx = CTransaction()
for i in txids:
txtmp = CTransaction()
txraw = self.nodes[0].getrawtransaction(i)
f = BytesIO(hex_str_to_bytes(txraw))
txtmp.deserialize(f)
for j in range(len(txtmp.vout)):
tx.vin.append(CTxIn(COutPoint(int('0x'+i,0), j)))
tx.vout.append(CTxOut(0, CScript()))
tx.rehash()
signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
if __name__ == '__main__':
SegWitTest().main()
# -*- coding: utf-8 -*-
from __future__ import with_statement
from django.core.cache import cache
from django.template import Template, RequestContext
from django.test.utils import override_settings
from sekizai.context import SekizaiContext
from cms import plugin_rendering
from cms.api import create_page, add_plugin
from cms.models import Page
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_rendering import render_plugins, PluginContext, render_placeholder_toolbar
from cms.templatetags.cms_tags import _clean_key, _get_cache_key
from cms.test_utils.project.placeholderapp.models import Example1
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.context_managers import ChangeModel
from cms.test_utils.util.mock import AttributeObject
from cms.views import details
TEMPLATE_NAME = 'tests/rendering/base.html'
def sample_plugin_processor(instance, placeholder, rendered_content, original_context):
original_context_var = original_context['original_context_var']
return '%s|test_plugin_processor_ok|%s|%s|%s' % (
rendered_content,
instance.body,
placeholder.slot,
original_context_var
)
def sample_plugin_context_processor(instance, placeholder, original_context):
content = 'test_plugin_context_processor_ok|' + instance.body + '|' + \
placeholder.slot + '|' + original_context['original_context_var']
return {
'test_plugin_context_processor': content,
}
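# The two sample processors above are wired in via override_settings(
# CMS_PLUGIN_PROCESSORS=..., CMS_PLUGIN_CONTEXT_PROCESSORS=...) in
# RenderingTestCase.test_processors below.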
@override_settings(
CMS_TEMPLATES=[(TEMPLATE_NAME, TEMPLATE_NAME), ('extra_context.html', 'extra_context.html')],
)
class RenderingTestCase(CMSTestCase):
def setUp(self):
super(RenderingTestCase, self).setUp()
self.test_user = self._create_user("test", True, True)
with self.login_user_context(self.test_user):
self.test_data = {
'title': u'RenderingTestCase-title',
'slug': u'renderingtestcase-slug',
'reverse_id': u'renderingtestcase-reverse-id',
'text_main': u'RenderingTestCase-main',
'text_sub': u'RenderingTestCase-sub',
}
self.test_data2 = {
'title': u'RenderingTestCase-title2',
'slug': u'RenderingTestCase-slug2',
'reverse_id': u'renderingtestcase-reverse-id2',
}
self.test_data3 = {
'title': u'RenderingTestCase-title3',
'slug': u'RenderingTestCase-slug3',
'reverse_id': u'renderingtestcase-reverse-id3',
'text_sub': u'RenderingTestCase-sub3',
}
self.test_data4 = {
'title': u'RenderingTestCase-title3',
'no_extra': u'no extra var!',
'placeholderconf': {'extra_context': {'extra_context': {'extra_var': 'found extra var'}}},
'extra': u'found extra var',
}
self.insert_test_content()
def insert_test_content(self):
# Insert a page
p = create_page(self.test_data['title'], TEMPLATE_NAME, 'en',
slug=self.test_data['slug'], created_by=self.test_user,
reverse_id=self.test_data['reverse_id'], published=True)
# Placeholders have been inserted on post_save signal:
self.test_placeholders = {}
for placeholder in p.placeholders.all():
self.test_placeholders[placeholder.slot] = placeholder
# Insert some test Text plugins
add_plugin(self.test_placeholders['main'], 'TextPlugin', 'en',
body=self.test_data['text_main'])
add_plugin(self.test_placeholders['sub'], 'TextPlugin', 'en',
body=self.test_data['text_sub'])
p.publish('en')
# Insert another page that is not the home page
p2 = create_page(self.test_data2['title'], TEMPLATE_NAME, 'en',
parent=p, slug=self.test_data2['slug'], published=True,
reverse_id=self.test_data2['reverse_id'])
p2.publish('en')
# Insert another page that is not the home page
p3 = create_page(self.test_data3['title'], TEMPLATE_NAME, 'en',
slug=self.test_data3['slug'], parent=p2,
reverse_id=self.test_data3['reverse_id'], published=True)
# Placeholders have been inserted on post_save signal:
self.test_placeholders3 = {}
for placeholder in p3.placeholders.all():
self.test_placeholders3[placeholder.slot] = placeholder
# Insert some test Text plugins
add_plugin(self.test_placeholders3['sub'], 'TextPlugin', 'en',
body=self.test_data3['text_sub'])
p3.publish('en')
# Insert another page that is not the home
p4 = create_page(self.test_data4['title'], 'extra_context.html', 'en', parent=p)
# Placeholders have been inserted on post_save signal:
self.test_placeholders4 = {}
for placeholder in p4.placeholders.all():
self.test_placeholders4[placeholder.slot] = placeholder
# Insert some test plugins
add_plugin(self.test_placeholders4['extra_context'], 'ExtraContextPlugin', 'en')
p4.publish('en')
# Reload test pages
self.test_page = self.reload(p.publisher_public)
self.test_page2 = self.reload(p2.publisher_public)
self.test_page3 = self.reload(p3.publisher_public)
self.test_page4 = self.reload(p4.publisher_public)
def get_context(self, page, context_vars={}):
request = self.get_request(page)
return RequestContext(request, context_vars)
def get_request(self, page, *args, **kwargs):
request = super(RenderingTestCase, self).get_request(*args, **kwargs)
request.current_page = page
return request
def strip_rendered(self, content):
return content.strip().replace(u"\n", u"")
@override_settings(CMS_TEMPLATES=[(TEMPLATE_NAME, '')])
def render(self, template, page, context_vars={}):
c = self.get_context(page, context_vars)
t = Template(template)
r = t.render(c)
return self.strip_rendered(r)
@override_settings(CMS_TEMPLATES=[(TEMPLATE_NAME, '')])
def test_details_view(self):
"""
Tests that the `details` view is working.
"""
response = details(self.get_request(self.test_page), '')
response.render()
r = self.strip_rendered(response.content.decode('utf8'))
self.assertEqual(r, u'|' + self.test_data['text_main'] + u'|' + self.test_data['text_sub'] + u'|')
@override_settings(
CMS_PLUGIN_PROCESSORS=('cms.tests.rendering.sample_plugin_processor',),
CMS_PLUGIN_CONTEXT_PROCESSORS=('cms.tests.rendering.sample_plugin_context_processor',),
)
def test_processors(self):
"""
        Tests that the default plugin context processors work, that plugin processors and plugin
        context processors can be defined in settings, and that extra plugin context processors can
        be passed to PluginContext.
"""
def test_passed_plugin_context_processor(instance, placeholder, context):
return {'test_passed_plugin_context_processor': 'test_passed_plugin_context_processor_ok'}
t = u'{% load cms_tags %}' + \
u'{{ plugin.counter }}|{{ plugin.instance.body }}|{{ test_passed_plugin_context_processor }}|{{ test_plugin_context_processor }}'
instance, plugin = CMSPlugin.objects.all()[0].get_plugin_instance()
instance.render_template = Template(t)
context = PluginContext({'original_context_var': 'original_context_var_ok'}, instance,
self.test_placeholders['main'], processors=(test_passed_plugin_context_processor,))
plugin_rendering._standard_processors = {}
c = render_plugins((instance,), context, self.test_placeholders['main'])
r = "".join(c)
self.assertEqual(r, u'1|' + self.test_data[
'text_main'] + '|test_passed_plugin_context_processor_ok|test_plugin_context_processor_ok|' +
self.test_data['text_main'] + '|main|original_context_var_ok|test_plugin_processor_ok|' + self.test_data[
'text_main'] + '|main|original_context_var_ok')
plugin_rendering._standard_processors = {}
def test_placeholder(self):
"""
Tests the {% placeholder %} templatetag.
"""
t = u'{% load cms_tags %}' + \
u'|{% placeholder "main" %}|{% placeholder "empty" %}'
r = self.render(t, self.test_page)
self.assertEqual(r, u'|' + self.test_data['text_main'] + '|')
def test_placeholder_extra_context(self):
t = u'{% load cms_tags %}{% placeholder "extra_context" %}'
r = self.render(t, self.test_page4)
self.assertEqual(r, self.test_data4['no_extra'])
cache.clear()
with self.settings(CMS_PLACEHOLDER_CONF=self.test_data4['placeholderconf']):
r = self.render(t, self.test_page4)
self.assertEqual(r, self.test_data4['extra'])
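        # Note (explanatory): the CMS_PLACEHOLDER_CONF override above supplies extra context for the
        # 'extra_context' placeholder slot, which is why the second render yields 'found extra var'
        # while the first render falls back to the plugin's 'no extra var!' output.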
def test_placeholder_or(self):
"""
Tests the {% placeholder %} templatetag.
"""
t = u'{% load cms_tags %}' + \
u'|{% placeholder "empty" or %}No content{% endplaceholder %}'
r = self.render(t, self.test_page)
self.assertEqual(r, u'|No content')
def test_render_placeholder_tag(self):
"""
Tests the {% render_placeholder %} templatetag.
"""
render_placeholder_body = "I'm the render placeholder body"
ex1 = Example1(char_1="char_1", char_2="char_2", char_3="char_3",
char_4="char_4")
ex1.save()
add_plugin(ex1.placeholder, u"TextPlugin", u"en", body=render_placeholder_body)
t = '''{% extends "base.html" %}
{% load cms_tags %}
{% block content %}
<h1>{% render_placeholder ex1.placeholder %}</h1>
<h2>{% render_placeholder ex1.placeholder as tempvar %}</h2>
<h3>{{ tempvar }}</h3>
{% endblock content %}
'''
r = self.render(t, self.test_page, {'ex1': ex1})
self.assertIn(
'<h1>%s</h1>' % render_placeholder_body,
r
)
self.assertIn(
'<h2></h2>',
r
)
self.assertIn(
'<h3>%s</h3>' % render_placeholder_body,
r
)
def test_show_placeholder(self):
"""
Tests the {% show_placeholder %} templatetag, using lookup by pk/dict/reverse_id and passing a Page object.
"""
t = u'{% load cms_tags %}' + \
u'|{% show_placeholder "main" ' + str(self.test_page.pk) + ' %}' + \
u'|{% show_placeholder "main" test_dict %}' + \
u'|{% show_placeholder "sub" "' + str(self.test_page.reverse_id) + '" %}' + \
u'|{% show_placeholder "sub" test_page %}'
r = self.render(t, self.test_page, {'test_page': self.test_page, 'test_dict': {'pk': self.test_page.pk}})
self.assertEqual(r, (u'|' + self.test_data['text_main']) * 2 + (u'|' + self.test_data['text_sub']) * 2)
def test_show_placeholder_extra_context(self):
t = u'{% load cms_tags %}{% show_uncached_placeholder "extra_context" ' + str(self.test_page4.pk) + ' %}'
r = self.render(t, self.test_page4)
self.assertEqual(r, self.test_data4['no_extra'])
cache.clear()
with self.settings(CMS_PLACEHOLDER_CONF=self.test_data4['placeholderconf']):
r = self.render(t, self.test_page4)
self.assertEqual(r, self.test_data4['extra'])
def test_show_uncached_placeholder_by_pk(self):
"""
Tests the {% show_uncached_placeholder %} templatetag, using lookup by pk.
"""
template = u'{%% load cms_tags %%}{%% show_uncached_placeholder "main" %s %%}' % self.test_page.pk
output = self.render(template, self.test_page)
self.assertEqual(output, self.test_data['text_main'])
def test_show_uncached_placeholder_by_lookup_dict(self):
template = u'{% load cms_tags %}{% show_uncached_placeholder "main" test_dict %}'
output = self.render(template, self.test_page, {'test_dict': {'pk': self.test_page.pk}})
self.assertEqual(output, self.test_data['text_main'])
def test_show_uncached_placeholder_by_reverse_id(self):
template = u'{%% load cms_tags %%}{%% show_uncached_placeholder "sub" "%s" %%}' % self.test_page.reverse_id
output = self.render(template, self.test_page)
self.assertEqual(output, self.test_data['text_sub'])
def test_show_uncached_placeholder_by_page(self):
template = u'{% load cms_tags %}{% show_uncached_placeholder "sub" test_page %}'
output = self.render(template, self.test_page, {'test_page': self.test_page})
self.assertEqual(output, self.test_data['text_sub'])
def test_show_uncached_placeholder_tag_no_use_cache(self):
"""
Tests that {% show_uncached_placeholder %} does not populate cache.
"""
template = '{% load cms_tags %}<h1>{% show_uncached_placeholder "sub" test_page %}</h1>'
base_key = _get_cache_key('_show_placeholder_for_page', self.test_page, "en",
self.test_page.site_id)
cache_key = _clean_key('%s_placeholder:%s' % (base_key, "sub"))
cache_value_before = cache.get(cache_key)
output = self.render(template, self.test_page, {'test_page': self.test_page})
cache_value_after = cache.get(cache_key)
self.assertEqual(output, '<h1>%s</h1>' % self.test_data['text_sub'])
self.assertEqual(cache_value_before, cache_value_after)
self.assertIsNone(cache_value_after)
def test_page_url_by_pk(self):
template = u'{%% load cms_tags %%}{%% page_url %s %%}' % self.test_page2.pk
output = self.render(template, self.test_page)
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_dictionary(self):
template = u'{% load cms_tags %}{% page_url test_dict %}'
output = self.render(template, self.test_page, {'test_dict': {'pk': self.test_page2.pk}})
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_reverse_id(self):
template = u'{%% load cms_tags %%}{%% page_url "%s" %%}' % self.test_page2.reverse_id
output = self.render(template, self.test_page)
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_reverse_id_not_on_a_page(self):
template = u'{%% load cms_tags %%}{%% page_url "%s" %%}' % self.test_page2.reverse_id
output = self.render(template, None)
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_page(self):
template = u'{% load cms_tags %}{% page_url test_page %}'
output = self.render(template, self.test_page, {'test_page': self.test_page2})
self.assertEqual(output, self.test_page2.get_absolute_url())
def test_page_url_by_page_as(self):
template = u'{% load cms_tags %}{% page_url test_page as test_url %}{{ test_url }}'
output = self.render(template, self.test_page, {'test_page': self.test_page2})
self.assertEqual(output, self.test_page2.get_absolute_url())
#
# To ensure compatible behaviour, test that page_url swallows any
# Page.DoesNotExist exceptions when NOT in DEBUG mode.
#
@override_settings(DEBUG=False)
def test_page_url_on_bogus_page(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" %}'
output = self.render(template, self.test_page, {'test_page': self.test_page2})
self.assertEqual(output, '')
#
# To ensure compatible behaviour, test that page_url will raise a
# Page.DoesNotExist exception when the page argument does not eval to a
# valid page
#
@override_settings(DEBUG=True)
def test_page_url_on_bogus_page_in_debug(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" %}'
self.assertRaises(
Page.DoesNotExist,
self.render,
template,
self.test_page,
{'test_page': self.test_page2}
)
#
# In the 'as varname' form, ensure that the tag will always swallow
# Page.DoesNotExist exceptions both when DEBUG is False and...
#
@override_settings(DEBUG=False)
def test_page_url_as_on_bogus_page(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" as test_url %}{{ test_url }}'
output = self.render(template, self.test_page, {'test_page': self.test_page2})
self.assertEqual(output, '')
#
# ...when it is True.
#
@override_settings(DEBUG=True)
def test_page_url_as_on_bogus_page_in_debug(self):
template = u'{% load cms_tags %}{% page_url "bogus_page" as test_url %}{{ test_url }}'
output = self.render(template, self.test_page, {'test_page': self.test_page2})
self.assertEqual(output, '')
def test_page_attribute(self):
"""
Tests the {% page_attribute %} templatetag, using current page, lookup by pk/dict/reverse_id and passing a Page object.
"""
t = u'{% load cms_tags %}' + \
u'|{% page_attribute title %}' + \
u'{% page_attribute title as title %}' + \
u'|{{ title }}' + \
u'|{% page_attribute title ' + str(self.test_page2.pk) + ' %}' + \
u'{% page_attribute title ' + str(self.test_page2.pk) + ' as title %}' + \
u'|{{ title }}' + \
u'|{% page_attribute title test_dict %}' + \
u'{% page_attribute title test_dict as title %}' + \
u'|{{ title }}' + \
u'|{% page_attribute slug "' + str(self.test_page2.reverse_id) + '" %}' + \
u'{% page_attribute slug "' + str(self.test_page2.reverse_id) + '" as slug %}' + \
u'|{{ slug }}' + \
u'|{% page_attribute slug test_page %}' + \
u'{% page_attribute slug test_page as slug %}' + \
u'|{{ slug }}'
r = self.render(t, self.test_page, {'test_page': self.test_page2, 'test_dict': {'pk': self.test_page2.pk}})
self.assertEqual(r, (u'|' + self.test_data['title']) * 2 + (u'|' + self.test_data2['title']) * 4 + (
u'|' + self.test_data2['slug']) * 4)
def test_inherit_placeholder(self):
t = u'{% load cms_tags %}' + \
u'|{% placeholder "main" inherit %}|{% placeholder "sub" %}'
r = self.render(t, self.test_page3)
self.assertEqual(r, u'|' + self.test_data['text_main'] + '|' + self.test_data3['text_sub'])
def test_extra_context_isolation(self):
with ChangeModel(self.test_page, template='extra_context.html'):
response = self.client.get(self.test_page.get_absolute_url())
self.assertTrue('width' not in response.context)
def test_render_placeholder_toolbar(self):
placeholder = Placeholder()
placeholder.slot = 'test'
placeholder.pk = placeholder.id = 99
context = SekizaiContext()
context['request'] = AttributeObject(
REQUEST={'language': 'en'},
GET=[],
session={},
path='/',
user=self.test_user,
current_page=None,
method='GET',
)
classes = [
"cms_placeholder-%s" % placeholder.pk,
'cms_placeholder',
]
output = render_placeholder_toolbar(placeholder, context, 'test', 'en')
for cls in classes:
self.assertTrue(cls in output, '%r is not in %r' % (cls, output))
|
|
from configparser import ConfigParser
import os
import re
from qldsmanager import app_dir
from qldsmanager.util.matheval import eval_expr
from qldsmanager.util.filesystem import FSCheck
class AbstractConfig:
def __init__(self):
self.__config_dir = os.path.expanduser('~/.qldsmanager/') #has to end with /
if not hasattr(self, 'filename'):
self.filename = None
if not hasattr(self, 'filepath'):
self.filepath = None
        if not hasattr(self, 'required'):
            self.required = None
self._configure()
self.parser = self.__get_parser()
if self.required:
self.__check_missing()
self.extra_check()
def __get_parser(self):
parser = ConfigParser()
if self.filepath is None:
if os.path.isfile(app_dir + self.filename):
parser.read_file(open(app_dir + self.filename))
parser.read(os.path.expanduser(self.__config_dir + self.filename))
else:
parser.read(os.path.expanduser(self.filepath))
return parser
def __check_missing(self):
options = self.required
missing = self._has_missing(self.parser.sections(), options.keys())
if missing:
print('Missing sections in configuration: %s' % ', '.join(missing))
exit(10)
missing_options = dict()
for section,values in options.items():
missing = self._has_missing(self.parser.options(section), values)
if missing:
missing_options[section] = missing
if missing_options:
print('Missing options in sections\n %s' % '\n '.join([
'%s: %s' % (key, value) for (key, value) in missing_options.items()
]))
exit(11)
def _has_missing(self, data, required_keys):
missing = []
for k in required_keys:
if k not in data:
missing.append(k)
return missing
def update(self):
config_file = self.__config_dir + self.filename
if not os.path.isdir(self.__config_dir):
os.mkdir(self.__config_dir)
if os.path.isfile(config_file) and not os.access(config_file, os.W_OK):
raise IOError('Cannot write to file %s' % config_file)
with (open(config_file, 'w+')) as config_fp:
self.parser.write(config_fp)
def set(self, section: str, option: str, value):
return self.parser.set(section, option, value)
def get(self, section, option):
try:
return self.parser.get(section, option)
except:
return None
def get_config_dir(self):
return self.__config_dir
def extra_check(self):
return True
def pre_parse(self):
return True
def _configure(self):
return True
class Configuration(AbstractConfig):
def _configure(self):
self.filename = 'config'
self.required = dict(
dir=['ql', 'steamcmd'],
config=['servers']
)
class ServerConfig(AbstractConfig):
def __init__(self):
self.config = Configuration()
self.extra_required = ['net_port']
self.servers = {}
self.parameters = {}
self.defaults = {}
self.extra = {}
self.loop = {}
self._configure()
self.filepath = os.path.expanduser(self.config.get('config', 'servers'))
self.filename = os.path.basename(self.filepath)
servers_file_dir = os.path.dirname(self.filepath)
servers_file_dir_fs = FSCheck(servers_file_dir)
if not servers_file_dir_fs.exists(error=False):
os.makedirs(servers_file_dir)
servers_file_fs = FSCheck(self.filepath)
if not servers_file_fs.exists(error=False):
            open(self.filepath, 'a').close()
if not servers_file_fs.access('r'):
print('Cannot open server list configuration for reading')
exit(33)
super(ServerConfig, self).__init__()
def extra_check(self):
self.__compile()
missing_options = dict()
for sid,data in self.servers.items():
missing = self.check_required(data)
if missing:
missing_options[sid] = missing
if missing_options:
print('Missing options in servers\n %s' % '\n '.join([
'%s: %s' % (key, value) for (key, value) in missing_options.items()
]))
exit(11)
def __compile(self):
self.parameters = self.__parse_section('parameters')
self.defaults = self.__parse_section('defaults')
self.extra = self.__parse_section('extra')
for section in self.parser.sections():
name = None
if section.startswith('defaults:'):
name = section.split(':', 1)
if len(name) > 1:
self.defaults[name[1]] = self.__parse_section(section)
if section.startswith('extra:'):
name = section.split(':', 1)
if len(name) > 1:
self.extra[name[1]] = self.__parse_section(section)
if section.startswith('server:'):
sid = section.split(':', 1)
if len(sid) > 1:
self.servers[sid[1]] = self.__parse_section(section)
if name is not None and len(name) > 1:
self.loop[name[1]] = 1
global_loop = 1
tmp_servers = {}
for sid in sorted(self.servers):
extend = None
name = sid.split(':', 1)
if len(name) > 1:
extend = name[1]
tmp_servers[name[0]] = self.__parse_server(sid, extend, global_loop)
global_loop += 1
if extend is not None:
self.loop[extend] += 1
self.servers = tmp_servers
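    # Illustrative layout of the sections __compile understands (names below are examples only):
    #   [parameters]          shared values referenced as ${parameters.key}
    #   [defaults:public]     default options for servers declared as [server:<id>:public]
    #   [extra:public]        per-option wrappers applied on top of the server value (via ${self})
    #   [server:one:public]   a concrete server entry; inherits defaults:public and extra:public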
def __parse_section(self, section):
tmp = {}
if self.parser.has_section(section):
for o,v in self.parser.items(section):
tmp[o] = v
return tmp
def __parse_server(self, sid, extend, global_loop):
parsed = {}
server = self.servers[sid]
if extend is not None and extend in self.defaults:
tmp = self.defaults[extend].copy()
tmp.update(server)
server = tmp
for k,v in server.items():
if extend is not None and extend in self.extra:
if k in self.extra[extend]:
v = self.extra[extend][k].replace('${self}', v)
v = self.__replace_parameters(v, server, extend, global_loop)
parsed[k] = v
return parsed
def __replace_parameters(self, str_, server, extend, global_loop):
        # find all parameters
        for param in re.findall(r'\$\{parameters\.(\w+)\}', str_):
if param in self.parameters:
str_ = str_.replace('${parameters.' + param + '}', self.parameters[param])
        for param in re.findall(r'\$\{server\.(\w+)\}', str_):
if param in server:
str_ = str_.replace('${server.' + param + '}', server.get(param))
str_ = str_.replace('${global.loop}', str(global_loop))
if extend is not None and extend in self.loop:
str_ = str_.replace('${loop}', str(self.loop[extend]))
for math in re.findall('>>(.+)<<', str_):
str_ = str_.replace(
'>>' + math + '<<',
str(eval_expr(
self.__replace_parameters(math, server, extend, global_loop))
)
)
return str_
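    # Illustrative substitution sketch (values are hypothetical): with parameters = {'base_port': '27960'}
    # and a server option such as
    #   net_port = >>${parameters.base_port} + ${global.loop}<<
    # __replace_parameters expands the placeholders and evaluates the >>...<< expression with
    # eval_expr, so the first server would end up with net_port = 27961.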
def check_required(self, server: list):
return self._has_missing(server, self.extra_required)
class RconConfig(ServerConfig):
def _configure(self):
self.extra_required = ['zmq_rcon_port']
self.servers_file = os.path.expanduser(self.config.get('config', 'rcon'))
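# Minimal usage sketch (assumes a valid ~/.qldsmanager/config with the [dir] and [config] sections
# required by Configuration, and a readable servers file referenced by config -> servers):
#   servers = ServerConfig()
#   print(servers.servers)  # parameter-expanded server definitions keyed by server id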
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._gallery_application_versions_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_by_gallery_application_request, build_update_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class GalleryApplicationVersionsOperations:
"""GalleryApplicationVersionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
gallery_application_version: "_models.GalleryApplicationVersion",
**kwargs: Any
) -> "_models.GalleryApplicationVersion":
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryApplicationVersion"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(gallery_application_version, 'GalleryApplicationVersion')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
gallery_application_version: "_models.GalleryApplicationVersion",
**kwargs: Any
) -> AsyncLROPoller["_models.GalleryApplicationVersion"]:
"""Create or update a gallery Application Version.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Application Gallery in which the Application
Definition resides.
:type gallery_name: str
:param gallery_application_name: The name of the gallery Application Definition in which the
Application Version is to be created.
:type gallery_application_name: str
:param gallery_application_version_name: The name of the gallery Application Version to be
created. Needs to follow semantic version name pattern: The allowed characters are digit and
period. Digits must be within the range of a 32-bit integer. Format:
:code:`<MajorVersion>`.:code:`<MinorVersion>`.:code:`<Patch>`.
:type gallery_application_version_name: str
:param gallery_application_version: Parameters supplied to the create or update gallery
Application Version operation.
:type gallery_application_version:
~azure.mgmt.compute.v2019_12_01.models.GalleryApplicationVersion
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GalleryApplicationVersion or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2019_12_01.models.GalleryApplicationVersion]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryApplicationVersion"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
gallery_application_version=gallery_application_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
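    # Usage sketch (client/variable names below are illustrative, not defined in this module):
    #   poller = await compute_client.gallery_application_versions.begin_create_or_update(
    #       resource_group_name="my-rg", gallery_name="myGallery",
    #       gallery_application_name="myApp", gallery_application_version_name="1.0.0",
    #       gallery_application_version=version_model)
    #   created_version = await poller.result()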
async def _update_initial(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
gallery_application_version: "_models.GalleryApplicationVersionUpdate",
**kwargs: Any
) -> "_models.GalleryApplicationVersion":
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryApplicationVersion"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(gallery_application_version, 'GalleryApplicationVersionUpdate')
request = build_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
gallery_application_version: "_models.GalleryApplicationVersionUpdate",
**kwargs: Any
) -> AsyncLROPoller["_models.GalleryApplicationVersion"]:
"""Update a gallery Application Version.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Application Gallery in which the Application
Definition resides.
:type gallery_name: str
:param gallery_application_name: The name of the gallery Application Definition in which the
Application Version is to be updated.
:type gallery_application_name: str
:param gallery_application_version_name: The name of the gallery Application Version to be
updated. Needs to follow semantic version name pattern: The allowed characters are digit and
period. Digits must be within the range of a 32-bit integer. Format:
:code:`<MajorVersion>`.:code:`<MinorVersion>`.:code:`<Patch>`.
:type gallery_application_version_name: str
:param gallery_application_version: Parameters supplied to the update gallery Application
Version operation.
:type gallery_application_version:
~azure.mgmt.compute.v2019_12_01.models.GalleryApplicationVersionUpdate
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GalleryApplicationVersion or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2019_12_01.models.GalleryApplicationVersion]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryApplicationVersion"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
gallery_application_version=gallery_application_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
expand: Optional[Union[str, "_models.ReplicationStatusTypes"]] = None,
**kwargs: Any
) -> "_models.GalleryApplicationVersion":
"""Retrieves information about a gallery Application Version.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Application Gallery in which the Application
Definition resides.
:type gallery_name: str
:param gallery_application_name: The name of the gallery Application Definition in which the
Application Version resides.
:type gallery_application_name: str
:param gallery_application_version_name: The name of the gallery Application Version to be
retrieved.
:type gallery_application_version_name: str
:param expand: The expand expression to apply on the operation.
:type expand: str or ~azure.mgmt.compute.v2019_12_01.models.ReplicationStatusTypes
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GalleryApplicationVersion, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2019_12_01.models.GalleryApplicationVersion
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryApplicationVersion"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('GalleryApplicationVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
gallery_application_version_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Delete a gallery Application Version.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Application Gallery in which the Application
Definition resides.
:type gallery_name: str
:param gallery_application_name: The name of the gallery Application Definition in which the
Application Version resides.
:type gallery_application_name: str
:param gallery_application_version_name: The name of the gallery Application Version to be
deleted.
:type gallery_application_version_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
gallery_application_version_name=gallery_application_version_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions/{galleryApplicationVersionName}'} # type: ignore
@distributed_trace
def list_by_gallery_application(
self,
resource_group_name: str,
gallery_name: str,
gallery_application_name: str,
**kwargs: Any
) -> AsyncIterable["_models.GalleryApplicationVersionList"]:
"""List gallery Application Versions in a gallery Application Definition.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param gallery_name: The name of the Shared Application Gallery in which the Application
Definition resides.
:type gallery_name: str
:param gallery_application_name: The name of the Shared Application Gallery Application
Definition from which the Application Versions are to be listed.
:type gallery_application_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either GalleryApplicationVersionList or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2019_12_01.models.GalleryApplicationVersionList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.GalleryApplicationVersionList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_gallery_application_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
template_url=self.list_by_gallery_application.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_gallery_application_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
gallery_name=gallery_name,
gallery_application_name=gallery_application_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("GalleryApplicationVersionList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_gallery_application.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/applications/{galleryApplicationName}/versions'} # type: ignore
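    # Usage sketch (illustrative): the method returns an AsyncItemPaged that is consumed with
    # "async for", yielding individual gallery Application Version objects:
    #   async for version in compute_client.gallery_application_versions.list_by_gallery_application(
    #           "my-rg", "myGallery", "myApp"):
    #       print(version.name)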
|
|
#!/usr/bin/env python
# coding: utf-8
"""Test clusters_diff.py."""
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import print_function
import unittest
from biggraphite.cli import clusters_diff
import tempfile
import mock
import json
import collections
class TestRequest(unittest.TestCase):
"""Test the class Rquest."""
def test_prepare(self):
"""Should correctly create an http request."""
request = clusters_diff.Request('http://fakeurl.com', 'auth_key', 5.0)
self.assertEquals(request._request.get_method(), 'GET')
self.assertEquals(request._request.get_full_url(), 'http://fakeurl.com')
self.assertEquals(request._request.get_header('Authorization'), 'Basic auth_key')
def test_parse_request_result(self):
"""Should correctly parse a jsonObject into a list of DiffableTarget."""
data = [{"target": "target_1", "datapoints": [[0.1, 10], [0.2, 20]]},
{"target": "target_2", "datapoints": [[0.1, 60], [0.2, 70]]}]
data_json = json.dumps(data)
diffable_target_1 = clusters_diff.DiffableTarget("target_1", {10: 0.1, 20: 0.2})
diffable_target_2 = clusters_diff.DiffableTarget("target_2", {60: 0.1, 70: 0.2})
request = clusters_diff.Request('http://fakeurl.com', 'auth_key', 5.0)
self.assertEquals(len(request._parse_request_result(data_json)), 2)
self.assertEquals(request._parse_request_result(data_json)[0].name,
diffable_target_1.name)
self.assertEquals(request._parse_request_result(data_json)[0].ts_to_val,
diffable_target_1.ts_to_val)
self.assertEquals(request._parse_request_result(data_json)[1].name,
diffable_target_2.name)
self.assertEquals(request._parse_request_result(data_json)[1].ts_to_val,
diffable_target_2.ts_to_val)
class TestHostResult(unittest.TestCase):
"""Test the class HostResult."""
def test_host_result(self):
"""Should correctly update an HostResult."""
host_result = clusters_diff.HostResult('host')
self.assertEquals(host_result.name, 'host')
self.assertEquals(host_result.query_to_error, {})
self.assertEquals(host_result.query_to_time_s, {})
self.assertEquals(host_result.diffable_queries, [])
host_result.add_error('query', 'error')
host_result.add_time_s('query1', 1)
host_result.add_time_s('query2', 2)
host_result.add_diffable_query('diffable_query')
self.assertEquals(host_result.name, 'host')
self.assertEquals(host_result.query_to_error, {'query': 'error'})
self.assertEquals(host_result.query_to_time_s, {'query1': 1, 'query2': 2})
self.assertEquals(host_result.diffable_queries, ['diffable_query'])
def test_get_error_to_query(self):
"""Should correctly reverse query_to_error to get error_to_queries."""
host_result = clusters_diff.HostResult('host')
host_result.add_error('query1', 'error1')
host_result.add_error('query2', 'error1')
host_result.add_error('query3', 'error2')
error_to_queries = host_result.get_error_to_query()
self.assertEquals({k: sorted(v) for k, v in error_to_queries.iteritems()},
{'error1': ['query1', 'query2'], 'error2': ['query3']})
class TestDiffableTarget(unittest.TestCase):
"""Test mesure_disymmetries in DiffableTarget."""
def test_measure_disymmetry(self):
"""Should correctly mesure dissymmetries with an other instance."""
diffable_target_1 = clusters_diff.DiffableTarget("target_1", {10: 0.1, 20: 0.2})
diffable_target_2 = clusters_diff.DiffableTarget("target_2", {10: 0.3, 20: 0.6})
dissymmetry = diffable_target_1.measure_dissymmetry(diffable_target_2)
measures = dissymmetry.measures
rounded_measures = [round(i, 1) for i in measures]
self.assertEquals(rounded_measures, [0.5, 0.5])
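        # The expected 0.5 values are consistent with a relative-gap measure like |a - b| / (a + b):
        # |0.1 - 0.3| / (0.1 + 0.3) == 0.5 and |0.2 - 0.6| / (0.2 + 0.6) == 0.5. (The exact formula
        # lives in DiffableTarget.measure_dissymmetry in clusters_diff.py; this is only an
        # illustrative reading.)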
class TestDiffableQuery(unittest.TestCase):
"""Test mesure_disymmetries in DiffableQuery."""
def test_measure_disymmetry(self):
"""Should correctly mesure dissymmetries with an other instance."""
diffable_target_1 = clusters_diff.DiffableTarget("target", {10: 0.1, 20: 0.2})
diffable_query_1 = clusters_diff.DiffableQuery("query_1", [diffable_target_1], 0.01)
diffable_target_2 = clusters_diff.DiffableTarget("target", {10: 0.1, 20: 0.5})
diffable_query_2 = clusters_diff.DiffableQuery("query_2", [diffable_target_2], 0.01)
dissymmetry = diffable_query_1.measure_dissymmetry(diffable_query_2)
measures = dissymmetry.measures
rounded_measures = [round(i, 1) for i in measures]
self.assertEquals(rounded_measures, [0.5])
class TestClustersDiff(unittest.TestCase):
"""Test all methods without class of clusters_diff.py."""
def test_read_queries(self):
"""Sould correctly read inputs."""
with tempfile.NamedTemporaryFile() as tmp_file:
inputs = "\n".join([
"# comment 1",
"query1",
" query 2 ",
"query3 # comment 2 ",
" #### comment 3",
" ",
""])
tmp_file.write(inputs)
tmp_file.flush()
predicted_queries = ["query1", "query 2", "query3"]
queries = clusters_diff._read_queries(tmp_file.name)
self.assertEquals(predicted_queries, queries)
def test_get_url_from_query(self):
"""Should correctly create an url from a query."""
host = "host"
prefix = "foo."
query = "query 1,*()"
from_param = "-24hours"
until_param = "-2minutes"
predicted_url = ("http://host/render/?noCache&format=json&from=-24hours" +
"&until=-2minutes&target=foo.query%201%2C%2A%28%29")
url = clusters_diff._get_url_from_query(host, prefix, query, from_param, until_param)
self.assertEquals(predicted_url, url)
def test_fetch_queries(self):
"""Should correctly fill host_results."""
mocked_return_val = ("""[{"target": "target_1", "datapoints": [[0.1, 10], [0.2, 20]]},
{"target": "target_2", "datapoints": [[0.1, 60], [0.2, 70]]}]""")
with mock.patch('urllib2.urlopen') as urlopen:
urlopen.return_value.read.return_value = mocked_return_val
host_result = clusters_diff.fetch_queries(
"host", "prefix", "auth_key", ["query"], "-24hours", "-2minutes",
"5.0", "0.01", lambda x: x)
self.assertEquals(len(host_result.diffable_queries), 1)
self.assertEquals(len(host_result.query_to_error), 0)
self.assertEquals(len(host_result.query_to_time_s), 1)
self.assertEquals(len(host_result.diffable_queries[0].diffable_targets), 2)
self.assertEquals(host_result.name, "host")
self.assertEquals(host_result.diffable_queries[0].name, "query")
predicted_dt_1 = clusters_diff.DiffableTarget("target_1", {10: 0.1, 20: 0.2})
predicted_dt_2 = clusters_diff.DiffableTarget("target_2", {60: 0.1, 70: 0.2})
diffable_targets = host_result.diffable_queries[0].diffable_targets
self.assertEquals(predicted_dt_1.name, diffable_targets[0].name)
self.assertEquals(predicted_dt_1.ts_to_val, diffable_targets[0].ts_to_val)
self.assertEquals(predicted_dt_2.name, diffable_targets[1].name)
self.assertEquals(predicted_dt_2.ts_to_val, diffable_targets[1].ts_to_val)
def test_fetch_queries_error(self):
"""Should correctly raise RequestError."""
with mock.patch('urllib2.urlopen') as urlopen:
urlopen.side_effect = clusters_diff.RequestError('not found')
host_result = clusters_diff.fetch_queries(
"host", "prefix", "auth_key", ["query"], "-24hours", "-2minutes",
"5.0", "0.01", lambda x: None)
self.assertEquals(len(host_result.diffable_queries), 1)
self.assertEquals(len(host_result.query_to_error), 1)
self.assertEquals(len(host_result.query_to_time_s), 0)
self.assertEquals(len(host_result.diffable_queries[0].diffable_targets), 0)
self.assertEquals(host_result.name, "host")
self.assertEquals(host_result.diffable_queries[0].name, "query")
self.assertEquals(host_result.query_to_error, {'query': 'not found'})
def test_compute_pctls(self):
"""Should correctly compute pctls from mesures."""
mesures = [1, 2, 4, 8, 9, 6, 2, 4, 6, 1, 4, 5, 8, 4, 6, 7, 1, 3, 4, 8, 6, 3, 4, 5, 8, 7, 2]
pctls = clusters_diff._compute_pctls(mesures)
self.assertEquals(pctls, {50: 4, 75: 7, 90: 8, 99: 9, 99.9: 9})
def test_compute_dissymmetries_pctls(self):
"""Sould correctly compute dissymmetries pctls."""
diffable_target_1 = clusters_diff.DiffableTarget("target_1", {10: 0.1, 20: 0.2})
diffable_target_2 = clusters_diff.DiffableTarget("target_2", {60: 0.1, 70: 0.2})
args1 = [(None, None), (None, []), ([], None), ([], []), ]
for diffable_target_a, diffable_target_b in args1:
dissymmetries = clusters_diff.compute_dissymmetries(diffable_target_a,
diffable_target_b)
self.assertEquals(len(dissymmetries), 0)
args2 = [([diffable_target_1], [diffable_target_1])]
for diffable_target_a, diffable_target_b in args2:
dissymmetries = clusters_diff.compute_dissymmetries(diffable_target_a,
diffable_target_b)
self.assertEquals(len(dissymmetries), 1)
self.assertEquals(dissymmetries[0].name, "target_1")
self.assertEquals(dissymmetries[0].measures, [0, 0])
self.assertEquals(dissymmetries[0].pctls,
collections.OrderedDict(
[(50, 0.0), (75, 0.0), (90, 0.0), (99, 0.0), (99.9, 0.0)]))
args3 = [([diffable_target_1], [diffable_target_2])]
for diffable_target_a, diffable_target_b in args3:
dissymmetries = clusters_diff.compute_dissymmetries(diffable_target_a,
diffable_target_b)
self.assertEquals(len(dissymmetries), 2)
assert "target_1" in [dissymmetries[0].name, dissymmetries[1].name]
self.assertEquals(dissymmetries[0].measures, [1, 1])
self.assertEquals(dissymmetries[0].pctls,
collections.OrderedDict(
[(50, 1.0), (75, 1.0), (90, 1.0), (99, 1.0), (99.9, 1.0)]))
assert "target_2" in [dissymmetries[0].name, dissymmetries[1].name]
self.assertEquals(dissymmetries[1].measures, [1, 1])
self.assertEquals(dissymmetries[1].pctls,
collections.OrderedDict(
[(50, 1.0), (75, 1.0), (90, 1.0), (99, 1.0), (99.9, 1.0)]))
args4 = [([diffable_target_1], []), ([], [diffable_target_1])]
for diffable_target_a, diffable_target_b in args4:
dissymmetries = clusters_diff.compute_dissymmetries(diffable_target_a,
diffable_target_b)
self.assertEquals(len(dissymmetries), 1)
self.assertEquals(dissymmetries[0].name, "target_1")
self.assertEquals(dissymmetries[0].measures, [1, 1])
self.assertEquals(dissymmetries[0].pctls,
collections.OrderedDict(
[(50, 1.0), (75, 1.0), (90, 1.0), (99, 1.0), (99.9, 1.0)]))
def test_outer_join_diffables(self):
"""Should correctly compute outer join on diffable name."""
diffables_a = [
clusters_diff.DiffableTarget('1', {01: 101}),
clusters_diff.DiffableTarget('2', {01: 102}),
clusters_diff.DiffableTarget('6', {01: 106}),
clusters_diff.DiffableTarget('7', {01: 107}),
clusters_diff.DiffableTarget('9', {01: 109}),
clusters_diff.DiffableTarget('11', {01: 111}),
clusters_diff.DiffableTarget('14', {01: 114})
]
diffables_b = [
clusters_diff.DiffableTarget('2', {01: 202}),
clusters_diff.DiffableTarget('4', {01: 204}),
clusters_diff.DiffableTarget('5', {01: 205}),
clusters_diff.DiffableTarget('9', {01: 209}),
clusters_diff.DiffableTarget('12', {01: 212}),
clusters_diff.DiffableTarget('14', {01: 214})
]
result = clusters_diff._outer_join_diffables(diffables_a, diffables_b)
        formatted_result = []
for (y, z) in result:
assert not y or not z or y.name == z.name
val1 = y.ts_to_val if y else None
val2 = z.ts_to_val if z else None
            formatted_result.append((val1, val2))
predicted_result = [
({1: 111}, None), (None, {1: 212}), ({1: 114}, {1: 214}), ({1: 101}, None),
({1: 102}, {1: 202}), (None, {1: 205}), (None, {1: 204}), ({1: 107}, None),
({1: 106}, None), ({1: 109}, {1: 209})
]
        self.assertEquals(sorted(formatted_result), sorted(predicted_result))
if __name__ == '__main__':
unittest.main()
|
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import urllib
from tempest.common import http
from tempest.common import rest_client
from tempest import config
from tempest import exceptions
from xml.etree import ElementTree as etree
CONF = config.CONF
class AccountClient(rest_client.RestClient):
def __init__(self, auth_provider):
super(AccountClient, self).__init__(auth_provider)
self.service = CONF.object_storage.catalog_type
def create_account(self, data=None,
params=None,
metadata={},
remove_metadata={},
metadata_prefix='X-Account-Meta-',
remove_metadata_prefix='X-Remove-Account-Meta-'):
"""Create an account."""
url = ''
if params:
url += '?%s' % urllib.urlencode(params)
headers = {}
for key in metadata:
headers[metadata_prefix + key] = metadata[key]
for key in remove_metadata:
headers[remove_metadata_prefix + key] = remove_metadata[key]
resp, body = self.put(url, data, headers)
return resp, body
def delete_account(self, data=None, params=None):
"""Delete an account."""
url = ''
if params:
if 'bulk-delete' in params:
url += 'bulk-delete&'
url = '?%s%s' % (url, urllib.urlencode(params))
resp, body = self.delete(url, headers={}, body=data)
return resp, body
def list_account_metadata(self):
"""
HEAD on the storage URL
Returns all account metadata headers
"""
resp, body = self.head('')
return resp, body
def create_account_metadata(self, metadata,
metadata_prefix='X-Account-Meta-'):
"""Creates an account metadata entry."""
headers = {}
for key in metadata:
headers[metadata_prefix + key] = metadata[key]
resp, body = self.post('', headers=headers, body=None)
return resp, body
def delete_account_metadata(self, metadata,
metadata_prefix='X-Remove-Account-Meta-'):
"""
Deletes an account metadata entry.
"""
headers = {}
for item in metadata:
headers[metadata_prefix + item] = metadata[item]
resp, body = self.post('', headers=headers, body=None)
return resp, body
def create_and_delete_account_metadata(
self,
create_metadata=None,
delete_metadata=None,
create_metadata_prefix='X-Account-Meta-',
delete_metadata_prefix='X-Remove-Account-Meta-'):
"""
Creates and deletes an account metadata entry.
"""
headers = {}
for key in create_metadata:
headers[create_metadata_prefix + key] = create_metadata[key]
for key in delete_metadata:
headers[delete_metadata_prefix + key] = delete_metadata[key]
resp, body = self.post('', headers=headers, body=None)
return resp, body
def list_account_containers(self, params=None):
"""
GET on the (base) storage URL
        Given a valid X-Auth-Token, returns a list of all containers for the
account.
Optional Arguments:
limit=[integer value N]
Limits the number of results to at most N values
DEFAULT: 10,000
marker=[string value X]
Given string value X, return object names greater in value
than the specified marker.
DEFAULT: No Marker
format=[string value, either 'json' or 'xml']
Specify either json or xml to return the respective serialized
response.
DEFAULT: Python-List returned in response body
"""
url = '?%s' % urllib.urlencode(params) if params else ''
resp, body = self.get(url, headers={})
if params and params.get('format') == 'json':
body = json.loads(body)
elif params and params.get('format') == 'xml':
body = etree.fromstring(body)
else:
body = body.strip().splitlines()
return resp, body
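    # Usage sketch (parameter values are illustrative):
    #   resp, body = account_client.list_account_containers(params={'limit': 100, 'format': 'json'})
    #   # with format=json the body is decoded into a list of container dicts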
def list_extensions(self):
self.skip_path()
try:
resp, body = self.get('info')
finally:
self.reset_path()
body = json.loads(body)
return resp, body
class AccountClientCustomizedHeader(rest_client.RestClient):
# TODO(andreaf) This class is now redundant, to be removed in next patch
def __init__(self, auth_provider):
super(AccountClientCustomizedHeader, self).__init__(
auth_provider)
# Overwrites json-specific header encoding in rest_client.RestClient
self.service = CONF.object_storage.catalog_type
self.format = 'json'
def request(self, method, url, extra_headers=False, headers=None,
body=None):
"""A simple HTTP request interface."""
self.http_obj = http.ClosingHttp()
if headers is None:
headers = {}
elif extra_headers:
try:
headers.update(self.get_headers())
except (ValueError, TypeError):
headers = {}
# Authorize the request
req_url, req_headers, req_body = self.auth_provider.auth_request(
method=method, url=url, headers=headers, body=body,
filters=self.filters
)
# use original body
resp, resp_body = self.http_obj.request(req_url, method,
headers=req_headers,
body=req_body)
self._log_request(method, req_url, resp)
if resp.status == 401 or resp.status == 403:
raise exceptions.Unauthorized()
return resp, resp_body
def list_account_containers(self, params=None, metadata=None):
"""
GET on the (base) storage URL
Given a valid X-Auth-Token, returns a list of all containers for the
account.
Optional Arguments:
limit=[integer value N]
Limits the number of results to at most N values
DEFAULT: 10,000
marker=[string value X]
            Given string value X, return container names greater in value
than the specified marker.
DEFAULT: No Marker
format=[string value, either 'json' or 'xml']
Specify either json or xml to return the respective serialized
response.
DEFAULT: Python-List returned in response body
"""
url = '?format=%s' % self.format
if params:
            url += '&%s' % urllib.urlencode(params)
headers = {}
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
resp, body = self.get(url, headers=headers)
return resp, body
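# --- Illustrative usage sketch (not part of the original client) ---------------
# A minimal sketch, assuming an already-constructed Tempest `auth_provider`; it
# mirrors the optional `limit`/`format` parameters documented in
# list_account_containers above.
def _example_list_containers(auth_provider):
    client = AccountClient(auth_provider)
    # Request at most 100 container names, serialized as JSON.
    resp, containers = client.list_account_containers(
        params={'limit': 100, 'format': 'json'})
    return resp, containers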
|
|
'''
Train and test Neural Event Model (NEM). This module also comes with a main function that acts as a CLI for NEM.
'''
# pylint: disable=wrong-import-position
import sys
import argparse
import pickle
import os
import numpy
numpy.random.seed(21957)
from keras.models import Model, load_model
from keras.layers import Input, Dense, Dropout, Embedding, LSTM
from metrics import precision, recall, f1_score
from keras_extensions import AnyShapeEmbedding, TimeDistributedRNN, MaskedFlatten
from read_data import DataProcessor
NUM_EPOCHS = 50
PATIENCE = 5
class NEM:
'''
Neural Event Model
'''
def __init__(self, use_event_structure=True, embedding_dim=50):
self.use_event_structure = use_event_structure
self.embedding_dim = embedding_dim
self.data_processor = DataProcessor()
self.model = None
model_type = "structured" if use_event_structure else "flat"
if not os.path.exists("saved_models"):
os.makedirs("saved_models")
self.model_prefix = "saved_models/nem_%s_dim=%d" % (model_type, embedding_dim)
# Custom metrics
self.custom_objects = {"precision": precision, "recall": recall, "f1_score": f1_score}
if use_event_structure:
# Custom layers
self.custom_objects.update({"AnyShapeEmbedding": AnyShapeEmbedding,
"MaskedFlatten": MaskedFlatten,
"TimeDistributedRNN": TimeDistributedRNN})
def train_nem(self, inputs, labels, pretrained_embedding_file=None, tune_embedding=False):
'''
Train NEM. Depending on whether `use_event_structure` is set in the initializer, the model
uses either the semantic role structure or just the sentences.
'''
pretrained_embedding = None
if pretrained_embedding_file is not None:
pretrained_embedding = self.data_processor.get_embedding(pretrained_embedding_file)
if self.use_event_structure:
model = self._build_structured_model(inputs, pretrained_embedding, tune_embedding)
else:
model = self._build_flat_model(inputs, pretrained_embedding, tune_embedding)
model.summary()
model.compile("adam", "categorical_crossentropy", metrics=["accuracy", precision,
recall, f1_score])
self.model = model
best_accuracy = 0.0
best_epoch = 0
num_worse_epochs = 0
for i in range(NUM_EPOCHS):
print("Epoch %d" % i, file=sys.stdout)
history = self.model.fit(inputs, labels, epochs=1, validation_split=0.1)
validation_accuracy = history.history['val_acc'][0]
if validation_accuracy > best_accuracy:
self._save_model(i)
best_epoch = i
num_worse_epochs = 0
best_accuracy = validation_accuracy
elif validation_accuracy < best_accuracy:
num_worse_epochs += 1
if num_worse_epochs >= PATIENCE:
print("Ran out of patience. Stopping training.", file=sys.stdout)
break
self._save_model_as_best(best_epoch)
def test_nem(self, inputs, labels, output_filename=None):
'''
Evaluate NEM on unseen data.
'''
metric_values = self.model.evaluate(inputs, labels)
for metric_name, metric_value in zip(self.model.metrics_names, metric_values):
print("%s: %.4f" % (metric_name, metric_value))
        if output_filename is not None:
            predictions = self.model.predict(inputs)
            predicted_classes = numpy.argmax(predictions, axis=-1)
            # Use a context manager so the predictions file is flushed and closed.
            with open(output_filename, "w") as output_file:
                for pred_class in predicted_classes:
                    print(pred_class, file=output_file)
def _build_structured_model(self, inputs, pretrained_embedding=None, tune_embedding=False) -> Model:
# pylint: disable=too-many-locals
_, num_slots, num_words = inputs.shape
# (batch_size, num_slots, num_words)
if pretrained_embedding is None:
# Override tune_embedding if no pretrained embedding is given.
tune_embedding = True
input_layer = Input(shape=(num_slots, num_words), name="EventInput", dtype='int32')
embedding_weights = None if pretrained_embedding is None else [pretrained_embedding]
embedding = AnyShapeEmbedding(input_dim=self.data_processor.get_vocabulary_size(),
output_dim=self.embedding_dim, weights=embedding_weights,
mask_zero=True, trainable=tune_embedding, name="Embedding")
embedded_inputs = embedding(input_layer) # (batch_size, num_slots, num_words, embedding_dim)
embedded_inputs = Dropout(0.5)(embedded_inputs)
encoder = TimeDistributedRNN(LSTM(self.embedding_dim), name="ArgumentEncoder")
encoded_inputs = encoder(embedded_inputs) # (batch_size, num_slots, embedding_dim)
encoded_inputs = Dropout(0.2)(encoded_inputs)
# (batch_size, num_slots * embedding_dim)
concatenated_slots = MaskedFlatten(name="SlotConcatenator")(encoded_inputs)
# Note: We essentially have different projection weights for slots here.
event_composer = Dense(self.embedding_dim, activation='tanh', name="EventComposer")
# (batch_size, embedding_dim)
composed_event = event_composer(concatenated_slots)
# Assuming binary classification.
event_scorer = Dense(2, activation='softmax', name="EventScorer")
event_prediction = event_scorer(composed_event) # (batch_size, 2)
model = Model(inputs=input_layer, outputs=event_prediction)
return model
def _build_flat_model(self, inputs, pretrained_embedding=None, tune_embedding=False) -> Model:
# pylint: disable=too-many-locals
_, num_words = inputs.shape
if pretrained_embedding is None:
# Override tune_embedding if no pretrained embedding is given.
tune_embedding = True
input_layer = Input(shape=(num_words,), name="SentenceInput", dtype='int32')
embedding_weights = None if pretrained_embedding is None else [pretrained_embedding]
embedding = Embedding(input_dim=self.data_processor.get_vocabulary_size(), output_dim=self.embedding_dim,
weights=embedding_weights, mask_zero=True, trainable=tune_embedding,
name="Embedding")
embedded_inputs = embedding(input_layer) # (batch_size, num_words, embedding_dim)
embedded_inputs = Dropout(0.5)(embedded_inputs)
encoder = LSTM(self.embedding_dim, name="SentenceEncoder")
encoded_inputs = encoder(embedded_inputs) # (batch_size, embedding_dim)
encoded_inputs = Dropout(0.2)(encoded_inputs)
# Project encoding to make the depth of this variant comparable to that of the structured variant.
# (batch_size, embedding_dim)
projected_encoding = Dense(self.embedding_dim, activation="tanh", name="Projection")(encoded_inputs)
sentence_scorer = Dense(2, activation='softmax', name="SentenceScorer")
sentence_prediction = sentence_scorer(projected_encoding)
model = Model(inputs=input_layer, outputs=sentence_prediction)
return model
def make_inputs(self, filename: str, for_test=False, pad_info=None, include_sentences_in_events=False):
'''
Read in a file and use the data processor to make train or test inputs.
'''
add_new_words = not for_test
sentence_inputs, event_inputs, labels = self.data_processor.index_data(filename, add_new_words, pad_info,
include_sentences_in_events)
if self.use_event_structure:
return event_inputs, labels
else:
return sentence_inputs, labels
def _save_model(self, epoch: int):
model_file = "%s_%d.h5" % (self.model_prefix, epoch)
data_processor_file = "%s_dp.pkl" % self.model_prefix
self.model.save(model_file)
        with open(data_processor_file, "wb") as data_processor_out:
            pickle.dump(self.data_processor, data_processor_out)
def _save_model_as_best(self, epoch: int):
best_model_file = "%s_%d.h5" % (self.model_prefix, epoch)
new_name = "%s_best.h5" % self.model_prefix
os.rename(best_model_file, new_name)
    def load_model(self, epoch: int = None):
'''
Load a pretrained model, optionally from a specific epoch. If no epoch is specified, the model that gave
the best validation accuracy will be loaded.
'''
data_processor_file = "%s_dp.pkl" % self.model_prefix
self.data_processor = pickle.load(open(data_processor_file, "rb"))
if epoch is None:
model_file = "%s_best.h5" % self.model_prefix
else:
model_file = "%s_%d.h5" % (self.model_prefix, epoch)
self.model = load_model(model_file, custom_objects=self.custom_objects)
def main():
'''
CLI for NEM
'''
argument_parser = argparse.ArgumentParser(description="CLI for training and testing Neural Event Model (NEM)")
argument_parser.add_argument("--train_file", type=str, help="Train file (JSON). Required for training.")
argument_parser.add_argument("--test_file", type=str, help="Test file (JSON). Required for testing.")
argument_parser.add_argument("--embedding_file", type=str, help="Gzipped embedding file.")
argument_parser.add_argument("--tune_embedding", help="Tune embedding if embedding file is provided.",
action='store_true')
argument_parser.add_argument("--wanted_args", type=str, nargs='+', help="Arguments to use in the event"
" structure")
argument_parser.add_argument("--ignore_structure", help="Encode sentences instead of events.",
action='store_true')
argument_parser.add_argument("--include_sentences_in_events", help="Make the whole sentence an additional"
" argument in the event structure.", action='store_true')
argument_parser.add_argument("--embedding_dim", type=int, help="Dimensionality of the whole network.",
default=50)
argument_parser.add_argument("--output_file", type=str, help="Output file name to print predictions.")
args = argument_parser.parse_args()
use_event_structure = not args.ignore_structure
nem = NEM(use_event_structure=use_event_structure, embedding_dim=args.embedding_dim)
if args.train_file is not None:
pad_info = {"wanted_args": args.wanted_args} if args.wanted_args is not None else {}
train_inputs, train_labels = nem.make_inputs(args.train_file, for_test=False, pad_info=pad_info,
include_sentences_in_events=args.include_sentences_in_events)
nem.train_nem(train_inputs, train_labels, args.embedding_file, args.tune_embedding)
if args.test_file is not None:
# Even if we trained NEM in this run, we should load the best model.
nem.load_model()
pad_info_after_train = nem.data_processor.get_pad_info()
test_inputs, test_labels = nem.make_inputs(args.test_file, for_test=True, pad_info=pad_info_after_train,
include_sentences_in_events=args.include_sentences_in_events)
nem.test_nem(test_inputs, test_labels, output_filename=args.output_file)
if __name__ == "__main__":
main()
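# Example invocations (illustrative only; the module name and the file names below
# are placeholders, not files shipped with this code):
#   python nem.py --train_file train.json --embedding_file embeddings.txt.gz
#   python nem.py --test_file test.json --output_file predictions.txt --ignore_structure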
|
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mobly import asserts
from mobly import signals
MSG_EXPECTED_EXCEPTION = 'This is an expected exception.'
_OBJECT_1 = object()
_OBJECT_2 = object()
class AssertsTest(unittest.TestCase):
"""Verifies that asserts.xxx functions raise the correct test signals."""
def test_assert_false(self):
asserts.assert_false(False, MSG_EXPECTED_EXCEPTION)
with self.assertRaisesRegex(signals.TestFailure, MSG_EXPECTED_EXCEPTION):
asserts.assert_false(True, MSG_EXPECTED_EXCEPTION)
def test_assert_not_equal_pass(self):
asserts.assert_not_equal(1, 2)
def test_assert_not_equal_pass_with_msg_and_extras(self):
asserts.assert_not_equal(1, 2, msg='Message', extras='Extras')
def test_assert_not_equal_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_equal(1, 1)
self.assertEqual(cm.exception.details, '1 == 1')
def test_assert_not_equal_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_equal(1, 1, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details, '1 == 1 Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_almost_equal_pass(self):
asserts.assert_almost_equal(1.000001, 1.000002, places=3)
asserts.assert_almost_equal(1.0, 1.05, delta=0.1)
def test_assert_almost_equal_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_almost_equal(1, 1.0005, places=7)
self.assertRegex(cm.exception.details, r'1 != 1\.0005 within 7 places')
def test_assert_almost_equal_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_almost_equal(1,
2,
delta=0.1,
msg='Message',
extras='Extras')
self.assertRegex(cm.exception.details, r'1 != 2 within 0\.1 delta.*Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_not_almost_equal_pass(self):
asserts.assert_not_almost_equal(1.001, 1.002, places=3)
asserts.assert_not_almost_equal(1.0, 1.05, delta=0.01)
def test_assert_not_almost_equal_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_almost_equal(1, 1.0005, places=3)
self.assertRegex(cm.exception.details, r'1 == 1\.0005 within 3 places')
def test_assert_not_almost_equal_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_almost_equal(1,
2,
delta=1,
msg='Message',
extras='Extras')
self.assertRegex(cm.exception.details, r'1 == 2 within 1 delta.*Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_in_pass(self):
asserts.assert_in(1, [1, 2, 3])
asserts.assert_in(1, (1, 2, 3))
asserts.assert_in(1, {1: 2, 3: 4})
asserts.assert_in('a', 'abcd')
def test_assert_in_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_in(4, [1, 2, 3])
self.assertEqual(cm.exception.details, '4 not found in [1, 2, 3]')
def test_assert_in_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_in(4, [1, 2, 3], msg='Message', extras='Extras')
self.assertEqual(cm.exception.details, '4 not found in [1, 2, 3] Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_not_in_pass(self):
asserts.assert_not_in(4, [1, 2, 3])
asserts.assert_not_in(4, (1, 2, 3))
asserts.assert_not_in(4, {1: 2, 3: 4})
asserts.assert_not_in('e', 'abcd')
def test_assert_not_in_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_in(1, [1, 2, 3])
self.assertEqual(cm.exception.details, '1 unexpectedly found in [1, 2, 3]')
def test_assert_not_in_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_in(1, [1, 2, 3], msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
'1 unexpectedly found in [1, 2, 3] Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_is_pass(self):
asserts.assert_is(_OBJECT_1, _OBJECT_1)
def test_assert_is_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is(_OBJECT_1, _OBJECT_2)
self.assertEqual(cm.exception.details, f'{_OBJECT_1} is not {_OBJECT_2}')
def test_assert_is_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is(_OBJECT_1, _OBJECT_2, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
f'{_OBJECT_1} is not {_OBJECT_2} Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_is_not_pass(self):
asserts.assert_is_not(_OBJECT_1, _OBJECT_2)
def test_assert_is_not_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_not(_OBJECT_1, _OBJECT_1)
self.assertEqual(cm.exception.details,
f'unexpectedly identical: {_OBJECT_1}')
def test_assert_is_not_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_not(_OBJECT_1,
_OBJECT_1,
msg='Message',
extras='Extras')
self.assertEqual(cm.exception.details,
f'unexpectedly identical: {_OBJECT_1} Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_count_equal_pass(self):
asserts.assert_count_equal((1, 3, 3), [3, 1, 3])
def test_assert_count_equal_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_count_equal([3, 3], [3])
self.assertEqual(
cm.exception.details,
'Element counts were not equal:\nFirst has 2, Second has 1: 3')
def test_assert_count_equal_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_count_equal((3, 3), (4, 4), msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
('Element counts were not equal:\n'
'First has 2, Second has 0: 3\n'
'First has 0, Second has 2: 4 Message'))
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_less_pass(self):
asserts.assert_less(1.0, 2)
def test_assert_less_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_less(1, 1)
self.assertEqual(cm.exception.details, '1 not less than 1')
def test_assert_less_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_less(2, 1, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details, '2 not less than 1 Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_less_equal_pass(self):
asserts.assert_less_equal(1.0, 2)
asserts.assert_less_equal(1, 1)
def test_assert_less_equal_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_less_equal(2, 1)
self.assertEqual(cm.exception.details, '2 not less than or equal to 1')
def test_assert_less_equal_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_less_equal(2, 1, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
'2 not less than or equal to 1 Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_greater_pass(self):
asserts.assert_greater(2, 1.0)
def test_assert_greater_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_greater(1, 1)
self.assertEqual(cm.exception.details, '1 not greater than 1')
def test_assert_greater_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_greater(1, 2, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details, '1 not greater than 2 Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_greater_equal_pass(self):
asserts.assert_greater_equal(2, 1.0)
asserts.assert_greater_equal(1, 1)
def test_assert_greater_equal_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_greater_equal(1, 2)
self.assertEqual(cm.exception.details, '1 not greater than or equal to 2')
def test_assert_greater_equal_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_greater_equal(1, 2, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
'1 not greater than or equal to 2 Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_is_none_pass(self):
asserts.assert_is_none(None)
def test_assert_is_none_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_none(1)
self.assertEqual(cm.exception.details, '1 is not None')
def test_assert_is_none_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_none(1, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details, '1 is not None Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_is_not_none_pass(self):
asserts.assert_is_not_none(1)
def test_assert_is_not_none_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_not_none(None)
self.assertEqual(cm.exception.details, 'unexpectedly None')
  def test_assert_is_not_none_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_not_none(None, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details, 'unexpectedly None Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_is_instance_pass(self):
asserts.assert_is_instance('foo', str)
asserts.assert_is_instance(1, int)
asserts.assert_is_instance(1.0, float)
def test_assert_is_instance_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_instance(1, str)
self.assertEqual(cm.exception.details, f'1 is not an instance of {str}')
def test_assert_is_instance_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_is_instance(1.0, int, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
f'1.0 is not an instance of {int} Message')
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_not_is_instance_pass(self):
asserts.assert_not_is_instance('foo', int)
asserts.assert_not_is_instance(1, float)
asserts.assert_not_is_instance(1.0, int)
def test_assert_not_is_instance_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_is_instance(1, int)
self.assertEqual(cm.exception.details, f'1 is an instance of {int}')
def test_assert_not_is_instance_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_is_instance('foo', str, msg='Message', extras='Extras')
self.assertEqual(cm.exception.details,
f"'foo' is an instance of {str} Message")
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_regex_pass(self):
asserts.assert_regex('Big rocks', r'(r|m)ocks')
def test_assert_regex_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_regex('Big socks', r'(r|m)ocks')
self.assertEqual(
cm.exception.details,
"Regex didn't match: '(r|m)ocks' not found in 'Big socks'")
def test_assert_regex_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_regex('Big socks',
r'(r|m)ocks',
msg='Message',
extras='Extras')
self.assertEqual(
cm.exception.details,
("Regex didn't match: '(r|m)ocks' not found in 'Big socks' "
'Message'))
self.assertEqual(cm.exception.extras, 'Extras')
def test_assert_not_regex_pass(self):
asserts.assert_not_regex('Big socks', r'(r|m)ocks')
def test_assert_not_regex_fail(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_regex('Big rocks', r'(r|m)ocks')
self.assertEqual(
cm.exception.details,
"Regex matched: 'rocks' matches '(r|m)ocks' in 'Big rocks'")
def test_assert_not_regex_fail_with_msg_and_extras(self):
with self.assertRaises(signals.TestFailure) as cm:
asserts.assert_not_regex('Big mocks',
r'(r|m)ocks',
msg='Message',
extras='Extras')
self.assertEqual(
cm.exception.details,
("Regex matched: 'mocks' matches '(r|m)ocks' in 'Big mocks' "
'Message'))
self.assertEqual(cm.exception.extras, 'Extras')
if __name__ == '__main__':
unittest.main()
|
|
# -*- coding: utf-8 -*-
"""
Various i18n functions.
Helper functions for both the internal translation system
and for TranslateWiki-based translations.
By default messages are assumed to reside in a package called
'scripts.i18n'. In pywikibot 2.0, that package is not packaged
with pywikibot, and pywikibot 2.0 does not have a hard dependency
on any i18n messages. However, there are three user input questions
in pagegenerators which will use i18n messages if they can be loaded.
The default message location may be changed by calling
L{set_messages_package} with a package name. The package must contain
an __init__.py, and a message bundle called 'pywikibot' containing
messages. See L{twntranslate} for more information on the messages.
"""
#
# (C) Pywikibot team, 2004-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
#
import json
import os
import pkgutil
import re
from collections import defaultdict, Mapping
from warnings import warn
import pywikibot
from pywikibot import __url__
from pywikibot import config
from pywikibot.exceptions import Error
from pywikibot.plural import plural_rules
from pywikibot.tools import (
deprecated, deprecated_args, issue_deprecation_warning, StringTypes)
PLURAL_PATTERN = r'{{PLURAL:(?:%\()?([^\)]*?)(?:\)d)?\|(.*?)}}'
# Package name for the translation messages. The message data must be loaded
# relative to that package name. At the top of this package there should be
# directories named after each script/message bundle, and each directory
# should contain JSON files called <lang>.json
_messages_package_name = 'scripts.i18n'
# Flag to indicate whether translation messages are available
_messages_available = None
# Cache of translated messages
_cache = defaultdict(dict)
def set_messages_package(package_name):
"""Set the package name where i18n messages are located."""
global _messages_package_name
global _messages_available
_messages_package_name = package_name
_messages_available = None
def messages_available():
"""
Return False if there are no i18n messages available.
To determine if messages are available, it looks for the package name
set using L{set_messages_package} for a message bundle called 'pywikibot'
containing messages.
@rtype: bool
"""
global _messages_available
if _messages_available is not None:
return _messages_available
try:
mod = __import__(_messages_package_name, fromlist=[str('__path__')])
except ImportError:
_messages_available = False
return False
if not os.listdir(next(iter(mod.__path__))):
_messages_available = False
return False
_messages_available = True
return True
def _altlang(code):
"""Define fallback languages for particular languages.
If no translation is available to a specified language, translate() will
try each of the specified fallback languages, in order, until it finds
one with a translation, with 'en' and '_default' as a last resort.
For example, if for language 'xx', you want the preference of languages
to be: xx > fr > ru > en, you let this method return ['fr', 'ru'].
This code is used by other translating methods below.
@param code: The language code
@type code: string
@return: language codes
@rtype: list of str
"""
# Akan
if code in ['ak', 'tw']:
return ['ak', 'tw']
# Amharic
if code in ['aa', 'ti']:
return ['am']
# Arab
if code in ['arc', 'arz', 'so']:
return ['ar']
if code == 'kab':
return ['ar', 'fr']
# Bulgarian
if code in ['cu', 'mk']:
return ['bg', 'sr', 'sh']
# Czech
if code in ['cs', 'sk']:
return ['cs', 'sk']
# German
if code in ['bar', 'frr', 'ksh', 'pdc', 'pfl']:
return ['de']
if code == 'lb':
return ['de', 'fr']
if code in ['als', 'gsw']:
return ['als', 'gsw', 'de']
if code == 'nds':
return ['nds-nl', 'de']
if code in ['dsb', 'hsb']:
return ['hsb', 'dsb', 'de']
if code == 'sli':
return ['de', 'pl']
if code == 'rm':
return ['de', 'it']
if code == 'stq':
return ['nds', 'de']
# Greek
if code in ['grc', 'pnt']:
return ['el']
# Esperanto
if code in ['io', 'nov']:
return ['eo']
# Spanish
if code in ['an', 'arn', 'ast', 'ay', 'ca', 'ext', 'lad', 'nah', 'nv', 'qu',
'yua']:
return ['es']
if code in ['gl', 'gn']:
return ['es', 'pt']
if code == 'eu':
return ['es', 'fr']
if code == 'cbk-zam':
return ['es', 'tl']
# Estonian
if code in ['fiu-vro', 'vro']:
return ['fiu-vro', 'vro', 'et']
if code == 'liv':
return ['et', 'lv']
# Persian (Farsi)
if code == 'ps':
return ['fa']
if code in ['glk', 'mzn']:
return ['glk', 'mzn', 'fa', 'ar']
# Finnish
if code == 'vep':
return ['fi', 'ru']
if code == 'fit':
return ['fi', 'sv']
# French
if code in ['bm', 'br', 'ht', 'kg', 'ln', 'mg', 'nrm', 'pcd',
'rw', 'sg', 'ty', 'wa']:
return ['fr']
if code == 'oc':
return ['fr', 'ca', 'es']
if code in ['co', 'frp']:
return ['fr', 'it']
# Hindi
if code in ['sa']:
return ['hi']
if code in ['ne', 'new']:
return ['ne', 'new', 'hi']
if code in ['bh', 'bho']:
return ['bh', 'bho']
# Indonesian and Malay
if code in ['ace', 'bug', 'bjn', 'id', 'jv', 'ms', 'su']:
return ['id', 'ms', 'jv']
if code == 'map-bms':
return ['jv', 'id', 'ms']
# Inuit languages
if code in ['ik', 'iu']:
return ['iu', 'kl']
if code == 'kl':
return ['da', 'iu', 'no', 'nb']
# Italian
if code in ['eml', 'fur', 'lij', 'lmo', 'nap', 'pms', 'roa-tara', 'sc',
'scn', 'vec']:
return ['it']
# Lithuanian
if code in ['bat-smg', 'sgs']:
return ['bat-smg', 'sgs', 'lt']
# Latvian
if code == 'ltg':
return ['lv']
# Dutch
if code in ['af', 'fy', 'li', 'pap', 'srn', 'vls', 'zea']:
return ['nl']
    if code == 'nds-nl':
return ['nds', 'nl']
# Polish
if code in ['csb', 'szl']:
return ['pl']
# Portuguese
if code in ['fab', 'mwl', 'tet']:
return ['pt']
# Romanian
if code in ['roa-rup', 'rup']:
return ['roa-rup', 'rup', 'ro']
if code == 'mo':
return ['ro']
# Russian and Belarusian
if code in ['ab', 'av', 'ba', 'bxr', 'ce', 'cv', 'inh', 'kk', 'koi', 'krc',
'kv', 'ky', 'lbe', 'lez', 'mdf', 'mhr', 'mn', 'mrj', 'myv',
'os', 'sah', 'tg', 'udm', 'uk', 'xal']:
return ['ru']
if code in ['kbd', 'ady']:
return ['kbd', 'ady', 'ru']
if code == 'tt':
return ['tt-cyrl', 'ru']
if code in ['be', 'be-tarask']:
return ['be', 'be-tarask', 'ru']
if code == 'kaa':
return ['uz', 'ru']
# Serbocroatian
if code in ['bs', 'hr', 'sh']:
return ['sh', 'hr', 'bs', 'sr', 'sr-el']
if code == 'sr':
return ['sr-el', 'sh', 'hr', 'bs']
# Tagalog
if code in ['bcl', 'ceb', 'ilo', 'pag', 'pam', 'war']:
return ['tl']
# Turkish and Kurdish
if code in ['diq', 'ku']:
return ['ku', 'ku-latn', 'tr']
if code == 'gag':
return ['tr']
if code == 'ckb':
return ['ku']
# Ukrainian
if code in ['crh', 'crh-latn']:
return ['crh', 'crh-latn', 'uk', 'ru']
if code in ['rue']:
return ['uk', 'ru']
# Chinese
if code in ['zh-classical', 'lzh', 'minnan', 'zh-min-nan', 'nan', 'zh-tw',
'zh', 'zh-hans']:
return ['zh', 'zh-hans', 'zh-tw', 'zh-cn', 'zh-classical', 'lzh']
if code in ['cdo', 'gan', 'hak', 'ii', 'wuu', 'za', 'zh-classical', 'lzh',
'zh-cn', 'zh-yue', 'yue']:
        return ['zh', 'zh-hans', 'zh-cn', 'zh-tw', 'zh-classical', 'lzh']
# Scandinavian languages
if code in ['da', 'sv']:
return ['da', 'no', 'nb', 'sv', 'nn']
if code in ['fo', 'is']:
return ['da', 'no', 'nb', 'nn', 'sv']
if code == 'nn':
return ['no', 'nb', 'sv', 'da']
if code in ['no', 'nb']:
return ['no', 'nb', 'da', 'nn', 'sv']
if code == 'se':
return ['sv', 'no', 'nb', 'nn', 'fi']
# Other languages
if code in ['bi', 'tpi']:
return ['bi', 'tpi']
if code == 'yi':
return ['he', 'de']
if code in ['ia', 'ie']:
return ['ia', 'la', 'it', 'fr', 'es']
if code == 'xmf':
return ['ka']
if code in ['nso', 'st']:
return ['st', 'nso']
if code in ['kj', 'ng']:
return ['kj', 'ng']
if code in ['meu', 'hmo']:
return ['meu', 'hmo']
    if code == 'as':
return ['bn']
# Default value
return []
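# Illustrative fallback chains produced by the rules above (not exhaustive):
#   _altlang('eu')  -> ['es', 'fr']
#   _altlang('frp') -> ['fr', 'it']
#   _altlang('rue') -> ['uk', 'ru']
#   _altlang('en')  -> []   # no special fallback; callers append 'en'/'_default'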
class TranslationError(Error, ImportError):
"""Raised when no correct translation could be found."""
# Inherits from ImportError, as this exception is now used
# where previously an ImportError would have been raised,
# and may have been caught by scripts as such.
pass
def _get_translation(lang, twtitle):
"""
Return message of certain twtitle if exists.
For internal use, don't use it directly.
"""
if twtitle in _cache[lang]:
return _cache[lang][twtitle]
message_bundle = twtitle.split('-')[0]
trans_text = None
filename = '%s/%s.json' % (message_bundle, lang)
try:
trans_text = pkgutil.get_data(
_messages_package_name, filename).decode('utf-8')
except (OSError, IOError): # file open can cause several exceptions
_cache[lang][twtitle] = None
return
transdict = json.loads(trans_text)
_cache[lang].update(transdict)
try:
return transdict[twtitle]
except KeyError:
return
def _extract_plural(code, message, parameters):
"""Check for the plural variants in message and replace them.
@param message: the message to be replaced
@type message: unicode string
@param parameters: plural parameters passed from other methods
@type parameters: Mapping of str to int
@return: The message with the plural instances replaced
@rtype: str
"""
def static_plural_value(n):
return rule['plural']
def replace_plural(match):
selector = match.group(1)
variants = match.group(2)
num = parameters[selector]
if not isinstance(num, int):
issue_deprecation_warning(
'type {0} for value {1} ({2})'.format(type(num), selector, num),
'an int', 1)
num = int(num)
plural_entries = []
specific_entries = {}
# A plural entry can not start at the end of the variants list,
# and must end with | or the end of the variants list.
for number, plural in re.findall(r'(?!$)(?: *(\d+) *= *)?(.*?)(?:\||$)',
variants):
if number:
specific_entries[int(number)] = plural
else:
assert not specific_entries, \
'generic entries defined after specific in "{0}"'.format(variants)
plural_entries += [plural]
if num in specific_entries:
return specific_entries[num]
index = plural_value(num)
if rule['nplurals'] == 1:
assert index == 0
if index >= len(plural_entries):
raise IndexError(
'requested plural {0} for {1} but only {2} ("{3}") '
'provided'.format(
index, selector, len(plural_entries),
'", "'.join(plural_entries)))
return plural_entries[index]
assert isinstance(parameters, Mapping), \
'parameters is not Mapping but {0}'.format(type(parameters))
try:
rule = plural_rules[code]
except KeyError:
rule = plural_rules['_default']
plural_value = rule['plural']
if not callable(plural_value):
assert rule['nplurals'] == 1
plural_value = static_plural_value
return re.sub(PLURAL_PATTERN, replace_plural, message)
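# Worked example (a minimal sketch; the message text is hypothetical):
#   _extract_plural('en', 'Changing %(num)s {{PLURAL:%(num)d|page|pages}}.', {'num': 2})
#   returns 'Changing %(num)s pages.'; the remaining %(num)s placeholder is
#   filled in later by the caller (e.g. translate() or twtranslate()).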
class _PluralMappingAlias(Mapping):
"""
Aliasing class to allow non mappings in _extract_plural.
That function only uses __getitem__ so this is only implemented here.
"""
def __init__(self, source):
if isinstance(source, StringTypes):
source = int(source)
self.source = source
self.index = -1
super(_PluralMappingAlias, self).__init__()
def __getitem__(self, key):
self.index += 1
if isinstance(self.source, dict):
return int(self.source[key])
elif isinstance(self.source, (tuple, list)):
if self.index < len(self.source):
return int(self.source[self.index])
raise ValueError('Length of parameter does not match PLURAL '
'occurrences.')
else:
return self.source
def __iter__(self):
raise NotImplementedError
def __len__(self):
raise NotImplementedError
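# Behaviour sketch: with a sequence source, successive lookups walk through it, e.g.
#   alias = _PluralMappingAlias([1, 2]); alias['a'] -> 1; alias['b'] -> 2
# while an int (or numeric str) source returns the same value for every key.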
DEFAULT_FALLBACK = ('_default', )
def translate(code, xdict, parameters=None, fallback=False):
"""Return the most appropriate translation from a translation dict.
Given a language code and a dictionary, returns the dictionary's value for
key 'code' if this key exists; otherwise tries to return a value for an
alternative language that is most applicable to use on the wiki in
    language 'code', unless fallback is False.
    The language itself is always checked first, then languages that
    have been defined to be alternatives, and finally English. If none of
    the options yields a result, we just take one language from xdict, which may
    not always be the same one. When fallback is iterable, None is returned if no
    code applies (instead of picking an arbitrary one).
For PLURAL support have a look at the twtranslate method.
@param code: The language code
@type code: string or Site object
@param xdict: dictionary with language codes as keys or extended dictionary
with family names as keys containing language dictionaries or
a single (unicode) string. May contain PLURAL tags as
described in twtranslate
@type xdict: dict, string, unicode
@param parameters: For passing (plural) parameters
@type parameters: dict, string, unicode, int
@param fallback: Try an alternate language code. If it's iterable it'll
also try those entries and choose the first match.
@type fallback: boolean or iterable
@raise IndexError: If the language supports and requires more plurals than
defined for the given translation template.
"""
family = pywikibot.config.family
# If a site is given instead of a code, use its language
if hasattr(code, 'code'):
family = code.family.name
code = code.code
# Check whether xdict has multiple projects
if isinstance(xdict, dict):
if family in xdict:
xdict = xdict[family]
elif 'wikipedia' in xdict:
xdict = xdict['wikipedia']
# Get the translated string
if not isinstance(xdict, dict):
trans = xdict
elif not xdict:
trans = None
else:
codes = [code]
if fallback is True:
codes += _altlang(code) + ['_default', 'en']
elif fallback is not False:
codes += list(fallback)
for code in codes:
if code in xdict:
trans = xdict[code]
break
else:
if fallback is not True:
# this shouldn't simply return "any one" code but when fallback
# was True before 65518573d2b0, it did just that. When False it
# did just return None. It's now also returning None in the new
# iterable mode.
return
code = list(xdict.keys())[0]
trans = xdict[code]
if trans is None:
return # return None if we have no translation found
if parameters is None:
return trans
if not isinstance(parameters, Mapping):
issue_deprecation_warning('parameters not being a mapping', None, 2)
plural_parameters = _PluralMappingAlias(parameters)
else:
plural_parameters = parameters
# else we check for PLURAL variants
trans = _extract_plural(code, trans, plural_parameters)
if parameters:
try:
return trans % parameters
except (KeyError, TypeError):
# parameter is for PLURAL variants only, don't change the string
pass
return trans
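# Minimal illustrations (the dictionaries are hypothetical):
#   translate('de', {'en': 'page', 'de': 'Seite'})                  -> 'Seite'
#   translate('frp', {'en': 'page', 'fr': 'page'}, fallback=True)   -> 'page'
# The second call succeeds via the _altlang('frp') fallback chain ['fr', 'it'].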
@deprecated_args(code='source')
def twtranslate(source, twtitle, parameters=None, fallback=True,
only_plural=False):
"""
Translate a message using JSON files in messages_package_name.
fallback parameter must be True for i18n and False for L10N or testing
purposes.
Support for plural is implemented like in MediaWiki extension. If the
TranslateWiki message contains a plural tag inside which looks like::
{{PLURAL:<number>|<variant1>|<variant2>[|<variantn>]}}
    it selects the variant calculated by plural_rules for the given number
    value. Multiple plurals are allowed.
    As an example, if we had several JSON dictionaries in a test folder like:
en.json::
{
"test-plural": "Bot: Changing %(num)s {{PLURAL:%(num)d|page|pages}}.",
}
fr.json::
{
"test-plural": "Robot: Changer %(descr)s {{PLURAL:num|une page|quelques pages}}.",
}
and so on.
>>> from pywikibot import i18n
>>> i18n.set_messages_package('tests.i18n')
>>> # use a dictionary
>>> str(i18n.twtranslate('en', 'test-plural', {'num':2}))
'Bot: Changing 2 pages.'
>>> # use additional format strings
>>> str(i18n.twtranslate('fr', 'test-plural', {'num': 1, 'descr': 'seulement'}))
'Robot: Changer seulement une page.'
>>> # use format strings also outside
>>> str(i18n.twtranslate('fr', 'test-plural', {'num': 10}, only_plural=True)
... % {'descr': 'seulement'})
'Robot: Changer seulement quelques pages.'
@param source: When it's a site it's using the lang attribute and otherwise
it is using the value directly.
@type source: BaseSite or str
@param twtitle: The TranslateWiki string title, in <package>-<key> format
@param parameters: For passing parameters. It should be a mapping but for
backwards compatibility can also be a list, tuple or a single value.
They are also used for plural entries in which case they must be a
Mapping and will cause a TypeError otherwise.
@param fallback: Try an alternate language code
@type fallback: boolean
@param only_plural: Define whether the parameters should be only applied to
plural instances. If this is False it will apply the parameters also
to the resulting string. If this is True the placeholders must be
manually applied afterwards.
@type only_plural: bool
@raise IndexError: If the language supports and requires more plurals than
defined for the given translation template.
"""
if not messages_available():
raise TranslationError(
'Unable to load messages package %s for bundle %s'
'\nIt can happen due to lack of i18n submodule or files. '
'Read %s/i18n'
% (_messages_package_name, twtitle, __url__))
source_needed = False
# If a site is given instead of a lang, use its language
if hasattr(source, 'lang'):
lang = source.lang
# check whether we need the language code back
elif isinstance(source, list):
        # For backwards compatibility still support lists, from the time when
        # twntranslate was not deprecated and needed to return the used language code.
warn('The source argument should not be a list but either a BaseSite '
'or a str/unicode.', DeprecationWarning, 2)
lang = source.pop()
source_needed = True
else:
lang = source
    # There are two possible failure modes: the translation dict might not have
    # the language altogether, or a specific key could be untranslated. In both
    # cases _get_translation returns None and the TranslationError below is raised.
langs = [lang]
if fallback:
langs += _altlang(lang) + ['en']
for alt in langs:
trans = _get_translation(alt, twtitle)
if trans:
break
else:
raise TranslationError(
'No %s translation has been defined for TranslateWiki key'
' %r\nIt can happen due to lack of i18n submodule or files. '
'Read https://mediawiki.org/wiki/PWB/i18n'
% ('English' if 'en' in langs else "'%s'" % lang,
twtitle))
# send the language code back via the given mutable list parameter
if source_needed:
source.append(alt)
if '{{PLURAL:' in trans:
# _extract_plural supports in theory non-mappings, but they are
# deprecated
if not isinstance(parameters, Mapping):
raise TypeError('parameters must be a mapping.')
trans = _extract_plural(alt, trans, parameters)
# this is only the case when called in twntranslate, and that didn't apply
# parameters when it wasn't a dict
if isinstance(parameters, _PluralMappingAlias):
# This is called due to the old twntranslate function which ignored
# KeyError. Instead only_plural should be used.
if isinstance(parameters.source, dict):
try:
trans %= parameters.source
except KeyError:
pass
parameters = None
if parameters is not None and not isinstance(parameters, Mapping):
issue_deprecation_warning('parameters not being a Mapping', None, 2)
if not only_plural and parameters:
return trans % parameters
else:
return trans
@deprecated('twtranslate')
@deprecated_args(code='source')
def twntranslate(source, twtitle, parameters=None):
"""DEPRECATED: Get translated string for the key."""
if parameters is not None:
parameters = _PluralMappingAlias(parameters)
return twtranslate(source, twtitle, parameters)
@deprecated_args(code='source')
def twhas_key(source, twtitle):
"""
Check if a message has a translation in the specified language code.
    The translations are retrieved from i18n.<package>, based on the caller's
    import table.
No code fallback is made.
@param source: When it's a site it's using the lang attribute and otherwise
it is using the value directly.
@type source: BaseSite or str
@param twtitle: The TranslateWiki string title, in <package>-<key> format
"""
# If a site is given instead of a code, use its language
lang = getattr(source, 'lang', source)
transdict = _get_translation(lang, twtitle)
return transdict is not None
def twget_keys(twtitle):
"""
Return all language codes for a special message.
@param twtitle: The TranslateWiki string title, in <package>-<key> format
@raises OSError: the package i18n can not be loaded
"""
# obtain the directory containing all the json files for this package
package = twtitle.split("-")[0]
mod = __import__(_messages_package_name, fromlist=[str('__file__')])
pathname = os.path.join(next(iter(mod.__path__)), package)
# build a list of languages in that directory
langs = [filename.partition('.')[0]
for filename in sorted(os.listdir(pathname))
if filename.endswith('.json')]
    # exclude languages that do not have this specific message in that package
# i.e. an incomplete set of translated messages.
return [lang for lang in langs
if lang != 'qqq' and
_get_translation(lang, twtitle)]
def input(twtitle, parameters=None, password=False, fallback_prompt=None):
"""
Ask the user a question, return the user's answer.
The prompt message is retrieved via L{twtranslate} and uses the
config variable 'userinterface_lang'.
@param twtitle: The TranslateWiki string title, in <package>-<key> format
@param parameters: The values which will be applied to the translated text
@param password: Hides the user's input (for password entry)
@param fallback_prompt: The English prompt if i18n is not available.
@rtype: unicode string
"""
if not messages_available():
if not fallback_prompt:
raise TranslationError(
'Unable to load messages package %s for bundle %s'
% (_messages_package_name, twtitle))
else:
prompt = fallback_prompt
else:
code = config.userinterface_lang
prompt = twtranslate(code, twtitle, parameters)
return pywikibot.input(prompt, password)
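# Usage sketch (illustrative; 'pywikibot-example-question' is a hypothetical key):
#   answer = input('pywikibot-example-question',
#                  fallback_prompt='Please enter a value:')
# Note that fallback_prompt is only used when the messages package is unavailable;
# if messages are available but the key is missing, TranslationError is raised.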
|
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
import typing
from collections import defaultdict
from collections.abc import Iterable
from dataclasses import dataclass, field
from enum import Enum
from typing import Any, DefaultDict, Dict, Set, Type, cast
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.engine.goal import GoalSubsystem
from pants.engine.rules import Rule, RuleIndex
from pants.engine.target import Target
from pants.engine.unions import UnionRule
from pants.option.global_options import GlobalOptions
from pants.option.optionable import Optionable
from pants.option.scope import normalize_scope
from pants.util.ordered_set import FrozenOrderedSet, OrderedSet
from pants.vcs.changed import Changed
logger = logging.getLogger(__name__)
# No goal or target_type can have a name from this set, so that `./pants help <name>`
# is unambiguous.
_RESERVED_NAMES = {"global", "targets", "goals"}
# Subsystems used outside of any rule.
_GLOBAL_SUBSYSTEMS: FrozenOrderedSet[Type[Optionable]] = FrozenOrderedSet({GlobalOptions, Changed})
@dataclass(frozen=True)
class BuildConfiguration:
"""Stores the types and helper functions exposed to BUILD files."""
registered_aliases: BuildFileAliases
optionables: FrozenOrderedSet[Type[Optionable]]
rules: FrozenOrderedSet[Rule]
union_rules: FrozenOrderedSet[UnionRule]
target_types: FrozenOrderedSet[Type[Target]]
allow_unknown_options: bool
@property
def all_optionables(self) -> FrozenOrderedSet[Type[Optionable]]:
"""Return all optionables in the system: global and those registered via rule usage."""
return _GLOBAL_SUBSYSTEMS | self.optionables
def __post_init__(self) -> None:
class Category(Enum):
goal = "goal"
reserved_name = "reserved name"
subsystem = "subsystem"
target_type = "target type"
name_to_categories: DefaultDict[str, Set[Category]] = defaultdict(set)
normalized_to_orig_name: Dict[str, str] = {}
for opt in self.all_optionables:
scope = cast(str, opt.options_scope)
normalized_scope = normalize_scope(scope)
name_to_categories[normalized_scope].add(
Category.goal if issubclass(opt, GoalSubsystem) else Category.subsystem
)
normalized_to_orig_name[normalized_scope] = scope
for tgt_type in self.target_types:
name_to_categories[normalize_scope(tgt_type.alias)].add(Category.target_type)
for reserved_name in _RESERVED_NAMES:
name_to_categories[normalize_scope(reserved_name)].add(Category.reserved_name)
found_collision = False
for name, cats in name_to_categories.items():
if len(cats) > 1:
scats = sorted(cat.value for cat in cats)
cats_str = ", ".join(f"a {cat}" for cat in scats[:-1]) + f" and a {scats[-1]}."
colliding_names = "`/`".join(
sorted({name, normalized_to_orig_name.get(name, name)})
)
logger.error(f"Naming collision: `{colliding_names}` is registered as {cats_str}")
found_collision = True
if found_collision:
raise TypeError("Found naming collisions. See log for details.")
@dataclass
class Builder:
_exposed_object_by_alias: Dict[Any, Any] = field(default_factory=dict)
_exposed_context_aware_object_factory_by_alias: Dict[Any, Any] = field(default_factory=dict)
_optionables: OrderedSet = field(default_factory=OrderedSet)
_rules: OrderedSet = field(default_factory=OrderedSet)
_union_rules: OrderedSet = field(default_factory=OrderedSet)
_target_types: OrderedSet[Type[Target]] = field(default_factory=OrderedSet)
_allow_unknown_options: bool = False
def registered_aliases(self) -> BuildFileAliases:
"""Return the registered aliases exposed in BUILD files.
These returned aliases aren't so useful for actually parsing BUILD files.
They are useful for generating online documentation.
:returns: A new BuildFileAliases instance containing this BuildConfiguration's
registered alias mappings.
"""
return BuildFileAliases(
objects=self._exposed_object_by_alias.copy(),
context_aware_object_factories=self._exposed_context_aware_object_factory_by_alias.copy(),
)
def register_aliases(self, aliases):
"""Registers the given aliases to be exposed in parsed BUILD files.
:param aliases: The BuildFileAliases to register.
:type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
"""
if not isinstance(aliases, BuildFileAliases):
raise TypeError("The aliases must be a BuildFileAliases, given {}".format(aliases))
for alias, obj in aliases.objects.items():
self._register_exposed_object(alias, obj)
for (
alias,
context_aware_object_factory,
) in aliases.context_aware_object_factories.items():
self._register_exposed_context_aware_object_factory(
alias, context_aware_object_factory
)
def _register_exposed_object(self, alias, obj):
if alias in self._exposed_object_by_alias:
logger.debug(
"Object alias {} has already been registered. Overwriting!".format(alias)
)
self._exposed_object_by_alias[alias] = obj
# obj doesn't implement any common base class, so we have to test for this attr.
if hasattr(obj, "subsystems"):
self.register_optionables(obj.subsystems())
def _register_exposed_context_aware_object_factory(
self, alias, context_aware_object_factory
):
if alias in self._exposed_context_aware_object_factory_by_alias:
logger.debug(
"This context aware object factory alias {} has already been registered. "
"Overwriting!".format(alias)
)
self._exposed_context_aware_object_factory_by_alias[
alias
] = context_aware_object_factory
# NB: We expect the parameter to be Iterable[Type[Optionable]], but we can't be confident
# in this because we pass whatever people put in their `register.py`s to this function;
# I.e., this is an impure function that reads from the outside world. So, we use the type
# hint `Any` and perform runtime type checking.
def register_optionables(self, optionables: typing.Iterable[Type[Optionable]] | Any):
"""Registers the given subsystem types."""
if not isinstance(optionables, Iterable):
raise TypeError("The optionables must be an iterable, given {}".format(optionables))
optionables = tuple(optionables)
if not optionables:
return
invalid_optionables = [
s for s in optionables if not isinstance(s, type) or not issubclass(s, Optionable)
]
if invalid_optionables:
raise TypeError(
"The following items from the given optionables are not Optionable "
"subclasses:\n\t{}".format("\n\t".join(str(i) for i in invalid_optionables))
)
self._optionables.update(optionables)
def register_rules(self, rules):
"""Registers the given rules.
            :param rules: The rules to register.
:type rules: :class:`collections.Iterable` containing
:class:`pants.engine.rules.Rule` instances.
"""
if not isinstance(rules, Iterable):
raise TypeError("The rules must be an iterable, given {!r}".format(rules))
# "Index" the rules to normalize them and expand their dependencies.
rule_index = RuleIndex.create(rules)
self._rules.update(rule_index.rules)
self._rules.update(rule_index.queries)
self._union_rules.update(rule_index.union_rules)
self.register_optionables(
rule.output_type for rule in self._rules if issubclass(rule.output_type, Optionable)
)
# NB: We expect the parameter to be Iterable[Type[Target]], but we can't be confident in
# this because we pass whatever people put in their `register.py`s to this function;
# I.e., this is an impure function that reads from the outside world. So, we use the type
# hint `Any` and perform runtime type checking.
def register_target_types(self, target_types: typing.Iterable[Type[Target]] | Any) -> None:
"""Registers the given target types."""
if not isinstance(target_types, Iterable):
raise TypeError(
f"The entrypoint `target_types` must return an iterable. "
f"Given {repr(target_types)}"
)
bad_elements = [
tgt_type
for tgt_type in target_types
if not isinstance(tgt_type, type) or not issubclass(tgt_type, Target)
]
if bad_elements:
raise TypeError(
"Every element of the entrypoint `target_types` must be a subclass of "
f"{Target.__name__}. Bad elements: {bad_elements}."
)
self._target_types.update(target_types)
def allow_unknown_options(self, allow: bool = True) -> None:
"""Allows overriding whether Options parsing will fail for unrecognized Options.
Used to defer options failures while bootstrapping BuildConfiguration until after the
complete set of plugins is known.
"""
            self._allow_unknown_options = allow
def create(self) -> BuildConfiguration:
registered_aliases = BuildFileAliases(
objects=self._exposed_object_by_alias.copy(),
context_aware_object_factories=self._exposed_context_aware_object_factory_by_alias.copy(),
)
return BuildConfiguration(
registered_aliases=registered_aliases,
optionables=FrozenOrderedSet(self._optionables),
rules=FrozenOrderedSet(self._rules),
union_rules=FrozenOrderedSet(self._union_rules),
target_types=FrozenOrderedSet(self._target_types),
allow_unknown_options=self._allow_unknown_options,
)
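# Usage sketch (illustrative; `my_rules()` and `MyTarget` stand in for a plugin's
# real entry points and are not defined in this module):
#
#   builder = BuildConfiguration.Builder()
#   builder.register_rules(my_rules())
#   builder.register_target_types([MyTarget])
#   build_configuration = builder.create()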
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Home of estimator related functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.client import session
from tensorflow.python.estimator import estimator as estimator_lib
from tensorflow.python.estimator import export as export_lib
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras import models
from tensorflow.python.keras._impl.keras.utils.generic_utils import CustomObjectScope
from tensorflow.python.ops import metrics as metrics_module
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import training_util
_DEFAULT_SERVING_KEY = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
def _create_ordered_io(keras_model, estimator_io_dict, is_input=True):
"""Create a list of tensors from IO dictionary based on Keras IO order.
Args:
keras_model: an instance of compiled keras model.
estimator_io_dict: features or labels dictionary from model_fn.
is_input: True if dictionary is for inputs.
Returns:
a list of tensors based on Keras IO order.
Raises:
ValueError: if dictionary keys cannot be found in Keras model input_names
or output_names.
"""
if is_input:
keras_io_names = keras_model.input_names
else:
keras_io_names = keras_model.output_names
for key in estimator_io_dict:
if key not in keras_io_names:
raise ValueError(
'Cannot find %s with name "%s" in Keras Model. It needs to match '
'one of the following: %s' % ('input' if is_input else 'output', key,
', '.join(keras_io_names)))
tensors = []
for io_name in keras_io_names:
tensors.append(estimator_io_dict[io_name])
return tensors
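# Illustration (hypothetical names): if keras_model.input_names == ['image', 'mask']
# and the features dict is {'mask': mask_t, 'image': image_t}, the returned list is
# [image_t, mask_t], i.e. the dict values reordered to match the Keras input order.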
def _clone_and_build_model(mode,
keras_model,
custom_objects,
features=None,
labels=None):
"""Clone and build the given keras_model.
Args:
mode: training mode.
keras_model: an instance of compiled keras model.
custom_objects: Dictionary for custom objects.
    features: Dict mapping input names to input tensors, or None.
    labels: Dict mapping output names to target tensors, a single tensor, or
      None.
Returns:
The newly built model.
"""
# Set to True during training, False for inference.
K.set_learning_phase(mode == model_fn_lib.ModeKeys.TRAIN)
# Clone keras model.
input_tensors = None if features is None else _create_ordered_io(
keras_model, features)
if custom_objects:
with CustomObjectScope(custom_objects):
model = models.clone_model(keras_model, input_tensors=input_tensors)
else:
model = models.clone_model(keras_model, input_tensors=input_tensors)
# Compile/Build model
if mode is model_fn_lib.ModeKeys.PREDICT and not model.built:
model.build()
else:
optimizer_config = keras_model.optimizer.get_config()
optimizer = keras_model.optimizer.__class__.from_config(optimizer_config)
optimizer.iterations = training_util.get_or_create_global_step()
# Get list of outputs.
if labels is None:
target_tensors = None
elif isinstance(labels, dict):
target_tensors = _create_ordered_io(keras_model, labels, is_input=False)
else:
target_tensors = [
sparse_tensor_lib.convert_to_tensor_or_sparse_tensor(labels)
]
model.compile(
optimizer,
keras_model.loss,
metrics=keras_model.metrics,
loss_weights=keras_model.loss_weights,
sample_weight_mode=keras_model.sample_weight_mode,
weighted_metrics=keras_model.weighted_metrics,
target_tensors=target_tensors)
if isinstance(model, models.Sequential):
model = model.model
return model
def _create_keras_model_fn(keras_model, custom_objects=None):
"""Creates model_fn for keras Estimator.
Args:
keras_model: an instance of compiled keras model.
custom_objects: Dictionary for custom objects.
Returns:
The model_fn for a keras Estimator.
"""
def model_fn(features, labels, mode):
"""model_fn for keras Estimator."""
model = _clone_and_build_model(mode, keras_model, custom_objects, features,
labels)
# Get inputs to EstimatorSpec
predictions = dict(zip(model.output_names, model.outputs))
loss = None
train_op = None
eval_metric_ops = None
# Set loss and metric only during train and evaluate.
if mode is not model_fn_lib.ModeKeys.PREDICT:
model._make_train_function() # pylint: disable=protected-access
loss = model.total_loss
if model.metrics:
eval_metric_ops = {}
# When each metric maps to an output
if isinstance(model.metrics, dict):
for i, output_name in enumerate(model.metrics.keys()):
metric_name = model.metrics[output_name]
if callable(metric_name):
metric_name = metric_name.__name__
# When some outputs use the same metric
if list(model.metrics.values()).count(metric_name) > 1:
metric_name += '_' + output_name
eval_metric_ops[metric_name] = metrics_module.mean(
model.metrics_tensors[i - len(model.metrics)])
else:
for i, metric_name in enumerate(model.metrics):
if callable(metric_name):
metric_name = metric_name.__name__
eval_metric_ops[metric_name] = metrics_module.mean(
model.metrics_tensors[i])
# Set train_op only during train.
if mode is model_fn_lib.ModeKeys.TRAIN:
train_op = model.train_function.updates_op
return model_fn_lib.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
export_outputs={
_DEFAULT_SERVING_KEY:
export_lib.export_output.PredictOutput(predictions)
})
return model_fn
def _save_first_checkpoint(keras_model, estimator, custom_objects,
keras_weights):
"""Save first checkpoint for the keras Estimator.
Args:
keras_model: an instance of compiled keras model.
estimator: keras estimator.
custom_objects: Dictionary for custom objects.
keras_weights: A flat list of Numpy arrays for weights of given keras_model.
"""
with ops.Graph().as_default() as g, g.device(estimator._device_fn):
random_seed.set_random_seed(estimator.config.tf_random_seed)
training_util.create_global_step()
model = _clone_and_build_model(model_fn_lib.ModeKeys.TRAIN, keras_model,
custom_objects)
if isinstance(model, models.Sequential):
model = model.model
# Load weights and save to checkpoint if there is no checkpoint
latest_path = saver_lib.latest_checkpoint(estimator.model_dir)
if not latest_path:
with session.Session() as sess:
model.set_weights(keras_weights)
# Make update ops and initialize all variables.
if not model.train_function:
# pylint: disable=protected-access
model._make_train_function()
K._initialize_variables(sess)
# pylint: enable=protected-access
saver = saver_lib.Saver()
saver.save(sess, os.path.join(estimator.model_dir, 'keras_model.ckpt'))
def model_to_estimator(keras_model=None,
keras_model_path=None,
custom_objects=None,
model_dir=None,
config=None):
"""Constructs an `Estimator` instance from given keras model.
For usage example, please see
@{$programmers_guide/estimators$creating_estimators_from_keras_models}.
Args:
keras_model: Keras model in memory.
    keras_model_path: Path to a compiled Keras model saved on disk.
    custom_objects: Dictionary for custom objects.
    model_dir: Directory to save Estimator model parameters, graph, etc.
config: Configuration object.
Returns:
An Estimator from given keras model.
Raises:
ValueError: if neither keras_model nor keras_model_path was given.
    ValueError: if both keras_model and keras_model_path were given.
ValueError: if the keras_model_path is a GCS URI.
ValueError: if keras_model has not been compiled.
"""
if (not keras_model) and (not keras_model_path):
raise ValueError(
'Either keras_model or keras_model_path needs to be provided.')
if keras_model and keras_model_path:
raise ValueError(
        'Please specify either keras_model or keras_model_path but not both.')
if not keras_model:
if keras_model_path.startswith(
'gs://') or 'storage.googleapis.com' in keras_model_path:
raise ValueError(
'%s is not a local path. Please copy the model locally first.' %
keras_model_path)
    logging.info('Loading the Keras model from %s', keras_model_path)
keras_model = models.load_model(keras_model_path)
else:
    logging.info('Using the Keras model from memory.')
if not hasattr(keras_model, 'optimizer'):
raise ValueError(
'Given keras model has not been compiled yet. Please compile first '
'before creating the estimator.')
keras_weights = keras_model.get_weights()
keras_model_fn = _create_keras_model_fn(keras_model, custom_objects)
est = estimator_lib.Estimator(
keras_model_fn, model_dir=model_dir, config=config)
# TODO(yifeif): move checkpoint initialization to scaffold.init_fn
_save_first_checkpoint(keras_model, est, custom_objects, keras_weights)
return est
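# Illustrative usage sketch (not part of the original module). `my_model` and
# `my_input_fn` below are placeholders for a compiled Keras model and an
# Estimator input function supplied by the caller:
#
#   estimator = model_to_estimator(keras_model=my_model,
#                                  model_dir='/tmp/keras_estimator')
#   estimator.train(input_fn=my_input_fn, steps=100)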
|
|
# encoding: utf-8
"""Gherkin step implementations for chart data features."""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
import datetime
from behave import given, then, when
from pptx.chart.data import (
BubbleChartData, Category, CategoryChartData, XyChartData
)
from pptx.enum.chart import XL_CHART_TYPE
from pptx.util import Inches
# given ===================================================
@given('a BubbleChartData object with number format {strval}')
def given_a_BubbleChartData_object_with_number_format(context, strval):
params = {}
if strval != 'None':
params['number_format'] = int(strval)
context.chart_data = BubbleChartData(**params)
@given('a Categories object with number format {init_nf}')
def given_a_Categories_object_with_number_format_init_nf(context, init_nf):
categories = CategoryChartData().categories
if init_nf != 'left as default':
categories.number_format = init_nf
context.categories = categories
@given('a Category object')
def given_a_Category_object(context):
context.category = Category(None, None)
@given('a CategoryChartData object')
def given_a_CategoryChartData_object(context):
context.chart_data = CategoryChartData()
@given('a CategoryChartData object having date categories')
def given_a_CategoryChartData_object_having_date_categories(context):
chart_data = CategoryChartData()
chart_data.categories = [
datetime.date(2016, 12, 27),
datetime.date(2016, 12, 28),
datetime.date(2016, 12, 29),
]
context.chart_data = chart_data
@given('a CategoryChartData object with number format {strval}')
def given_a_CategoryChartData_object_with_number_format(context, strval):
params = {}
if strval != 'None':
params['number_format'] = int(strval)
context.chart_data = CategoryChartData(**params)
@given('a XyChartData object with number format {strval}')
def given_a_XyChartData_object_with_number_format(context, strval):
params = {}
if strval != 'None':
params['number_format'] = int(strval)
context.chart_data = XyChartData(**params)
@given('the categories are of type {type_}')
def given_the_categories_are_of_type(context, type_):
label = {
'date': datetime.date(2016, 12, 22),
'float': 42.24,
'int': 42,
'str': 'foobar',
}[type_]
context.categories.add_category(label)
# when ====================================================
@when('I add a bubble data point with number format {strval}')
def when_I_add_a_bubble_data_point_with_number_format(context, strval):
series_data = context.series_data
params = {'x': 1, 'y': 2, 'size': 10}
if strval != 'None':
params['number_format'] = int(strval)
context.data_point = series_data.add_data_point(**params)
@when('I add a data point with number format {strval}')
def when_I_add_a_data_point_with_number_format(context, strval):
series_data = context.series_data
params = {'value': 42}
if strval != 'None':
params['number_format'] = int(strval)
context.data_point = series_data.add_data_point(**params)
@when('I add an XY data point with number format {strval}')
def when_I_add_an_XY_data_point_with_number_format(context, strval):
series_data = context.series_data
params = {'x': 1, 'y': 2}
if strval != 'None':
params['number_format'] = int(strval)
context.data_point = series_data.add_data_point(**params)
@when('I add an {xy_type} chart having 2 series of 3 points each')
def when_I_add_an_xy_chart_having_2_series_of_3_points(context, xy_type):
chart_type = getattr(XL_CHART_TYPE, xy_type)
data = (
('Series 1', ((-0.1, 0.5), (16.2, 0.0), (8.0, 0.2))),
('Series 2', ((12.4, 0.8), (-7.5, -0.5), (-5.1, -0.2)))
)
chart_data = XyChartData()
for series_data in data:
series_label, points = series_data
series = chart_data.add_series(series_label)
for point in points:
x, y = point
series.add_data_point(x, y)
context.chart = context.slide.shapes.add_chart(
chart_type, Inches(1), Inches(1), Inches(8), Inches(5), chart_data
).chart
@when("I assign ['a', 'b', 'c'] to chart_data.categories")
def when_I_assign_a_b_c_to_chart_data_categories(context):
chart_data = context.chart_data
chart_data.categories = ['a', 'b', 'c']
# then ====================================================
@then("[c.label for c in chart_data.categories] is ['a', 'b', 'c']")
def then_c_label_for_c_in_chart_data_categories_is_a_b_c(context):
chart_data = context.chart_data
assert [c.label for c in chart_data.categories] == ['a', 'b', 'c']
@then('categories.number_format is {value}')
def then_categories_number_format_is_value(context, value):
expected_value = value
number_format = context.categories.number_format
assert number_format == expected_value, 'got %s' % number_format
@then('category.add_sub_category(name) is a Category object')
def then_category_add_sub_category_is_a_Category_object(context):
category = context.category
context.sub_category = sub_category = category.add_sub_category('foobar')
assert type(sub_category).__name__ == 'Category'
@then('category.sub_categories[-1] is the new category')
def then_category_sub_categories_minus_1_is_the_new_category(context):
category, sub_category = context.category, context.sub_category
assert category.sub_categories[-1] is sub_category
@then('chart_data.add_category(name) is a Category object')
def then_chart_data_add_category_name_is_a_Category_object(context):
chart_data = context.chart_data
context.category = category = chart_data.add_category('foobar')
assert type(category).__name__ == 'Category'
@then('chart_data.add_series(name, values) is a CategorySeriesData object')
def then_chart_data_add_series_is_a_CategorySeriesData_object(context):
chart_data = context.chart_data
context.series = series = chart_data.add_series('Series X', (1, 2, 3))
assert type(series).__name__ == 'CategorySeriesData'
@then('chart_data.categories is a Categories object')
def then_chart_data_categories_is_a_Categories_object(context):
chart_data = context.chart_data
assert type(chart_data.categories).__name__ == 'Categories'
@then('chart_data.categories[-1] is the category')
def then_chart_data_categories_minus_1_is_the_category(context):
chart_data, category = context.chart_data, context.category
assert chart_data.categories[-1] is category
@then('chart_data.number_format is {value_str}')
def then_chart_data_number_format_is(context, value_str):
chart_data = context.chart_data
number_format = value_str if value_str == 'General' else int(value_str)
assert chart_data.number_format == number_format
@then('chart_data[-1] is the new series')
def then_chart_data_minus_1_is_the_new_series(context):
chart_data, series = context.chart_data, context.series
assert chart_data[-1] is series
@then('series_data.number_format is {value_str}')
def then_series_data_number_format_is(context, value_str):
series_data = context.series_data
number_format = value_str if value_str == 'General' else int(value_str)
assert series_data.number_format == number_format
|
|
""" test scalar indexing, including at and iat """
import pytest
import numpy as np
from pandas import (Series, DataFrame, Timestamp,
Timedelta, date_range)
from pandas.util import testing as tm
from pandas.tests.indexing.common import Base
class TestScalar(Base):
def test_at_and_iat_get(self):
def _check(f, func, values=False):
if f is not None:
                indices = self.generate_indices(f, values)
                for i in indices:
result = getattr(f, func)[i]
expected = self.get_value(f, i, values)
tm.assert_almost_equal(result, expected)
for o in self._objs:
d = getattr(self, o)
# iat
for f in [d['ints'], d['uints']]:
_check(f, 'iat', values=True)
for f in [d['labels'], d['ts'], d['floats']]:
if f is not None:
pytest.raises(ValueError, self.check_values, f, 'iat')
# at
for f in [d['ints'], d['uints'], d['labels'],
d['ts'], d['floats']]:
_check(f, 'at')
def test_at_and_iat_set(self):
def _check(f, func, values=False):
if f is not None:
                indices = self.generate_indices(f, values)
                for i in indices:
getattr(f, func)[i] = 1
expected = self.get_value(f, i, values)
tm.assert_almost_equal(expected, 1)
for t in self._objs:
d = getattr(self, t)
# iat
for f in [d['ints'], d['uints']]:
_check(f, 'iat', values=True)
for f in [d['labels'], d['ts'], d['floats']]:
if f is not None:
pytest.raises(ValueError, _check, f, 'iat')
# at
for f in [d['ints'], d['uints'], d['labels'],
d['ts'], d['floats']]:
_check(f, 'at')
def test_at_iat_coercion(self):
# as timestamp is not a tuple!
dates = date_range('1/1/2000', periods=8)
df = DataFrame(np.random.randn(8, 4),
index=dates,
columns=['A', 'B', 'C', 'D'])
s = df['A']
result = s.at[dates[5]]
xp = s.values[5]
assert result == xp
# GH 7729
# make sure we are boxing the returns
s = Series(['2014-01-01', '2014-02-02'], dtype='datetime64[ns]')
expected = Timestamp('2014-02-02')
for r in [lambda: s.iat[1], lambda: s.iloc[1]]:
result = r()
assert result == expected
s = Series(['1 days', '2 days'], dtype='timedelta64[ns]')
expected = Timedelta('2 days')
for r in [lambda: s.iat[1], lambda: s.iloc[1]]:
result = r()
assert result == expected
def test_iat_invalid_args(self):
pass
def test_imethods_with_dups(self):
# GH6493
# iat/iloc with dups
s = Series(range(5), index=[1, 1, 2, 2, 3], dtype='int64')
result = s.iloc[2]
assert result == 2
result = s.iat[2]
assert result == 2
pytest.raises(IndexError, lambda: s.iat[10])
pytest.raises(IndexError, lambda: s.iat[-10])
result = s.iloc[[2, 3]]
expected = Series([2, 3], [2, 2], dtype='int64')
tm.assert_series_equal(result, expected)
df = s.to_frame()
result = df.iloc[2]
expected = Series(2, index=[0], name=2)
tm.assert_series_equal(result, expected)
result = df.iat[2, 0]
assert result == 2
def test_at_to_fail(self):
        # at should not fall back
# GH 7814
s = Series([1, 2, 3], index=list('abc'))
result = s.at['a']
assert result == 1
pytest.raises(ValueError, lambda: s.at[0])
df = DataFrame({'A': [1, 2, 3]}, index=list('abc'))
result = df.at['a', 'A']
assert result == 1
pytest.raises(ValueError, lambda: df.at['a', 0])
s = Series([1, 2, 3], index=[3, 2, 1])
result = s.at[1]
assert result == 3
pytest.raises(ValueError, lambda: s.at['a'])
df = DataFrame({0: [1, 2, 3]}, index=[3, 2, 1])
result = df.at[1, 0]
assert result == 3
pytest.raises(ValueError, lambda: df.at['a', 0])
# GH 13822, incorrect error string with non-unique columns when missing
# column is accessed
df = DataFrame({'x': [1.], 'y': [2.], 'z': [3.]})
df.columns = ['x', 'x', 'z']
# Check that we get the correct value in the KeyError
tm.assert_raises_regex(KeyError, r"\['y'\] not in index",
lambda: df[['x', 'y', 'z']])
def test_at_with_tz(self):
# gh-15822
df = DataFrame({'name': ['John', 'Anderson'],
'date': [Timestamp(2017, 3, 13, 13, 32, 56),
Timestamp(2017, 2, 16, 12, 10, 3)]})
df['date'] = df['date'].dt.tz_localize('Asia/Shanghai')
expected = Timestamp('2017-03-13 13:32:56+0800', tz='Asia/Shanghai')
result = df.loc[0, 'date']
assert result == expected
result = df.at[0, 'date']
assert result == expected
def test_mixed_index_at_iat_loc_iloc_series(self):
# GH 19860
s = Series([1, 2, 3, 4, 5], index=['a', 'b', 'c', 1, 2])
for el, item in s.iteritems():
assert s.at[el] == s.loc[el] == item
for i in range(len(s)):
assert s.iat[i] == s.iloc[i] == i + 1
with pytest.raises(KeyError):
s.at[4]
with pytest.raises(KeyError):
s.loc[4]
def test_mixed_index_at_iat_loc_iloc_dataframe(self):
# GH 19860
df = DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]],
columns=['a', 'b', 'c', 1, 2])
for rowIdx, row in df.iterrows():
for el, item in row.iteritems():
assert df.at[rowIdx, el] == df.loc[rowIdx, el] == item
for row in range(2):
for i in range(5):
assert df.iat[row, i] == df.iloc[row, i] == row * 5 + i
with pytest.raises(KeyError):
df.at[0, 3]
with pytest.raises(KeyError):
df.loc[0, 3]
|
|
# Lint as: python2, python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX taxi template pipeline definition.
This file defines the TFX pipeline and various components in the pipeline.
"""
from typing import Any, Dict, List, Optional
import tensorflow_model_analysis as tfma
from ml_metadata.proto import metadata_store_pb2
from tfx.components import CsvExampleGen # pylint: disable=unused-import
from tfx.components import (
Evaluator,
ExampleValidator,
Pusher,
ResolverNode,
SchemaGen,
StatisticsGen,
Trainer,
Transform,
)
from tfx.components.trainer import executor as trainer_executor
from tfx.dsl.components.base import executor_spec
from tfx.dsl.experimental import latest_blessed_model_resolver
from tfx.extensions.google_cloud_ai_platform.pusher import (
executor as ai_platform_pusher_executor,
)
from tfx.extensions.google_cloud_ai_platform.trainer import (
executor as ai_platform_trainer_executor,
)
from tfx.extensions.google_cloud_big_query.example_gen import (
component as big_query_example_gen_component, # pylint: disable=unused-import
)
from tfx.orchestration import pipeline
from tfx.proto import pusher_pb2, trainer_pb2
from tfx.types import Channel
from tfx.types.standard_artifacts import Model, ModelBlessing
from tfx.utils.dsl_utils import external_input
def create_pipeline(
pipeline_name: str,
pipeline_root: str,
data_path: str, # pylint: disable=unused-argument
# TODO(step 7): (Optional) Uncomment here to use BigQuery as a data source.
query: str,
preprocessing_fn: str,
run_fn: str,
train_args: trainer_pb2.TrainArgs,
eval_args: trainer_pb2.EvalArgs,
eval_accuracy_threshold: float,
serving_model_dir: str,
metadata_connection_config: Optional[
metadata_store_pb2.ConnectionConfig
] = None,
beam_pipeline_args: Optional[List[str]] = None,
ai_platform_training_args: Optional[Dict[str, str]] = None,
ai_platform_serving_args: Optional[Dict[str, Any]] = None,
) -> pipeline.Pipeline:
"""Implements the chicago taxi pipeline with TFX."""
components = []
# Brings data into the pipeline or otherwise joins/converts training data.
# example_gen = CsvExampleGen(input=external_input(data_path))
# TODO(step 7): (Optional) Uncomment here to use BigQuery as a data source.
example_gen = big_query_example_gen_component.BigQueryExampleGen(
query=query
)
components.append(example_gen)
# Computes statistics over data for visualization and example validation.
statistics_gen = StatisticsGen(examples=example_gen.outputs["examples"])
# TODO(step 5): Uncomment here to add StatisticsGen to the pipeline.
components.append(statistics_gen)
# Generates schema based on statistics files.
schema_gen = SchemaGen(
statistics=statistics_gen.outputs["statistics"],
infer_feature_shape=True,
)
# TODO(step 5): Uncomment here to add SchemaGen to the pipeline.
components.append(schema_gen)
# Performs anomaly detection based on statistics and data schema.
example_validator = ExampleValidator( # pylint: disable=unused-variable
statistics=statistics_gen.outputs["statistics"],
schema=schema_gen.outputs["schema"],
)
# TODO(step 5): Uncomment here to add ExampleValidator to the pipeline.
components.append(example_validator)
# Performs transformations and feature engineering in training and serving.
transform = Transform(
examples=example_gen.outputs["examples"],
schema=schema_gen.outputs["schema"],
preprocessing_fn=preprocessing_fn,
)
# TODO(step 6): Uncomment here to add Transform to the pipeline.
components.append(transform)
# Uses user-provided Python function that implements a model using TF-Learn.
trainer_args = {
"run_fn": run_fn,
"transformed_examples": transform.outputs["transformed_examples"],
"schema": schema_gen.outputs["schema"],
"transform_graph": transform.outputs["transform_graph"],
"train_args": train_args,
"eval_args": eval_args,
"custom_executor_spec": executor_spec.ExecutorClassSpec(
trainer_executor.GenericExecutor
),
}
if ai_platform_training_args is not None:
trainer_args.update(
{
"custom_executor_spec": executor_spec.ExecutorClassSpec(
ai_platform_trainer_executor.GenericExecutor
),
"custom_config": {
# pylint: disable-next=line-too-long
ai_platform_trainer_executor.TRAINING_ARGS_KEY: ai_platform_training_args,
},
}
)
trainer = Trainer(**trainer_args)
# TODO(step 6): Uncomment here to add Trainer to the pipeline.
components.append(trainer)
# Get the latest blessed model for model validation.
model_resolver = ResolverNode(
instance_name="latest_blessed_model_resolver",
resolver_class=latest_blessed_model_resolver.LatestBlessedModelResolver,
model=Channel(type=Model),
model_blessing=Channel(type=ModelBlessing),
)
# TODO(step 6): Uncomment here to add ResolverNode to the pipeline.
components.append(model_resolver)
    # Uses TFMA to compute evaluation statistics over features of a model and
# perform quality validation of a candidate model (compared to a baseline).
eval_config = tfma.EvalConfig(
model_specs=[tfma.ModelSpec(label_key="big_tipper")],
slicing_specs=[tfma.SlicingSpec()],
metrics_specs=[
tfma.MetricsSpec(
metrics=[
tfma.MetricConfig(
class_name="BinaryAccuracy",
threshold=tfma.MetricThreshold(
value_threshold=tfma.GenericValueThreshold(
lower_bound={"value": eval_accuracy_threshold}
),
change_threshold=tfma.GenericChangeThreshold(
direction=tfma.MetricDirection.HIGHER_IS_BETTER,
absolute={"value": -1e-10},
),
),
)
]
)
],
)
evaluator = Evaluator(
examples=example_gen.outputs["examples"],
model=trainer.outputs["model"],
baseline_model=model_resolver.outputs["model"],
# Change threshold will be ignored if there is no baseline (first run).
eval_config=eval_config,
)
# TODO(step 6): Uncomment here to add Evaluator to the pipeline.
components.append(evaluator)
# Checks whether the model passed the validation steps and pushes the model
# to a file destination if check passed.
pusher_args = {
"model": trainer.outputs["model"],
"model_blessing": evaluator.outputs["blessing"],
"push_destination": pusher_pb2.PushDestination(
filesystem=pusher_pb2.PushDestination.Filesystem(
base_directory=serving_model_dir
)
),
}
if ai_platform_serving_args is not None:
pusher_args.update(
{
"custom_executor_spec": executor_spec.ExecutorClassSpec(
ai_platform_pusher_executor.Executor
),
"custom_config": {
# pylint: disable-next=line-too-long
ai_platform_pusher_executor.SERVING_ARGS_KEY: ai_platform_serving_args
},
}
)
pusher = Pusher(**pusher_args) # pylint: disable=unused-variable
# TODO(step 6): Uncomment here to add Pusher to the pipeline.
components.append(pusher)
return pipeline.Pipeline(
pipeline_name=pipeline_name,
pipeline_root=pipeline_root,
components=components,
# Change this value to control caching of execution results. Default
# value is `False`.
enable_cache=False,
metadata_connection_config=metadata_connection_config,
beam_pipeline_args=beam_pipeline_args,
)
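# Illustrative invocation sketch (not part of the original template). The
# runner choice, bucket paths and query below are placeholder assumptions,
# not values defined in this module:
#
#   from tfx.orchestration.beam.beam_dag_runner import BeamDagRunner
#   BeamDagRunner().run(create_pipeline(
#       pipeline_name='taxi_pipeline',
#       pipeline_root='gs://my-bucket/tfx_root',
#       data_path='gs://my-bucket/data',
#       query='SELECT * FROM `my-project.dataset.table`',
#       preprocessing_fn='models.preprocessing.preprocessing_fn',
#       run_fn='models.keras.model.run_fn',
#       train_args=trainer_pb2.TrainArgs(num_steps=1000),
#       eval_args=trainer_pb2.EvalArgs(num_steps=150),
#       eval_accuracy_threshold=0.6,
#       serving_model_dir='gs://my-bucket/serving_model'))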
|
|
# This module contains a lookup table of MAC address prefixes
# owned by various manufacturers of network cards.
manufacturers = {
"Apple": [
"00:03:93",
"00:05:02",
"00:0a:27",
"00:0a:95",
"00:0d:93",
"00:10:fa",
"00:11:24",
"00:14:51",
"00:16:cb",
"00:17:f2",
"00:19:e3",
"00:1b:63",
"00:1c:b3",
"00:1d:4f",
"00:1e:52",
"00:1e:c2",
"00:1f:5b",
"00:1f:f3",
"00:21:e9",
"00:22:41",
"00:23:12",
"00:23:32",
"00:23:6c",
"00:23:df",
"00:24:36",
"00:25:00",
"00:25:4b",
"00:25:bc",
"00:26:08",
"00:26:4a",
"00:26:b0",
"00:26:bb",
"00:30:65",
"00:3e:e1",
"00:50:e4",
"00:88:65",
"00:a0:40",
"00:c6:10",
"00:f4:b9",
"04:0c:ce",
"04:15:52",
"04:1e:64",
"04:26:65",
"04:54:53",
"04:db:56",
"04:e5:36",
"04:f1:3e",
"04:f7:e4",
"08:00:07",
"08:70:45",
"0c:30:21",
"0c:3e:9f",
"0c:4d:e9",
"0c:74:c2",
"0c:77:1a",
"10:1c:0c",
"10:40:f3",
"10:93:e9",
"10:9a:dd",
"10:dd:b1",
"14:10:9f",
"14:5a:05",
"14:8f:c6",
"14:99:e2",
"18:20:32",
"18:34:51",
"18:9e:fc",
"18:af:61",
"18:af:8f",
"18:e7:f4",
"1c:ab:a7",
"1c:e6:2b",
"20:7d:74",
"20:c9:d0",
"24:a2:e1",
"24:ab:81",
"28:0b:5c",
"28:37:37",
"28:6a:b8",
"28:6a:ba",
"28:cf:da",
"28:cf:e9",
"28:e0:2c",
"28:e1:4c",
"28:e7:cf",
"2c:b4:3a",
"30:10:e4",
"30:90:ab",
"30:f7:c5",
"34:15:9e",
"34:51:c9",
"34:c0:59",
"38:0f:4a",
"38:48:4c",
"3c:07:54",
"3c:d0:f8",
"3c:e0:72",
"40:30:04",
"40:3c:fc",
"40:6c:8f",
"40:a6:d9",
"40:b3:95",
"40:d3:2d",
"44:2a:60",
"44:4c:0c",
"44:d8:84",
"44:fb:42",
"48:60:bc",
"48:74:6e",
"4c:8d:79",
"4c:b1:99",
"50:ea:d6",
"54:26:96",
"54:72:4f",
"54:ae:27",
"54:e4:3a",
"54:ea:a8",
"58:1f:aa",
"58:55:ca",
"58:b0:35",
"5c:59:48",
"5c:95:ae",
"5c:96:9d",
"5c:f9:38",
"60:03:08",
"60:33:4b",
"60:69:44",
"60:c5:47",
"60:fa:cd",
"60:fb:42",
"60:fe:c5",
"64:20:0c",
"64:a3:cb",
"64:b9:e8",
"64:e6:82",
"68:09:27",
"68:5b:35",
"68:96:7b",
"68:9c:70",
"68:a8:6d",
"6c:3e:6d",
"6c:c2:6b",
"70:11:24",
"70:56:81",
"70:73:cb",
"70:cd:60",
"70:de:e2",
"74:e1:b6",
"74:e2:f5",
"78:31:c1",
"78:6c:1c",
"78:a3:e4",
"78:ca:39",
"7c:11:be",
"7c:6d:62",
"7c:c3:a1",
"7c:c5:37",
"7c:d1:c3",
"7c:f0:5f",
"7c:fa:df",
"80:00:6e",
"80:49:71",
"80:92:9f",
"80:ea:96",
"84:29:99",
"84:38:35",
"84:85:06",
"84:8e:0c",
"84:fc:fe",
"88:1f:a1",
"88:53:95",
"88:c6:63",
"88:cb:87",
"8c:00:6d",
"8c:29:37",
"8c:2d:aa",
"8c:58:77",
"8c:7b:9d",
"8c:7c:92",
"8c:fa:ba",
"90:27:e4",
"90:72:40",
"90:84:0d",
"90:b2:1f",
"90:b9:31",
"94:94:26",
"98:03:d8",
"98:b8:e3",
"98:d6:bb",
"98:f0:ab",
"98:fe:94",
"9c:04:eb",
"9c:20:7b",
"a0:ed:cd",
"a4:67:06",
"a4:b1:97",
"a4:c3:61",
"a4:d1:d2",
"a8:20:66",
"a8:86:dd",
"a8:88:08",
"a8:96:8a",
"a8:bb:cf",
"a8:fa:d8",
"ac:3c:0b",
"ac:7f:3e",
"ac:cf:5c",
"ac:fd:ec",
"b0:34:95",
"b0:65:bd",
"b0:9f:ba",
"b4:18:d1",
"b4:f0:ab",
"b8:17:c2",
"b8:78:2e",
"b8:8d:12",
"b8:c7:5d",
"b8:e8:56",
"b8:f6:b1",
"b8:ff:61",
"bc:3b:af",
"bc:52:b7",
"bc:67:78",
"bc:92:6b",
"c0:63:94",
"c0:84:7a",
"c0:9f:42",
"c4:2c:03",
"c8:2a:14",
"c8:33:4b",
"c8:6f:1d",
"c8:b5:b7",
"c8:bc:c8",
"c8:e0:eb",
"cc:08:e0",
"cc:78:5f",
"d0:23:db",
"d0:e1:40",
"d4:9a:20",
"d8:00:4d",
"d8:30:62",
"d8:96:95",
"d8:9e:3f",
"d8:a2:5e",
"d8:d1:cb",
"dc:2b:61",
"dc:86:d8",
"dc:9b:9c",
"e0:b9:ba",
"e0:c9:7a",
"e0:f5:c6",
"e0:f8:47",
"e4:25:e7",
"e4:8b:7f",
"e4:98:d6",
"e4:c6:3d",
"e4:ce:8f",
"e8:04:0b",
"e8:06:88",
"e8:8d:28",
"ec:35:86",
"ec:85:2f",
"f0:b4:79",
"f0:c1:f1",
"f0:cb:a1",
"f0:d1:a9",
"f0:db:f8",
"f0:dc:e2",
"f0:f6:1c",
"f4:1b:a1",
"f4:37:b7",
"f4:f1:5a",
"f4:f9:51",
"f8:1e:df",
"f8:27:93",
"fc:25:3f",
],
"Asus": [
"00:0c:6e",
"00:0e:a6",
"00:11:2f",
"00:11:d8",
"00:13:d4",
"00:15:f2",
"00:17:31",
"00:18:f3",
"00:1a:92",
"00:1b:fc",
"00:1d:60",
"00:1e:8c",
"00:1f:c6",
"00:22:15",
"00:23:54",
"00:24:8c",
"00:26:18",
"00:e0:18",
"08:60:6e",
"10:bf:48",
"14:da:e9",
"20:cf:30",
"30:85:a9",
"48:5b:39",
"50:46:5d",
"54:04:a6",
"60:a4:4c",
"74:d0:2b",
"90:e6:ba",
"ac:22:0b",
"bc:ae:c5",
"bc:ee:7b",
"c8:60:00",
"d8:50:e6",
"e0:3f:49",
"e0:cb:4e",
"f4:6d:04",
],
"Broadcom": [
"00:05:b5",
"00:0a:f7",
"00:0d:b6",
"00:10:18",
"00:1b:e9",
"18:c0:86",
"d4:01:29",
],
"Cisco": [
"00:00:0c",
"00:01:42",
"00:01:43",
"00:01:63",
"00:01:64",
"00:01:96",
"00:01:97",
"00:01:c7",
"00:01:c9",
"00:02:16",
"00:02:17",
"00:02:3d",
"00:02:4a",
"00:02:4b",
"00:02:7d",
"00:02:7e",
"00:02:b9",
"00:02:ba",
"00:02:fc",
"00:02:fd",
"00:03:31",
"00:03:32",
"00:03:6b",
"00:03:6c",
"00:03:9f",
"00:03:a0",
"00:03:e3",
"00:03:e4",
"00:03:fd",
"00:03:fe",
"00:04:27",
"00:04:28",
"00:04:4d",
"00:04:4e",
"00:04:6d",
"00:04:6e",
"00:04:9a",
"00:04:9b",
"00:04:c0",
"00:04:c1",
"00:04:dd",
"00:04:de",
"00:05:00",
"00:05:01",
"00:05:31",
"00:05:32",
"00:05:5e",
"00:05:5f",
"00:05:73",
"00:05:74",
"00:05:9a",
"00:05:9b",
"00:05:dc",
"00:05:dd",
"00:06:28",
"00:06:2a",
"00:06:52",
"00:06:53",
"00:06:7c",
"00:06:c1",
"00:06:d6",
"00:06:d7",
"00:06:f6",
"00:07:0d",
"00:07:0e",
"00:07:4f",
"00:07:50",
"00:07:7d",
"00:07:84",
"00:07:85",
"00:07:b3",
"00:07:b4",
"00:07:eb",
"00:07:ec",
"00:08:20",
"00:08:21",
"00:08:2f",
"00:08:30",
"00:08:31",
"00:08:32",
"00:08:7c",
"00:08:7d",
"00:08:a3",
"00:08:a4",
"00:08:c2",
"00:08:e2",
"00:08:e3",
"00:09:11",
"00:09:12",
"00:09:43",
"00:09:44",
"00:09:7b",
"00:09:7c",
"00:09:b6",
"00:09:b7",
"00:09:e8",
"00:09:e9",
"00:0a:41",
"00:0a:42",
"00:0a:8a",
"00:0a:8b",
"00:0a:b7",
"00:0a:b8",
"00:0a:f3",
"00:0a:f4",
"00:0b:45",
"00:0b:46",
"00:0b:5f",
"00:0b:60",
"00:0b:85",
"00:0b:be",
"00:0b:bf",
"00:0b:fc",
"00:0b:fd",
"00:0c:30",
"00:0c:31",
"00:0c:41",
"00:0c:85",
"00:0c:86",
"00:0c:ce",
"00:0c:cf",
"00:0d:28",
"00:0d:29",
"00:0d:65",
"00:0d:66",
"00:0d:bc",
"00:0d:bd",
"00:0d:ec",
"00:0d:ed",
"00:0e:08",
"00:0e:38",
"00:0e:39",
"00:0e:83",
"00:0e:84",
"00:0e:d6",
"00:0e:d7",
"00:0f:23",
"00:0f:24",
"00:0f:34",
"00:0f:35",
"00:0f:66",
"00:0f:8f",
"00:0f:90",
"00:0f:f7",
"00:0f:f8",
"00:10:07",
"00:10:0b",
"00:10:0d",
"00:10:11",
"00:10:14",
"00:10:1f",
"00:10:29",
"00:10:2f",
"00:10:54",
"00:10:79",
"00:10:7b",
"00:10:a6",
"00:10:f6",
"00:10:ff",
"00:11:20",
"00:11:21",
"00:11:5c",
"00:11:5d",
"00:11:92",
"00:11:93",
"00:11:bb",
"00:11:bc",
"00:12:00",
"00:12:01",
"00:12:17",
"00:12:43",
"00:12:44",
"00:12:7f",
"00:12:80",
"00:12:d9",
"00:12:da",
"00:13:10",
"00:13:19",
"00:13:1a",
"00:13:5f",
"00:13:60",
"00:13:7f",
"00:13:80",
"00:13:c3",
"00:13:c4",
"00:14:1b",
"00:14:1c",
"00:14:69",
"00:14:6a",
"00:14:a8",
"00:14:a9",
"00:14:bf",
"00:14:f1",
"00:14:f2",
"00:15:2b",
"00:15:2c",
"00:15:62",
"00:15:63",
"00:15:c6",
"00:15:c7",
"00:15:f9",
"00:15:fa",
"00:16:46",
"00:16:47",
"00:16:9c",
"00:16:9d",
"00:16:b6",
"00:16:c7",
"00:16:c8",
"00:17:0e",
"00:17:0f",
"00:17:3b",
"00:17:59",
"00:17:5a",
"00:17:94",
"00:17:95",
"00:17:df",
"00:17:e0",
"00:18:18",
"00:18:19",
"00:18:39",
"00:18:68",
"00:18:73",
"00:18:74",
"00:18:b9",
"00:18:ba",
"00:18:f8",
"00:19:06",
"00:19:07",
"00:19:2f",
"00:19:30",
"00:19:47",
"00:19:55",
"00:19:56",
"00:19:a9",
"00:19:aa",
"00:19:e7",
"00:19:e8",
"00:1a:2f",
"00:1a:30",
"00:1a:6c",
"00:1a:6d",
"00:1a:70",
"00:1a:a1",
"00:1a:a2",
"00:1a:e2",
"00:1a:e3",
"00:1b:0c",
"00:1b:0d",
"00:1b:2a",
"00:1b:2b",
"00:1b:53",
"00:1b:54",
"00:1b:67",
"00:1b:8f",
"00:1b:90",
"00:1b:d4",
"00:1b:d5",
"00:1b:d7",
"00:1c:0e",
"00:1c:0f",
"00:1c:10",
"00:1c:57",
"00:1c:58",
"00:1c:b0",
"00:1c:b1",
"00:1c:f6",
"00:1c:f9",
"00:1d:45",
"00:1d:46",
"00:1d:70",
"00:1d:71",
"00:1d:7e",
"00:1d:a1",
"00:1d:a2",
"00:1d:e5",
"00:1d:e6",
"00:1e:13",
"00:1e:14",
"00:1e:49",
"00:1e:4a",
"00:1e:6b",
"00:1e:79",
"00:1e:7a",
"00:1e:bd",
"00:1e:be",
"00:1e:e5",
"00:1e:f6",
"00:1e:f7",
"00:1f:26",
"00:1f:27",
"00:1f:6c",
"00:1f:6d",
"00:1f:9d",
"00:1f:9e",
"00:1f:c9",
"00:1f:ca",
"00:21:1b",
"00:21:1c",
"00:21:29",
"00:21:55",
"00:21:56",
"00:21:a0",
"00:21:a1",
"00:21:be",
"00:21:d7",
"00:21:d8",
"00:22:0c",
"00:22:0d",
"00:22:3a",
"00:22:55",
"00:22:56",
"00:22:6b",
"00:22:90",
"00:22:91",
"00:22:bd",
"00:22:be",
"00:22:ce",
"00:23:04",
"00:23:05",
"00:23:33",
"00:23:34",
"00:23:5d",
"00:23:5e",
"00:23:69",
"00:23:ab",
"00:23:ac",
"00:23:be",
"00:23:ea",
"00:23:eb",
"00:24:13",
"00:24:14",
"00:24:50",
"00:24:51",
"00:24:97",
"00:24:98",
"00:24:c3",
"00:24:c4",
"00:24:f7",
"00:24:f9",
"00:25:2e",
"00:25:45",
"00:25:46",
"00:25:83",
"00:25:84",
"00:25:9c",
"00:25:b4",
"00:25:b5",
"00:26:0a",
"00:26:0b",
"00:26:51",
"00:26:52",
"00:26:98",
"00:26:99",
"00:26:ca",
"00:26:cb",
"00:27:0c",
"00:27:0d",
"00:2a:6a",
"00:30:19",
"00:30:24",
"00:30:40",
"00:30:71",
"00:30:78",
"00:30:7b",
"00:30:80",
"00:30:85",
"00:30:94",
"00:30:96",
"00:30:a3",
"00:30:b6",
"00:30:f2",
"00:3a:98",
"00:3a:99",
"00:3a:9a",
"00:3a:9b",
"00:3a:9c",
"00:40:0b",
"00:40:96",
"00:50:0b",
"00:50:0f",
"00:50:14",
"00:50:2a",
"00:50:3e",
"00:50:50",
"00:50:53",
"00:50:54",
"00:50:73",
"00:50:80",
"00:50:a2",
"00:50:a7",
"00:50:bd",
"00:50:d1",
"00:50:e2",
"00:50:f0",
"00:60:09",
"00:60:2f",
"00:60:3e",
"00:60:47",
"00:60:5c",
"00:60:70",
"00:60:83",
"00:64:40",
"00:90:0c",
"00:90:21",
"00:90:2b",
"00:90:5f",
"00:90:6d",
"00:90:6f",
"00:90:86",
"00:90:92",
"00:90:a6",
"00:90:ab",
"00:90:b1",
"00:90:bf",
"00:90:d9",
"00:90:f2",
"00:b0:4a",
"00:b0:64",
"00:b0:8e",
"00:b0:c2",
"00:d0:06",
"00:d0:58",
"00:d0:63",
"00:d0:79",
"00:d0:90",
"00:d0:97",
"00:d0:ba",
"00:d0:bb",
"00:d0:bc",
"00:d0:c0",
"00:d0:d3",
"00:d0:e4",
"00:d0:ff",
"00:de:fb",
"00:e0:14",
"00:e0:1e",
"00:e0:34",
"00:e0:4f",
"00:e0:8f",
"00:e0:a3",
"00:e0:b0",
"00:e0:f7",
"00:e0:f9",
"00:e0:fe",
"04:c5:a4",
"04:da:d2",
"04:fe:7f",
"08:17:35",
"08:1f:f3",
"08:80:39",
"08:cc:68",
"08:d0:9f",
"0c:27:24",
"0c:68:03",
"0c:85:25",
"0c:d9:96",
"10:5f:49",
"10:8c:cf",
"10:bd:18",
"10:ea:59",
"10:f3:11",
"18:33:9d",
"18:55:0f",
"18:59:33",
"18:9c:5d",
"18:ef:63",
"1c:17:d3",
"1c:1d:86",
"1c:aa:07",
"1c:df:0f",
"1c:e6:c7",
"20:37:06",
"20:3a:07",
"20:aa:4b",
"20:bb:c0",
"24:01:c7",
"24:37:4c",
"24:76:7d",
"24:b6:57",
"24:e9:b3",
"28:93:fe",
"28:94:0f",
"2c:36:f8",
"2c:3f:38",
"2c:54:2d",
"30:37:a6",
"30:e4:db",
"30:f7:0d",
"34:a8:4e",
"34:bd:c8",
"34:bd:fa",
"34:db:fd",
"38:c8:5c",
"3c:08:f6",
"3c:0e:23",
"3c:ce:73",
"3c:df:1e",
"40:55:39",
"40:f4:ec",
"44:03:a7",
"44:2b:03",
"44:58:29",
"44:ad:d9",
"44:d3:ca",
"44:e0:8e",
"44:e4:d9",
"48:44:87",
"48:f8:b3",
"4c:00:82",
"4c:4e:35",
"50:06:04",
"50:17:ff",
"50:39:55",
"50:3d:e5",
"50:57:a8",
"54:75:d0",
"54:78:1a",
"54:7f:ee",
"54:d4:6f",
"58:35:d9",
"58:6d:8f",
"58:8d:09",
"58:97:1e",
"58:bc:27",
"58:bf:ea",
"5c:50:15",
"5c:a4:8a",
"60:2a:d0",
"60:73:5c",
"64:00:f1",
"64:16:8d",
"64:9e:f3",
"64:a0:e7",
"64:ae:0c",
"64:d8:14",
"64:d9:89",
"64:e9:50",
"68:7f:74",
"68:86:a7",
"68:bc:0c",
"68:bd:ab",
"68:ee:96",
"68:ef:bd",
"6c:20:56",
"6c:41:6a",
"6c:50:4d",
"6c:99:89",
"6c:9c:ed",
"70:10:5c",
"70:81:05",
"70:ca:9b",
"78:da:6e",
"7c:69:f6",
"7c:95:f3",
"7c:ad:74",
"7c:b2:1b",
"84:78:ac",
"88:43:e1",
"88:5a:92",
"88:75:56",
"88:f0:77",
"8c:60:4f",
"8c:b6:4f",
"98:fc:11",
"9c:4e:20",
"9c:af:ca",
"a0:cf:5b",
"a4:0c:c3",
"a4:18:75",
"a4:4c:11",
"a4:56:30",
"a4:93:4c",
"a4:a2:4a",
"a8:0c:0d",
"a8:b1:d4",
"ac:a0:16",
"ac:f2:c5",
"b0:fa:eb",
"b4:14:89",
"b4:a4:e3",
"b4:e9:b0",
"b8:38:61",
"b8:62:1f",
"b8:be:bf",
"bc:16:65",
"bc:c8:10",
"c0:25:5c",
"c0:62:6b",
"c0:67:af",
"c0:7b:bc",
"c0:8c:60",
"c0:c1:c0",
"c0:c6:87",
"c4:0a:cb",
"c4:14:3c",
"c4:64:13",
"c4:71:fe",
"c4:7d:4f",
"c8:4c:75",
"c8:9c:1d",
"c8:b3:73",
"c8:d7:19",
"c8:f9:f9",
"c8:fb:26",
"cc:0d:ec",
"cc:d5:39",
"cc:ef:48",
"d0:57:4c",
"d0:c2:82",
"d0:c7:89",
"d0:d0:fd",
"d4:8c:b5",
"d4:a0:2a",
"d4:d7:48",
"d8:24:bd",
"d8:67:d9",
"dc:7b:94",
"dc:a5:f4",
"e0:2f:6d",
"e0:5f:b9",
"e4:48:c7",
"e4:c7:22",
"e4:d3:f1",
"e8:04:62",
"e8:40:40",
"e8:b7:48",
"e8:ba:70",
"e8:ed:f3",
"ec:30:91",
"ec:44:76",
"ec:c8:82",
"ec:e1:a9",
"f0:25:72",
"f0:29:29",
"f0:f7:55",
"f4:1f:c2",
"f4:5f:d4",
"f4:7f:35",
"f4:ac:c1",
"f4:cf:e2",
"f4:ea:67",
"f8:4f:57",
"f8:66:f2",
"f8:72:ea",
"fc:99:47",
"fc:fb:fb",
],
"Dell": [
"00:06:5b",
"00:08:74",
"00:0b:db",
"00:0d:56",
"00:0f:1f",
"00:11:43",
"00:12:3f",
"00:13:72",
"00:14:22",
"00:15:c5",
"00:16:f0",
"00:18:8b",
"00:19:b9",
"00:1a:a0",
"00:1c:23",
"00:1d:09",
"00:1e:4f",
"00:21:70",
"00:21:9b",
"00:22:19",
"00:23:ae",
"00:24:e8",
"00:25:64",
"00:25:bd",
"00:26:b9",
"00:b0:d0",
"00:c0:4f",
"14:fe:b5",
"18:03:73",
"18:a9:9b",
"24:b6:fd",
"28:c8:25",
"34:17:eb",
"5c:26:0a",
"5c:f9:dd",
"74:86:7a",
"78:2b:cb",
"78:45:c4",
"84:2b:2b",
"84:2b:bc",
"84:8f:69",
"90:b1:1c",
"a4:1f:72",
"a4:ba:db",
"b8:ac:6f",
"b8:ca:3a",
"bc:30:5b",
"c8:1f:66",
"d0:67:e5",
"d4:ae:52",
"d4:be:d9",
"e0:db:55",
"ec:f4:bb",
"f0:1f:af",
"f0:4d:a2",
"f8:b1:56",
"f8:bc:12",
"f8:db:88",
],
"Nokia": [
"00:02:ee",
"00:0b:e1",
"00:0e:ed",
"00:0f:bb",
"00:10:b3",
"00:11:9f",
"00:12:62",
"00:13:70",
"00:13:fd",
"00:14:a7",
"00:15:2a",
"00:15:a0",
"00:15:de",
"00:16:4e",
"00:16:bc",
"00:17:4b",
"00:17:b0",
"00:18:0f",
"00:18:42",
"00:18:8d",
"00:18:c5",
"00:19:2d",
"00:19:4f",
"00:19:79",
"00:19:b7",
"00:1a:16",
"00:1a:89",
"00:1a:dc",
"00:1b:33",
"00:1b:af",
"00:1b:ee",
"00:1c:35",
"00:1c:9a",
"00:1c:d4",
"00:1c:d6",
"00:1d:3b",
"00:1d:6e",
"00:1d:98",
"00:1d:e9",
"00:1d:fd",
"00:1e:3a",
"00:1e:3b",
"00:1e:a3",
"00:1e:a4",
"00:1f:00",
"00:1f:01",
"00:1f:5c",
"00:1f:5d",
"00:1f:de",
"00:1f:df",
"00:21:08",
"00:21:09",
"00:21:aa",
"00:21:ab",
"00:21:fc",
"00:21:fe",
"00:22:65",
"00:22:66",
"00:22:fc",
"00:22:fd",
"00:23:b4",
"00:24:03",
"00:24:04",
"00:24:7c",
"00:24:7d",
"00:25:47",
"00:25:48",
"00:25:cf",
"00:25:d0",
"00:26:68",
"00:26:69",
"00:26:cc",
"00:40:43",
"00:bd:3a",
"00:e0:03",
"04:5a:95",
"04:a8:2a",
"0c:c6:6a",
"0c:dd:ef",
"10:f9:ee",
"14:36:05",
"18:14:56",
"18:86:ac",
"20:d6:07",
"28:47:aa",
"28:d1:af",
"2c:5a:05",
"2c:cc:15",
"2c:d2:e7",
"30:38:55",
"34:7e:39",
"34:c8:03",
"38:19:2f",
"3c:36:3d",
"3c:c2:43",
"3c:f7:2a",
"40:7a:80",
"48:dc:fb",
"4c:25:78",
"50:2d:1d",
"54:44:08",
"54:79:75",
"5c:57:c8",
"6c:9b:02",
"6c:a7:80",
"6c:e9:07",
"70:8d:09",
"78:2e:ef",
"78:ca:04",
"80:50:1b",
"88:44:f6",
"90:cf:15",
"94:00:70",
"94:20:53",
"94:3a:f0",
"9c:18:74",
"9c:4a:7b",
"9c:ca:d9",
"a0:4e:04",
"a0:71:a9",
"a0:f4:19",
"a4:77:60",
"a4:e7:31",
"a8:44:81",
"a8:7b:39",
"a8:7e:33",
"a8:e0:18",
"ac:81:f3",
"ac:93:2f",
"b0:35:8d",
"b0:5c:e5",
"bc:c6:db",
"c0:38:f9",
"c0:64:c6",
"c8:3d:97",
"c8:97:9f",
"c8:d1:0b",
"c8:df:7c",
"cc:89:fd",
"d0:db:32",
"d4:5d:42",
"d4:c1:fc",
"d4:cb:af",
"d8:2a:7e",
"d8:75:33",
"dc:3e:f8",
"dc:9f:a4",
"dc:c7:93",
"e0:a6:70",
"e4:ec:10",
"e8:cb:a1",
"ec:9b:5b",
"ec:f3:5b",
"f4:8e:09",
"f4:f5:a5",
"f8:5f:2a",
"fc:92:3b",
"fc:e5:57",
],
"HTC": [
"00:01:6b",
"00:07:14",
"00:09:2d",
"00:23:76",
"00:25:24",
"18:87:96",
"1c:b0:94",
"38:e7:d8",
"50:2e:5c",
"64:a7:69",
"7c:61:93",
"84:7a:88",
"90:21:55",
"98:0d:2e",
"a0:f4:50",
"a8:26:d9",
"bc:cf:cc",
"d4:20:6d",
"d8:b3:77",
"e8:99:c4",
"f8:db:7f",
],
"LG": [
"00:1c:62",
"00:1e:75",
"00:1f:6b",
"00:1f:e3",
"00:21:fb",
"00:22:a9",
"00:24:83",
"00:25:e5",
"00:26:e2",
"00:aa:70",
"00:e0:91",
"10:68:3f",
"10:f9:6f",
"20:21:a5",
"30:76:6f",
"3c:bd:d8",
"40:b0:fa",
"58:a2:b5",
"6c:d0:32",
"6c:d6:8a",
"70:05:14",
"74:a7:22",
"8c:3a:e3",
"98:93:cc",
"98:d6:f7",
"a8:16:b2",
"a8:92:2c",
"bc:f5:ac",
"c0:41:f6",
"c4:43:8f",
"cc:2d:8c",
"cc:fa:00",
"e8:5b:5b",
"e8:92:a4",
"f0:1c:13",
"f8:0c:f3",
"f8:a9:d0",
],
"Samsung": [
"00:00:f0",
"00:02:78",
"00:07:ab",
"00:09:18",
"00:0d:ae",
"00:0d:e5",
"00:12:47",
"00:12:fb",
"00:13:77",
"00:15:99",
"00:15:b9",
"00:16:32",
"00:16:6b",
"00:16:6c",
"00:16:db",
"00:17:c9",
"00:17:d5",
"00:18:af",
"00:1a:8a",
"00:1b:98",
"00:1c:43",
"00:1d:25",
"00:1d:f6",
"00:1e:7d",
"00:1e:e1",
"00:1e:e2",
"00:1f:cc",
"00:1f:cd",
"00:21:19",
"00:21:4c",
"00:21:d1",
"00:21:d2",
"00:23:39",
"00:23:3a",
"00:23:99",
"00:23:c2",
"00:23:d6",
"00:23:d7",
"00:24:54",
"00:24:90",
"00:24:91",
"00:24:e9",
"00:25:38",
"00:25:66",
"00:25:67",
"00:26:37",
"00:26:5d",
"00:26:5f",
"00:73:e0",
"00:e0:64",
"00:e3:b2",
"04:18:0f",
"04:1b:ba",
"04:fe:31",
"08:08:c2",
"08:37:3d",
"08:d4:2b",
"08:fc:88",
"08:fd:0e",
"0c:14:20",
"0c:71:5d",
"0c:89:10",
"0c:df:a4",
"10:1d:c0",
"10:3b:59",
"10:77:b1",
"10:d5:42",
"14:49:e0",
"14:89:fd",
"14:f4:2a",
"18:1e:b0",
"18:26:66",
"18:3f:47",
"18:46:17",
"18:67:b0",
"18:e2:c2",
"1c:5a:3e",
"1c:62:b8",
"1c:66:aa",
"1c:af:05",
"20:13:e0",
"20:64:32",
"20:d3:90",
"20:d5:bf",
"24:c6:96",
"24:db:ed",
"24:f5:aa",
"28:98:7b",
"28:ba:b5",
"28:cc:01",
"2c:44:01",
"30:19:66",
"30:cd:a7",
"30:d6:c9",
"34:23:ba",
"34:31:11",
"34:aa:8b",
"34:be:00",
"34:c3:ac",
"38:01:97",
"38:0a:94",
"38:0b:40",
"38:16:d1",
"38:aa:3c",
"38:ec:e4",
"3c:5a:37",
"3c:62:00",
"3c:8b:fe",
"40:0e:85",
"44:4e:1a",
"44:f4:59",
"48:44:f7",
"4c:3c:16",
"4c:bc:a5",
"50:01:bb",
"50:32:75",
"50:85:69",
"50:a4:c8",
"50:b7:c3",
"50:cc:f8",
"50:f5:20",
"50:fc:9f",
"54:88:0e",
"54:92:be",
"54:9b:12",
"58:c3:8b",
"5c:0a:5b",
"5c:3c:27",
"5c:a3:9d",
"5c:e8:eb",
"5c:f6:dc",
"60:6b:bd",
"60:a1:0a",
"60:d0:a9",
"64:77:91",
"64:b3:10",
"68:48:98",
"68:eb:ae",
"6c:83:36",
"6c:b7:f4",
"6c:f3:73",
"70:f9:27",
"74:45:8a",
"74:5f:00",
"78:1f:db",
"78:25:ad",
"78:47:1d",
"78:52:1a",
"78:59:5e",
"78:9e:d0",
"78:a8:73",
"78:ab:bb",
"78:d6:f0",
"78:f7:be",
"80:18:a7",
"80:57:19",
"84:0b:2d",
"84:25:db",
"84:51:81",
"88:32:9b",
"88:9b:39",
"8c:71:f8",
"8c:77:12",
"8c:c8:cd",
"90:18:7c",
"90:f1:aa",
"94:01:c2",
"94:35:0a",
"94:51:03",
"94:63:d1",
"94:d7:71",
"98:0c:82",
"98:52:b1",
"9c:02:98",
"9c:3a:af",
"9c:e6:e7",
"a0:07:98",
"a0:0b:ba",
"a0:21:95",
"a0:75:91",
"a0:82:1f",
"a4:eb:d3",
"a8:06:00",
"a8:f2:74",
"ac:36:13",
"b0:c4:e7",
"b0:d0:9c",
"b0:df:3a",
"b0:ec:71",
"b4:07:f9",
"b4:3a:28",
"b4:62:93",
"b8:5e:7b",
"b8:c6:8e",
"b8:d9:ce",
"bc:20:a4",
"bc:44:86",
"bc:47:60",
"bc:72:b1",
"bc:79:ad",
"bc:85:1f",
"bc:8c:cd",
"bc:b1:f3",
"c0:65:99",
"c4:50:06",
"c4:62:ea",
"c4:73:1e",
"c4:88:e5",
"c8:14:79",
"c8:19:f7",
"c8:7e:75",
"cc:05:1b",
"cc:07:ab",
"cc:3a:61",
"cc:f9:e8",
"cc:fe:3c",
"d0:17:6a",
"d0:22:be",
"d0:66:7b",
"d0:c1:b1",
"d0:df:c7",
"d4:87:d8",
"d4:88:90",
"d4:e8:b2",
"d8:31:cf",
"d8:57:ef",
"d8:90:e8",
"dc:71:44",
"e4:12:1d",
"e4:32:cb",
"e4:40:e2",
"e4:7c:f9",
"e4:92:fb",
"e4:b0:21",
"e4:e0:c5",
"e8:03:9a",
"e8:11:32",
"e8:4e:84",
"e8:e5:d6",
"ec:e0:9b",
"f0:08:f1",
"f0:25:b7",
"f0:5a:09",
"f0:6b:ca",
"f0:72:8c",
"f0:e7:7e",
"f4:7b:5e",
"f4:9f:54",
"f4:d9:fb",
"f8:d0:bd",
"fc:00:12",
"fc:1f:19",
"fc:a1:3e",
"fc:c7:34",
],
"Netgear": [
"00:09:5b",
"00:0f:b5",
"00:14:6c",
"00:18:4d",
"00:1b:2f",
"00:1e:2a",
"00:1f:33",
"00:22:3f",
"00:24:b2",
"00:26:f2",
"00:8e:f2",
"04:a1:51",
"08:bd:43",
"10:0d:7f",
"20:0c:c8",
"20:4e:7f",
"20:e5:2a",
"28:c6:8e",
"2c:b0:5d",
"30:46:9a",
"44:94:fc",
"4c:60:de",
"74:44:01",
"84:1b:5e",
"9c:d3:6d",
"a0:21:b7",
"c0:3f:0e",
"c4:04:15",
"c4:3d:c7",
"e0:46:9a",
"e0:91:f5",
],
"D-Link": [
"00:05:5d",
"00:0d:88",
"00:0f:3d",
"00:11:95",
"00:13:46",
"00:15:e9",
"00:17:9a",
"00:19:5b",
"00:1b:11",
"00:1c:f0",
"00:1e:58",
"00:21:91",
"00:22:b0",
"00:24:01",
"00:26:5a",
"00:50:ba",
"00:80:c8",
"14:d6:4d",
"1c:7e:e5",
"1c:af:f7",
"1c:bd:b9",
"28:10:7b",
"34:08:04",
"5c:d9:98",
"78:54:2e",
"84:c9:b2",
"90:94:e4",
"9c:d6:43",
"ac:f1:df",
"b8:a3:86",
"bc:f6:85",
"c0:a0:bb",
"c4:a8:1d",
"c8:be:19",
"c8:d3:a3",
"cc:b2:55",
"d8:fe:e3",
"f0:7d:68",
"fc:75:16",
],
"Giga-Byte": [
"00:0d:61",
"00:0f:ea",
"00:13:f3",
"00:14:85",
"00:16:e6",
"00:1a:4d",
"00:1d:7d",
"00:1f:d0",
"00:20:ed",
"00:24:1d",
"1c:6f:65",
"50:e5:49",
"6c:f0:49",
"74:d4:35",
"90:2b:34",
"94:de:80",
],
"Sony": [
"00:00:95",
"00:01:4a",
"00:04:1f",
"00:0a:d9",
"00:0e:07",
"00:0f:de",
"00:12:ee",
"00:13:15",
"00:13:a9",
"00:15:c1",
"00:16:20",
"00:16:b8",
"00:18:13",
"00:19:63",
"00:19:c5",
"00:1a:75",
"00:1a:80",
"00:1b:59",
"00:1c:a4",
"00:1d:0d",
"00:1d:28",
"00:1d:ba",
"00:1e:45",
"00:1f:a7",
"00:1f:e4",
"00:21:9e",
"00:22:98",
"00:22:a6",
"00:23:45",
"00:23:f1",
"00:24:8d",
"00:24:be",
"00:24:ef",
"00:25:e7",
"00:eb:2d",
"08:00:46",
"18:00:2d",
"1c:7b:21",
"20:54:76",
"24:21:ab",
"28:0d:fc",
"30:17:c8",
"30:39:26",
"30:f9:ed",
"3c:07:71",
"40:2b:a1",
"4c:21:d0",
"54:42:49",
"54:53:ed",
"58:17:0c",
"5c:b5:24",
"68:76:4f",
"6c:0e:0d",
"6c:23:b9",
"70:9e:29",
"78:84:3c",
"84:00:d2",
"8c:64:22",
"90:c1:15",
"94:ce:2c",
"a0:e4:53",
"a8:e3:ee",
"b4:52:7d",
"b4:52:7e",
"b8:f9:34",
"d0:51:62",
"d8:d4:3c",
"e0:63:e5",
"f0:bf:97",
"f8:d0:ac",
"fc:0f:e6",
],
"HP": [
"00:30:6e",
"00:80:a0",
"08:00:09",
"08:2e:5f",
"10:60:4b",
"24:be:05",
"28:92:4a",
"2c:44:fd",
"2c:59:e5",
"38:ea:a7",
"40:a8:f0",
"44:31:92",
"6c:3b:e5",
"74:46:a0",
"80:c1:6e",
"84:34:97",
"88:51:fb",
"9c:b6:54",
"a0:1d:48",
"a0:48:1c",
"a0:b3:cc",
"a0:d3:c1",
"a4:5d:36",
"ac:16:2d",
"b4:b5:2f",
"c8:cb:b8",
"cc:3e:5f",
"d0:7e:28",
"d4:c9:ef",
"d8:9d:67",
"e4:11:5b",
"e8:39:35",
"ec:9a:74",
"f0:92:1c",
"fc:15:b4",
],
"ZTE": [
"00:15:eb",
"00:19:c6",
"00:1e:73",
"00:22:93",
"00:25:12",
"00:26:ed",
"08:18:1a",
"0c:12:62",
"14:60:80",
"20:89:86",
"2c:26:c5",
"2c:95:7f",
"30:f3:1d",
"34:4b:50",
"34:de:34",
"34:e0:cf",
"38:46:08",
"48:28:2f",
"4c:09:b4",
"4c:ac:0a",
"4c:cb:f5",
"54:22:f8",
"68:1a:b2",
"6c:8b:2f",
"78:e8:b6",
"84:74:2a",
"8c:e0:81",
"90:1d:27",
"98:6c:f5",
"98:f5:37",
"9c:a9:e4",
"9c:d2:4b",
"a0:ec:80",
"b0:75:d5",
"b4:98:42",
"b4:b3:62",
"c8:64:c7",
"c8:7b:5b",
"cc:1a:fa",
"cc:7b:35",
"d0:15:4a",
"dc:02:8e",
"e0:c3:f3",
"e4:77:23",
"f0:84:c9",
"f4:6d:e2",
"f8:df:a8",
"fc:c8:97",
],
"IBM": [
"00:02:55",
"00:04:ac",
"00:06:29",
"00:09:6b",
"00:0d:60",
"00:10:d9",
"00:11:25",
"00:14:5e",
"00:17:ef",
"00:18:b1",
"00:1a:64",
"00:20:35",
"00:21:5e",
"00:22:00",
"00:25:03",
"00:50:76",
"00:60:94",
"08:00:5a",
"08:17:f4",
"10:00:5a",
"34:40:b5",
"40:f2:e9",
"5c:f3:fc",
"6c:ae:8b",
"74:99:75",
"a8:97:dc",
"e4:1f:13",
"fc:cf:62",
],
"Huawei": [
"00:18:82",
"00:1e:10",
"00:22:a1",
"00:25:68",
"00:25:9e",
"00:46:4b",
"00:66:4b",
"00:e0:fc",
"04:c0:6f",
"04:f9:38",
"08:19:a6",
"08:63:61",
"08:7a:4c",
"0c:37:dc",
"0c:96:bf",
"10:1b:54",
"10:47:80",
"10:c6:1f",
"14:b9:68",
"1c:1d:67",
"20:08:ed",
"20:0b:c7",
"20:2b:c1",
"20:f3:a3",
"24:69:a5",
"24:db:ac",
"28:31:52",
"28:3c:e4",
"28:5f:db",
"28:6e:d4",
"30:87:30",
"34:6b:d3",
"34:cd:be",
"3c:df:bd",
"40:4d:8e",
"40:cb:a8",
"48:46:fb",
"4c:1f:cc",
"4c:54:99",
"4c:8b:ef",
"4c:b1:6c",
"50:9f:27",
"54:39:df",
"54:89:98",
"54:a5:1b",
"5c:4c:a9",
"5c:7d:5e",
"60:de:44",
"64:16:f0",
"70:54:f5",
"70:72:3c",
"70:7b:e8",
"70:a8:e3",
"78:1d:ba",
"78:6a:89",
"78:f5:fd",
"7c:60:97",
"80:b6:86",
"80:fb:06",
"84:a8:e4",
"88:53:d4",
"88:86:03",
"88:e3:ab",
"90:4e:2b",
"a4:99:47",
"ac:4e:91",
"ac:85:3d",
"ac:e2:15",
"ac:e8:7b",
"b4:15:13",
"bc:76:70",
"c4:05:28",
"c8:d1:5e",
"cc:53:b5",
"cc:96:a0",
"cc:cc:81",
"d0:2d:b3",
"d0:7a:b5",
"d4:6a:a8",
"d4:6e:5c",
"d4:b1:10",
"d8:49:0b",
"dc:d2:fc",
"e0:24:7f",
"e4:68:a3",
"e8:08:8b",
"e8:cd:2d",
"ec:23:3d",
"f4:55:9c",
"f4:c7:14",
"f4:dc:f9",
"f8:01:13",
"f8:3d:ff",
"f8:4a:bf",
"fc:48:ef",
],
"Intel": [
"00:01:2a",
"00:02:b3",
"00:03:47",
"00:04:23",
"00:04:3a",
"00:07:e9",
"00:08:1a",
"00:09:58",
"00:0c:c7",
"00:0c:f1",
"00:0e:0c",
"00:0e:35",
"00:10:e6",
"00:11:11",
"00:11:76",
"00:12:f0",
"00:13:02",
"00:13:20",
"00:13:b1",
"00:13:ce",
"00:13:e8",
"00:14:4d",
"00:15:00",
"00:15:17",
"00:15:1f",
"00:16:6f",
"00:16:76",
"00:16:ea",
"00:16:eb",
"00:18:de",
"00:19:00",
"00:19:25",
"00:19:d1",
"00:19:d2",
"00:1a:3f",
"00:1b:0a",
"00:1b:21",
"00:1b:77",
"00:1b:b2",
"00:1b:f9",
"00:1c:bf",
"00:1c:c0",
"00:1d:e0",
"00:1d:e1",
"00:1e:64",
"00:1e:65",
"00:1e:67",
"00:1f:0c",
"00:1f:3b",
"00:1f:3c",
"00:20:7b",
"00:21:5c",
"00:21:5d",
"00:21:6a",
"00:21:6b",
"00:22:46",
"00:22:e6",
"00:22:fa",
"00:22:fb",
"00:23:14",
"00:23:15",
"00:23:b2",
"00:24:d6",
"00:24:d7",
"00:24:ea",
"00:26:21",
"00:26:c6",
"00:26:c7",
"00:26:fd",
"00:27:0e",
"00:27:10",
"00:30:72",
"00:50:f1",
"00:60:aa",
"00:6b:a0",
"00:80:53",
"00:90:27",
"00:a0:c9",
"00:aa:00",
"00:aa:01",
"00:aa:02",
"00:c2:c6",
"00:d0:5d",
"00:d0:b7",
"00:db:df",
"08:11:96",
"08:3f:76",
"08:d4:0c",
"0c:8b:fd",
"0c:d2:92",
"10:0b:a9",
"14:1b:f0",
"18:06:75",
"18:3d:a2",
"24:77:03",
"28:b2:bd",
"30:3a:64",
"34:13:e8",
"34:de:1a",
"3c:a9:f4",
"40:25:c2",
"44:85:00",
"4c:79:ba",
"4c:80:93",
"4c:eb:42",
"50:2d:a2",
"58:91:cf",
"58:94:6b",
"5c:51:4f",
"5c:c5:d4",
"5c:d2:e4",
"60:36:dd",
"60:46:16",
"60:57:18",
"60:67:20",
"60:6c:66",
"64:55:63",
"64:80:99",
"64:d4:da",
"68:05:ca",
"68:17:29",
"68:5d:43",
"6c:29:95",
"6c:88:14",
"74:e5:0b",
"78:3d:5b",
"78:92:9c",
"78:ff:57",
"7c:7a:91",
"7c:cf:cf",
"80:00:0b",
"80:86:f2",
"80:9b:20",
"84:3a:4b",
"84:a6:c8",
"88:53:2e",
"8c:70:5a",
"8c:a9:82",
"90:49:fa",
"90:e2:ba",
"98:4f:ee",
"9c:31:78",
"9c:4e:36",
"a0:36:9f",
"a0:6d:09",
"a0:88:69",
"a0:88:b4",
"a0:98:ed",
"a0:a8:cd",
"a4:4e:31",
"ac:72:89",
"ac:7b:a1",
"b4:b6:76",
"b8:03:05",
"b8:8a:60",
"bc:77:37",
"c0:8b:6f",
"c4:85:08",
"c4:d9:87",
"c8:f7:33",
"cc:2a:80",
"d0:7e:35",
"d8:fc:93",
"dc:a9:71",
"e0:61:b2",
"e0:94:67",
"e0:9d:31",
"e8:2a:ea",
"e8:b1:fc",
"ec:23:68",
"f8:16:54",
"fc:8f:c4",
"fc:f8:ae",
],
"Ericsson": [
"04:4e:06",
"34:07:fb",
"34:84:46",
"3c:19:7d",
"74:c9:9a",
"74:d0:dc",
"00:15:e0",
],
"Lite-On": [
"00:14:92",
"00:22:5f",
"1c:65:9d",
"20:16:d8",
"20:68:9d",
"24:fd:52",
"2c:d0:5a",
"40:f0:2f",
"44:6d:57",
"48:d2:24",
"68:a3:c4",
"70:1a:04",
"70:f1:a1",
"74:de:2b",
"74:e5:43",
"9c:b7:0d",
"a4:db:30",
"b0:05:94",
"d0:df:9a",
"e8:61:7e",
"00:02:e3",
"00:16:44",
"00:23:03",
"00:a0:cc",
"08:b7:38",
"58:87:4c",
],
"Motorola": [
"00:0a:28",
"00:0e:c7",
"00:15:70",
"00:20:75",
"00:23:68",
"00:24:37",
"00:24:92",
"00:a0:bf",
"00:e0:0c",
"40:83:de",
"48:2c:ea",
"4c:cc:34",
"5c:0e:8b",
"60:be:b5",
"84:24:8d",
"b0:79:94",
"b4:c7:99",
"c4:7d:cc",
"e0:75:7d",
"f8:e0:79",
"f8:f1:b6",
"fc:0a:81",
],
}
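# Illustrative helper (not part of the original table): look up the
# manufacturer for a full MAC address by matching its first three octets
# against the prefixes above.
def manufacturer_for_mac(mac_address):
    """Return the manufacturer name for `mac_address`, or None if unknown."""
    prefix = mac_address.lower().replace('-', ':')[:8]
    for manufacturer, prefixes in manufacturers.items():
        if prefix in prefixes:
            return manufacturer
    return None
# Example: manufacturer_for_mac('00:03:93:12:34:56') returns 'Apple'.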
|
|
# Copyright 2015, Ansible, Inc.
# Luke Sneeringer <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import copy
import functools
import json
import warnings
from requests.exceptions import ConnectionError
from requests.sessions import Session
from requests.models import Response
from requests.packages import urllib3
from tower_cli.conf import settings
from tower_cli.utils import data_structures, debug, exceptions as exc
class Client(Session):
"""A class for making HTTP requests to the Ansible Tower API and
returning the responses.
This functions as a wrapper around [requests][1], and returns its
responses; therefore, interact with response objects to this class the
same way you would with objects you get back from `requests.get` or
similar.
[1]: http://docs.python-requests.org/en/latest/
"""
def __init__(self):
super(Client, self).__init__()
for adapter in self.adapters.values():
adapter.max_retries = 3
@property
def prefix(self):
"""Return the appropriate URL prefix to prepend to requests,
based on the host provided in settings.
"""
host = settings.host
if '://' not in host:
host = 'https://%s' % host.strip('/')
return '%s/api/v1/' % host.rstrip('/')
@functools.wraps(Session.request)
def request(self, method, url, *args, **kwargs):
"""Make a request to the Ansible Tower API, and return the
response.
"""
# Piece together the full URL.
url = '%s%s' % (self.prefix, url.lstrip('/'))
# Ansible Tower expects authenticated requests; add the authentication
# from settings if it's provided.
kwargs.setdefault('auth', (settings.username, settings.password))
# POST and PUT requests will send JSON by default; make this
# the content_type by default. This makes it such that we don't have
# to constantly write that in our code, which gets repetitive.
headers = kwargs.get('headers', {})
if method.upper() in ('PATCH', 'POST', 'PUT'):
headers.setdefault('Content-Type', 'application/json')
kwargs['headers'] = headers
# If debugging is on, print the URL and data being sent.
debug.log('%s %s' % (method, url), fg='blue', bold=True)
if method in ('POST', 'PUT', 'PATCH'):
debug.log('Data: %s' % kwargs.get('data', {}),
fg='blue', bold=True)
if method == 'GET' or kwargs.get('params', None):
debug.log('Params: %s' % kwargs.get('params', {}),
fg='blue', bold=True)
debug.log('')
# If this is a JSON request, encode the data value.
if headers.get('Content-Type', '') == 'application/json':
kwargs['data'] = json.dumps(kwargs.get('data', {}))
        # Disable urllib3 warnings, but only when SSL verification has been
        # explicitly disabled (a verify_ssl of None still warns).
if (settings.verify_ssl is False) or hasattr(settings, 'insecure'):
urllib3.disable_warnings()
# Call the superclass method.
try:
with warnings.catch_warnings():
r = super(Client, self).request(method, url, *args,
verify=False, **kwargs)
except ConnectionError as ex:
if settings.verbose:
debug.log('Cannot connect to Tower:', fg='yellow', bold=True)
debug.log(str(ex), fg='yellow', bold=True, nl=2)
raise exc.ConnectionError(
'There was a network error of some kind trying to connect '
'to Tower.\n\nThe most common reason for this is a settings '
'issue; is your "host" value in `tower-cli config` correct?\n'
'Right now it is: "%s".' % settings.host
)
# Sanity check: Did the server send back some kind of internal error?
# If so, bubble this up.
if r.status_code >= 500:
raise exc.ServerError('The Tower server sent back a server error. '
'Please try again later.')
# Sanity check: Did we fail to authenticate properly?
# If so, fail out now; this is always a failure.
if r.status_code == 401:
raise exc.AuthError('Invalid Tower authentication credentials.')
# Sanity check: Did we get a forbidden response, which means that
# the user isn't allowed to do this? Report that.
if r.status_code == 403:
raise exc.Forbidden("You don't have permission to do that.")
# Sanity check: Did we get a 404 response?
        # Requests with primary keys will return a 404 if no matching record
        # exists, and we want to consistently trap these.
if r.status_code == 404:
raise exc.NotFound('The requested object could not be found.')
# Sanity check: Did we get a 405 response?
# A 405 means we used a method that isn't allowed. Usually this
# is a bad request, but it requires special treatment because the
# API sends it as a logic error in a few situations (e.g. trying to
# cancel a job that isn't running).
if r.status_code == 405:
raise exc.MethodNotAllowed(
"The Tower server says you can't make a request with the "
"%s method to that URL (%s)." % (method, url),
)
# Sanity check: Did we get some other kind of error?
# If so, write an appropriate error message.
if r.status_code >= 400:
raise exc.BadRequest(
'The Tower server claims it was sent a bad request.\n\n'
'%s %s\nParams: %s\nData: %s\n\nResponse: %s' %
(method, url, kwargs.get('params', None),
kwargs.get('data', None), r.content.decode('utf8'))
)
# Django REST Framework intelligently prints API keys in the
# order that they are defined in the models and serializer.
#
# We want to preserve this behavior when it is possible to do so
# with minimal effort, because while the order has no explicit meaning,
# we make some effort to order keys in a convenient manner.
#
# To this end, make this response into an APIResponse subclass
# (defined below), which has a `json` method that doesn't lose key
# order.
r.__class__ = APIResponse
# Return the response object.
return r
@property
@contextlib.contextmanager
def test_mode(self):
"""Replace the HTTP adapters with a fauxquests.FauxAdapter, which
will make the client into a faux client.
"""
# Import this here, because we don't want to require fauxquests
# in order for the app to work.
from fauxquests.adapter import FauxAdapter
with settings.runtime_values(host='20.12.4.21', username='meagan',
password='This is the best wine.',
verbose=False, format='json'):
adapters = copy.copy(self.adapters)
faux_adapter = FauxAdapter(
url_pattern=self.prefix.rstrip('/') + '%s',
)
try:
self.adapters.clear()
self.mount('https://', faux_adapter)
self.mount('http://', faux_adapter)
yield faux_adapter
finally:
self.adapters = adapters
class APIResponse(Response):
"""A Response subclass which preseves JSON key order (but makes no other
changes).
"""
def json(self, **kwargs):
kwargs.setdefault('object_pairs_hook', data_structures.OrderedDict)
return super(APIResponse, self).json(**kwargs)
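# Illustrative note (not part of the original module): the key-order
# preservation above relies on the stdlib json `object_pairs_hook`, e.g.:
#     import collections, json
#     json.loads('{"b": 1, "a": 2}',
#                object_pairs_hook=collections.OrderedDict)
#     # -> OrderedDict([('b', 1), ('a', 2)])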
client = Client()
|
|
# -*- coding: utf-8 -*-
import copy
from django.test.client import RequestFactory
from django.utils import translation
import six
from elasticsearch_dsl import Search
from mock import Mock, patch
from rest_framework import serializers
from olympia import amo
from olympia.amo.tests import TestCase
from olympia.constants.categories import CATEGORIES
from olympia.search.filters import (
ReviewedContentFilter, SearchParameterFilter, SearchQueryFilter,
SortingFilter)
class FilterTestsBase(TestCase):
# Base TestCase class - Does not need to inherit from ESTestCase as the
# queries will never actually be executed.
def setUp(self):
super(FilterTestsBase, self).setUp()
self.req = RequestFactory().get('/')
self.view_class = Mock()
def _filter(self, req=None, data=None):
req = req or RequestFactory().get('/', data=data or {})
queryset = Search()
for filter_class in self.filter_classes:
queryset = filter_class().filter_queryset(req, queryset,
self.view_class)
return queryset.to_dict()
class TestQueryFilter(FilterTestsBase):
filter_classes = [SearchQueryFilter]
def _test_q(self, qs):
# Spot check a few queries.
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'match_phrase': {
'name': {
'query': 'tea pot', 'boost': 8.0, 'slop': 1,
'_name': 'MatchPhrase(name)',
}
}
}
assert expected in should
expected = {
'prefix': {
'name': {
'boost': 3.0, 'value': 'tea pot',
'_name': 'Prefix(name)',
}
}
}
assert expected in should
expected = {
'match': {
'name_l10n_english': {
'query': 'tea pot', 'boost': 5.0,
'analyzer': 'english',
'operator': 'and',
'_name': 'Match(name_l10n_english)',
}
}
}
assert expected in should
expected = {
'multi_match': {
'_name': (
'MultiMatch(Match(summary),Match(summary_l10n_english))'),
'query': 'tea pot',
'operator': 'and',
'fields': ['summary', 'summary_l10n_english'],
'boost': 3.0,
}
}
assert expected in should
expected = {
'multi_match': {
'_name': (
'MultiMatch(Match(description),'
'Match(description_l10n_english))'),
'query': 'tea pot',
'operator': 'and',
'fields': ['description', 'description_l10n_english'],
'boost': 2.0,
}
}
assert expected in should
functions = qs['query']['function_score']['functions']
assert len(functions) == 2
assert functions[0] == {
'field_value_factor': {
'field': 'average_daily_users', 'modifier': 'log2p'
}
}
assert functions[1] == {
'filter': {
'bool': {
'must': [
{'term': {'is_experimental': False}},
{'terms': {'status': (4,)}},
{'exists': {'field': 'current_version'}},
{'term': {'is_disabled': False}}
]
}
},
'weight': 4.0
}
return qs
def test_no_rescore_if_not_sorting_by_relevance(self):
qs = self._test_q(
self._filter(data={'q': 'tea pot', 'sort': 'rating'}))
assert 'rescore' not in qs
def test_q(self):
qs = self._test_q(self._filter(data={'q': 'tea pot'}))
expected_rescore = {
'bool': {
'should': [
{
'multi_match': {
'_name': (
'MultiMatch(MatchPhrase(summary),'
'MatchPhrase(summary_l10n_english))'),
'query': 'tea pot',
'slop': 10,
'type': 'phrase',
'fields': ['summary', 'summary_l10n_english'],
'boost': 3.0,
},
},
{
'multi_match': {
'_name': (
'MultiMatch(MatchPhrase(description),'
'MatchPhrase(description_l10n_english))'),
'query': 'tea pot',
'slop': 10,
'type': 'phrase',
'fields': ['description',
'description_l10n_english'],
'boost': 2.0,
},
}
]
}
}
assert qs['rescore'] == {
'window_size': 10,
'query': {
'rescore_query': expected_rescore
}
}
def test_q_too_long(self):
with self.assertRaises(serializers.ValidationError):
self._filter(data={'q': 'a' * 101})
def test_fuzzy_single_word(self):
qs = self._filter(data={'q': 'blah'})
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'dis_max': {
'queries': [
{
'match': {
'name': {
'prefix_length': 2,
'query': 'blah',
'fuzziness': 'AUTO',
'minimum_should_match': '2<2 3<-25%',
}
}
},
{
'match': {
'name.trigrams': {
'query': 'blah',
'minimum_should_match': '66%',
}
}
},
],
'boost': 4.0,
'_name': 'DisMax(FuzzyMatch(name), Match(name.trigrams))'
}
}
assert expected in should
def test_fuzzy_multi_word(self):
qs = self._filter(data={'q': 'search terms'})
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'dis_max': {
'queries': [
{
'match': {
'name': {
'prefix_length': 2,
'query': 'search terms',
'fuzziness': 'AUTO',
'minimum_should_match': '2<2 3<-25%',
}
}
},
{
'match': {
'name.trigrams': {
'query': 'search terms',
'minimum_should_match': '66%',
}
}
},
],
'boost': 4.0,
'_name': 'DisMax(FuzzyMatch(name), Match(name.trigrams))'
}
}
assert expected in should
def test_no_fuzzy_if_query_too_long(self):
def do_test():
qs = self._filter(data={'q': 'this search query is too long.'})
should = qs['query']['function_score']['query']['bool']['should']
return should
# Make sure there is no fuzzy clause (the search query is too long).
should = do_test()
expected = {
'dis_max': {
'queries': [
{
'match': {
'name': {
'prefix_length': 2,
'query': 'this search query is too long.',
'fuzziness': 'AUTO',
'minimum_should_match': '2<2 3<-25%',
}
}
},
{
'match': {
'name.trigrams': {
'query': 'this search query is too long.',
'minimum_should_match': '66%',
}
}
},
],
'boost': 4.0,
'_name': 'DisMax(FuzzyMatch(name), Match(name.trigrams))'
}
}
assert expected not in should
# Re-do the same test but mocking the limit to a higher value, the
# fuzzy query should be present.
with patch.object(
SearchQueryFilter, 'MAX_QUERY_LENGTH_FOR_FUZZY_SEARCH', 100):
should = do_test()
assert expected in should
def test_q_exact(self):
qs = self._filter(data={'q': 'Adblock Plus'})
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'dis_max': {
'queries': [
{'term': {'name.raw': u'adblock plus'}},
{'term': {'name_l10n_english.raw': u'adblock plus'}},
],
'boost': 100.0,
'_name': 'DisMax(Term(name.raw), Term(name_l10n_english.raw))'
}
}
assert expected in should
# In a language we don't have a language-specific analyzer for, it
# should fall back to the "name.raw" field that uses the default locale
# translation.
with translation.override('mn'):
qs = self._filter(data={'q': 'Adblock Plus'})
should = qs['query']['function_score']['query']['bool']['should']
expected = {
'term': {
'name.raw': {
'boost': 100, 'value': u'adblock plus',
'_name': 'Term(name.raw)'
}
}
}
assert expected in should
class TestReviewedContentFilter(FilterTestsBase):
filter_classes = [ReviewedContentFilter]
def test_status(self):
qs = self._filter(self.req)
assert 'must' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'status': amo.REVIEWED_STATUSES}} in filter_
assert {'exists': {'field': 'current_version'}} in filter_
assert {'term': {'is_disabled': False}} in filter_
class TestSortingFilter(FilterTestsBase):
filter_classes = [SortingFilter]
def _reformat_order(self, key):
# elasticsearch-dsl transforms '-something' for us, so we have to
# expect the sort param in this format when we inspect the resulting
# queryset object.
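        # For example (sketch, not in the original tests):
        #     self._reformat_order('-weekly_downloads')
        #     # -> {'weekly_downloads': {'order': 'desc'}}
        #     self._reformat_order('_score')  # -> '_score'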
return {key[1:]: {'order': 'desc'}} if key.startswith('-') else key
def test_sort_default(self):
qs = self._filter(data={'q': 'something'})
assert qs['sort'] == [self._reformat_order('_score')]
qs = self._filter()
assert qs['sort'] == [self._reformat_order('-weekly_downloads')]
def test_sort_query(self):
SORTING_PARAMS = copy.copy(SortingFilter.SORTING_PARAMS)
SORTING_PARAMS.pop('random') # Tested separately below.
for param in SORTING_PARAMS:
qs = self._filter(data={'sort': param})
assert qs['sort'] == [self._reformat_order(SORTING_PARAMS[param])]
# Having a search query does not change anything, the requested sort
# takes precedence.
for param in SORTING_PARAMS:
qs = self._filter(data={'q': 'something', 'sort': param})
assert qs['sort'] == [self._reformat_order(SORTING_PARAMS[param])]
# If the sort query is wrong.
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'sort': 'WRONGLOL'})
assert context.exception.detail == ['Invalid "sort" parameter.']
# Same as above but with a search query.
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'q': 'something', 'sort': 'WRONGLOL'})
assert context.exception.detail == ['Invalid "sort" parameter.']
def test_sort_query_multiple(self):
qs = self._filter(data={'sort': ['rating,created']})
assert qs['sort'] == [self._reformat_order('-bayesian_rating'),
self._reformat_order('-created')]
qs = self._filter(data={'sort': 'created,rating'})
assert qs['sort'] == [self._reformat_order('-created'),
self._reformat_order('-bayesian_rating')]
# If the sort query is wrong.
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'sort': ['LOLWRONG,created']})
assert context.exception.detail == ['Invalid "sort" parameter.']
def test_cant_combine_sorts_with_random(self):
expected = 'The "random" "sort" parameter can not be combined.'
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'sort': ['rating,random']})
assert context.exception.detail == [expected]
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'sort': 'random,created'})
assert context.exception.detail == [expected]
def test_sort_random_restrictions(self):
expected = ('The "sort" parameter "random" can only be specified when '
'the "featured" parameter is also present, and the "q" '
'parameter absent.')
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'q': 'something', 'sort': 'random'})
assert context.exception.detail == [expected]
with self.assertRaises(serializers.ValidationError) as context:
self._filter(
data={'q': 'something', 'featured': 'true', 'sort': 'random'})
assert context.exception.detail == [expected]
def test_sort_random(self):
qs = self._filter(data={'featured': 'true', 'sort': 'random'})
# Note: this test does not call AddonFeaturedQueryParam so it won't
# apply the featured filtering. That's tested below in
# TestCombinedFilter.test_filter_featured_sort_random
assert qs['sort'] == ['_score']
assert qs['query']['function_score']['functions'] == [
{'random_score': {}}
]
class TestSearchParameterFilter(FilterTestsBase):
filter_classes = [SearchParameterFilter]
def test_search_by_type_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(
data={'type': six.text_type(amo.ADDON_EXTENSION + 666)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'type': 'nosuchtype'})
assert context.exception.detail == ['Invalid "type" parameter.']
def test_search_by_type_id(self):
qs = self._filter(data={'type': six.text_type(amo.ADDON_EXTENSION)})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'type': [amo.ADDON_EXTENSION]}} in filter_
qs = self._filter(data={'type': six.text_type(amo.ADDON_PERSONA)})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'type': [amo.ADDON_PERSONA]}} in filter_
def test_search_by_type_string(self):
qs = self._filter(data={'type': 'extension'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'type': [amo.ADDON_EXTENSION]}} in filter_
qs = self._filter(data={'type': 'persona'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'type': [amo.ADDON_PERSONA]}} in filter_
qs = self._filter(data={'type': 'persona,extension'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert (
{'terms': {'type': [amo.ADDON_PERSONA, amo.ADDON_EXTENSION]}}
in filter_)
def test_search_by_app_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'app': six.text_type(amo.FIREFOX.id + 666)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'app': 'nosuchapp'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_app_id(self):
qs = self._filter(data={'app': six.text_type(amo.FIREFOX.id)})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'app': amo.FIREFOX.id}} in filter_
qs = self._filter(data={'app': six.text_type(amo.ANDROID.id)})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'app': amo.ANDROID.id}} in filter_
def test_search_by_app_string(self):
qs = self._filter(data={'app': 'firefox'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'app': amo.FIREFOX.id}} in filter_
qs = self._filter(data={'app': 'android'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'app': amo.ANDROID.id}} in filter_
def test_search_by_appversion_app_missing(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'appversion': '46.0'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_appversion_app_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'appversion': '46.0',
'app': 'internet_explorer'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_appversion_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'appversion': 'not_a_version',
'app': 'firefox'})
assert context.exception.detail == ['Invalid "appversion" parameter.']
def test_search_by_appversion(self):
qs = self._filter(data={'appversion': '46.0',
'app': 'firefox'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'app': amo.FIREFOX.id}} in filter_
assert {'range': {'current_version.compatible_apps.1.min':
{'lte': 46000000200100}}} in filter_
assert {'range': {'current_version.compatible_apps.1.max':
{'gte': 46000000000100}}} in filter_
def test_search_by_platform_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(
data={'platform': six.text_type(amo.PLATFORM_WIN.id + 42)})
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'platform': 'nosuchplatform'})
assert context.exception.detail == ['Invalid "platform" parameter.']
def test_search_by_platform_id(self):
qs = self._filter(
data={'platform': six.text_type(amo.PLATFORM_WIN.id)})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_WIN.id, amo.PLATFORM_ALL.id]}} in filter_
qs = self._filter(
data={'platform': six.text_type(amo.PLATFORM_LINUX.id)})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_LINUX.id, amo.PLATFORM_ALL.id]}} in filter_
def test_search_by_platform_string(self):
qs = self._filter(data={'platform': 'windows'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_WIN.id, amo.PLATFORM_ALL.id]}} in filter_
qs = self._filter(data={'platform': 'win'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_WIN.id, amo.PLATFORM_ALL.id]}} in filter_
qs = self._filter(data={'platform': 'darwin'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_MAC.id, amo.PLATFORM_ALL.id]}} in filter_
qs = self._filter(data={'platform': 'mac'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_MAC.id, amo.PLATFORM_ALL.id]}} in filter_
qs = self._filter(data={'platform': 'macosx'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_MAC.id, amo.PLATFORM_ALL.id]}} in filter_
qs = self._filter(data={'platform': 'linux'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'platforms': [
amo.PLATFORM_LINUX.id, amo.PLATFORM_ALL.id]}} in filter_
def test_search_by_category_slug_no_app_or_type(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'category': 'other'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_category_id_no_app_or_type(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'category': 1})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_category_slug(self):
category = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]['other']
qs = self._filter(data={
'category': 'other',
'app': 'firefox',
'type': 'extension'
})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'category': [category.id]}} in filter_
def test_search_by_category_slug_multiple_types(self):
category_a = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]['other']
category_b = CATEGORIES[amo.FIREFOX.id][amo.ADDON_PERSONA]['other']
qs = self._filter(data={
'category': 'other',
'app': 'firefox',
'type': 'extension,persona'
})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert (
{'terms': {'category': [category_a.id, category_b.id]}} in filter_)
def test_search_by_category_id(self):
qs = self._filter(data={
'category': 1,
'app': 'firefox',
'type': 'extension'
})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'terms': {'category': [1]}} in filter_
def test_search_by_category_invalid(self):
with self.assertRaises(serializers.ValidationError) as context:
self._filter(
data={'category': 666, 'app': 'firefox', 'type': 'extension'})
assert context.exception.detail == ['Invalid "category" parameter.']
def test_search_by_tag(self):
qs = self._filter(data={'tag': 'foo'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'tags': 'foo'}} in filter_
qs = self._filter(data={'tag': 'foo,bar'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'tags': 'foo'}} in filter_
assert {'term': {'tags': 'bar'}} in filter_
def test_search_by_author(self):
qs = self._filter(data={'author': 'fooBar'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
should = filter_[0]['bool']['should']
assert {'terms': {'listed_authors.id': []}} in should
assert {'terms': {'listed_authors.username': ['fooBar']}} in should
qs = self._filter(data={'author': 'foo,bar'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
should = filter_[0]['bool']['should']
assert {'terms': {'listed_authors.id': []}} in should
assert {'terms': {'listed_authors.username': ['foo', 'bar']}} in should
qs = self._filter(data={'author': '123,456'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
should = filter_[0]['bool']['should']
assert {'terms': {'listed_authors.id': ['123', '456']}} in should
assert {'terms': {'listed_authors.username': []}} in should
qs = self._filter(data={'author': '123,bar'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
should = filter_[0]['bool']['should']
assert {'terms': {'listed_authors.id': ['123']}} in should
assert {'terms': {'listed_authors.username': ['bar']}} in should
def test_exclude_addons(self):
qs = self._filter(data={'exclude_addons': 'fooBar'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
# We've got another bool query inside our filter to handle the
# must_not here.
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
assert 'must' not in filter_[0]['bool']
must_not = filter_[0]['bool']['must_not']
assert must_not == [{'terms': {'slug': [u'fooBar']}}]
qs = self._filter(data={'exclude_addons': 1})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
assert 'must' not in filter_[0]['bool']
must_not = filter_[0]['bool']['must_not']
assert must_not == [{'ids': {'values': [u'1']}}]
qs = self._filter(data={'exclude_addons': 'fooBar,1'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
# elasticsearch-dsl seems to separate our 2 bool clauses instead of
# keeping them together. It might be a byproduct of using
# nested+filter. The resulting query is ugly but it should still work,
# and it's an edge-case anyway, usually clients won't pass 2 different
# types of identifiers.
assert len(filter_) == 2
assert 'must' not in filter_[0]['bool']
assert 'must' not in filter_[1]['bool']
must_not = filter_[0]['bool']['must_not']
assert {'ids': {'values': [u'1']}} in must_not
must_not = filter_[1]['bool']['must_not']
assert {'terms': {'slug': [u'fooBar']}} in must_not
def test_search_by_featured_no_app_no_locale(self):
qs = self._filter(data={'featured': 'true'})
assert 'must' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert {'term': {'is_featured': True}} in filter_
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'featured': 'false'})
assert context.exception.detail == ['Invalid "featured" parameter.']
def test_search_by_featured_yes_app_no_locale(self):
qs = self._filter(data={'featured': 'true', 'app': 'firefox'})
assert 'must' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 2
assert filter_[0] == {'term': {'app': amo.FIREFOX.id}}
inner = filter_[1]['nested']['query']['bool']['filter']
assert len(inner) == 1
assert {'term': {'featured_for.application': amo.FIREFOX.id}} in inner
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'featured': 'true', 'app': 'foobaa'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_featured_yes_app_yes_locale(self):
qs = self._filter(data={'featured': 'true', 'app': 'firefox',
'lang': 'fr'})
assert 'must' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 2
assert filter_[0] == {'term': {'app': amo.FIREFOX.id}}
inner = filter_[1]['nested']['query']['bool']['filter']
assert len(inner) == 2
assert {'term': {'featured_for.application': amo.FIREFOX.id}} in inner
assert {'terms': {'featured_for.locales': ['fr', 'ALL']}} in inner
with self.assertRaises(serializers.ValidationError) as context:
self._filter(data={'featured': 'true', 'app': 'foobaa'})
assert context.exception.detail == ['Invalid "app" parameter.']
def test_search_by_featured_no_app_yes_locale(self):
qs = self._filter(data={'featured': 'true', 'lang': 'fr'})
assert 'must' not in qs['query']['bool']
assert 'must_not' not in qs['query']['bool']
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 1
assert {'terms': {'featured_for.locales': ['fr', 'ALL']}} in inner
def test_search_by_color(self):
qs = self._filter(data={'color': 'ff0000'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 4
assert inner == [
{'range': {'colors.s': {'gt': 6.375}}},
{'range': {'colors.l': {'gt': 12.75, 'lt': 249.9}}},
{'bool': {'should': [
{'range': {'colors.h': {'gte': 229}}},
{'range': {'colors.h': {'lte': 26}}}]}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
qs = self._filter(data={'color': '703839'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 4
assert inner == [
{'range': {'colors.s': {'gt': 6.375}}},
{'range': {'colors.l': {'gt': 12.75, 'lt': 249.9}}},
{'bool': {'should': [
{'range': {'colors.h': {'gte': 228}}},
{'range': {'colors.h': {'lte': 25}}}]}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
qs = self._filter(data={'color': '#00ffff'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 4
assert inner == [
{'range': {'colors.s': {'gt': 6.375}}},
{'range': {'colors.l': {'gt': 12.75, 'lt': 249.9}}},
{'range': {'colors.h': {'gte': 101, 'lte': 153}}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
def test_search_by_color_grey(self):
qs = self._filter(data={'color': '#f6f6f6'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 3
assert inner == [
{'range': {'colors.s': {'lte': 6.375}}},
{'range': {'colors.l': {'gte': 182, 'lte': 255}}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
qs = self._filter(data={'color': '333'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 3
assert inner == [
{'range': {'colors.s': {'lte': 6.375}}},
{'range': {'colors.l': {'gte': 0, 'lte': 115}}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
def test_search_by_color_luminosity_extremes(self):
qs = self._filter(data={'color': '080603'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 2
assert inner == [
{'range': {'colors.l': {'lte': 12.75}}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
qs = self._filter(data={'color': 'FEFDFB'})
filter_ = qs['query']['bool']['filter']
assert len(filter_) == 1
inner = filter_[0]['nested']['query']['bool']['filter']
assert len(inner) == 2
assert inner == [
{'range': {'colors.l': {'gte': 249.9}}},
{'range': {'colors.ratio': {'gte': 0.25}}},
]
class TestCombinedFilter(FilterTestsBase):
"""
Basic test to ensure that when filters are combined they result in the
expected query structure.
"""
filter_classes = [SearchQueryFilter, ReviewedContentFilter, SortingFilter]
def test_combined(self):
qs = self._filter(data={'q': 'test'})
bool_ = qs['query']['bool']
assert 'must_not' not in bool_
filter_ = bool_['filter']
assert {'terms': {'status': amo.REVIEWED_STATUSES}} in filter_
assert {'exists': {'field': 'current_version'}} in filter_
assert {'term': {'is_disabled': False}} in filter_
assert qs['sort'] == ['_score']
should = bool_['must'][0]['function_score']['query']['bool']['should']
expected = {
'match': {
'name_l10n_english': {
'analyzer': 'english', 'boost': 5.0, 'query': u'test',
'operator': 'and', '_name': 'Match(name_l10n_english)',
}
}
}
assert expected in should
def test_filter_featured_sort_random(self):
qs = self._filter(data={'featured': 'true', 'sort': 'random'})
bool_ = qs['query']['bool']
assert 'must_not' not in bool_
filter_ = bool_['filter']
assert {'terms': {'status': amo.REVIEWED_STATUSES}} in filter_
assert {'exists': {'field': 'current_version'}} in filter_
assert {'term': {'is_disabled': False}} in filter_
assert qs['sort'] == ['_score']
assert bool_['must'][0]['function_score']['functions'] == [
{'random_score': {}}
]
|
|
"""Compatibility fixes for older version of python, numpy and scipy
If you add content to this file, please give the version of the package
at which the fix is no longer needed.
# XXX : originally copied from scikit-learn
"""
# Authors: Emmanuelle Gouillart <[email protected]>
# Gael Varoquaux <[email protected]>
# Fabian Pedregosa <[email protected]>
# Lars Buitinck <[email protected]>
# License: BSD
from __future__ import division
import inspect
import re
import warnings
import numpy as np
from scipy import linalg
###############################################################################
# Misc
# helpers to get function arguments
if hasattr(inspect, 'signature'): # py35
def _get_args(function, varargs=False):
params = inspect.signature(function).parameters
args = [key for key, param in params.items()
if param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)]
if varargs:
varargs = [param.name for param in params.values()
if param.kind == param.VAR_POSITIONAL]
if len(varargs) == 0:
varargs = None
return args, varargs
else:
return args
else:
def _get_args(function, varargs=False):
out = inspect.getargspec(function) # args, varargs, keywords, defaults
if varargs:
return out[:2]
else:
return out[0]
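# Illustrative sketch (assumption, not in the original module): for
#     def _example(a, b, *args, **kwargs): pass
# the Python 3.5+ branch gives
#     _get_args(_example)                -> ['a', 'b']
#     _get_args(_example, varargs=True)  -> (['a', 'b'], ['args'])
# while the getargspec fallback returns the *args name as a string instead
# of a list.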
def _safe_svd(A, **kwargs):
"""Wrapper to get around the SVD did not converge error of death"""
# Intel has a bug with their GESVD driver:
# https://software.intel.com/en-us/forums/intel-distribution-for-python/topic/628049 # noqa
# For SciPy 0.18 and up, we can work around it by using
# lapack_driver='gesvd' instead.
if kwargs.get('overwrite_a', False):
raise ValueError('Cannot set overwrite_a=True with this function')
try:
return linalg.svd(A, **kwargs)
except np.linalg.LinAlgError as exp:
from .utils import warn
if 'lapack_driver' in _get_args(linalg.svd):
warn('SVD error (%s), attempting to use GESVD instead of GESDD'
% (exp,))
return linalg.svd(A, lapack_driver='gesvd', **kwargs)
else:
raise
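# Illustrative usage (sketch, not in the original module): a typical call is
#     U, s, Vt = _safe_svd(A, full_matrices=False)
# The GESVD retry above only triggers when the installed SciPy exposes the
# `lapack_driver` keyword (SciPy >= 0.18).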
###############################################################################
# Back porting scipy.signal.sosfilt (0.17) and sosfiltfilt (0.18)
def _sosfiltfilt(sos, x, axis=-1, padtype='odd', padlen=None):
"""copy of SciPy sosfiltfilt"""
sos, n_sections = _validate_sos(sos)
# `method` is "pad"...
ntaps = 2 * n_sections + 1
ntaps -= min((sos[:, 2] == 0).sum(), (sos[:, 5] == 0).sum())
edge, ext = _validate_pad(padtype, padlen, x, axis,
ntaps=ntaps)
# These steps follow the same form as filtfilt with modifications
zi = sosfilt_zi(sos) # shape (n_sections, 2) --> (n_sections, ..., 2, ...)
zi_shape = [1] * x.ndim
zi_shape[axis] = 2
zi.shape = [n_sections] + zi_shape
x_0 = axis_slice(ext, stop=1, axis=axis)
(y, zf) = sosfilt(sos, ext, axis=axis, zi=zi * x_0)
y_0 = axis_slice(y, start=-1, axis=axis)
(y, zf) = sosfilt(sos, axis_reverse(y, axis=axis), axis=axis, zi=zi * y_0)
y = axis_reverse(y, axis=axis)
if edge > 0:
y = axis_slice(y, start=edge, stop=-edge, axis=axis)
return y
def axis_slice(a, start=None, stop=None, step=None, axis=-1):
"""Take a slice along axis 'axis' from 'a'"""
a_slice = [slice(None)] * a.ndim
a_slice[axis] = slice(start, stop, step)
    b = a[tuple(a_slice)]  # index with a tuple; required by modern NumPy
return b
def axis_reverse(a, axis=-1):
"""Reverse the 1-d slices of `a` along axis `axis`."""
return axis_slice(a, step=-1, axis=axis)
def _validate_pad(padtype, padlen, x, axis, ntaps):
"""Helper to validate padding for filtfilt"""
if padtype not in ['even', 'odd', 'constant', None]:
raise ValueError(("Unknown value '%s' given to padtype. padtype "
"must be 'even', 'odd', 'constant', or None.") %
padtype)
if padtype is None:
padlen = 0
if padlen is None:
# Original padding; preserved for backwards compatibility.
edge = ntaps * 3
else:
edge = padlen
# x's 'axis' dimension must be bigger than edge.
if x.shape[axis] <= edge:
raise ValueError("The length of the input vector x must be at least "
"padlen, which is %d." % edge)
if padtype is not None and edge > 0:
# Make an extension of length `edge` at each
# end of the input array.
if padtype == 'even':
ext = even_ext(x, edge, axis=axis)
elif padtype == 'odd':
ext = odd_ext(x, edge, axis=axis)
else:
ext = const_ext(x, edge, axis=axis)
else:
ext = x
return edge, ext
def _validate_sos(sos):
"""Helper to validate a SOS input"""
sos = np.atleast_2d(sos)
if sos.ndim != 2:
raise ValueError('sos array must be 2D')
n_sections, m = sos.shape
if m != 6:
raise ValueError('sos array must be shape (n_sections, 6)')
if not (sos[:, 3] == 1).all():
raise ValueError('sos[:, 3] should be all ones')
return sos, n_sections
def odd_ext(x, n, axis=-1):
"""Generate a new ndarray by making an odd extension of x along an axis."""
if n < 1:
return x
if n > x.shape[axis] - 1:
raise ValueError(("The extension length n (%d) is too big. " +
"It must not exceed x.shape[axis]-1, which is %d.")
% (n, x.shape[axis] - 1))
left_end = axis_slice(x, start=0, stop=1, axis=axis)
left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis)
right_end = axis_slice(x, start=-1, axis=axis)
right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis)
ext = np.concatenate((2 * left_end - left_ext,
x,
2 * right_end - right_ext),
axis=axis)
return ext
def even_ext(x, n, axis=-1):
"""Create an ndarray that is an even extension of x along an axis."""
if n < 1:
return x
if n > x.shape[axis] - 1:
raise ValueError(("The extension length n (%d) is too big. " +
"It must not exceed x.shape[axis]-1, which is %d.")
% (n, x.shape[axis] - 1))
left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis)
right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis)
ext = np.concatenate((left_ext,
x,
right_ext),
axis=axis)
return ext
def const_ext(x, n, axis=-1):
"""Create an ndarray that is a constant extension of x along an axis"""
if n < 1:
return x
left_end = axis_slice(x, start=0, stop=1, axis=axis)
ones_shape = [1] * x.ndim
ones_shape[axis] = n
ones = np.ones(ones_shape, dtype=x.dtype)
left_ext = ones * left_end
right_end = axis_slice(x, start=-1, axis=axis)
right_ext = ones * right_end
ext = np.concatenate((left_ext,
x,
right_ext),
axis=axis)
return ext
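# Illustrative behaviour of the extension helpers (sketch, not in the
# original module), for x = np.array([1, 2, 3, 4]) and n = 2:
#     odd_ext(x, 2)   -> [-1,  0, 1, 2, 3, 4, 5, 6]
#     even_ext(x, 2)  -> [ 3,  2, 1, 2, 3, 4, 3, 2]
#     const_ext(x, 2) -> [ 1,  1, 1, 2, 3, 4, 4, 4]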
def sosfilt_zi(sos):
"""Compute an initial state `zi` for the sosfilt function"""
from scipy.signal import lfilter_zi
sos = np.asarray(sos)
if sos.ndim != 2 or sos.shape[1] != 6:
raise ValueError('sos must be shape (n_sections, 6)')
n_sections = sos.shape[0]
zi = np.empty((n_sections, 2))
scale = 1.0
for section in range(n_sections):
b = sos[section, :3]
a = sos[section, 3:]
zi[section] = scale * lfilter_zi(b, a)
# If H(z) = B(z)/A(z) is this section's transfer function, then
# b.sum()/a.sum() is H(1), the gain at omega=0. That's the steady
# state value of this section's step response.
scale *= b.sum() / a.sum()
return zi
def sosfilt(sos, x, axis=-1, zi=None):
"""Filter data along one dimension using cascaded second-order sections"""
from scipy.signal import lfilter
x = np.asarray(x)
sos = np.atleast_2d(sos)
if sos.ndim != 2:
raise ValueError('sos array must be 2D')
n_sections, m = sos.shape
if m != 6:
raise ValueError('sos array must be shape (n_sections, 6)')
use_zi = zi is not None
if use_zi:
zi = np.asarray(zi)
x_zi_shape = list(x.shape)
x_zi_shape[axis] = 2
x_zi_shape = tuple([n_sections] + x_zi_shape)
if zi.shape != x_zi_shape:
raise ValueError('Invalid zi shape. With axis=%r, an input with '
'shape %r, and an sos array with %d sections, zi '
'must have shape %r.' %
(axis, x.shape, n_sections, x_zi_shape))
zf = np.zeros_like(zi)
for section in range(n_sections):
if use_zi:
x, zf[section] = lfilter(sos[section, :3], sos[section, 3:],
x, axis, zi=zi[section])
else:
x = lfilter(sos[section, :3], sos[section, 3:], x, axis)
out = (x, zf) if use_zi else x
return out
def get_sosfiltfilt():
"""Helper to get sosfiltfilt from scipy"""
try:
from scipy.signal import sosfiltfilt
except ImportError:
sosfiltfilt = _sosfiltfilt
return sosfiltfilt
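# Illustrative usage (sketch, not in the original module; assumes a SciPy with
# `butter(..., output='sos')`):
#     from scipy.signal import butter
#     sosfiltfilt = get_sosfiltfilt()        # SciPy's own or the backport above
#     sos = butter(4, 0.125, output='sos')   # 4th-order low-pass design
#     y = sosfiltfilt(sos, np.random.randn(1000))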
###############################################################################
# Misc utilities
def assert_true(expr, msg='False is not True'):
"""Fake assert_true without message"""
if not expr:
raise AssertionError(msg)
def assert_is(expr1, expr2, msg=None):
"""Fake assert_is without message"""
    assert_true(expr1 is expr2, msg)
def assert_is_not(expr1, expr2, msg=None):
"""Fake assert_is_not without message"""
assert_true(expr1 is not expr2, msg)
assert_raises_regex_impl = None
# from numpy 1.9.1
def assert_raises_regex(exception_class, expected_regexp,
callable_obj=None, *args, **kwargs):
"""
Fail unless an exception of class exception_class and with message that
matches expected_regexp is thrown by callable when invoked with arguments
args and keyword arguments kwargs.
Name of this function adheres to Python 3.2+ reference, but should work in
all versions down to 2.6.
"""
__tracebackhide__ = True # Hide traceback for py.test
import nose
global assert_raises_regex_impl
if assert_raises_regex_impl is None:
try:
# Python 3.2+
assert_raises_regex_impl = nose.tools.assert_raises_regex
except AttributeError:
try:
# 2.7+
assert_raises_regex_impl = nose.tools.assert_raises_regexp
except AttributeError:
# 2.6
# This class is copied from Python2.7 stdlib almost verbatim
class _AssertRaisesContext(object):
def __init__(self, expected, expected_regexp=None):
self.expected = expected
self.expected_regexp = expected_regexp
def failureException(self, msg):
return AssertionError(msg)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException(
'"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
def impl(cls, regex, callable_obj, *a, **kw):
mgr = _AssertRaisesContext(cls, regex)
if callable_obj is None:
return mgr
with mgr:
callable_obj(*a, **kw)
assert_raises_regex_impl = impl
return assert_raises_regex_impl(exception_class, expected_regexp,
callable_obj, *args, **kwargs)
def _read_volume_info(fobj):
"""An implementation of nibabel.freesurfer.io._read_volume_info, since old
versions of nibabel (<=2.1.0) don't have it.
"""
volume_info = dict()
head = np.fromfile(fobj, '>i4', 1)
    if not np.array_equal(head, [20]):  # Read two more int32 values
head = np.concatenate([head, np.fromfile(fobj, '>i4', 2)])
if not np.array_equal(head, [2, 0, 20]):
warnings.warn("Unknown extension code.")
return volume_info
volume_info['head'] = head
for key in ['valid', 'filename', 'volume', 'voxelsize', 'xras', 'yras',
'zras', 'cras']:
pair = fobj.readline().decode('utf-8').split('=')
if pair[0].strip() != key or len(pair) != 2:
raise IOError('Error parsing volume info.')
if key in ('valid', 'filename'):
volume_info[key] = pair[1].strip()
elif key == 'volume':
volume_info[key] = np.array(pair[1].split()).astype(int)
else:
volume_info[key] = np.array(pair[1].split()).astype(float)
# Ignore the rest
return volume_info
def _serialize_volume_info(volume_info):
"""An implementation of nibabel.freesurfer.io._serialize_volume_info, since
old versions of nibabel (<=2.1.0) don't have it."""
keys = ['head', 'valid', 'filename', 'volume', 'voxelsize', 'xras', 'yras',
'zras', 'cras']
diff = set(volume_info.keys()).difference(keys)
if len(diff) > 0:
raise ValueError('Invalid volume info: %s.' % diff.pop())
strings = list()
for key in keys:
if key == 'head':
if not (np.array_equal(volume_info[key], [20]) or np.array_equal(
volume_info[key], [2, 0, 20])):
warnings.warn("Unknown extension code.")
strings.append(np.array(volume_info[key], dtype='>i4').tostring())
elif key in ('valid', 'filename'):
val = volume_info[key]
strings.append('{0} = {1}\n'.format(key, val).encode('utf-8'))
elif key == 'volume':
val = volume_info[key]
strings.append('{0} = {1} {2} {3}\n'.format(
key, val[0], val[1], val[2]).encode('utf-8'))
else:
val = volume_info[key]
strings.append('{0} = {1:0.10g} {2:0.10g} {3:0.10g}\n'.format(
key.ljust(6), val[0], val[1], val[2]).encode('utf-8'))
return b''.join(strings)
|
|
"""
Copyright 2015 Hewlett-Packard
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
import falcon
from freezer_api.api.common import resource
from freezer_api.common import exceptions as freezer_api_exc
from freezer_api import policy
class SessionsCollectionResource(resource.BaseResource):
"""
Handler for endpoint: /v1/sessions
"""
def __init__(self, storage_driver):
self.db = storage_driver
@policy.enforce('sessions:get_all')
def on_get(self, req, resp):
# GET /v1/sessions(?limit,offset) Lists sessions
user_id = req.get_header('X-User-ID')
offset = req.get_param_as_int('offset') or 0
limit = req.get_param_as_int('limit') or 10
search = self.json_body(req)
obj_list = self.db.search_session(user_id=user_id, offset=offset,
limit=limit, search=search)
resp.body = {'sessions': obj_list}
@policy.enforce('sessions:create')
def on_post(self, req, resp):
# POST /v1/sessions Creates session entry
doc = self.json_body(req)
if not doc:
raise freezer_api_exc.BadDataFormat(
message='Missing request body')
user_id = req.get_header('X-User-ID')
session_id = self.db.add_session(user_id=user_id, doc=doc)
resp.status = falcon.HTTP_201
resp.body = {'session_id': session_id}
class SessionsResource(resource.BaseResource):
"""
Handler for endpoint: /v1/sessions/{session_id}
"""
def __init__(self, storage_driver):
self.db = storage_driver
@policy.enforce('sessions:get')
def on_get(self, req, resp, session_id):
# GET /v1/sessions/{session_id} retrieves the specified session
# search in body
user_id = req.get_header('X-User-ID') or ''
obj = self.db.get_session(user_id=user_id, session_id=session_id)
if obj:
resp.body = obj
else:
resp.status = falcon.HTTP_404
@policy.enforce('sessions:delete')
def on_delete(self, req, resp, session_id):
# DELETE /v1/sessions/{session_id} Deletes the specified session
user_id = req.get_header('X-User-ID')
obj = self.db.get_session(user_id=user_id,
session_id=session_id)
if not obj:
raise freezer_api_exc.DocumentNotFound(
message='No session found with ID:{0}'.
format(session_id))
else:
self.db.delete_session(user_id=user_id, session_id=session_id)
resp.body = {'session_id': session_id}
resp.status = falcon.HTTP_204
@policy.enforce('sessions:update')
def on_patch(self, req, resp, session_id):
# PATCH /v1/sessions/{session_id} updates the specified session
user_id = req.get_header('X-User-ID') or ''
doc = self.json_body(req)
new_version = self.db.update_session(user_id=user_id,
session_id=session_id,
patch_doc=doc)
resp.body = {'session_id': session_id, 'version': new_version}
@policy.enforce('sessions:replace')
def on_post(self, req, resp, session_id):
# PUT /v1/sessions/{session_id} creates/replaces the specified session
user_id = req.get_header('X-User-ID') or ''
doc = self.json_body(req)
if not doc:
raise freezer_api_exc.BadDataFormat(
message='Missing request body')
new_version = self.db.replace_session(user_id=user_id,
session_id=session_id,
doc=doc)
resp.status = falcon.HTTP_201
resp.body = {'session_id': session_id, 'version': new_version}
class SessionsAction(resource.BaseResource):
"""
Handler for endpoint: /v1/sessions/{session_id}/action
"""
def __init__(self, storage_driver):
self.db = storage_driver
@policy.enforce('sessions:action:create')
def on_post(self, req, resp, session_id):
# POST /v1/sessions/{session_id}/action
# executes an action on the specified session
user_id = req.get_header('X-User-ID') or ''
doc = self.json_body(req)
try:
action, params = next(iter(doc.items()))
except Exception:
raise freezer_api_exc.BadDataFormat("Bad action request format")
session_doc = self.db.get_session(user_id=user_id,
session_id=session_id)
session = Session(session_doc)
session.execute_action(action, params)
if session.need_update:
self.db.update_session(user_id=user_id,
session_id=session_id,
patch_doc=session.doc)
resp.status = falcon.HTTP_202
resp.body = {'result': session.action_result,
'session_tag': session.session_tag}
class Session(resource.BaseResource):
"""
A class to manage the actions that can be taken upon a
Session data structure.
    It modifies the information contained in its document
    in accordance with the requested action.
"""
def __init__(self, doc):
self.doc = doc
self.action_result = ''
self.need_update = False
@property
def session_tag(self):
return int(self.doc.get('session_tag', 0))
@session_tag.setter
def session_tag(self, value):
self.doc['session_tag'] = int(value)
def execute_action(self, action, params):
if action == 'start':
try:
self.start(params['job_id'], params['current_tag'])
except freezer_api_exc.BadDataFormat:
raise
except Exception as e:
raise freezer_api_exc.FreezerAPIException(e)
elif action == 'end':
try:
self.end(params['job_id'], params['result'])
except freezer_api_exc.BadDataFormat:
raise
except Exception as e:
raise freezer_api_exc.FreezerAPIException(e)
else:
raise freezer_api_exc.MethodNotImplemented("Bad Action Method")
def end(self, job_id, result):
"""
Apply the 'end' action to the session object
If the request can be accepted it modifies the relevant fields
and sets the need_update member to notify that the stored
document needs to be updated
"""
now = int(time.time())
self.set_job_end(job_id, result, now)
new_result = self.get_job_overall_result()
if self.doc.get('status', '') != 'completed':
if new_result in ['fail', 'success']:
self.doc['time_end'] = now
self.doc['result'] = new_result
self.doc['status'] = 'completed'
self.action_result = 'success'
self.need_update = True
def start(self, job_id, job_tag):
"""
Apply the 'start' action to the session object
If the request can be accepted it modifies the relevant fields
and sets the need_update member to notify that the stored
document needs to be updated
"""
job_tag = int(job_tag)
self.session_tag = int(self.session_tag)
now = int(time.time())
time_since_last_start = now - self.doc.get('time_start', 0)
if job_tag > self.session_tag:
raise freezer_api_exc.BadDataFormat(
'requested tag value too high. Session Tag: {0} '
'Job Tag: {1}'.format(self.session_tag, job_tag))
if time_since_last_start <= self.doc.get('hold_off', 60):
# session has been started not so long ago
# tag increments are not allowed during hold_off
if job_tag < self.session_tag:
self.action_result = 'success'
self.set_job_start(job_id, now)
self.need_update = True
else:
self.action_result = 'hold-off'
self.need_update = False
elif time_since_last_start > self.doc.get('hold_off', 60):
# out of hold_off window:
# - ok to trigger new action start (job_tag == session_tag)
# if job_tag < session_tag client is probably out-of-sync
if self.session_tag == job_tag:
self.session_tag += 1
self.doc['time_start'] = now
self.doc['status'] = 'running'
self.doc['result'] = ''
self.action_result = 'success'
self.set_job_start(job_id, now)
self.need_update = True
else:
self.action_result = 'out-of-sync'
self.need_update = False
def get_job_overall_result(self):
"""
check the status of all the jobs and return the overall session result
"""
for job in self.doc['jobs'].values():
if job['status'] != 'completed':
return 'running'
if job['result'] != 'success':
return 'fail'
return 'success'
def set_job_end(self, job_id, result, timestamp):
try:
job = self.doc['jobs'][job_id]
except Exception:
raise freezer_api_exc.BadDataFormat('job_id not found in session')
job['status'] = 'completed'
job['result'] = result
job['time_ended'] = timestamp
def set_job_start(self, job_id, timestamp):
try:
job = self.doc['jobs'][job_id]
except Exception:
raise freezer_api_exc.BadDataFormat('job_id not found in session')
job['status'] = 'running'
job['result'] = ''
job['time_started'] = timestamp
class SessionsJob(resource.BaseResource):
"""
Handler for endpoint: /v1/sessions/{session_id}/jobs/{job_id}
"""
def __init__(self, storage_driver):
self.db = storage_driver
@policy.enforce('sessions:job:add')
def on_put(self, req, resp, session_id, job_id):
"""
add a job to a session
:param req:
:param resp:
:param session_id:
:param job_id:
:return:
"""
user_id = req.get_header('X-User-ID', '')
# --- update session object
job_doc = self.db.get_job(user_id=user_id, job_id=job_id)
job_schedule = job_doc.get('job_schedule', {})
session_update_doc = {
'jobs': {
job_id: {
'client_id': job_doc['client_id'],
'status': job_schedule.get('status', ''),
'result': job_schedule.get('result', ''),
'time_started': job_schedule.get('time_started', ''),
'time_ended': job_schedule.get('time_ended', '')
}
}
}
self.db.update_session(user_id=user_id,
session_id=session_id,
patch_doc=session_update_doc)
# --- update job object
session_doc = self.db.get_session(user_id=user_id,
session_id=session_id)
job_update_doc = {
'session_id': session_id,
'session_tag': session_doc['session_tag'],
'job_schedule': session_doc['schedule']
}
self.db.update_job(user_id=user_id,
job_id=job_id,
patch_doc=job_update_doc)
resp.status = falcon.HTTP_204
@policy.enforce('sessions:job:remove')
def on_delete(self, req, resp, session_id, job_id):
"""
remove a job from the session
:param req:
:param resp:
:param session_id:
:param job_id:
:return:
"""
user_id = req.get_header('X-User-ID') or ''
session_doc = self.db.get_session(user_id=user_id,
session_id=session_id)
session_doc['jobs'].pop(job_id, None)
# when replacing, db might raise a VersionConflictEngineException
self.db.replace_session(user_id=user_id,
session_id=session_id,
doc=session_doc)
job_update_doc = {
'session_id': '',
'session_tag': 0,
'job_schedule': {
'event': 'stop'
}
}
self.db.update_job(user_id=user_id,
job_id=job_id,
patch_doc=job_update_doc)
resp.status = falcon.HTTP_204
|
|
from sympy import (sin, cos, tan, sec, csc, cot, log, exp, atan, asin, acos,
Symbol, Integral, integrate, pi, Dummy, Derivative,
diff, I, sqrt, erf, Piecewise, Eq, symbols,
And, Heaviside, S, asinh, acosh)
from sympy.integrals.manualintegrate import manualintegrate, find_substitutions, \
_parts_rule
x, y, u, n, a, b = symbols('x y u n a b')
def test_find_substitutions():
assert find_substitutions((cot(x)**2 + 1)**2*csc(x)**2*cot(x)**2, x, u) == \
[(cot(x), 1, -u**6 - 2*u**4 - u**2)]
assert find_substitutions((sec(x)**2 + tan(x) * sec(x)) / (sec(x) + tan(x)),
x, u) == [(sec(x) + tan(x), 1, 1/u)]
assert find_substitutions(x * exp(-x**2), x, u) == [(-x**2, -S.Half, exp(u))]
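# Interpretation note (sketch, not in the original tests): each substitution is
# a tuple (u_expr, constant, integrand rewritten in u); e.g. in the first
# assertion u = cot(x), du = -csc(x)**2 dx, and the integrand becomes
# -(u**2 + 1)**2 * u**2 = -u**6 - 2*u**4 - u**2.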
def test_manualintegrate_polynomials():
assert manualintegrate(y, x) == x*y
assert manualintegrate(exp(2), x) == x * exp(2)
assert manualintegrate(x**2, x) == x**3 / 3
assert manualintegrate(3 * x**2 + 4 * x**3, x) == x**3 + x**4
assert manualintegrate((x + 2)**3, x) == (x + 2)**4 / 4
assert manualintegrate((3*x + 4)**2, x) == (3*x + 4)**3 / 9
assert manualintegrate((u + 2)**3, u) == (u + 2)**4 / 4
assert manualintegrate((3*u + 4)**2, u) == (3*u + 4)**3 / 9
def test_manualintegrate_exponentials():
assert manualintegrate(exp(2*x), x) == exp(2*x) / 2
assert manualintegrate(2**x, x) == (2 ** x) / log(2)
assert manualintegrate(1 / x, x) == log(x)
assert manualintegrate(1 / (2*x + 3), x) == log(2*x + 3) / 2
assert manualintegrate(log(x)**2 / x, x) == log(x)**3 / 3
def test_manualintegrate_parts():
assert manualintegrate(exp(x) * sin(x), x) == \
(exp(x) * sin(x)) / 2 - (exp(x) * cos(x)) / 2
assert manualintegrate(2*x*cos(x), x) == 2*x*sin(x) + 2*cos(x)
assert manualintegrate(x * log(x), x) == x**2*log(x)/2 - x**2/4
assert manualintegrate(log(x), x) == x * log(x) - x
assert manualintegrate((3*x**2 + 5) * exp(x), x) == \
-6*x*exp(x) + (3*x**2 + 5)*exp(x) + 6*exp(x)
assert manualintegrate(atan(x), x) == x*atan(x) - log(x**2 + 1)/2
# Make sure _parts_rule doesn't pick u = constant but can pick dv =
# constant if necessary, e.g. for integrate(atan(x))
assert _parts_rule(cos(x), x) == None
assert _parts_rule(exp(x), x) == None
assert _parts_rule(x**2, x) == None
result = _parts_rule(atan(x), x)
assert result[0] == atan(x) and result[1] == 1
def test_manualintegrate_trigonometry():
assert manualintegrate(sin(x), x) == -cos(x)
assert manualintegrate(tan(x), x) == -log(cos(x))
assert manualintegrate(sec(x), x) == log(sec(x) + tan(x))
assert manualintegrate(csc(x), x) == -log(csc(x) + cot(x))
assert manualintegrate(sin(x) * cos(x), x) in [sin(x) ** 2 / 2, -cos(x)**2 / 2]
assert manualintegrate(-sec(x) * tan(x), x) == -sec(x)
assert manualintegrate(csc(x) * cot(x), x) == -csc(x)
assert manualintegrate(sec(x)**2, x) == tan(x)
assert manualintegrate(csc(x)**2, x) == -cot(x)
assert manualintegrate(x * sec(x**2), x) == log(tan(x**2) + sec(x**2))/2
assert manualintegrate(cos(x)*csc(sin(x)), x) == -log(cot(sin(x)) + csc(sin(x)))
def test_manualintegrate_trigpowers():
assert manualintegrate(sin(x)**2 * cos(x), x) == sin(x)**3 / 3
assert manualintegrate(sin(x)**2 * cos(x) **2, x) == \
x / 8 - sin(4*x) / 32
assert manualintegrate(sin(x) * cos(x)**3, x) == -cos(x)**4 / 4
assert manualintegrate(sin(x)**3 * cos(x)**2, x) == \
cos(x)**5 / 5 - cos(x)**3 / 3
assert manualintegrate(tan(x)**3 * sec(x), x) == sec(x)**3/3 - sec(x)
assert manualintegrate(tan(x) * sec(x) **2, x) == sec(x)**2/2
assert manualintegrate(cot(x)**5 * csc(x), x) == \
-csc(x)**5/5 + 2*csc(x)**3/3 - csc(x)
assert manualintegrate(cot(x)**2 * csc(x)**6, x) == \
-cot(x)**7/7 - 2*cot(x)**5/5 - cot(x)**3/3
def test_manualintegrate_inversetrig():
# atan
assert manualintegrate(exp(x) / (1 + exp(2*x)), x) == atan(exp(x))
assert manualintegrate(1 / (4 + 9 * x**2), x) == atan(3 * x/2) / 6
assert manualintegrate(1 / (16 + 16 * x**2), x) == atan(x) / 16
assert manualintegrate(1 / (4 + x**2), x) == atan(x / 2) / 2
assert manualintegrate(1 / (1 + 4 * x**2), x) == atan(2*x) / 2
assert manualintegrate(1/(a + b*x**2), x) == \
Piecewise(((sqrt(a/b)*atan(x*sqrt(b/a))/a), And(a > 0, b > 0)))
assert manualintegrate(1/(4 + b*x**2), x) == \
Piecewise((sqrt(1/b)*atan(sqrt(b)*x/2)/2, b > 0))
assert manualintegrate(1/(a + 4*x**2), x) == \
Piecewise((atan(2*x*sqrt(1/a))/(2*sqrt(a)), a > 0))
assert manualintegrate(1/(4 + 4*x**2), x) == atan(x) / 4
# asin
assert manualintegrate(1/sqrt(1-x**2), x) == asin(x)
assert manualintegrate(1/sqrt(4-4*x**2), x) == asin(x)/2
assert manualintegrate(3/sqrt(1-9*x**2), x) == asin(3*x)
assert manualintegrate(1/sqrt(4-9*x**2), x) == asin(3*x/2)/3
# asinh
assert manualintegrate(1/sqrt(x**2 + 1), x) == \
asinh(x)
assert manualintegrate(1/sqrt(x**2 + 4), x) == \
asinh(x/2)
assert manualintegrate(1/sqrt(4*x**2 + 4), x) == \
asinh(x)/2
assert manualintegrate(1/sqrt(4*x**2 + 1), x) == \
asinh(2*x)/2
assert manualintegrate(1/sqrt(a*x**2 + 1), x) == \
Piecewise((sqrt(-1/a)*asin(x*sqrt(-a)), a < 0), (sqrt(1/a)*asinh(sqrt(a)*x), a > 0))
assert manualintegrate(1/sqrt(a + x**2), x) == \
Piecewise((asinh(x*sqrt(1/a)), a > 0), (acosh(x*sqrt(-1/a)), a < 0))
# acosh
assert manualintegrate(1/sqrt(x**2 - 1), x) == \
acosh(x)
assert manualintegrate(1/sqrt(x**2 - 4), x) == \
acosh(x/2)
assert manualintegrate(1/sqrt(4*x**2 - 4), x) == \
acosh(x)/2
assert manualintegrate(1/sqrt(9*x**2 - 1), x) == \
acosh(3*x)/3
assert manualintegrate(1/sqrt(a*x**2 - 4), x) == \
Piecewise((sqrt(1/a)*acosh(sqrt(a)*x/2), a > 0))
assert manualintegrate(1/sqrt(-a + 4*x**2), x) == \
Piecewise((asinh(2*x*sqrt(-1/a))/2, -a > 0), (acosh(2*x*sqrt(1/a))/2, -a < 0))
# piecewise
assert manualintegrate(1/sqrt(a-b*x**2), x) == \
Piecewise((sqrt(a/b)*asin(x*sqrt(b/a))/sqrt(a), And(-b < 0, a > 0)),
(sqrt(-a/b)*asinh(x*sqrt(-b/a))/sqrt(a), And(-b > 0, a > 0)),
(sqrt(a/b)*acosh(x*sqrt(b/a))/sqrt(-a), And(-b > 0, a < 0)))
assert manualintegrate(1/sqrt(a + b*x**2), x) == \
Piecewise((sqrt(-a/b)*asin(x*sqrt(-b/a))/sqrt(a), And(a > 0, b < 0)),
(sqrt(a/b)*asinh(x*sqrt(b/a))/sqrt(a), And(a > 0, b > 0)),
(sqrt(-a/b)*acosh(x*sqrt(-b/a))/sqrt(-a), And(a < 0, b > 0)))
def test_manualintegrate_trig_substitution():
assert manualintegrate(sqrt(16*x**2 - 9)/x, x) == \
Piecewise((sqrt(16*x**2 - 9) - 3*acos(3/(4*x)),
And(x < 3*S.One/4, x > -3*S.One/4)))
assert manualintegrate(1/(x**4 * sqrt(25-x**2)), x) == \
Piecewise((-sqrt(-x**2/25 + 1)/(125*x) -
(-x**2/25 + 1)**(3*S.Half)/(15*x**3), And(x < 5, x > -5)))
assert manualintegrate(x**7/(49*x**2 + 1)**(3 * S.Half), x) == \
((49*x**2 + 1)**(5*S.Half)/28824005 -
(49*x**2 + 1)**(3*S.Half)/5764801 +
3*sqrt(49*x**2 + 1)/5764801 + 1/(5764801*sqrt(49*x**2 + 1)))
def test_manualintegrate_rational():
assert manualintegrate(1/(4 - x**2), x) == -log(x - 2)/4 + log(x + 2)/4
assert manualintegrate(1/(-1 + x**2), x) == log(x - 1)/2 - log(x + 1)/2
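# The antiderivatives asserted above can be sanity-checked by differentiating
# them and simplifying back to the integrand.  This small helper is an
# editorial illustration (not part of the original suite); `x` is the
# module-level symbol used throughout these tests.
def _check_antiderivative(integrand, antiderivative, var=x):
    from sympy import simplify, diff
    # e.g. _check_antiderivative(tan(x), -log(cos(x)))
    assert simplify(diff(antiderivative, var) - integrand) == 0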
def test_manualintegrate_derivative():
assert manualintegrate(pi * Derivative(x**2 + 2*x + 3), x) == \
pi * ((x**2 + 2*x + 3))
assert manualintegrate(Derivative(x**2 + 2*x + 3, y), x) == \
x * Derivative(x**2 + 2*x + 3, y)
assert manualintegrate(Derivative(sin(x), x, x, y, x), x) == \
Derivative(sin(x), x, x, y)
def test_manualintegrate_Heaviside():
assert manualintegrate(Heaviside(x), x) == x*Heaviside(x)
assert manualintegrate(x*Heaviside(2), x) == x**2/2
assert manualintegrate(x*Heaviside(-2), x) == 0
assert manualintegrate(x*Heaviside( x), x) == x**2*Heaviside( x)/2
assert manualintegrate(x*Heaviside(-x), x) == x**2*Heaviside(-x)/2
assert manualintegrate(Heaviside(2*x + 4), x) == (x+2)*Heaviside(2*x + 4)
assert manualintegrate(x*Heaviside(x), x) == x**2*Heaviside(x)/2
assert manualintegrate(Heaviside(x + 1)*Heaviside(1 - x)*x**2, x) == \
((x**3/3 + S(1)/3)*Heaviside(x + 1) - S(2)/3)*Heaviside(-x + 1)
y = Symbol('y')
assert manualintegrate(sin(7 + x)*Heaviside(3*x - 7), x) == \
(- cos(x + 7) + cos(S(28)/3))*Heaviside(3*x - S(7))
assert manualintegrate(sin(y + x)*Heaviside(3*x - y), x) == \
(cos(4*y/3) - cos(x + y))*Heaviside(3*x - y)
def test_issue_6799():
r, x, phi = map(Symbol, 'r x phi'.split())
n = Symbol('n', integer=True, positive=True)
integrand = (cos(n*(x-phi))*cos(n*x))
limits = (x, -pi, pi)
assert manualintegrate(integrand, x).has(Integral)
assert r * integrate(integrand.expand(trig=True), limits) / pi == r * cos(n * phi)
assert not integrate(integrand, limits).has(Dummy)
def test_issue_3796():
assert manualintegrate(diff(exp(x + x**2)), x) == exp(x + x**2)
assert integrate(x * exp(x**4), x, risch=False) == -I*sqrt(pi)*erf(I*x**2)/4
def test_manual_true():
assert integrate(exp(x) * sin(x), x, manual=True) == \
(exp(x) * sin(x)) / 2 - (exp(x) * cos(x)) / 2
assert integrate(sin(x) * cos(x), x, manual=True) in \
[sin(x) ** 2 / 2, -cos(x)**2 / 2]
def test_issue_6746():
y = Symbol('y')
n = Symbol('n')
assert manualintegrate(y**x, x) == \
Piecewise((x, Eq(log(y), 0)), (y**x/log(y), True))
assert manualintegrate(y**(n*x), x) == \
Piecewise(
(x, Eq(n, 0)),
(Piecewise(
(n*x, Eq(log(y), 0)),
(y**(n*x)/log(y), True))/n, True))
assert manualintegrate(exp(n*x), x) == \
Piecewise((x, Eq(n, 0)), (exp(n*x)/n, True))
y = Symbol('y', positive=True)
assert manualintegrate((y + 1)**x, x) == (y + 1)**x/log(y + 1)
y = Symbol('y', zero=True)
assert manualintegrate((y + 1)**x, x) == x
y = Symbol('y')
n = Symbol('n', nonzero=True)
assert manualintegrate(y**(n*x), x) == \
Piecewise((n*x, Eq(log(y), 0)), (y**(n*x)/log(y), True))/n
y = Symbol('y', positive=True)
assert manualintegrate((y + 1)**(n*x), x) == \
(y + 1)**(n*x)/(n*log(y + 1))
a = Symbol('a', negative=True)
assert manualintegrate(1 / (a + b*x**2), x) == \
Integral(1/(a + b*x**2), x)
def test_issue_2850():
assert manualintegrate(asin(x)*log(x), x) == -x*asin(x) - sqrt(-x**2 + 1) \
+ (x*asin(x) + sqrt(-x**2 + 1))*log(x) - Integral(sqrt(-x**2 + 1)/x, x)
assert manualintegrate(acos(x)*log(x), x) == -x*acos(x) + sqrt(-x**2 + 1) + \
(x*acos(x) - sqrt(-x**2 + 1))*log(x) + Integral(sqrt(-x**2 + 1)/x, x)
assert manualintegrate(atan(x)*log(x), x) == -x*atan(x) + (x*atan(x) - \
log(x**2 + 1)/2)*log(x) + log(x**2 + 1)/2 + Integral(log(x**2 + 1)/x, x)/2
def test_constant_independent_of_symbol():
assert manualintegrate(Integral(y, (x, 1, 2)), x) == x*Integral(y, (x, 1, 2))
|
|
import pytest
import unittest.mock as mock
import tilemapbase.mapping as mapping
def test_projection_against_pyproj():
import random
for _ in range(1000):
lon = random.random() * 360 - 180
lat = random.random() * 85 * 2 - 85
x, y = mapping.project(lon, lat)
xx, yy = mapping.project_3785(lon, lat)
assert( x == pytest.approx(xx) )
assert( y == pytest.approx(yy) )
xx, yy = mapping.project_3857(lon, lat)
assert( x == pytest.approx(xx) )
assert( y == pytest.approx(yy) )
def test_project_and_back_to_lonlat():
import random
for _ in range(1000):
lon = random.random() * 360 - 180
lat = random.random() * 85 * 2 - 85
x, y = mapping.project(lon, lat)
lo, la = mapping.to_lonlat(x, y)
assert( lon == pytest.approx(lo) )
assert( lat == pytest.approx(la) )
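# A hedged sketch of the normalised Web Mercator forward map exercised above:
# the standard "slippy map" formula, projecting lon/lat onto the unit square.
# mapping.project() is expected to agree with it to floating-point accuracy,
# but this helper is illustrative and not taken from the library's source.
import math

def _approx_project(lon, lat):
    x = (lon + 180.0) / 360.0
    lat_rad = math.radians(lat)
    y = (1.0 - math.log(math.tan(lat_rad) + 1.0 / math.cos(lat_rad)) / math.pi) / 2.0
    return x, y

def test_projection_against_reference_formula():
    for lon, lat in [(0, 0), (-180, 0), (120, 51.5), (-3.2, -45)]:
        x, y = mapping.project(lon, lat)
        xx, yy = _approx_project(lon, lat)
        assert x == pytest.approx(xx)
        assert y == pytest.approx(yy)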
def test_project_swapped_lon_lat():
with pytest.raises(ValueError):
mapping.project(45, 170)
##### Extent class
def test_Extent_construct():
mapping.Extent(0.2, 0.5, 0.3, 0.4)
mapping.Extent(-0.8, -0.5, 0.3, 0.4)
with pytest.raises(ValueError):
mapping.Extent(0.5, 0.2, 0.3, 0.4)
with pytest.raises(ValueError):
mapping.Extent(0.2, 0.5, 0.4, 0.3)
with pytest.raises(ValueError):
mapping.Extent(0.2, 0.5, -0.1, 0.4)
with pytest.raises(ValueError):
mapping.Extent(0.2, 0.5, 0.3, 1.1)
def assert_standard_properties(ex):
assert ex.xmin == pytest.approx(0.2)
assert ex.xmax == pytest.approx(0.5)
assert ex.width == pytest.approx(0.3)
assert ex.xrange == pytest.approx((0.2, 0.5))
assert ex.ymin == pytest.approx(0.3)
assert ex.ymax == pytest.approx(0.4)
assert ex.height == pytest.approx(0.1)
assert ex.yrange == pytest.approx((0.4, 0.3))
assert str(ex) == "Extent((0.2,0.3)->(0.5,0.4) projected as normal)"
def test_Extent_properties():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
assert_standard_properties(ex)
def test_Extent_from_centre():
ex = mapping.Extent.from_centre(0.3, 0.2, xsize=0.1)
assert ex.xrange == pytest.approx((0.25, 0.35))
assert ex.yrange == pytest.approx((0.25, 0.15))
ex = mapping.Extent.from_centre(0.3, 0.2, xsize=0.1, aspect=2)
assert ex.xrange == pytest.approx((0.25, 0.35))
assert ex.yrange == pytest.approx((0.225, 0.175))
ex = mapping.Extent.from_centre(0.3, 0.2, ysize=0.1)
assert ex.xrange == pytest.approx((0.25, 0.35))
assert ex.yrange == pytest.approx((0.25, 0.15))
ex = mapping.Extent.from_centre(0.3, 0.2, ysize=0.1, aspect=2)
assert ex.xrange == pytest.approx((0.2, 0.4))
assert ex.yrange == pytest.approx((0.25, 0.15))
ex = mapping.Extent.from_centre(0.3, 0.2, xsize=0.3, ysize=0.1)
assert ex.xrange == pytest.approx((0.15, 0.45))
assert ex.yrange == pytest.approx((0.25, 0.15))
def test_Extent_from_lonlat():
x, y = mapping.project(32, -10)
ex = mapping.Extent.from_centre_lonlat(32, -10, xsize=0.2)
assert ex.xrange == pytest.approx((x-0.1, x+0.1))
assert ex.yrange == pytest.approx((y+0.1, y-0.1))
xx, yy = mapping.project(34, -12)
ex = mapping.Extent.from_lonlat(32, 34, -12, -10)
assert ex.xrange == pytest.approx((x, xx))
assert ex.yrange == pytest.approx((yy, y))
def test_Extent_from_3857():
x, y = mapping._to_3857(0.2, 0.3)
ex = mapping.Extent.from_centre(0.2, 0.3, xsize=0.1).to_project_3857()
ex1 = mapping.Extent.from_centre_3857(x, y, xsize=0.1)
assert ex1.xrange == pytest.approx(ex.xrange)
assert ex1.yrange == pytest.approx(ex.yrange)
xx, yy = mapping._to_3857(0.25, 0.4)
ex = mapping.Extent.from_3857(x, xx, y, yy)
ex1 = mapping.Extent(0.2, 0.25, 0.3, 0.4).to_project_3857()
assert ex1.xrange == pytest.approx(ex.xrange)
assert ex1.yrange == pytest.approx(ex.yrange)
def test_Extent_projection():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
ex1 = ex.to_project_3857()
ex2 = ex.to_project_web_mercator()
assert_standard_properties(ex)
assert_standard_properties(ex2)
x, y = mapping._to_3857(0.2, 0.3)
xx, yy = mapping._to_3857(0.5, 0.4)
assert ex1.xmin == pytest.approx(x)
assert ex1.xmax == pytest.approx(xx)
assert ex1.width == pytest.approx(xx - x)
assert ex1.xrange == pytest.approx((x, xx))
assert ex1.ymin == pytest.approx(y)
assert ex1.ymax == pytest.approx(yy)
assert ex1.height == pytest.approx(yy - y)
assert ex1.yrange == pytest.approx((yy, y))
assert str(ex1).endswith(" projected as epsg:3857)")
def test_Extent_with_centre():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
ex1 = ex.with_centre(0.3, 0.4)
assert ex1.xrange == pytest.approx((.15, .45))
assert ex1.yrange == pytest.approx((.45, .35))
ex1 = ex.with_centre(0, 0.4)
assert ex1.xrange == pytest.approx((-.15, .15))
assert ex1.yrange == pytest.approx((.45, .35))
ex1 = ex.with_centre(0, 0.01)
assert ex1.xrange == pytest.approx((-.15, .15))
assert ex1.yrange == pytest.approx((0.1, 0))
ex1 = ex.with_centre(0, 0.98)
assert ex1.xrange == pytest.approx((-.15, .15))
assert ex1.yrange == pytest.approx((1, 0.9))
def test_Extent_with_centre_lonlat():
x, y = mapping.project(32, 15)
ex = mapping.Extent(0.2, 0.4, 0.3, 0.5)
ex1 = ex.with_centre_lonlat(32, 15)
assert ex1.xrange == pytest.approx((x-.1, x+.1))
assert ex1.yrange == pytest.approx((y+.1, y-.1))
def test_Extent_to_aspect():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
ex1 = ex.to_aspect(3)
assert ex1.xrange == pytest.approx(ex.xrange)
assert ex1.yrange == pytest.approx(ex.yrange)
ex1 = ex.to_aspect(1)
assert ex1.xrange == pytest.approx((0.3, 0.4))
assert ex1.yrange == pytest.approx(ex.yrange)
ex1 = ex.to_aspect(6)
assert ex1.xrange == pytest.approx(ex.xrange)
assert ex1.yrange == pytest.approx((0.375, 0.325))
ex1 = ex.to_aspect(3, False)
assert ex1.xrange == pytest.approx(ex.xrange)
assert ex1.yrange == pytest.approx(ex.yrange)
ex1 = ex.to_aspect(1, False)
assert ex1.xrange == pytest.approx(ex.xrange)
assert ex1.yrange == pytest.approx((0.35+0.15, 0.35-0.15))
ex1 = ex.to_aspect(6, False)
assert ex1.xrange == pytest.approx((0.35-0.3, 0.35+0.3))
assert ex1.yrange == pytest.approx(ex.yrange)
def test_Extent_with_absolute_translation():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
ex1 = ex.with_absolute_translation(0.6, 0.2)
assert ex1.xrange == pytest.approx((0.8, 1.1))
assert ex1.yrange == pytest.approx((0.6, 0.5))
ex1 = ex.with_absolute_translation(0.6, 0.7)
assert ex1.xrange == pytest.approx((0.8, 1.1))
assert ex1.yrange == pytest.approx((1, 0.9))
ex1 = ex.with_absolute_translation(0.6, -0.4)
assert ex1.xrange == pytest.approx((0.8, 1.1))
assert ex1.yrange == pytest.approx((.1, 0))
def test_Extent_with_translation():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
ex1 = ex.with_translation(2, -1)
assert ex1.xrange == pytest.approx((0.8, 1.1))
assert ex1.yrange == pytest.approx((0.3, 0.2))
def test_Extent_with_scaling():
ex = mapping.Extent(0.2, 0.5, 0.3, 0.4)
ex1 = ex.with_scaling(1)
assert ex1.xrange == pytest.approx(ex1.xrange)
assert ex1.yrange == pytest.approx(ex1.yrange)
ex1 = ex.with_scaling(2)
assert ex1.xrange == pytest.approx((0.35 - 0.075, 0.35 + 0.075))
assert ex1.yrange == pytest.approx((0.375, 0.325))
###### Plotter tests
@pytest.fixture
def ex():
return mapping.Extent(0.2, 0.5, 0.3, 0.4)
@pytest.fixture
def tile_provider():
tp = mock.Mock()
tp.maxzoom = 19
tp.tilesize = 256
return tp
def test_Plotter_constructs(ex, tile_provider):
with pytest.raises(ValueError):
mapping.Plotter(ex, tile_provider)
with pytest.raises(ValueError):
mapping.Plotter(ex, tile_provider, zoom=2, width=100)
with pytest.raises(ValueError):
mapping.Plotter(ex, tile_provider, zoom=2, height=100)
plot = mapping.Plotter(ex, tile_provider, zoom=10)
assert plot.zoom == 10
assert plot.extent is ex
assert plot.extent_in_web_mercator.xrange == ex.xrange
assert plot.extent_in_web_mercator.yrange == ex.yrange
assert plot.xtilemin == int(1024*0.2)
assert plot.xtilemax == int(1024*0.5)
assert plot.ytilemin == int(1024*0.3)
assert plot.ytilemax == int(1024*0.4)
def test_Plotter_auto_zoom(ex, tile_provider):
tile_provider.tilesize = 256
plot = mapping.Plotter(ex, tile_provider, width=100)
# The tile is 256 pixels wide and the extent covers 0.3 of the world, so at
# zoom 0 we would get 256 * 0.3 = 76.8 pixels; each zoom level doubles that.
assert plot.zoom == 1
plot = mapping.Plotter(ex, tile_provider, width=1000)
assert plot.zoom == 4
plot = mapping.Plotter(ex, tile_provider, width=76)
assert plot.zoom == 0
plot = mapping.Plotter(ex, tile_provider, width=77)
assert plot.zoom == 1
tile_provider.tilesize = 512
plot = mapping.Plotter(ex, tile_provider, width=1000)
assert plot.zoom == 3
plot = mapping.Plotter(ex, tile_provider, width=5033164)
assert plot.zoom == 15
plot = mapping.Plotter(ex, tile_provider, width=5033165)
assert plot.zoom == 16
plot = mapping.Plotter(ex, tile_provider, height=1000)
assert plot.zoom == 5
plot = mapping.Plotter(ex, tile_provider, width=1000, height=1000)
assert plot.zoom == 5
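# A minimal sketch of the width-based auto-zoom rule the asserts above imply
# (an editorial illustration, not the library's implementation): pick the
# smallest zoom such that tilesize * extent_width * 2**zoom covers the
# requested pixel width, clamped to the provider's maximum zoom.
def _approx_auto_zoom(extent_width, pixel_width, tilesize=256, maxzoom=19):
    zoom = 0
    while tilesize * extent_width * 2 ** zoom < pixel_width and zoom < maxzoom:
        zoom += 1
    return zoom

def test_approx_auto_zoom_matches_width_cases():
    assert _approx_auto_zoom(0.3, 100) == 1
    assert _approx_auto_zoom(0.3, 1000) == 4
    assert _approx_auto_zoom(0.3, 76) == 0
    assert _approx_auto_zoom(0.3, 77) == 1
    assert _approx_auto_zoom(0.3, 1000, tilesize=512) == 3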
def imshow_calls_to_list(ax_mock):
out = []
for call in ax_mock.imshow.call_args_list:
assert len(call[0]) == 1
assert call[1]["interpolation"] == "lanczos"
out.append((call[0][0], call[1]["extent"]))
return out
def test_Plotter_plotlq_1x1(ex, tile_provider):
plot = mapping.Plotter(ex, tile_provider, width=50)
ax = mock.Mock()
plot.plotlq(ax)
assert tile_provider.get_tile.call_args_list == [ mock.call(0,0,0) ]
tile, extent = imshow_calls_to_list(ax)[0]
assert tile == tile_provider.get_tile.return_value
assert extent == pytest.approx((0,1,1,0))
ax.set.assert_called_with(xlim=(0.2, 0.5), ylim=(0.4, 0.3))
def test_Plotter_plotlq_2x2(ex, tile_provider):
plot = mapping.Plotter(ex, tile_provider, width=100)
ax = mock.Mock()
plot.plotlq(ax)
assert tile_provider.get_tile.call_args_list == [ mock.call(0,0,1), mock.call(1,0,1) ]
imshow = imshow_calls_to_list(ax)
assert len(imshow) == 2
tile, extent = imshow[0]
assert tile == tile_provider.get_tile.return_value
assert extent == pytest.approx((0,0.5,0.5,0))
tile, extent = imshow[1]
assert tile == tile_provider.get_tile.return_value
assert extent == pytest.approx((0.5,1,0.5,0))
ax.set.assert_called_with(xlim=(0.2, 0.5), ylim=(0.4, 0.3))
@pytest.fixture
def new_image():
with mock.patch("PIL.Image.new") as image_mock:
yield image_mock
def test_Plotter_as_one_image_1x1(ex, tile_provider, new_image):
plot = mapping.Plotter(ex, tile_provider, width=50)
image = plot.as_one_image()
new_image.assert_called_with("RGB", (256, 256))
assert image == new_image.return_value
assert tile_provider.get_tile.call_args_list == [ mock.call(0,0,0) ]
tile = tile_provider.get_tile.return_value
image.paste.assert_called_with(tile, (0,0))
def test_Plotter_as_one_image_2x2(ex, tile_provider, new_image):
plot = mapping.Plotter(ex, tile_provider, width=100)
image = plot.as_one_image()
new_image.assert_called_with("RGB", (512, 256))
assert image == new_image.return_value
assert tile_provider.get_tile.call_args_list == [ mock.call(0,0,1), mock.call(1,0,1) ]
tile = tile_provider.get_tile.return_value
assert image.paste.call_args_list == [ mock.call(tile,(0,0)), mock.call(tile,(256,0)) ]
def test_Plotter_plot_2x2(ex, tile_provider, new_image):
plot = mapping.Plotter(ex, tile_provider, width=100)
ax = mock.Mock()
plot.plot(ax)
image = new_image.return_value
imshow = imshow_calls_to_list(ax)
assert len(imshow) == 1
tile, extent = imshow[0]
assert tile == image
assert extent == pytest.approx((0,1,0.5,0))
ax.set.assert_called_with(xlim=(0.2, 0.5), ylim=(0.4, 0.3))
def test_Plotter_too_many_tiles(ex, tile_provider):
plot = mapping.Plotter(ex, tile_provider, width=10000)
with pytest.raises(ValueError):
plot.plot(mock.Mock())
def test_Plotter_plot_epsg(ex, tile_provider, new_image):
ex = ex.to_project_3857()
plot = mapping.Plotter(ex, tile_provider, width=100)
ax = mock.Mock()
plot.plot(ax)
image = new_image.return_value
imshow = imshow_calls_to_list(ax)
assert len(imshow) == 1
tile, extent = imshow[0]
assert tile == image
x, y = mapping._to_3857(0,0)
xx, yy = mapping._to_3857(1,0.5)
assert extent == pytest.approx((x,xx,yy,y))
kwargs = ax.set.call_args_list[0][1]
x, y = mapping._to_3857(0.2,0.3)
xx, yy = mapping._to_3857(0.5,0.4)
kwargs["xlim"] == pytest.approx((x,xx))
kwargs["ylim"] == pytest.approx((yy,y))
## Geopandas support
def test_parse_crs():
code = mapping._parse_crs(None)
assert(code==4326)
crs = {"init" : "epsg:3857"}
code = mapping._parse_crs(crs)
assert(code==3857)
crs = {"init" : "epsg:3785"}
code = mapping._parse_crs(crs)
assert(code==3857)
with pytest.raises(ValueError):
mapping._parse_crs({"init" : "bob:132"})
with pytest.raises(ValueError):
mapping._parse_crs({"init" : "epsg:132"})
with pytest.raises(ValueError):
mapping._parse_crs({"init" : "boajsg136"})
def test_parse_crs_new_style():
crs = mock.Mock()
crs.srs = "EPSG:4326"
code = mapping._parse_crs(crs)
assert(code == 4326)
crs.srs = "epsg:3785"
code = mapping._parse_crs(crs)
assert(code==3857)
with pytest.raises(ValueError):
mapping._parse_crs({"bob":"fish"})
|
|
"""
The module defining the Monitor and MonitorChannel objects used for
tracking the changes in values of various quantities throughout training
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import copy
import time
import warnings
import logging
import numpy as np
from theano.compat import six
from pylearn2.compat import OrderedDict
import theano.sparse
from theano import config
from theano import tensor as T
from theano.printing import var_descriptor
from pylearn2.config import yaml_parse
from pylearn2.datasets.dataset import Dataset
from pylearn2.space import Space, CompositeSpace, NullSpace
from pylearn2.utils import function, sharedX, safe_zip, safe_izip
from pylearn2.utils.exc import reraise_as
from pylearn2.utils.iteration import is_stochastic
from pylearn2.utils.data_specs import DataSpecsMapping
from pylearn2.utils.string_utils import number_aware_alphabetical_key
from pylearn2.utils.timing import log_timing
log = logging.getLogger(__name__)
class Monitor(object):
"""
A class for monitoring Models while they are being trained.
A monitor object records the number of minibatches and number of
examples the model has been trained on, as well as any number of "channels"
that track quantities of interest (examples: the objective
function, measures of hidden unit activity, reconstruction error,
sum of squared second derivatives, average norm of the weight
vectors, etc.)
Parameters
----------
model : `pylearn2.models.model.Model`
Attributes
----------
on_channel_conflict : string
`error` : the behavior when there is a conflict from
creating the same channel twice
`copy_history` : the behavior when creating a
new channel and transferring the history of the old monitor
`overwrite` : the behavior when creating a
new channel without taking the old monitor into account
"""
def __init__(self, model):
self.training_succeeded = False
self.model = model
self.channels = OrderedDict()
self._num_batches_seen = 0
self._examples_seen = 0
self._epochs_seen = 0
self._datasets = []
self._iteration_mode = []
self._batch_size = []
self._num_batches = []
self._dirty = True
self._rng_seed = []
self.names_to_del = ['theano_function_mode']
self.t0 = time.time()
self.theano_function_mode = None
self.on_channel_conflict = 'error'
# Initialize self._nested_data_specs, self._data_specs_mapping,
# and self._flat_data_specs
self._build_data_specs()
def _build_data_specs(self):
"""
Computes a nested data_specs for input and all channels.
Also computes the mapping to flatten it. This function is
called from redo_theano.
"""
# Ask the model what it needs
m_space, m_source = self.model.get_monitoring_data_specs()
input_spaces = [m_space]
input_sources = [m_source]
for channel in self.channels.values():
space = channel.data_specs[0]
assert isinstance(space, Space)
input_spaces.append(space)
input_sources.append(channel.data_specs[1])
nested_space = CompositeSpace(input_spaces)
nested_source = tuple(input_sources)
self._nested_data_specs = (nested_space, nested_source)
self._data_specs_mapping = DataSpecsMapping(self._nested_data_specs)
flat_space = self._data_specs_mapping.flatten(nested_space,
return_tuple=True)
flat_source = self._data_specs_mapping.flatten(nested_source,
return_tuple=True)
self._flat_data_specs = (CompositeSpace(flat_space), flat_source)
def set_theano_function_mode(self, mode):
"""
.. todo::
WRITEME
Parameters
----------
mode : theano.compile.Mode
Theano functions for the monitoring channels will be
compiled and run using this mode.
"""
if self.theano_function_mode != mode:
self._dirty = True
self.theano_function_mode = mode
def add_dataset(self, dataset, mode='sequential', batch_size=None,
num_batches=None, seed=None):
"""
Determines the data used to calculate the values of each channel.
Parameters
----------
dataset : object
A `pylearn2.datasets.Dataset` object.
mode : str or object, optional
Iteration mode; see the docstring of the `iterator` method
on `pylearn2.datasets.Dataset` for details.
batch_size : int, optional
The size of an individual batch. Optional if `mode` is
'sequential' and `num_batches` is specified (batch size
will be calculated based on full dataset size).
num_batches : int, optional
The total number of batches. Unnecessary if `mode` is
'sequential' and `batch_size` is specified (number of
batches will be calculated based on full dataset size).
seed : int, optional
Optional. The seed to be used for random iteration modes.
"""
# The user can omit using lists if only one dataset is set
if not isinstance(dataset, list):
dataset = [dataset]
if not isinstance(mode, list):
mode = [mode]
if not isinstance(batch_size, list):
batch_size = [batch_size]
if not isinstance(num_batches, list):
num_batches = [num_batches]
if seed is None:
seed = [None] * len(dataset)
if not isinstance(seed, list):
seed = [seed]
if len(mode) != len(dataset):
raise ValueError("Received " + str(len(dataset)) +
" dataset but " + str(len(mode)) + " modes.")
if any([len(l) != len(dataset) for l in [batch_size, seed]]):
raise ValueError("make sure each dataset has its iteration " +
"batch size and number of batches.")
for (d, m, b, n, sd) in safe_izip(dataset, mode, batch_size,
num_batches, seed):
try:
it = d.iterator(mode=m,
batch_size=b,
num_batches=n,
data_specs=self._flat_data_specs,
return_tuple=True,
rng=sd)
except ValueError as exc:
reraise_as(ValueError("invalid iteration parameters in " +
"Monitor.add_dataset: " + str(exc)))
if it.stochastic:
# Must be a seed, not a random number generator. If it were a
# random number generator, different iterators using it would
# update its state, so we would not get the same iterator
# each time. Also, must not be None, because this makes the
# iterator pick a seed based on the clock
if sd is None:
raise TypeError("Monitor requires a seed when using " +
"stochastic iteration modes.")
if not isinstance(sd, (list, tuple, int)):
raise TypeError("Monitor requires a seed (not a random " +
"number generator) when using " +
"stochastic iteration modes.")
else:
# The iterator should catch this, but let's double-check
assert sd is None
if d not in self._datasets:
self._datasets.append(d)
self._iteration_mode.append(m)
self._batch_size.append(b)
self._num_batches.append(n)
self._rng_seed.append(sd)
def __call__(self):
"""
Runs the model on the monitoring dataset in order to add one
data point to each of the channels.
"""
# If the channels have changed at all, we need to recompile the theano
# functions used to compute them
if self._dirty:
self.redo_theano()
datasets = self._datasets
# Set all channels' val_shared to 0
self.begin_record_entry()
for d, i, b, n, a, sd, ne in safe_izip(datasets,
self._iteration_mode,
self._batch_size,
self._num_batches,
self.accum,
self._rng_seed,
self.num_examples):
if isinstance(d, six.string_types):
d = yaml_parse.load(d)
raise NotImplementedError()
# need to put d back into self._datasets
myiterator = d.iterator(mode=i,
batch_size=b,
num_batches=n,
data_specs=self._flat_data_specs,
return_tuple=True,
rng=sd)
# If self._flat_data_specs is empty, no channel needs data,
# so we do not need to call the iterator in order to average
# the monitored values across different batches; we only
# have to call them once.
if len(self._flat_data_specs[1]) == 0:
X = ()
self.run_prereqs(X, d)
a(*X)
else:
actual_ne = 0
for X in myiterator:
# X is a flat (not nested) tuple
self.run_prereqs(X, d)
a(*X)
actual_ne += self._flat_data_specs[0].np_batch_size(X)
# end for X
if actual_ne != ne:
raise RuntimeError("At compile time, your iterator said "
"it had %d examples total, but at "
"runtime it gave us %d." %
(ne, actual_ne))
# end for d
log.info("Monitoring step:")
log.info("\tEpochs seen: %d" % self._epochs_seen)
log.info("\tBatches seen: %d" % self._num_batches_seen)
log.info("\tExamples seen: %d" % self._examples_seen)
t = time.time() - self.t0
for channel_name in sorted(self.channels.keys(),
key=number_aware_alphabetical_key):
channel = self.channels[channel_name]
channel.time_record.append(t)
channel.batch_record.append(self._num_batches_seen)
channel.example_record.append(self._examples_seen)
channel.epoch_record.append(self._epochs_seen)
val = channel.val_shared.get_value()
channel.val_record.append(val)
# TODO: use logging infrastructure so that user can configure
# formatting
if abs(val) < 1e4:
val_str = str(val)
else:
val_str = '%.3e' % val
log.info("\t%s: %s" % (channel_name, val_str))
def run_prereqs(self, data, dataset):
"""
Runs all "prerequistie functions" on a batch of data. Always
called right before computing the monitoring channels on that
batch.
Parameters
----------
data : tuple or Variable
a member of the Space used as input to the monitoring
functions
dataset : Dataset
the Dataset the data was drawn from
"""
if dataset not in self.prereqs:
return
for prereq in self.prereqs[dataset]:
prereq(*data)
def get_batches_seen(self):
"""
Returns the number of batches the model has learned on
(assuming that the learning code has been calling
Monitor.report_batch correctly).
"""
return self._num_batches_seen
def get_epochs_seen(self):
"""
.. todo::
WRITEME
Returns
-------
epochs_seen : int
The number of epochs the model has been trained on.
One "epoch" is one pass through Dataset.iterator.
"""
return self._epochs_seen
def get_examples_seen(self):
"""
.. todo::
WRITEME
Returns
-------
examples_seen : int
The number of examples the model has learned on (assuming
that the learning code has been calling Monitor.report_batch
correctly)
"""
return self._examples_seen
def report_batch(self, num_examples):
"""
Call this whenever the model has learned on another batch of
examples. Report how many examples were learned on.
Parameters
----------
num_examples : int
The number of examples learned on in this minibatch.
"""
self._examples_seen += num_examples
self._num_batches_seen += 1
def report_epoch(self):
"""
Call this whenever the model has completed another "epoch" of
learning. We regard one pass through Dataset.iterator as one
epoch.
"""
self._epochs_seen += 1
def redo_theano(self):
"""
Recompiles Theano functions used by this monitor.
This is called any time we need to evaluate the channels and
the channel definitions have changed since last we called it,
or if the theano functions are unavailable for any other reason
(first time they are needed after construction or
deserialization, etc.)
All channels are compiled as part of the same theano function
so that the theano optimizations can eliminate subexpressions
that are shared between multiple channels.
"""
self._dirty = False
# Recompute the data specs, since the channels may have changed.
self._build_data_specs()
init_names = dir(self)
self.prereqs = OrderedDict()
for channel in self.channels.values():
if channel.prereqs is not None:
dataset = channel.dataset
if dataset not in self.prereqs:
self.prereqs[dataset] = []
prereqs = self.prereqs[dataset]
for prereq in channel.prereqs:
if prereq not in prereqs:
prereqs.append(prereq)
updates = OrderedDict()
for channel in self.channels.values():
updates[channel.val_shared] = np.cast[config.floatX](0.0)
with log_timing(log, "compiling begin_record_entry"):
self.begin_record_entry = function(
inputs=[],
updates=updates,
mode=self.theano_function_mode,
name='Monitor.begin_record_entry'
)
updates = OrderedDict()
givens = OrderedDict()
# Get the appropriate kind of theano variable to represent the data
# the model acts on
batch_names = ['monitoring_%s' % s for s in self._flat_data_specs[1]]
theano_args = self._flat_data_specs[0].make_theano_batch(batch_names)
# Get a symbolic expression of the batch size
# We do it here, rather than for each channel, because channels with an
# empty data_specs do not use data, and are unable to extract the batch
# size. The case where the whole data specs is empty is not supported.
batch_size = self._flat_data_specs[0].batch_size(theano_args)
# Also get a nested representation, for joint iteration
# with each of channel.graph_input
nested_theano_args = self._data_specs_mapping.nest(theano_args)
if not isinstance(nested_theano_args, tuple):
nested_theano_args = (nested_theano_args,)
assert len(nested_theano_args) == (len(self.channels) + 1)
log.info('Monitored channels: ')
for key in sorted(self.channels.keys()):
mode = self.theano_function_mode
if mode is not None and hasattr(mode, 'record'):
mode.record.handle_line('compiling monitor including ' +
'channel ' + key + '\n')
log.info('\t%s' % key)
it = []
for d, i, n, b in safe_izip(self._datasets, self._iteration_mode,
self._num_batches, self._batch_size):
it.append(d.iterator(mode=i, num_batches=n, batch_size=b,
data_specs=self._flat_data_specs,
return_tuple=True))
self.num_examples = [i.num_examples for i in it]
givens = [OrderedDict() for d in self._datasets]
updates = [OrderedDict() for d in self._datasets]
for i, channel in enumerate(self.channels.values()):
index = self._datasets.index(channel.dataset)
d = self._datasets[index]
g = givens[index]
cur_num_examples = self.num_examples[index]
u = updates[index]
# Flatten channel.graph_input and the appropriate part of
# nested_theano_args, to iterate jointly over them.
c_mapping = DataSpecsMapping(channel.data_specs)
channel_inputs = c_mapping.flatten(channel.graph_input,
return_tuple=True)
inputs = c_mapping.flatten(nested_theano_args[i + 1],
return_tuple=True)
for (channel_X, X) in safe_izip(channel_inputs, inputs):
assert channel_X not in g or g[channel_X] is X
assert channel_X.type == X.type, (channel_X.type, X.type)
g[channel_X] = X
if batch_size == 0:
# No channel needs any data, so there is no need to
# average results, and we will call the accum functions only
# once.
# TODO: better handling of channels not needing data when
# some other channels need data.
assert len(self._flat_data_specs[1]) == 0
val = channel.val
else:
if n == 0:
raise ValueError("Iterating over 0 examples results in " +
"divide by 0")
val = T.cast(channel.val * T.cast(batch_size, 'float64')
/ cur_num_examples, config.floatX)
u[channel.val_shared] = channel.val_shared + val
with log_timing(log, "Compiling accum"):
# Check type of update expressions
for up in updates:
for key in up:
if key.dtype != up[key].dtype:
raise TypeError('Monitoring channel shared variable ' +
key.name + ' has dtype ' + key.dtype +
' but is driven by an expression ' +
'with type ' + up[key].dtype)
self.accum = []
for idx, packed in enumerate(safe_izip(givens, updates)):
g, u = packed
mode = self.theano_function_mode
if mode is not None and hasattr(mode, 'record'):
for elem in g:
mode.record.handle_line('g key ' +
var_descriptor(elem) + '\n')
mode.record.handle_line('g val ' +
var_descriptor(g[elem]) + '\n')
for elem in u:
mode.record.handle_line('u key ' +
var_descriptor(elem) + '\n')
mode.record.handle_line('u val ' +
var_descriptor(u[elem]) + '\n')
function_name = 'Monitor.accum[%d]' % idx
if mode is not None and hasattr(mode, 'record'):
mode.record.handle_line('compiling supervised accum\n')
# Some channels may not depend on the data, ie, they might just
# monitor the model parameters, or some shared variable updated
# by the training algorithm, so we need to ignore the unused
# input error
self.accum.append(function(theano_args,
givens=g,
updates=u,
mode=self.theano_function_mode,
name=function_name))
for a in self.accum:
if mode is not None and hasattr(mode, 'record'):
for elem in a.maker.fgraph.outputs:
mode.record.handle_line('accum output ' +
var_descriptor(elem) + '\n')
log.info("graph size: %d" % len(a.maker.fgraph.toposort()))
final_names = dir(self)
self.register_names_to_del([name for name in final_names
if name not in init_names])
def register_names_to_del(self, names):
"""
Register names of fields that should be deleted before pickling.
Parameters
----------
names : list
A list of attribute names as strings.
"""
for name in names:
if name not in self.names_to_del:
self.names_to_del.append(name)
def __getstate__(self):
"""
In order to avoid pickling a copy of the dataset whenever a
monitor is saved, the __getstate__ method replaces the dataset
field with the dataset's yaml source. This is not a perfect
solution because it won't work with job resuming, which would
require saving the state of the dataset's random number
generator.
Like in the Model class, we also need to avoid saving any
Theano functions, so we delete everything that can be
regenerated with `redo_theano` by deleting the fields in
`self.names_to_del`
"""
# Patch old pickled monitors
if not hasattr(self, '_datasets'):
self._datasets = [self._dataset]
del self._dataset
temp = self._datasets
if self._datasets:
self._datasets = []
for dataset in temp:
if isinstance(dataset, six.string_types):
self._datasets.append(dataset)
else:
try:
self._datasets.append(dataset.yaml_src)
except AttributeError:
warnings.warn('Trained model saved without ' +
'indicating yaml_src')
d = copy.copy(self.__dict__)
self._datasets = temp
for name in self.names_to_del:
if name in d:
del d[name]
return d
def __setstate__(self, d):
"""
Sets the object to have the state described by `d`.
Parameters
----------
d : dict
A dictionary mapping string names of fields to values for
these fields.
"""
# patch old pkl files
if '_dataset' in d:
d['_datasets'] = [d['_dataset']]
del d['_dataset']
self.__dict__.update(d)
def add_channel(self, name, ipt, val, dataset=None, prereqs=None,
data_specs=None):
"""
Asks the monitor to start tracking a new value. Can be called
even after the monitor is already in use.
Parameters
----------
name : str
The display name in the monitor.
ipt : tensor_like
The symbolic tensor which should be clamped to the data.
(or a list/tuple containing symbolic tensors, following the
data_specs)
val : tensor_like
The value (function of `ipt`) to be tracked.
dataset : pylearn2.datasets.Dataset
Which dataset to compute this channel on
prereqs : list of callables that take a list of numpy tensors
Each prereq must be called exactly once per each new batch
of data drawn *from dataset* before the channel value is
computed. If two channels provide a prereq with exactly the
same id, that prereq will only be called once.
data_specs : (space, source) pair
Identifies the order, format and semantics of ipt
"""
if six.PY3:
numeric = (float, int)
else:
numeric = (float, int, long) # noqa
if isinstance(val, numeric):
val = np.cast[theano.config.floatX](val)
val = T.as_tensor_variable(val)
if data_specs is None:
warnings.warn("parameter 'data_specs' should be provided when " +
"calling add_channel. We will build a default one.",
stacklevel=2)
if isinstance(ipt, list):
ipt = tuple(ipt)
if ipt is not None and not isinstance(ipt, tuple):
ipt = (ipt,)
if ipt is None:
data_specs = (NullSpace(), '')
elif len(ipt) == 0:
data_specs = (CompositeSpace([]), ())
elif hasattr(dataset, 'get_data_specs'):
dataset_space, dataset_source = dataset.get_data_specs()
if (len(ipt) == 1 and
dataset_source is not None and
(not isinstance(dataset_source, tuple) or
len(dataset_source) == 1) and
'features' in dataset_source):
data_specs = (dataset_space, dataset_source)
elif (len(ipt) == 2 and
dataset_source == ('features', 'targets')):
data_specs = (dataset_space, dataset_source)
else:
raise ValueError("Cannot infer default data_specs for " +
"the following input points and " +
"dataset: ipt = %s, dataset = %s"
% (ipt, dataset))
data_specs[0].validate(ipt)
mapping = DataSpecsMapping(data_specs)
flat_ipt = mapping.flatten(ipt)
if not isinstance(flat_ipt, tuple):
flat_ipt = (flat_ipt,)
inputs = theano.gof.graph.inputs([val])
for elem in inputs:
if not hasattr(elem, 'get_value') and \
not isinstance(elem, theano.gof.graph.Constant):
if elem not in flat_ipt:
raise ValueError("Unspecified input: " + str(elem) +
". This may be due to an incorrect " +
"implementation of a cost's " +
"get_data_specs() method, or of a " +
"model's get_monitoring_data_specs() " +
"method.")
mode = self.theano_function_mode
if mode is not None and hasattr(mode, 'record'):
mode.record.handle_line('Adding monitor channel '+name+'\n')
assert isinstance(flat_ipt, tuple)
if len(flat_ipt) != 1:
for elem in flat_ipt:
mode.record.handle_line('Includes input var ' +
var_descriptor(elem) + '\n')
else:
mode.record.handle_line(name + ' input var is ' +
var_descriptor(flat_ipt[0]) + '\n')
mode.record.handle_line('channel ' + name + ' is ' +
var_descriptor(val) + '\n')
if dataset is None:
if len(self._datasets) == 1:
dataset = self._datasets[0]
elif len(self._datasets) == 0:
raise ValueError(_err_no_data)
else:
raise ValueError(_err_ambig_data)
try:
self._datasets.index(dataset)
except ValueError:
reraise_as(ValueError("The dataset specified is not one of the " +
"monitor's datasets"))
if ((self.on_channel_conflict not in
('error', 'copy_history', 'overwrite'))):
raise ValueError("on_channel_conflict should be either 'error'" +
"'copy_history', or 'overwrite'")
if name in self.channels and self.on_channel_conflict == 'error':
raise ValueError("Tried to create the same channel twice (%s)" %
name)
elif ((name in self.channels and
self.on_channel_conflict == 'copy_history')):
self.channels[name] = MonitorChannel(ipt, val, name, data_specs,
dataset, prereqs,
self.channels[name])
elif ((name not in self.channels or
self.on_channel_conflict == 'overwrite')):
self.channels[name] = MonitorChannel(ipt, val, name, data_specs,
dataset, prereqs)
self._dirty = True
def _sanity_check(self):
"""
Sometimes we serialize models and then load them somewhere else
but still try to use their Monitor, and the Monitor is in a
mangled state. I've added some calls to _sanity_check to try to
catch when that happens. Not sure what to do for a long term
fix. I think it requires making theano graphs serializable
first.
"""
for name in self.channels:
channel = self.channels[name]
assert hasattr(channel, 'prereqs')
@classmethod
def get_monitor(cls, model):
"""
Returns a model's monitor. If the model doesn't have a monitor
yet, installs one and returns that.
Parameters
----------
model : object
An object that implements the `Model` interface specified
in `pylearn2.models`.
"""
if hasattr(model, 'monitor'):
rval = model.monitor
rval._sanity_check()
else:
rval = Monitor(model)
model.monitor = rval
return rval
# TODO: find out if this method is used anywhere, remove if not.
@property
def batch_size(self):
"""
.. todo::
WRITEME
Returns
-------
batch_size : int
The size of the batches used for monitoring
"""
return self._batch_size
# TODO: find out if this method is used anywhere, remove if not.
@property
def num_batches(self):
"""
.. todo::
WRITEME
Returns
-------
num_batches : int
The number of batches used for monitoring
"""
return self._num_batches
def setup(self, dataset, cost, batch_size, num_batches=None,
extra_costs=None, mode='sequential', obj_prereqs=None,
cost_monitoring_args=None):
"""
Sets up the monitor for a cost minimization problem.
Adds channels defined by both the model and the cost for
the specified dataset(s), as well as a channel called
'objective' defined by the cost's __call__ method.
Parameters
----------
dataset : pylearn2.datasets.Dataset
Dataset or dictionary mapping string names to Datasets.
If string names are used, then for every dataset, each
channel defined by the model or cost will be replicated
with that dataset's name followed by an underscore as the
prefix. For example, if your cost defines a channel called
'misclass', and datasets is
{'train' : train_dataset, 'valid' : valid_dataset},
you will get channels called 'train_misclass' and
'valid_misclass'.
cost : pylearn2.costs.Cost
The cost being optimized by training. The value of the cost
will appear as the `objective` channel. Its
`get_monitoring_channels` method will also be used to
supply other channels.
extra_costs : OrderedDict, optional
A dictionary mapping channel names to Cost objects.
Their value will appear as the specified channel name.
They will also provide more monitoring channels via their
`get_monitoring_channels` method.
obj_prereqs : None, or list of functions
Functions to pass as prerequisites to the `objective` channel.
cost_monitoring_args : dict
Dictionary of kwargs that will be passed to
`cost.get_monitoring_channels()`
(but not for the extra_costs).
"""
if dataset is None:
return
if isinstance(dataset, Dataset):
dataset = {'': dataset}
else:
assert isinstance(dataset, dict)
assert all(isinstance(key, str) for key in dataset)
assert all(isinstance(dataset[key], Dataset) for key in dataset)
if extra_costs is None:
costs = {}
else:
assert isinstance(extra_costs, (OrderedDict, dict))
costs = extra_costs
assert '' not in costs
costs[''] = cost
if cost_monitoring_args is None:
cost_monitoring_args = {}
model = self.model
# Build a composite data_specs containing the specs for all costs,
# then the specs of the model
cost_names = sorted(costs.keys())
spaces = []
sources = []
for c in cost_names:
c_space, c_source = costs[c].get_data_specs(model)
spaces.append(c_space)
sources.append(c_source)
# Ask the model for the data_specs needed
m_space, m_source = model.get_monitoring_data_specs()
spaces.append(m_space)
sources.append(m_source)
nested_space = CompositeSpace(spaces)
nested_sources = tuple(sources)
# Flatten this data_specs, so we build only one symbolic Theano
# variable for each of the unique (space, source) pairs.
mapping = DataSpecsMapping((nested_space, nested_sources))
space_tuple = mapping.flatten(nested_space, return_tuple=True)
source_tuple = mapping.flatten(nested_sources, return_tuple=True)
ipt = tuple(space.make_theano_batch(name='monitor_%s' % source,
batch_size=None)
for (space, source) in safe_zip(space_tuple, source_tuple))
# Build a nested tuple from ipt, to dispatch the appropriate parts
# of the ipt batch to each cost
nested_ipt = mapping.nest(ipt)
custom_channels = {}
for i, cost_name in enumerate(cost_names):
if cost_name == '':
prefix = ''
else:
prefix = cost_name + '_'
cost = costs[cost_name]
cost_ipt = nested_ipt[i]
raw_channels = cost.get_monitoring_channels(model, cost_ipt)
channels = {}
for name in raw_channels:
# We need three things: the value itself (raw_channels[name]),
# the input variables (cost_ipt), and the data_specs for
# these input variables ((spaces[i], sources[i]))
channels[prefix + name] = (raw_channels[name],
cost_ipt,
(spaces[i], sources[i]))
custom_channels.update(channels)
# Use the last inputs from nested_ipt for the model
model_channels = model.get_monitoring_channels(nested_ipt[-1])
channels = {}
for name in model_channels:
# Note: some code used to consider that model_channels[name]
# could be a (channel, prereqs) pair; this is not supported.
channels[name] = (model_channels[name],
nested_ipt[-1],
(spaces[-1], sources[-1]))
custom_channels.update(channels)
if is_stochastic(mode):
seed = [[2013, 2, 22]]
else:
seed = None
for dataset_name in dataset:
cur_dataset = dataset[dataset_name]
self.add_dataset(dataset=cur_dataset,
mode=mode,
batch_size=batch_size,
num_batches=num_batches,
seed=seed)
if dataset_name == '':
dprefix = ''
else:
dprefix = dataset_name + '_'
# The channel name 'objective' must not vary, since callbacks
# that respond to the values in the monitor use the name to find
# it.
for i, cost_name in enumerate(cost_names):
cost = costs[cost_name]
cost_ipt = nested_ipt[i]
cost_value = cost.expr(model, cost_ipt)
if cost_value is not None:
if cost_name == '':
name = dprefix + 'objective'
prereqs = obj_prereqs
else:
name = dprefix + cost_name
prereqs = None
cost.get_data_specs(model)[0].validate(cost_ipt)
self.add_channel(name=name,
ipt=cost_ipt,
val=cost_value,
data_specs=cost.get_data_specs(model),
dataset=cur_dataset,
prereqs=prereqs)
for key in custom_channels:
val, ipt, data_specs = custom_channels[key]
data_specs[0].validate(ipt)
self.add_channel(name=dprefix + key,
ipt=ipt,
val=val,
data_specs=data_specs,
dataset=cur_dataset)
class MonitorChannel(object):
"""
A class representing a specific quantity to be monitored.
Parameters
----------
graph_input : tensor_like
The symbolic tensor which should be clamped to the data.
val : tensor_like
The value (symbolic function of `graph_input`) to be evaluated
and recorded.
name : str
The display name in the monitor.
data_specs : (space, source) pair
Identifies the order, format and semantics of graph_input
prereqs : list of callables
Callables that take numpy tensors. Each prereq must be called
exactly once per each new batch of data before the channel
value is computed. If two channels provide a prereq with exactly
the same id, that prereq will only be called once.
old_channel : MonitorChannel
MonitorChannel of old monitor, if not None, records of
MonitorChannel will be initialized with records of old channel.
When initializing the channel, the last value will be excluded,
since it will be instantly recomputed by the next launch.
"""
def __init__(self, graph_input, val, name, data_specs, dataset,
prereqs=None, old_channel=None):
self.name = name
self.prereqs = prereqs
self.graph_input = graph_input
self.data_specs = data_specs
if isinstance(val, float):
val = T.constant(np.cast[config.floatX](val))
self.val = val
self.val_shared = sharedX(0.0, name + "_tracker")
assert self.val_shared.dtype == config.floatX, \
"expected %s, got %s" % (config.floatX, self.val_shared.dtype)
if not hasattr(val, 'dtype'):
raise TypeError('Monitor channel ' + name + ' has value of type ' +
str(type(val)))
if val.dtype != self.val_shared.dtype:
raise ValueError('monitor channels are expected to have dtype ' +
str(self.val_shared.dtype) + ' but "' + name +
'" has dtype ' + str(val.dtype))
if val.ndim != 0:
raise ValueError('monitor channels are supposed to have zero ' +
'dimensions but "' + name + '" has ' +
str(val.ndim))
# Dataset monitored by this channel
self.dataset = dataset
if old_channel is not None:
# Value of the desired quantity at measurement time.
self.val_record = old_channel.val_record[:-1]
# Number of batches seen at measurement time.
self.batch_record = old_channel.batch_record[:-1]
# Number of examples seen at measurement time (batch sizes may
# fluctuate).
self.example_record = old_channel.example_record[:-1]
self.epoch_record = old_channel.epoch_record[:-1]
self.time_record = old_channel.time_record[:-1]
else:
# Value of the desired quantity at measurement time.
self.val_record = []
# Number of batches seen at measurement time.
self.batch_record = []
# Number of examples seen at measurement time (batch sizes may
# fluctuate).
self.example_record = []
self.epoch_record = []
self.time_record = []
def __str__(self):
"""
.. todo::
WRITEME
Returns
-------
s : str
A reasonably human-readable string representation of the object.
"""
try:
graph_input_str = str(self.graph_input)
except Exception:
graph_input_str = '<bad graph input>'
try:
val_str = str(self.val)
except Exception:
val_str = '<bad val>'
try:
name_str = str(self.name)
except Exception:
name_str = '<bad name>'
try:
prereqs_str = str(self.prereqs)
except Exception:
prereqs_str = '<bad prereqs>'
return "MonitorChannel(%s,%s,%s,%s)" % (graph_input_str,
val_str,
name_str,
prereqs_str)
def __getstate__(self):
"""
.. todo::
WRITEME
Returns
-------
d : dict
A dictionary mapping the string names of the fields of the class
to values appropriate for pickling.
"""
# We need to figure out a good way of saving the other fields. In the
# current setup, since there's no good way of coordinating with the
# model/training algorithm, the theano based fields might be invalid
# after a repickle. This means we can't, for instance, resume a job
# with monitoring after a crash. For now, to make sure no one
# erroneously depends on these bad values, I exclude them from the
# pickle.
if hasattr(self, 'val'):
doc = get_monitor_doc(self.val)
else:
# Hack to deal with Theano expressions not being serializable.
# If this is a channel that has been serialized and then
# deserialized, the expression is gone, but we should have
# stored the doc
if hasattr(self, "doc"):
doc = self.doc
else:
# Support pickle files that are older than the doc system
doc = None
return {
'doc': doc,
'example_record': self.example_record,
'batch_record': self.batch_record,
'time_record': self.time_record,
'epoch_record': self.epoch_record,
'val_record': self.val_record
}
def __setstate__(self, d):
"""
Sets the object to have the state described by `d`.
Parameters
----------
d : dict
A dictionary mapping string names of fields to values for
these fields.
"""
self.__dict__.update(d)
if 'batch_record' not in d:
self.batch_record = [None] * len(self.val_record)
# Patch old pickle files that don't have the "epoch_record" field
if 'epoch_record' not in d:
# This is not necessarily correct but it is in the most common use
# case where you don't add monitoring channels over time.
self.epoch_record = range(len(self.val_record))
if 'time_record' not in d:
self.time_record = [None] * len(self.val_record)
def push_monitor(model, name, transfer_experience=False,
save_records=False):
"""
When you load a model in a yaml file and you want to store its
old monitor under a different name and start a new monitor, wrap
the model in this function call.
Parameters
----------
model : pylearn2.models.model.Model
The model you loaded
name : str
Will save the old monitor to model.name
transfer_experience : bool
If True, the new monitor will start with its epochs seen,
batches seen, and examples seen set to where the old monitor
left off. This is nice for stitching together learning curves
across multiple stages of learning.
save_records : bool
If True, val_record, batch_record, example_record, epoch_record,
and time_record of the new monitor will be initialized with the
records of old monitor.
Returns
-------
model : WRITEME
Returns the model itself so you can use an !obj:push_monitor
call as the definition of a model in a YAML file.
"""
assert hasattr(model, 'monitor')
old_monitor = model.monitor
setattr(model, name, old_monitor)
del model.monitor
if transfer_experience:
monitor = Monitor.get_monitor(model)
assert monitor is not old_monitor
monitor._num_batches_seen = old_monitor._num_batches_seen
monitor._examples_seen = old_monitor._examples_seen
monitor._epochs_seen = old_monitor._epochs_seen
if save_records:
monitor.on_channel_conflict = 'copy_history'
monitor.channels = copy.copy(old_monitor.channels)
for key, value in list(monitor.channels.items()):
value.prereqs = None
return model
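# A YAML usage sketch for push_monitor (the pickle path and monitor name below
# are illustrative, not taken from any particular experiment):
#
#     model: !obj:pylearn2.monitor.push_monitor {
#         model: !pkl: "stage_one_best.pkl",
#         name: "monitor_stage_one",
#         transfer_experience: True,
#     }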
def read_channel(model, channel_name, monitor_name='monitor'):
"""
Returns the last value recorded in a channel.
Parameters
----------
model : Model
The model to read the channel from
channel_name : str
The name of the channel to read from
monitor_name : str, optional
The name of the Monitor to read from
(In case you want to read from an old Monitor moved by
`push_monitor`)
Returns
-------
value : float
The last value recorded in this monitoring channel
"""
return getattr(model, monitor_name).channels[channel_name].val_record[-1]
def get_channel(model, dataset, channel, cost, batch_size):
"""
Make a temporary monitor and return the value of a channel in it.
Parameters
----------
model : pylearn2.models.model.Model
Will evaluate the channel for this Model.
dataset : pylearn2.datasets.Dataset
The Dataset to run on
channel : str
A string identifying the channel name to evaluate
cost : pylearn2.costs.Cost
The Cost to setup for monitoring
batch_size : int
The size of the batch to use when running the monitor
Returns
-------
value : WRITEME
The value of the requested channel.
Notes
-----
This doesn't modify the model (unless some of the channel prereqs do).
In particular, it does not change model.monitor.
"""
monitor = Monitor(model)
monitor.setup(dataset=dataset, cost=cost, batch_size=batch_size)
monitor()
channels = monitor.channels
channel = channels[channel]
val_record = channel.val_record
value, = val_record
return value
def get_monitor_doc(var):
"""
Returns the __doc__ field of var or None. This field is used on
theano Variables to document the meaning of monitor channels.
Parameters
----------
var : theano.gof.Variable
The variable to get the documentation of
Returns
-------
doc : str or None
var.__doc__ if var has an instance-level doc, otherwise None
"""
doc = None
if var.__doc__ is not var.__class__.__doc__:
doc = var.__doc__
return doc
_err_no_data = "You tried to add a channel to a Monitor that has no dataset."
_err_ambig_data = ("You added a channel to a Monitor that has multiple " +
"datasets, and did not specify which dataset to use it " +
"with.")
|
|
# -*- coding: utf-8 -*-
"""
celery.five
~~~~~~~~~~~
Compatibility implementations of features
only available in newer Python versions.
"""
from __future__ import absolute_import
__all__ = ['Counter', 'reload', 'UserList', 'UserDict', 'Queue', 'Empty',
'zip_longest', 'StringIO', 'BytesIO', 'map', 'string', 'string_t',
'long_t', 'text_t', 'range', 'int_types', 'items', 'keys', 'values',
'nextfun', 'reraise', 'WhateverIO', 'with_metaclass',
'OrderedDict', 'THREAD_TIMEOUT_MAX', 'format_d',
'class_property', 'reclassmethod', 'create_module',
'recreate_module', 'monotonic']
try:
from collections import Counter
except ImportError: # pragma: no cover
from collections import defaultdict
def Counter(): # noqa
return defaultdict(int)
############## py3k #########################################################
import sys
PY3 = sys.version_info[0] == 3
try:
reload = reload # noqa
except NameError: # pragma: no cover
from imp import reload # noqa
try:
from UserList import UserList # noqa
except ImportError: # pragma: no cover
from collections import UserList # noqa
try:
from UserDict import UserDict # noqa
except ImportError: # pragma: no cover
from collections import UserDict # noqa
from kombu.five import monotonic
if PY3: # pragma: no cover
import builtins
from queue import Queue, Empty
from itertools import zip_longest
from io import StringIO, BytesIO
map = map
string = str
string_t = str
long_t = int
text_t = str
range = range
int_types = (int, )
open_fqdn = 'builtins.open'
def items(d):
return d.items()
def keys(d):
return d.keys()
def values(d):
return d.values()
def nextfun(it):
return it.__next__
exec_ = getattr(builtins, 'exec')
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
class WhateverIO(StringIO):
def write(self, data):
if isinstance(data, bytes):
data = data.decode()
StringIO.write(self, data)
else:
import __builtin__ as builtins # noqa
from Queue import Queue, Empty # noqa
from itertools import imap as map, izip_longest as zip_longest # noqa
from StringIO import StringIO # noqa
string = unicode # noqa
string_t = basestring # noqa
text_t = unicode
long_t = long # noqa
range = xrange
int_types = (int, long)
open_fqdn = '__builtin__.open'
def items(d): # noqa
return d.iteritems()
def keys(d): # noqa
return d.iterkeys()
def values(d): # noqa
return d.itervalues()
def nextfun(it): # noqa
return it.next
def exec_(code, globs=None, locs=None): # pragma: no cover
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
exec_("""def reraise(tp, value, tb=None): raise tp, value, tb""")
BytesIO = WhateverIO = StringIO # noqa
def with_metaclass(Type, skip_attrs=set(['__dict__', '__weakref__'])):
"""Class decorator to set metaclass.
Works with both Python 2 and Python 3 and it does not add
an extra class in the lookup order like ``six.with_metaclass`` does
(that is -- it copies the original class instead of using inheritance).
"""
def _clone_with_metaclass(Class):
attrs = dict((key, value) for key, value in items(vars(Class))
if key not in skip_attrs)
return Type(Class.__name__, Class.__bases__, attrs)
return _clone_with_metaclass
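# Usage sketch for the decorator above (``MyMeta`` and ``Example`` are
# illustrative names only):
#
#     class MyMeta(type):
#         pass
#
#     @with_metaclass(MyMeta)
#     class Example(object):
#         pass
#
#     assert type(Example) is MyMeta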
############## collections.OrderedDict ######################################
# was moved to kombu
from kombu.utils.compat import OrderedDict # noqa
############## threading.TIMEOUT_MAX #######################################
try:
from threading import TIMEOUT_MAX as THREAD_TIMEOUT_MAX
except ImportError:
THREAD_TIMEOUT_MAX = 1e10 # noqa
############## format(int, ',d') ##########################
if sys.version_info >= (2, 7): # pragma: no cover
def format_d(i):
return format(i, ',d')
else: # pragma: no cover
def format_d(i): # noqa
s = '%d' % i
groups = []
while s and s[-1].isdigit():
groups.append(s[-3:])
s = s[:-3]
return s + ','.join(reversed(groups))
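# A quick sanity sketch: both ``format_d`` branches group digits from the
# right with commas.
def _format_d_example():  # pragma: no cover
    assert format_d(0) == '0'
    assert format_d(1234567) == '1,234,567'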
############## Module Generation ##########################
# Utilities to dynamically
# recreate modules, either for lazy loading or
# to create old modules at runtime instead of
# having them litter the source tree.
import operator
import sys
# import fails in python 2.5. fallback to reduce in stdlib
try:
from functools import reduce
except ImportError:
pass
from importlib import import_module
from types import ModuleType
MODULE_DEPRECATED = """
The module %s is deprecated and will be removed in a future version.
"""
DEFAULT_ATTRS = set(['__file__', '__path__', '__doc__', '__all__'])
# im_func is no longer available in Py3.
# instead the unbound method itself can be used.
if sys.version_info[0] == 3: # pragma: no cover
def fun_of_method(method):
return method
else:
def fun_of_method(method): # noqa
return method.im_func
def getappattr(path):
"""Gets attribute from the current_app recursively,
    e.g. ``getappattr('amqp.get_task_consumer')``."""
from celery import current_app
return current_app._rgetattr(path)
def _compat_task_decorator(*args, **kwargs):
from celery import current_app
kwargs.setdefault('accept_magic_kwargs', True)
return current_app.task(*args, **kwargs)
def _compat_periodic_task_decorator(*args, **kwargs):
from celery.task import periodic_task
kwargs.setdefault('accept_magic_kwargs', True)
return periodic_task(*args, **kwargs)
COMPAT_MODULES = {
'celery': {
'execute': {
'send_task': 'send_task',
},
'decorators': {
'task': _compat_task_decorator,
'periodic_task': _compat_periodic_task_decorator,
},
'log': {
'get_default_logger': 'log.get_default_logger',
'setup_logger': 'log.setup_logger',
        'setup_logging_subsystem': 'log.setup_logging_subsystem',
'redirect_stdouts_to_logger': 'log.redirect_stdouts_to_logger',
},
'messaging': {
'TaskPublisher': 'amqp.TaskPublisher',
'TaskConsumer': 'amqp.TaskConsumer',
'establish_connection': 'connection',
'get_consumer_set': 'amqp.TaskConsumer',
},
'registry': {
'tasks': 'tasks',
},
},
'celery.task': {
'control': {
'broadcast': 'control.broadcast',
'rate_limit': 'control.rate_limit',
'time_limit': 'control.time_limit',
'ping': 'control.ping',
'revoke': 'control.revoke',
'discard_all': 'control.purge',
'inspect': 'control.inspect',
},
'schedules': 'celery.schedules',
'chords': 'celery.canvas',
}
}
class class_property(object):
def __init__(self, getter=None, setter=None):
if getter is not None and not isinstance(getter, classmethod):
getter = classmethod(getter)
if setter is not None and not isinstance(setter, classmethod):
setter = classmethod(setter)
self.__get = getter
self.__set = setter
info = getter.__get__(object) # just need the info attrs.
self.__doc__ = info.__doc__
self.__name__ = info.__name__
self.__module__ = info.__module__
def __get__(self, obj, type=None):
if obj and type is None:
type = obj.__class__
return self.__get.__get__(obj, type)()
def __set__(self, obj, value):
if obj is None:
return self
return self.__set.__get__(obj)(value)
def setter(self, setter):
return self.__class__(self.__get, setter)
def reclassmethod(method):
return classmethod(fun_of_method(method))
class MagicModule(ModuleType):
_compat_modules = ()
_all_by_module = {}
_direct = {}
_object_origins = {}
def __getattr__(self, name):
if name in self._object_origins:
module = __import__(self._object_origins[name], None, None, [name])
for item in self._all_by_module[module.__name__]:
setattr(self, item, getattr(module, item))
return getattr(module, name)
elif name in self._direct: # pragma: no cover
module = __import__(self._direct[name], None, None, [name])
setattr(self, name, module)
return module
return ModuleType.__getattribute__(self, name)
def __dir__(self):
return list(set(self.__all__) | DEFAULT_ATTRS)
def __reduce__(self):
return import_module, (self.__name__, )
def create_module(name, attrs, cls_attrs=None, pkg=None,
base=MagicModule, prepare_attr=None):
fqdn = '.'.join([pkg.__name__, name]) if pkg else name
cls_attrs = {} if cls_attrs is None else cls_attrs
pkg, _, modname = name.rpartition('.')
cls_attrs['__module__'] = pkg
attrs = dict((attr_name, prepare_attr(attr) if prepare_attr else attr)
for attr_name, attr in items(attrs))
module = sys.modules[fqdn] = type(modname, (base, ), cls_attrs)(fqdn)
module.__dict__.update(attrs)
return module
def recreate_module(name, compat_modules=(), by_module={}, direct={},
base=MagicModule, **attrs):
old_module = sys.modules[name]
origins = get_origins(by_module)
compat_modules = COMPAT_MODULES.get(name, ())
cattrs = dict(
_compat_modules=compat_modules,
_all_by_module=by_module, _direct=direct,
_object_origins=origins,
__all__=tuple(set(reduce(
operator.add,
[tuple(v) for v in [compat_modules, origins, direct, attrs]],
))),
)
new_module = create_module(name, attrs, cls_attrs=cattrs, base=base)
new_module.__dict__.update(dict((mod, get_compat_module(new_module, mod))
for mod in compat_modules))
return old_module, new_module
def get_compat_module(pkg, name):
from .local import Proxy
def prepare(attr):
if isinstance(attr, string_t):
return Proxy(getappattr, (attr, ))
return attr
attrs = COMPAT_MODULES[pkg.__name__][name]
if isinstance(attrs, string_t):
fqdn = '.'.join([pkg.__name__, name])
module = sys.modules[fqdn] = import_module(attrs)
return module
attrs['__all__'] = list(attrs)
return create_module(name, dict(attrs), pkg=pkg, prepare_attr=prepare)
def get_origins(defs):
origins = {}
for module, attrs in items(defs):
origins.update(dict((attr, module) for attr in attrs))
return origins
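# A minimal sketch of ``get_origins``: it inverts a ``{module: [attributes]}``
# mapping into ``{attribute: module}``, which MagicModule uses to import the
# right module lazily on attribute access. The module names are hypothetical.
def _get_origins_example():  # pragma: no cover
    by_module = {'celery.app': ['App'], 'celery.canvas': ['chain', 'group']}
    assert get_origins(by_module) == {
        'App': 'celery.app',
        'chain': 'celery.canvas',
        'group': 'celery.canvas',
    }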
|
|
from __future__ import absolute_import
import os
import numpy as nm
from sfepy.base.testing import TestCommon
from sfepy import data_dir
from six.moves import range
# n_vertex, n_edge, n_face, n_cell
# d1 -> d2 : num, n_incident
expected = {
'1_2_2.mesh' : ([3, 2, 0, 0], {
(0, 0) : (3, 4),
(0, 1) : (3, 4),
(1, 0) : (2, 4),
(1, 1) : (2, 2),
}),
'2_3_2.mesh' : ([4, 5, 2, 0], {
(0, 0) : (4, 10),
(0, 1) : (4, 10),
(0, 2) : (4, 6),
(1, 0) : (5, 10),
(1, 1) : (5, 16),
(1, 2) : (5, 6),
(2, 0) : (2, 6),
(2, 1) : (2, 6),
(2, 2) : (2, 2),
}),
'2_4_2.mesh' : ([6, 7, 2, 0], {
(0, 0) : (6, 22),
(0, 1) : (6, 14),
(0, 2) : (6, 8),
(1, 0) : (7, 14),
(1, 1) : (7, 20),
(1, 2) : (7, 8),
(2, 0) : (2, 8),
(2, 1) : (2, 8),
(2, 2) : (2, 2),
}),
'3_4_2.mesh' : ([5, 9, 7, 2], {
(0, 0) : (5, 18),
(0, 1) : (5, 18),
(0, 2) : (5, 21),
(0, 3) : (5, 8),
(1, 0) : (9, 18),
(1, 1) : (9, 48),
(1, 2) : (9, 21),
(1, 3) : (9, 12),
(2, 0) : (7, 21),
(2, 1) : (7, 21),
(2, 2) : (7, 42),
(2, 3) : (7, 8),
(3, 0) : (2, 8),
(3, 1) : (2, 12),
(3, 2) : (2, 8),
(3, 3) : (2, 2),
}),
'3_8_2.mesh' : ([12, 20, 11, 2], {
(0, 0) : (12, 100),
(0, 1) : (12, 40),
(0, 2) : (12, 44),
(0, 3) : (12, 16),
(1, 0) : (20, 40),
(1, 1) : (20, 96),
(1, 2) : (20, 44),
(1, 3) : (20, 24),
(2, 0) : (11, 44),
(2, 1) : (11, 44),
(2, 2) : (11, 72),
(2, 3) : (11, 12),
(3, 0) : (2, 16),
(3, 1) : (2, 24),
(3, 2) : (2, 12),
(3, 3) : (2, 2),
}),
'square_triquad.mesh' : ([470, 1127, 658, 0], {
(0, 0) : (470, 3054),
(0, 1) : (470, 2254),
(0, 2) : (470, 2174),
(1, 0) : (1127, 2254),
(1, 1) : (1127, 9174),
(1, 2) : (1127, 2174),
(2, 0) : (658, 2174),
(2, 1) : (658, 2174),
(2, 2) : (658, 6686),
}),
}
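# How to read ``expected`` (per the comments above): for '1_2_2.mesh' the
# counts [3, 2, 0, 0] mean 3 vertices, 2 edges, no faces and no cells, and the
# entry (1, 0) : (2, 4) means the edge->vertex connectivity involves 2 edges
# with 4 incident vertices in total.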
class Test(TestCommon):
@staticmethod
def from_conf(conf, options):
filename_meshes = [data_dir + '/meshes/elements/%s_2.mesh' % geom
for geom in ['1_2', '2_3', '2_4', '3_4', '3_8']]
filename_meshes.append(data_dir
+ '/meshes/2d/special/square_triquad.mesh')
test = Test(filename_meshes=filename_meshes,
conf=conf, options=options)
return test
def test_cmesh_counts(self):
from sfepy.discrete.fem import Mesh
from sfepy.discrete.fem.geometry_element import create_geometry_elements
from sfepy.discrete.common.extmods.cmesh import CMesh, get_cmem_usage
gels = create_geometry_elements()
ok = True
for filename in self.filename_meshes:
basename = os.path.basename(filename)
enum, esizes = expected[basename]
self.report('mesh: %s' % basename)
mesh = Mesh.from_file(filename)
cmesh = mesh.cmesh
cmesh.set_local_entities(gels)
cmesh.setup_entities()
self.report('dim:', cmesh.dim)
self.report('n_vertex: %d, n_edge: %d, n_face: %d, n_cell: %d' %
tuple(cmesh.num))
_ok = (enum == cmesh.num).all()
if not _ok:
self.report('%s == %s failed!' % (enum, cmesh.num))
ok = ok and _ok
dim = cmesh.dim
for ir in range(dim + 1):
for ic in range(dim + 1):
cmesh.setup_connectivity(ir, ic)
mem_usage1 = get_cmem_usage()[0]
if (ir == dim) and (ic == 0):
continue
cmesh.free_connectivity(ir, ic)
mem_usage2 = get_cmem_usage()[0]
cmesh.setup_connectivity(ir, ic)
mem_usage3 = get_cmem_usage()[0]
conn = cmesh.get_conn(ir, ic)
self.report('(%d, %d) : (%d, %d)'
% (ir, ic, conn.num, conn.n_incident))
sizes = nm.array([conn.num, conn.n_incident])
_ok = (esizes[ir, ic] == sizes).all()
if not _ok:
self.report('%s == %s failed!' % (esizes, sizes))
ok = ok and _ok
_ok1 = mem_usage3 == mem_usage1
_ok2 = mem_usage3 > mem_usage2
if not (_ok1 and _ok2):
                        self.report('unexpected memory usage! (%s, %s, %s)'
                                    % (mem_usage1, mem_usage2, mem_usage3))
ok = ok and (_ok1 and _ok2)
return ok
def test_entity_volumes(self):
import sfepy
from sfepy.discrete.fem import Mesh, FEDomain
from sfepy.discrete.common import Field
from sfepy.discrete import Integral
mesh = Mesh.from_file('meshes/3d/special/cross3d.mesh',
prefix_dir=sfepy.data_dir)
domain = FEDomain('domain', mesh)
omega = domain.create_region('Omega', 'all')
gamma = domain.create_region('Gamma', 'vertices of surface', 'facet')
top = domain.create_region('Top', 'cell 2')
vfield = Field.from_args('v', nm.float64, 'scalar', omega,
approx_order=1)
sfield = Field.from_args('s', nm.float64, 'scalar', gamma,
approx_order=1)
integral = Integral('i', order=3)
vgeo, _ = vfield.get_mapping(omega, integral, 'volume')
domain.create_surface_group(gamma)
sgeo, _ = sfield.get_mapping(gamma, integral, 'surface')
evols = mesh.cmesh.get_volumes(1)
fvols = mesh.cmesh.get_volumes(2) # Approximate for non-planar faces.
cvols = mesh.cmesh.get_volumes(3)
ok = True
_ok = abs(cvols.sum() - vgeo.volume.sum()) < 1e-15
self.report('total cell volume: %s (ok: %s)' % (cvols.sum(), _ok))
ok = _ok and ok
top_evols = nm.array([ 1. , 1. ,
1. , 1. ,
0.7211102550927979, 0.7211102550927979,
0.7211102550927979, 0.7211102550927979,
1.16619037896906 , 1.16619037896906 ,
1.16619037896906 , 1.16619037896906 ])
_ok = nm.allclose(top_evols, evols[top.edges], rtol=0.0, atol=1e-15)
self.report('total top cell edge length: %s (ok: %s)'
% (evols[top.edges].sum(), _ok))
ok = _ok and ok
i1 = [5, 6, 8, 9]
i2 = nm.setdiff1d(nm.arange(len(gamma.faces)), i1)
aux = fvols[gamma.faces] - sgeo.volume.ravel()
_ok = nm.allclose(aux[i1], 0.10560208437556773, rtol=0.0, atol=1e-15)
ok = _ok and ok
self.report('non-planar faces diff: %s (ok: %s)' % (aux[i1], _ok))
_ok = (nm.abs(aux[i2]) < 1e-15).all()
self.report('max. planar faces diff: %s (ok: %s)'
% (nm.abs(aux[i2]).max(), _ok))
ok = _ok and ok
return ok
|
|
import datetime
from django import forms
#
# ExtJs Field models
#
#
# MISSING :
#
#'CommaSeparatedIntegerField' : {'type':'string'}
#'FileField' : {'type':'string'}
#'FilePathField' : {'type':'string'}
#'ImageField' : {'type':'string'}
#'IPAddressField' : {'type':'string'}
#'NullBooleanField' : {'type':'boolean'}
#'PositiveIntegerField' : {'type':'int'}
#'PositiveSmallIntegerField' : {'type':'int'}
#'SmallIntegerField' : {'type':'int'}
#'TextField' : {'type':'string'}
class Field(object):
WIDTH = 200
COL_WIDTH = None
def __init__(self, field):
self.field = field # django field
def getEditor(self, initialValue = False, data = {}):
name = self.getName()
label = name
if getattr(self.field, 'verbose_name', None):
label = unicode(self.field.verbose_name)
# if not self.field.blank:
# label += '*'
conf = {
'xtype':'textfield'
,'fieldLabel':label
,'allowBlank':self.allowBlank()
,'name':name
}
if getattr(self.field, 'initial', None):
conf['value'] = unicode(self.field.initial)
if initialValue:
conf['value'] = self.getValue(initialValue)
if getattr(self.field, 'help_text', None):
conf['emptyText'] = unicode(getattr(self.field, 'help_text'))
#conf['tooltip'] = unicode(getattr(self.field, 'help_text'))
if getattr(self.field, 'max_length', None):
pixels = self.field.max_length*5
if pixels<40:
pixels=40
if pixels>300:
pixels=300
conf['width'] = pixels
if self.WIDTH:
conf['width'] = self.WIDTH
# disable some fields : eg: autofields and auto datetimefields
if not getattr(self.field, 'editable', True) or self.field.__class__.__name__ == 'AutoField':
conf = {
'xtype':'hidden'
,'disabled':True
,'editable':False
,'name':name
}
conf.update(data)
        # if self.field.name in _get_validation_exclusions
return conf
def getName(self):
if isinstance(self.field, forms.Field):
name = self.field.label
else:
name = self.field.name
if not name:
name = self.field.__class__.__name__
return unicode(name)
def allowBlank(self):
allow = True
if isinstance(self.field, forms.Field):
allow = not(self.field.required)
else:
allow = self.field.blank
return allow
def getReaderConfig(self):
conf = {
'name': self.getName()
,'allowBlank': self.allowBlank()
}
return conf
def getColumnConfig(self):
conf = {
'header': unicode(self.field.verbose_name),
'tooltip': unicode(self.field.verbose_name),
'name':unicode(self.field.name),
'sortable': True,
'dataIndex': unicode(self.field.name),
'editor':self.getEditor()
}
if self.COL_WIDTH:
conf['width'] = self.COL_WIDTH
return conf
def parseValue(self, value):
# called by the handler
# transform input data to fit field format
return value
def getValue(self, value):
# format data for ExtJs emitter
return value
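# A rough sketch of what the base ``Field`` yields for a hypothetical Django
# model field ``title`` (max_length=40, blank=False); the ``Article`` model is
# illustrative only:
#   Field(Article._meta.get_field('title')).getEditor()
#   -> {'xtype': 'textfield', 'fieldLabel': u'title', 'allowBlank': False,
#       'name': u'title', 'width': 200}
#   Field(Article._meta.get_field('title')).getReaderConfig()
#   -> {'name': u'title', 'allowBlank': False}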
class AutoField(Field):
WIDTH=40
def getEditor(self, *args, **kwargs):
conf = super(AutoField, self).getEditor(*args, **kwargs)
conf.update({'xtype':'hidden', 'editable':False} )
return conf
def getColumnConfig(self):
conf = super(AutoField, self).getColumnConfig()
conf['hidden'] = True
return conf
class EmailField(Field):
WIDTH=250
def getEditor(self, *args, **kwargs):
conf = super(EmailField, self).getEditor(*args, **kwargs)
conf.update({'xtype':'textfield', 'vtype':'email'} )
return conf
class URLField(Field):
WIDTH=250
def getEditor(self, *args, **kwargs):
conf = super(URLField, self).getEditor(*args, **kwargs)
conf.update({'xtype':'textfield', 'vtype':'url'} )
return conf
class CharField(Field):
def getEditor(self, *args, **kwargs):
conf = super(CharField, self).getEditor(*args, **kwargs)
if getattr(self.field, 'choices', None):
choices = {
'xtype':'awesomecombo'
,'format':'string'
,'displayField':'value'
,'hiddenName':conf.get('name')
,'valueField':'id'
,'mode':'local'
,'triggerAction':'all'
,'editable':False
,'forceSelection': True
,'store':{
'xtype':'simplestore'
,'fields':['id','value']
,'data':self.field.choices
}
}
conf.update(choices )
return conf
        return conf
ChoiceField = CharField
SlugField = CharField
class MultipleChoiceField(ChoiceField):
def getEditor(self, *args, **kwargs):
conf = super(MultipleChoiceField, self).getEditor(*args, **kwargs)
conf['enableMultiSelect'] = True
conf['format'] = 'array'
return conf
class MultipleStringChoiceField(ChoiceField):
def getEditor(self, *args, **kwargs):
conf = super(MultipleStringChoiceField, self).getEditor(*args, **kwargs)
conf['enableMultiSelect'] = True
conf['format'] = 'string'
return conf
class DecimalField(Field):
FORMAT_RENDERER = '0.00'
TYPE = 'float'
COL_WIDTH = 50
def getEditor(self, *args, **kwargs):
conf = super(DecimalField, self).getEditor(*args, **kwargs)
conf.update({'xtype':'numberfield', 'style':'text-align:right', 'width':50} )
return conf
def getReaderConfig(self):
conf = super(DecimalField, self).getReaderConfig()
conf['type'] = self.TYPE
return conf
def getColumnConfig(self):
conf = super(DecimalField, self).getColumnConfig()
conf['xtype'] = 'numbercolumn'
conf['align'] = 'right'
conf['format'] = self.FORMAT_RENDERER
return conf
def parseValue(self, value):
if value:
value = str(value)
return value
class IntegerField(DecimalField):
FORMAT_RENDERER = '0'
TYPE = 'int'
FloatField = DecimalField
class DateTimeField(Field):
FORMAT = 'Y-m-d H:i:s'
FORMAT_RENDERER = 'Y-m-d H:i'
EDITOR_XTYPE = 'datefield'
FORMAT_PARSE = '%Y-%m-%dT%H:%M:%S'
FORMAT_GET = '%Y-%m-%dT%H:%M:%S'
WIDTH = 50
COL_WIDTH = 50
def getEditor(self, *args, **kwargs):
conf = super(DateTimeField, self).getEditor(*args, **kwargs)
conf.update({'xtype':self.EDITOR_XTYPE, 'format':self.FORMAT} )
return conf
def getReaderConfig(self):
conf = super(DateTimeField, self).getReaderConfig()
conf['dateFormat'] = self.FORMAT
conf['type'] = 'date'
return conf
def getColumnConfig(self):
conf = super(DateTimeField, self).getColumnConfig()
conf['xtype'] = 'datecolumn'
conf['align'] = 'center'
conf['format'] = self.FORMAT_RENDERER
return conf
def parseValue(self, value):
if value:
value = datetime.datetime.strptime(value, self.FORMAT_PARSE)
return value
def getValue(self, value):
# format data for ExtJs emitter
return value.strftime(self.FORMAT_GET)
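# A minimal sketch of the ``DateTimeField`` round trip between ExtJs string
# values and ``datetime`` objects.
def _datetimefield_roundtrip_example():  # pragma: no cover
    f = DateTimeField(field=None)
    value = f.parseValue('2013-05-01T12:30:00')
    assert value == datetime.datetime(2013, 5, 1, 12, 30, 0)
    assert f.getValue(value) == '2013-05-01T12:30:00'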
class DateField(DateTimeField):
FORMAT = 'Y-m-d'
FORMAT_RENDERER = 'Y-m-d'
FORMAT_PARSE = '%Y-%m-%d'
WIDTH = 30
COL_WIDTH = 30
def parseValue(self, value):
if value:
if value.find('T')>0:
value = value.split('T')[0]
value = datetime.datetime.strptime(value, self.FORMAT_PARSE).date()
return value
class TimeField(DateTimeField):
FORMAT = 'H:i:s'
FORMAT_RENDERER = 'H:i'
EDITOR_XTYPE = 'timefield'
FORMAT_PARSE = '%H:%M:%S'
WIDTH = 30
COL_WIDTH = 30
def parseValue(self, value):
if value:
if value.find('T')>0:
value = value.split('T')[1]
value = datetime.datetime.strptime(value, self.FORMAT_PARSE).time()
return value
class BooleanField(Field):
WIDTH = 30
COL_WIDTH = 30
def getEditor(self, *args, **kwargs):
conf = super(BooleanField, self).getEditor(*args, **kwargs)
conf.update({'xtype':'checkbox'} )
if kwargs.get('initialValue') == True:
conf.update({'checked':True} )
if getattr(self.field, 'initial', None) == True:
conf['checked'] = True
return conf
def getColumnConfig(self):
conf = super(BooleanField, self).getColumnConfig()
conf['xtype'] = 'checkcolumn'
return conf
def getReaderConfig(self):
conf = super(BooleanField, self).getReaderConfig()
conf['type'] = 'bool'
return conf
class ForeignKey(Field):
MANYTOMANY = False
RENDERER = 'Ext.django.FKRenderer'
def getEditor(self, *args, **kwargs):
conf = super(ForeignKey, self).getEditor(*args, **kwargs)
conf.update({
'xtype': 'djangocombo',
'enableMultiSelect': self.MANYTOMANY,
'model': '%s.%s' % (
self.field.related.parent_model._meta.app_label,
self.field.related.parent_model._meta.object_name,
),
})
return conf
def getColumnConfig(self):
conf = super(ForeignKey, self).getColumnConfig()
conf['related'] = True
conf['renderer'] = {'fn': self.RENDERER, 'scope': 'this'}
return conf
def getReaderConfig(self):
conf = super(ForeignKey, self).getReaderConfig()
conf['defaultValue'] = ''
return conf
def parseValue(self, value):
if value:
value = self.parseFK(self.field.rel.to, value)[0]
if not value:
value = None
return value
def parseFK(self, cls, value):
''' translates FK or M2M values to instance list '''
relateds = []
if isinstance(value, list):
for id in value:
if isinstance(id, dict) and id.has_key('id'):
item = cls.objects.get(pk=id['id'])
else:
item = cls.objects.get(pk=id)
relateds.append(item)
elif isinstance(value, dict) and value.has_key('id'):
relateds.append(cls.objects.get(pk=value['id']))
else:
relateds.append(cls.objects.get(pk=value))
return relateds
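# A rough sketch of ``parseFK``: it accepts plain primary keys or
# ``{'id': pk}`` dicts, single values or lists, and resolves them to model
# instances. The ``Author`` model is hypothetical:
#   fk.parseFK(Author, [3, {'id': 7}])  # -> [<Author pk=3>, <Author pk=7>]
#   fk.parseFK(Author, {'id': 7})       # -> [<Author pk=7>]
#   fk.parseFK(Author, 3)               # -> [<Author pk=3>]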
class ModelChoiceField(ForeignKey):
def getEditor(self, *args, **kwargs):
conf = super(ForeignKey, self).getEditor(*args, **kwargs)
model = self.field._queryset.model
conf.update({
'xtype': 'djangocombo',
'enableMultiSelect': self.MANYTOMANY,
'model': '%s.%s' % (
model._meta.app_label,
model._meta.object_name,
),
})
return conf
class ManyToManyField(ForeignKey):
MANYTOMANY = True
RENDERER = 'Ext.django.M2MRenderer'
def parseValue(self, value):
if value:
value = self.parseFK(self.field.rel.to, value)
return value
class ModelMultipleChoiceField(ModelChoiceField, ManyToManyField):
pass
|
|
from __future__ import annotations
import asyncio
import email.utils
import functools
import http
import inspect
import logging
import socket
import warnings
from types import TracebackType
from typing import (
Any,
Awaitable,
Callable,
Generator,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
cast,
)
from ..connection import State
from ..datastructures import Headers, HeadersLike, MultipleValuesError
from ..exceptions import (
AbortHandshake,
InvalidHandshake,
InvalidHeader,
InvalidMessage,
InvalidOrigin,
InvalidUpgrade,
NegotiationError,
)
from ..extensions import Extension, ServerExtensionFactory
from ..extensions.permessage_deflate import enable_server_permessage_deflate
from ..headers import (
build_extension,
parse_extension,
parse_subprotocol,
validate_subprotocols,
)
from ..http import USER_AGENT
from ..typing import ExtensionHeader, LoggerLike, Origin, Subprotocol
from .compatibility import loop_if_py_lt_38
from .handshake import build_response, check_request
from .http import read_request
from .protocol import WebSocketCommonProtocol
__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"]
HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]]
HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes]
class WebSocketServerProtocol(WebSocketCommonProtocol):
"""
WebSocket server connection.
:class:`WebSocketServerProtocol` provides :meth:`recv` and :meth:`send`
coroutines for receiving and sending messages.
It supports asynchronous iteration to receive messages::
async for message in websocket:
await process(message)
The iterator exits normally when the connection is closed with close code
1000 (OK) or 1001 (going away). It raises
a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection
is closed with any other code.
You may customize the opening handshake in a subclass by
overriding :meth:`process_request` or :meth:`select_subprotocol`.
Args:
ws_server: WebSocket server that created this connection.
See :func:`serve` for the documentation of ``ws_handler``, ``logger``, ``origins``,
``extensions``, ``subprotocols``, and ``extra_headers``.
See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
"""
is_client = False
side = "server"
def __init__(
self,
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
],
ws_server: WebSocketServer,
*,
logger: Optional[LoggerLike] = None,
origins: Optional[Sequence[Optional[Origin]]] = None,
extensions: Optional[Sequence[ServerExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLikeOrCallable] = None,
process_request: Optional[
Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
] = None,
select_subprotocol: Optional[
Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
] = None,
**kwargs: Any,
) -> None:
if logger is None:
logger = logging.getLogger("websockets.server")
super().__init__(logger=logger, **kwargs)
# For backwards compatibility with 6.0 or earlier.
if origins is not None and "" in origins:
warnings.warn("use None instead of '' in origins", DeprecationWarning)
origins = [None if origin == "" else origin for origin in origins]
# For backwards compatibility with 10.0 or earlier. Done here in
# addition to serve to trigger the deprecation warning on direct
# use of WebSocketServerProtocol.
self.ws_handler = remove_path_argument(ws_handler)
self.ws_server = ws_server
self.origins = origins
self.available_extensions = extensions
self.available_subprotocols = subprotocols
self.extra_headers = extra_headers
self._process_request = process_request
self._select_subprotocol = select_subprotocol
def connection_made(self, transport: asyncio.BaseTransport) -> None:
"""
Register connection and initialize a task to handle it.
"""
super().connection_made(transport)
# Register the connection with the server before creating the handler
# task. Registering at the beginning of the handler coroutine would
# create a race condition between the creation of the task, which
# schedules its execution, and the moment the handler starts running.
self.ws_server.register(self)
self.handler_task = self.loop.create_task(self.handler())
async def handler(self) -> None:
"""
Handle the lifecycle of a WebSocket connection.
Since this method doesn't have a caller able to handle exceptions, it
        attempts to log relevant ones and guarantees that the TCP connection is
closed before exiting.
"""
try:
try:
await self.handshake(
origins=self.origins,
available_extensions=self.available_extensions,
available_subprotocols=self.available_subprotocols,
extra_headers=self.extra_headers,
)
# Remove this branch when dropping support for Python < 3.8
# because CancelledError no longer inherits Exception.
except asyncio.CancelledError: # pragma: no cover
raise
except ConnectionError:
raise
except Exception as exc:
if isinstance(exc, AbortHandshake):
status, headers, body = exc.status, exc.headers, exc.body
elif isinstance(exc, InvalidOrigin):
if self.debug:
self.logger.debug("! invalid origin", exc_info=True)
status, headers, body = (
http.HTTPStatus.FORBIDDEN,
Headers(),
f"Failed to open a WebSocket connection: {exc}.\n".encode(),
)
elif isinstance(exc, InvalidUpgrade):
if self.debug:
self.logger.debug("! invalid upgrade", exc_info=True)
status, headers, body = (
http.HTTPStatus.UPGRADE_REQUIRED,
Headers([("Upgrade", "websocket")]),
(
f"Failed to open a WebSocket connection: {exc}.\n"
f"\n"
f"You cannot access a WebSocket server directly "
f"with a browser. You need a WebSocket client.\n"
).encode(),
)
elif isinstance(exc, InvalidHandshake):
if self.debug:
self.logger.debug("! invalid handshake", exc_info=True)
status, headers, body = (
http.HTTPStatus.BAD_REQUEST,
Headers(),
f"Failed to open a WebSocket connection: {exc}.\n".encode(),
)
else:
self.logger.error("opening handshake failed", exc_info=True)
status, headers, body = (
http.HTTPStatus.INTERNAL_SERVER_ERROR,
Headers(),
(
b"Failed to open a WebSocket connection.\n"
b"See server log for more information.\n"
),
)
headers.setdefault("Date", email.utils.formatdate(usegmt=True))
headers.setdefault("Server", USER_AGENT)
headers.setdefault("Content-Length", str(len(body)))
headers.setdefault("Content-Type", "text/plain")
headers.setdefault("Connection", "close")
self.write_http_response(status, headers, body)
self.logger.info(
"connection failed (%d %s)", status.value, status.phrase
)
await self.close_transport()
return
try:
await self.ws_handler(self)
except Exception:
self.logger.error("connection handler failed", exc_info=True)
if not self.closed:
self.fail_connection(1011)
raise
try:
await self.close()
except ConnectionError:
raise
except Exception:
self.logger.error("closing handshake failed", exc_info=True)
raise
except Exception:
# Last-ditch attempt to avoid leaking connections on errors.
try:
self.transport.close()
except Exception: # pragma: no cover
pass
finally:
# Unregister the connection with the server when the handler task
# terminates. Registration is tied to the lifecycle of the handler
# task because the server waits for tasks attached to registered
# connections before terminating.
self.ws_server.unregister(self)
self.logger.info("connection closed")
async def read_http_request(self) -> Tuple[str, Headers]:
"""
Read request line and headers from the HTTP request.
If the request contains a body, it may be read from ``self.reader``
after this coroutine returns.
Raises:
InvalidMessage: if the HTTP message is malformed or isn't an
HTTP/1.1 GET request.
"""
try:
path, headers = await read_request(self.reader)
except asyncio.CancelledError: # pragma: no cover
raise
except Exception as exc:
raise InvalidMessage("did not receive a valid HTTP request") from exc
if self.debug:
self.logger.debug("< GET %s HTTP/1.1", path)
for key, value in headers.raw_items():
self.logger.debug("< %s: %s", key, value)
self.path = path
self.request_headers = headers
return path, headers
def write_http_response(
self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None
) -> None:
"""
Write status line and headers to the HTTP response.
        This method is also able to write a response body.
"""
self.response_headers = headers
if self.debug:
self.logger.debug("> HTTP/1.1 %d %s", status.value, status.phrase)
for key, value in headers.raw_items():
self.logger.debug("> %s: %s", key, value)
if body is not None:
self.logger.debug("> [body] (%d bytes)", len(body))
# Since the status line and headers only contain ASCII characters,
# we can keep this simple.
response = f"HTTP/1.1 {status.value} {status.phrase}\r\n"
response += str(headers)
self.transport.write(response.encode())
if body is not None:
self.transport.write(body)
async def process_request(
self, path: str, request_headers: Headers
) -> Optional[HTTPResponse]:
"""
Intercept the HTTP request and return an HTTP response if appropriate.
You may override this method in a :class:`WebSocketServerProtocol`
subclass, for example:
        * to return an HTTP 200 OK response on a given path; then a load
          balancer can use this path for a health check;
        * to authenticate the request and return an HTTP 401 Unauthorized or
          an HTTP 403 Forbidden when authentication fails.
You may also override this method with the ``process_request``
argument of :func:`serve` and :class:`WebSocketServerProtocol`. This
is equivalent, except ``process_request`` won't have access to the
protocol instance, so it can't store information for later use.
:meth:`process_request` is expected to complete quickly. If it may run
for a long time, then it should await :meth:`wait_closed` and exit if
:meth:`wait_closed` completes, or else it could prevent the server
from shutting down.
Args:
path: request path, including optional query string.
request_headers: request headers.
Returns:
Optional[Tuple[http.HTTPStatus, HeadersLike, bytes]]: :obj:`None`
to continue the WebSocket handshake normally.
            An HTTP response, represented by a 3-tuple of the response status,
headers, and body, to abort the WebSocket handshake and return
that HTTP response instead.
"""
if self._process_request is not None:
response = self._process_request(path, request_headers)
if isinstance(response, Awaitable):
return await response
else:
# For backwards compatibility with 7.0.
warnings.warn(
"declare process_request as a coroutine", DeprecationWarning
)
return response
return None
@staticmethod
def process_origin(
headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None
) -> Optional[Origin]:
"""
Handle the Origin HTTP request header.
Args:
headers: request headers.
origins: optional list of acceptable origins.
Raises:
InvalidOrigin: if the origin isn't acceptable.
"""
# "The user agent MUST NOT include more than one Origin header field"
# per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
try:
origin = cast(Optional[Origin], headers.get("Origin"))
except MultipleValuesError as exc:
raise InvalidHeader("Origin", "more than one Origin header found") from exc
if origins is not None:
if origin not in origins:
raise InvalidOrigin(origin)
return origin
@staticmethod
def process_extensions(
headers: Headers,
available_extensions: Optional[Sequence[ServerExtensionFactory]],
) -> Tuple[Optional[str], List[Extension]]:
"""
Handle the Sec-WebSocket-Extensions HTTP request header.
Accept or reject each extension proposed in the client request.
Negotiate parameters for accepted extensions.
Return the Sec-WebSocket-Extensions HTTP response header and the list
of accepted extensions.
:rfc:`6455` leaves the rules up to the specification of each
        extension.
To provide this level of flexibility, for each extension proposed by
the client, we check for a match with each extension available in the
server configuration. If no match is found, the extension is ignored.
If several variants of the same extension are proposed by the client,
it may be accepted several times, which won't make sense in general.
Extensions must implement their own requirements. For this purpose,
the list of previously accepted extensions is provided.
This process doesn't allow the server to reorder extensions. It can
only select a subset of the extensions proposed by the client.
Other requirements, for example related to mandatory extensions or the
order of extensions, may be implemented by overriding this method.
Args:
headers: request headers.
extensions: optional list of supported extensions.
Raises:
InvalidHandshake: to abort the handshake with an HTTP 400 error.
"""
response_header_value: Optional[str] = None
extension_headers: List[ExtensionHeader] = []
accepted_extensions: List[Extension] = []
header_values = headers.get_all("Sec-WebSocket-Extensions")
if header_values and available_extensions:
parsed_header_values: List[ExtensionHeader] = sum(
[parse_extension(header_value) for header_value in header_values], []
)
for name, request_params in parsed_header_values:
for ext_factory in available_extensions:
# Skip non-matching extensions based on their name.
if ext_factory.name != name:
continue
# Skip non-matching extensions based on their params.
try:
response_params, extension = ext_factory.process_request_params(
request_params, accepted_extensions
)
except NegotiationError:
continue
# Add matching extension to the final list.
extension_headers.append((name, response_params))
accepted_extensions.append(extension)
# Break out of the loop once we have a match.
break
# If we didn't break from the loop, no extension in our list
# matched what the client sent. The extension is declined.
# Serialize extension header.
if extension_headers:
response_header_value = build_extension(extension_headers)
return response_header_value, accepted_extensions
# Not @staticmethod because it calls self.select_subprotocol()
def process_subprotocol(
self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
) -> Optional[Subprotocol]:
"""
Handle the Sec-WebSocket-Protocol HTTP request header.
Return Sec-WebSocket-Protocol HTTP response header, which is the same
as the selected subprotocol.
Args:
headers: request headers.
available_subprotocols: optional list of supported subprotocols.
Raises:
InvalidHandshake: to abort the handshake with an HTTP 400 error.
"""
subprotocol: Optional[Subprotocol] = None
header_values = headers.get_all("Sec-WebSocket-Protocol")
if header_values and available_subprotocols:
parsed_header_values: List[Subprotocol] = sum(
[parse_subprotocol(header_value) for header_value in header_values], []
)
subprotocol = self.select_subprotocol(
parsed_header_values, available_subprotocols
)
return subprotocol
def select_subprotocol(
self,
client_subprotocols: Sequence[Subprotocol],
server_subprotocols: Sequence[Subprotocol],
) -> Optional[Subprotocol]:
"""
Pick a subprotocol among those offered by the client.
If several subprotocols are supported by the client and the server,
the default implementation selects the preferred subprotocol by
giving equal value to the priorities of the client and the server.
If no subprotocol is supported by the client and the server, it
proceeds without a subprotocol.
This is unlikely to be the most useful implementation in practice.
Many servers providing a subprotocol will require that the client
uses that subprotocol. Such rules can be implemented in a subclass.
You may also override this method with the ``select_subprotocol``
argument of :func:`serve` and :class:`WebSocketServerProtocol`.
Args:
client_subprotocols: list of subprotocols offered by the client.
server_subprotocols: list of subprotocols available on the server.
Returns:
Optional[Subprotocol]: Selected subprotocol.
:obj:`None` to continue without a subprotocol.
"""
if self._select_subprotocol is not None:
return self._select_subprotocol(client_subprotocols, server_subprotocols)
subprotocols = set(client_subprotocols) & set(server_subprotocols)
if not subprotocols:
return None
priority = lambda p: (
client_subprotocols.index(p) + server_subprotocols.index(p)
)
return sorted(subprotocols, key=priority)[0]
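    # For example, with client_subprotocols = ["a", "b"] and
    # server_subprotocols = ["a", "c", "b"], the combined indices are
    # a -> 0 + 0 = 0 and b -> 1 + 2 = 3, so "a" is selected: the lowest
    # combined position across both lists wins. (Names are illustrative.)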
async def handshake(
self,
origins: Optional[Sequence[Optional[Origin]]] = None,
available_extensions: Optional[Sequence[ServerExtensionFactory]] = None,
available_subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLikeOrCallable] = None,
) -> str:
"""
Perform the server side of the opening handshake.
Args:
origins: list of acceptable values of the Origin HTTP header;
include :obj:`None` if the lack of an origin is acceptable.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of
decreasing preference.
extra_headers: arbitrary HTTP headers to add to the response when
the handshake succeeds.
Returns:
str: path of the URI of the request.
Raises:
InvalidHandshake: if the handshake fails.
"""
path, request_headers = await self.read_http_request()
# Hook for customizing request handling, for example checking
# authentication or treating some paths as plain HTTP endpoints.
early_response_awaitable = self.process_request(path, request_headers)
if isinstance(early_response_awaitable, Awaitable):
early_response = await early_response_awaitable
else:
# For backwards compatibility with 7.0.
warnings.warn("declare process_request as a coroutine", DeprecationWarning)
early_response = early_response_awaitable
# The connection may drop while process_request is running.
if self.state is State.CLOSED:
raise self.connection_closed_exc() # pragma: no cover
# Change the response to a 503 error if the server is shutting down.
if not self.ws_server.is_serving():
early_response = (
http.HTTPStatus.SERVICE_UNAVAILABLE,
[],
b"Server is shutting down.\n",
)
if early_response is not None:
raise AbortHandshake(*early_response)
key = check_request(request_headers)
self.origin = self.process_origin(request_headers, origins)
extensions_header, self.extensions = self.process_extensions(
request_headers, available_extensions
)
protocol_header = self.subprotocol = self.process_subprotocol(
request_headers, available_subprotocols
)
response_headers = Headers()
build_response(response_headers, key)
if extensions_header is not None:
response_headers["Sec-WebSocket-Extensions"] = extensions_header
if protocol_header is not None:
response_headers["Sec-WebSocket-Protocol"] = protocol_header
if callable(extra_headers):
extra_headers = extra_headers(path, self.request_headers)
if extra_headers is not None:
response_headers.update(extra_headers)
response_headers.setdefault("Date", email.utils.formatdate(usegmt=True))
response_headers.setdefault("Server", USER_AGENT)
self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers)
self.logger.info("connection open")
self.connection_open()
return path
class WebSocketServer:
"""
WebSocket server returned by :func:`serve`.
This class provides the same interface as :class:`~asyncio.Server`,
notably the :meth:`~asyncio.Server.close`
and :meth:`~asyncio.Server.wait_closed` methods.
It keeps track of WebSocket connections in order to close them properly
when shutting down.
Args:
logger: logger for this server;
defaults to ``logging.getLogger("websockets.server")``;
see the :doc:`logging guide <../topics/logging>` for details.
"""
def __init__(self, logger: Optional[LoggerLike] = None):
if logger is None:
logger = logging.getLogger("websockets.server")
self.logger = logger
# Keep track of active connections.
self.websockets: Set[WebSocketServerProtocol] = set()
# Task responsible for closing the server and terminating connections.
self.close_task: Optional[asyncio.Task[None]] = None
# Completed when the server is closed and connections are terminated.
self.closed_waiter: asyncio.Future[None]
def wrap(self, server: asyncio.base_events.Server) -> None:
"""
Attach to a given :class:`~asyncio.Server`.
Since :meth:`~asyncio.loop.create_server` doesn't support injecting a
custom ``Server`` class, the easiest solution that doesn't rely on
private :mod:`asyncio` APIs is to:
- instantiate a :class:`WebSocketServer`
- give the protocol factory a reference to that instance
- call :meth:`~asyncio.loop.create_server` with the factory
- attach the resulting :class:`~asyncio.Server` with this method
"""
self.server = server
for sock in server.sockets:
if sock.family == socket.AF_INET:
name = "%s:%d" % sock.getsockname()
elif sock.family == socket.AF_INET6:
name = "[%s]:%d" % sock.getsockname()[:2]
elif sock.family == socket.AF_UNIX:
name = sock.getsockname()
# In the unlikely event that someone runs websockets over a
# protocol other than IP or Unix sockets, avoid crashing.
else: # pragma: no cover
name = str(sock.getsockname())
self.logger.info("server listening on %s", name)
# Initialized here because we need a reference to the event loop.
# This should be moved back to __init__ in Python 3.10.
self.closed_waiter = server.get_loop().create_future()
def register(self, protocol: WebSocketServerProtocol) -> None:
"""
Register a connection with this server.
"""
self.websockets.add(protocol)
def unregister(self, protocol: WebSocketServerProtocol) -> None:
"""
Unregister a connection with this server.
"""
self.websockets.remove(protocol)
def close(self) -> None:
"""
Close the server.
This method:
* closes the underlying :class:`~asyncio.Server`;
* rejects new WebSocket connections with an HTTP 503 (service
unavailable) error; this happens when the server accepted the TCP
connection but didn't complete the WebSocket opening handshake prior
to closing;
* closes open WebSocket connections with close code 1001 (going away).
:meth:`close` is idempotent.
"""
if self.close_task is None:
self.close_task = self.get_loop().create_task(self._close())
async def _close(self) -> None:
"""
Implementation of :meth:`close`.
This calls :meth:`~asyncio.Server.close` on the underlying
:class:`~asyncio.Server` object to stop accepting new connections and
then closes open connections with close code 1001.
"""
self.logger.info("server closing")
# Stop accepting new connections.
self.server.close()
# Wait until self.server.close() completes.
await self.server.wait_closed()
# Wait until all accepted connections reach connection_made() and call
# register(). See https://bugs.python.org/issue34852 for details.
await asyncio.sleep(0, **loop_if_py_lt_38(self.get_loop()))
# Close OPEN connections with status code 1001. Since the server was
# closed, handshake() closes OPENING connections with a HTTP 503
# error. Wait until all connections are closed.
# asyncio.wait doesn't accept an empty first argument
if self.websockets:
await asyncio.wait(
[
asyncio.create_task(websocket.close(1001))
for websocket in self.websockets
],
**loop_if_py_lt_38(self.get_loop()),
)
# Wait until all connection handlers are complete.
# asyncio.wait doesn't accept an empty first argument.
if self.websockets:
await asyncio.wait(
[websocket.handler_task for websocket in self.websockets],
**loop_if_py_lt_38(self.get_loop()),
)
# Tell wait_closed() to return.
self.closed_waiter.set_result(None)
self.logger.info("server closed")
async def wait_closed(self) -> None:
"""
Wait until the server is closed.
When :meth:`wait_closed` returns, all TCP connections are closed and
all connection handlers have returned.
To ensure a fast shutdown, a connection handler should always be
awaiting at least one of:
* :meth:`~WebSocketServerProtocol.recv`: when the connection is closed,
it raises :exc:`~websockets.exceptions.ConnectionClosedOK`;
* :meth:`~WebSocketServerProtocol.wait_closed`: when the connection is
closed, it returns.
Then the connection handler is immediately notified of the shutdown;
it can clean up and exit.
"""
await asyncio.shield(self.closed_waiter)
def get_loop(self) -> asyncio.AbstractEventLoop:
"""
See :meth:`asyncio.Server.get_loop`.
"""
return self.server.get_loop()
def is_serving(self) -> bool:
"""
See :meth:`asyncio.Server.is_serving`.
"""
return self.server.is_serving()
async def start_serving(self) -> None:
"""
See :meth:`asyncio.Server.start_serving`.
"""
await self.server.start_serving() # pragma: no cover
async def serve_forever(self) -> None:
"""
See :meth:`asyncio.Server.serve_forever`.
"""
await self.server.serve_forever() # pragma: no cover
@property
def sockets(self) -> Iterable[socket.socket]:
"""
See :attr:`asyncio.Server.sockets`.
"""
return self.server.sockets
async def __aenter__(self) -> WebSocketServer:
return self # pragma: no cover
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
self.close() # pragma: no cover
await self.wait_closed() # pragma: no cover
class Serve:
"""
Start a WebSocket server listening on ``host`` and ``port``.
Whenever a client connects, the server creates a
:class:`WebSocketServerProtocol`, performs the opening handshake, and
delegates to the connection handler, ``ws_handler``.
The handler receives the :class:`WebSocketServerProtocol` and uses it to
send and receive messages.
Once the handler completes, either normally or with an exception, the
server performs the closing handshake and closes the connection.
Awaiting :func:`serve` yields a :class:`WebSocketServer`. This object
provides :meth:`~WebSocketServer.close` and
:meth:`~WebSocketServer.wait_closed` methods for shutting down the server.
:func:`serve` can be used as an asynchronous context manager::
stop = asyncio.Future() # set this future to exit the server
async with serve(...):
await stop
The server is shut down automatically when exiting the context.
Args:
ws_handler: connection handler. It receives the WebSocket connection,
            which is a :class:`WebSocketServerProtocol`, as an argument.
host: network interfaces the server is bound to;
see :meth:`~asyncio.loop.create_server` for details.
port: TCP port the server listens on;
see :meth:`~asyncio.loop.create_server` for details.
create_protocol: factory for the :class:`asyncio.Protocol` managing
the connection; defaults to :class:`WebSocketServerProtocol`; may
be set to a wrapper or a subclass to customize connection handling.
logger: logger for this server;
defaults to ``logging.getLogger("websockets.server")``;
see the :doc:`logging guide <../topics/logging>` for details.
compression: shortcut that enables the "permessage-deflate" extension
by default; may be set to :obj:`None` to disable compression;
see the :doc:`compression guide <../topics/compression>` for details.
origins: acceptable values of the ``Origin`` header; include
:obj:`None` in the list if the lack of an origin is acceptable.
This is useful for defending against Cross-Site WebSocket
Hijacking attacks.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of decreasing
preference.
extra_headers (Union[HeadersLike, Callable[[str, Headers], HeadersLike]]):
arbitrary HTTP headers to add to the request; this can be
a :data:`~websockets.datastructures.HeadersLike` or a callable
taking the request path and headers in arguments and returning
a :data:`~websockets.datastructures.HeadersLike`.
process_request (Optional[Callable[[str, Headers], \
Awaitable[Optional[Tuple[http.HTTPStatus, HeadersLike, bytes]]]]]):
intercept HTTP request before the opening handshake;
see :meth:`~WebSocketServerProtocol.process_request` for details.
select_subprotocol: select a subprotocol supported by the client;
see :meth:`~WebSocketServerProtocol.select_subprotocol` for details.
See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
    Any other keyword arguments are passed to the event loop's
:meth:`~asyncio.loop.create_server` method.
For example:
* You can set ``ssl`` to a :class:`~ssl.SSLContext` to enable TLS.
* You can set ``sock`` to a :obj:`~socket.socket` that you created
outside of websockets.
Returns:
WebSocketServer: WebSocket server.
"""
def __init__(
self,
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
],
host: Optional[Union[str, Sequence[str]]] = None,
port: Optional[int] = None,
*,
create_protocol: Optional[Callable[[Any], WebSocketServerProtocol]] = None,
logger: Optional[LoggerLike] = None,
compression: Optional[str] = "deflate",
origins: Optional[Sequence[Optional[Origin]]] = None,
extensions: Optional[Sequence[ServerExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLikeOrCallable] = None,
process_request: Optional[
Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
] = None,
select_subprotocol: Optional[
Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
] = None,
ping_interval: Optional[float] = 20,
ping_timeout: Optional[float] = 20,
close_timeout: Optional[float] = None,
max_size: Optional[int] = 2**20,
max_queue: Optional[int] = 2**5,
read_limit: int = 2**16,
write_limit: int = 2**16,
**kwargs: Any,
) -> None:
# Backwards compatibility: close_timeout used to be called timeout.
timeout: Optional[float] = kwargs.pop("timeout", None)
if timeout is None:
timeout = 10
else:
warnings.warn("rename timeout to close_timeout", DeprecationWarning)
# If both are specified, timeout is ignored.
if close_timeout is None:
close_timeout = timeout
# Backwards compatibility: create_protocol used to be called klass.
klass: Optional[Type[WebSocketServerProtocol]] = kwargs.pop("klass", None)
if klass is None:
klass = WebSocketServerProtocol
else:
warnings.warn("rename klass to create_protocol", DeprecationWarning)
# If both are specified, klass is ignored.
if create_protocol is None:
create_protocol = klass
# Backwards compatibility: recv() used to return None on closed connections
legacy_recv: bool = kwargs.pop("legacy_recv", False)
# Backwards compatibility: the loop parameter used to be supported.
_loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None)
if _loop is None:
loop = asyncio.get_event_loop()
else:
loop = _loop
warnings.warn("remove loop argument", DeprecationWarning)
ws_server = WebSocketServer(logger=logger)
secure = kwargs.get("ssl") is not None
if compression == "deflate":
extensions = enable_server_permessage_deflate(extensions)
elif compression is not None:
raise ValueError(f"unsupported compression: {compression}")
if subprotocols is not None:
validate_subprotocols(subprotocols)
factory = functools.partial(
create_protocol,
# For backwards compatibility with 10.0 or earlier. Done here in
# addition to WebSocketServerProtocol to trigger the deprecation
# warning once per serve() call rather than once per connection.
remove_path_argument(ws_handler),
ws_server,
host=host,
port=port,
secure=secure,
ping_interval=ping_interval,
ping_timeout=ping_timeout,
close_timeout=close_timeout,
max_size=max_size,
max_queue=max_queue,
read_limit=read_limit,
write_limit=write_limit,
loop=_loop,
legacy_recv=legacy_recv,
origins=origins,
extensions=extensions,
subprotocols=subprotocols,
extra_headers=extra_headers,
process_request=process_request,
select_subprotocol=select_subprotocol,
logger=logger,
)
if kwargs.pop("unix", False):
path: Optional[str] = kwargs.pop("path", None)
# unix_serve(path) must not specify host and port parameters.
assert host is None and port is None
create_server = functools.partial(
loop.create_unix_server, factory, path, **kwargs
)
else:
create_server = functools.partial(
loop.create_server, factory, host, port, **kwargs
)
# This is a coroutine function.
self._create_server = create_server
self.ws_server = ws_server
# async with serve(...)
async def __aenter__(self) -> WebSocketServer:
return await self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
self.ws_server.close()
await self.ws_server.wait_closed()
# await serve(...)
def __await__(self) -> Generator[Any, None, WebSocketServer]:
# Create a suitable iterator by calling __await__ on a coroutine.
return self.__await_impl__().__await__()
async def __await_impl__(self) -> WebSocketServer:
server = await self._create_server()
self.ws_server.wrap(server)
return self.ws_server
# yield from serve(...) - remove when dropping Python < 3.10
__iter__ = __await__
serve = Serve
def unix_serve(
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
],
path: Optional[str] = None,
**kwargs: Any,
) -> Serve:
"""
Similar to :func:`serve`, but for listening on Unix sockets.
This function builds upon the event
loop's :meth:`~asyncio.loop.create_unix_server` method.
It is only available on Unix.
It's useful for deploying a server behind a reverse proxy such as nginx.
Args:
path: file system path to the Unix socket.
"""
return serve(ws_handler, path=path, unix=True, **kwargs)
def remove_path_argument(
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
]
) -> Callable[[WebSocketServerProtocol], Awaitable[Any]]:
try:
inspect.signature(ws_handler).bind(None)
except TypeError:
try:
inspect.signature(ws_handler).bind(None, "")
except TypeError: # pragma: no cover
# ws_handler accepts neither one nor two arguments; leave it alone.
pass
else:
# ws_handler accepts two arguments; activate backwards compatibility.
# Enable deprecation warning and announce deprecation in 11.0.
# warnings.warn("remove second argument of ws_handler", DeprecationWarning)
async def _ws_handler(websocket: WebSocketServerProtocol) -> Any:
return await cast(
Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
ws_handler,
)(websocket, websocket.path)
return _ws_handler
return cast(
Callable[[WebSocketServerProtocol], Awaitable[Any]],
ws_handler,
)
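# A minimal sketch of how remove_path_argument treats each handler signature;
# the handler names are hypothetical.
def _remove_path_argument_example() -> None:  # pragma: no cover
    async def modern(websocket: WebSocketServerProtocol) -> None:
        await websocket.close()
    async def legacy(websocket: WebSocketServerProtocol, path: str) -> None:
        await websocket.close()
    # A one-argument handler is returned unchanged; a deprecated two-argument
    # handler is wrapped so it is called as handler(websocket, websocket.path).
    assert remove_path_argument(modern) is modern
    assert remove_path_argument(legacy) is not legacy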
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Wrapper layers: layers that augment the functionality of another layer.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.engine.base_layer import InputSpec
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.layers.recurrent import _standardize_args
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export('keras.layers.Wrapper')
class Wrapper(Layer):
"""Abstract wrapper base class.
Wrappers take another layer and augment it in various ways.
  Do not use this class as a layer; it is only an abstract base class.
Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.
Arguments:
layer: The layer to be wrapped.
"""
def __init__(self, layer, **kwargs):
assert isinstance(layer, Layer)
self.layer = layer
self._track_checkpointable(layer, name='layer')
# Tracks mapping of Wrapper inputs to inner layer inputs. Useful when
# the inner layer has update ops that depend on its inputs (as opposed
# to the inputs to the Wrapper layer).
self._input_map = {}
super(Wrapper, self).__init__(**kwargs)
def build(self, input_shape=None):
self.built = True
@property
def activity_regularizer(self):
if hasattr(self.layer, 'activity_regularizer'):
return self.layer.activity_regularizer
else:
return None
@property
def trainable(self):
return self.layer.trainable
@trainable.setter
def trainable(self, value):
self.layer.trainable = value
@property
def trainable_weights(self):
return self.layer.trainable_weights
@property
def non_trainable_weights(self):
return self.layer.non_trainable_weights
@property
def updates(self):
return self.layer.updates + self._updates
@property
def losses(self):
return self.layer.losses + self._losses
def get_weights(self):
return self.layer.get_weights()
def set_weights(self, weights):
self.layer.set_weights(weights)
def get_config(self):
config = {
'layer': {
'class_name': self.layer.__class__.__name__,
'config': self.layer.get_config()
}
}
base_config = super(Wrapper, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
from tensorflow.python.keras.layers import deserialize as deserialize_layer # pylint: disable=g-import-not-at-top
layer = deserialize_layer(
config.pop('layer'), custom_objects=custom_objects)
return cls(layer, **config)
@tf_export('keras.layers.TimeDistributed')
class TimeDistributed(Wrapper):
"""This wrapper allows to apply a layer to every temporal slice of an input.
The input should be at least 3D, and the dimension of index one
will be considered to be the temporal dimension.
Consider a batch of 32 samples,
where each sample is a sequence of 10 vectors of 16 dimensions.
The batch input shape of the layer is then `(32, 10, 16)`,
and the `input_shape`, not including the samples dimension, is `(10, 16)`.
You can then use `TimeDistributed` to apply a `Dense` layer
to each of the 10 timesteps, independently:
```python
# as the first layer in a model
model = Sequential()
model.add(TimeDistributed(Dense(8), input_shape=(10, 16)))
# now model.output_shape == (None, 10, 8)
```
The output will then have shape `(32, 10, 8)`.
In subsequent layers, there is no need for the `input_shape`:
```python
model.add(TimeDistributed(Dense(32)))
# now model.output_shape == (None, 10, 32)
```
The output will then have shape `(32, 10, 32)`.
`TimeDistributed` can be used with arbitrary layers, not just `Dense`,
for instance with a `Conv2D` layer:
```python
model = Sequential()
model.add(TimeDistributed(Conv2D(64, (3, 3)),
input_shape=(10, 299, 299, 3)))
```
Arguments:
layer: a layer instance.
Raises:
ValueError: If not initialized with a `Layer` instance.
"""
def __init__(self, layer, **kwargs):
if not isinstance(layer, Layer):
raise ValueError(
'Please initialize `TimeDistributed` layer with a '
'`Layer` instance. You passed: {input}'.format(input=layer))
super(TimeDistributed, self).__init__(layer, **kwargs)
self.supports_masking = True
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
assert len(input_shape) >= 3
self.input_spec = InputSpec(shape=input_shape)
child_input_shape = [input_shape[0]] + input_shape[2:]
if not self.layer.built:
self.layer.build(child_input_shape)
self.layer.built = True
super(TimeDistributed, self).build()
self.built = True
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
child_input_shape = tensor_shape.TensorShape([input_shape[0]] +
input_shape[2:])
child_output_shape = self.layer.compute_output_shape(
child_input_shape).as_list()
timesteps = input_shape[1]
return tensor_shape.TensorShape([child_output_shape[0], timesteps] +
child_output_shape[1:])
def call(self, inputs, training=None, mask=None):
kwargs = {}
if generic_utils.has_arg(self.layer.call, 'training'):
kwargs['training'] = training
uses_learning_phase = False # pylint: disable=redefined-outer-name
input_shape = K.int_shape(inputs)
if input_shape[0]:
# batch size matters, use rnn-based implementation
def step(x, _):
global uses_learning_phase # pylint: disable=global-variable-undefined
output = self.layer.call(x, **kwargs)
if hasattr(output, '_uses_learning_phase'):
uses_learning_phase = (output._uses_learning_phase or
uses_learning_phase)
return output, []
_, outputs, _ = K.rnn(
step,
inputs,
initial_states=[],
input_length=input_shape[1],
unroll=False)
y = outputs
else:
# No batch size specified, therefore the layer will be able
# to process batches of any size.
# We can go with reshape-based implementation for performance.
input_length = input_shape[1]
if not input_length:
input_length = array_ops.shape(inputs)[1]
# Shape: (num_samples * timesteps, ...). And track the
# transformation in self._input_map.
input_uid = generic_utils.object_list_uid(inputs)
inputs = array_ops.reshape(inputs, (-1,) + input_shape[2:])
self._input_map[input_uid] = inputs
# (num_samples * timesteps, ...)
y = self.layer.call(inputs, **kwargs)
if hasattr(y, '_uses_learning_phase'):
uses_learning_phase = y._uses_learning_phase
# Shape: (num_samples, timesteps, ...)
output_shape = self.compute_output_shape(input_shape).as_list()
y = array_ops.reshape(y, (-1, input_length) + tuple(output_shape[2:]))
# Apply activity regularizer if any:
if (hasattr(self.layer, 'activity_regularizer') and
self.layer.activity_regularizer is not None):
regularization_loss = self.layer.activity_regularizer(y)
self.add_loss(regularization_loss, inputs)
if uses_learning_phase:
y._uses_learning_phase = True
return y
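# Hedged illustration (NumPy only, not part of this module): when the static batch size
# is unknown, TimeDistributed collapses the time axis into the batch axis, applies the
# wrapped layer once, and restores the time axis afterwards, i.e.
# (samples, timesteps, features) -> (samples * timesteps, features) -> inner layer
# -> (samples * timesteps, units) -> (samples, timesteps, units).
def _time_distributed_dense_sketch(x, kernel):
  """Apply `dot(x, kernel)` to every timestep by folding time into the batch axis."""
  import numpy as np
  samples, timesteps, features = x.shape
  flat = np.reshape(x, (-1, features))  # (samples * timesteps, features)
  y = np.dot(flat, kernel)  # the wrapped "layer", applied once
  return np.reshape(y, (samples, timesteps, -1))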
@tf_export('keras.layers.Bidirectional')
class Bidirectional(Wrapper):
"""Bidirectional wrapper for RNNs.
Arguments:
layer: `Recurrent` instance.
merge_mode: Mode by which outputs of the
forward and backward RNNs will be combined.
One of {'sum', 'mul', 'concat', 'ave', None}.
If None, the outputs will not be combined,
they will be returned as a list.
Raises:
ValueError: If not initialized with a `Layer` instance or
      in case of an invalid `merge_mode` argument.
Examples:
```python
model = Sequential()
model.add(Bidirectional(LSTM(10, return_sequences=True), input_shape=(5,
10)))
model.add(Bidirectional(LSTM(10)))
model.add(Dense(5))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
```
"""
def __init__(self, layer, merge_mode='concat', weights=None, **kwargs):
if not isinstance(layer, Layer):
raise ValueError(
'Please initialize `Bidirectional` layer with a '
'`Layer` instance. You passed: {input}'.format(input=layer))
if merge_mode not in ['sum', 'mul', 'ave', 'concat', None]:
raise ValueError('Invalid merge mode. '
'Merge mode should be one of '
'{"sum", "mul", "ave", "concat", None}')
self.forward_layer = copy.copy(layer)
config = layer.get_config()
config['go_backwards'] = not config['go_backwards']
self.backward_layer = layer.__class__.from_config(config)
self.forward_layer._name = 'forward_' + self.forward_layer.name
self.backward_layer._name = 'backward_' + self.backward_layer.name
self.merge_mode = merge_mode
if weights:
nw = len(weights)
self.forward_layer.initial_weights = weights[:nw // 2]
self.backward_layer.initial_weights = weights[nw // 2:]
self.stateful = layer.stateful
self.return_sequences = layer.return_sequences
self.return_state = layer.return_state
self.supports_masking = True
self._trainable = True
self._num_constants = None
super(Bidirectional, self).__init__(layer, **kwargs)
self.input_spec = layer.input_spec
@property
def trainable(self):
return self._trainable
@trainable.setter
def trainable(self, value):
self._trainable = value
self.forward_layer.trainable = value
self.backward_layer.trainable = value
def get_weights(self):
return self.forward_layer.get_weights() + self.backward_layer.get_weights()
def set_weights(self, weights):
nw = len(weights)
self.forward_layer.set_weights(weights[:nw // 2])
self.backward_layer.set_weights(weights[nw // 2:])
@tf_utils.shape_type_conversion
def compute_output_shape(self, input_shape):
output_shape = tuple(self.forward_layer.compute_output_shape(
input_shape).as_list())
if self.return_state:
state_shape = output_shape[1:]
output_shape = output_shape[0]
if self.merge_mode == 'concat':
output_shape = list(output_shape)
output_shape[-1] *= 2
output_shape = tuple(output_shape)
elif self.merge_mode is None:
output_shape = [output_shape, copy.copy(output_shape)]
if self.return_state:
if self.merge_mode is None:
return output_shape + state_shape + copy.copy(state_shape)
return [output_shape] + state_shape + copy.copy(state_shape)
return output_shape
def __call__(self, inputs, initial_state=None, constants=None, **kwargs):
"""`Bidirectional.__call__` implements the same API as the wrapped `RNN`."""
inputs, initial_state, constants = _standardize_args(
inputs, initial_state, constants, self._num_constants)
if isinstance(inputs, list):
if len(inputs) > 1:
initial_state = inputs[1:]
inputs = inputs[0]
if initial_state is None and constants is None:
return super(Bidirectional, self).__call__(inputs, **kwargs)
# Applies the same workaround as in `RNN.__call__`
additional_inputs = []
additional_specs = []
if initial_state is not None:
      # Check if `initial_state` can be split in half
num_states = len(initial_state)
if num_states % 2 > 0:
raise ValueError(
'When passing `initial_state` to a Bidirectional RNN, '
'the state should be a list containing the states of '
'the underlying RNNs. '
'Found: ' + str(initial_state))
kwargs['initial_state'] = initial_state
additional_inputs += initial_state
state_specs = [InputSpec(shape=K.int_shape(state))
for state in initial_state]
self.forward_layer.state_spec = state_specs[:num_states // 2]
self.backward_layer.state_spec = state_specs[num_states // 2:]
additional_specs += state_specs
if constants is not None:
kwargs['constants'] = constants
additional_inputs += constants
constants_spec = [InputSpec(shape=K.int_shape(constant))
for constant in constants]
self.forward_layer.constants_spec = constants_spec
self.backward_layer.constants_spec = constants_spec
additional_specs += constants_spec
self._num_constants = len(constants)
self.forward_layer._num_constants = self._num_constants
self.backward_layer._num_constants = self._num_constants
is_keras_tensor = K.is_keras_tensor(additional_inputs[0])
for tensor in additional_inputs:
if K.is_keras_tensor(tensor) != is_keras_tensor:
raise ValueError('The initial state of a Bidirectional'
' layer cannot be specified with a mix of'
' Keras tensors and non-Keras tensors'
' (a "Keras tensor" is a tensor that was'
' returned by a Keras layer, or by `Input`)')
if is_keras_tensor:
# Compute the full input spec, including state
full_input = [inputs] + additional_inputs
full_input_spec = self.input_spec + additional_specs
# Perform the call with temporarily replaced input_spec
original_input_spec = self.input_spec
self.input_spec = full_input_spec
output = super(Bidirectional, self).__call__(full_input, **kwargs)
self.input_spec = original_input_spec
return output
else:
return super(Bidirectional, self).__call__(inputs, **kwargs)
def call(self, inputs,
training=None,
mask=None,
initial_state=None,
constants=None):
"""`Bidirectional.call` implements the same API as the wrapped `RNN`."""
kwargs = {}
if generic_utils.has_arg(self.layer.call, 'training'):
kwargs['training'] = training
if generic_utils.has_arg(self.layer.call, 'mask'):
kwargs['mask'] = mask
if generic_utils.has_arg(self.layer.call, 'constants'):
kwargs['constants'] = constants
if initial_state is not None and generic_utils.has_arg(
self.layer.call, 'initial_state'):
forward_state = initial_state[:len(initial_state) // 2]
backward_state = initial_state[len(initial_state) // 2:]
y = self.forward_layer.call(inputs, initial_state=forward_state, **kwargs)
y_rev = self.backward_layer.call(
inputs, initial_state=backward_state, **kwargs)
else:
y = self.forward_layer.call(inputs, **kwargs)
y_rev = self.backward_layer.call(inputs, **kwargs)
if self.return_state:
states = y[1:] + y_rev[1:]
y = y[0]
y_rev = y_rev[0]
if self.return_sequences:
y_rev = K.reverse(y_rev, 1)
if self.merge_mode == 'concat':
output = K.concatenate([y, y_rev])
elif self.merge_mode == 'sum':
output = y + y_rev
elif self.merge_mode == 'ave':
output = (y + y_rev) / 2
elif self.merge_mode == 'mul':
output = y * y_rev
elif self.merge_mode is None:
output = [y, y_rev]
# Properly set learning phase
if (getattr(y, '_uses_learning_phase', False) or
getattr(y_rev, '_uses_learning_phase', False)):
if self.merge_mode is None:
for out in output:
out._uses_learning_phase = True
else:
output._uses_learning_phase = True
if self.return_state:
if self.merge_mode is None:
return output + states
return [output] + states
return output
def reset_states(self):
self.forward_layer.reset_states()
self.backward_layer.reset_states()
def build(self, input_shape):
with K.name_scope(self.forward_layer.name):
self.forward_layer.build(input_shape)
with K.name_scope(self.backward_layer.name):
self.backward_layer.build(input_shape)
self.built = True
def compute_mask(self, inputs, mask):
if isinstance(mask, list):
mask = mask[0]
if self.return_sequences:
if not self.merge_mode:
output_mask = [mask, mask]
else:
output_mask = mask
else:
output_mask = [None, None] if not self.merge_mode else None
if self.return_state:
states = self.forward_layer.states
state_mask = [None for _ in states]
if isinstance(output_mask, list):
return output_mask + state_mask * 2
return [output_mask] + state_mask * 2
return output_mask
@property
def trainable_weights(self):
if hasattr(self.forward_layer, 'trainable_weights'):
return (self.forward_layer.trainable_weights +
self.backward_layer.trainable_weights)
return []
@property
def non_trainable_weights(self):
if hasattr(self.forward_layer, 'non_trainable_weights'):
return (self.forward_layer.non_trainable_weights +
self.backward_layer.non_trainable_weights)
return []
@property
def updates(self):
if hasattr(self.forward_layer, 'updates'):
return self.forward_layer.updates + self.backward_layer.updates
return []
@property
def losses(self):
if hasattr(self.forward_layer, 'losses'):
return self.forward_layer.losses + self.backward_layer.losses
return []
@property
def constraints(self):
constraints = {}
if hasattr(self.forward_layer, 'constraints'):
constraints.update(self.forward_layer.constraints)
constraints.update(self.backward_layer.constraints)
return constraints
def get_config(self):
config = {'merge_mode': self.merge_mode}
if self._num_constants is not None:
config['num_constants'] = self._num_constants
base_config = super(Bidirectional, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
@classmethod
def from_config(cls, config, custom_objects=None):
num_constants = config.pop('num_constants', None)
layer = super(Bidirectional, cls).from_config(config,
custom_objects=custom_objects)
layer._num_constants = num_constants
return layer
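# Hedged usage sketch (not part of this module; assumes the public tf.keras API):
# Wrapper.get_config() nests the wrapped layer's class name and config, and
# from_config() rebuilds it through layer deserialization, so a wrapped layer
# survives a config round-trip like any other layer.
if __name__ == '__main__':  # pragma: no cover
  import tensorflow as tf
  td = tf.keras.layers.TimeDistributed(tf.keras.layers.Dense(8))
  clone = tf.keras.layers.TimeDistributed.from_config(td.get_config())
  assert clone.layer.units == 8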
|
|
from datetime import datetime, date, time
from dateutil.parser import parse as parse_date
import pytest
import pytz
from predicthq.endpoints import decorators, schemas
from predicthq.endpoints.base import BaseEndpoint
def test_datetime_type():
class SchemaExample(schemas.Model):
my_datetime = schemas.DateTimeType()
test_date = datetime(2016, 1, 1, tzinfo=pytz.UTC)
assert SchemaExample({"my_datetime": "2016-01-01T00:00:00+00:00"}).my_datetime == test_date
assert SchemaExample({"my_datetime": "2016-01-01T00:00:00+0000"}).my_datetime == test_date
assert SchemaExample({"my_datetime": "2016-01-01T00:00:00Z"}).my_datetime == test_date
assert SchemaExample({"my_datetime": test_date}).my_datetime == test_date
def test_date_or_datetime_type():
class SchemaExample(schemas.Model):
my_datetime = schemas.DateOrDateTimeType()
test_date = date(2016, 1, 1)
test_datetime = datetime(2016, 1, 1, 12, 30, 42, tzinfo=pytz.UTC)
assert SchemaExample({"my_datetime": test_date}).my_datetime == test_date
assert SchemaExample({"my_datetime": "2016-01-01"}).my_datetime == test_date
assert SchemaExample({"my_datetime": "2016-01-01T12:30:42+00:00"}).my_datetime == test_datetime
assert SchemaExample({"my_datetime": "2016-01-01T12:30:42+0000"}).my_datetime == test_datetime
assert SchemaExample({"my_datetime": "2016-01-01T12:30:42Z"}).my_datetime == test_datetime
assert SchemaExample({"my_datetime": test_datetime}).my_datetime == test_datetime
def test_date_type():
class SchemaExample(schemas.Model):
my_date = schemas.DateType()
test_date = date(2016, 1, 1)
assert SchemaExample({"my_date": "2016-01-01"}).my_date == test_date
assert SchemaExample({"my_date": "2016-01-01T00:00:00+0000"}).my_date == test_date
assert SchemaExample({"my_date": "2016-01-01T00:00:00Z"}).my_date == test_date
assert SchemaExample({"my_date": test_date}).my_date == test_date
def test_string_model_and_string_model_type():
class MyModel(schemas.StringModel):
import_format = r"(?P<left>.*)==(?P<right>\d*)"
export_format = "{left}=={right}"
left = schemas.StringType()
right = schemas.IntType()
class SchemaExample(schemas.Model):
my_model = schemas.StringModelType(MyModel)
short_data = {"my_model": "ten==10"}
long_data = {"my_model": {"left": "ten", "right": 10}}
model_data = {"my_model": MyModel("ten==10")}
invalid_data = {"my_model": "10==ten"}
expected_data = {"my_model": "ten==10"}
m = SchemaExample()
assert m.import_data(short_data).to_primitive() == expected_data
assert m.import_data(long_data).to_primitive() == expected_data
assert m.import_data(model_data).to_primitive() == expected_data
assert m.import_data(short_data).to_dict() == expected_data
assert m.import_data(long_data).to_dict() == expected_data
assert m.import_data(model_data).to_dict() == expected_data
with pytest.raises(schemas.SchematicsDataError):
m.import_data(invalid_data)
def test_string_list_type():
class SchemaExample(schemas.Model):
area_list = schemas.StringListType(schemas.StringModelType(schemas.Area), separator="+")
string_list = schemas.StringListType(schemas.StringType, separator="+")
    string_data = {"string_list": "a+b+c", "area_list": "10km@-36.847585,174.765742+10km@-41.288058,174.778265"}
list_data = {
"string_list": ["a", "b", "c"],
"area_list": ["[email protected],174.765742", "[email protected],174.778265"],
}
dict_data = {
"string_list": ["a", "b", "c"],
"area_list": [
{"radius": "10km", "latitude": -36.847585, "longitude": 174.765742},
{"radius": "10km", "latitude": -41.288058, "longitude": 174.778265},
],
}
    expected_data = {"string_list": "a+b+c", "area_list": "10km@-36.847585,174.765742+10km@-41.288058,174.778265"}
m = SchemaExample()
assert m.import_data(string_data).to_primitive() == expected_data
assert m.import_data(list_data).to_primitive() == expected_data
assert m.import_data(dict_data).to_primitive() == expected_data
    unique_item_data = {"string_list": "a", "area_list": "10km@-36.847585,174.765742"}
unique_item_dict_data = {
"string_list": "a",
"area_list": {"radius": "10km", "latitude": -36.847585, "longitude": 174.765742},
}
assert m.import_data(unique_item_data).to_primitive() == unique_item_data
assert m.import_data(unique_item_dict_data).to_primitive() == unique_item_data
def test_list_type():
class SchemaExample(schemas.Model):
string_list = schemas.ListType(schemas.StringType)
m = SchemaExample()
assert m.import_data({"string_list": "string"}).to_primitive() == {"string_list": ["string"]}
assert m.import_data({"string_list": ["string1", "string2"]}).to_primitive() == {
"string_list": ["string1", "string2"]
}
def test_geo_json_point_type():
class SchemaExample(schemas.Model):
point = schemas.GeoJSONPointType()
m = SchemaExample()
assert m.import_data({"point": [174.765742, -36.847585]}).to_primitive() == {"point": [174.765742, -36.847585]}
with pytest.raises(schemas.SchematicsDataError):
m.import_data({"point": [-36.847585, 174.765742]}, validate=True)
def test_date_around_type():
class SchemaExample(schemas.Model):
around = schemas.ModelType(schemas.DateAround)
m = SchemaExample()
assert m.import_data(
{"around": {"origin": "2020-01-01", "offset": "1d", "scale": "0d", "decay": "0.1"}}
).to_primitive() == {"around": {"origin": "2020-01-01", "decay": 0.1, "scale": u"0d", "offset": u"1d"}}
with pytest.raises(schemas.SchematicsDataError):
m.import_data({"around": "2020-01-01"}, validate=True)
def test_location_around_type():
class SchemaExample(schemas.Model):
around = schemas.ModelType(schemas.LocationAround)
m = SchemaExample()
assert m.import_data(
{"around": {"origin": "40.730610,-73.935242", "offset": "1km", "scale": "2km", "decay": "0.1"}}
).to_primitive() == {"around": {"origin": u"40.730610,-73.935242", "decay": 0.1, "scale": u"2km", "offset": u"1km"}}
with pytest.raises(schemas.SchematicsDataError):
m.import_data({"around": "40.730610,-73.935242"}, validate=True)
def test_area_model():
class SchemaExample(schemas.Model):
area = schemas.StringModelType(schemas.Area)
    short_data = {"area": "10km@-36.847585,174.765742"}
long_data = {"area": {"radius": "10km", "latitude": -36.847585, "longitude": 174.765742}}
    model_data = {"area": schemas.Area("10km@-36.847585,174.765742")}
invalid_data = {"area": "[email protected],174.765742"}
    expected_expected = {"area": "10km@-36.847585,174.765742"}
m = SchemaExample()
assert m.import_data(short_data).to_primitive() == expected_expected
assert m.import_data(long_data).to_primitive() == expected_expected
assert m.import_data(model_data).to_primitive() == expected_expected
assert m.import_data(short_data).to_dict() == expected_expected
assert m.import_data(long_data).to_dict() == expected_expected
assert m.import_data(model_data).to_dict() == expected_expected
with pytest.raises(schemas.SchematicsDataError):
m.import_data(invalid_data)
def test_location_model():
class SchemaExample(schemas.Model):
location = schemas.StringModelType(schemas.Location)
short_data = {"location": "@-36.847585,174.765742"}
long_data = {"location": {"latitude": -36.847585, "longitude": 174.765742}}
model_data = {"location": schemas.Location("@-36.847585,174.765742")}
invalid_data = {"location": "-36.847585,174.765742"}
expected_expected = {"location": "@-36.847585,174.765742"}
m = SchemaExample()
assert m.import_data(short_data).to_primitive() == expected_expected
assert m.import_data(long_data).to_primitive() == expected_expected
assert m.import_data(model_data).to_primitive() == expected_expected
assert m.import_data(short_data).to_dict() == expected_expected
assert m.import_data(long_data).to_dict() == expected_expected
assert m.import_data(model_data).to_dict() == expected_expected
with pytest.raises(schemas.SchematicsDataError):
m.import_data(invalid_data)
def test_resultset():
class ResultExample(schemas.Model):
value = schemas.IntType()
class ResultSetExample(schemas.ResultSet):
results = schemas.ResultType(ResultExample)
class EndpointExample(BaseEndpoint):
@decorators.returns(ResultSetExample)
def load_page(self, page):
page = int(page)
return {
"count": 9,
"next": f"http://example.org/?page={page + 1}" if page < 3 else None,
"previous": f"http://example.org/?page={page - 1}" if page > 1 else None,
"results": [
{"value": 1 + (3 * (page - 1))},
{"value": 2 + (3 * (page - 1))},
{"value": 3 + (3 * (page - 1))},
],
}
endpoint = EndpointExample(None)
p1 = endpoint.load_page(page=1)
assert p1.count == 9
assert list(p1) == [ResultExample({"value": 1}), ResultExample({"value": 2}), ResultExample({"value": 3})]
assert p1.has_previous() is False
assert p1.has_next() is True
assert p1.get_previous() is None
p2 = p1.get_next()
assert list(p2) == [ResultExample({"value": 4}), ResultExample({"value": 5}), ResultExample({"value": 6})]
assert p2.has_previous() is True
assert p2.has_next() is True
p3 = p2.get_next()
assert list(p3) == [ResultExample({"value": 7}), ResultExample({"value": 8}), ResultExample({"value": 9})]
assert p3.has_previous() is True
assert p3.has_next() is False
assert p3.get_next() is None
assert list(p3.get_previous()) == list(p2)
assert list(p1.iter_pages()) == [endpoint.load_page(page=2), endpoint.load_page(page=3)]
assert list(p1.iter_all()) == list(p1) + list(p2) + list(p3)
for item in p1.iter_all():
assert item._endpoint == endpoint
|
|
import os
import numpy as np
import tensorflow as tf
import random
from unittest.mock import MagicMock
def _print_success_message():
return print('Tests Passed')
def test_folder_path(cifar10_dataset_folder_path):
assert cifar10_dataset_folder_path is not None,\
'Cifar-10 data folder not set.'
assert cifar10_dataset_folder_path[-1] != '/',\
'The "/" shouldn\'t be added to the end of the path.'
assert os.path.exists(cifar10_dataset_folder_path),\
'Path not found.'
assert os.path.isdir(cifar10_dataset_folder_path),\
'{} is not a folder.'.format(os.path.basename(cifar10_dataset_folder_path))
train_files = [cifar10_dataset_folder_path + '/data_batch_' + str(batch_id) for batch_id in range(1, 6)]
other_files = [cifar10_dataset_folder_path + '/batches.meta', cifar10_dataset_folder_path + '/test_batch']
missing_files = [path for path in train_files + other_files if not os.path.exists(path)]
assert not missing_files,\
'Missing files in directory: {}'.format(missing_files)
print('All files found!')
def test_normalize(normalize):
test_shape = (np.random.choice(range(1000)), 32, 32, 3)
test_numbers = np.random.choice(range(256), test_shape)
normalize_out = normalize(test_numbers)
assert type(normalize_out).__module__ == np.__name__,\
'Not Numpy Object'
assert normalize_out.shape == test_shape,\
'Incorrect Shape. {} shape found'.format(normalize_out.shape)
assert normalize_out.max() <= 1 and normalize_out.min() >= 0,\
        'Incorrect Range. {} to {} found'.format(normalize_out.min(), normalize_out.max())
_print_success_message()
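# Hedged reference sketch (one possible solution, not the project's): scaling the
# 0-255 pixel values into [0, 1] is enough to satisfy test_normalize above.
def _normalize_reference(x):
    return np.asarray(x, dtype=np.float32) / 255.0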
def test_one_hot_encode(one_hot_encode):
test_shape = np.random.choice(range(1000))
test_numbers = np.random.choice(range(10), test_shape)
one_hot_out = one_hot_encode(test_numbers)
assert type(one_hot_out).__module__ == np.__name__,\
'Not Numpy Object'
assert one_hot_out.shape == (test_shape, 10),\
'Incorrect Shape. {} shape found'.format(one_hot_out.shape)
n_encode_tests = 5
test_pairs = list(zip(test_numbers, one_hot_out))
test_indices = np.random.choice(len(test_numbers), n_encode_tests)
labels = [test_pairs[test_i][0] for test_i in test_indices]
enc_labels = np.array([test_pairs[test_i][1] for test_i in test_indices])
new_enc_labels = one_hot_encode(labels)
assert np.array_equal(enc_labels, new_enc_labels),\
'Encodings returned different results for the same numbers.\n' \
'For the first call it returned:\n' \
'{}\n' \
'For the second call it returned\n' \
'{}\n' \
'Make sure you save the map of labels to encodings outside of the function.'.format(enc_labels, new_enc_labels)
for one_hot in new_enc_labels:
assert (one_hot==1).sum() == 1,\
'Each one-hot-encoded value should include the number 1 exactly once.\n' \
'Found {}\n'.format(one_hot)
assert (one_hot==0).sum() == len(one_hot)-1,\
'Each one-hot-encoded value should include zeros in all but one position.\n' \
'Found {}\n'.format(one_hot)
_print_success_message()
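# Hedged reference sketch (one possible solution, not the project's): indexing a fixed
# identity matrix keeps the label -> vector mapping identical across calls, which is
# exactly what the consistency check in test_one_hot_encode verifies.
def _one_hot_encode_reference(labels, n_classes=10):
    return np.eye(n_classes)[np.asarray(labels, dtype=int)]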
def test_nn_image_inputs(neural_net_image_input):
image_shape = (32, 32, 3)
nn_inputs_out_x = neural_net_image_input(image_shape)
assert nn_inputs_out_x.get_shape().as_list() == [None, image_shape[0], image_shape[1], image_shape[2]],\
'Incorrect Image Shape. Found {} shape'.format(nn_inputs_out_x.get_shape().as_list())
assert nn_inputs_out_x.op.type == 'Placeholder',\
'Incorrect Image Type. Found {} type'.format(nn_inputs_out_x.op.type)
assert nn_inputs_out_x.name == 'x:0', \
'Incorrect Name. Found {}'.format(nn_inputs_out_x.name)
print('Image Input Tests Passed.')
def test_nn_label_inputs(neural_net_label_input):
n_classes = 10
nn_inputs_out_y = neural_net_label_input(n_classes)
assert nn_inputs_out_y.get_shape().as_list() == [None, n_classes],\
'Incorrect Label Shape. Found {} shape'.format(nn_inputs_out_y.get_shape().as_list())
assert nn_inputs_out_y.op.type == 'Placeholder',\
'Incorrect Label Type. Found {} type'.format(nn_inputs_out_y.op.type)
assert nn_inputs_out_y.name == 'y:0', \
'Incorrect Name. Found {}'.format(nn_inputs_out_y.name)
print('Label Input Tests Passed.')
def test_nn_keep_prob_inputs(neural_net_keep_prob_input):
nn_inputs_out_k = neural_net_keep_prob_input()
assert nn_inputs_out_k.get_shape().ndims is None,\
'Too many dimensions found for keep prob. Found {} dimensions. It should be a scalar (0-Dimension Tensor).'.format(nn_inputs_out_k.get_shape().ndims)
assert nn_inputs_out_k.op.type == 'Placeholder',\
'Incorrect keep prob Type. Found {} type'.format(nn_inputs_out_k.op.type)
assert nn_inputs_out_k.name == 'keep_prob:0', \
'Incorrect Name. Found {}'.format(nn_inputs_out_k.name)
print('Keep Prob Tests Passed.')
def test_con_pool(conv2d_maxpool):
test_x = tf.placeholder(tf.float32, [None, 32, 32, 5])
test_num_outputs = 10
test_con_k = (2, 2)
test_con_s = (4, 4)
test_pool_k = (2, 2)
test_pool_s = (2, 2)
conv2d_maxpool_out = conv2d_maxpool(test_x, test_num_outputs, test_con_k, test_con_s, test_pool_k, test_pool_s)
assert conv2d_maxpool_out.get_shape().as_list() == [None, 4, 4, 10],\
'Incorrect Shape. Found {} shape'.format(conv2d_maxpool_out.get_shape().as_list())
_print_success_message()
def test_flatten(flatten):
test_x = tf.placeholder(tf.float32, [None, 10, 30, 6])
flat_out = flatten(test_x)
assert flat_out.get_shape().as_list() == [None, 10*30*6],\
'Incorrect Shape. Found {} shape'.format(flat_out.get_shape().as_list())
_print_success_message()
def test_fully_conn(fully_conn):
test_x = tf.placeholder(tf.float32, [None, 128])
test_num_outputs = 40
fc_out = fully_conn(test_x, test_num_outputs)
assert fc_out.get_shape().as_list() == [None, 40],\
'Incorrect Shape. Found {} shape'.format(fc_out.get_shape().as_list())
_print_success_message()
def test_output(output):
test_x = tf.placeholder(tf.float32, [None, 128])
test_num_outputs = 40
output_out = output(test_x, test_num_outputs)
assert output_out.get_shape().as_list() == [None, 40],\
'Incorrect Shape. Found {} shape'.format(output_out.get_shape().as_list())
_print_success_message()
def test_conv_net(conv_net):
test_x = tf.placeholder(tf.float32, [None, 32, 32, 3])
test_k = tf.placeholder(tf.float32)
logits_out = conv_net(test_x, test_k)
assert logits_out.get_shape().as_list() == [None, 10],\
'Incorrect Model Output. Found {}'.format(logits_out.get_shape().as_list())
print('Neural Network Built!')
def test_train_nn(train_neural_network):
mock_session = tf.Session()
test_x = np.random.rand(128, 32, 32, 3)
test_y = np.random.rand(128, 10)
test_k = np.random.rand(1)
test_optimizer = tf.train.AdamOptimizer()
mock_session.run = MagicMock()
train_neural_network(mock_session, test_optimizer, test_k, test_x, test_y)
assert mock_session.run.called, 'Session not used'
_print_success_message()
|
|
"""Support for Wink thermostats and Air Conditioners."""
import logging
import pywink
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_ECO,
PRESET_NONE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, TEMP_CELSIUS
from homeassistant.helpers.temperature import display_temp as show_temp
from . import DOMAIN, WinkDevice
_LOGGER = logging.getLogger(__name__)
ATTR_ECO_TARGET = "eco_target"
ATTR_EXTERNAL_TEMPERATURE = "external_temperature"
ATTR_OCCUPIED = "occupied"
ATTR_SCHEDULE_ENABLED = "schedule_enabled"
ATTR_SMART_TEMPERATURE = "smart_temperature"
ATTR_TOTAL_CONSUMPTION = "total_consumption"
HA_HVAC_TO_WINK = {
HVAC_MODE_AUTO: "auto",
HVAC_MODE_COOL: "cool_only",
HVAC_MODE_FAN_ONLY: "fan_only",
HVAC_MODE_HEAT: "heat_only",
HVAC_MODE_OFF: "off",
}
WINK_HVAC_TO_HA = {value: key for key, value in HA_HVAC_TO_WINK.items()}
SUPPORT_FLAGS_THERMOSTAT = (
SUPPORT_TARGET_TEMPERATURE
| SUPPORT_TARGET_TEMPERATURE_RANGE
| SUPPORT_FAN_MODE
| SUPPORT_AUX_HEAT
)
SUPPORT_FAN_THERMOSTAT = [FAN_AUTO, FAN_ON]
SUPPORT_PRESET_THERMOSTAT = [PRESET_AWAY, PRESET_ECO]
SUPPORT_FLAGS_AC = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE
SUPPORT_FAN_AC = [FAN_HIGH, FAN_LOW, FAN_MEDIUM]
SUPPORT_PRESET_AC = [PRESET_NONE, PRESET_ECO]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink climate devices."""
for climate in pywink.get_thermostats():
_id = climate.object_id() + climate.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkThermostat(climate, hass)])
for climate in pywink.get_air_conditioners():
_id = climate.object_id() + climate.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkAC(climate, hass)])
class WinkThermostat(WinkDevice, ClimateEntity):
"""Representation of a Wink thermostat."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS_THERMOSTAT
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["climate"].append(self)
@property
def temperature_unit(self):
"""Return the unit of measurement."""
# The Wink API always returns temp in Celsius
return TEMP_CELSIUS
@property
def extra_state_attributes(self):
"""Return the optional device state attributes."""
data = {}
if self.external_temperature is not None:
data[ATTR_EXTERNAL_TEMPERATURE] = show_temp(
self.hass,
self.external_temperature,
self.temperature_unit,
PRECISION_TENTHS,
)
if self.smart_temperature:
data[ATTR_SMART_TEMPERATURE] = self.smart_temperature
if self.occupied is not None:
data[ATTR_OCCUPIED] = self.occupied
if self.eco_target is not None:
data[ATTR_ECO_TARGET] = self.eco_target
return data
@property
def current_temperature(self):
"""Return the current temperature."""
return self.wink.current_temperature()
@property
def current_humidity(self):
"""Return the current humidity."""
if self.wink.current_humidity() is not None:
            # The API states humidity will be a float between 0 and 1, but the
            # only example API response with humidity listed shows an int.
            # This handles both possibilities.
if self.wink.current_humidity() < 1:
return self.wink.current_humidity() * 100
return self.wink.current_humidity()
return None
@property
def external_temperature(self):
"""Return the current external temperature."""
return self.wink.current_external_temperature()
@property
def smart_temperature(self):
"""Return the current average temp of all remote sensor."""
return self.wink.current_smart_temperature()
@property
def eco_target(self):
"""Return status of eco target (Is the thermostat in eco mode)."""
return self.wink.eco_target()
@property
def occupied(self):
"""Return status of if the thermostat has detected occupancy."""
return self.wink.occupied()
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
mode = self.wink.current_hvac_mode()
if mode == "eco":
return PRESET_ECO
if self.wink.away():
return PRESET_AWAY
return None
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return SUPPORT_PRESET_THERMOSTAT
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
target_hum = None
if self.wink.current_humidifier_mode() == "on":
if self.wink.current_humidifier_set_point() is not None:
target_hum = self.wink.current_humidifier_set_point() * 100
elif self.wink.current_dehumidifier_mode() == "on":
if self.wink.current_dehumidifier_set_point() is not None:
target_hum = self.wink.current_dehumidifier_set_point() * 100
else:
target_hum = None
return target_hum
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self.hvac_mode != HVAC_MODE_AUTO and not self.wink.away():
if self.hvac_mode == HVAC_MODE_COOL:
return self.wink.current_max_set_point()
if self.hvac_mode == HVAC_MODE_HEAT:
return self.wink.current_min_set_point()
return None
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
if self.hvac_mode == HVAC_MODE_AUTO:
return self.wink.current_min_set_point()
return None
@property
def target_temperature_high(self):
"""Return the higher bound temperature we try to reach."""
if self.hvac_mode == HVAC_MODE_AUTO:
return self.wink.current_max_set_point()
return None
@property
def is_aux_heat(self):
"""Return true if aux heater."""
if "aux" not in self.wink.hvac_modes():
return None
if self.wink.current_hvac_mode() == "aux":
return True
return False
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if not self.wink.is_on():
return HVAC_MODE_OFF
wink_mode = self.wink.current_hvac_mode()
if wink_mode == "aux":
return HVAC_MODE_HEAT
if wink_mode == "eco":
return HVAC_MODE_AUTO
return WINK_HVAC_TO_HA.get(wink_mode, "")
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
hvac_list = [HVAC_MODE_OFF]
modes = self.wink.hvac_modes()
for mode in modes:
if mode in ("eco", "aux"):
continue
try:
ha_mode = WINK_HVAC_TO_HA[mode]
hvac_list.append(ha_mode)
except KeyError:
_LOGGER.error(
"Invalid operation mode mapping. %s doesn't map. "
"Please report this",
mode,
)
return hvac_list
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
if not self.wink.is_on():
return CURRENT_HVAC_OFF
if self.wink.cool_on():
return CURRENT_HVAC_COOL
if self.wink.heat_on():
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if target_temp is not None:
if self.hvac_mode == HVAC_MODE_COOL:
target_temp_high = target_temp
if self.hvac_mode == HVAC_MODE_HEAT:
target_temp_low = target_temp
self.wink.set_temperature(target_temp_low, target_temp_high)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
hvac_mode_to_set = HA_HVAC_TO_WINK.get(hvac_mode)
self.wink.set_operation_mode(hvac_mode_to_set)
def set_preset_mode(self, preset_mode):
"""Set new preset mode."""
# Away
if preset_mode != PRESET_AWAY and self.wink.away():
self.wink.set_away_mode(False)
elif preset_mode == PRESET_AWAY:
self.wink.set_away_mode()
if preset_mode == PRESET_ECO:
self.wink.set_operation_mode("eco")
@property
def fan_mode(self):
"""Return whether the fan is on."""
if self.wink.current_fan_mode() == "on":
return FAN_ON
if self.wink.current_fan_mode() == "auto":
return FAN_AUTO
# No Fan available so disable slider
return None
@property
def fan_modes(self):
"""List of available fan modes."""
if self.wink.has_fan():
return SUPPORT_FAN_THERMOSTAT
return None
def set_fan_mode(self, fan_mode):
"""Turn fan on/off."""
self.wink.set_fan_mode(fan_mode.lower())
def turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
self.wink.set_operation_mode("aux")
def turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
self.wink.set_operation_mode("heat_only")
@property
def min_temp(self):
"""Return the minimum temperature."""
minimum = 7 # Default minimum
min_min = self.wink.min_min_set_point()
min_max = self.wink.min_max_set_point()
if self.hvac_mode == HVAC_MODE_HEAT:
if min_min:
return_value = min_min
else:
return_value = minimum
elif self.hvac_mode == HVAC_MODE_COOL:
if min_max:
return_value = min_max
else:
return_value = minimum
elif self.hvac_mode == HVAC_MODE_AUTO:
if min_min and min_max:
return_value = min(min_min, min_max)
else:
return_value = minimum
else:
return_value = minimum
return return_value
@property
def max_temp(self):
"""Return the maximum temperature."""
maximum = 35 # Default maximum
max_min = self.wink.max_min_set_point()
max_max = self.wink.max_max_set_point()
if self.hvac_mode == HVAC_MODE_HEAT:
if max_min:
return_value = max_min
else:
return_value = maximum
elif self.hvac_mode == HVAC_MODE_COOL:
if max_max:
return_value = max_max
else:
return_value = maximum
elif self.hvac_mode == HVAC_MODE_AUTO:
if max_min and max_max:
return_value = min(max_min, max_max)
else:
return_value = maximum
else:
return_value = maximum
return return_value
class WinkAC(WinkDevice, ClimateEntity):
"""Representation of a Wink air conditioner."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS_AC
@property
def temperature_unit(self):
"""Return the unit of measurement."""
# The Wink API always returns temp in Celsius
return TEMP_CELSIUS
@property
def extra_state_attributes(self):
"""Return the optional device state attributes."""
data = {}
data[ATTR_TOTAL_CONSUMPTION] = self.wink.total_consumption()
data[ATTR_SCHEDULE_ENABLED] = self.wink.schedule_enabled()
return data
@property
def current_temperature(self):
"""Return the current temperature."""
return self.wink.current_temperature()
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
if not self.wink.is_on():
return PRESET_NONE
mode = self.wink.current_mode()
if mode == "auto_eco":
return PRESET_ECO
return PRESET_NONE
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return SUPPORT_PRESET_AC
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if not self.wink.is_on():
return HVAC_MODE_OFF
wink_mode = self.wink.current_mode()
if wink_mode == "auto_eco":
return HVAC_MODE_COOL
return WINK_HVAC_TO_HA.get(wink_mode, "")
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
hvac_list = [HVAC_MODE_OFF]
modes = self.wink.modes()
for mode in modes:
if mode == "auto_eco":
continue
try:
ha_mode = WINK_HVAC_TO_HA[mode]
hvac_list.append(ha_mode)
except KeyError:
_LOGGER.error(
"Invalid operation mode mapping. %s doesn't map. "
"Please report this",
mode,
)
return hvac_list
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
self.wink.set_temperature(target_temp)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
hvac_mode_to_set = HA_HVAC_TO_WINK.get(hvac_mode)
self.wink.set_operation_mode(hvac_mode_to_set)
def set_preset_mode(self, preset_mode):
"""Set new preset mode."""
if preset_mode == PRESET_ECO:
self.wink.set_operation_mode("auto_eco")
elif self.hvac_mode == HVAC_MODE_COOL and preset_mode == PRESET_NONE:
self.set_hvac_mode(HVAC_MODE_COOL)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self.wink.current_max_set_point()
@property
def fan_mode(self):
"""
Return the current fan mode.
The official Wink app only supports 3 modes [low, medium, high]
which are equal to [0.33, 0.66, 1.0] respectively.
"""
speed = self.wink.current_fan_speed()
if speed <= 0.33:
return FAN_LOW
if speed <= 0.66:
return FAN_MEDIUM
return FAN_HIGH
@property
def fan_modes(self):
"""Return a list of available fan modes."""
return SUPPORT_FAN_AC
def set_fan_mode(self, fan_mode):
"""
Set fan speed.
The official Wink app only supports 3 modes [low, medium, high]
which are equal to [0.33, 0.66, 1.0] respectively.
"""
if fan_mode == FAN_LOW:
speed = 0.33
elif fan_mode == FAN_MEDIUM:
speed = 0.66
elif fan_mode == FAN_HIGH:
speed = 1.0
self.wink.set_ac_fan_speed(speed)
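# Hedged sketch (illustrative only, not part of this integration): the three supported
# fan modes map to fixed Wink speeds, so the if/elif chains above can also be expressed
# as a table-driven lookup. The medium-speed fallback for unknown modes is an assumption.
_WINK_FAN_SPEED_SKETCH = {FAN_LOW: 0.33, FAN_MEDIUM: 0.66, FAN_HIGH: 1.0}
def _fan_mode_to_speed_sketch(fan_mode):
    """Return the Wink fan speed for a Home Assistant fan mode (default: medium)."""
    return _WINK_FAN_SPEED_SKETCH.get(fan_mode, 0.66)
def _speed_to_fan_mode_sketch(speed):
    """Return the Home Assistant fan mode for a Wink fan speed in (0, 1]."""
    if speed <= 0.33:
        return FAN_LOW
    if speed <= 0.66:
        return FAN_MEDIUM
    return FAN_HIGH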
|
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine.resources.openstack.neutron import neutron
from heat.engine.resources.openstack.neutron import subnet
from heat.engine import support
from heat.engine import translation
class Router(neutron.NeutronResource):
"""A resource that implements Neutron router.
Router is a physical or virtual network device that passes network traffic
between different networks.
"""
required_service_extension = 'router'
entity = 'router'
PROPERTIES = (
NAME, EXTERNAL_GATEWAY, VALUE_SPECS, ADMIN_STATE_UP,
L3_AGENT_ID, L3_AGENT_IDS, DISTRIBUTED, HA, TAGS,
) = (
'name', 'external_gateway_info', 'value_specs', 'admin_state_up',
'l3_agent_id', 'l3_agent_ids', 'distributed', 'ha', 'tags',
)
_EXTERNAL_GATEWAY_KEYS = (
EXTERNAL_GATEWAY_NETWORK, EXTERNAL_GATEWAY_ENABLE_SNAT,
EXTERNAL_GATEWAY_FIXED_IPS,
) = (
'network', 'enable_snat', 'external_fixed_ips',
)
_EXTERNAL_GATEWAY_FIXED_IPS_KEYS = (
IP_ADDRESS, SUBNET
) = (
'ip_address', 'subnet'
)
ATTRIBUTES = (
STATUS, EXTERNAL_GATEWAY_INFO_ATTR, NAME_ATTR, ADMIN_STATE_UP_ATTR,
TENANT_ID,
) = (
'status', 'external_gateway_info', 'name', 'admin_state_up',
'tenant_id',
)
properties_schema = {
NAME: properties.Schema(
properties.Schema.STRING,
_('The name of the router.'),
update_allowed=True
),
EXTERNAL_GATEWAY: properties.Schema(
properties.Schema.MAP,
_('External network gateway configuration for a router.'),
schema={
EXTERNAL_GATEWAY_NETWORK: properties.Schema(
properties.Schema.STRING,
_('ID or name of the external network for the gateway.'),
required=True,
update_allowed=True
),
EXTERNAL_GATEWAY_ENABLE_SNAT: properties.Schema(
properties.Schema.BOOLEAN,
_('Enables Source NAT on the router gateway. NOTE: The '
'default policy setting in Neutron restricts usage of '
'this property to administrative users only.'),
update_allowed=True
),
EXTERNAL_GATEWAY_FIXED_IPS: properties.Schema(
properties.Schema.LIST,
_('External fixed IP addresses for the gateway.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
IP_ADDRESS: properties.Schema(
properties.Schema.STRING,
_('External fixed IP address.'),
constraints=[
constraints.CustomConstraint('ip_addr'),
]
),
SUBNET: properties.Schema(
properties.Schema.STRING,
_('Subnet of external fixed IP address.'),
constraints=[
constraints.CustomConstraint(
'neutron.subnet')
]
),
}
),
update_allowed=True,
support_status=support.SupportStatus(version='6.0.0')
),
},
update_allowed=True
),
VALUE_SPECS: properties.Schema(
properties.Schema.MAP,
_('Extra parameters to include in the creation request.'),
default={},
update_allowed=True
),
ADMIN_STATE_UP: properties.Schema(
properties.Schema.BOOLEAN,
_('The administrative state of the router.'),
default=True,
update_allowed=True
),
L3_AGENT_ID: properties.Schema(
properties.Schema.STRING,
_('ID of the L3 agent. NOTE: The default policy setting in '
'Neutron restricts usage of this property to administrative '
'users only.'),
update_allowed=True,
support_status=support.SupportStatus(
status=support.HIDDEN,
version='6.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
version='2015.1',
message=_('Use property %s.') % L3_AGENT_IDS,
previous_status=support.SupportStatus(version='2014.1')
)
),
),
L3_AGENT_IDS: properties.Schema(
properties.Schema.LIST,
            _('List of L3 agent IDs. Users can specify multiple agents '
              'for a highly available router. NOTE: The default policy '
'setting in Neutron restricts usage of this property to '
'administrative users only.'),
schema=properties.Schema(
properties.Schema.STRING,
),
update_allowed=True,
support_status=support.SupportStatus(version='2015.1')
),
DISTRIBUTED: properties.Schema(
properties.Schema.BOOLEAN,
_('Indicates whether or not to create a distributed router. '
'NOTE: The default policy setting in Neutron restricts usage '
'of this property to administrative users only. This property '
              'cannot be used in conjunction with the L3 agent ID.'),
support_status=support.SupportStatus(version='2015.1')
),
HA: properties.Schema(
properties.Schema.BOOLEAN,
_('Indicates whether or not to create a highly available router. '
'NOTE: The default policy setting in Neutron restricts usage '
              'of this property to administrative users only. Neutron does '
              'not currently support distributed and HA at the same time.'),
support_status=support.SupportStatus(version='2015.1')
),
TAGS: properties.Schema(
properties.Schema.LIST,
_('The tags to be added to the router.'),
schema=properties.Schema(properties.Schema.STRING),
update_allowed=True,
support_status=support.SupportStatus(version='9.0.0')
),
}
attributes_schema = {
STATUS: attributes.Schema(
_("The status of the router."),
type=attributes.Schema.STRING
),
EXTERNAL_GATEWAY_INFO_ATTR: attributes.Schema(
_("Gateway network for the router."),
type=attributes.Schema.MAP
),
NAME_ATTR: attributes.Schema(
_("Friendly name of the router."),
type=attributes.Schema.STRING
),
ADMIN_STATE_UP_ATTR: attributes.Schema(
_("Administrative state of the router."),
type=attributes.Schema.STRING
),
TENANT_ID: attributes.Schema(
_("Tenant owning the router."),
type=attributes.Schema.STRING
),
}
def translation_rules(self, props):
client_plugin = self.client_plugin()
rules = [
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.EXTERNAL_GATEWAY, self.EXTERNAL_GATEWAY_NETWORK],
client_plugin=client_plugin,
finder='find_resourceid_by_name_or_id',
entity=client_plugin.RES_TYPE_NETWORK
),
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.EXTERNAL_GATEWAY, self.EXTERNAL_GATEWAY_FIXED_IPS,
self.SUBNET],
client_plugin=client_plugin,
finder='find_resourceid_by_name_or_id',
entity=client_plugin.RES_TYPE_SUBNET
),
]
if props.get(self.L3_AGENT_ID):
rules.extend([
translation.TranslationRule(
props,
translation.TranslationRule.ADD,
[self.L3_AGENT_IDS],
[props.get(self.L3_AGENT_ID)]),
translation.TranslationRule(
props,
translation.TranslationRule.DELETE,
[self.L3_AGENT_ID]
)])
return rules
def validate(self):
super(Router, self).validate()
is_distributed = self.properties[self.DISTRIBUTED]
l3_agent_id = self.properties[self.L3_AGENT_ID]
l3_agent_ids = self.properties[self.L3_AGENT_IDS]
is_ha = self.properties[self.HA]
if l3_agent_id and l3_agent_ids:
raise exception.ResourcePropertyConflict(self.L3_AGENT_ID,
self.L3_AGENT_IDS)
        # do not specify an L3 agent when creating a distributed router
if is_distributed and (l3_agent_id or l3_agent_ids):
raise exception.ResourcePropertyConflict(
self.DISTRIBUTED,
"/".join([self.L3_AGENT_ID, self.L3_AGENT_IDS]))
if is_ha and is_distributed:
raise exception.ResourcePropertyConflict(self.DISTRIBUTED,
self.HA)
if not is_ha and l3_agent_ids and len(l3_agent_ids) > 1:
msg = _('Non HA routers can only have one L3 agent.')
raise exception.StackValidationFailed(message=msg)
def add_dependencies(self, deps):
super(Router, self).add_dependencies(deps)
external_gw = self.properties[self.EXTERNAL_GATEWAY]
if external_gw:
external_gw_net = external_gw.get(self.EXTERNAL_GATEWAY_NETWORK)
for res in six.itervalues(self.stack):
if res.has_interface('OS::Neutron::Subnet'):
try:
subnet_net = res.properties.get(subnet.Subnet.NETWORK)
except (ValueError, TypeError):
# Properties errors will be caught later in validation,
# where we can report them in their proper context.
continue
if subnet_net == external_gw_net:
deps += (self, res)
def _resolve_gateway(self, props):
gateway = props.get(self.EXTERNAL_GATEWAY)
if gateway:
gateway['network_id'] = gateway.pop(self.EXTERNAL_GATEWAY_NETWORK)
if gateway[self.EXTERNAL_GATEWAY_ENABLE_SNAT] is None:
del gateway[self.EXTERNAL_GATEWAY_ENABLE_SNAT]
if gateway[self.EXTERNAL_GATEWAY_FIXED_IPS] is None:
del gateway[self.EXTERNAL_GATEWAY_FIXED_IPS]
else:
self._resolve_subnet(gateway)
return props
def _get_l3_agent_list(self, props):
l3_agent_id = props.pop(self.L3_AGENT_ID, None)
l3_agent_ids = props.pop(self.L3_AGENT_IDS, None)
if not l3_agent_ids and l3_agent_id:
l3_agent_ids = [l3_agent_id]
return l3_agent_ids
def _resolve_subnet(self, gateway):
external_gw_fixed_ips = gateway[self.EXTERNAL_GATEWAY_FIXED_IPS]
for fixed_ip in external_gw_fixed_ips:
for key, value in fixed_ip.copy().items():
if value is None:
fixed_ip.pop(key)
if self.SUBNET in fixed_ip:
fixed_ip['subnet_id'] = fixed_ip.pop(self.SUBNET)
def handle_create(self):
props = self.prepare_properties(
self.properties,
self.physical_resource_name())
self._resolve_gateway(props)
l3_agent_ids = self._get_l3_agent_list(props)
tags = props.pop(self.TAGS, [])
router = self.client().create_router({'router': props})['router']
self.resource_id_set(router['id'])
if l3_agent_ids:
self._replace_agent(l3_agent_ids)
if tags:
self.set_tags(tags)
def check_create_complete(self, *args):
attributes = self._show_resource()
return self.is_built(attributes)
def handle_delete(self):
try:
self.client().delete_router(self.resource_id)
except Exception as ex:
self.client_plugin().ignore_not_found(ex)
else:
return True
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if self.EXTERNAL_GATEWAY in prop_diff:
self._resolve_gateway(prop_diff)
if self.L3_AGENT_IDS in prop_diff or self.L3_AGENT_ID in prop_diff:
l3_agent_ids = self._get_l3_agent_list(prop_diff)
self._replace_agent(l3_agent_ids)
if self.TAGS in prop_diff:
tags = prop_diff.pop(self.TAGS)
self.set_tags(tags)
if prop_diff:
self.prepare_update_properties(prop_diff)
self.client().update_router(
self.resource_id, {'router': prop_diff})
def _replace_agent(self, l3_agent_ids=None):
ret = self.client().list_l3_agent_hosting_routers(
self.resource_id)
for agent in ret['agents']:
self.client().remove_router_from_l3_agent(
agent['id'], self.resource_id)
if l3_agent_ids:
for l3_agent_id in l3_agent_ids:
self.client().add_router_to_l3_agent(
l3_agent_id, {'router_id': self.resource_id})
def parse_live_resource_data(self, resource_properties, resource_data):
result = super(Router, self).parse_live_resource_data(
resource_properties, resource_data)
try:
ret = self.client().list_l3_agent_hosting_routers(self.resource_id)
if ret:
result[self.L3_AGENT_IDS] = list(
agent['id'] for agent in ret['agents'])
except self.client_plugin().exceptions.Forbidden:
# Just pass if forbidden
pass
gateway = resource_data.get(self.EXTERNAL_GATEWAY)
if gateway is not None:
result[self.EXTERNAL_GATEWAY] = {
self.EXTERNAL_GATEWAY_NETWORK: gateway.get('network_id'),
self.EXTERNAL_GATEWAY_ENABLE_SNAT: gateway.get('enable_snat')
}
return result
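# Hedged illustration (standalone sketch, not the resource's own code): _resolve_gateway
# above prepares external_gateway_info for the Neutron API by renaming 'network' to
# 'network_id', dropping unset keys, and renaming 'subnet' to 'subnet_id' inside each
# external fixed IP. A minimal dict-level version of the same translation:
def _resolve_gateway_sketch(gateway):
    gateway = dict(gateway)
    gateway['network_id'] = gateway.pop('network')
    for optional in ('enable_snat', 'external_fixed_ips'):
        if gateway.get(optional) is None:
            gateway.pop(optional, None)
    for fixed_ip in gateway.get('external_fixed_ips', []):
        for key in [k for k, v in fixed_ip.items() if v is None]:
            del fixed_ip[key]
        if 'subnet' in fixed_ip:
            fixed_ip['subnet_id'] = fixed_ip.pop('subnet')
    return gateway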
class RouterInterface(neutron.NeutronResource):
"""A resource for managing Neutron router interfaces.
Router interfaces associate routers with existing subnets or ports.
"""
required_service_extension = 'router'
PROPERTIES = (
ROUTER, ROUTER_ID, SUBNET_ID, SUBNET, PORT_ID, PORT
) = (
'router', 'router_id', 'subnet_id', 'subnet', 'port_id', 'port'
)
properties_schema = {
ROUTER: properties.Schema(
properties.Schema.STRING,
_('The router.'),
required=True,
constraints=[
constraints.CustomConstraint('neutron.router')
],
),
ROUTER_ID: properties.Schema(
properties.Schema.STRING,
_('ID of the router.'),
support_status=support.SupportStatus(
status=support.HIDDEN,
version='6.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
message=_('Use property %s.') % ROUTER,
version='2015.1',
previous_status=support.SupportStatus(version='2013.1')
)
),
constraints=[
constraints.CustomConstraint('neutron.router')
],
),
SUBNET_ID: properties.Schema(
properties.Schema.STRING,
support_status=support.SupportStatus(
status=support.HIDDEN,
message=_('Use property %s.') % SUBNET,
version='5.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
version='2014.2'
)
),
constraints=[
constraints.CustomConstraint('neutron.subnet')
]
),
SUBNET: properties.Schema(
properties.Schema.STRING,
            _('The subnet. Either subnet or port should be '
              'specified.'),
constraints=[
constraints.CustomConstraint('neutron.subnet')
]
),
PORT_ID: properties.Schema(
properties.Schema.STRING,
            _('The port ID. Either subnet or port_id should be specified.'),
support_status=support.SupportStatus(
status=support.HIDDEN,
version='6.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
message=_('Use property %s.') % PORT,
version='2015.1',
previous_status=support.SupportStatus(version='2014.1')
)
),
constraints=[
constraints.CustomConstraint('neutron.port')
]
),
PORT: properties.Schema(
properties.Schema.STRING,
            _('The port. Either subnet or port should be specified.'),
support_status=support.SupportStatus(version='2015.1'),
constraints=[
constraints.CustomConstraint('neutron.port')
]
)
}
def translation_rules(self, props):
client_plugin = self.client_plugin()
return [
translation.TranslationRule(
props,
translation.TranslationRule.REPLACE,
[self.PORT],
value_path=[self.PORT_ID]
),
translation.TranslationRule(
props,
translation.TranslationRule.REPLACE,
[self.ROUTER],
value_path=[self.ROUTER_ID]
),
translation.TranslationRule(
props,
translation.TranslationRule.REPLACE,
[self.SUBNET],
value_path=[self.SUBNET_ID]
),
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.PORT],
client_plugin=client_plugin,
finder='find_resourceid_by_name_or_id',
entity=client_plugin.RES_TYPE_PORT
),
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.ROUTER],
client_plugin=client_plugin,
finder='find_resourceid_by_name_or_id',
entity=client_plugin.RES_TYPE_ROUTER
),
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.SUBNET],
client_plugin=client_plugin,
finder='find_resourceid_by_name_or_id',
entity=client_plugin.RES_TYPE_SUBNET
)
]
def validate(self):
"""Validate any of the provided params."""
super(RouterInterface, self).validate()
prop_subnet_exists = self.properties.get(self.SUBNET) is not None
prop_port_exists = self.properties.get(self.PORT) is not None
if prop_subnet_exists and prop_port_exists:
raise exception.ResourcePropertyConflict(self.SUBNET,
self.PORT)
if not prop_subnet_exists and not prop_port_exists:
raise exception.PropertyUnspecifiedError(self.SUBNET,
self.PORT)
def handle_create(self):
router_id = dict(self.properties).get(self.ROUTER)
key = 'subnet_id'
value = dict(self.properties).get(self.SUBNET)
if not value:
key = 'port_id'
value = dict(self.properties).get(self.PORT)
self.client().add_interface_router(
router_id,
{key: value})
self.resource_id_set('%s:%s=%s' % (router_id, key, value))
def handle_delete(self):
if not self.resource_id:
return
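        # resource_id has the form "<router_id>:<key>=<value>"; resources
        # created before the key was recorded stored only "<router_id>:<subnet_id>",
        # hence the length check below.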
tokens = self.resource_id.replace('=', ':').split(':')
if len(tokens) == 2: # compatible with old data
tokens.insert(1, 'subnet_id')
(router_id, key, value) = tokens
with self.client_plugin().ignore_not_found:
self.client().remove_interface_router(
router_id,
{key: value})
class RouterGateway(neutron.NeutronResource):
support_status = support.SupportStatus(
status=support.HIDDEN,
message=_('Use the `external_gateway_info` property in '
'the router resource to set up the gateway.'),
version='5.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
version='2014.1'
)
)
PROPERTIES = (
ROUTER_ID, NETWORK_ID, NETWORK,
) = (
'router_id', 'network_id', 'network'
)
properties_schema = {
ROUTER_ID: properties.Schema(
properties.Schema.STRING,
_('ID of the router.'),
required=True,
constraints=[
constraints.CustomConstraint('neutron.router')
]
),
NETWORK_ID: properties.Schema(
properties.Schema.STRING,
support_status=support.SupportStatus(
status=support.HIDDEN,
message=_('Use property %s.') % NETWORK,
version='9.0.0',
previous_status=support.SupportStatus(
status=support.DEPRECATED,
version='2014.2'
)
),
constraints=[
constraints.CustomConstraint('neutron.network')
],
),
NETWORK: properties.Schema(
properties.Schema.STRING,
            _('External network for the gateway.'),
constraints=[
constraints.CustomConstraint('neutron.network')
],
),
}
def translation_rules(self, props):
client_plugin = self.client_plugin()
return [
translation.TranslationRule(
props,
translation.TranslationRule.REPLACE,
[self.NETWORK],
value_path=[self.NETWORK_ID]
),
translation.TranslationRule(
props,
translation.TranslationRule.RESOLVE,
[self.NETWORK],
client_plugin=client_plugin,
finder='find_resourceid_by_name_or_id',
entity=client_plugin.RES_TYPE_NETWORK
)
]
def add_dependencies(self, deps):
super(RouterGateway, self).add_dependencies(deps)
for resource in six.itervalues(self.stack):
# depend on any RouterInterface in this template with the same
# router_id as this router_id
if resource.has_interface('OS::Neutron::RouterInterface'):
try:
dep_router_id = resource.properties[RouterInterface.ROUTER]
router_id = self.properties[self.ROUTER_ID]
except (ValueError, TypeError):
# Properties errors will be caught later in validation,
# where we can report them in their proper context.
continue
if dep_router_id == router_id:
deps += (self, resource)
# depend on any subnet in this template with the same network_id
# as this network_id, as the gateway implicitly creates a port
# on that subnet
if resource.has_interface('OS::Neutron::Subnet'):
try:
dep_network = resource.properties[subnet.Subnet.NETWORK]
network = self.properties[self.NETWORK]
except (ValueError, TypeError):
# Properties errors will be caught later in validation,
# where we can report them in their proper context.
continue
if dep_network == network:
deps += (self, resource)
def handle_create(self):
router_id = self.properties[self.ROUTER_ID]
network_id = dict(self.properties).get(self.NETWORK)
self.client().add_gateway_router(
router_id,
{'network_id': network_id})
self.resource_id_set('%s:%s' % (router_id, network_id))
def handle_delete(self):
if not self.resource_id:
return
(router_id, network_id) = self.resource_id.split(':')
with self.client_plugin().ignore_not_found:
self.client().remove_gateway_router(router_id)
def resource_mapping():
return {
'OS::Neutron::Router': Router,
'OS::Neutron::RouterInterface': RouterInterface,
'OS::Neutron::RouterGateway': RouterGateway,
}
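# Illustrative HOT template fragment (not part of this module) showing how the
# resource types registered above are typically declared:
#
#   resources:
#     router:
#       type: OS::Neutron::Router
#     router_iface:
#       type: OS::Neutron::RouterInterface
#       properties:
#         router: { get_resource: router }
#         subnet: private-subnet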
|
|
# SPDX-License-Identifier: MIT
import os
import json
import logging
# Generate random port numbers for the worker services below.
# Deliberately unseeded: fresh randomness is wanted on every run
# for this use case.
from random import randint
contributor_breadth_worker_p = randint(48000, 48500)
facade_worker_p = randint(48501, 49000)
insight_worker_p = randint(49002, 49500)
metric_status_worker_p = randint(49501, 50000)
pull_request_worker_p = randint(50001, 50500)
repo_info_worker_p = randint(50501, 51000)
value_worker_p = randint(51002, 51500)
contributor_worker_p = randint(52000, 52500)
message_insights_worker_p = randint(53000, 53499)
pull_request_analysis_worker_p = randint(54000, 54500)
discourse_analysis_worker_p = randint(54500, 54999)
message_insights_worker_p = randint(55000, 55499)
clustering_worker_p = randint(57000, 57499)
github_worker_p = randint(57500, 58000)
linux_badge_worker_p = randint(47000,47499)
gitlab_issues_worker_p = randint(47500,47999)
release_worker_p = randint(56000, 56499)
gitlab_merge_request_worker_p = randint(56500, 56999)
deps_worker_p = randint(58001, 58499)
main_port = 5000 #randint(5001,5300)
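# Each worker type draws a random port from its own dedicated range so that
# different workers do not collide; the API server itself keeps the fixed
# port defined above.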
from augur.logging import ROOT_AUGUR_DIRECTORY
ENVVAR_PREFIX = "AUGUR_"
CONFIG_HOME = f"{os.getenv('HOME', '~')}/.augur"
default_config = {
"version": 1,
"Augur": {
"developer": 0
},
"Database": {
"name": "augur",
"host": "localhost",
"key": "key",
"password": "augur",
"port": 5432,
"user": "augur",
"gitlab_api_key":"gitlab_api_key"
},
"Housekeeper": {
"update_redirects": {
"switch": 0,
"repo_group_id": 0
},
"jobs": [
{
"delay": 150000,
"given": [
"git_url"
],
"model": "deps",
"repo_group_id": 0
},
{
"delay": 150000,
"given": [
"github_url"
],
"model": "contributor_breadth",
"repo_group_id": 0
},
{
"all_focused": 1,
"delay": 150000,
"given": [
"github_url"
],
"model": "issues",
"repo_group_id": 0
},
{
"all_focused": 1,
"delay": 150000,
"given": [
"git_url"
],
"model": "merge_requests",
"repo_group_id": 0
},
{
"all_focused": 1,
"delay": 150000,
"given": [
"git_url"
],
"model": "merge_request_commits",
"repo_group_id": 0
},
{
"all_focused": 1,
"delay": 150000,
"given": [
"git_url"
],
"model": "merge_request_files",
"repo_group_id": 0
},
{
"delay": 150000,
"given": [
"github_url"
],
"model": "pull_request_commits",
"repo_group_id": 0
},
{
"delay": 150000,
"given": [
"github_url"
],
"model": "repo_info",
"repo_group_id": 0
},
{
"delay": 150000,
"given": [
"repo_group"
],
"model": "commits",
"repo_group_id": 0
},
{
"delay": 1000000,
"given": [
"github_url"
],
"model": "pull_requests",
"repo_group_id": 0
},
{
"delay": 1000000,
"given": [
"git_url"
],
"model": "contributors",
"repo_group_id": 0
},
{
"delay": 1000000,
"given": [
"git_url"
],
"model": "insights",
"repo_group_id": 0
},
{
"delay": 1000000,
"given": [
"git_url"
],
"model": "badges",
"repo_group_id": 0
},
{
"delay": 1000000,
"given": [
"git_url"
],
"model": "value",
"repo_group_id": 0
},
{
"delay": 100000,
"given": [
"github_url"
],
"model": "pull_request_files",
"repo_group_id": 0
},
{
"delay": 100000,
"given": [
"github_url"
],
"model": "releases",
"repo_group_id": 0
},
{
"delay": 100000,
"given": [
"github_url"
],
"model": "message_analysis",
"repo_group_id": 0
},
{
"delay": 100000,
"given": [
"github_url"
],
"model": "pull_request_analysis",
"repo_group_id": 0
},
{
"delay": 10000,
"given":[
"git_url"
],
"model" : "discourse_analysis",
"repo_group_id" : 0
},
{
"delay": 10000,
"given": [
"git_url"
],
"model": "clustering",
"repo_group_id": 0
},
{
"all_focused": 1,
"delay": 150000,
"given": [
"git_url"
],
"model": "gitlab_issues",
"repo_group_id": 0
},
{
"delay": 150000,
"given": [
"git_url"
],
"model": "deps",
"repo_group_id": 0
}
]
},
"Workers": {
"contributor_breadth_worker": {
"port": contributor_breadth_worker_p,
"switch": 0,
"workers": 1
},
"facade_worker": {
"port": facade_worker_p,
"repo_directory": "repos/",
"switch": 1,
"workers": 1
},
"github_worker": {
"port": github_worker_p,
"switch": 1,
"workers": 1
},
"insight_worker": {
"port": insight_worker_p,
"metrics": {"issues-new": "issues", "code-changes": "commit_count", "code-changes-lines": "added",
"reviews": "pull_requests", "contributors-new": "new_contributors"},
"confidence_interval": 95,
"contamination": 0.1,
"switch": 1,
"workers": 1,
"training_days": 1000,
"anomaly_days": 14
},
"linux_badge_worker": {
"port": linux_badge_worker_p ,
"switch": 1,
"workers": 1
},
"metric_status_worker": {
"port": metric_status_worker_p,
"switch": 0,
"workers": 1
},
"pull_request_worker": {
"port": pull_request_worker_p,
"switch": 1,
"workers": 1
},
"repo_info_worker": {
"port": repo_info_worker_p,
"switch": 1,
"workers": 1
},
"value_worker": {
"port": value_worker_p,
"scc_bin": "scc",
"switch": 0,
"workers": 1
},
"contributor_worker": {
"port": contributor_worker_p,
"switch": 0,
"workers": 1
},
"gitlab_issues_worker": {
"port": gitlab_issues_worker_p,
"switch": 1,
"workers": 1
},
"release_worker": {
"port": release_worker_p,
"switch": 1,
"workers": 1
},
"gitlab_merge_request_worker": {
"port": gitlab_merge_request_worker_p,
"switch": 0,
"workers": 1
},
"message_insights_worker": {
"port": message_insights_worker_p,
"switch": 0,
"workers": 1,
"insight_days": 30,
"models_dir": "message_models"
},
"pull_request_analysis_worker": {
"port": pull_request_analysis_worker_p,
"switch": 0,
"workers": 1,
"insight_days": 30
},
"discourse_analysis_worker":{
"port" : discourse_analysis_worker_p,
"switch": 0,
"workers": 1
},
"message_insights_worker": {
"port": message_insights_worker_p,
"switch": 0,
"workers": 1,
"insight_days": 30,
"models_dir": "message_models"
},
"pull_request_analysis_worker": {
"port": pull_request_analysis_worker_p,
"switch": 0,
"workers": 1,
"insight_days": 30
},
"discourse_analysis_worker":{
"port" : discourse_analysis_worker_p,
"switch": 0,
"workers": 1
},
"clustering_worker": {
"port": clustering_worker_p,
"switch": 0,
"workers": 1,
"max_df" : 0.9,
"max_features" : 1000,
"min_df": 0.1,
"num_clusters" : 4
},
"deps_worker": {
"port": deps_worker_p,
"switch": 0,
"workers": 1
}
},
"Facade": {
"check_updates": 1,
"clone_repos": 1,
"create_xlsx_summary_files": 1,
"delete_marked_repos": 0,
"fix_affiliations": 1,
"force_analysis": 1,
"force_invalidate_caches": 1,
"force_updates": 1,
"limited_run": 0,
"multithreaded": 0,
"nuke_stored_affiliations": 0,
"pull_repos": 1,
"rebuild_caches": 1,
"run_analysis": 1
},
"Server": {
"cache_expire": "3600",
"host": "0.0.0.0",
"port": main_port,
"workers": 4,
"timeout": 60
},
"Frontend": {
"host": "0.0.0.0",
"port": main_port
},
"Logging": {
"logs_directory": "logs/",
"log_level": "INFO",
"verbose": 0,
"quiet": 0,
"debug": 0
}
}
logger = logging.getLogger(__name__)
class AugurConfig():
"""docstring for AugurConfig"""
    def __init__(self, root_augur_dir, given_config=None):
self._default_config_file_name = 'augur.config.json'
self._root_augur_dir = root_augur_dir
self._default_config = default_config
self._env_config = {}
self.config_file_location = None
self.load_config()
self.version = self.get_version()
        self._config.update(given_config or {})
def get_section(self, section_name):
try:
return self._config[section_name]
except KeyError as e:
if not self.using_default_config:
                logger.warning(f"{section_name} not found in loaded config. Checking default config")
try:
return self._default_config[section_name]
except KeyError as e:
logger.error(f"No defaults found for {section_name}")
raise(e)
else:
logger.debug(f"Already using default config, skipping check for {section_name}")
def get_version(self):
try:
return self._config["version"]
except KeyError as e:
logger.warning("No config version found. Setting version to 0.")
return 0
def get_value(self, section_name, value):
try:
return self._config[section_name][value]
except KeyError as e:
if not self.using_default_config:
                logger.warning(f"{section_name}:{value} not found in loaded config. Checking default config")
try:
return self._default_config[section_name][value]
except KeyError as e:
logger.error(f"No defaults found for {section_name}:{value}")
raise(e)
else:
logger.debug(f"Already using default config, skipping check for {section_name}:{value}")
def load_config(self):
self._config = None
self.using_default_config = False
logger.debug("Attempting to load config file")
try:
self.discover_config_file()
try:
with open(self.config_file_location, 'r+') as config_file_handle:
self._config = json.loads(config_file_handle.read())
logger.debug("Config file loaded successfully")
except json.decoder.JSONDecodeError as e:
logger.warning("Unable to parse config. Using default configuration")
self.using_default_config = True
self._config = default_config
except AugurConfigFileNotFoundException as e:
logger.warning("Config file not found. Using default configuration")
self.using_default_config = True
self._config = default_config
self.load_env_configuration()
def discover_config_file(self):
developer_config_location = ROOT_AUGUR_DIRECTORY + "/" + self._default_config_file_name
config_file_path = None
config_locations = [developer_config_location, CONFIG_HOME + "/" + self._default_config_file_name
, f"/opt/augur/{self._default_config_file_name}"]
if os.getenv('AUGUR_CONFIG_FILE', None) is not None:
config_file_path = os.getenv('AUGUR_CONFIG_FILE')
else:
for location in config_locations:
try:
f = open(location, "r+")
config_file_path = os.path.abspath(location)
f.close()
break
except FileNotFoundError:
pass
if config_file_path:
self.config_file_location = config_file_path
else:
raise(AugurConfigFileNotFoundException(message="Config file was not found", errors=None))
def load_env_configuration(self):
self.set_env_value(section='Database', name='key', environment_variable='AUGUR_GITHUB_API_KEY')
self.set_env_value(section='Database', name='host', environment_variable='AUGUR_DB_HOST')
self.set_env_value(section='Database', name='name', environment_variable='AUGUR_DB_NAME')
self.set_env_value(section='Database', name='port', environment_variable='AUGUR_DB_PORT')
self.set_env_value(section='Database', name='user', environment_variable='AUGUR_DB_USER')
self.set_env_value(section='Database', name='password', environment_variable='AUGUR_DB_PASSWORD')
self.set_env_value(section='Logging', name='log_level', environment_variable='AUGUR_LOG_LEVEL')
self.set_env_value(section='Logging', name='quiet', environment_variable='AUGUR_LOG_QUIET')
self.set_env_value(section='Logging', name='debug', environment_variable='AUGUR_LOG_DEBUG')
self.set_env_value(section='Logging', name='verbose', environment_variable='AUGUR_LOG_VERBOSE')
def set_env_value(self, section, name, environment_variable, sub_config=None):
"""
Sets names and values of specified config section according to their environment variables.
"""
# using sub_config lets us grab values from nested config blocks
if sub_config is None:
sub_config = self._config
env_value = os.getenv(environment_variable)
if env_value is not None:
self._env_config[environment_variable] = env_value
sub_config[section][name] = env_value
# logger.info(f"{section}:[\"{name}\"] set to {env_value} by: {environment_variable}")
else:
self._env_config[environment_variable] = self.get_value(section, name)
def get_raw_config(self):
return self._config
def get_default_config(self):
return self._default_config
def get_env_config(self):
return self._env_config
class AugurConfigFileNotFoundException(Exception):
def __init__(self, message, errors):
super().__init__(message)
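# Illustrative usage (assumes an augur.config.json can be found by
# discover_config_file(); otherwise the defaults above are used):
#
#   config = AugurConfig(ROOT_AUGUR_DIRECTORY)
#   db_host = config.get_value('Database', 'host')
#   server_settings = config.get_section('Server')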
|
|
# strategies.py
# Copyright (C) 2005, 2006, 2007, 2008 Michael Bayer [email protected]
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""sqlalchemy.orm.interfaces.LoaderStrategy implementations, and related MapperOptions."""
from sqlalchemy import sql, util, exceptions, logging
from sqlalchemy.sql import util as sql_util
from sqlalchemy.sql import visitors, expression, operators
from sqlalchemy.orm import mapper, attributes
from sqlalchemy.orm.interfaces import LoaderStrategy, StrategizedOption, MapperOption, PropertyOption, serialize_path, deserialize_path
from sqlalchemy.orm import session as sessionlib
from sqlalchemy.orm import util as mapperutil
class ColumnLoader(LoaderStrategy):
"""Default column loader."""
def init(self):
super(ColumnLoader, self).init()
self.columns = self.parent_property.columns
self._should_log_debug = logging.is_debug_enabled(self.logger)
self.is_composite = hasattr(self.parent_property, 'composite_class')
def setup_query(self, context, parentclauses=None, **kwargs):
for c in self.columns:
if parentclauses is not None:
context.secondary_columns.append(parentclauses.aliased_column(c))
else:
context.primary_columns.append(c)
def init_class_attribute(self):
self.is_class_level = True
if self.is_composite:
self._init_composite_attribute()
else:
self._init_scalar_attribute()
def _init_composite_attribute(self):
self.logger.info("register managed composite attribute %s on class %s" % (self.key, self.parent.class_.__name__))
def copy(obj):
return self.parent_property.composite_class(
*obj.__composite_values__())
def compare(a, b):
for col, aprop, bprop in zip(self.columns,
a.__composite_values__(),
b.__composite_values__()):
if not col.type.compare_values(aprop, bprop):
return False
else:
return True
sessionlib.register_attribute(self.parent.class_, self.key, uselist=False, useobject=False, copy_function=copy, compare_function=compare, mutable_scalars=True, comparator=self.parent_property.comparator)
def _init_scalar_attribute(self):
self.logger.info("register managed attribute %s on class %s" % (self.key, self.parent.class_.__name__))
coltype = self.columns[0].type
sessionlib.register_attribute(self.parent.class_, self.key, uselist=False, useobject=False, copy_function=coltype.copy_value, compare_function=coltype.compare_values, mutable_scalars=self.columns[0].type.is_mutable(), comparator=self.parent_property.comparator)
def create_row_processor(self, selectcontext, mapper, row):
if self.is_composite:
for c in self.columns:
if c not in row:
break
else:
def new_execute(instance, row, **flags):
if self._should_log_debug:
self.logger.debug("populating %s with %s/%s..." % (mapperutil.attribute_str(instance, self.key), row.__class__.__name__, self.columns[0].key))
instance.__dict__[self.key] = self.parent_property.composite_class(*[row[c] for c in self.columns])
if self._should_log_debug:
self.logger.debug("Returning active composite column fetcher for %s %s" % (mapper, self.key))
return (new_execute, None, None)
elif self.columns[0] in row:
def new_execute(instance, row, **flags):
if self._should_log_debug:
self.logger.debug("populating %s with %s/%s" % (mapperutil.attribute_str(instance, self.key), row.__class__.__name__, self.columns[0].key))
instance.__dict__[self.key] = row[self.columns[0]]
if self._should_log_debug:
self.logger.debug("Returning active column fetcher for %s %s" % (mapper, self.key))
return (new_execute, None, None)
else:
def new_execute(instance, row, isnew, **flags):
if isnew:
instance._state.expire_attributes([self.key])
if self._should_log_debug:
self.logger.debug("Deferring load for %s %s" % (mapper, self.key))
return (new_execute, None, None)
ColumnLoader.logger = logging.class_logger(ColumnLoader)
class DeferredColumnLoader(LoaderStrategy):
"""Deferred column loader, a per-column or per-column-group lazy loader."""
def create_row_processor(self, selectcontext, mapper, row):
if self.columns[0] in row:
return self.parent_property._get_strategy(ColumnLoader).create_row_processor(selectcontext, mapper, row)
elif not self.is_class_level or len(selectcontext.options):
def new_execute(instance, row, **flags):
if self._should_log_debug:
self.logger.debug("set deferred callable on %s" % mapperutil.attribute_str(instance, self.key))
instance._state.set_callable(self.key, self.setup_loader(instance))
return (new_execute, None, None)
else:
def new_execute(instance, row, **flags):
if self._should_log_debug:
self.logger.debug("set deferred callable on %s" % mapperutil.attribute_str(instance, self.key))
instance._state.reset(self.key)
return (new_execute, None, None)
def init(self):
super(DeferredColumnLoader, self).init()
if hasattr(self.parent_property, 'composite_class'):
raise NotImplementedError("Deferred loading for composite types not implemented yet")
self.columns = self.parent_property.columns
self.group = self.parent_property.group
self._should_log_debug = logging.is_debug_enabled(self.logger)
def init_class_attribute(self):
self.is_class_level = True
self.logger.info("register managed attribute %s on class %s" % (self.key, self.parent.class_.__name__))
sessionlib.register_attribute(self.parent.class_, self.key, uselist=False, useobject=False, callable_=self.class_level_loader, copy_function=self.columns[0].type.copy_value, compare_function=self.columns[0].type.compare_values, mutable_scalars=self.columns[0].type.is_mutable(), comparator=self.parent_property.comparator)
def setup_query(self, context, only_load_props=None, **kwargs):
if \
(self.group is not None and context.attributes.get(('undefer', self.group), False)) or \
(only_load_props and self.key in only_load_props):
self.parent_property._get_strategy(ColumnLoader).setup_query(context, **kwargs)
def class_level_loader(self, instance, props=None):
if not mapper.has_mapper(instance):
return None
localparent = mapper.object_mapper(instance)
# adjust for the ColumnProperty associated with the instance
# not being our own ColumnProperty. This can occur when entity_name
# mappers are used to map different versions of the same ColumnProperty
# to the class.
prop = localparent.get_property(self.key)
if prop is not self.parent_property:
return prop._get_strategy(DeferredColumnLoader).setup_loader(instance)
return LoadDeferredColumns(instance, self.key, props)
def setup_loader(self, instance, props=None, create_statement=None):
return LoadDeferredColumns(instance, self.key, props, optimizing_statement=create_statement)
DeferredColumnLoader.logger = logging.class_logger(DeferredColumnLoader)
class LoadDeferredColumns(object):
"""callable, serializable loader object used by DeferredColumnLoader"""
def __init__(self, instance, key, keys, optimizing_statement=None):
self.instance = instance
self.key = key
self.keys = keys
self.optimizing_statement = optimizing_statement
def __getstate__(self):
return {'instance':self.instance, 'key':self.key, 'keys':self.keys}
def __setstate__(self, state):
self.instance = state['instance']
self.key = state['key']
self.keys = state['keys']
self.optimizing_statement = None
def __call__(self):
if not mapper.has_identity(self.instance):
return None
localparent = mapper.object_mapper(self.instance, raiseerror=False)
prop = localparent.get_property(self.key)
strategy = prop._get_strategy(DeferredColumnLoader)
if self.keys:
toload = self.keys
elif strategy.group:
toload = [p.key for p in localparent.iterate_properties if isinstance(p.strategy, DeferredColumnLoader) and p.group==strategy.group]
else:
toload = [self.key]
# narrow the keys down to just those which have no history
group = [k for k in toload if k in self.instance._state.unmodified]
if strategy._should_log_debug:
strategy.logger.debug("deferred load %s group %s" % (mapperutil.attribute_str(self.instance, self.key), group and ','.join(group) or 'None'))
session = sessionlib.object_session(self.instance)
if session is None:
raise exceptions.UnboundExecutionError("Parent instance %s is not bound to a Session; deferred load operation of attribute '%s' cannot proceed" % (self.instance.__class__, self.key))
query = session.query(localparent)
if not self.optimizing_statement:
ident = self.instance._instance_key[1]
query._get(None, ident=ident, only_load_props=group, refresh_instance=self.instance._state)
else:
statement, params = self.optimizing_statement(self.instance)
query.from_statement(statement).params(params)._get(None, only_load_props=group, refresh_instance=self.instance._state)
return attributes.ATTR_WAS_SET
class DeferredOption(StrategizedOption):
def __init__(self, key, defer=False):
super(DeferredOption, self).__init__(key)
self.defer = defer
def get_strategy_class(self):
if self.defer:
return DeferredColumnLoader
else:
return ColumnLoader
class UndeferGroupOption(MapperOption):
def __init__(self, group):
self.group = group
def process_query(self, query):
query._attributes[('undefer', self.group)] = True
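# These option classes back the public query options (e.g. defer()/undefer()
# construct DeferredOption and undefer_group() constructs UndeferGroupOption;
# helper names as in the public sqlalchemy.orm API of this era), which are
# then handed to Query.options().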
class AbstractRelationLoader(LoaderStrategy):
def init(self):
super(AbstractRelationLoader, self).init()
for attr in ['primaryjoin', 'secondaryjoin', 'secondary', 'foreign_keys', 'mapper', 'target', 'table', 'uselist', 'cascade', 'attributeext', 'order_by', 'remote_side', 'direction']:
setattr(self, attr, getattr(self.parent_property, attr))
self._should_log_debug = logging.is_debug_enabled(self.logger)
def _init_instance_attribute(self, instance, callable_=None):
if callable_:
instance._state.set_callable(self.key, callable_)
else:
instance._state.initialize(self.key)
def _register_attribute(self, class_, callable_=None, **kwargs):
self.logger.info("register managed %s attribute %s on class %s" % ((self.uselist and "list-holding" or "scalar"), self.key, self.parent.class_.__name__))
sessionlib.register_attribute(class_, self.key, uselist=self.uselist, useobject=True, extension=self.attributeext, cascade=self.cascade, trackparent=True, typecallable=self.parent_property.collection_class, callable_=callable_, comparator=self.parent_property.comparator, **kwargs)
class NoLoader(AbstractRelationLoader):
def init_class_attribute(self):
self.is_class_level = True
self._register_attribute(self.parent.class_)
def create_row_processor(self, selectcontext, mapper, row):
def new_execute(instance, row, ispostselect, **flags):
if not ispostselect:
if self._should_log_debug:
self.logger.debug("initializing blank scalar/collection on %s" % mapperutil.attribute_str(instance, self.key))
self._init_instance_attribute(instance)
return (new_execute, None, None)
NoLoader.logger = logging.class_logger(NoLoader)
class LazyLoader(AbstractRelationLoader):
def init(self):
super(LazyLoader, self).init()
(self.__lazywhere, self.__bind_to_col, self._equated_columns) = self.__create_lazy_clause(self.parent_property)
self.logger.info(str(self.parent_property) + " lazy loading clause " + str(self.__lazywhere))
# determine if our "lazywhere" clause is the same as the mapper's
# get() clause. then we can just use mapper.get()
#from sqlalchemy.orm import query
self.use_get = not self.uselist and self.mapper._get_clause[0].compare(self.__lazywhere)
if self.use_get:
self.logger.info(str(self.parent_property) + " will use query.get() to optimize instance loads")
def init_class_attribute(self):
self.is_class_level = True
self._register_attribute(self.parent.class_, callable_=self.class_level_loader)
def lazy_clause(self, instance, reverse_direction=False, alias_secondary=False):
if instance is None:
return self._lazy_none_clause(reverse_direction)
if not reverse_direction:
(criterion, bind_to_col, rev) = (self.__lazywhere, self.__bind_to_col, self._equated_columns)
else:
(criterion, bind_to_col, rev) = LazyLoader.__create_lazy_clause(self.parent_property, reverse_direction=reverse_direction)
def visit_bindparam(bindparam):
mapper = reverse_direction and self.parent_property.mapper or self.parent_property.parent
if bindparam.key in bind_to_col:
# use the "committed" (database) version to get query column values
                # it is also a deferred value, so that when used by Query the
                # committed value is picked up after an autoflush occurs
bindparam.value = lambda: mapper._get_committed_attr_by_column(instance, bind_to_col[bindparam.key])
if self.secondary and alias_secondary:
criterion = sql_util.ClauseAdapter(self.secondary.alias()).traverse(criterion)
return visitors.traverse(criterion, clone=True, visit_bindparam=visit_bindparam)
def _lazy_none_clause(self, reverse_direction=False):
if not reverse_direction:
(criterion, bind_to_col, rev) = (self.__lazywhere, self.__bind_to_col, self._equated_columns)
else:
(criterion, bind_to_col, rev) = LazyLoader.__create_lazy_clause(self.parent_property, reverse_direction=reverse_direction)
def visit_binary(binary):
mapper = reverse_direction and self.parent_property.mapper or self.parent_property.parent
if isinstance(binary.left, expression._BindParamClause) and binary.left.key in bind_to_col:
# reverse order if the NULL is on the left side
binary.left = binary.right
binary.right = expression.null()
binary.operator = operators.is_
elif isinstance(binary.right, expression._BindParamClause) and binary.right.key in bind_to_col:
binary.right = expression.null()
binary.operator = operators.is_
return visitors.traverse(criterion, clone=True, visit_binary=visit_binary)
def class_level_loader(self, instance, options=None, path=None):
if not mapper.has_mapper(instance):
return None
localparent = mapper.object_mapper(instance)
# adjust for the PropertyLoader associated with the instance
# not being our own PropertyLoader. This can occur when entity_name
# mappers are used to map different versions of the same PropertyLoader
# to the class.
prop = localparent.get_property(self.key)
if prop is not self.parent_property:
return prop._get_strategy(LazyLoader).setup_loader(instance)
return LoadLazyAttribute(instance, self.key, options, path)
def setup_loader(self, instance, options=None, path=None):
return LoadLazyAttribute(instance, self.key, options, path)
def create_row_processor(self, selectcontext, mapper, row):
if not self.is_class_level or len(selectcontext.options):
def new_execute(instance, row, ispostselect, **flags):
if not ispostselect:
if self._should_log_debug:
self.logger.debug("set instance-level lazy loader on %s" % mapperutil.attribute_str(instance, self.key))
# we are not the primary manager for this attribute on this class - set up a per-instance lazyloader,
# which will override the class-level behavior
self._init_instance_attribute(instance, callable_=self.setup_loader(instance, selectcontext.options, selectcontext.query._current_path + selectcontext.path))
return (new_execute, None, None)
else:
def new_execute(instance, row, ispostselect, **flags):
if not ispostselect:
if self._should_log_debug:
self.logger.debug("set class-level lazy loader on %s" % mapperutil.attribute_str(instance, self.key))
# we are the primary manager for this attribute on this class - reset its per-instance attribute state,
# so that the class-level lazy loader is executed when next referenced on this instance.
# this usually is not needed unless the constructor of the object referenced the attribute before we got
# to load data into it.
instance._state.reset(self.key)
return (new_execute, None, None)
def __create_lazy_clause(cls, prop, reverse_direction=False):
binds = {}
lookup = {}
equated_columns = {}
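        # `lookup` maps each column that should become a bind parameter to its
        # join pair(s), which col_to_bind() below uses to swap it out; the
        # `equated_columns` reverse mapping is what the use_get / primary-key
        # optimization in LoadLazyAttribute relies on.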
if reverse_direction and not prop.secondaryjoin:
for l, r in prop.local_remote_pairs:
_list = lookup.setdefault(r, [])
_list.append((r, l))
equated_columns[l] = r
else:
for l, r in prop.local_remote_pairs:
_list = lookup.setdefault(l, [])
_list.append((l, r))
equated_columns[r] = l
def col_to_bind(col):
if col in lookup:
for tobind, equated in lookup[col]:
if equated in binds:
return None
if col not in binds:
binds[col] = sql.bindparam(None, None, type_=col.type)
return binds[col]
return None
lazywhere = prop.primaryjoin
if not prop.secondaryjoin or not reverse_direction:
lazywhere = visitors.traverse(lazywhere, before_clone=col_to_bind, clone=True)
if prop.secondaryjoin is not None:
secondaryjoin = prop.secondaryjoin
if reverse_direction:
secondaryjoin = visitors.traverse(secondaryjoin, before_clone=col_to_bind, clone=True)
lazywhere = sql.and_(lazywhere, secondaryjoin)
bind_to_col = dict([(binds[col].key, col) for col in binds])
return (lazywhere, bind_to_col, equated_columns)
__create_lazy_clause = classmethod(__create_lazy_clause)
LazyLoader.logger = logging.class_logger(LazyLoader)
class LoadLazyAttribute(object):
"""callable, serializable loader object used by LazyLoader"""
def __init__(self, instance, key, options, path):
self.instance = instance
self.key = key
self.options = options
self.path = path
def __getstate__(self):
return {'instance':self.instance, 'key':self.key, 'options':self.options, 'path':serialize_path(self.path)}
def __setstate__(self, state):
self.instance = state['instance']
self.key = state['key']
self.options= state['options']
self.path = deserialize_path(state['path'])
def __call__(self):
instance = self.instance
if not mapper.has_identity(instance):
return None
instance_mapper = mapper.object_mapper(instance)
prop = instance_mapper.get_property(self.key)
strategy = prop._get_strategy(LazyLoader)
if strategy._should_log_debug:
strategy.logger.debug("lazy load attribute %s on instance %s" % (self.key, mapperutil.instance_str(instance)))
session = sessionlib.object_session(instance)
if session is None:
try:
session = instance_mapper.get_session()
except exceptions.InvalidRequestError:
raise exceptions.UnboundExecutionError("Parent instance %s is not bound to a Session, and no contextual session is established; lazy load operation of attribute '%s' cannot proceed" % (instance.__class__, self.key))
q = session.query(prop.mapper).autoflush(False)
if self.path:
q = q._with_current_path(self.path)
# if we have a simple primary key load, use mapper.get()
# to possibly save a DB round trip
if strategy.use_get:
ident = []
allnulls = True
for primary_key in prop.mapper.primary_key:
val = instance_mapper._get_committed_attr_by_column(instance, strategy._equated_columns[primary_key])
allnulls = allnulls and val is None
ident.append(val)
if allnulls:
return None
if self.options:
q = q._conditional_options(*self.options)
return q.get(ident)
if strategy.order_by is not False:
q = q.order_by(strategy.order_by)
elif strategy.secondary is not None and strategy.secondary.default_order_by() is not None:
q = q.order_by(strategy.secondary.default_order_by())
if self.options:
q = q._conditional_options(*self.options)
q = q.filter(strategy.lazy_clause(instance))
result = q.all()
if strategy.uselist:
return result
else:
if result:
return result[0]
else:
return None
class EagerLoader(AbstractRelationLoader):
"""Loads related objects inline with a parent query."""
def init(self):
super(EagerLoader, self).init()
self.clauses = {}
self.join_depth = self.parent_property.join_depth
def init_class_attribute(self):
# class-level eager strategy; add the PropertyLoader
# to the parent's list of "eager loaders"; this tells the Query
# that eager loaders will be used in a normal query
self.parent._eager_loaders.add(self.parent_property)
# initialize a lazy loader on the class level attribute
self.parent_property._get_strategy(LazyLoader).init_class_attribute()
def setup_query(self, context, parentclauses=None, parentmapper=None, **kwargs):
"""Add a left outer join to the statement thats being constructed."""
path = context.path
# check for join_depth or basic recursion,
# if the current path was not explicitly stated as
# a desired "loaderstrategy" (i.e. via query.options())
if ("loaderstrategy", path) not in context.attributes:
if self.join_depth:
if len(path) / 2 > self.join_depth:
return
else:
if self.mapper.base_mapper in path:
return
if ("eager_row_processor", path) in context.attributes:
            # if the user defined an eager_row_processor, that's contains_eager().
            # don't render a LEFT OUTER JOIN; generate an AliasedClauses from
            # the decorator (this is a hack here, cleaned up in 0.5)
cl = context.attributes[("eager_row_processor", path)]
if cl:
row = cl(None)
class ActsLikeAliasedClauses(object):
def aliased_column(self, col):
return row.map[col]
clauses = ActsLikeAliasedClauses()
else:
clauses = None
else:
clauses = self.__create_eager_join(context, path, parentclauses, parentmapper, **kwargs)
if not clauses:
return
for value in self.mapper._iterate_polymorphic_properties():
context.exec_with_path(self.mapper, value.key, value.setup, context, parentclauses=clauses, parentmapper=self.mapper)
def __create_eager_join(self, context, path, parentclauses, parentmapper, **kwargs):
if parentmapper is None:
localparent = context.mapper
else:
localparent = parentmapper
if context.eager_joins:
towrap = context.eager_joins
else:
towrap = context.from_clause
# create AliasedClauses object to build up the eager query. this is cached after 1st creation.
try:
clauses = self.clauses[path]
except KeyError:
clauses = mapperutil.PropertyAliasedClauses(self.parent_property, self.parent_property.primaryjoin, self.parent_property.secondaryjoin, parentclauses)
self.clauses[path] = clauses
# place the "row_decorator" from the AliasedClauses into the QueryContext, where it will
# be picked up in create_row_processor() when results are fetched
context.attributes[("eager_row_processor", path)] = clauses.row_decorator
if self.secondaryjoin is not None:
context.eager_joins = sql.outerjoin(towrap, clauses.secondary, clauses.primaryjoin).outerjoin(clauses.alias, clauses.secondaryjoin)
# TODO: check for "deferred" cols on parent/child tables here ? this would only be
# useful if the primary/secondaryjoin are against non-PK columns on the tables (and therefore might be deferred)
if self.order_by is False and self.secondary.default_order_by() is not None:
context.eager_order_by += clauses.secondary.default_order_by()
else:
context.eager_joins = towrap.outerjoin(clauses.alias, clauses.primaryjoin)
# ensure all the cols on the parent side are actually in the
# columns clause (i.e. are not deferred), so that aliasing applied by the Query propagates
# those columns outward. This has the effect of "undefering" those columns.
for col in sql_util.find_columns(clauses.primaryjoin):
if localparent.mapped_table.c.contains_column(col):
context.primary_columns.append(col)
if self.order_by is False and clauses.alias.default_order_by() is not None:
context.eager_order_by += clauses.alias.default_order_by()
if clauses.order_by:
context.eager_order_by += util.to_list(clauses.order_by)
return clauses
def _create_row_decorator(self, selectcontext, row, path):
"""Create a *row decorating* function that will apply eager
aliasing to the row.
Also check that an identity key can be retrieved from the row,
else return None.
"""
#print "creating row decorator for path ", "->".join([str(s) for s in path])
if ("eager_row_processor", path) in selectcontext.attributes:
decorator = selectcontext.attributes[("eager_row_processor", path)]
if decorator is None:
decorator = lambda row: row
else:
if self._should_log_debug:
self.logger.debug("Could not locate aliased clauses for key: " + str(path))
return None
try:
decorated_row = decorator(row)
# check for identity key
identity_key = self.mapper.identity_key_from_row(decorated_row)
            # and it's good
return decorator
except KeyError, k:
            # no identity key - don't return a row processor; this causes a degrade to lazy loading
if self._should_log_debug:
self.logger.debug("could not locate identity key from row '%s'; missing column '%s'" % (repr(decorated_row), str(k)))
return None
def create_row_processor(self, selectcontext, mapper, row):
row_decorator = self._create_row_decorator(selectcontext, row, selectcontext.path)
pathstr = ','.join([str(x) for x in selectcontext.path])
if row_decorator is not None:
def execute(instance, row, isnew, **flags):
decorated_row = row_decorator(row)
if not self.uselist:
if self._should_log_debug:
self.logger.debug("eagerload scalar instance on %s" % mapperutil.attribute_str(instance, self.key))
if isnew:
# set a scalar object instance directly on the
# parent object, bypassing InstrumentedAttribute
# event handlers.
#
instance.__dict__[self.key] = self.mapper._instance(selectcontext, decorated_row, None)
else:
# call _instance on the row, even though the object has been created,
# so that we further descend into properties
self.mapper._instance(selectcontext, decorated_row, None)
else:
if isnew or self.key not in instance._state.appenders:
# appender_key can be absent from selectcontext.attributes with isnew=False
# when self-referential eager loading is used; the same instance may be present
# in two distinct sets of result columns
if self._should_log_debug:
self.logger.debug("initialize UniqueAppender on %s" % mapperutil.attribute_str(instance, self.key))
collection = attributes.init_collection(instance, self.key)
appender = util.UniqueAppender(collection, 'append_without_event')
instance._state.appenders[self.key] = appender
result_list = instance._state.appenders[self.key]
if self._should_log_debug:
self.logger.debug("eagerload list instance on %s" % mapperutil.attribute_str(instance, self.key))
self.mapper._instance(selectcontext, decorated_row, result_list)
if self._should_log_debug:
self.logger.debug("Returning eager instance loader for %s" % str(self))
return (execute, execute, None)
else:
if self._should_log_debug:
self.logger.debug("eager loader %s degrading to lazy loader" % str(self))
return self.parent_property._get_strategy(LazyLoader).create_row_processor(selectcontext, mapper, row)
def __str__(self):
return str(self.parent) + "." + self.key
EagerLoader.logger = logging.class_logger(EagerLoader)
class EagerLazyOption(StrategizedOption):
def __init__(self, key, lazy=True, chained=False, mapper=None):
super(EagerLazyOption, self).__init__(key, mapper)
self.lazy = lazy
self.chained = chained
def is_chained(self):
return not self.lazy and self.chained
def process_query_property(self, query, paths):
if self.lazy:
if paths[-1] in query._eager_loaders:
query._eager_loaders = query._eager_loaders.difference(util.Set([paths[-1]]))
else:
if not self.chained:
paths = [paths[-1]]
res = util.Set()
for path in paths:
if len(path) - len(query._current_path) == 2:
res.add(path)
query._eager_loaders = query._eager_loaders.union(res)
super(EagerLazyOption, self).process_query_property(query, paths)
def get_strategy_class(self):
if self.lazy:
return LazyLoader
elif self.lazy is False:
return EagerLoader
elif self.lazy is None:
return NoLoader
EagerLazyOption.logger = logging.class_logger(EagerLazyOption)
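# The lazy flag selects the strategy in get_strategy_class() above:
# lazy=True -> LazyLoader, lazy=False -> EagerLoader, lazy=None -> NoLoader.
# The public eagerload()/lazyload()/noload() helpers construct EagerLazyOption
# with the corresponding flag (helper names as in the sqlalchemy.orm API of
# this era).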
class RowDecorateOption(PropertyOption):
def __init__(self, key, decorator=None, alias=None):
super(RowDecorateOption, self).__init__(key)
self.decorator = decorator
self.alias = alias
def process_query_property(self, query, paths):
if self.alias is not None and self.decorator is None:
(mapper, propname) = paths[-1][-2:]
prop = mapper.get_property(propname, resolve_synonyms=True)
if isinstance(self.alias, basestring):
self.alias = prop.target.alias(self.alias)
self.decorator = mapperutil.create_row_adapter(self.alias)
query._attributes[("eager_row_processor", paths[-1])] = self.decorator
RowDecorateOption.logger = logging.class_logger(RowDecorateOption)
|
|
#
# Copyright 2017 Luma Pictures
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
#
'''
Not currently used
'''
from __future__ import absolute_import
from treemodel.itemtree import TreeItem, ItemLookupError
from treemodel.qt.base import AbstractTreeModelMixin, Column
from typing import (Any, Dict, Generic, Iterable, Iterator, List, Optional,
Tuple, TypeVar, Union, TYPE_CHECKING)
from pxr.UsdQt._Qt import QtCore, QtGui
T = TypeVar('T')
U = TypeVar('U')
NULL_INDEX = QtCore.QModelIndex()
# TODO: Signals?
class ItemIndexMap(Generic[T]):
'''
Generic storage container for hashable objects which acts like an ordered
set and provides fast index lookup.
'''
__slots__ = ('_itemIndexMap', '_itemList')
def __init__(self):
self._itemIndexMap = {}
self._itemList = []
def __len__(self):
'''
Returns
-------
int
'''
return len(self._itemList)
def __contains__(self, item):
return item in self._itemIndexMap
def item(self, index):
'''
Parameters
----------
index : int
Returns
-------
T
'''
try:
return self._itemList[index]
except IndexError:
pass
def index(self, item):
'''
Parameters
----------
item : T
Returns
-------
int
'''
return self._itemIndexMap.get(item)
def addItem(self, item):
'''
Add an item if it isn't already present, and return its index.
Parameters
----------
item : T
Returns
-------
int
'''
itemIndex = self._itemIndexMap.get(item)
if itemIndex is None:
self._itemList.append(item)
itemIndex = len(self._itemList) - 1
self._itemIndexMap[item] = itemIndex
return itemIndex
def removeItem(self, item):
'''
Remove an item. Return whether it was present.
Parameters
----------
item : T
Returns
-------
bool
'''
        index = self._itemIndexMap.pop(item, None)
        if index is not None:
            # swap-remove: move the last item into the vacated slot so removal
            # stays O(1); skip the swap when the removed item was itself the
            # last one, otherwise the assignment would index past the end.
            replacement = self._itemList.pop()
            if index < len(self._itemList):
                self._itemList[index] = replacement
                self._itemIndexMap[replacement] = index
            return True
        return False
# TODO: Break out data store class that includes flags and allows all data for
# an item to be looked up without a compound key, so it can be shared by other
# application components.
class ItemDataModel(QtCore.QAbstractTableModel):
'''
A table of data. Intended to be used in conjunction with a
`QAbstractProxyModel`, such as `ProxyTreeModel`.
Each cell of data is identified by a row item, a column, and a
Qt display role.
The row item can be any hashable object.
'''
def __init__(self, columns, itemIndexMap=None, parent=None):
'''
Parameters
----------
columns : Iterable[Column]
The columns to allocate in the data model.
itemIndexMap : Optional[ItemIndexMap[T]]
Provides the mapping between table row indices and items. If None,
a new empty instance will be created and used.
'''
super(ItemDataModel, self).__init__(parent)
self._itemIndexMap = None # type: ItemIndexMap[T]
self._itemIndexMap = itemIndexMap or ItemIndexMap()
self.columns = None # type: Tuple[Column, ...]
self.columnNameToIndex = None # type: Dict[str, int]
self._dataStore = {} # type: Dict[Tuple[T, str, int], U]
self.setColumns(columns)
# Qt methods ---------------------------------------------------------------
def index(self, row, column, parentIndex):
if parentIndex.isValid():
            raise RuntimeError('ItemDataModel.index: parent index should never '
                               'be valid')
# return NULL_INDEX
item = self._itemIndexMap.item(row)
assert item, 'ItemIndexMap lookup returned None in index()'
return QtCore.QAbstractTableModel.createIndex(self, row, column, item)
def rowCount(self, parentIndex):
if parentIndex.isValid():
return 0
return len(self._itemIndexMap)
def columnCount(self, parentIndex):
return len(self.columns)
def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole):
if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
return self.column(section).label
def data(self, modelIndex, role=QtCore.Qt.DisplayRole):
'''
Parameters
----------
modelIndex : QtCore.QModelIndex
role : int
The Qt role.
Returns
-------
Optional[U]
'''
if modelIndex.isValid():
column = self.column(modelIndex.column())
if column is None:
return
item = self._itemIndexMap.item(modelIndex.row())
assert item, 'ItemIndexMap lookup returned None in data()'
return self._dataStore.get((item, column.name, role))
def setData(self, modelIndex, value, role):
'''
Parameters
----------
modelIndex : QtCore.QModelIndex
value : U
role : int
The Qt role.
Returns
-------
bool
'''
if modelIndex.isValid():
column = self.column(modelIndex.column())
if column is None:
return False
item = self._itemIndexMap.item(modelIndex.row())
assert item, 'ItemIndexMap lookup returned None in setData()'
self._dataStore[(item, column.name, role)] = value
self.dataChanged.emit(modelIndex, modelIndex)
return True
return False
# Custom methods -----------------------------------------------------------
def setColumns(self, columns):
'''
Parameters
----------
columns : Iterable[Column]
'''
self.columns = tuple(columns)
# Map column names to indices
self.columnNameToIndex = dict((c.name, i)
for i, c in enumerate(self.columns))
def addItem(self, item):
'''
Register `item` in the table.
This gives the `item` a row index which Qt methods can use to access it.
Parameters
----------
item : T
'''
self._itemIndexMap.addItem(item)
def getItemIndex(self, item, column):
'''
Parameters
----------
item : T
column : Union[str, int, long]
Returns
-------
QtCore.QModelIndex
'''
row = self._itemIndexMap.index(item)
if row is not None:
if not isinstance(column, (int, long)):
column = self.columnNameToIndex[str(column)]
return QtCore.QAbstractItemModel.createIndex(self, row, column,
item)
return NULL_INDEX
def column(self, indexOrName):
'''
Return a ``Column`` instance given its name or index in the model.
Parameters
----------
indexOrName : Union[str, int, long]
Returns
-------
Optional[Column]
'''
if isinstance(indexOrName, basestring):
try:
return self.columns[self.columnNameToIndex[indexOrName]]
except KeyError:
return
else:
try:
return self.columns[indexOrName]
except IndexError:
return
def getItemData(self, itemOrRow, column, role=QtCore.Qt.DisplayRole):
'''
Get the data for a given ``DataItem``, column, and role.
This returns the same results as `self.data()`.
Parameters
----------
itemOrRow : Union[T, int]
The item (i.e. row key), or its internal index.
column : Union[str, int, long]
The column, as a column name or index.
role : int
The Qt role.
Returns
-------
Optional[U]
'''
# FIXME: this code prevents us from using an int as an `item`:
if isinstance(itemOrRow, (int, long)):
item = self._itemIndexMap.item(itemOrRow)
if item is None:
return
else:
item = itemOrRow
column = self.column(column)
if column is None:
return
return self._dataStore.get((item, column.name, role))
def setItemData(self, itemOrRow, column, role, value, emit=False):
'''
Directly set the data for a given item, column, and role.
        NOTE: This does **not** emit any Qt signals (the `emit` flag is not yet
        implemented), so connected proxy models and their views may not pick up
        the change without manual intervention.
Parameters
----------
itemOrRow : Union[T, int]
The item (i.e. row key), or its internal index.
column : Union[str, int, long]
The column, as a column name or index.
role : int
The Qt role.
value : U
The data to store.
Returns
-------
bool
whether the data was successfully set
'''
# FIXME: this code prevents us from using an int as an `item`:
if isinstance(itemOrRow, (int, long)):
item = self._itemIndexMap.item(itemOrRow)
if item is None:
return False
else:
item = itemOrRow
if item not in self._itemIndexMap:
raise ValueError('Given item does not exist in item-index mapping')
column = self.column(column)
if column is None:
return False
self._dataStore[(item, column.name, role)] = value
# TODO
if emit:
pass
return True
class ProxyTreeModel(AbstractTreeModelMixin, QtCore.QAbstractProxyModel):
'''
Maps the data stored in an `ItemDataModel` to the tree structure provided by
an `ItemTree`. Both must contain the same `TreeItem` instances.
'''
def __init__(self, sourceModel, sourceColumns=None, itemTree=None,
parent=None):
'''
Parameters
----------
sourceModel : ItemDataModel[TreeItem, U]
sourceColumns : Optional[Iterable[Union[str, Column]]]
Columns (or names of columns) within `sourceModel` which should be
used as the columns for this model. If None, defaults to the
complete list of columns from `sourceModel`.
itemTree : Optional[ItemTree]
parent
'''
if parent is None:
parent = sourceModel
super(ProxyTreeModel, self).__init__(itemTree=itemTree, parent=parent)
# this is just a fast lookup for QAbstractProxyModel.sourceModel()
self._sourceModel = None # type: ItemDataModel[TreeItem, U]
# Maps proxy column indices to source column indices
self._proxyToSourceColumn = None # type: List[int]
# Maps source column indices to proxy column indices
self._sourceToProxyColumn = None # type: Dict[int, int]
self.setSourceModel(sourceModel)
self.setSourceColumns(sourceColumns or sourceModel.columns)
def columnCount(self, parentIndex):
return len(self._proxyToSourceColumn)
def data(self, modelIndex, role=QtCore.Qt.DisplayRole):
'''
Parameters
----------
modelIndex : QtCore.QModelIndex
role : int
Returns
-------
U
'''
if not modelIndex.isValid():
return
sourceIndex = self.mapToSource(modelIndex)
# if role == QtCore.Qt.SizeHintRole:
# return QtCore.QSize(10, 32)
item = sourceIndex.internalPointer() # type: TreeItem
return self._sourceModel.getItemData(item, sourceIndex.column(), role)
# TODO: Do we need to override this?
# def setData(self, modelIndex, value, role):
# if modelIndex.isValid():
# try:
# columnName = self.columnIndexToName[modelIndex.column()]
# except IndexError:
# return False
# item = modelIndex.internalPointer()
# self._dataStore[(item, columnName, role)] = value
# self.dataChanged.emit(modelIndex, modelIndex)
# return True
# return False
def mapFromSource(self, sourceIndex):
'''
Parameters
----------
sourceIndex : QtCore.QModelIndex
Returns
-------
QtCore.QModelIndex
'''
if sourceIndex.isValid():
try:
mappedColumn = self._sourceToProxyColumn[sourceIndex.column()]
except KeyError:
pass
else:
item = sourceIndex.internalPointer() # type: TreeItem
try:
rowIndex = self.itemTree.rowIndex(item)
except ItemLookupError:
pass
else:
return self.createIndex(rowIndex, mappedColumn, item)
return NULL_INDEX
def mapToSource(self, proxyIndex):
'''
Parameters
----------
proxyIndex : QtCore.QModelIndex
Returns
-------
QtCore.QModelIndex
'''
if proxyIndex.isValid():
try:
mappedColumn = self._proxyToSourceColumn[proxyIndex.column()]
except IndexError:
pass
else:
item = proxyIndex.internalPointer() # type: TreeItem
return self._sourceModel.getItemIndex(item, mappedColumn)
return NULL_INDEX
def setSourceModel(self, sourceModel):
# tell QAbstractProxyModel about our source model.
super(ProxyTreeModel, self).setSourceModel(sourceModel)
# we record self._sourceModel to avoid calls to
# QAbstractProxyModel.sourceModel().
# it might be over-cautious but the sourceModel gets accessed a lot and
# it's unclear whether it incurs a penalty for marshalling from
# c++ -> python.
self._sourceModel = sourceModel
# Custom methods -----------------------------------------------------------
def setSourceColumns(self, sourceColumns):
'''
Set the list of source columns that this model will present to its view,
as indices or column names.
Parameters
----------
        sourceColumns : Iterable[Union[str, Column]]
'''
forwardMap = [] # type: List[int]
for col in sourceColumns:
if isinstance(col, basestring):
name = col
elif isinstance(col, Column):
name = col.name
else:
raise TypeError(col)
forwardMap.append(self._sourceModel.columnNameToIndex[name])
self._proxyToSourceColumn = forwardMap
self._sourceToProxyColumn = dict((val, i)
for i, val in enumerate(forwardMap))
def getItemIndex(self, item, column):
'''
Parameters
----------
item : TreeItem
column : Union[str, int, long]
Returns
-------
QtCore.QModelIndex
'''
if isinstance(column, (int, long)):
column = self._proxyToSourceColumn[column]
return self.mapFromSource(self._sourceModel.getItemIndex(item, column))
def column(self, indexOrName):
'''
Return a source ``Column`` instance using its name or proxy index.
Parameters
----------
indexOrName : Union[str, int, long]
Returns
-------
Optional[Column]
'''
# Integers are treated as proxy column indices
if isinstance(indexOrName, (int, long)):
indexOrName = self._proxyToSourceColumn[indexOrName]
return self._sourceModel.column(indexOrName)
def columns(self):
'''
Returns
-------
List[Column]
'''
return [self._sourceModel.column(index)
for index in self._proxyToSourceColumn]
def itemsChanged(self, items, column=None):
'''
Parameters
----------
items : List[TreeItem]
        column : Optional[Union[str, int, long]]
'''
# Column is an optimization/simplification. May not be worth keeping.
if column is None:
startIndex = self.getItemIndex(items[0], 0)
endIndex = self.getItemIndex(items[-1], len(self._proxyToSourceColumn) - 1)
else:
startIndex = self.getItemIndex(items[0], column)
endIndex = self.getItemIndex(items[-1], column)
self.dataChanged.emit(startIndex, endIndex)
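# Illustrative wiring (assumed and simplified; Column and ItemTree come from
# treemodel.qt.base and treemodel.itemtree, and the Column constructor
# signature is assumed here):
#
#   dataModel = ItemDataModel([Column('name'), Column('kind')])
#   proxy = ProxyTreeModel(dataModel, itemTree=itemTree)
#   treeView.setModel(proxy)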
|
|
import json
import errno
import os
import time
import logging
import traceback
from cme.protocols.smb.remotefile import RemoteFile
from impacket import smb
from impacket.smb3structs import FILE_READ_DATA
from impacket.smbconnection import SessionError
CHUNK_SIZE = 4096
suffixes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB']
def humansize(nbytes):
i = 0
while nbytes >= 1024 and i < len(suffixes)-1:
nbytes /= 1024.
i += 1
f = ('%.2f' % nbytes).rstrip('0').rstrip('.')
return '%s %s' % (f, suffixes[i])
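# Illustrative examples (not part of the original module):
#   humansize(5)        # -> '5 Bytes'
#   humansize(123456)   # -> '120.56 KB'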
def humaclock(timestamp):
# Note: the parameter must not be named `time`, otherwise it would shadow the `time` module used below.
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp))
def make_dirs(path):
"""
Create the directory structure. We ignore the `errno.EEXIST` error that
may occur if the directories already exist.
"""
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
pass
get_list_from_option = lambda opt: list(map(lambda o: o.lower(), filter(bool, opt.split(','))))
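# Illustrative examples (not part of the original module):
#   get_list_from_option('ICO,LNK')   # -> ['ico', 'lnk']
#   get_list_from_option('')          # -> []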
class SMBSpiderPlus:
def __init__(self, smb, logger, read_only, exclude_dirs, exclude_exts, max_file_size, output_folder):
self.smb = smb
self.host = self.smb.conn.getRemoteHost()
self.conn_retry = 5
self.logger = logger
self.results = {}
self.read_only = read_only
self.exclude_dirs = exclude_dirs
self.exclude_exts = exclude_exts
self.max_file_size = max_file_size
self.output_folder = output_folder
# Make sure the output_folder exists
make_dirs(self.output_folder)
def reconnect(self):
if self.conn_retry > 0:
self.conn_retry -= 1
self.logger.info(f"Reconnect to server {self.conn_retry}")
# Renegotiate the session
time.sleep(3)
self.smb.create_conn_obj()
self.smb.login()
return True
return False
def list_path(self, share, subfolder):
filelist = []
try:
# Get file list for the current folder
filelist = self.smb.conn.listPath(share, subfolder + '*')
except SessionError as e:
self.logger.debug(f'Failed listing files on share "{share}" in directory {subfolder}.')
self.logger.debug(str(e))
if 'STATUS_ACCESS_DENIED' in str(e):
self.logger.debug(f"Cannot list files in directory \"{subfolder}\"")
elif 'STATUS_OBJECT_PATH_NOT_FOUND' in str(e):
self.logger.debug(f"The directory {subfolder} does not exist.")
elif self.reconnect():
filelist = self.list_path(share, subfolder)
return filelist
def get_remote_file(self, share, path):
try:
remote_file = RemoteFile(self.smb.conn, path, share, access=FILE_READ_DATA)
return remote_file
except SessionError:
if self.reconnect():
return self.get_remote_file(share, path)
return None
def read_chunk(self, remote_file, chunk_size=CHUNK_SIZE):
"""
Read the next chunk of data from the remote file.
We retry 3 times if there is a SessionError that is not a `STATUS_END_OF_FILE`.
"""
chunk = ''
retry = 3
while retry > 0:
retry -= 1
try:
chunk = remote_file.read(chunk_size)
break
except SessionError:
if self.reconnect():
# Little hack to reset the smb connection instance
remote_file.__smbConnection = self.smb.conn
return self.read_chunk(remote_file)
except Exception:
traceback.print_exc()
break
return chunk
def spider(self):
self.logger.debug("Enumerating shares for spidering")
shares = self.smb.shares()
try:
# Get all available shares for the SMB connection
for share in shares:
perms = share['access']
name = share['name']
self.logger.debug(f"Share \"{name}\" has perms {perms}")
# We only want to spider readable shares
if not 'READ' in perms:
continue
# `exclude_dirs` is applied to the shares name
if name.lower() in self.exclude_dirs:
self.logger.debug(f"Share \"{name}\" has been excluded.")
continue
try:
# Start the spider at the root of the share folder
self.results[name] = {}
self._spider(name, '')
except SessionError:
traceback.print_exc()
self.logger.error(f"Got a session error while spidering")
self.reconnect()
except Exception as e:
traceback.print_exc()
self.logger.error(f"Error enumerating shares: {str(e)}")
# Save the server share metadata so we can later grep on filenames
self.dump_folder_metadata(self.results)
return self.results
def _spider(self, share, subfolder):
self.logger.debug(f'Spider share "{share}" on folder "{subfolder}"')
filelist = self.list_path(share, subfolder + '*')
if share.lower() in self.exclude_dirs:
self.logger.debug(f'The directory has been excluded')
return
# For each entry:
# - It's a directory then we spider it (skipping `.` and `..`)
# - It's a file then we apply the checks
for result in filelist:
next_path = subfolder + result.get_longname()
next_path_lower = next_path.lower()
self.logger.debug(f'Current file on share "{share}": {next_path}')
# Exclude the current result if it's in the exclude_dirs list
if any(map(lambda d: d in next_path_lower, self.exclude_dirs)):
self.logger.debug(f'The path "{next_path}" has been excluded')
continue
if result.is_directory():
if result.get_longname() in ['.', '..']:
continue
self._spider(share, next_path + '/')
else:
# Record the file metadata
self.results[share][next_path] = {
'size': humansize(result.get_filesize()),
#'ctime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_ctime())),
'ctime_epoch': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_ctime_epoch())),
#'mtime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_mtime())),
'mtime_epoch': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_mtime_epoch())),
#'atime': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_atime())),
'atime_epoch': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(result.get_atime_epoch()))
}
# The collection logic is here. You can add more checks based
# on the file size, content, name, date...
# Check the file extension. We check it here to avoid creating
# a RemoteFile object, which performs a remote connection.
file_extension = next_path[next_path.rfind('.')+1:]
if file_extension in self.exclude_exts:
self.logger.debug(f'The file "{next_path}" has an excluded extension')
continue
# Skip the file if it is larger than the configured maximum size
if result.get_filesize() > self.max_file_size:
self.logger.debug(f'File {result.get_longname()} has size {result.get_filesize()}')
continue
## You can add more checks here: date, ...
if self.read_only == True:
continue
# The file passes the checks, then we fetch it!
remote_file = self.get_remote_file(share, next_path)
if not remote_file:
self.logger.error(f'Cannot open remote file "{next_path}".')
continue
try:
remote_file.open()
## TODO: add checks on the file content here
self.save_file(remote_file)
remote_file.close()
except SessionError as e:
if 'STATUS_SHARING_VIOLATION' in str(e):
pass
except Exception as e:
traceback.print_exc()
self.logger.error(f'Error reading file {next_path}: {str(e)}')
def save_file(self, remote_file):
# Reset the remote_file to point to the beginning of the file
remote_file.seek(0, 0)
# remove the "\\" before the remote host part
file_path = str(remote_file)[2:]
# The remote_file.file_name contains '/'
file_path = file_path.replace('/', os.path.sep)
file_path = file_path.replace('\\', os.path.sep)
filename = file_path.split(os.path.sep)[-1]
directory = os.path.join(self.output_folder, file_path[:-len(filename)])
# Create the subdirectories based on the share name and file path
self.logger.debug(f'Create directory "{directory}"')
make_dirs(directory)
with open(os.path.join(directory, filename), 'wb') as fd:
while True:
chunk = self.read_chunk(remote_file)
if not chunk:
break
fd.write(chunk)
def dump_folder_metadata(self, results):
# Save the remote host share metadata to a JSON file
# TODO: use the json file as an input to save only the new or modified
# files since the last time.
path = os.path.join(self.output_folder, f'{self.host}.json')
with open(path, 'w', encoding='utf-8') as fd:
fd.write(json.dumps(results, indent=4, sort_keys=True))
class CMEModule:
'''
Spider plus module
Module by @vincd
'''
name = 'spider_plus'
description = 'List files on the target server (excluding `DIR` directories and `EXT` extensions) and save them to the `OUTPUT` directory if they are smaller than `SIZE`'
supported_protocols = ['smb']
opsec_safe = True # Does the module touch disk?
multiple_hosts = True # Does it make sense to run this module on multiple hosts at a time?
def options(self, context, module_options):
"""
READ_ONLY       Only list files and put their names into a JSON file (default: True)
EXCLUDE_EXTS    File extensions to exclude (default: ico,lnk)
EXCLUDE_DIR     Directories to exclude (default: print$)
SIZE            Maximum file size, in bytes, allowed to dump (default: 51200)
OUTPUT          Path of the local folder where the dump will be saved (default: /tmp/cme_spider_plus)
"""
self.read_only = module_options.get('READ_ONLY', True)
self.exclude_exts = get_list_from_option(module_options.get('EXCLUDE_EXTS', 'ico,lnk'))
self.exclude_dirs = get_list_from_option(module_options.get('EXCLUDE_DIR', 'print$'))
self.max_file_size = int(module_options.get('SIZE', 50 * 1024))
self.output_folder = module_options.get('OUTPUT', os.path.join('/tmp', 'cme_spider_plus'))
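# Hypothetical invocation from the CrackMapExec CLI (illustrative only; the
# exact flags and option spellings may differ between versions):
#   cme smb 10.0.0.5 -u user -p pass -M spider_plus -o READ_ONLY=False SIZE=100000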
def on_login(self, context, connection):
context.log.info('Started spidering plus with option:')
context.log.info(' DIR: {dir}'.format(dir=self.exclude_dirs))
context.log.info(' EXT: {ext}'.format(ext=self.exclude_exts))
context.log.info(' SIZE: {size}'.format(size=self.max_file_size))
context.log.info(' OUTPUT: {output}'.format(output=self.output_folder))
spider = SMBSpiderPlus(
connection,
context.log,
self.read_only,
self.exclude_dirs,
self.exclude_exts,
self.max_file_size,
self.output_folder,
)
spider.spider()
|
|
import sys
import time
from django.db.backends.creation import BaseDatabaseCreation
TEST_DATABASE_PREFIX = 'test_'
PASSWORD = 'Im_a_lumberjack'
class DatabaseCreation(BaseDatabaseCreation):
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
#
# Any format strings starting with "qn_" are quoted before being used in the
# output (the "qn_" prefix is stripped before the lookup is performed).
data_types = {
'AutoField': 'NUMBER(11)',
'BooleanField': 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))',
'CharField': 'NVARCHAR2(%(max_length)s)',
'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
'DateField': 'DATE',
'DateTimeField': 'TIMESTAMP',
'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
'FileField': 'NVARCHAR2(%(max_length)s)',
'FilePathField': 'NVARCHAR2(%(max_length)s)',
'FloatField': 'DOUBLE PRECISION',
'IntegerField': 'NUMBER(11)',
'BigIntegerField': 'NUMBER(19)',
'IPAddressField': 'VARCHAR2(15)',
'GenericIPAddressField': 'VARCHAR2(39)',
'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))',
'OneToOneField': 'NUMBER(11)',
'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
'SlugField': 'NVARCHAR2(%(max_length)s)',
'SmallIntegerField': 'NUMBER(11)',
'TextField': 'NCLOB',
'TimeField': 'TIMESTAMP',
'URLField': 'VARCHAR2(%(max_length)s)',
}
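# Illustrative example (not part of the original source): a CharField with
# max_length=50 is rendered by interpolating the format string above, e.g.
#   'NVARCHAR2(%(max_length)s)' % {'max_length': 50}   # -> 'NVARCHAR2(50)'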
def __init__(self, connection):
super(DatabaseCreation, self).__init__(connection)
def _create_test_db(self, verbosity=1, autoclobber=False):
TEST_NAME = self._test_database_name()
TEST_USER = self._test_database_user()
TEST_PASSWD = self._test_database_passwd()
TEST_TBLSPACE = self._test_database_tblspace()
TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
parameters = {
'dbname': TEST_NAME,
'user': TEST_USER,
'password': TEST_PASSWD,
'tblspace': TEST_TBLSPACE,
'tblspace_temp': TEST_TBLSPACE_TMP,
}
cursor = self.connection.cursor()
if self._test_database_create():
try:
self._execute_test_db_creation(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test database '%s'..." % self.connection.alias
self._execute_test_db_destruction(cursor, parameters, verbosity)
self._execute_test_db_creation(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
if self._test_user_create():
if verbosity >= 1:
print "Creating test user..."
try:
self._create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error creating the test user: %s\n" % e)
if not autoclobber:
confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print "Destroying old test user..."
self._destroy_test_user(cursor, parameters, verbosity)
if verbosity >= 1:
print "Creating test user..."
self._create_test_user(cursor, parameters, verbosity)
except Exception, e:
sys.stderr.write("Got an error recreating the test user: %s\n" % e)
sys.exit(2)
else:
print "Tests cancelled."
sys.exit(1)
self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER']
self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD']
self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict['USER'] = TEST_USER
self.connection.settings_dict['PASSWORD'] = TEST_PASSWD
return self.connection.settings_dict['NAME']
def _destroy_test_db(self, test_database_name, verbosity=1):
"""
Destroy a test database, prompting the user for confirmation if the
database already exists. Returns the name of the test database created.
"""
TEST_NAME = self._test_database_name()
TEST_USER = self._test_database_user()
TEST_PASSWD = self._test_database_passwd()
TEST_TBLSPACE = self._test_database_tblspace()
TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER']
self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD']
parameters = {
'dbname': TEST_NAME,
'user': TEST_USER,
'password': TEST_PASSWD,
'tblspace': TEST_TBLSPACE,
'tblspace_temp': TEST_TBLSPACE_TMP,
}
cursor = self.connection.cursor()
time.sleep(1) # To avoid "database is being accessed by other users" errors.
if self._test_user_create():
if verbosity >= 1:
print 'Destroying test user...'
self._destroy_test_user(cursor, parameters, verbosity)
if self._test_database_create():
if verbosity >= 1:
print 'Destroying test database tables...'
self._execute_test_db_destruction(cursor, parameters, verbosity)
self.connection.close()
def _execute_test_db_creation(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_db(): dbname = %s" % parameters['dbname']
statements = [
"""CREATE TABLESPACE %(tblspace)s
DATAFILE '%(tblspace)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 200M
""",
"""CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M
REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M
""",
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _create_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_create_test_user(): username = %s" % parameters['user']
statements = [
"""CREATE USER %(user)s
IDENTIFIED BY %(password)s
DEFAULT TABLESPACE %(tblspace)s
TEMPORARY TABLESPACE %(tblspace_temp)s
QUOTA UNLIMITED ON %(tblspace)s
""",
"""GRANT CONNECT, RESOURCE TO %(user)s""",
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_test_db_destruction(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_execute_test_db_destruction(): dbname=%s" % parameters['dbname']
statements = [
'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print "_destroy_test_user(): user=%s" % parameters['user']
print "Be patient. This can take some time..."
statements = [
'DROP USER %(user)s CASCADE',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_statements(self, cursor, statements, parameters, verbosity):
for template in statements:
stmt = template % parameters
if verbosity >= 2:
print stmt
try:
cursor.execute(stmt)
except Exception, err:
sys.stderr.write("Failed (%s)\n" % (err))
raise
def _test_database_name(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
try:
if self.connection.settings_dict['TEST_NAME']:
name = self.connection.settings_dict['TEST_NAME']
except AttributeError:
pass
return name
def _test_database_create(self):
return self.connection.settings_dict.get('TEST_CREATE', True)
def _test_user_create(self):
return self.connection.settings_dict.get('TEST_USER_CREATE', True)
def _test_database_user(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['USER']
try:
if self.connection.settings_dict['TEST_USER']:
name = self.connection.settings_dict['TEST_USER']
except KeyError:
pass
return name
def _test_database_passwd(self):
name = PASSWORD
try:
if self.connection.settings_dict['TEST_PASSWD']:
name = self.connection.settings_dict['TEST_PASSWD']
except KeyError:
pass
return name
def _test_database_tblspace(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
try:
if self.connection.settings_dict['TEST_TBLSPACE']:
name = self.connection.settings_dict['TEST_TBLSPACE']
except KeyError:
pass
return name
def _test_database_tblspace_tmp(self):
name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME'] + '_temp'
try:
if self.connection.settings_dict['TEST_TBLSPACE_TMP']:
name = self.connection.settings_dict['TEST_TBLSPACE_TMP']
except KeyError:
pass
return name
def _get_test_db_name(self):
"""
We need to return the 'production' DB name to get the test DB creation
machinery to work. This isn't a problem in this case, because DB names
as handled by Django don't have real counterparts in Oracle.
"""
return self.connection.settings_dict['NAME']
def test_db_signature(self):
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
settings_dict['NAME'],
self._test_database_user(),
)
def set_autocommit(self):
self.connection.connection.autocommit = True
|
|
# -*- coding: utf-8 -*-
from Tkinter import *
from tkColorChooser import askcolor
from turtle import *
from Settings import *
import tkMessageBox, traceback, os, turtle, re, platform, math, sys, StringIO, contextlib, tkFileDialog
def get_color():
"""
Opens the color picker dialog and inserts the chosen color's red, green and
blue components into the text widget (values are on the 0-255 RGB scale)
"""
global text, LAN
color = askcolor()[0]
if color is not None:
text.insert(END, LAN.RED + ' = ' + str(color[0]) + '\n')
text.insert(END, LAN.GREEN + ' = ' + str(color[1]) + '\n')
text.insert(END, LAN.BLUE + ' = ' + str(color[2]) + '\n')
else:
pass
def get_prev():
"""
Sets the previously created script in the text widget, if one exists
"""
global prev_arr, text, cur_index
if (len(prev_arr) > 0 and cur_index > 0):
text.delete('1.0', END)
cur_index -= 1
text.insert(0.0, prev_arr[cur_index])
else:
pass
def get_next():
"""
Sets the following script in the text widget, if one exists
"""
global prev_arr, text, cur_index
if (len(prev_arr) > 0 and cur_index < len(prev_arr) - 1):
text.delete('1.0', END)
cur_index += 1
text.insert(0.0, prev_arr[cur_index])
else:
pass
def generate_code(turtle_code):
"""
Generates Python code from the given param, checks for hazardous functions
@type turtle_code: string
@param turtle_code: turtle drawing script
@rtype: string
@return: equivalent Python code
"""
global cur_difficulty, LAN
update_previous(turtle_code)
if cur_difficulty == 1:
text = LAN.re_lev_one(turtle_code)
elif cur_difficulty == 2:
text = LAN.re_lev_two(turtle_code)
elif cur_difficulty == 3:
text = LAN.re_lev_three(turtle_code)
elif cur_difficulty == 4:
text = LAN.re_lev_four(turtle_code)
else:
text = LAN.re_lev_five(turtle_code)
return text
def update_previous(code):
"""
Updates previously inserted turtle drawing code
@type code: string
@param code: turtle drawing code
"""
global prev_arr, cur_index
prev_arr.append(code)
cur_index = len(prev_arr) - 1
def is_empty(str):
"""
Checks if a string contains only white spaces or is empty
@type str: string
@param str: string to check
"""
return str.strip() == '' # True when the string is empty or contains only whitespace
def append_python_code():
"""
Gets a turtle drawing code from the text widget and sets the equivalent Python code to a new window
"""
global text, root, LAN
if is_empty(text.get('1.0', END)):
return
code_text = text.get('1.0', END)
code_text = generate_code(code_text)
toplevel = Toplevel()
toplevel.title(LAN.TITLE_CODE)
toplevel_text = Text(toplevel)
toplevel_text.insert(0.0, code_text)
toplevel_text.insert(END, 'turtle.mainloop()')
toplevel_text.insert(0.0, 'import turtle\ns = turtle.Screen()\nt = turtle.Turtle()\ns.colormode(255)\n')
toplevel_text.pack(expand = 1)
icon = PhotoImage(file = os.path.join('res', 'turtle.gif'))
toplevel.tk.call('wm', 'iconphoto', toplevel._w, icon)
menubar = Menu(toplevel)
menubar.add_command(label = LAN.COPY_CODE, command = copy_code(src = toplevel_text))
toplevel.config(menu = menubar)
toplevel.focus_set()
def is_hazard(code):
"""
Checks if string contains hazardous function calls
@type code: string
@param code: program code to check
"""
return True if re.search('remove|rmdir|rmtree|rename|move|open|file|import|turtle|tk|screen', code.lower()) is not None else False
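# Illustrative examples (not part of the original source):
#   is_hazard('import os')   # -> True, 'import' is in the blacklist
#   is_hazard('fd(100)')     # -> False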
@contextlib.contextmanager
def stdoutIO(stdout = None):
'''
Usage:
with stdoutIO() as s:
exec(code)
s.getvalue()
redirects exec's stdout to console (print() for instance), can then be used in turtle screen...
'''
old = sys.stdout
if stdout is None:
stdout = StringIO.StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old
def highlight_line(from_line, to_line):
'''
Highlights line or lines in text widget
@type from_line: int
@type to_line: int
@param from_line: line where highlight begins
@param to_line: line were highlight goes
'''
global text
text.tag_configure("current_line", background = "#FFF806")
text.tag_remove("current_line", 1.0, "end")
text.tag_add("current_line", str(from_line) + ".0", str(to_line) + '.0')
def split_into_blocks(txt):
'''
Splits given text into blocks by indents
@type txt: string
@param txt: string to split into blocks
@return: block array
'''
txt = txt.encode('utf8')
temp_arr = txt.split('\n')
arr = []
indent_arr = []
flag = False
for i, item in enumerate(temp_arr):
leading_spaces = len(item) - len(item.lstrip())
if leading_spaces == 0 and ('for i in range(' in item or 'if ' in item):
if flag == True:
temp = '\n'.join(indent_arr)
arr.append(temp)
indent_arr = []
indent_arr.append(item)
elif leading_spaces == 1 and flag == False: # first indent
flag = True
indent_arr.append(item)
elif flag == True and leading_spaces != 0:
indent_arr.append(item)
elif leading_spaces == 0 and flag == True:
flag = False
temp = '\n'.join(indent_arr)
arr.append(temp)
arr.append(item)
indent_arr = []
elif leading_spaces == 0 and flag == False:
arr.append(item)
if len(indent_arr) > 0:
arr.append('\n'.join(indent_arr))
return arr
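# Illustrative example (not part of the original source; single-space indents
# are assumed, since the splitter counts leading characters): the script
#   fd(100)
#   for i in range(4):
#    fd(50)
#    rt(90)
#   fd(10)
# is split into the blocks ['fd(100)', 'for i in range(4):\n fd(50)\n rt(90)', 'fd(10)'].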
def run_code(debug = False):
"""
Executes the contents of the text widget as Python code
"""
global text, s, LAN, show_flow, t
t.reset() # reset canvas
code_text = text.get('1.0', END)
code = generate_code(code_text)
from_line = to_line = None
try:
if is_hazard(code) == False:
if is_empty(code) == False:
if show_flow.get() == 0 and debug == False:
arr = split_into_blocks(code)
from_line = 0
to_line = 1
for i, item in enumerate(arr):
block_size = len(item.split('\n'))
if block_size == 1:
from_line += 1
to_line += 1
exec(item)
else:
from_line += 1
to_line += block_size
exec(item)
from_line += block_size - 1
else:
from time import sleep
arr = split_into_blocks(code)
from_line = 0
to_line = 1
for i, item in enumerate(arr):
if item != '':
block_size = len(item.split('\n'))
if block_size == 1:
from_line += 1
to_line += 1
highlight_line(from_line, to_line)
sleep(0.5)
te = item.encode('utf-8')
exec(te)
else:
from_line += 1
to_line += block_size
highlight_line(from_line, to_line)
sleep(0.5)
te = item.encode('utf-8')
exec(te)
from_line += block_size - 1
sleep(3)
text.tag_remove("current_line", 1.0, "end")
else:
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_HAZARD)
except Exception as e: # try to show a non-generic error message
print str(e)
if isinstance(e, IndentationError):
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_INDENTATION)
highlight_line(from_line, to_line)
elif isinstance(e, TypeError):
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_BRACKETS)
highlight_line(from_line, to_line)
elif isinstance(e, NameError):
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_NAME)
highlight_line(from_line, to_line)
elif isinstance(e, SyntaxError):
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_SYNTAX)
highlight_line(from_line, to_line)
elif isinstance(e, turtle.TurtleGraphicsError):
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_TURTLE)
highlight_line(from_line, to_line)
elif isinstance(e, AttributeError):
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_NAME)
highlight_line(from_line, to_line)
else:
print traceback.format_exc()
def on_close_root():
"""
Handles the closing operation of the root window
"""
global root
root.destroy()
def copy_code(src = None):
"""
Copies the string from the text widget to the clipboard
@type src: text widget
@param src: source of the copy, if None, copy from the root widget
"""
global text, root
code_text = text.get('1.0', END) if src == None else src.get('1.0', END)
root.clipboard_clear()
root.clipboard_append(code_text)
def paste_code():
"""
Pastes the code from the clipboard to the root text widget
"""
global root, text
try:
update_previous(text.get('1.0', END))
text.insert(INSERT, root.clipboard_get().strip())
except Exception as e:
pass
def remove_code():
"""
Clears the text widget, still saving the string to array
"""
global text
update_previous(text.get('1.0', END))
text.delete('1.0', END)
def get_selected(event):
'''
Gets the double clicked item from the command list
'''
global text
widget = event.widget
selection = widget.curselection()
value = widget.get(selection[0])
if value != '':
text.insert(END, value.split('-')[0].strip() + '\n')
syn()
def syn(event = None):
'''
For syntax highlighting
'''
global text, LAN
text.tag_remove("current_line", '1.0', END)
text.tag_remove("tagname","1.0", END)
first = "1.0"
count = IntVar()
while True:
first = text.search(LAN.HIGH_WORDS, first, END, count = count, regexp = True)
if not first:
break
last = first + "+" + str(count.get()) + "c"
text.tag_add("tagname", first, last)
text.tag_config("tagname", foreground = "#A51029")
first = last
__syn_brackets(text, LAN)
def __syn_brackets(text, LAN):
text.tag_remove("brack","1.0", END)
first = '1.0'
count = IntVar()
while True:
first = text.search(r'\(([^)]+)\)|\(\)', first, END, count = count, regexp = True)
if not first:
return
last = first + '+' + str(count.get()) + 'c'
text.tag_add('brack', first, last)
text.tag_config('brack', foreground = '#56B290')
first = last
def display():
"""
Creates the command list widget and an empty input text widget, and
populates the command list with the level-one commands
"""
global text, help, root, LAN
help = Listbox(root)
help.bind('<Double-Button-1>', get_selected)
text = Text(root)
txt_scrollbar = Scrollbar(root)
help_scrollbar = Scrollbar(root)
txt_scrollbar.config(command = text.yview)
help_scrollbar.config(command = help.yview)
text.config(yscrollcommand = txt_scrollbar.set)
help.config(yscrollcommand = help_scrollbar.set)
txt_scrollbar.pack(side = 'left', fill = 'y', expand = 0)
help_scrollbar.pack(side = 'right', fill = 'y', expand = 0)
arr = LAN.COMMANDS_LEV_ONE.split('\n')
for i, item in enumerate(arr):
help.insert(i, item)
if i % 2 != 0:
help.itemconfig(i, { 'bg': '#EEEEEE' })
else:
help.itemconfig(i, { 'bg': '#D5E0FF' })
text.insert(0.0, LAN.START_TXT)
help.pack(side = RIGHT, fill = BOTH, expand = 1)
text.pack(side = RIGHT, fill = 'x', expand = 1)
'''
Below are the handlers for scaffolding
'''
def easy():
global help, cur_difficulty, menubar, LAN
filemenu.entryconfig(LAN.COLOR_MAPS, state = 'disabled')
cur_difficulty = 1
help.delete(0, END)
arr = LAN.COMMANDS_LEV_ONE.split('\n')
for i, item in enumerate(arr):
help.insert(i, item)
if i % 2 != 0:
help.itemconfig(i, { 'bg': '#EEEEEE' })
else:
help.itemconfig(i, { 'bg': '#D5E0FF' })
def normal_easy():
global help, cur_difficulty, menubar, LAN
filemenu.entryconfig(LAN.COLOR_MAPS, state = 'disabled')
cur_difficulty = 2
help.delete(0, END)
arr = LAN.COMMANDS_LEV_TWO.split('\n')
for i, item in enumerate(arr):
help.insert(i, item)
if i % 2 != 0:
help.itemconfig(i, { 'bg': '#EEEEEE' })
else:
help.itemconfig(i, { 'bg': '#D5E0FF' })
def normal():
global help, cur_difficulty, menubar, LAN
filemenu.entryconfig(LAN.COLOR_MAPS, state = 'normal')
cur_difficulty = 3
help.delete(0, END)
arr = LAN.COMMANDS_LEV_THREE.split('\n')
for i, item in enumerate(arr):
help.insert(i, item)
if i % 2 != 0:
help.itemconfig(i, { 'bg': '#EEEEEE' })
else:
help.itemconfig(i, { 'bg': '#D5E0FF' })
def normal_hard():
global help, cur_difficulty, menubar, LAN
filemenu.entryconfig(LAN.COLOR_MAPS, state = 'normal')
cur_difficulty = 4
help.delete(0, END)
arr = LAN.COMMANDS_LEV_FOUR.split('\n')
for i, item in enumerate(arr):
help.insert(i, item)
if i % 2 != 0:
help.itemconfig(i, { 'bg': '#EEEEEE' })
else:
help.itemconfig(i, { 'bg': '#D5E0FF' })
def advanced():
global help, cur_difficulty, menubar, LAN
filemenu.entryconfig(LAN.COLOR_MAPS, state = 'normal')
cur_difficulty = 5
help.delete(0, END)
arr = LAN.COMMANDS_LEV_FIVE.split('\n')
for i, item in enumerate(arr):
help.insert(i, item)
if i % 2 != 0:
help.itemconfig(i, { 'bg': '#EEEEEE' })
else:
help.itemconfig(i, { 'bg': '#D5E0FF' })
'''
end scaffolding
'''
'''
Draws the coordinates, positions etc
'''
def refresher():
global root, canvas_compass, t
canvas_compass.delete('all')
angle = t.heading()
angle = math.radians(angle)
x = int(50 * math.cos(angle))
y = int(50 * math.sin(angle))
if x >= 0:
x += 50
elif x < 0:
x = 50 - abs(x)
if y == 0:
y = 50
elif y > 0:
y = 50 - abs(y)
elif y < 0:
y += 150
canvas_compass.create_line(50, 0, 50, 100)
canvas_compass.create_line(0, 50, 100, 50)
x_pos = canvas_compass.create_text(120, 60, anchor = 'nw')
y_pos = canvas_compass.create_text(120, 80, anchor = 'nw')
canvas_compass.itemconfig(x_pos, text = 'x: ' + str('%.1f' % t.pos()[0]))
canvas_compass.itemconfig(y_pos, text = 'y: ' + str('%.1f' % t.pos()[1]))
canvas_compass.insert(x_pos, 4, '')
canvas_compass.insert(y_pos, 4, '')
id_0 = canvas_compass.create_text(90, 35, anchor = 'nw')
id_90 = canvas_compass.create_text(30, 4, anchor = 'nw')
id_180 = canvas_compass.create_text(10, 54, anchor = 'nw')
id_270 = canvas_compass.create_text(55, 85, anchor = 'nw')
canvas_compass.itemconfig(id_0, text = '0')
canvas_compass.itemconfig(id_90, text = '90')
canvas_compass.itemconfig(id_180, text = '180')
canvas_compass.itemconfig(id_270, text = '270')
canvas_compass.insert(id_0, 4, '')
canvas_compass.insert(id_90, 4, '')
canvas_compass.insert(id_180, 4, '')
canvas_compass.insert(id_270, 4, '')
canvas_compass.create_oval(x - 1, y - 1, x + 1, y + 1, fill = 'red')
canvas_compass.create_line(x, y, 50, 50, fill = 'red', dash = (4, 4))
root.after(500, refresher)
'''
Saves code to xml file
'''
def save_code():
global text, cur_difficulty, LAN
code = text.get('1.0', END).lower()
temp = code.split('\n')
for i, item in enumerate(temp):
li = item
index = li.find('#')
if index > -1:
li = li[:index] + '<comment>' + li[index + 1:] + '</comment>'
temp[i] = li
arr = []
for i in temp:
temp_str = i#re.sub("\\s+(?=[^()]*\\))", "", i)
temp_arr = temp_str.split(' ')
for j, item in enumerate(temp_arr):
if j == len(temp_arr) - 1:
arr.append(item + '<n>')
else:
arr.append(item)
script_arr = []
script_arr.append('<begin_script>')
script_arr.append('<difficulty=' + str(cur_difficulty) + '>')
indent = 0
for i, item in enumerate(arr):
leading_spaces = len(item) - len(item.lstrip())
if leading_spaces > indent:
indent = leading_spaces
elif leading_spaces < indent:
indent = leading_spaces
for j in range(indent):
script_arr.append('<indent>')
if item.endswith('<n>'):
script_arr.append(item.strip())
else:
script_arr.append(item.strip() + '<s>')
script_arr.append('<end_script>')
code_str = ''.join(script_arr)
f = tkFileDialog.asksaveasfile(mode = 'w', defaultextension = ".xml")
if f is None:
return
f.write(code_str.encode('utf8'))
f.close()
tkMessageBox.showinfo(LAN.SAVED_TITLE, LAN.SAVED_CONTENT)
'''
Opens a code from the xml file
'''
def open_code():
global text, LAN, cur_difficulty
from tkFileDialog import askopenfilename
filename = askopenfilename()
content = None
if filename.strip() != '':
with open(filename) as f:
content = f.readlines()
f.close()
if content is not None:
parsed_code = parse_code(content)
if parsed_code is None:
tkMessageBox.showerror(LAN.GENERIC_ERROR, LAN.ERROR_PARSE)
return
text.delete('1.0', END)
text.insert(0.0, parsed_code)
syn()
if cur_difficulty == 1:
easy()
elif cur_difficulty == 2:
normal_easy()
elif cur_difficulty == 3:
normal()
elif cur_difficulty == 4:
normal_hard()
else:
advanced()
'''
Parses xml format to python drawing format
'''
def parse_code(arr):
global cur_difficulty
code_str = ''.join(arr)
temp = code_str.find('<difficulty=')
nu = temp + len('<difficulty=')
cur_difficulty = int(code_str[nu])
code_str = code_str[:temp] + code_str[nu + 2:]
start = code_str.find('<begin_script>')
end = code_str.find('<end_script>')
if start == 0 and end > 0:
code_str = code_str[len('<begin_script>'):end]
import string
code_str = string.replace(code_str, '<comment>', '#')
code_str = string.replace(code_str, '</comment>', '')
code_str = string.replace(code_str, '<s>', ' ')
code_str = string.replace(code_str, '<indent>', '\t')
code_str = string.replace(code_str, '<n>', '\n')
else:
return None
return code_str
'''
Changes the language of the program during the runtime
'''
def lan(param):
global LAN, help, menubar, editmenu, pref_menu, filemenu, levelmenu, cur_difficulty, text
code = text.get('1.0', END)
text.delete('1.0', END)
code = generate_code(code)
prev_lan = LAN
LAN = param
menubar.entryconfig(prev_lan.PREF, label = LAN.PREF)
pref_menu.entryconfig(prev_lan.EN, label = LAN.EN)
pref_menu.entryconfig(prev_lan.FI, label = LAN.FI)
menubar.entryconfig(prev_lan.EDIT, label = LAN.EDIT)
editmenu.entryconfig(prev_lan.COPY, label = LAN.COPY)
editmenu.entryconfig(prev_lan.PASTE, label = LAN.PASTE)
editmenu.entryconfig(prev_lan.CLEAR, label = LAN.CLEAR)
editmenu.entryconfig(prev_lan.PREV, label = LAN.PREV)
editmenu.entryconfig(prev_lan.NEXT, label = LAN.NEXT)
filemenu.entryconfig(prev_lan.SAVE, label = LAN.SAVE)
filemenu.entryconfig(prev_lan.OPEN, label = LAN.OPEN)
menubar.entryconfig(prev_lan.OS_X_FILE, label = LAN.OS_X_FILE)
filemenu.entryconfig(prev_lan.DRAW, label = LAN.DRAW)
filemenu.entryconfig(prev_lan.COLOR_MAPS, label = LAN.COLOR_MAPS)
filemenu.entryconfig(prev_lan.SHOW_CODE, label = LAN.SHOW_CODE)
menubar.entryconfig(prev_lan.COMMANDS, label = LAN.COMMANDS)
levelmenu.entryconfig(prev_lan.NOVICE, label = LAN.NOVICE)
levelmenu.entryconfig(prev_lan.ADVANCED_BEGINNER, label = LAN.ADVANCED_BEGINNER)
levelmenu.entryconfig(prev_lan.COMPETENT, label = LAN.COMPETENT)
levelmenu.entryconfig(prev_lan.PROFICIENT, label = LAN.PROFICIENT)
levelmenu.entryconfig(prev_lan.EXPERT, label = LAN.EXPERT)
if cur_difficulty == 1:
easy()
elif cur_difficulty == 2:
normal_easy()
elif cur_difficulty == 3:
normal()
elif cur_difficulty == 4:
normal_hard()
else:
advanced()
code = LAN.from_python_to_turtle(code)
text.insert(0.0, code)
syn()
'''
------------ Global variables ------------
'''
cur_difficulty = 1
prev_arr = []
cur_index = 0
root = Tk()
root.geometry(str(root.winfo_screenwidth()) + 'x' + str(root.winfo_screenheight()))
LAN = FI_LAN # USE THIS TO SET DEFAULT LANGUAGE LAN = EN_LAN FOR INSTANCE
root.title(LAN.TITLE_ROOT)
img = os.path.join('res', 'turtle.gif')
icon = PhotoImage(file = img)
canvas_compass = Canvas(root, width = 200, height = 100)
canvas_compass.pack(side = TOP)
canvas_compass.configure(background = 'white')
help = text = None
cv = turtle.Canvas(root)
cv.pack(expand = 1, fill = BOTH)
s = turtle.TurtleScreen(cv)
s.colormode(255)
s.addshape(img)
t = turtle.RawTurtle(s)
t.shape(img)
t.pensize(1)
t.width(1)
t.ondrag(t.goto)
display()
menubar = Menu(root)
editmenu = Menu(menubar, tearoff = 0)
editmenu.add_command(label = LAN.COPY, command = copy_code)
editmenu.add_command(label = LAN.PASTE, command = paste_code)
editmenu.add_command(label = LAN.CLEAR, command = remove_code)
editmenu.add_command(label = LAN.PREV, command = get_prev)
editmenu.add_command(label = LAN.NEXT, command = get_next)
pref_menu = Menu(menubar, tearoff = 0)
pref_menu.add_command(label = LAN.EN, command = lambda: lan(EN_LAN))
pref_menu.add_command(label = LAN.FI, command = lambda: lan(FI_LAN))
pref_menu.add_separator()
show_flow = IntVar()
pref_menu.add_checkbutton(label = LAN.SHOW_FLOW, onvalue = 1, offvalue = 0, variable = show_flow)
filemenu = Menu(menubar, tearoff = 0)
filemenu.add_command(label = LAN.DRAW, command = run_code)
filemenu.add_command(label = LAN.COLOR_MAPS, command = get_color)
filemenu.entryconfig(LAN.COLOR_MAPS, state = 'disabled')
filemenu.add_command(label = LAN.SHOW_CODE, command = append_python_code)
filemenu.add_separator()
filemenu.add_command(label = LAN.SAVE, command = save_code)
filemenu.add_command(label = LAN.OPEN, command = open_code)
menubar.add_cascade(label = LAN.OS_X_FILE, menu = filemenu)
menubar.add_cascade(label = LAN.EDIT, menu = editmenu)
menubar.add_cascade(label = LAN.PREF, menu = pref_menu)
levelmenu = Menu(menubar, tearoff = 0)
levelmenu.add_command(label = LAN.NOVICE, command = easy)
levelmenu.add_command(label = LAN.ADVANCED_BEGINNER, command = normal_easy)
levelmenu.add_command(label = LAN.COMPETENT, command = normal)
levelmenu.add_command(label = LAN.PROFICIENT, command = normal_hard)
levelmenu.add_command(label = LAN.EXPERT, command = advanced)
menubar.add_cascade(label = LAN.COMMANDS, menu = levelmenu)
root.config(menu = menubar)
root.protocol('WM_DELETE_WINDOW', on_close_root)
root.tk.call('wm', 'iconphoto', root._w, icon)
refresher()
root.bind("<Key>", syn)
root.mainloop()
|
|
#!/usr/bin/python2.5
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic Parser for Apiary API data.
This module contains code to take an arbitrary type of python data from an
Apiary API and convert it into entities in the datastore.
"""
__author__ = "[email protected] (Dwight Guth)"
import datetime
import logging
from google.appengine.api import urlfetch
from google.appengine.ext import db
import properties
class Parser(object):
"""Parses API data into App Engine datastore entities."""
_EXCLUDED_FIELDS = ("kind", "etag")
_RESERVED_WORDS = ("parent",)  # trailing comma makes this a one-element tuple, not a plain string
def __init__(self, entity_to_parse, parent_entity, snapshot, method, model,
date_type="friendly", index=False, **args):
"""Creates a new Parser object.
Args:
entity_to_parse: the subclass of db.Model to be created.
parent_entity: the entity to set as parent of the created entities,
or None if there is no parent.
snapshot: the entity representing the snapshot this data is for.
method: the method which is called to invoke the API.
model: a module containing the many_many_mapping and child_mapping
dictionaries necessary for parsing.
date_type: "friendly" for strings of the format "%Y-%m-%dT%H:%M:%S",
"timestamp" for unix timestamps with milliseconds. Default is
"friendly".
index: False for token-based paging, True for index-based paging. Default
is False.
args: keyword parameters to pass to the method that invokes the API.
"""
self.entity_to_parse = entity_to_parse
self.parent_entity = parent_entity
self.snapshot = snapshot
self.method = method
self.model = model
self.date_type = date_type
self.index = index
self.args = args
def ParseAndStore(self, api_data):
"""Parses the provided data and stores the resulting entities.
This method automatically pages through all results by reinvoking
the API on each successive page.
Args:
api_data: a Python dict or list returned by the Apiary API.
Returns:
The list of entities created by parsing api_data.
"""
if ("items" not in api_data and "entry" not in api_data and not
isinstance(api_data, list)):
# top level is a record itself
if not [item for item in api_data if item != "kind" and item != "etag"]:
return []
return self.ParseItem(api_data, self.entity_to_parse, self.parent_entity)
if self.index:
return self.ParseIndexPaging(api_data)
else:
return self.ParseTokenPaging(api_data)
def ParseTokenPaging(self, api_data):
"""Parses the provided data and stores the resulting entities.
This method automatically uses token-based paging to page through all the
results by reinvoking the API on each successive page.
Args:
api_data: a Python dict returned by the Apiary API.
Returns:
The list of entities created by parsing api_data.
"""
results = []
while "nextPageToken" in api_data:
results += self.ParsePage(api_data)
args = self.args.copy()
args["pageToken"] = api_data["nextPageToken"]
api_data = self.method(**args).execute()
results += self.ParsePage(api_data)
return results
def ParseIndexPaging(self, api_data):
"""Parses the provided data and stores the resulting entities.
This method automatically uses index-based paging to page through all the
results by reinvoking the API on each successive page.
Args:
api_data: a Python dict returned by the Apiary API.
Returns:
The list of entities created by parsing api_data.
"""
results = []
start_index = 0
while api_data:
next_page = self.ParsePage(api_data)
results += next_page
start_index += len(next_page)
args = self.args.copy()
args["start_index"] = start_index
api_data = self.method(**args).execute()
return results
def ParsePage(self, api_data):
"""Parses a single page of API data and stores the resulting entities.
Args:
api_data: a Python dict returned by the Apiary API.
Returns:
The list of entities created from that page of data.
"""
page = []
if "items" in api_data:
l = api_data["items"]
elif "entry" in api_data:
l = api_data["entry"]
elif isinstance(api_data, list):
l = api_data
else:
# page is empty
l = []
for item in l:
page.append(self.ParseItem(item, self.entity_to_parse,
self.parent_entity))
return page
def ParseItem(self, item, entity_to_parse, parent_entity):
"""Parses a single item of API data and stores the resulting entity.
Args:
item: a Python dict representing a single item of data.
entity_to_parse: the type of entity being created.
parent_entity: the value to set the entity's parent_entity property to.
Raises:
ValueError: if an unknown property is found in the results.
Returns:
The entity created by parsing item.
"""
if "id" in item:
model_obj = entity_to_parse(parent=self.snapshot,
key_name=str(item["id"]))
else:
logging.warning("no id: %s" % item)
model_obj = entity_to_parse(parent=self.snapshot)
model_obj.put()
if parent_entity:
model_obj.parent_entity = parent_entity
props = model_obj.properties()
for key, value in item.items():
if key not in Parser._EXCLUDED_FIELDS:
prop_name = Parser.ApiToModel(key)
if (entity_to_parse, key) in self.model.child_mapping:
# use a distinct name so the inner loop does not shadow the `item` parameter
for child in value:
self.ParseItem(child, self.model.child_mapping[entity_to_parse, key],
model_obj)
elif (isinstance(props[prop_name], db.StringProperty) or
isinstance(props[prop_name], db.TextProperty) or
isinstance(props[prop_name], db.BooleanProperty) or
isinstance(props[prop_name], db.IntegerProperty)):
setattr(model_obj, prop_name, value)
elif isinstance(props[prop_name], db.FloatProperty):
setattr(model_obj, prop_name, float(value))
elif isinstance(props[prop_name], db.LinkProperty):
link = db.Link(value)
setattr(model_obj, prop_name, link)
elif isinstance(props[prop_name], db.PhoneNumberProperty):
pn = db.PhoneNumber(value)
setattr(model_obj, prop_name, pn)
elif isinstance(props[prop_name], db.BlobProperty):
blob = db.Blob(urlfetch.fetch(value).content)
setattr(model_obj, prop_name, blob)
elif isinstance(props[prop_name], db.DateProperty):
# The elif clause for DateProperty must come ABOVE the elif clause for
# DateTimeProperty because DateProperty is a subclass of
# DateTimeProperty. If we ever add a TimeProperty we will need it
# to be above DateTimeProperty as well.
d = datetime.datetime.strptime(value, "%Y-%m-%dT00:00:00.000Z").date()
setattr(model_obj, prop_name, d)
elif isinstance(props[prop_name], db.DateTimeProperty):
if self.date_type == "friendly":
part1, part2 = value.split(".")
dt = datetime.datetime.strptime(part1, "%Y-%m-%dT%H:%M:%S")
dt = dt.replace(microsecond=int(part2[0:3])*1000)
elif self.date_type == "timestamp":
part1 = value[:-3]
part2 = value[-3:]
dt = datetime.datetime.fromtimestamp(long(part1))
dt = dt.replace(microsecond=int(part2)*1000)
else:
raise ValueError("Not a valid date_type: %s" % self.date_type)
setattr(model_obj, prop_name, dt)
elif isinstance(props[prop_name], db.ReferenceProperty):
key_obj = db.Key.from_path(
self.snapshot.kind(), self.snapshot.key().id(),
props[prop_name].reference_class.kind(), value)
setattr(model_obj, prop_name, key_obj)
elif isinstance(props[prop_name], db.ListProperty):
if props[prop_name].item_type == db.Key:
key_objs = []
for key_obj in value:
key_objs.append(
db.Key.from_path(
self.snapshot.kind(), self.snapshot.key().id(),
self.model.many_many_mapping[entity_to_parse,
key].__name__, key_obj))
setattr(model_obj, prop_name, key_objs)
else:
setattr(model_obj, prop_name, value)
elif isinstance(props[prop_name], properties.TimeDeltaProperty):
milliseconds = long(value)
dt = datetime.timedelta(seconds=milliseconds / 1000,
milliseconds=milliseconds % 1000)
setattr(model_obj, prop_name, dt)
elif isinstance(props[prop_name], properties.DictProperty):
setattr(model_obj, prop_name, value)
else:
raise ValueError("Could not parse property %s.\n"
"Value: %s" % (key, value))
model_obj.put()
return model_obj
@staticmethod
def ApiToModel(key):
"""Converts an API property name to a Model property name.
Args:
key: the name of the property in the API results.
Returns:
The name of the same property in the datastore model.
"""
if key in Parser._RESERVED_WORDS:
return key + "_"
return key
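# Illustrative examples (not part of the original source):
#   Parser.ApiToModel("parent")   # -> "parent_", avoiding the reserved datastore name
#   Parser.ApiToModel("title")    # -> "title"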
|
|
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import weakref
import semantic_version
import six
from yaql.language import utils
from murano.dsl import constants
from murano.dsl import dsl
from murano.dsl import dsl_types
from murano.dsl import exceptions
from murano.dsl import helpers
from murano.dsl import murano_method
from murano.dsl import murano_object
from murano.dsl import murano_property
from murano.dsl import namespace_resolver
from murano.dsl import yaql_integration
class MuranoClass(dsl_types.MuranoClass):
def __init__(self, ns_resolver, name, package, parents=None):
self._package = weakref.ref(package)
self._methods = {}
self._namespace_resolver = ns_resolver
self._name = name
self._properties = {}
self._config = {}
if self._name == constants.CORE_LIBRARY_OBJECT:
self._parents = []
else:
self._parents = parents or [
package.find_class(constants.CORE_LIBRARY_OBJECT)]
self._context = None
self._parent_mappings = self._build_parent_remappings()
self._property_values = {}
@classmethod
def create(cls, data, package, name=None):
namespaces = data.get('Namespaces') or {}
ns_resolver = namespace_resolver.NamespaceResolver(namespaces)
if not name:
name = ns_resolver.resolve_name(str(data['Name']))
parent_class_names = data.get('Extends')
parent_classes = []
if parent_class_names:
if not utils.is_sequence(parent_class_names):
parent_class_names = [parent_class_names]
for parent_name in parent_class_names:
full_name = ns_resolver.resolve_name(str(parent_name))
parent_classes.append(package.find_class(full_name))
type_obj = cls(ns_resolver, name, package, parent_classes)
properties = data.get('Properties') or {}
for property_name, property_spec in six.iteritems(properties):
spec = murano_property.MuranoProperty(
type_obj, property_name, property_spec)
type_obj.add_property(property_name, spec)
methods = data.get('Methods') or data.get('Workflow') or {}
method_mappings = {
'initialize': '.init',
'destroy': '.destroy'
}
for method_name, payload in six.iteritems(methods):
type_obj.add_method(
method_mappings.get(method_name, method_name), payload)
return type_obj
@property
def name(self):
return self._name
@property
def package(self):
return self._package()
@property
def namespace_resolver(self):
return self._namespace_resolver
@property
def declared_parents(self):
return self._parents
@property
def methods(self):
return self._methods
@property
def all_method_names(self):
names = set(self.methods.keys())
for c in self.ancestors():
names.update(c.methods.keys())
return tuple(names)
@property
def parent_mappings(self):
return self._parent_mappings
def extend_with_class(self, cls):
ctor = yaql_integration.get_class_factory_definition(cls, self)
self.add_method('__init__', ctor)
def add_method(self, name, payload):
method = murano_method.MuranoMethod(self, name, payload)
self._methods[name] = method
self._context = None
return method
@property
def properties(self):
return self._properties
@property
def all_property_names(self):
names = set(self.properties.keys())
for c in self.ancestors():
names.update(c.properties.keys())
return tuple(names)
def add_property(self, name, property_typespec):
if not isinstance(property_typespec, murano_property.MuranoProperty):
raise TypeError('property_typespec')
self._properties[name] = property_typespec
def _find_symbol_chains(self, func, origin):
queue = collections.deque([(self, ())])
while queue:
cls, path = queue.popleft()
symbol = func(cls)
segment = (symbol,) if symbol is not None else ()
leaf = True
for p in cls.parents(origin):
leaf = False
queue.append((p, path + segment))
if leaf:
path = path + segment
if path:
yield path
def _choose_symbol(self, func):
chains = sorted(
self._find_symbol_chains(func, self),
key=lambda t: len(t))
result = []
for i in range(len(chains)):
if chains[i][0] in result:
continue
add = True
for j in range(i + 1, len(chains)):
common = 0
if not add:
break
for p in range(len(chains[i])):
if chains[i][-p - 1] is chains[j][-p - 1]:
common += 1
else:
break
if common == len(chains[i]):
add = False
break
if add:
result.append(chains[i][0])
return result
def find_method(self, name):
return self._choose_symbol(lambda cls: cls.methods.get(name))
def find_property(self, name):
return self._choose_symbol(
lambda cls: cls.properties.get(name))
def find_static_property(self, name):
def prop_func(cls):
prop = cls.properties.get(name)
if prop is not None and prop.usage == 'Static':
return prop
result = self._choose_symbol(prop_func)
if len(result) < 1:
raise exceptions.NoPropertyFound(name)
elif len(result) > 1:
raise exceptions.AmbiguousPropertyNameError(name)
return result[0]
def find_single_method(self, name):
result = self.find_method(name)
if len(result) < 1:
raise exceptions.NoMethodFound(name)
elif len(result) > 1:
raise exceptions.AmbiguousMethodName(name)
return result[0]
def find_methods(self, predicate):
result = list(filter(predicate, self.methods.values()))
for c in self.ancestors():
for method in six.itervalues(c.methods):
if predicate(method) and method not in result:
result.append(method)
return result
def find_properties(self, predicate):
result = list(filter(predicate, self.properties.values()))
for c in self.ancestors():
for prop in c.properties.values():
if predicate(prop) and prop not in result:
result.append(prop)
return result
def _iterate_unique_methods(self):
for name in self.all_method_names:
try:
yield self.find_single_method(name)
except exceptions.AmbiguousMethodName as e:
def func(*args, **kwargs):
raise e
yield murano_method.MuranoMethod(self, name, func)
def find_single_property(self, name):
result = self.find_property(name)
if len(result) < 1:
raise exceptions.NoPropertyFound(name)
elif len(result) > 1:
raise exceptions.AmbiguousPropertyNameError(name)
return result[0]
def invoke(self, name, executor, this, args, kwargs, context=None):
method = self.find_single_method(name)
return method.invoke(executor, this, args, kwargs, context)
def is_compatible(self, obj):
if isinstance(obj, (murano_object.MuranoObject,
dsl.MuranoObjectInterface)):
obj = obj.type
if obj is self:
return True
return any(cls is self for cls in obj.ancestors())
def new(self, owner, object_store, executor, **kwargs):
obj = murano_object.MuranoObject(
self, owner, object_store, executor, **kwargs)
def initializer(__context, **params):
if __context is None:
__context = executor.create_object_context(obj)
init_context = __context.create_child_context()
init_context[constants.CTX_ALLOW_PROPERTY_WRITES] = True
obj.initialize(init_context, object_store, params)
return obj
initializer.object = obj
return initializer
def __repr__(self):
return 'MuranoClass({0}/{1})'.format(self.name, self.version)
@property
def version(self):
return self.package.version
def _build_parent_remappings(self):
"""Remaps class parents.
In case of multiple inheritance class may indirectly get several
versions of the same class. It is reasonable to try to replace them
with single version to avoid conflicts. We can do that when within
versions that satisfy our class package requirements.
But in order to merge several classes that are not our parents but
grand parents we will need to modify classes that may be used
somewhere else (with another set of requirements). We cannot do this.
So instead we build translation table that will tell which ancestor
class need to be replaced with which so that we minimize number of
versions used for single class (or technically packages since version
is a package attribute). For translation table to work there should
be a method that returns all class virtual ancestors so that everybody
will see them instead of accessing class parents directly and getting
declared ancestors.
"""
result = {}
aggregation = {
self.package.name: {(
self.package,
semantic_version.Spec('==' + str(self.package.version))
)}
}
for cls, parent in helpers.traverse(
((self, parent) for parent in self._parents),
lambda (c, p): ((p, anc) for anc in p.declared_parents)):
if cls.package != parent.package:
requirement = cls.package.requirements[parent.package.name]
aggregation.setdefault(parent.package.name, set()).add(
(parent.package, requirement))
package_bindings = {}
for versions in six.itervalues(aggregation):
mappings = self._remap_package(versions)
package_bindings.update(mappings)
for cls in helpers.traverse(
self.declared_parents, lambda c: c.declared_parents):
if cls.package in package_bindings:
package2 = package_bindings[cls.package]
cls2 = package2.classes[cls.name]
result[cls] = cls2
return result
@staticmethod
def _remap_package(versions):
result = {}
reverse_mappings = {}
versions_list = sorted(versions, key=lambda x: x[0].version)
i = 0
while i < len(versions_list):
package1, requirement1 = versions_list[i]
dst_package = None
for j, (package2, _) in enumerate(versions_list):
if i == j:
continue
if package2.version in requirement1 and (
dst_package is None or
dst_package.version < package2.version):
dst_package = package2
if dst_package:
result[package1] = dst_package
reverse_mappings.setdefault(dst_package, []).append(package1)
for package in reverse_mappings.get(package1, []):
result[package] = dst_package
del versions_list[i]
else:
i += 1
return result
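# Illustrative example (hypothetical packages, not part of the original
# source): for versions_list [(pkg_a 1.0, Spec('>=1.0')), (pkg_a 1.2,
# Spec('==1.2'))], pkg_a 1.0 is remapped to pkg_a 1.2 because version 1.2
# satisfies '>=1.0', so the result is {pkg_a 1.0: pkg_a 1.2}.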
def parents(self, origin):
mappings = origin.parent_mappings
yielded = set()
for p in self._parents:
parent = mappings.get(p, p)
if parent not in yielded:
yielded.add(parent)
yield parent
def ancestors(self):
for c in helpers.traverse(self, lambda t: t.parents(self)):
if c is not self:
yield c
@property
def context(self):
if not self._context:
self._context = yaql_integration.create_empty_context()
for m in self._iterate_unique_methods():
self._context.register_function(
m.yaql_function_definition,
name=m.yaql_function_definition.name)
return self._context
def get_property(self, name, context):
prop = self.find_static_property(name)
cls = prop.murano_class
value = cls._property_values.get(name, prop.default)
return prop.validate(value, cls, None, context)
def set_property(self, name, value, context):
prop = self.find_static_property(name)
cls = prop.murano_class
cls._property_values[name] = prop.validate(value, cls, None, context)
def get_reference(self):
return dsl_types.MuranoTypeReference(self)
|
|
import re, sys
from stackexchange.core import *
LaterClass = lambda name: LaterClassIn(name, sys.modules[__name__])
#### Revisions #
class RevisionType(Enumeration):
SingleUser = 'single_user'
VoteBased = 'vote_based'
class PostRevision(JSONModel):
transfer = ('body', 'comment', 'is_question', 'is_rollback', 'last_body',
'last_title', 'revision_guid', 'revision_number', 'title',
'set_community_wiki', 'post_id', 'last_tags', 'tags',
('creation_date', UNIXTimestamp),
('revision_type', RevisionType.from_string))
def _extend(self, json, site):
part = json.user
self.user = User.partial(lambda self: self.site.user(self.id), site, {
'id': part['user_id'],
'user_type': Enumeration.from_string(part['user_type'], UserType),
'display_name': part['display_name'],
'reputation': part['reputation'],
'profile_image': part['profile_image']
})
def _get_post(self):
if self.is_question:
return self.site.question(self.post_id)
else:
return self.site.answer(self.post_id)
post = property(_get_post)
# The SE API seems quite inconsistent in this regard; the other methods give a post_type in their JSON
def _get_post_type(self):
return PostType.Question if self.is_question else PostType.Answer
post_type = property(_get_post_type)
def __repr__(self):
return '<Revision %d of %s%d>' % (self.revision_number, 'Q' if self.is_question else 'A', self.post_id)
class PostType(Enumeration):
"""Denotes the type of a post: a question or an answer."""
Question, Answer = 'question', 'answer'
## Timeline ##
class TimelineEventType(Enumeration):
"""Denotes the type of a timeline event."""
_map = {'askoranswered': 'AskOrAnswered'}
Comment = 'comment'
AskOrAnswered = 'askoranswered'
Badge = 'badge'
Revision = 'revision'
Accepted = 'accepted'
class TimelineEvent(JSONModel):
transfer = ('user_id', 'post_id', 'comment_id', 'action', 'description',
                'detail',
('timeline_type', TimelineEventType.from_string),
('post_type', PostType.from_string),
('creation_date', UNIXTimestamp))
_post_related = (TimelineEventType.AskOrAnswered, TimelineEventType.Revision, TimelineEventType.Comment)
def _get_post(self):
if self.timeline_type in self._post_related:
if self.post_type == PostType.Question:
return self.site.question(self.post_id)
else:
return self.site.answer(self.post_id)
else:
return None
def _get_comment(self):
if self.timeline_type == TimelineEventType.Comment:
return self.site.comment(self.comment_id)
else:
return None
def _get_badge(self):
if self.timeline_type == TimelineEventType.Badge:
return self.site.badge(name = self.description)
else:
return None
post = property(_get_post)
comment = property(_get_comment)
badge = property(_get_badge)
##### Content Types ###
class Comment(JSONModel):
"""Describes a comment to a question or answer on a StackExchange site."""
transfer = ('post_id', 'score', 'edit_count', 'body',
('creation_date', UNIXTimestamp), ('post_type', PostType.from_string))
def _extend(self, json, site):
self.id = json.comment_id
if hasattr(json, 'owner'):
self.owner_id = json.owner['owner_id'] if 'owner_id' in json.owner else json.owner['user_id']
self.owner = User.partial(lambda self: self.site.user(self.id), site, {
'id': self.owner_id,
'user_type': Enumeration.from_string(json.owner['user_type'], UserType),
'display_name': json.owner['display_name'],
'reputation': json.owner['reputation'],
'profile_image': json.owner['profile_image']})
else:
self.owner = None
if hasattr(json, 'reply_to'):
self.reply_to_user_id = json.reply_to['user_id']
self.reply_to = User.partial(lambda self: self.site.user(self.id), site, {
'id': self.reply_to_user_id,
'user_type': Enumeration.from_string(json.reply_to['user_type'], UserType),
'display_name': json.reply_to['display_name'],
'reputation': json.reply_to['reputation'],
'profile_image': json.reply_to['profile_image']})
@property
def post(self):
if self.post_type == PostType.Question:
return self.site.question(self.post_id)
elif self.post_type == PostType.Answer:
return self.site.answer(self.post_id)
else:
return None
def __unicode__(self):
return u'Comment ' + str(self.id)
def __str__(self):
return str(unicode(self))
class Answer(JSONModel):
"""Describes an answer on a StackExchange site."""
transfer = ('is_accepted', 'locked_date', 'question_id', 'up_vote_count',
'down_vote_count', 'view_count', 'score', 'community_owned', 'title',
'body', 'body_markdown', ('creation_date', UNIXTimestamp),
('last_edit_date', UNIXTimestamp),
('last_activity_date', UNIXTimestamp),
('revisions', LazySequenceField(PostRevision, 'posts/{id}/revisions')))
alias = (('id', 'answer_id'), ('accepted', 'is_accepted'))
def _extend(self, json, site):
if not hasattr(json, '_params_'):
comment = False
else:
comment = ('comment' in json._params_ and json._params_['comment'])
answer_comments_url = 'answers/%d/comments' % self.id
self.comments = site.build_from_snippet(json.comments, Comment) if comment else StackExchangeLazySequence(Comment, None, site, answer_comments_url, self._up('comments'), filter = '!-*7AsUyrEan0')
self._question, self._owner = None, None
if hasattr(json, 'owner'):
self.owner_id = json.owner.get('user_id')
self.owner_info = tuple(json.owner.values())
if hasattr(self, 'up_vote_count') and hasattr(self, 'down_vote_count'):
self.votes = (self.up_vote_count, self.down_vote_count)
self.url = 'http://' + self.site.root_domain + '/questions/' + str(self.question_id) + '/' + str(self.id) + '#' + str(self.id)
def _get_user(self, id):
if self._owner is None:
self._owner = self.site.user(id)
return self._owner
def _set_user(self, ob):
self._owner = ob
def _get_quest(self, id):
if self._question is None:
self._question = self.site.question(id)
return self._question
def _set_quest(self, ob):
self._question = ob
question = property(_get_quest, _set_quest)
owner = property(_get_user, _set_user)
def fetch_callback(self, _, site):
return site.answer(self.id)
def __unicode__(self):
return u'Answer %d' % self.id
def __str__(self):
return str(unicode(self))
def __repr__(self):
return '<Answer %d @ %x>' % (self.id, id(self))
class Question(JSONModel):
"""Describes a question on a StackExchange site."""
transfer = ('tags', 'favorite_count', 'up_vote_count', 'down_vote_count',
'view_count', 'score', 'community_owned', 'title', 'body',
'body_markdown',
('creation_date', UNIXTimestamp),
('timeline', LazySequenceField(TimelineEvent, 'questions/{id}/timeline')),
('revisions', LazySequenceField(PostRevision, 'posts/{id}/revisions')),
('comments', LazySequenceField(Comment, 'questions/{id}/comments', filter = '!-*7AsUyrEan0')),
('answers', ListOf(ModelRef(Answer))))
alias = (('id', 'question_id'),)
def _extend(self, json, site):
if hasattr(json, 'owner') and 'user_id' in json.owner:
self.owner_id = json.owner['user_id']
owner_dict = dict(json.owner)
owner_dict['id'] = self.owner_id
del owner_dict['user_id']
owner_dict['user_type'] = UserType.from_string(owner_dict['user_type'])
self.owner = User.partial(lambda self: self.site.user(self.id), site, owner_dict)
self.url = 'http://' + self.site.root_domain + '/questions/' + str(self.id)
def fetch_callback(self, _, site):
return site.question(self.id)
def linked(self):
return self.site.questions(linked_to = self.id)
def related(self):
return self.site.questions(related_to = self.id)
def __repr__(self):
return "<Question '%s' @ %x>" % (self.title, id(self))
##### Tags #####
class TagSynonym(JSONModel):
transfer = ('from_tag', 'to_tag', 'applied_count',
('creation_date', UNIXTimestamp),
('last_applied_date', UNIXTimestamp))
def __repr__(self):
return "<TagSynonym '%s'->'%s'>" % (self.from_tag, self.to_tag)
class TagWiki(JSONModel):
transfer = ('tag_name', 'body', 'excerpt',
('body_last_edit_date', UNIXTimestamp),
('excerpt_last_edit_date', UNIXTimestamp))
def _extend(self, json, site):
if hasattr(json, 'last_body_editor'):
body_editor = dict(json.last_body_editor)
body_editor['id'] = body_editor['user_id']
del body_editor['user_id']
self.last_body_editor = User.partial(lambda s: s.site.user(self.id), site, body_editor)
if hasattr(json, 'last_excerpt_editor'):
excerpt_editor = dict(json.last_excerpt_editor)
excerpt_editor['id'] = excerpt_editor['user_id']
del excerpt_editor['user_id']
self.last_excerpt_editor = User.partial(lambda s: s.site.user(self.id), site, excerpt_editor)
class Period(Enumeration):
AllTime, Month = 'all-time', 'month'
class TopUser(JSONModel):
transfer = ('score', 'post_count')
def _extend(self, json, site):
user_dict = dict(json.user)
user_dict['id'] = user_dict['user_id']
del user_dict['user_id']
self.user = User.partial(lambda self: self.site.user(self.id), site, user_dict)
def __repr__(self):
return "<TopUser '%s' (score %d)>" % (self.user.display_name, self.score)
class Tag(JSONModel):
transfer = ('name', 'count', 'fulfills_required')
# Hack so that Site.vectorise() works correctly
id = property(lambda self: self.name)
def _extend(self, json, site):
self.synonyms = StackExchangeLazySequence(TagSynonym, None, site, 'tags/%s/synonyms' % self.name, self._up('synonyms'), 'tag_synonyms')
self.wiki = StackExchangeLazyObject(TagWiki, site, 'tags/%s/wikis' % self.name, self._up('wiki'), 'tag_wikis')
def top_askers(self, period, **kw):
return self.site.build('tags/%s/top-askers/%s' % (self.name, period), TopUser, 'top_users', kw)
def top_answerers(self, period, **kw):
return self.site.build('tags/%s/top-answerers/%s' % (self.name, period), TopUser, 'top_users', kw)
class RepChange(JSONModel):
"""Describes an event which causes a change in reputation."""
transfer = ('user_id', 'post_id', 'post_type', 'title', 'positive_rep',
'negative_rep', ('on_date', UNIXTimestamp))
def _extend(self, json, site):
if hasattr(json, 'positive_rep') and hasattr(json, 'negative_rep'):
self.score = json.positive_rep - json.negative_rep
class UserType(Enumeration):
"""Denotes the status of a user on a site: whether it is Anonymous, Unregistered, Registered or a Moderator."""
Anonymous = 'anonymous'
Registered = 'registered'
Unregistered = 'unregistered'
Moderator = 'moderator'
class FormattedReputation(int):
def format(rep):
"""Formats the reputation score like it is formatted on the sites. Heavily based on CMS' JavaScript implementation at
http://stackapps.com/questions/1012/how-to-format-reputation-numbers-similar-to-stack-exchange-sites/1019#1019"""
str_rep = str(rep)
if rep < 1000:
return str_rep
elif rep < 10000:
return '%s,%s' % (str_rep[0], str_rep[1:])
elif rep % 1000 == 0:
return '%dk' % (rep / 1000.0)
else:
return '%.1fk' % (rep / 1000.0)
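    # Example (editor's sketch): FormattedReputation subclasses int, and format()
    # receives the instance itself as `rep`, so:
    #
    #   FormattedReputation(999).format()     # -> '999'
    #   FormattedReputation(9999).format()    # -> '9,999'
    #   FormattedReputation(10000).format()   # -> '10k'
    #   FormattedReputation(10500).format()   # -> '10.5k'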
class TopTag(JSONModel):
transfer = ('tag_name', 'question_score', 'question_count', 'answer_score', 'answer_count')
def __repr__(self):
return "<TopTag '%s' Q:%d A:%d>" % (self.tag_name, self.question_score, self.answer_score)
class User(JSONModel):
"""Describes a user on a StackExchange site."""
transfer = ('display_name', 'profile_image', 'age', 'website_url',
'location', 'about_me', 'view_count', 'up_vote_count',
                'down_vote_count', 'account_id',
('creation_date', UNIXTimestamp),
('last_access_date', UNIXTimestamp),
('reputation', FormattedReputation),
('favorites', LazySequenceField(Question, 'users/{id}/favorites', response_key = 'questions')),
('no_answers_questions', LazySequenceField(Question, 'users/{id}/questions/no-answers', response_key = 'questions')),
('unanswered_questions', LazySequenceField(Question, 'users/{id}/questions/unanswered', response_key = 'questions')),
('unaccepted_questions', LazySequenceField(Question, 'users/{id}/questions/unaccepted', response_key = 'questions')),
('tags', LazySequenceField(Tag, 'users/{id}/tags')),
('badges', LazySequenceField(LaterClass('Badge'), 'users/{id}/badges')),
('timeline', LazySequenceField(TimelineEvent, 'users/{id}/timeline', response_key = 'user_timelines')),
('reputation_detail', LazySequenceField(RepChange, 'users/{id}/reputation')),
('mentioned', LazySequenceField(Comment, 'users/{id}/mentioned', response_key = 'comments')),
('comments', LazySequenceField(Comment, 'users/{id}/comments')),
('top_answer_tags', LazySequenceField(TopTag, 'users/{id}/top-answer-tags', response_key = 'top_tags')),
('top_question_tags', LazySequenceField(TopTag, 'users/{id}/top-question-tags', response_key = 'top_tags')),
)
# for compatibility reasons; association_id changed in v2.x
alias = (('id', 'user_id'), ('association_id', 'account_id'),
('type', 'user_type', UserType.from_string))
badge_types = ('gold', 'silver', 'bronze')
def _extend(self, json, site):
user_questions_url = 'users/%d/questions' % self.id
question_count = getattr(json, 'question_count', None)
self.questions = StackExchangeLazySequence(Question, question_count, site, user_questions_url, self._up('questions'))
user_answers_url = 'users/%d/answers' % self.id
answer_count = getattr(json, 'answer_count', None)
self.answers = StackExchangeLazySequence(Answer, answer_count, site, user_answers_url, self._up('answers'))
if hasattr(self, 'up_vote_count') and hasattr(self, 'down_vote_count'):
self.vote_counts = (self.up_vote_count, self.down_vote_count)
if hasattr(json, 'badge_counts'):
self.badge_counts_t = tuple(json.badge_counts.get(c, 0) for c in ('gold', 'silver', 'bronze'))
self.gold_badges, self.silver_badges, self.bronze_badges = self.badge_counts_t
self.badge_counts = {
BadgeType.Gold: self.gold_badges,
BadgeType.Silver: self.silver_badges,
BadgeType.Bronze: self.bronze_badges
}
self.badge_total = sum(self.badge_counts_t)
if hasattr(self, 'type'):
self.is_moderator = self.type == UserType.Moderator
self.url = 'http://' + self.site.root_domain + '/users/' + str(self.id)
def has_privilege(self, privilege):
return self.reputation >= privilege.reputation
def _get_real_tag(self, tag):
return tag.name if isinstance(tag, Tag) else tag
def top_answers_in_tag(self, tag, **kw):
return self.site.build('users/%d/tags/%s/top-answers' % (self.id, self._get_real_tag(tag)), Answer, 'answers', kw)
def top_questions_in_tag(self, tag, **kw):
return self.site.build('users/%d/tags/%s/top-questions' % (self.id, self._get_real_tag(tag)), Question, 'questions', kw)
def comments_to(self, user, **kw):
uid = user.id if isinstance(user, User) else user
return self.site.build('users/%d/comments/%d' % (self.id, uid), Comment, 'comments' ,kw)
def __unicode__(self):
return 'User %d [%s]' % (self.id, self.display_name)
def __str__(self):
return str(unicode(self))
def __repr__(self):
return "<User '%s' (%d) @ %x>" % (self.display_name, self.id, id(self))
class BadgeType(Enumeration):
"""Describes the rank or type of a badge: one of Bronze, Silver or Gold."""
Bronze, Silver, Gold = range(3)
class Badge(JSONModel):
"""Describes a badge awardable on a StackExchange site."""
transfer = ('name', 'description', 'award_count', 'tag_based',
('user', PartialModelRef(User, lambda s: s.site.user(s.id), extend = True)))
alias = (('id', 'badge_id'),)
@property
def recipients(self):
for badge in self.site.badge_recipients([self.id]):
yield badge.user
def __str__(self):
return self.name
def __repr__(self):
return '<Badge \'%s\' @ %x>' % (self.name, id(self))
class Privilege(JSONModel):
transfer = ('short_description', 'description', 'reputation')
class QuestionsQuery(object):
def __init__(self, site):
self.site = site
def __call__(self, ids = None, user_id = None, **kw):
self.site.check_filter(kw)
# Compatibility hack, as user_id= was in versions below v1.1
if ids is None and user_id is not None:
return self.by_user(user_id, **kw)
elif ids is None and user_id is None:
return self.site.build('questions', Question, 'questions', kw)
else:
return self.site._get(Question, ids, 'questions', kw)
def linked_to(self, qn, **kw):
self.site.check_filter(kw)
url = 'questions/%s/linked' % self.site.vectorise(qn, Question)
return self.site.build(url, Question, 'questions', kw)
def related_to(self, qn, **kw):
self.site.check_filter(kw)
url = 'questions/%s/related' % self.site.vectorise(qn, Question)
return self.site.build(url, Question, 'questions', kw)
def by_user(self, usr, **kw):
self.site.check_filter(kw)
kw['user_id'] = usr
return self.site._user_prop('questions', Question, 'questions', kw)
def unanswered(self, by = None, **kw):
self.site.check_filter(kw)
if by is None:
return self.site.build('questions/unanswered', Question, 'questions', kw)
else:
kw['user_id'] = by
return self.site._user_prop('questions/unanswered', Question, 'questions', kw)
def no_answers(self, by = None, **kw):
self.site.check_filter(kw)
if by is None:
return self.site.build('questions/no-answers', Question, 'questions', kw)
else:
kw['user_id'] = by
return self.site._user_prop('questions/no-answers', Question, 'questions', kw)
def unaccepted(self, by, **kw):
self.site.check_filter(kw)
kw['user_id'] = by
        return self.site._user_prop('questions/unaccepted', Question, 'questions', kw)
def favorited_by(self, by, **kw):
self.site.check_filter(kw)
kw['user_id'] = by
return self.site._user_prop('favorites', Question, 'questions', kw)
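# Example usage (editor's sketch, not part of this module): these models are
# normally reached through a Site object from the rest of the py-stackexchange
# package; the question id below is purely illustrative, and the available
# fields depend on the API filter in use.
#
#   import stackexchange
#   so = stackexchange.Site(stackexchange.StackOverflow)
#   q = so.question(11227809)            # -> Question
#   print(q.title, q.score)
#   for a in q.answers:                  # -> Answer models
#       print(a.score, a.id)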
|
|
# Copyright Anne M. Archibald 2008
# Released under the scipy license
from __future__ import division, print_function, absolute_import
import sys
import numpy as np
from heapq import heappush, heappop
import scipy.sparse
__all__ = ['minkowski_distance_p', 'minkowski_distance',
'distance_matrix',
'Rectangle', 'KDTree']
def minkowski_distance_p(x, y, p=2):
"""
Compute the p-th power of the L**p distance between two arrays.
For efficiency, this function computes the L**p distance but does
not extract the pth root. If `p` is 1 or infinity, this is equal to
the actual L**p distance.
Parameters
----------
x : (M, K) array_like
Input array.
y : (N, K) array_like
Input array.
p : float, 1 <= p <= infinity
Which Minkowski p-norm to use.
Examples
--------
>>> from scipy.spatial import minkowski_distance_p
>>> minkowski_distance_p([[0,0],[0,0]], [[1,1],[0,1]])
array([2, 1])
"""
x = np.asarray(x)
y = np.asarray(y)
if p == np.inf:
return np.amax(np.abs(y-x), axis=-1)
elif p == 1:
return np.sum(np.abs(y-x), axis=-1)
else:
return np.sum(np.abs(y-x)**p, axis=-1)
def minkowski_distance(x, y, p=2):
"""
Compute the L**p distance between two arrays.
Parameters
----------
x : (M, K) array_like
Input array.
y : (N, K) array_like
Input array.
p : float, 1 <= p <= infinity
Which Minkowski p-norm to use.
Examples
--------
>>> from scipy.spatial import minkowski_distance
>>> minkowski_distance([[0,0],[0,0]], [[1,1],[0,1]])
array([ 1.41421356, 1. ])
"""
x = np.asarray(x)
y = np.asarray(y)
if p == np.inf or p == 1:
return minkowski_distance_p(x, y, p)
else:
return minkowski_distance_p(x, y, p)**(1./p)
class Rectangle(object):
"""Hyperrectangle class.
Represents a Cartesian product of intervals.
"""
def __init__(self, maxes, mins):
"""Construct a hyperrectangle."""
self.maxes = np.maximum(maxes,mins).astype(float)
self.mins = np.minimum(maxes,mins).astype(float)
self.m, = self.maxes.shape
def __repr__(self):
return "<Rectangle %s>" % list(zip(self.mins, self.maxes))
def volume(self):
"""Total volume."""
return np.prod(self.maxes-self.mins)
def split(self, d, split):
"""
Produce two hyperrectangles by splitting.
In general, if you need to compute maximum and minimum
distances to the children, it can be done more efficiently
by updating the maximum and minimum distances to the parent.
Parameters
----------
d : int
Axis to split hyperrectangle along.
split : float
Position along axis `d` to split at.
"""
mid = np.copy(self.maxes)
mid[d] = split
less = Rectangle(self.mins, mid)
mid = np.copy(self.mins)
mid[d] = split
greater = Rectangle(mid, self.maxes)
return less, greater
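    # Example (editor's sketch): splitting the unit square along axis 0 at 0.5:
    #
    #   r = Rectangle([1., 1.], [0., 0.])
    #   less, greater = r.split(0, 0.5)
    #   less.maxes     # -> array([ 0.5,  1. ])
    #   greater.mins   # -> array([ 0.5,  0. ])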
def min_distance_point(self, x, p=2.):
"""
Return the minimum distance between input and points in the hyperrectangle.
Parameters
----------
x : array_like
Input.
p : float, optional
            Which Minkowski p-norm to use.
"""
return minkowski_distance(0, np.maximum(0,np.maximum(self.mins-x,x-self.maxes)),p)
def max_distance_point(self, x, p=2.):
"""
Return the maximum distance between input and points in the hyperrectangle.
Parameters
----------
x : array_like
Input array.
p : float, optional
            Which Minkowski p-norm to use.
"""
return minkowski_distance(0, np.maximum(self.maxes-x,x-self.mins),p)
def min_distance_rectangle(self, other, p=2.):
"""
Compute the minimum distance between points in the two hyperrectangles.
Parameters
----------
other : hyperrectangle
Input.
p : float
            Which Minkowski p-norm to use.
"""
return minkowski_distance(0, np.maximum(0,np.maximum(self.mins-other.maxes,other.mins-self.maxes)),p)
def max_distance_rectangle(self, other, p=2.):
"""
Compute the maximum distance between points in the two hyperrectangles.
Parameters
----------
other : hyperrectangle
Input.
p : float, optional
            Which Minkowski p-norm to use.
"""
return minkowski_distance(0, np.maximum(self.maxes-other.mins,other.maxes-self.mins),p)
class KDTree(object):
"""
kd-tree for quick nearest-neighbor lookup
This class provides an index into a set of k-dimensional points which
can be used to rapidly look up the nearest neighbors of any point.
Parameters
----------
data : (N,K) array_like
The data points to be indexed. This array is not copied, and
so modifying this data will result in bogus results.
leafsize : int, optional
The number of points at which the algorithm switches over to
brute-force. Has to be positive.
Raises
------
RuntimeError
The maximum recursion limit can be exceeded for large data
sets. If this happens, either increase the value for the `leafsize`
parameter or increase the recursion limit by::
>>> import sys
>>> sys.setrecursionlimit(10000)
See Also
--------
cKDTree : Implementation of `KDTree` in Cython
Notes
-----
The algorithm used is described in Maneewongvatana and Mount 1999.
The general idea is that the kd-tree is a binary tree, each of whose
nodes represents an axis-aligned hyperrectangle. Each node specifies
an axis and splits the set of points based on whether their coordinate
along that axis is greater than or less than a particular value.
During construction, the axis and splitting point are chosen by the
"sliding midpoint" rule, which ensures that the cells do not all
become long and thin.
The tree can be queried for the r closest neighbors of any given point
(optionally returning only those within some maximum distance of the
point). It can also be queried, with a substantial gain in efficiency,
for the r approximate closest neighbors.
For large dimensions (20 is already large) do not expect this to run
significantly faster than brute force. High-dimensional nearest-neighbor
queries are a substantial open problem in computer science.
The tree also supports all-neighbors queries, both with arrays of points
and with other kd-trees. These do use a reasonably efficient algorithm,
but the kd-tree is not necessarily the best data structure for this
sort of calculation.
"""
def __init__(self, data, leafsize=10):
self.data = np.asarray(data)
self.n, self.m = np.shape(self.data)
self.leafsize = int(leafsize)
if self.leafsize < 1:
raise ValueError("leafsize must be at least 1")
self.maxes = np.amax(self.data,axis=0)
self.mins = np.amin(self.data,axis=0)
self.tree = self.__build(np.arange(self.n), self.maxes, self.mins)
class node(object):
if sys.version_info[0] >= 3:
def __lt__(self, other):
return id(self) < id(other)
def __gt__(self, other):
return id(self) > id(other)
def __le__(self, other):
return id(self) <= id(other)
def __ge__(self, other):
return id(self) >= id(other)
def __eq__(self, other):
return id(self) == id(other)
class leafnode(node):
def __init__(self, idx):
self.idx = idx
self.children = len(idx)
class innernode(node):
def __init__(self, split_dim, split, less, greater):
self.split_dim = split_dim
self.split = split
self.less = less
self.greater = greater
self.children = less.children+greater.children
def __build(self, idx, maxes, mins):
if len(idx) <= self.leafsize:
return KDTree.leafnode(idx)
else:
data = self.data[idx]
# maxes = np.amax(data,axis=0)
# mins = np.amin(data,axis=0)
d = np.argmax(maxes-mins)
maxval = maxes[d]
minval = mins[d]
if maxval == minval:
# all points are identical; warn user?
return KDTree.leafnode(idx)
data = data[:,d]
# sliding midpoint rule; see Maneewongvatana and Mount 1999
# for arguments that this is a good idea.
split = (maxval+minval)/2
less_idx = np.nonzero(data <= split)[0]
greater_idx = np.nonzero(data > split)[0]
if len(less_idx) == 0:
split = np.amin(data)
less_idx = np.nonzero(data <= split)[0]
greater_idx = np.nonzero(data > split)[0]
if len(greater_idx) == 0:
split = np.amax(data)
less_idx = np.nonzero(data < split)[0]
greater_idx = np.nonzero(data >= split)[0]
if len(less_idx) == 0:
# _still_ zero? all must have the same value
if not np.all(data == data[0]):
raise ValueError("Troublesome data array: %s" % data)
split = data[0]
less_idx = np.arange(len(data)-1)
greater_idx = np.array([len(data)-1])
lessmaxes = np.copy(maxes)
lessmaxes[d] = split
greatermins = np.copy(mins)
greatermins[d] = split
return KDTree.innernode(d, split,
self.__build(idx[less_idx],lessmaxes,mins),
self.__build(idx[greater_idx],maxes,greatermins))
def __query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf):
side_distances = np.maximum(0,np.maximum(x-self.maxes,self.mins-x))
if p != np.inf:
side_distances **= p
min_distance = np.sum(side_distances)
else:
min_distance = np.amax(side_distances)
# priority queue for chasing nodes
# entries are:
# minimum distance between the cell and the target
# distances between the nearest side of the cell and the target
# the head node of the cell
q = [(min_distance,
tuple(side_distances),
self.tree)]
# priority queue for the nearest neighbors
# furthest known neighbor first
# entries are (-distance**p, i)
neighbors = []
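        # (editor's note) heapq is a min-heap, so storing the negated distance
        # keeps the *furthest* current neighbor at neighbors[0], the one to
        # evict when a closer point is found, e.g.:
        #   heappush(neighbors, (-4.0, 7)); heappush(neighbors, (-1.0, 3))
        #   neighbors[0]   # -> (-4.0, 7)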
if eps == 0:
epsfac = 1
elif p == np.inf:
epsfac = 1/(1+eps)
else:
epsfac = 1/(1+eps)**p
if p != np.inf and distance_upper_bound != np.inf:
distance_upper_bound = distance_upper_bound**p
while q:
min_distance, side_distances, node = heappop(q)
if isinstance(node, KDTree.leafnode):
# brute-force
data = self.data[node.idx]
ds = minkowski_distance_p(data,x[np.newaxis,:],p)
for i in range(len(ds)):
if ds[i] < distance_upper_bound:
if len(neighbors) == k:
heappop(neighbors)
heappush(neighbors, (-ds[i], node.idx[i]))
if len(neighbors) == k:
distance_upper_bound = -neighbors[0][0]
else:
# we don't push cells that are too far onto the queue at all,
# but since the distance_upper_bound decreases, we might get
# here even if the cell's too far
if min_distance > distance_upper_bound*epsfac:
# since this is the nearest cell, we're done, bail out
break
# compute minimum distances to the children and push them on
if x[node.split_dim] < node.split:
near, far = node.less, node.greater
else:
near, far = node.greater, node.less
# near child is at the same distance as the current node
heappush(q,(min_distance, side_distances, near))
# far child is further by an amount depending only
# on the split value
sd = list(side_distances)
if p == np.inf:
min_distance = max(min_distance, abs(node.split-x[node.split_dim]))
elif p == 1:
sd[node.split_dim] = np.abs(node.split-x[node.split_dim])
min_distance = min_distance - side_distances[node.split_dim] + sd[node.split_dim]
else:
sd[node.split_dim] = np.abs(node.split-x[node.split_dim])**p
min_distance = min_distance - side_distances[node.split_dim] + sd[node.split_dim]
# far child might be too far, if so, don't bother pushing it
if min_distance <= distance_upper_bound*epsfac:
heappush(q,(min_distance, tuple(sd), far))
if p == np.inf:
return sorted([(-d,i) for (d,i) in neighbors])
else:
return sorted([((-d)**(1./p),i) for (d,i) in neighbors])
def query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf):
"""
Query the kd-tree for nearest neighbors
Parameters
----------
x : array_like, last dimension self.m
An array of points to query.
k : int, optional
The number of nearest neighbors to return.
eps : nonnegative float, optional
Return approximate nearest neighbors; the kth returned value
is guaranteed to be no further than (1+eps) times the
distance to the real kth nearest neighbor.
p : float, 1<=p<=infinity, optional
Which Minkowski p-norm to use.
1 is the sum-of-absolute-values "Manhattan" distance
2 is the usual Euclidean distance
infinity is the maximum-coordinate-difference distance
distance_upper_bound : nonnegative float, optional
Return only neighbors within this distance. This is used to prune
tree searches, so if you are doing a series of nearest-neighbor
queries, it may help to supply the distance to the nearest neighbor
of the most recent point.
Returns
-------
d : float or array of floats
The distances to the nearest neighbors.
If x has shape tuple+(self.m,), then d has shape tuple if
k is one, or tuple+(k,) if k is larger than one. Missing
neighbors (e.g. when k > n or distance_upper_bound is
given) are indicated with infinite distances. If k is None,
then d is an object array of shape tuple, containing lists
of distances. In either case the hits are sorted by distance
(nearest first).
i : integer or array of integers
The locations of the neighbors in self.data. i is the same
shape as d.
Examples
--------
>>> from scipy import spatial
>>> x, y = np.mgrid[0:5, 2:8]
>>> tree = spatial.KDTree(list(zip(x.ravel(), y.ravel())))
>>> tree.data
array([[0, 2],
[0, 3],
[0, 4],
[0, 5],
[0, 6],
[0, 7],
[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 2],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 2],
[3, 3],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 2],
[4, 3],
[4, 4],
[4, 5],
[4, 6],
[4, 7]])
>>> pts = np.array([[0, 0], [2.1, 2.9]])
>>> tree.query(pts)
(array([ 2. , 0.14142136]), array([ 0, 13]))
>>> tree.query(pts[0])
(2.0, 0)
"""
x = np.asarray(x)
if np.shape(x)[-1] != self.m:
raise ValueError("x must consist of vectors of length %d but has shape %s" % (self.m, np.shape(x)))
if p < 1:
raise ValueError("Only p-norms with 1<=p<=infinity permitted")
retshape = np.shape(x)[:-1]
if retshape != ():
if k is None:
dd = np.empty(retshape,dtype=object)
ii = np.empty(retshape,dtype=object)
elif k > 1:
dd = np.empty(retshape+(k,),dtype=float)
dd.fill(np.inf)
ii = np.empty(retshape+(k,),dtype=int)
ii.fill(self.n)
elif k == 1:
dd = np.empty(retshape,dtype=float)
dd.fill(np.inf)
ii = np.empty(retshape,dtype=int)
ii.fill(self.n)
else:
raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None")
for c in np.ndindex(retshape):
hits = self.__query(x[c], k=k, eps=eps, p=p, distance_upper_bound=distance_upper_bound)
if k is None:
dd[c] = [d for (d,i) in hits]
ii[c] = [i for (d,i) in hits]
elif k > 1:
for j in range(len(hits)):
dd[c+(j,)], ii[c+(j,)] = hits[j]
elif k == 1:
if len(hits) > 0:
dd[c], ii[c] = hits[0]
else:
dd[c] = np.inf
ii[c] = self.n
return dd, ii
else:
hits = self.__query(x, k=k, eps=eps, p=p, distance_upper_bound=distance_upper_bound)
if k is None:
return [d for (d,i) in hits], [i for (d,i) in hits]
elif k == 1:
if len(hits) > 0:
return hits[0]
else:
return np.inf, self.n
elif k > 1:
dd = np.empty(k,dtype=float)
dd.fill(np.inf)
ii = np.empty(k,dtype=int)
ii.fill(self.n)
for j in range(len(hits)):
dd[j], ii[j] = hits[j]
return dd, ii
else:
raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None")
def __query_ball_point(self, x, r, p=2., eps=0):
R = Rectangle(self.maxes, self.mins)
def traverse_checking(node, rect):
if rect.min_distance_point(x, p) > r / (1. + eps):
return []
elif rect.max_distance_point(x, p) < r * (1. + eps):
return traverse_no_checking(node)
elif isinstance(node, KDTree.leafnode):
d = self.data[node.idx]
return node.idx[minkowski_distance(d, x, p) <= r].tolist()
else:
less, greater = rect.split(node.split_dim, node.split)
return traverse_checking(node.less, less) + \
traverse_checking(node.greater, greater)
def traverse_no_checking(node):
if isinstance(node, KDTree.leafnode):
return node.idx.tolist()
else:
return traverse_no_checking(node.less) + \
traverse_no_checking(node.greater)
return traverse_checking(self.tree, R)
def query_ball_point(self, x, r, p=2., eps=0):
"""Find all points within distance r of point(s) x.
Parameters
----------
x : array_like, shape tuple + (self.m,)
The point or points to search for neighbors of.
r : positive float
The radius of points to return.
p : float, optional
Which Minkowski p-norm to use. Should be in the range [1, inf].
eps : nonnegative float, optional
Approximate search. Branches of the tree are not explored if their
nearest points are further than ``r / (1 + eps)``, and branches are
added in bulk if their furthest points are nearer than
``r * (1 + eps)``.
Returns
-------
results : list or array of lists
If `x` is a single point, returns a list of the indices of the
neighbors of `x`. If `x` is an array of points, returns an object
array of shape tuple containing lists of neighbors.
Notes
-----
If you have many points whose neighbors you want to find, you may save
substantial amounts of time by putting them in a KDTree and using
query_ball_tree.
Examples
--------
>>> from scipy import spatial
>>> x, y = np.mgrid[0:5, 0:5]
>>> points = zip(x.ravel(), y.ravel())
>>> tree = spatial.KDTree(points)
>>> tree.query_ball_point([2, 0], 1)
[5, 10, 11, 15]
Query multiple points and plot the results:
>>> import matplotlib.pyplot as plt
>>> points = np.asarray(points)
>>> plt.plot(points[:,0], points[:,1], '.')
>>> for results in tree.query_ball_point(([2, 0], [3, 3]), 1):
... nearby_points = points[results]
... plt.plot(nearby_points[:,0], nearby_points[:,1], 'o')
>>> plt.margins(0.1, 0.1)
>>> plt.show()
"""
x = np.asarray(x)
if x.shape[-1] != self.m:
raise ValueError("Searching for a %d-dimensional point in a "
"%d-dimensional KDTree" % (x.shape[-1], self.m))
if len(x.shape) == 1:
return self.__query_ball_point(x, r, p, eps)
else:
retshape = x.shape[:-1]
result = np.empty(retshape, dtype=object)
for c in np.ndindex(retshape):
result[c] = self.__query_ball_point(x[c], r, p=p, eps=eps)
return result
def query_ball_tree(self, other, r, p=2., eps=0):
"""Find all pairs of points whose distance is at most r
Parameters
----------
other : KDTree instance
The tree containing points to search against.
r : float
The maximum distance, has to be positive.
p : float, optional
Which Minkowski norm to use. `p` has to meet the condition
``1 <= p <= infinity``.
eps : float, optional
Approximate search. Branches of the tree are not explored
if their nearest points are further than ``r/(1+eps)``, and
branches are added in bulk if their furthest points are nearer
than ``r * (1+eps)``. `eps` has to be non-negative.
Returns
-------
results : list of lists
For each element ``self.data[i]`` of this tree, ``results[i]`` is a
list of the indices of its neighbors in ``other.data``.
"""
results = [[] for i in range(self.n)]
def traverse_checking(node1, rect1, node2, rect2):
if rect1.min_distance_rectangle(rect2, p) > r/(1.+eps):
return
elif rect1.max_distance_rectangle(rect2, p) < r*(1.+eps):
traverse_no_checking(node1, node2)
elif isinstance(node1, KDTree.leafnode):
if isinstance(node2, KDTree.leafnode):
d = other.data[node2.idx]
for i in node1.idx:
results[i] += node2.idx[minkowski_distance(d,self.data[i],p) <= r].tolist()
else:
less, greater = rect2.split(node2.split_dim, node2.split)
traverse_checking(node1,rect1,node2.less,less)
traverse_checking(node1,rect1,node2.greater,greater)
elif isinstance(node2, KDTree.leafnode):
less, greater = rect1.split(node1.split_dim, node1.split)
traverse_checking(node1.less,less,node2,rect2)
traverse_checking(node1.greater,greater,node2,rect2)
else:
less1, greater1 = rect1.split(node1.split_dim, node1.split)
less2, greater2 = rect2.split(node2.split_dim, node2.split)
traverse_checking(node1.less,less1,node2.less,less2)
traverse_checking(node1.less,less1,node2.greater,greater2)
traverse_checking(node1.greater,greater1,node2.less,less2)
traverse_checking(node1.greater,greater1,node2.greater,greater2)
def traverse_no_checking(node1, node2):
if isinstance(node1, KDTree.leafnode):
if isinstance(node2, KDTree.leafnode):
for i in node1.idx:
results[i] += node2.idx.tolist()
else:
traverse_no_checking(node1, node2.less)
traverse_no_checking(node1, node2.greater)
else:
traverse_no_checking(node1.less, node2)
traverse_no_checking(node1.greater, node2)
traverse_checking(self.tree, Rectangle(self.maxes, self.mins),
other.tree, Rectangle(other.maxes, other.mins))
return results
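    # Example (editor's sketch):
    #
    #   t1 = KDTree([[0., 0.], [1., 0.]])
    #   t2 = KDTree([[0., 0.1], [5., 5.]])
    #   t1.query_ball_tree(t2, 0.5)   # -> [[0], []]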
def query_pairs(self, r, p=2., eps=0):
"""
Find all pairs of points within a distance.
Parameters
----------
r : positive float
The maximum distance.
p : float, optional
Which Minkowski norm to use. `p` has to meet the condition
``1 <= p <= infinity``.
eps : float, optional
Approximate search. Branches of the tree are not explored
if their nearest points are further than ``r/(1+eps)``, and
branches are added in bulk if their furthest points are nearer
than ``r * (1+eps)``. `eps` has to be non-negative.
Returns
-------
results : set
Set of pairs ``(i,j)``, with ``i < j``, for which the corresponding
positions are close.
"""
results = set()
def traverse_checking(node1, rect1, node2, rect2):
if rect1.min_distance_rectangle(rect2, p) > r/(1.+eps):
return
elif rect1.max_distance_rectangle(rect2, p) < r*(1.+eps):
traverse_no_checking(node1, node2)
elif isinstance(node1, KDTree.leafnode):
if isinstance(node2, KDTree.leafnode):
# Special care to avoid duplicate pairs
if id(node1) == id(node2):
d = self.data[node2.idx]
for i in node1.idx:
for j in node2.idx[minkowski_distance(d,self.data[i],p) <= r]:
if i < j:
results.add((i,j))
else:
d = self.data[node2.idx]
for i in node1.idx:
for j in node2.idx[minkowski_distance(d,self.data[i],p) <= r]:
if i < j:
results.add((i,j))
elif j < i:
results.add((j,i))
else:
less, greater = rect2.split(node2.split_dim, node2.split)
traverse_checking(node1,rect1,node2.less,less)
traverse_checking(node1,rect1,node2.greater,greater)
elif isinstance(node2, KDTree.leafnode):
less, greater = rect1.split(node1.split_dim, node1.split)
traverse_checking(node1.less,less,node2,rect2)
traverse_checking(node1.greater,greater,node2,rect2)
else:
less1, greater1 = rect1.split(node1.split_dim, node1.split)
less2, greater2 = rect2.split(node2.split_dim, node2.split)
traverse_checking(node1.less,less1,node2.less,less2)
traverse_checking(node1.less,less1,node2.greater,greater2)
# Avoid traversing (node1.less, node2.greater) and
# (node1.greater, node2.less) (it's the same node pair twice
# over, which is the source of the complication in the
# original KDTree.query_pairs)
if id(node1) != id(node2):
traverse_checking(node1.greater,greater1,node2.less,less2)
traverse_checking(node1.greater,greater1,node2.greater,greater2)
def traverse_no_checking(node1, node2):
if isinstance(node1, KDTree.leafnode):
if isinstance(node2, KDTree.leafnode):
# Special care to avoid duplicate pairs
if id(node1) == id(node2):
for i in node1.idx:
for j in node2.idx:
if i < j:
results.add((i,j))
else:
for i in node1.idx:
for j in node2.idx:
if i < j:
results.add((i,j))
elif j < i:
results.add((j,i))
else:
traverse_no_checking(node1, node2.less)
traverse_no_checking(node1, node2.greater)
else:
# Avoid traversing (node1.less, node2.greater) and
# (node1.greater, node2.less) (it's the same node pair twice
# over, which is the source of the complication in the
# original KDTree.query_pairs)
if id(node1) == id(node2):
traverse_no_checking(node1.less, node2.less)
traverse_no_checking(node1.less, node2.greater)
traverse_no_checking(node1.greater, node2.greater)
else:
traverse_no_checking(node1.less, node2)
traverse_no_checking(node1.greater, node2)
traverse_checking(self.tree, Rectangle(self.maxes, self.mins),
self.tree, Rectangle(self.maxes, self.mins))
return results
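    # Example (editor's sketch): indices 0 and 1 below are within 1.5 of each
    # other, while index 2 is isolated:
    #
    #   KDTree([[0., 0.], [0., 1.], [3., 3.]]).query_pairs(1.5)   # -> {(0, 1)}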
def count_neighbors(self, other, r, p=2.):
"""
Count how many nearby pairs can be formed.
        Count the number of pairs (x1,x2) that can be formed, with x1 drawn
from self and x2 drawn from `other`, and where
``distance(x1, x2, p) <= r``.
This is the "two-point correlation" described in Gray and Moore 2000,
"N-body problems in statistical learning", and the code here is based
on their algorithm.
Parameters
----------
other : KDTree instance
The other tree to draw points from.
r : float or one-dimensional array of floats
The radius to produce a count for. Multiple radii are searched with
a single tree traversal.
p : float, 1<=p<=infinity, optional
Which Minkowski p-norm to use
Returns
-------
result : int or 1-D array of ints
The number of pairs. Note that this is internally stored in a numpy
int, and so may overflow if very large (2e9).
"""
def traverse(node1, rect1, node2, rect2, idx):
min_r = rect1.min_distance_rectangle(rect2,p)
max_r = rect1.max_distance_rectangle(rect2,p)
c_greater = r[idx] > max_r
result[idx[c_greater]] += node1.children*node2.children
idx = idx[(min_r <= r[idx]) & (r[idx] <= max_r)]
if len(idx) == 0:
return
if isinstance(node1,KDTree.leafnode):
if isinstance(node2,KDTree.leafnode):
ds = minkowski_distance(self.data[node1.idx][:,np.newaxis,:],
other.data[node2.idx][np.newaxis,:,:],
p).ravel()
ds.sort()
result[idx] += np.searchsorted(ds,r[idx],side='right')
else:
less, greater = rect2.split(node2.split_dim, node2.split)
traverse(node1, rect1, node2.less, less, idx)
traverse(node1, rect1, node2.greater, greater, idx)
else:
if isinstance(node2,KDTree.leafnode):
less, greater = rect1.split(node1.split_dim, node1.split)
traverse(node1.less, less, node2, rect2, idx)
traverse(node1.greater, greater, node2, rect2, idx)
else:
less1, greater1 = rect1.split(node1.split_dim, node1.split)
less2, greater2 = rect2.split(node2.split_dim, node2.split)
traverse(node1.less,less1,node2.less,less2,idx)
traverse(node1.less,less1,node2.greater,greater2,idx)
traverse(node1.greater,greater1,node2.less,less2,idx)
traverse(node1.greater,greater1,node2.greater,greater2,idx)
R1 = Rectangle(self.maxes, self.mins)
R2 = Rectangle(other.maxes, other.mins)
if np.shape(r) == ():
r = np.array([r])
result = np.zeros(1,dtype=int)
traverse(self.tree, R1, other.tree, R2, np.arange(1))
return result[0]
elif len(np.shape(r)) == 1:
r = np.asarray(r)
n, = r.shape
result = np.zeros(n,dtype=int)
traverse(self.tree, R1, other.tree, R2, np.arange(n))
return result
else:
raise ValueError("r must be either a single value or a one-dimensional array of values")
def sparse_distance_matrix(self, other, max_distance, p=2.):
"""
Compute a sparse distance matrix
Computes a distance matrix between two KDTrees, leaving as zero
any distance greater than max_distance.
Parameters
----------
other : KDTree
max_distance : positive float
p : float, optional
Returns
-------
result : dok_matrix
Sparse matrix representing the results in "dictionary of keys" format.
"""
result = scipy.sparse.dok_matrix((self.n,other.n))
def traverse(node1, rect1, node2, rect2):
if rect1.min_distance_rectangle(rect2, p) > max_distance:
return
elif isinstance(node1, KDTree.leafnode):
if isinstance(node2, KDTree.leafnode):
for i in node1.idx:
for j in node2.idx:
d = minkowski_distance(self.data[i],other.data[j],p)
if d <= max_distance:
result[i,j] = d
else:
less, greater = rect2.split(node2.split_dim, node2.split)
traverse(node1,rect1,node2.less,less)
traverse(node1,rect1,node2.greater,greater)
elif isinstance(node2, KDTree.leafnode):
less, greater = rect1.split(node1.split_dim, node1.split)
traverse(node1.less,less,node2,rect2)
traverse(node1.greater,greater,node2,rect2)
else:
less1, greater1 = rect1.split(node1.split_dim, node1.split)
less2, greater2 = rect2.split(node2.split_dim, node2.split)
traverse(node1.less,less1,node2.less,less2)
traverse(node1.less,less1,node2.greater,greater2)
traverse(node1.greater,greater1,node2.less,less2)
traverse(node1.greater,greater1,node2.greater,greater2)
traverse(self.tree, Rectangle(self.maxes, self.mins),
other.tree, Rectangle(other.maxes, other.mins))
return result
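    # Example (editor's sketch): only the (0, 0) pair is within max_distance,
    # so the (1, 0) entry is left at zero:
    #
    #   t1 = KDTree([[0., 0.], [1., 1.]])
    #   t2 = KDTree([[0., 0.5]])
    #   t1.sparse_distance_matrix(t2, 1.0).todense()   # -> matrix([[ 0.5], [ 0. ]])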
def distance_matrix(x, y, p=2, threshold=1000000):
"""
Compute the distance matrix.
Returns the matrix of all pair-wise distances.
Parameters
----------
x : (M, K) array_like
        Matrix of M vectors in K dimensions.
y : (N, K) array_like
        Matrix of N vectors in K dimensions.
p : float, 1 <= p <= infinity
Which Minkowski p-norm to use.
threshold : positive int
If ``M * N * K`` > `threshold`, algorithm uses a Python loop instead
of large temporary arrays.
Returns
-------
result : (M, N) ndarray
Distance matrix.
Examples
--------
>>> from scipy.spatial import distance_matrix
>>> distance_matrix([[0,0],[0,1]], [[1,0],[1,1]])
array([[ 1. , 1.41421356],
[ 1.41421356, 1. ]])
"""
x = np.asarray(x)
m, k = x.shape
y = np.asarray(y)
n, kk = y.shape
if k != kk:
raise ValueError("x contains %d-dimensional vectors but y contains %d-dimensional vectors" % (k, kk))
if m*n*k <= threshold:
return minkowski_distance(x[:,np.newaxis,:],y[np.newaxis,:,:],p)
else:
result = np.empty((m,n),dtype=float) # FIXME: figure out the best dtype
if m < n:
for i in range(m):
result[i,:] = minkowski_distance(x[i],y,p)
else:
for j in range(n):
result[:,j] = minkowski_distance(x,y[j],p)
return result
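if __name__ == '__main__':
    # Editor's usage sketch, not part of the original module: exercise the
    # pure-Python KDTree on a tiny point set.
    pts = np.array([[0., 0.], [0., 1.], [1., 0.], [2., 2.]])
    tree = KDTree(pts, leafsize=2)
    # Two nearest neighbors of a query point; distances come back sorted.
    d, i = tree.query([0.1, 0.1], k=2)
    print('query:', d, i)
    # Same query with pruning: the missing neighbor is reported with an
    # infinite distance and an index equal to tree.n.
    d, i = tree.query([0.1, 0.1], k=2, distance_upper_bound=0.5)
    print('bounded query:', d, i)
    # All points within radius 1.5 of the origin.
    print('ball point:', tree.query_ball_point([0., 0.], 1.5))
    # Brute-force distance matrix between two point sets.
    print('distance matrix:')
    print(distance_matrix(pts[:2], pts[2:]))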
|
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Rupesh Tare <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import
import os
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
mock_open,
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.utils
from salt.exceptions import CommandExecutionError
from salt.modules import mount
# Globals
mount.__grains__ = {}
mount.__salt__ = {}
mount.__context__ = {}
MOCK_SHELL_FILE = 'A B C D F G\n'
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MountTestCase(TestCase):
'''
Test cases for salt.modules.mount
'''
def test_active(self):
'''
List the active mounts.
'''
with patch.dict(mount.__grains__, {'os': 'FreeBSD', 'kernel': 'FreeBSD'}):
            # uid=user1 tests the improbable case where an OS returns a name
# instead of a numeric id, for #25293
mock = MagicMock(return_value='A B C D,E,F,uid=user1,gid=grp1')
mock_user = MagicMock(return_value={'uid': '100'})
mock_group = MagicMock(return_value={'gid': '100'})
with patch.dict(mount.__salt__, {'cmd.run_stdout': mock,
'user.info': mock_user,
'group.info': mock_group}):
self.assertEqual(mount.active(), {'B':
{'device': 'A',
'opts': ['D', 'E', 'F',
'uid=100',
'gid=100'],
'fstype': 'C'}})
with patch.dict(mount.__grains__, {'os': 'Solaris', 'kernel': 'SunOS'}):
mock = MagicMock(return_value='A * B * C D/E/F')
with patch.dict(mount.__salt__, {'cmd.run_stdout': mock}):
self.assertEqual(mount.active(), {'B':
{'device': 'A',
'opts': ['D', 'E', 'F'],
'fstype': 'C'}})
with patch.dict(mount.__grains__, {'os': 'OpenBSD', 'kernel': 'OpenBSD'}):
mock = MagicMock(return_value={})
with patch.object(mount, '_active_mounts_openbsd', mock):
self.assertEqual(mount.active(), {})
with patch.dict(mount.__grains__, {'os': 'MacOS', 'kernel': 'Darwin'}):
mock = MagicMock(return_value={})
with patch.object(mount, '_active_mounts_darwin', mock):
self.assertEqual(mount.active(), {})
with patch.dict(mount.__grains__, {'os': 'MacOS', 'kernel': 'Darwin'}):
mock = MagicMock(return_value={})
with patch.object(mount, '_active_mountinfo', mock):
with patch.object(mount, '_active_mounts_darwin', mock):
self.assertEqual(mount.active(extended=True), {})
def test_fstab(self):
'''
List the content of the fstab
'''
mock = MagicMock(return_value=False)
with patch.object(os.path, 'isfile', mock):
self.assertEqual(mount.fstab(), {})
mock = MagicMock(return_value=True)
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(os.path, 'isfile', mock):
file_data = '\n'.join(['#',
'A B C D,E,F G H'])
with patch('salt.utils.fopen',
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
self.assertEqual(mount.fstab(), {'B': {'device': 'A',
'dump': 'G',
'fstype': 'C',
'opts': ['D', 'E', 'F'],
'pass': 'H'}})
def test_vfstab(self):
'''
List the content of the vfstab
'''
mock = MagicMock(return_value=False)
with patch.object(os.path, 'isfile', mock):
self.assertEqual(mount.vfstab(), {})
mock = MagicMock(return_value=True)
with patch.dict(mount.__grains__, {'kernel': 'SunOS'}):
with patch.object(os.path, 'isfile', mock):
file_data = '\n'.join(['#',
'swap - /tmp tmpfs - yes size=2048m'])
with patch('salt.utils.fopen',
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
self.assertEqual(mount.fstab(), {'/tmp': {'device': 'swap',
'device_fsck': '-',
'fstype': 'tmpfs',
'mount_at_boot': 'yes',
'opts': ['size=2048m'],
'pass_fsck': '-'}})
def test_rm_fstab(self):
'''
Remove the mount point from the fstab
'''
mock_fstab = MagicMock(return_value={})
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'fstab', mock_fstab):
with patch('salt.utils.fopen', mock_open()):
self.assertTrue(mount.rm_fstab('name', 'device'))
mock_fstab = MagicMock(return_value={'name': 'name'})
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'fstab', mock_fstab):
with patch('salt.utils.fopen', mock_open()) as m_open:
helper_open = m_open()
helper_open.write.assertRaises(CommandExecutionError,
mount.rm_fstab,
config=None)
def test_set_fstab(self):
'''
Tests to verify that this mount is represented in the fstab,
change the mount to match the data passed, or add the mount
if it is not present.
'''
mock = MagicMock(return_value=False)
with patch.object(os.path, 'isfile', mock):
self.assertRaises(CommandExecutionError,
mount.set_fstab, 'A', 'B', 'C')
mock = MagicMock(return_value=True)
mock_read = MagicMock(side_effect=OSError)
with patch.object(os.path, 'isfile', mock):
with patch.object(salt.utils, 'fopen', mock_read):
self.assertRaises(CommandExecutionError,
mount.set_fstab, 'A', 'B', 'C')
mock = MagicMock(return_value=True)
with patch.object(os.path, 'isfile', mock):
with patch('salt.utils.fopen',
mock_open(read_data=MOCK_SHELL_FILE)):
self.assertEqual(mount.set_fstab('A', 'B', 'C'), 'new')
def test_rm_automaster(self):
'''
Remove the mount point from the auto_master
'''
mock = MagicMock(return_value={})
with patch.object(mount, 'automaster', mock):
self.assertTrue(mount.rm_automaster('name', 'device'))
mock = MagicMock(return_value={'name': 'name'})
with patch.object(mount, 'fstab', mock):
with patch('salt.utils.fopen', mock_open()) as m_open:
helper_open = m_open()
helper_open.write.assertRaises(CommandExecutionError,
mount.rm_automaster,
'name', 'device')
def test_set_automaster(self):
'''
        Verify that this mount is represented in the auto_master, change the mount
to match the data passed, or add the mount if it is not present.
'''
mock = MagicMock(return_value=True)
with patch.object(os.path, 'isfile', mock):
self.assertRaises(CommandExecutionError,
mount.set_automaster,
'A', 'B', 'C')
def test_automaster(self):
'''
        Test listing the contents of the auto_master
'''
self.assertDictEqual(mount.automaster(), {})
def test_mount(self):
'''
Mount a device
'''
with patch.dict(mount.__grains__, {'os': 'MacOS'}):
mock = MagicMock(return_value=True)
with patch.object(os.path, 'exists', mock):
mock = MagicMock(return_value=None)
with patch.dict(mount.__salt__, {'file.mkdir': None}):
mock = MagicMock(return_value={'retcode': True,
'stderr': True})
with patch.dict(mount.__salt__, {'cmd.run_all': mock}):
self.assertTrue(mount.mount('name', 'device'))
mock = MagicMock(return_value={'retcode': False,
'stderr': False})
with patch.dict(mount.__salt__, {'cmd.run_all': mock}):
self.assertTrue(mount.mount('name', 'device'))
def test_remount(self):
'''
        Attempt to remount a device; if the device is not already mounted,
        mount is called
'''
with patch.dict(mount.__grains__, {'os': 'MacOS'}):
mock = MagicMock(return_value=[])
with patch.object(mount, 'active', mock):
mock = MagicMock(return_value=True)
with patch.object(mount, 'mount', mock):
self.assertTrue(mount.remount('name', 'device'))
def test_umount(self):
'''
Attempt to unmount a device by specifying the directory it is
mounted on
'''
mock = MagicMock(return_value={})
with patch.object(mount, 'active', mock):
self.assertEqual(mount.umount('name'),
'name does not have anything mounted')
mock = MagicMock(return_value={'name': 'name'})
with patch.object(mount, 'active', mock):
mock = MagicMock(return_value={'retcode': True, 'stderr': True})
with patch.dict(mount.__salt__, {'cmd.run_all': mock}):
self.assertTrue(mount.umount('name'))
mock = MagicMock(return_value={'retcode': False})
with patch.dict(mount.__salt__, {'cmd.run_all': mock}):
self.assertTrue(mount.umount('name'))
def test_is_fuse_exec(self):
'''
Returns true if the command passed is a fuse mountable application
'''
with patch.object(salt.utils, 'which', return_value=None):
self.assertFalse(mount.is_fuse_exec('cmd'))
with patch.object(salt.utils, 'which', return_value=True):
self.assertFalse(mount.is_fuse_exec('cmd'))
mock = MagicMock(side_effect=[1, 0])
with patch.object(salt.utils, 'which', mock):
self.assertFalse(mount.is_fuse_exec('cmd'))
def test_swaps(self):
'''
Return a dict containing information on active swap
'''
file_data = '\n'.join(['Filename Type Size Used Priority',
'/dev/sda1 partition 31249404 4100 -1'])
with patch.dict(mount.__grains__, {'os': '', 'kernel': ''}):
with patch('salt.utils.fopen',
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
self.assertDictEqual(mount.swaps(), {'/dev/sda1':
{'priority': '-1',
'size': '31249404',
'type': 'partition',
'used': '4100'}})
file_data = '\n'.join(['Device Size Used Unknown Unknown Priority',
'/dev/sda1 31249404 4100 unknown unknown -1'])
mock = MagicMock(return_value=file_data)
with patch.dict(mount.__grains__, {'os': 'OpenBSD', 'kernel': 'OpenBSD'}):
with patch.dict(mount.__salt__, {'cmd.run_stdout': mock}):
self.assertDictEqual(mount.swaps(), {'/dev/sda1':
{'priority': '-1',
'size': '31249404',
'type': 'partition',
'used': '4100'}})
def test_swapon(self):
'''
Activate a swap disk
'''
mock = MagicMock(return_value={'name': 'name'})
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'swaps', mock):
self.assertEqual(mount.swapon('name'),
{'stats': 'name', 'new': False})
mock = MagicMock(return_value={})
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'swaps', mock):
mock = MagicMock(return_value=None)
with patch.dict(mount.__salt__, {'cmd.run': mock}):
self.assertEqual(mount.swapon('name', False), {})
mock = MagicMock(side_effect=[{}, {'name': 'name'}])
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'swaps', mock):
mock = MagicMock(return_value=None)
with patch.dict(mount.__salt__, {'cmd.run': mock}):
self.assertEqual(mount.swapon('name'), {'stats': 'name',
'new': True})
def test_swapoff(self):
'''
Deactivate a named swap mount
'''
mock = MagicMock(return_value={})
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'swaps', mock):
self.assertEqual(mount.swapoff('name'), None)
mock = MagicMock(return_value={'name': 'name'})
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'swaps', mock):
with patch.dict(mount.__grains__, {'os': 'test'}):
mock = MagicMock(return_value=None)
with patch.dict(mount.__salt__, {'cmd.run': mock}):
self.assertFalse(mount.swapoff('name'))
mock = MagicMock(side_effect=[{'name': 'name'}, {}])
with patch.dict(mount.__grains__, {'kernel': ''}):
with patch.object(mount, 'swaps', mock):
with patch.dict(mount.__grains__, {'os': 'test'}):
mock = MagicMock(return_value=None)
with patch.dict(mount.__salt__, {'cmd.run': mock}):
self.assertTrue(mount.swapoff('name'))
def test_is_mounted(self):
'''
Provide information if the path is mounted
'''
mock = MagicMock(return_value={})
with patch.object(mount, 'active', mock):
self.assertFalse(mount.is_mounted('name'))
mock = MagicMock(return_value={'name': 'name'})
with patch.object(mount, 'active', mock):
self.assertTrue(mount.is_mounted('name'))
if __name__ == '__main__':
from integration import run_tests
run_tests(MountTestCase, needs_daemon=False)
|
|
"""
Create PyML datasets from Brainvoyager data.
The location of the Brainvoyager data is assumed to be
in the current directory and can't be otherwise specified.
Requires preSVM.py, assumed to be in '/Users/type/Code/mvpa'
(which you may need to change, see around line 18).
These have been tested and run inside an ipython session
using the macports install of python26.
"""
import nifti as nf
import PyML as ml
import os as os
## source preSVM.py
oldpth = os.getcwd()
os.chdir('/Users/type/Code/mvpa/')
import preSVM as pre
#reload(pre)
# excessive reloading for easy debugging
os.chdir(oldpth)
def createDataset(svmName='',roiName='',labFiles=[],niiFiles=[],lab1='',lab2=''):
"""
Workhorse...
"""
if os.path.exists(svmName):
print('Overwriting {0}.'.format(svmName))
os.remove(svmName)
for labF, niiF in zip(labFiles, niiFiles):
print(niiF,labF)
## Get the needed data
vtc = nf.NiftiImage(niiF)
roi = nf.NiftiImage(roiName)
vols, labels = pre.readLabList(labF)
## Preprocess the data
reducedRoi = pre.roiReduce(roi,vtc)
maskedVtc = pre.maskVtc(vtc,reducedRoi)
reference = pre.createRefVtc(maskedVtc)
## Filter labels and vols by lab1, lab2,
## then recode the labels as 1 and 2
l1mask = labels == lab1
l2mask = labels == lab2
l1l2mask = l1mask != l2mask
labels[l1mask] = 1
labels[l2mask] = 2
vols = vols[l1l2mask]
labels = labels[l1l2mask]
pre.writeSVM(maskedVtc,reference,labels,vols,svmName)
else:
print('Z-normalizing the data.')
pre.zSparse(svmName)
## z-norm the data, requires sparse conversion
def diffLabels(roi='',train=('STI','STU'),test=('TESTSTI','TESTSTU')):
"""
This function creates a PyML dataset from vtc and roi (Brainvoyager)
formatted data. It should be used when the training and testing labels
are different but all relevant files in the PWD are of interest.
Returns:
- a pair of z-normalized BOLD datasets, one for svm training
and one for testing; BOLD data was extracted from the full
(vtc) timecourse via the given roi (as .vmr)
- data from identical runs is silently overwritten, but
user is informed of this
"""
niiFiles, labFiles = pre.nii_LabelListMatch('.')
## Create lists of all the
## needed files
svmBaseName = (str.split(roi,'.'))[0] + '.txt'
## suffix of the file to be written
## TRAIN SET: ###################################################
svmName = 'train_'+ train[0] + 'x' + train[1] + '_' + svmBaseName
print('Creating training data: {0}'.format(svmName))
if os.path.exists(svmName):
print('Overwriting {0}.'.format(svmName))
os.remove(svmName)
createDataset(svmName,roi,labFiles,niiFiles,train[0],train[1])
## createDataset(svmName='',roiName='',labFiles,niiFiles,lab1='',lab2='')
## TEST SET: ###################################################
svmName = 'test_'+ test[0] + 'x' + test[1] + '_' + svmBaseName
print('Creating testing data: {0}'.format(svmName))
if os.path.exists(svmName):
print('Overwriting {0}.'.format(svmName))
os.remove(svmName)
createDataset(svmName,roi,labFiles,niiFiles,test[0],test[1])
## createDataset(svmName='',roiName='',labFiles,niiFiles,lab1='',lab2='')
def testOnly(roi='',test=('TESTSTI','TESTSTU')):
"""
This function creates a PyML dataset from vtc and roi (Brainvoyager)
formatted data. It should be used when ONLY the TEST
labels are needed, but different labels will be used for each
(as in diffLabels()).
Returns:
- a z-normalized BOLD test dataset; BOLD data was extracted
from the full (vtc) timecourse via the given roi (as .vmr)
- data from identical runs is silently overwritten, but
user is informed of this
"""
niiFiles, labFiles = pre.nii_LabelListMatch('.')
## Create lists of all the needed files
svmBaseName = (str.split(roi,'.'))[0] + '.txt'
## suffix of the file to be written
## TEST SET: ###################################################
svmName = 'test_'+ test[0] + 'x' + test[1] + '_' + svmBaseName
print('Creating testing data: {0}'.format(svmName))
if os.path.exists(svmName):
print('Overwriting {0}.'.format(svmName))
os.remove(svmName)
createDataset(svmName,roi,labFiles,niiFiles,test[0],test[1])
## createDataset(svmName='',roiName='',labFiles,niiFiles,lab1='',lab2='')
def trainOnly(roi='',train=('TESTSTU','TESTSTI')):
"""
This function creates a PyML dataset from vtc and roi (Brainvoyager)
formatted data. It should be used when ONLY the TRAINING
labels are needed, but different labels will be used for each
(as in diffLabels()).
Returns:
- a z-normalized BOLD training dataset; BOLD data was extracted
from the full (vtc) timecourse via the given roi (as .vmr)
- data from identical runs is silently overwritten, but
user is informed of this
"""
niiFiles, labFiles = pre.nii_LabelListMatch('.')
## Create lists of all the needed files
svmBaseName = (str.split(roi,'.'))[0] + '.txt'
## suffix of the file to be written
## TRAIN SET: ###################################################
svmName = 'train_'+ train[0] + 'x' + train[1] + '_' + svmBaseName
print('Creating training data: {0}'.format(svmName))
if os.path.exists(svmName):
print('Overwriting {0}.'.format(svmName))
os.remove(svmName)
createDataset(svmName,roi,labFiles,niiFiles,train[0],train[1])
## createDataset(svmName='',roiName='',labFiles,niiFiles,lab1='',lab2='')
def sameLabels(roi='',fracTrain=0.3,labels=('STI','STU')):
"""
Creates unmodified and z-normalized PyML datasets from BV data,
dividing part (as specified by fracTrain) into
a training set and the rest into a testing set.
This is to be used when the same labels are applied to training
and testing sets AND the training and testing are pulled from
the same Brainvoyager data files.
"""
niiFiles, labFiles = pre.nii_LabelListMatch('.')
svmBaseName = (str.split(roi,'.'))[0] + '.txt'
svmName = labels[0] + 'x' + labels[1] + '_' + svmBaseName
createDataset(svmName,roi,labFiles,niiFiles,labels[0],labels[1])
pre.vSplit(vecName='vec_'+svmName,fracTrain=fracTrain)
def diffFilesSameLabels(roi='',trainFiles=('name',[],[]),testFiles=('name',[],[]),labels=('','')):
"""
Creates unmodified and z-normalized PyML datasets from BV data
specified in trainFiles and testFiles using the same labels for
each. This is to be used when you want to split the BV data into known
groups but the labels for each group are the same.
IMPORTANT: trainFiles and testFiles are tuples of a name and two
file lists. The latter are needed to ensure that file output does not
collide with unrelated files, and because sensible names are nice.
After the name, the list of label files should be the first entry in
each and the list of nii files the second.
"""
svmBaseName = (str.split(roi,'.'))[0] + '.txt'
svmName_trn = 'train_{0}_{1}x{2}_{3}'.format(trainFiles[0],labels[0],labels[1],svmBaseName)
svmName_tst = 'test_{0}_{1}x{2}_{3}'.format(testFiles[0],labels[0],labels[1],svmBaseName)
createDataset(svmName_trn,roi,trainFiles[1],trainFiles[2],labels[0],labels[1])
createDataset(svmName_tst,roi,testFiles[1],testFiles[2],labels[0],labels[1])
def diffFilesDiffLabels(roi='',trainFiles=('name',[],[]),testFiles=('name',[],[]),train=('',''),test=('','')):
"""
Creates unmodified and z-normalized PyML datasets from BV data
specified in trainFiles and testFiles using different labels for
each. This is to be used when both unique files and labels are to be
employed in the training and testing sets.
IMPORTANT: trainFiles and testFiles are tuples of a name and two
file lists. After the name, the list of label files should be the
first entry in each and the list of nii files the second.
"""
svmBaseName = (str.split(roi,'.'))[0] + '.txt'
svmName_trn ='train_{0}_{1}x{2}_{3}'.format(trainFiles[0],train[0],train[1],svmBaseName)
svmName_tst ='test_{0}_{1}x{2}_{3}'.format(testFiles[0],test[0],test[1],svmBaseName)
createDataset(svmName_trn,roi,trainFiles[1],trainFiles[2],train[0],train[1])
createDataset(svmName_tst,roi,testFiles[1],testFiles[2],test[0],test[1])
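## Hedged usage sketch (added, not part of the original script): the roi
## file name and label strings below are hypothetical placeholders. It
## assumes the Brainvoyager vtc files and matching label lists sit in the
## current directory, as the functions above require.
if __name__ == '__main__':
    ## Different labels for training and testing, using every relevant
    ## file found in the PWD:
    diffLabels(roi='myroi.vmr',
               train=('STI', 'STU'),
               test=('TESTSTI', 'TESTSTU'))
    ## Same labels for both sets, sending 30% of the data to training:
    sameLabels(roi='myroi.vmr', fracTrain=0.3, labels=('STI', 'STU'))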
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple logger.
Based on the 'borg' recipe from [http://code.activestate.com/recipes/66531/].
"""
import os
import shutil
import time
import datetime
import traceback
# default maximum length of filename
MaxNameLength = 15
################################################################################
# A simple logger.
#
# Simple usage:
# import log
# log = log.Log('my_log.txt', Log.DEBUG)
# log('A line in the log at the default level (DEBUG)')
# log('A log line at WARN level', Log.WARN)
# log.debug('log line issued at DEBUG level')
#
# Log levels styled on the Python 'logging' module.
################################################################################
class Log(object):
__shared_state = {} # this __dict__ shared by ALL instances
# the predefined logging levels
CRITICAL = 50
ERROR = 40
WARN = 30
INFO = 20
DEBUG = 10
NOTSET = 0
_level_num_to_name = {NOTSET: 'NOTSET',
DEBUG: 'DEBUG',
INFO: 'INFO',
WARN: 'WARN',
ERROR: 'ERROR',
CRITICAL: 'CRITICAL'}
def __init__(self, logfile=None, level=NOTSET, append=False,
name_length=MaxNameLength):
"""Initialise the logging object.
logfile the path to the log file
level logging level - don't log below this level
append True if log file is appended to
name_length max length of module name in log output
"""
# make sure we have same state as all other log objects
self.__dict__ = Log.__shared_state
# don't allow logfile to change after initially set
if hasattr(self, 'logfile'):
#self.critical('Ignore attempt to reconfigure logging')
return
# OK, configure logging
if logfile is None:
logfile = '%s.log' % __name__
# get correct open option
opt = 'w'
if append:
opt = 'a'
try:
self.logfd = open(logfile, opt)
except IOError:
# assume we have readonly filesystem, create elsewhere
basefile = os.path.basename(logfile)
if os.name == 'nt':
# TODO: should use user-specific directory?
logfile = os.path.join('C:\\', basefile)
elif os.name == 'posix':
logfile = os.path.join(os.path.expanduser('~'), basefile)
else:
raise Exception('Unrecognized platform: %s' % os.name)
# try to open logfile again
self.logfd = open(logfile, opt)
self.logfile = logfile
# convert 'level' param to a number if it was a string
if isinstance(level, basestring):
new_level = Log.NOTSET
for (l, n) in Log._level_num_to_name.items():
if str(level) == n:
new_level = l
break
level = new_level
self.level = level
self.name_length = name_length
self.critical('='*65)
self.critical('Log started on %s, log level=%s'
% (datetime.datetime.now().ctime(), self.level2string()))
self.critical('-'*65)
def __call__(self, msg=None, level=None):
"""Call on the logging object.
msg message string to log
level level to log 'msg' at (if not given, assume self.level)
"""
# get level to log at
if level is None:
level = self.level
# are we going to log?
if level < self.level:
return
if msg is None: # if user just wants a blank line
msg = ''
# get time
to = datetime.datetime.now()
hr = to.hour
min = to.minute
sec = to.second
msec = to.microsecond
# caller information - look back for first module != <this module name>
frames = traceback.extract_stack()
frames.reverse()
try:
(_, mod_name) = __name__.rsplit('.', 1)
except ValueError:
mod_name = __name__
for (fpath, lnum, mname, _) in frames:
fname = os.path.basename(fpath).rsplit('.', 1)[0]
if fname != mod_name:
break
# get string for log level
loglevel = (self.level2string() + ' ')[:8]
fname = fname[:self.name_length]
self.logfd.write('%02d:%02d:%02d.%06d|%8s|%*s:%-4d|%s\n'
% (hr, min, sec, msec, loglevel, self.name_length,
fname, lnum, msg))
self.logfd.flush()
def critical(self, msg):
"""Log a message at CRITICAL level."""
self(msg, Log.CRITICAL)
def error(self, msg):
"""Log a message at ERROR level."""
self(msg, Log.ERROR)
def warn(self, msg):
"""Log a message at WARN level."""
self(msg, Log.WARN)
def info(self, msg):
"""Log a message at INFO level."""
self(msg, Log.INFO)
def debug(self, msg):
"""Log a message at DEBUG level."""
self(msg, Log.DEBUG)
# def __del__(self):
# self.logfd.close()
def set_level(self, level):
self.level = level
self('Set logging level to %d' % level, Log.CRITICAL)
def bump_level(self, bump=10):
"""Increase the logging level.
bump the amount to increase logging level by (default 10)
"""
if self.level > Log.DEBUG:
self.level -= bump
if self.level < Log.DEBUG:
self.level = Log.DEBUG
self('Set logging level to %s' % self.level2string(), Log.CRITICAL)
def level2string(self):
"""Convert a logging level to a string."""
base_level = int(self.level / 10) * 10
rem_level = self.level % 10
base_level_str = Log._level_num_to_name[base_level]
if rem_level == 0:
return base_level_str
else:
return '%s+%d' % (base_level_str, rem_level)
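# Hedged demo (added, not part of the original module); it uses only the
# Log class defined above and shows the 'borg' shared-state behaviour:
# every Log() instance shares one state, so the second constructor call
# below keeps the first file and level. File names are placeholders.
if __name__ == '__main__':
    log = Log('demo_log.txt', Log.DEBUG)
    log('a line at the default level (DEBUG)')
    log.warn('a WARN level line')
    other = Log('ignored.txt', Log.ERROR)   # reconfiguration attempt is ignored
    other.info('still written to demo_log.txt at the original level')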
|
|
#!/usr/bin/env python
__description__ = \
"""
A set of objects and functions for blasting sequences against local and
remote servers. Also functions for filtering sequences. Uses Biopython. It
also requires ncbi's blast+ tools and cdhit to be installed and visible in
the path.
"""
__author__ = "Michael J. Harms"
__date__ = "110528"
__usage__ = "orthologBlast.py seed_fasta_file"
__version__ = "0.1"
# Modules for running sundry processes
import subprocess, shlex, re, sys, os, string
# Modules for blasting, etc.
from Bio import SeqIO, Entrez
from Bio.Seq import Seq
from Bio.Blast.Applications import NcbiblastpCommandline as blastp
from Bio.Blast import NCBIWWW, NCBIXML
# Modules for parsing XML
from xml.etree import ElementTree as ET
# Set email address here for NCBI/Entrez server
Entrez.email = "PUT_YOUR_EMAIL_ADDRESS_HERE"
# Global unique name counter
homolog_counter = 0
class BlastToolsError(Exception):
"""
General error class for this module.
"""
pass
class Homolog:
"""
A class that holds homologs (minimally, accession and name; maximally,
sequence information and name of ortholog).
"""
def __init__(self,definition,accession):
"""
Create a new homolog.
"""
global homolog_counter
# Record homolog definition and accession number
self.definition = str(definition)
self.accession = str(accession)
self.rank = sys.maxint
# Assign this homolog a unique name.
self.unique_name = "%i" % (homolog_counter)
self.unique_name = self.unique_name.zfill(8)
self.unique_name = "XX%s" % self.unique_name
homolog_counter += 1
# Initialize other variables that we don't yet know
self.sequence = None
self.length = None
self.taxid = None
self.organism = None
self.ortholog_name = None
def loadSequence(self,sequence,length,taxid,organism):
"""
Load sequence data for this homolog.
"""
self.sequence = Seq(sequence)
self.length = length
self.taxid = taxid
self.organism = organism
def formatFasta(self,seq_name=None):
"""
Return a fasta formatted string with either "unique_name" or seq_name
as the name.
"""
if seq_name is None:
seq_name = self.unique_name
return ">%s\n%s\n" % (seq_name,self.sequence)
def formatTabDelim(self):
"""
Write the data for this ortholog out onto a single, tab-delimited
line.
"""
pretty_name = "%s-%s-%s" % (self.ortholog_name,self.organism,
self.accession)
to_write = (self.unique_name,
self.ortholog_name,
self.organism,
self.rank,
self.accession,
self.length,
str(self.sequence),
self.definition,
pretty_name)
to_write = "\t".join(["%r" % w for w in to_write])
return "%s\n" % to_write
def environmentCheck():
"""
Make sure that all accessory programs are available in the path.
"""
print "Checking for required external programs...",
to_check = ["cdhit","blastp","tblastn","makeblastdb"]
failed = []
for c in to_check:
args = shlex.split(c)
try:
out = subprocess.Popen(args,stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except OSError:
failed.append(c)
except:
print "Unexpected error when running %s:" % c, sys.exc_info()[0]
raise
if len(failed) > 0:
print "FAILURE\n"
return failed
else:
print "SUCCESS\n"
if Entrez.email in (None, "PUT_YOUR_EMAIL_ADDRESS_HERE"):
print "No email address has been set! To avoid typing this in the"
print "future, edit the 'Entrez.email' line near the top of this module"
print "to point to your email address (e.g. Entrez.email = \"[email protected]\")."
print "\nPlease visit the entrez website for more information about"
print "Entrez usage rules and why providing an email address is useful."
print ""
print "http://www.ncbi.nlm.nih.gov/entrez/query/static/eutils_help.html#UserSystemRequirements"
print ""
email = raw_input("Please enter your email address:\n")
Entrez.email = email
print ""
return []
def flattenConcatenatedXML(input_file,key_tag):
"""
Clean up naively concatenated XML files by deleting begin/end tags that
occur at the place where the two files were concatenated.
NOTE: This will break and break royally if the key_tags are on the same
lines as other important entries.
"""
f = open(input_file,'r')
input = f.readlines()
f.close()
set_start = re.compile("<%s>" % key_tag)
set_end = re.compile("</%s>" % key_tag)
# Find all beginning and end tags...
starts = [i for i, l in enumerate(input) if set_start.search(l) != None]
# If this tag occurs more than once...
if (len(starts) != 1):
# Keep the first start; reverse so we are chewing from the bottom.
starts.pop(0)
starts.reverse()
# Remove all lines between each end and start, again chewing from the
# bottom.
for i in range(len(starts)):
e = starts[i]
while set_end.search(input[e]) == None:
input.pop(e)
e = e - 1
input.pop(e)
# Return freshly minted, clean XML
return "".join(input)
def parseFastaXML(sequence_file):
"""
Load a set of sequences from an NCBI xml/fasta file into a set of Homolog
objects.
"""
homolog_list = []
print "Parsing sequences in %s" % sequence_file,
# Fix screwed up XML because sequences downloaded and output concatenated
sequence_input = flattenConcatenatedXML(sequence_file,"TSeqSet")
# Now we should have valid XML...
sequence_input = ET.XML(sequence_input)
for i, sequence in enumerate(sequence_input):
properties = dict([(s.tag,str(s.text)) for s in sequence])
definition = properties["TSeq_defline"].strip()
definition = re.sub("\t"," ",definition)
accession = properties["TSeq_gi"].strip()
homolog_list.append(Homolog(definition,accession))
homolog_list[-1].loadSequence(properties["TSeq_sequence"].strip(),
int(properties["TSeq_length"]),
int(properties["TSeq_taxid"]),
properties["TSeq_orgname"].strip())
print "DONE."
return homolog_list
def parseBlastXML(blast_file,tag_list=("Hit_def","Hit_id")):
"""
Parse BLAST xml output, extracting tags specified in tag_list and putting
into a list. E-value is always appended after the last requested tag.
"""
# Fix screwed up XML if blasts were done in series...
blast_input = flattenConcatenatedXML(blast_file,"BlastOutput_iterations")
# Read blast file properties (in tag_list) into a list to dump out
blast_input = ET.XML(blast_input)
all_hits = []
for blast_output in blast_input:
if blast_output.tag != "BlastOutput_iterations":
continue
for iteration in blast_output:
if iteration.tag != "Iteration":
continue
for hits in iteration:
if hits.tag != "Iteration_hits":
continue
for hit in hits:
properties = dict([(h.tag,str(h.text)) for h in hit])
all_hits.append([properties[t] for t in tag_list])
for property in hit:
if property.tag == "Hit_hsps":
for hsp in property:
hsp_properties = dict([(p.tag,str(p.text))
for p in hsp])
all_hits[-1].append(hsp_properties["Hsp_evalue"])
break
return all_hits
def downloadSequences(accession_list,out_file,db="protein",
batch_download_size=50,force=False):
"""
Download a list of accessions in fasta/xml format.
accession_list: list of ncbi accesion numbers
out_file: file in which to write output in fasta/xml format
db: database to use for accession
batch_download_size: size of individual download packets
force: True/False. Overwrite existing download file. If False, the program
throws a notice that an old file is being used rather than re-
downloading.
"""
# check for existance of out file
if os.path.exists(out_file):
if force:
print "Deleting existing download file (%s)!" % out_file
os.remove(out_file)
else:
print "%s already exists. Not downloading." % out_file
return
print "Posting list of unique accession numbers to NCBI...",
# Upload the list of sequences to NCBI
to_download = ",".join([l.strip() for l in accession_list])
post_xml = Entrez.read(Entrez.epost(db, id=to_download))
webenv = post_xml["WebEnv"]
query_key = post_xml["QueryKey"]
print "DONE.\n"
print "Downloading sequences."
# Now download the sequences (in fasta/xml format).
count = len(accession_list)
out_handle = open(out_file, "w")
for start in range(0,count,batch_download_size):
end = min(count, start+batch_download_size)
print "Downloading %i to %i of %i" % (start+1,end,count)
fetch_handle = Entrez.efetch(db=db, rettype="fasta",
retmode="xml",retstart=start,
retmax=batch_download_size,
webenv=webenv,query_key=query_key)
data = fetch_handle.read()
fetch_handle.close()
out_handle.write(data)
out_handle.close()
def runCdhit(homolog_list,redund_cutoff=0.99,tmp_file_suffix="oB_cdhit",
keep_tmp=False):
"""
Remove redundant homologs using cdhit. After clustering with
the redundancy cutoff, take the member of each cluster with the lowest
rank. Return a subset of homolog_list.
"""
# Write out the fasta file with a unique name for each sequence that goes
# >0, >1...>N. Those numbers point to the index of the sequence in
# homolog_list.
# Don't do anything for empty list
if len(homolog_list) == 0:
print "Warning: empty list passed to cdhit! Ignoring."
return homolog_list
fasta_string = "".join([s.formatFasta(i) for i,s in enumerate(homolog_list)])
f = open("%s.fasta" % tmp_file_suffix,'w')
f.write(fasta_string)
f.close()
# Run cdhit
cdhit_cmd = "cdhit -i %s.fasta -o %s_cdhit -c %.3f" % (tmp_file_suffix,
tmp_file_suffix,
redund_cutoff)
args = shlex.split(cdhit_cmd)
run = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
stdoutdata, stderrdata = run.communicate()
if run.returncode != 0:
print stdoutdata
err = "cdhit failed!\n"
raise BlastToolsError(err)
# Now parse the output of cdhit and grab members of clusters with the
# lowest rank
f = open("%s_cdhit.clstr" % tmp_file_suffix,'r')
out = []
in_cluster = []
line = f.readline()
while line != "":
# If we are starting a new cluster
if line.startswith(">"):
# ... and this is not the first cluster
if in_cluster != []:
# Take the member of in_cluster with the minimum rank
ranks = [homolog_list[c].rank for c in in_cluster]
best = in_cluster[ranks.index(min(ranks))]
out.append(homolog_list[best])
in_cluster = []
# If this is not a blank line, record the seq_id in in_cluster
elif line[0] in string.digits:
seq_id = int(line.split(">")[1])
in_cluster.append(seq_id)
# Read the next line
line = f.readline()
# Grab the last cluster
ranks = [homolog_list[c].rank for c in in_cluster]
best = in_cluster[ranks.index(min(ranks))]
out.append(homolog_list[best])
f.close()
# Delete temporary files
if not keep_tmp:
os.remove("%s.fasta" % tmp_file_suffix)
os.remove("%s_cdhit" % tmp_file_suffix)
os.remove("%s_cdhit.clstr" % tmp_file_suffix)
os.remove("%s_cdhit.bak.clstr" % tmp_file_suffix)
print "cdhit lowered redundancy @ %.3f, %i of %i kept" % (redund_cutoff,
len(out),
len(homolog_list))
return out
def seq2blastdb(fasta_set,db_name,db_type="prot",quiet=False):
"""
Convert a set of fasta-type sequences into a blast database.
fasta_set: list of sequence strings in fasta format.
db_name: database name.
db_type: type of database to generate
quiet: don't print status-y things
"""
f = open("%s.fasta" % db_name,'w')
f.write("".join(fasta_set))
f.close()
if not quiet:
print "Creating blast database %s..." % db_name,
# Create command to run
cmd = "makeblastdb -in %s.fasta -dbtype %s -out %s" % (db_name,db_type,db_name)
args = shlex.split(cmd)
# Run command
run = subprocess.Popen(args,stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
stdout, stderr = run.communicate()
# Make sure it exited cleanly
if run.poll() != 0:
print stdout
err = "Error running makeblastdb!\n"
raise BlastToolsError(err)
if not quiet:
print "DONE."
def localBlast(query_fasta,out_file,db,e_value_cutoff=10.0,filter=False,
hitlist_size=500,gapcosts=(11,1),max_query_size=10,num_threads=2,
force=False,quiet=False):
"""
Perform a blast using query against the subject database, generating a blast
xml file.
query_fasta: a list of sequences in fasta format.
db: the BLAST database file
out_root: the suffix for the blast output file
e_value_cutoff: e value cutoff
filter: apply a complexity filter mask
hitlist_size: number of hits to grab for each input sequence
gap_cost: tuple with (opening cost, extension cost)
max_query_size: break the query into sets of sequences of this size and
combine results locally.
force: True/False. Overwrite existing blast file. If False, the program
throws a notice that an old file is being used rather than re-
BLASTING.
quiet: Don't print status-y stuff.
"""
tmp_in_file = "tmp_fasta_for_blast"
tmp_out_file = "tmp_blast_output"
if os.path.exists(out_file):
if force:
if not quiet:
print "Deleting existing blast file (%s)!" % out_file
os.remove(out_file)
else:
if not quiet:
print "%s already exists. Not performing blast." % out_file
return
# Go through a set of query requests no larger than max_query_size
for counter in range(0,len(query_fasta),max_query_size):
f = open(tmp_in_file,'w')
f.write("".join(query_fasta[counter:counter+max_query_size]))
f.close()
count = len(query_fasta[counter:counter+max_query_size])
if not quiet:
print "BLASTing %i sequences against the local %s database" % (count,db)
print "e_value: %.4e" % e_value_cutoff
print "filter low complexity: %r" % filter
print "num hits: %i" % hitlist_size
print "gap costs: %i %i" % gapcosts
print "num threads: %i" % num_threads
io_cmd = "blastp -query %s -db %s -outfmt 5 -out %s -num_threads %i" % \
(tmp_in_file,db,tmp_out_file,num_threads)
blast_cmd = "-evalue %f -gapopen %i -gapextend %i -soft_masking %s" % \
(e_value_cutoff,gapcosts[0],gapcosts[1],filter)
total_cmd = "%s %s" % (io_cmd,blast_cmd)
args = shlex.split(total_cmd)
run = subprocess.Popen(args,stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdoutdata, stderrdata = run.communicate()
if run.returncode != 0:
print stdoutdata
err = "blastp failed!\n"
raise BlastToolsError(err)
f = open(out_file, "a")
g = open(tmp_out_file,"r")
f.write(g.read())
f.close()
g.close()
os.remove(tmp_out_file)
if not quiet:
print "DONE.\n"
def ncbiBlast(query_fasta,out_file,db="nr",entrez_query='(none)',
e_value_cutoff=10.0,filter=False,hitlist_size=1000,gapcosts=(11,1),
max_query_size=10,force=False,quiet=False):
"""
BLAST a set of sequences against an NCBI database, generating a blast xml
file.
query_fasta: a list of fasta-formatted sequences to blast
db: the BLAST database
entrez_query: additional queries for entrez (e.g. species limit)
e_value_cutoff: e value cutoff
filter: apply a complexity filter mask
hitlist_size: number of hits to grab for each input sequence
gap_cost: tuple with (opening cost, extension cost)
max_query_size: break the query into sets of sequences of this size and
combine results locally.
force: True/False. Overwrite existing blast file. If False, the program
throws a notice that an old file is being used rather than re-
BLASTING.
quiet: don't print status-y things
"""
# Set up proper BLAST input
if not filter:
filter = "none"
else:
filter = "on"
gapcosts = "%i %i" % gapcosts
if os.path.exists(out_file):
if force:
if not quiet:
print "Deleting existing blast file (%s)!" % out_file
os.remove(out_file)
else:
if not quiet:
print "%s already exists. Not performing blast." % out_file
return
# Go through a set of query requests no larger than max_query_size
for counter in range(0,len(query_fasta),max_query_size):
this_query = "".join(query_fasta[counter:counter+max_query_size])
count = len(query_fasta[counter:counter+max_query_size])
if not quiet:
print "BLASTing %i sequences against the NCBI %s database" % (count,db)
print "e_value: %.4e" % e_value_cutoff
print "filter low complexity: %r" % filter
print "num hits: %i" % hitlist_size
print "gap costs: %s" % gapcosts
print "entrez query: \'%s\'" % entrez_query
# Run BLAST and download input
result = NCBIWWW.qblast("blastp", db, this_query,
hitlist_size=hitlist_size,
entrez_query=entrez_query,
expect=e_value_cutoff,
gapcosts=gapcosts)
f = open(out_file, "a")
f.write(result.read())
f.close()
result.close()
if not quiet:
print "DONE.\n"
def cleanHomologs(homolog_list,ignore=("pdb","fragment","synthetic"),
dubious=("putative","hypothetical","unnamed","possible",
"predicted","unknown","uncharacterized","mutant","isoform"),
gi_exclusion=(),rank_offset=0,min_length=None,max_length=None,
quiet=False):
"""
Clean up sets of homologs output. Remove duplicate, "ignore" entries,
gi_exclusion. Rank sequence by quality (1 by default, 2 if the hit definition
had a word in "dubious"). You can also specify rank_offset, which allows
you to specify a priori that this blast is better or worse than some other
(e.g., make a nr blast 0, an EST tblastn 10).
"""
if not quiet:
print "Cleaning up sequences."
# Remove pure duplicates (with exactly the same accession number)
homolog_list = dict([(r.accession,r) for r in homolog_list]).values()
# Compile regular expressions
ignore_pattern = re.compile("|".join(ignore))
dubious_pattern = re.compile("|".join(dubious))
gi_removal = 0
short_removal = 0
long_removal = 0
ignore_removal = 0
clean_homologs = []
for r in homolog_list:
# Don't keep guys in gi_exclusion
if r.accession in gi_exclusion:
gi_removal += 1
continue
# Remove short and long sequences
if r.length != None:
if min_length != None and r.length < min_length:
short_removal += 1
continue
if max_length != None and r.length > max_length:
long_removal += 1
continue
# Sanitize names (remove \t, replace with " ")
r.definition = re.sub("\t"," ",r.definition)
# Does one of the ignore entries occur on this line?
tmp_definition = r.definition.lower()
if ignore_pattern.search(tmp_definition) != None:
ignore_removal += 1
continue
# Does one of the dubious entries occur on this line?
if dubious_pattern.search(tmp_definition) != None:
r.rank = 2 + rank_offset
else:
r.rank = 1 + rank_offset
clean_homologs.append(r)
num_rank_0 = len([h for h in clean_homologs if h.rank == 0])
num_rank_1 = len([h for h in clean_homologs if h.rank == 1])
num_rank_2 = len([h for h in clean_homologs if h.rank == 2])
if not quiet:
print "Kept %i of %i hits." % (len(clean_homologs),len(homolog_list))
print "gi exclusion: %i, ignored: %i, short %i, long: %i" % \
(gi_removal,ignore_removal,short_removal,long_removal)
print "Class 0: %i, Class 1: %i, Class 2: %i" % (num_rank_0,num_rank_1,
num_rank_2)
print ""
return clean_homologs
# Check the program environment when module is loaded
print "Loading blastTools version %s" % __version__
failed_programs = environmentCheck()
if len(failed_programs) != 0:
err = "Some required programs are not in the path!\n"
err += "Please make sure that these program(s) are available:\n"
err += " \n".join(failed_programs)
raise BlastToolsError(err)
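# Hedged usage sketch (added, not part of the original module): file names
# and cutoffs are hypothetical placeholders, and only functions defined
# above are called, following their docstring signatures.
if __name__ == '__main__':
    # 1. Load seed sequences previously downloaded from NCBI in fasta/xml format
    seeds = parseFastaXML("seed_sequences.xml")
    # 2. BLAST the seeds against the NCBI nr database
    seed_fasta = [h.formatFasta() for h in seeds]
    ncbiBlast(seed_fasta, "seed_blast.xml", db="nr", e_value_cutoff=1e-10)
    # 3. Pull hit identifiers out of the BLAST output and download them
    hits = parseBlastXML("seed_blast.xml", tag_list=("Hit_def", "Hit_id"))
    accessions = [h[1] for h in hits]
    downloadSequences(accessions, "hit_sequences.xml")
    # 4. Clean up the hits and remove redundancy
    homologs = parseFastaXML("hit_sequences.xml")
    homologs = cleanHomologs(homologs, min_length=100, max_length=600)
    homologs = runCdhit(homologs, redund_cutoff=0.99)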
|
|
import struct
class Instruction():
"""Base class for all 6502 instructions"""
def __init__(self, core):
if not hasattr(self, 'is_branch'):
self.is_branch = False
self.core = core
self.opcode = core.memory[core.pc]
try:
self.init_args()
except AttributeError:
pass
def get_arg(self):
"""gets overwritten by an addressmode mixin
will return an argument as a function of the core
state and the addressmode of the operation"""
def set_arg(self, val):
"""gets overwritten by an addressmode mixin
will set a value in memory as a function of the core state
and the addressmode of the operation"""
def __call__(self):
"""implemented by derived classes"""
def description(self):
"""return a debug string describing this instruction"""
return "no description"
class Branch():
is_branch = True
class ADC(Instruction):
"""Add Memory to Accumulator with Carry"""
instruction_name = "ADC"
def __call__(self):
core = self.core
arg = self.get_arg()
core.acc = core.add(arg)
def description(self):
return "add {0} ({1}) to accumulator {2}".format(self.get_arg(), self.addressmode_info(), self.core.acc)
class AND(Instruction):
"""AND Memory with Accumulator"""
instruction_name = "AND"
def __call__(self):
core = self.core
arg = self.get_arg()
core.acc &= arg
def description(self):
return "AND {0} ({1}) with accumulator {2}".format(self.get_arg(), self.addressmode_info(), self.core.acc)
class ASL(Instruction):
"""Shift Left One Bit (Memory or Accumulator)"""
instruction_name = "ASL"
def __call__(self):
core = self.core
val = self.get_arg()
core.status.c = bool(val >> 7 & 0x01)
self.set_arg(core, val << 1)
def description(self):
return "Shift left one bit {0} ({1})".format(self.get_arg(), self.addressmode_info(), self.core.acc)
class BCC(Instruction, Branch):
"""Branch on Carry Clear"""
instruction_name = "BCC"
def __call__(self):
core = self.core
if not core.status.c:
core.pc = self.get_arg()
else:
core.pc += self.num_bytes
def description(self):
return "Branch to {0} ({1}) on Carry Clear (carry = {2})".format(self.get_arg(), self.addressmode_info(), self.core.status.c)
class BCS(Instruction, Branch):
"""Branch on Carry Set"""
instruction_name = "BCS"
def __call__(self):
core = self.core
if core.status.c:
core.pc = self.get_arg()
else:
core.pc += self.num_bytes
def description(self):
return "Branch to {0} ({1}) on Carry Set (carry = {2})".format(self.get_arg(), self.addressmode_info(), self.core.status.c)
class BEQ(Instruction, Branch):
"""Branch on Result Zero"""
instruction_name = "BEQ"
def __call__(self):
core = self.core
if core.status.z:
core.pc = self.get_arg()
else:
core.pc += self.num_bytes
def description(self):
return "Branch to {0} ({1}) on Result Zero (resultzero = {2})".format(self.get_arg(), self.addressmode_info(), self.core.status.z)
class BIT(Instruction):
"""Test Bits in Memory with Accumulator"""
instruction_name = "BIT"
def __call__(self):
core = self.core
val = core.acc & self.get_arg()
core.status.z = not val
core.status.v = bool(val >> 6 & 0x01)
core.status.s = bool(val >> 7 & 0x01)
def description(self):
return "AND accumulator with {0} ({1}), set zerobit, overflow, sign".format(self.get_arg(), self.addressmode_info())
class BMI(Instruction, Branch):
"""Branch on Result Minus"""
instruction_name = "BMI"
def __call__(self):
if self.core.status.s:
self.core.pc = self.get_arg()
else:
self.core.pc += self.num_bytes
def description(self):
return "Branch to {0} ({1}) on Result Minux (resultminus = {2})".format(self.get_arg(), self.addressmode_info(), self.core.status.s)
class BNE(Instruction, Branch):
"""Branch on Result not Zero"""
instruction_name = "BNE"
def __call__(self):
if not self.core.status.z:
self.core.pc = self.get_arg()
else:
self.core.pc += self.num_bytes
def description(self):
return "Branch to {0} ({1}) on Result Not Zero (resultzero = {2})".format(self.get_arg(), self.addressmode_info(), self.core.status.z)
class BPL(Instruction, Branch):
"""Branch on Result Plus"""
instruction_name = "BPL"
def __call__(self):
if not self.core.status.s:
self.core.pc = self.get_arg()
else:
self.core.pc += self.num_bytes
def description(self):
return "Branch to {0} ({1}) on Result Positive (resultsign = {2})".format(self.get_arg(), self.addressmode_info(), self.core.status.s)
class BRK(Instruction):
"""Force Break"""
instruction_name = "BRK"
def __call__(self):
# push the return address and the processor status (6502 BRK semantics)
self.core.stack.push(self.core.pc + 2)
self.core.stack.push(self.core.status)
self.core.status.i = True
def description(self):
return "Force Break"
class BVC(Instruction, Branch):
"""Branch on Overflow Clear"""
def __call__(self):
if not self.core.status.v:
self.core.pc = self.get_arg()
else:
self.core.pc += self.num_bytes
class BVS(Instruction, Branch):
"""Branch on Overflow Set"""
def __call__(self):
if self.core.status.v:
self.core.pc = self.get_arg()
else:
self.core.pc += self.num_bytes
class CLC(Instruction):
"""Clear Carry Flag"""
def __call__(self):
self.core.status.c = False
class CLD(Instruction):
"""Clear Decimal Mode"""
def __call__(self):
self.core.status.d = False
class CLI(Instruction):
"""Clear interrupt Disable Bit"""
def __call__(self):
self.core.status.i = False
class CLV(Instruction):
"""Clear Overflow Flag"""
def __call__(self):
self.core.status.v = False
class CMP(Instruction):
"""Compare Memory and Accumulator"""
def __call__(self):
val = self.core.sub(self.core.acc, self.get_arg(), False)
self.core.update_zero_neg(val)
class CPX(Instruction):
"""Compare Memory and Index X"""
def __call__(self):
val = self.core.sub(self.core.x, self.get_arg(), False)
self.core.update_zero_neg(val)
class CPY(Instruction):
"""Compare Memory and Index Y"""
def __call__(self):
val = self.core.sub(self.core.y, self.get_arg(), False)
self.core.update_zero_neg(val)
class DEC(Instruction):
"""Decrement Memory by One"""
def __call__(self):
val = self.get_arg() - 1
self.core.set_zero_neg(val)
self.set_arg(self.core, val)
class DEX(Instruction):
"""Decrement Index X by One"""
def __call__(self):
self.core.x -= 1
class DEY(Instruction):
"""Decrement Index Y by One"""
def __call__(self):
self.core.y -= 1
class EOR(Instruction):
"""Exclusive-Or Memory with Accumulator"""
def __call__(self):
self.core.acc = self.get_arg() ^ self.core.acc
class INC(Instruction):
"""Increment Memory by One"""
def __call__(self):
val = self.get_arg() + 1
self.core.set_zero_neg(val)
self.set_arg(self.core, val)
class INX(Instruction):
"""Increment Index X by One"""
def __call__(self):
self.core.x += 1
class INY(Instruction):
"""Increment Index Y by One"""
def __call__(self):
self.core.y += 1
# TODO: not sure on what this is supposed to do
class JMP(Instruction, Branch):
"""Jump to New Location"""
def __call__(self):
self.core.pc = self.get_arg()
class JSR(Instruction, Branch):
"""JSR Jump to new location saving return address"""
def __call__(self):
self.core.stack.push(self.core.pc + 2)
self.core.pc = self.get_arg()
class LDA(Instruction):
"""LDA Load accumulator with memory"""
def __call__(self):
self.core.acc = self.get_arg()
class LDX(Instruction):
"""LDX Load index X with memory"""
def __call__(self):
self.core.x = self.get_arg()
class LDY(Instruction):
"""LDY Load index Y with memory"""
def __call__(self):
self.core.y = self.get_arg()
class LSR(Instruction):
"""LSR Shift right one bit (memory or accumulator)"""
def __call__(self):
val = self.get_arg()
self.core.status.c = val & 0x01
val >>= 1
self.set_arg(val)
self.core.update_zero_neg(val)
class NOP(Instruction):
"""NOP No operation"""
def __call__(self):
pass
class ORA(Instruction):
"""ORA "OR" memory with accumulator"""
def __call__(self):
self.core.acc = self.core.acc | self.get_arg()
class PHA(Instruction):
"""PHA Push accumulator on stack"""
def __call__(self):
self.core.stack.push(self.core.acc)
class PHP(Instruction):
"""PHP Push processor status on stack"""
def __call__(self):
self.core.stack.push(self.core.status)
class PLA(Instruction):
"""PLA Pull accumulator from stack"""
def __call__(self):
# we don't want to affect the status registers
self.core._acc = self.core.stack.pop()
class PLP(Instruction):
"""PLP Pull processor status from stack"""
def __call__(self):
self.core.status = self.core.stack.pop()
class ROL(Instruction):
"""ROL Rotate one bit left (memory)"""
def __call__(self):
val = self.get_arg()
c = val >> 7 & 0x01
val <<= 1
val |= int(self.core.status.c)
self.set_arg(self.core, val)
self.core.status.c = bool(c)
self.core.set_zero_neg(val)
class ROR(Instruction):
"""ROR Rotate one bit right (memory)"""
def __call__(self):
val = self.get_arg()
c = val & 0x01
val >>= 1
val |= (int(self.core.status.c) << 7)
self.set_arg(self.core, val)
self.core.status.c = bool(c)
self.core.set_zero_neg(val)
class RTI(Instruction, Branch):
"""RTI Return from interrupt"""
def __call__(self):
self.core.status = self.core.stack.pop()
self.core.pc = self.core.stack.pop()
class RTS(Instruction, Branch):
"""RTS Return from subroutine"""
def __call__(self):
self.core.pc = self.core.stack.pop() + 1
class SBC(Instruction):
"""SBC Subtract memory from accumulator with borrow"""
def __call__(self):
self.core.acc = self.core.sub(self.core.acc, self.get_arg())
class SEC(Instruction):
"""SEC Set carry flag"""
def __call__(self):
self.core.status.c = True
class SED(Instruction):
"""SED Set decimal mode"""
def __call__(self):
self.core.status.d = True
class SEI(Instruction):
"""SEI Set interrupt disable status"""
def __call__(self):
self.core.status.i = True
class STA(Instruction):
"""STA Store accumulator in memory"""
def __call__(self):
self.set_arg(self.core, self.core.acc)
class STX(Instruction):
"""STX Store index X in memory"""
def __call__(self):
self.set_arg(self.core, self.core.x)
class STY(Instruction):
"""STY Store index Y in memory"""
def __call__(self):
self.set_arg(self.core, self.core.y)
class TAX(Instruction):
"""TAX Transfer accumulator to index X"""
def __call__(self):
self.core.x = self.core.acc
class TAY(Instruction):
"""TAY Transfer accumulator to index Y"""
def __call__(self):
self.core.y = self.core.acc
class TSX(Instruction):
"""TSX Transfer stack pointer to index X"""
def __call__(self):
self.core.x = self.core.stack.sp
class TXA(Instruction):
"""TXA Transfer index X to accumulator"""
def __call__(self):
self.core.acc = self.core.x
class TXS(Instruction):
"""TXS Transfer index X to stack pointer"""
def __call__(self):
self.core.stack.sp = self.core.x
class TYA(Instruction):
"""TYA Transfer index Y to accumulator"""
def __call__(self):
self.core.acc = self.core.y
|
|
import itertools
import matplotlib.pyplot as plt
import numpy as np
from scipy.io import wavfile
from scipy.signal import detrend, lfilter, bilinear, spectrogram, filtfilt, resample, fftconvolve
import acoustics
from acoustics.standards.iso_tr_25417_2007 import REFERENCE_PRESSURE
from acoustics.standards.iec_61672_1_2013 import WEIGHTING_SYSTEMS
from acoustics.standards.iec_61672_1_2013 import (NOMINAL_OCTAVE_CENTER_FREQUENCIES,
NOMINAL_THIRD_OCTAVE_CENTER_FREQUENCIES)
class Signal(np.ndarray):
"""A signal consisting of samples (array) and a sample frequency (float).
"""
def __new__(cls, data, fs):
obj = np.asarray(data).view(cls)
obj.fs = fs
return obj
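# Added illustration (not in the original): a Signal is an ndarray
# subclass built from any array-like plus a sample frequency, and it
# keeps `fs` through array operations, e.g. (names hypothetical):
#     fs = 44100
#     t = np.arange(fs) / fs
#     s = Signal(np.sin(2.0 * np.pi * 1000.0 * t), fs)  # 1 s of a 1 kHz tone
#     (s * 0.5).fs == s.fs                              # fs is preserved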
def __array_prepare__(self, array, context=None):
try:
a = context[1][0]
b = context[1][1]
except IndexError:
return array
if hasattr(a, 'fs') and hasattr(b, 'fs'):
if a.fs == b.fs:
return array
else:
raise ValueError("Sample frequencies do not match.")
else:
return array
def __array_wrap__(self, out_arr, context=None):
return np.ndarray.__array_wrap__(self, out_arr, context)
def __array_finalize__(self, obj):
# see InfoArray.__array_finalize__ for comments
if obj is None:
return
self.fs = getattr(obj, 'fs', None)
def __reduce__(self):
# Get the parent's __reduce__ tuple
pickled_state = super(Signal, self).__reduce__()
# Create our own tuple to pass to __setstate__
new_state = pickled_state[2] + (self.fs, )
# Return a tuple that replaces the parent's __setstate__ tuple with our own
return (pickled_state[0], pickled_state[1], new_state)
def __setstate__(self, state):
self.fs = state[-1] # Set the info attribute
# Call the parent's __setstate__ with the other tuple elements.
super(Signal, self).__setstate__(state[0:-1])
def __repr__(self):
return "Signal({})".format(str(self))
def _construct(self, x):
"""Construct signal like x."""
return Signal(x, self.fs)
@property
def samples(self):
"""Amount of samples in signal."""
return self.shape[-1]
@property
def channels(self):
"""Amount of channels.
"""
if self.ndim > 1:
return self.shape[-2]
else:
return 1
@property
def duration(self):
"""Duration of signal in seconds.
"""
return float(self.samples / self.fs)
@property
def values(self):
"""Return the values of this signal as an instance of :class:`np.ndarray`."""
return np.array(self)
def calibrate_to(self, decibel, inplace=False):
"""Calibrate signal to value `decibel`.
:param decibel: Value to calibrate to.
:param inplace: Whether to perform inplace or not.
:returns: Calibrated signal.
:rtype: :class:`Signal`
Values of `decibel` are broadcasted. To set a value per channel, use `decibel[...,None]`.
"""
decibel = decibel * np.ones(self.shape)
gain = decibel - self.leq()[..., None]
return self.gain(gain, inplace=inplace)
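# Added illustration (not in the original): for a multichannel signal a
# per-channel target level can be passed by adding a trailing axis, as
# the docstring above suggests, e.g. (values hypothetical):
#     s = Signal(np.random.randn(2, 44100), fs=44100)
#     s.calibrate_to(np.array([94.0, 60.0])[..., None])  # one level per channel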
def calibrate_with(self, other, decibel, inplace=False):
"""Calibrate signal with other signal.
:param other: Other signal/array.
:param decibel: Signal level of `other`.
:param inplace: Whether to perform inplace or not.
:returns: calibrated signal.
:rtype: :class:`Signal`
"""
if not isinstance(other, Signal):
other = Signal(other, self.fs)
gain = decibel - other.leq()
return self.gain(gain, inplace=inplace)
def decimate(self, factor, zero_phase=False, ftype='iir', order=None):
"""Decimate signal by integer `factor`. Before downsampling a low-pass filter is applied.
:param factor: Downsampling factor.
:param zero_phase: Prevent phase shift by filtering with ``filtfilt`` instead of ``lfilter``.
:param ftype: Filter type.
:param order: Filter order.
:returns: Decimated signal.
:rtype: :class:`Signal`
.. seealso:: :func:`scipy.signal.decimate`
.. seealso:: :meth:`resample`
"""
return Signal(
acoustics.signal.decimate(x=self, q=factor, n=order, ftype=ftype, zero_phase=zero_phase), self.fs / factor)
def resample(self, nsamples, times=None, axis=-1, window=None):
"""Resample signal.
:param nsamples: New amount of samples.
:param times: Times corresponding to samples.
:param axis: Axis.
:param window: Window.
.. seealso:: :func:`scipy.signal.resample`
.. seealso:: :meth:`decimate`
You might want to low-pass filter this signal before resampling.
"""
return Signal(resample(self, nsamples, times, axis, window), nsamples / self.samples * self.fs)
def upsample(self, factor, axis=-1):
"""Upsample signal with integer factor.
:param factor: Upsample factor.
:param axis: Axis.
.. seealso:: :meth:`resample`
"""
return self.resample(int(self.samples * factor), axis=axis)
def gain(self, decibel, inplace=False):
"""Apply gain of `decibel` decibels.
:param decibel: Decibels
:param inplace: In place
:returns: Amplified signal.
:rtype: :class:`Signal`
"""
factor = 10.0**(decibel / 20.0)
if inplace:
self *= factor
return self
else:
return self * factor
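# Added note (not in the original): the factor above is 10**(dB / 20), so
# a gain of +20 dB scales the samples by 10.0 and -6 dB by roughly 0.5:
#     louder = s.gain(20.0)   # samples multiplied by 10.0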
def pick(self, start=0.0, stop=None):
"""Get signal from start time to stop time.
:param start: Start time.
:type start: float
:param stop: End time.
:type stop: float
:returns: Selected part of the signal.
:rtype: :class:`Signal`
"""
if start is not None:
start = int(np.floor(start * self.fs))
if stop is not None:
stop = int(np.floor(stop * self.fs))
return self[..., start:stop]
def times(self):
"""Time vector.
:returns: A vector with a timestamp for each sample.
:rtype: :class:`np.ndarray`
"""
return np.arange(0, self.samples) / self.fs
def energy(self):
"""Signal energy.
:returns: Total energy per channel.
:rtype: :class:`np.ndarray`
.. math:: E = \\sum_{n=0}^{N-1} |x_n|^2
"""
return float((self * self).sum())
def power(self):
"""Signal power.
.. math:: P = \\frac{1}{N} \\sum_{n=0}^{N-1} |x_n|^2
"""
return self.energy() / len(self)
def ms(self):
"""Mean value squared of signal.
.. seealso:: :func:`acoustics.signal.ms`
"""
return acoustics.signal.ms(self)
def rms(self):
"""Root mean squared of signal.
.. seealso:: :func:`acoustics.signal.rms`
"""
return acoustics.signal.rms(self)
#return np.sqrt(self.power())
def weigh(self, weighting='A', zero_phase=False):
"""Apply frequency-weighting. By default 'A'-weighting is applied.
:param weighting: Frequency-weighting filter to apply.
Valid options are 'A', 'C' and 'Z'. Default weighting is 'A'.
:returns: Weighted signal.
:rtype: :class:`Signal`.
By default the weighting filter is applied using
:func:`scipy.signal.lfilter` causing a frequency-dependent delay. In case a
delay is undesired, the filter can be applied using :func:`scipy.signal.filtfilt`
by setting `zero_phase=True`.
"""
num, den = WEIGHTING_SYSTEMS[weighting]()
b, a = bilinear(num, den, self.fs)
func = filtfilt if zero_phase else lfilter
return self._construct(func(b, a, self))
def correlate(self, other=None, mode='full'):
"""Correlate signal with `other` signal. In case `other==None` this
method returns the autocorrelation.
:param other: Other signal.
:param mode: Mode.
.. seealso:: :func:`np.correlate`, :func:`scipy.signal.fftconvolve`
"""
if other is None:
other = self
if self.fs != other.fs:
raise ValueError("Cannot correlate. Sample frequencies are not the same.")
if self.channels > 1 or other.channels > 1:
raise ValueError("Cannot correlate. Not supported for multichannel signals.")
return self._construct(fftconvolve(self, other[::-1], mode=mode))
def amplitude_envelope(self):
"""Amplitude envelope.
:returns: Amplitude envelope of signal.
:rtype: :class:`Signal`
.. seealso:: :func:`acoustics.signal.amplitude_envelope`
"""
return self._construct(acoustics.signal.amplitude_envelope(self, self.fs))
def instantaneous_frequency(self):
"""Instantaneous frequency.
:returns: Instantaneous frequency of signal.
:rtype: :class:`Signal`
.. seealso:: :func:`acoustics.signal.instantaneous_frequency`
"""
return self._construct(acoustics.signal.instantaneous_frequency(self, self.fs))
def instantaneous_phase(self):
"""Instantaneous phase.
:returns: Instantaneous phase of signal.
:rtype: :class:`Signal`
.. seealso:: :func:`acoustics.signal.instantaneous_phase`
"""
return self._construct(acoustics.signal.instantaneous_phase(self, self.fs))
def detrend(self, **kwargs):
"""Detrend signal.
:returns: Detrended version of signal.
:rtype: :class:`Signal`
.. seealso:: :func:`scipy.signal.detrend`
"""
return self._construct(detrend(self, **kwargs))
def unwrap(self):
"""Unwrap signal in case the signal represents wrapped phase.
:returns: Unwrapped signal.
:rtype: :class:`Signal`
.. seealso:: :func:`np.unwrap`
"""
return self._construct(np.unwrap(self))
def complex_cepstrum(self, N=None):
"""Complex cepstrum.
:param N: Amount of bins.
:returns: Quefrency, complex cepstrum and delay in amount of samples.
.. seealso:: :func:`acoustics.cepstrum.complex_cepstrum`
"""
if N is not None:
times = np.linspace(0.0, self.duration, N, endpoint=False)
else:
times = self.times()
cepstrum, ndelay = acoustics.cepstrum.complex_cepstrum(self, n=N)
return times, cepstrum, ndelay
def real_cepstrum(self, N=None):
"""Real cepstrum.
:param N: Amount of bins.
:returns: Quefrency and real cepstrum.
.. seealso:: :func:`acoustics.cepstrum.real_cepstrum`
"""
if N is not None:
times = np.linspace(0.0, self.duration, N, endpoint=False)
else:
times = self.times()
return times, acoustics.cepstrum.real_cepstrum(self, n=N)
def power_spectrum(self, N=None):
"""Power spectrum.
:param N: Amount of bins.
.. seealso:: :func:`acoustics.signal.power_spectrum`
"""
return acoustics.signal.power_spectrum(self, self.fs, N=N)
def angle_spectrum(self, N=None):
"""Phase angle spectrum. Wrapped.
:param N: amount of bins.
.. seealso::
:func:`acoustics.signal.angle_spectrum`, :func:`acoustics.signal.phase_spectrum`
and :meth:`phase_spectrum`.
"""
return acoustics.signal.angle_spectrum(self, self.fs, N=N)
def phase_spectrum(self, N=None):
"""Phase spectrum. Unwrapped.
:param N: Amount of bins.
.. seealso::
:func:`acoustics.signal.phase_spectrum`, :func:`acoustics.signal.angle_spectrum`
and :meth:`angle_spectrum`.
"""
return acoustics.signal.phase_spectrum(self, self.fs, N=N)
def peak(self, axis=-1):
"""Peak sound pressure.
:param axis: Axis.
.. seealso::
:func:`acoustic.standards.iso_tr_25417_2007.peak_sound_pressure`
"""
return acoustics.standards.iso_tr_25417_2007.peak_sound_pressure(self, axis=axis)
def peak_level(self, axis=-1):
"""Peak sound pressure level.
:param axis: Axis.
.. seealso::
:func:`acoustics.standards.iso_tr_25417_2007.peak_sound_pressure_level`
"""
return acoustics.standards.iso_tr_25417_2007.peak_sound_pressure_level(self, axis=axis)
def min(self, axis=-1):
"""Return the minimum along a given axis.
Refer to `np.amin` for full documentation.
"""
return np.ndarray.min(self, axis=axis)
def max(self, axis=-1):
"""Return the minimum along a given axis.
Refer to `np.amax` for full documentation.
"""
return np.ndarray.max(self, axis=axis)
def max_level(self, axis=-1):
"""Maximum sound pressure level.
:param axis: Axis.
.. seealso:: :func:`acoustics.standards.iso_tr_25417_2007.max_sound_pressure_level`
"""
return acoustics.standards.iso_tr_25417_2007.max_sound_pressure_level(self, axis=axis)
def sound_exposure(self, axis=-1):
"""Sound exposure.
:param axis: Axis.
.. seealso:: :func:`acoustics.standards.iso_tr_25417_2007.sound_exposure`
"""
return acoustics.standards.iso_tr_25417_2007.sound_exposure(self, self.fs, axis=axis)
def sound_exposure_level(self, axis=-1):
"""Sound exposure level.
:param axis: Axis.
.. seealso:: :func:`acoustics.standards.iso_tr_25417_2007.sound_exposure_level`
"""
return acoustics.standards.iso_tr_25417_2007.sound_exposure_level(self, self.fs, axis=axis)
def plot_complex_cepstrum(self, N=None, **kwargs):
"""Plot complex cepstrum of signal.
Valid kwargs:
* xscale
* yscale
* xlim
* ylim
* frequency: Boolean indicating whether the x-axis should show time in seconds or quefrency
* xlabel_frequency: Label in case frequency is shown.
"""
params = {
'xscale': 'linear',
'yscale': 'linear',
'xlabel': "$t$ in s",
'ylabel': "$C$",
'title': 'Complex cepstrum',
'frequency': False,
'xlabel_frequency': "$f$ in Hz",
}
params.update(kwargs)
t, ceps, _ = self.complex_cepstrum(N=N)
if params['frequency']:
t = 1. / t
params['xlabel'] = params['xlabel_frequency']
t = t[::-1]
ceps = ceps[::-1]
return _base_plot(t, ceps, params)
def plot_real_cepstrum(self, N=None, **kwargs):
"""Plot real cepstrum of signal.
Valid kwargs:
* xscale
* yscale
* xlim
* ylim
* frequency: Boolean indicating whether the x-axis should show time in seconds or quefrency
* xlabel_frequency: Label in case frequency is shown.
"""
params = {
'xscale': 'linear',
'yscale': 'linear',
'xlabel': "$t$ in s",
'ylabel': "$C$",
'title': 'Real cepstrum',
'frequency': False,
'xlabel_frequency': "$f$ in Hz",
}
params.update(kwargs)
t, ceps = self.real_cepstrum(N=N)
if params['frequency']:
t = 1. / t
params['xlabel'] = params['xlabel_frequency']
t = t[::-1]
ceps = ceps[::-1]
return _base_plot(t, ceps, params)
def plot_power_spectrum(self, N=None, **kwargs): #filename=None, scale='log'):
"""Plot spectrum of signal.
Valid kwargs:
* xscale
* yscale
* xlim
* ylim
* reference: Reference power
.. seealso:: :meth:`power_spectrum`
"""
params = {
'xscale': 'log',
'yscale': 'linear',
'xlabel': "$f$ in Hz",
'ylabel': "$L_{p}$ in dB",
'title': 'SPL',
'reference': REFERENCE_PRESSURE**2.0,
}
params.update(kwargs)
f, o = self.power_spectrum(N=N)
return _base_plot(f, 10.0 * np.log10(o / params['reference']), params)
def plot_angle_spectrum(self, N=None, **kwargs):
"""Plot phase angle spectrum of signal. Wrapped.
Valid kwargs:
* xscale
* yscale
* xlim
* ylim
* reference: Reference power
"""
params = {
'xscale': 'linear',
'yscale': 'linear',
'xlabel': "$f$ in Hz",
'ylabel': r"$\angle \phi$",
'title': 'Phase response (wrapped)',
}
params.update(kwargs)
f, o = self.angle_spectrum(N=N)
return _base_plot(f, o, params)
def plot_phase_spectrum(self, N=None, **kwargs):
"""Plot phase spectrum of signal. Unwrapped.
Valid kwargs:
* xscale
* yscale
* xlim
* ylim
* reference: Reference power
"""
params = {
'xscale': 'linear',
'yscale': 'linear',
'xlabel': "$f$ in Hz",
'ylabel': r"$\angle \phi$",
'title': 'Phase response (unwrapped)',
}
params.update(kwargs)
f, o = self.phase_spectrum(N=N)
return _base_plot(f, o, params)
def spectrogram(self, **kwargs):
"""Spectrogram of signal.
:returns: Spectrogram.
See :func:`scipy.signal.spectrogram`. Some of the default values have been changed.
The generated spectrogram consists by default of complex values.
"""
params = {
'nfft': 4096,
'noverlap': 128,
'mode': 'complex',
}
params.update(kwargs)
# scipy.signal.spectrogram returns (frequencies, times, Sxx)
f, t, Sxx = spectrogram(self, fs=self.fs, **params)
return f, t, Sxx
def plot_spectrogram(self, **kwargs):
"""
Plot spectrogram of the signal.
Valid kwargs:
* xlim
* ylim
* clim
.. note:: This method only works for a single channel.
"""
# To do, use :meth:`spectrogram`.
params = {
'xlim': None,
'ylim': None,
'clim': None,
'NFFT': 4096,
'noverlap': 128,
'title': 'Spectrogram',
'xlabel': '$t$ in s',
'ylabel': '$f$ in Hz',
'clabel': 'SPL in dB',
'colorbar': True,
}
params.update(kwargs)
if self.channels > 1:
raise ValueError("Cannot plot spectrogram of multichannel signal. Please select a single channel.")
# Check if an axes object is passed in. Otherwise, create one.
ax0 = params.get('ax', plt.figure().add_subplot(111))
ax0.set_title(params['title'])
data = np.squeeze(self)
try:
_, _, _, im = ax0.specgram(data, Fs=self.fs, noverlap=params['noverlap'], NFFT=params['NFFT'],
mode='magnitude', scale_by_freq=False)
except AttributeError:
raise NotImplementedError(
"Your version of matplotlib is incompatible due to lack of support of the mode keyword argument to matplotlib.mlab.specgram."
)
if params['colorbar']:
cb = ax0.get_figure().colorbar(mappable=im)
cb.set_label(params['clabel'])
ax0.set_xlim(params['xlim'])
ax0.set_ylim(params['ylim'])
im.set_clim(params['clim'])
ax0.set_xlabel(params['xlabel'])
ax0.set_ylabel(params['ylabel'])
return ax0
def levels(self, time=0.125, method='average'):
"""Calculate sound pressure level as function of time.
:param time: Averaging time or integration time constant. Default value is 0.125 corresponding to FAST.
:param method: Use time `average` or time `weighting`. Default option is `average`.
:returns: sound pressure level as function of time.
.. seealso:: :func:`acoustics.standards.iec_61672_1_2013.time_averaged_sound_level`
.. seealso:: :func:`acoustics.standards.iec_61672_1_2013.time_weighted_sound_level`
"""
if method == 'average':
return acoustics.standards.iec_61672_1_2013.time_averaged_sound_level(self.values, self.fs, time)
elif method == 'weighting':
return acoustics.standards.iec_61672_1_2013.time_weighted_sound_level(self.values, self.fs, time)
else:
raise ValueError("Invalid method")
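    # For example, ``t, L = signal.levels(time=0.125, method='average')`` returns times and the
    # time-averaged SPL with a 0.125 s (FAST) averaging time, whereas ``method='weighting'``
    # applies exponential time weighting via the IEC 61672-1 helper instead.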
def leq(self):
"""Equivalent level. Single-value number.
.. seealso:: :func:`acoustics.standards.iso_tr_25417_2007.equivalent_sound_pressure_level`
"""
return acoustics.standards.iso_tr_25417_2007.equivalent_sound_pressure_level(self.values)
def plot_levels(self, **kwargs):
"""Plot sound pressure level as function of time.
.. seealso:: :meth:`levels`
"""
params = {
'xscale': 'linear',
'yscale': 'linear',
'xlabel': '$t$ in s',
'ylabel': '$L_{p,F}$ in dB',
'title': 'SPL',
'time': 0.125,
'method': 'average',
'labels': None,
}
params.update(kwargs)
t, L = self.levels(params['time'], params['method'])
L_masked = np.ma.masked_where(np.isinf(L), L)
return _base_plot(t, L_masked, params)
#def octave(self, frequency, fraction=1):
#"""Determine fractional-octave `fraction` at `frequency`.
#.. seealso:: :func:`acoustics.signal.fractional_octaves`
#"""
#return acoustics.signal.fractional_octaves(self, self.fs, frequency,
#frequency, fraction, False)[1]
def bandpass(self, lowcut, highcut, order=8, zero_phase=False):
"""Filter signal with band-pass filter.
        :param lowcut: Lower corner frequency.
        :param highcut: Upper corner frequency.
:param order: Filter order.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Band-pass filtered signal.
:rtype: :class:`Signal`.
.. seealso:: :func:`acoustics.signal.bandpass`
"""
return type(self)(acoustics.signal.bandpass(self, lowcut, highcut, self.fs, order=order, zero_phase=zero_phase),
self.fs)
def bandstop(self, lowcut, highcut, order=8, zero_phase=False):
"""Filter signal with band-stop filter.
        :param lowcut: Lower corner frequency.
        :param highcut: Upper corner frequency.
:param order: Filter order.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
        :returns: Band-stop filtered signal.
:rtype: :class:`Signal`.
.. seealso:: :func:`acoustics.signal.bandstop`
"""
return type(self)(acoustics.signal.bandstop(self, lowcut, highcut, self.fs, order=order, zero_phase=zero_phase),
self.fs)
def highpass(self, cutoff, order=4, zero_phase=False):
"""Filter signal with high-pass filter.
        :param cutoff: Corner frequency.
:param order: Filter order.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: High-pass filtered signal.
:rtype: :class:`Signal`.
.. seealso:: :func:`acoustics.signal.highpass`
"""
return type(self)(acoustics.signal.highpass(self, cutoff, self.fs, order=order, zero_phase=zero_phase), self.fs)
def lowpass(self, cutoff, order=4, zero_phase=False):
"""Filter signal with low-pass filter.
        :param cutoff: Corner frequency.
:param order: Filter order.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Low-pass filtered signal.
:rtype: :class:`Signal`.
.. seealso:: :func:`acoustics.signal.lowpass`
"""
return type(self)(acoustics.signal.lowpass(self, cutoff, self.fs, order=order, zero_phase=zero_phase), self.fs)
def octavepass(self, center, fraction, order=8, zero_phase=False):
"""Filter signal with fractional-octave band-pass filter.
:param center: Center frequency. Any value in the band will suffice.
:param fraction: Band designator.
:param order: Filter order.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Band-pass filtered signal.
:rtype: :class:`Signal`.
.. seealso:: :func:`acoustics.signal.octavepass`
"""
return type(self)(acoustics.signal.octavepass(self, center, self.fs, fraction=fraction, order=order,
zero_phase=zero_phase), self.fs)
def bandpass_frequencies(self, frequencies, order=8, purge=True, zero_phase=False):
"""Apply bandpass filters for frequencies.
:param frequencies: Band-pass filter frequencies.
:type frequencies: Instance of :class:`acoustics.signal.Frequencies`
:param order: Filter order.
:param purge: Discard bands of which the upper corner frequency is above the Nyquist frequency.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Frequencies and band-pass filtered signal.
.. seealso:: :func:`acoustics.signal.bandpass_frequencies`
"""
frequencies, filtered = acoustics.signal.bandpass_frequencies(self, self.fs, frequencies, order, purge,
zero_phase=zero_phase)
return frequencies, type(self)(filtered, self.fs)
def octaves(self, frequencies=NOMINAL_OCTAVE_CENTER_FREQUENCIES, order=8, purge=True, zero_phase=False):
"""Apply 1/1-octaves bandpass filters.
:param frequencies: Band-pass filter frequencies.
:type frequencies: :class:`np.ndarray` with (approximate) center-frequencies or an instance of :class:`acoustics.signal.Frequencies`
:param order: Filter order.
:param purge: Discard bands of which the upper corner frequency is above the Nyquist frequency.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Frequencies and band-pass filtered signal.
.. seealso:: :func:`acoustics.signal.bandpass_octaves`
"""
frequencies, octaves = acoustics.signal.bandpass_octaves(self, self.fs, frequencies, order, purge,
zero_phase=zero_phase)
return frequencies, type(self)(octaves, self.fs)
def third_octaves(self, frequencies=NOMINAL_THIRD_OCTAVE_CENTER_FREQUENCIES, order=8, purge=True, zero_phase=False):
"""Apply 1/3-octaves bandpass filters.
:param frequencies: Band-pass filter frequencies.
:type frequencies: :class:`np.ndarray` with (approximate) center-frequencies or an instance of :class:`acoustics.signal.Frequencies`
:param order: Filter order.
:param purge: Discard bands of which the upper corner frequency is above the Nyquist frequency.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Frequencies and band-pass filtered signal.
.. seealso:: :func:`acoustics.signal.bandpass_third_octaves`
"""
frequencies, octaves = acoustics.signal.bandpass_third_octaves(self, self.fs, frequencies, order, purge,
zero_phase=zero_phase)
return frequencies, type(self)(octaves, self.fs)
def fractional_octaves(self, frequencies=None, fraction=1, order=8, purge=True, zero_phase=False):
"""Apply 1/N-octaves bandpass filters.
:param frequencies: Band-pass filter frequencies.
:type frequencies: Instance of :class:`acoustics.signal.Frequencies`
:param fraction: Default band-designator of fractional-octaves.
:param order: Filter order.
:param purge: Discard bands of which the upper corner frequency is above the Nyquist frequency.
:param zero_phase: Prevent phase error by filtering in both directions (filtfilt).
:returns: Frequencies and band-pass filtered signal.
.. seealso:: :func:`acoustics.signal.bandpass_fractional_octaves`
"""
if frequencies is None:
frequencies = acoustics.signal.OctaveBand(fstart=NOMINAL_THIRD_OCTAVE_CENTER_FREQUENCIES[0],
fstop=self.fs / 2.0, fraction=fraction)
frequencies, octaves = acoustics.signal.bandpass_fractional_octaves(self, self.fs, frequencies, fraction, order,
purge, zero_phase=zero_phase)
return frequencies, type(self)(octaves, self.fs)
def plot_octaves(self, **kwargs):
"""Plot octaves.
.. seealso:: :meth:`octaves`
"""
params = {
'xscale': 'log',
'yscale': 'linear',
'xlabel': '$f$ in Hz',
'ylabel': '$L_{p}$ in dB',
'title': '1/1-Octaves SPL',
}
params.update(kwargs)
f, o = self.octaves()
return _base_plot(f.center, o.leq().T, params)
def plot_third_octaves(self, **kwargs):
"""Plot 1/3-octaves.
.. seealso:: :meth:`third_octaves`
"""
params = {
'xscale': 'log',
'yscale': 'linear',
'xlabel': '$f$ in Hz',
'ylabel': '$L_{p}$ in dB',
'title': '1/3-Octaves SPL',
}
params.update(kwargs)
f, o = self.third_octaves()
return _base_plot(f.center, o.leq().T, params)
def plot_fractional_octaves(self, frequencies=None, fraction=1, order=8, purge=True, zero_phase=False, **kwargs):
"""Plot fractional octaves.
"""
title = '1/{}-Octaves SPL'.format(fraction)
params = {
'xscale': 'log',
'yscale': 'linear',
'xlabel': '$f$ in Hz',
'ylabel': '$L_p$ in dB',
'title': title,
}
params.update(kwargs)
f, o = self.fractional_octaves(frequencies=frequencies, fraction=fraction, order=order, purge=purge,
zero_phase=zero_phase)
return _base_plot(f.center, o.leq().T, params)
def plot(self, **kwargs):
"""Plot signal as function of time. By default the entire signal is plotted.
        Valid kwargs:
        * xscale
        * yscale
        * xlim
        * ylim
"""
params = {
'xscale': 'linear',
'yscale': 'linear',
'xlabel': '$t$ in s',
'ylabel': '$x$ in -',
'title': 'Signal',
}
params.update(kwargs)
return _base_plot(self.times(), self, params)
#def plot_scalo(self, filename=None):
#"""
#Plot scalogram
#"""
#from scipy.signal import ricker, cwt
#wavelet = ricker
#widths = np.logspace(-1, 3.5, 10)
#x = cwt(self, wavelet, widths)
#interpolation = 'nearest'
#from matplotlib.ticker import LinearLocator, AutoLocator, MaxNLocator
#majorLocator = LinearLocator()
#majorLocator = MaxNLocator()
#fig = plt.figure()
#ax = fig.add_subplot(111)
#ax.set_title('Scaleogram')
##ax.set_xticks(np.arange(0, x.shape[1])*self.fs)
##ax.xaxis.set_major_locator(majorLocator)
##ax.imshow(10.0 * np.log10(x**2.0), interpolation=interpolation, aspect='auto', origin='lower')#, extent=[0, 1, 0, len(x)])
#ax.pcolormesh(np.arange(0.0, x.shape[1])/self.fs, widths, 10.0*np.log(x**2.0))
#if filename:
#fig.savefig(filename)
#else:
#return fig
#def plot_scaleogram(self, filename):
#"""
#Plot scaleogram
#"""
#import pywt
#wavelet = 'dmey'
#level = pywt.dwt_max_level(len(self), pywt.Wavelet(wavelet))
#print level
#level = 20
#order = 'freq'
#interpolation = 'nearest'
#wp = pywt.WaveletPacket(self, wavelet, 'sym', maxlevel=level)
#nodes = wp.get_level(level, order=order)
#labels = [n.path for n in nodes]
#values = np.abs(np.array([n.data for n in nodes], 'd'))
#fig = plt.figure()
#ax = fig.add_subplot(111)
#ax.set_title('Scaleogram')
#ax.imshow(values, interpolation=interpolation, aspect='auto', origin='lower', extent=[0, 1, 0, len(values)])
##ax.set_yticks(np.arange(0.5, len(labels) + 0.5))
##ax.set_yticklabels(labels)
#fig.savefig(filename)
def normalize(self, gap=6.0, inplace=False):
"""Normalize signal.
:param gap: Gap between maximum value and ceiling in decibel.
:param inplace: Normalize signal in place.
        The parameter `gap` can be understood as leaving `gap` decibels of headroom between the signal's peak and full scale.
By default a 6 decibel gap is used.
"""
factor = (np.abs(self).max() * 10.0**(gap/20.0))
if inplace:
self /= factor[..., None]
return self
else:
return self / factor[..., None]
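    # Numerically: with the default gap of 6 dB a signal whose peak magnitude is 1.0 is
    # divided by 10**(6/20), i.e. about 2.0, so the normalized peak ends up near 0.5.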
def to_wav(self, filename, depth=16):
"""Save signal as WAV file.
:param filename: Name of file to save to.
:param depth: If given, convert to integer with specified depth. Else, try to store using the original data type.
        By default the signal is converted to 16-bit integers. Note that no normalization is applied here; normalize the signal first (e.g. with :meth:`normalize`) to keep headroom before clipping occurs.
"""
data = self
dtype = data.dtype if not depth else 'int' + str(depth)
if depth:
            data = (data * (2**(depth - 1) - 1)).astype(dtype)
wavfile.write(filename, int(self.fs), data.T)
#wavfile.write(filename, int(self.fs), self._data/np.abs(self._data).max() * 0.5)
#wavfile.write(filename, int(self.fs), np.int16(self._data/(np.abs(self._data).max()) * 32767) )
@classmethod
def from_wav(cls, filename, normalize=True):
"""
Create an instance of `Signal` from a WAV file.
:param filename: Filename
:param normalize: Whether to normalize the signal.
"""
fs, data = wavfile.read(filename)
data = data.astype(np.float32, copy=False).T
if normalize:
data /= np.max(np.abs(data))
return cls(data, fs=fs)
_PLOTTING_PARAMS = {
'title': None,
'xlabel': None,
'ylabel': None,
'xscale': 'linear',
'yscale': 'linear',
'xlim': (None, None),
'ylim': (None, None),
'labels': None,
'linestyles': ['-', '-.', '--', ':'],
}
def _get_plotting_params():
d = dict()
d.update(_PLOTTING_PARAMS)
return d
def _base_plot(x, y, given_params):
"""Common function for creating plots.
:returns: Axes object.
:rtype: :class:`matplotlib.Axes`
"""
params = _get_plotting_params()
params.update(given_params)
linestyles = itertools.cycle(iter(params['linestyles']))
# Check if an axes object is passed in. Otherwise, create one.
ax0 = params.get('ax', plt.figure().add_subplot(111))
ax0.set_title(params['title'])
if y.ndim > 1:
for channel in y:
ax0.plot(x, channel, linestyle=next(linestyles))
else:
ax0.plot(x, y)
ax0.set_xlabel(params['xlabel'])
ax0.set_ylabel(params['ylabel'])
ax0.set_xscale(params['xscale'])
ax0.set_yscale(params['yscale'])
ax0.set_xlim(params['xlim'])
ax0.set_ylim(params['ylim'])
if params['labels'] is None and y.ndim > 1:
params['labels'] = np.arange(y.shape[-2]) + 1
if params['labels'] is not None:
ax0.legend(labels=params['labels'])
return ax0
__all__ = ["Signal"]
|
|
# Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from datetime import datetime
import importlib
import logging
import numpy as np
import tensorflow as tf
from tensorforce import TensorforceError
epsilon = 1e-6
log_levels = dict(
info=logging.INFO,
debug=logging.DEBUG,
critical=logging.CRITICAL,
warning=logging.WARNING,
fatal=logging.FATAL
)
class NullContext(object):
singleton = None
def __new__(cls):
if cls.singleton is None:
cls.singleton = super().__new__(cls)
return cls.singleton
def __enter__(self):
return self
def __exit__(self, etype, exception, traceback):
pass
def __getattr__(self, name):
raise AttributeError
def __setattr__(self, name, value):
raise NotImplementedError
def __delattr__(self, name):
raise NotImplementedError
def debug(message):
logging.warning('{}: {}'.format(datetime.now().strftime('%H:%M:%S-%f')[:-3], message))
def overwrite_staticmethod(obj, function):
qualname = getattr(obj, function).__qualname__
def overwritten(*args, **kwargs):
raise TensorforceError(message="Function {}() is a static method.".format(qualname))
setattr(obj, function, overwritten)
def try_import_module(*, module, parent_class=None):
try:
module = importlib.import_module(name=module)
assert parent_class is not None
classes = list()
for cls in dir(module):
cls = getattr(module, cls)
if isinstance(cls, type) and issubclass(cls, parent_class):
classes.append(cls)
if len(classes) > 1:
filter_classes = list()
for cls in classes:
if not all(issubclass(x, cls) for x in classes): # check whether not super-class
filter_classes.append(cls)
classes = filter_classes
if len(classes) == 0:
return None
elif len(classes) > 1:
raise TensorforceError(message="Ambiguous import modules: {}".format(
', '.join(str(cls) for cls in classes)
))
cls = classes[0]
if isinstance(parent_class, tuple):
assert all(cls != parent_cls for parent_cls in parent_class)
else:
assert cls != parent_class
return cls
except ImportError:
pass
if '.' not in module:
return None
module, class_name = module.rsplit('.', 1)
try:
module = importlib.import_module(name=module)
cls = getattr(module, class_name)
assert issubclass(cls, parent_class) and cls != parent_class
return cls
except ImportError:
return None
def is_iterable(x):
if isinstance(x, (str, dict, np.ndarray, tf.Tensor)):
return False
try:
iter(x)
return True
except TypeError:
return False
def is_equal(x, y):
    if isinstance(x, tuple):
        return isinstance(y, tuple) and len(x) == len(y) and \
            all(is_equal(x=x, y=y) for x, y in zip(x, y))
    elif isinstance(x, list):
        return isinstance(y, list) and len(x) == len(y) and \
            all(is_equal(x=x, y=y) for x, y in zip(x, y))
elif isinstance(x, dict):
return isinstance(y, dict) and len(x) == len(y) and \
all(k in y and is_equal(x=v, y=y[k]) for k, v in x.items())
elif isinstance(x, np.ndarray):
return isinstance(y, np.ndarray) and (x == y).all()
else:
return x == y
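# For instance, numpy arrays are compared element-wise and then collapsed with ``.all()``,
# so is_equal(x=np.zeros(3), y=np.zeros(3)) is True even though the raw ``==`` comparison
# of the two arrays yields an array rather than a single bool.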
def unary_tuple(x, depth):
assert depth > 0
for _ in range(depth):
x = (x,)
return x
def product(xs, empty=1):
result = None
for x in xs:
if result is None:
result = x
else:
result *= x
if result is None:
result = empty
return result
def deep_disjoint_update(target, source): # , ignore=()
for key, value in source.items():
if key not in target:
target[key] = value
# elif key in ignore:
# continue
elif isinstance(target[key], dict):
if not isinstance(value, dict):
raise TensorforceError.mismatch(
name='spec', argument=key, value1=target[key], value2=value
)
deep_disjoint_update(target=target[key], source=value)
elif is_iterable(x=target[key]):
if not is_iterable(x=value) or len(target[key]) != len(value):
raise TensorforceError.mismatch(
name='spec', argument=key, value1=target[key], value2=value
)
for x, y in zip(target[key], value):
if x != y:
raise TensorforceError.mismatch(
name='spec', argument=key, value1=target[key], value2=value
)
elif target[key] != value:
raise TensorforceError.mismatch(
name='spec', argument=key, value1=target[key], value2=value
)
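# Illustration of the disjoint update above: merging source={'b': 2} into target={'a': 1}
# extends target to {'a': 1, 'b': 2}, whereas a conflicting value such as source={'a': 3}
# raises TensorforceError.mismatch instead of overwriting.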
def py_dtype(dtype):
if dtype == 'float': # or dtype == float or dtype == np.float32 or dtype == tf.float32:
return float
elif dtype == 'int' or dtype == 'long':
# dtype == int or dtype == np.int32 or dtype == tf.int32 or
# or dtype == np.int64 or dtype == tf.int64
return int
elif dtype == 'bool': # or dtype == bool or dtype == np.bool8 or dtype == tf.bool:
return bool
else:
raise TensorforceError.value(name='util.py_dtype', argument='dtype', value=dtype)
np_dtype_mapping = dict(bool=np.bool8, int=np.int64, long=np.int64, float=np.float32)
def np_dtype(dtype):
"""Translates dtype specifications in configurations to numpy data types.
Args:
dtype: String describing a numerical type (e.g. 'float') or numerical type primitive.
Returns: Numpy data type
"""
if dtype in np_dtype_mapping:
return np_dtype_mapping[dtype]
else:
raise TensorforceError.value(name='util.np_dtype', argument='dtype', value=dtype)
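# A small standalone sanity check of the helpers above; the expected values follow directly
# from np_dtype_mapping and the py_dtype/product/is_iterable definitions.
if __name__ == '__main__':
    assert np_dtype(dtype='float') is np.float32
    assert np_dtype(dtype='long') is np.int64
    assert py_dtype(dtype='bool') is bool
    assert product(xs=[2, 3, 4]) == 24
    assert product(xs=[], empty=1) == 1
    assert is_iterable(x=[1, 2]) and not is_iterable(x='abc')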
|
|
'''
Kivy framework
==============
Kivy is an open source library for developing multi-touch applications. It is
cross-platform (Linux/OSX/Windows/Android/iOS) and released under
the terms of the `MIT License <https://en.wikipedia.org/wiki/MIT_License>`_.
It comes with native support for many multi-touch input devices, a growing
library of multi-touch aware widgets and hardware accelerated OpenGL drawing.
Kivy is designed to let you focus on building custom and highly interactive
applications as quickly and easily as possible.
With Kivy, you can take full advantage of the dynamic nature of Python. There
are thousands of high-quality, free libraries that can be integrated in your
application. At the same time, performance-critical parts are implemented
using `Cython <http://cython.org/>`_.
See http://kivy.org for more information.
'''
__all__ = (
'require',
'kivy_configure', 'kivy_register_post_configuration',
'kivy_options', 'kivy_base_dir',
'kivy_modules_dir', 'kivy_data_dir', 'kivy_shader_dir',
'kivy_icons_dir', 'kivy_home_dir',
'kivy_config_fn', 'kivy_usermodules_dir',
)
import sys
import shutil
from getopt import getopt, GetoptError
from os import environ, mkdir
from os.path import dirname, join, basename, exists, expanduser
import pkgutil
from kivy.compat import PY2
from kivy.logger import Logger, LOG_LEVELS
from kivy.utils import platform
MAJOR = 1
MINOR = 9
MICRO = 2
RELEASE = False
__version__ = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
if not RELEASE and '.dev0' not in __version__:
__version__ += '.dev0'
try:
from kivy.version import __hash__, __date__
__hash__ = __hash__[:7]
except ImportError:
__hash__ = __date__ = ''
# internals for post-configuration
__kivy_post_configuration = []
if platform == 'macosx' and sys.maxsize < 9223372036854775807:
r = '''Unsupported Python version detected!:
Kivy requires a 64 bit version of Python to run on OS X. We strongly
advise you to use the version of Python that is provided by Apple
(don't use ports, fink or homebrew unless you know what you're
doing).
See http://kivy.org/docs/installation/installation-macosx.html for
details.
'''
Logger.critical(r)
def require(version):
'''Require can be used to check the minimum version required to run a Kivy
application. For example, you can start your application code like this::
import kivy
kivy.require('1.0.1')
If a user attempts to run your application with a version of Kivy that is
older than the specified version, an Exception is raised.
The Kivy version string is built like this::
X.Y.Z[-tag[-tagrevision]]
X is the major version
Y is the minor version
Z is the bugfixes revision
The tag is optional, but may be one of 'dev', 'alpha', or 'beta'.
The tagrevision is the revision of the tag.
.. warning::
You must not ask for a version with a tag, except -dev. Asking for a
'dev' version will just warn the user if the current Kivy
version is not a -dev, but it will never raise an exception.
You must not ask for a version with a tagrevision.
'''
def parse_version(version):
# check for tag
tag = None
tagrev = None
if '-' in version:
v = version.split('-')
if len(v) == 2:
version, tag = v
elif len(v) == 3:
version, tag, tagrev = v
else:
raise Exception('Revision format must be X.Y.Z[-tag]')
# check x y z
v = version.split('.')
if len(v) != 3:
if 'dev0' in v:
tag = v.pop()
else:
raise Exception('Revision format must be X.Y.Z[-tag]')
return [int(x) for x in v], tag, tagrev
# user version
revision, tag, tagrev = parse_version(version)
# current version
sysrevision, systag, systagrev = parse_version(__version__)
    # ensure that the required version doesn't contain a tag, except dev
if tag not in (None, 'dev'):
raise Exception('Revision format must not have any tag except "dev"')
if tag == 'dev' and systag != 'dev':
Logger.warning('Application requested a -dev version of Kivy. '
'(You have %s, but the application requires %s)' % (
__version__, version))
    # no tagrevision (-alpha-1, -beta-x) allowed.
if tagrev is not None:
raise Exception('Revision format must not contain any tagrevision')
# finally, checking revision
if sysrevision < revision:
raise Exception('The version of Kivy installed on this system '
'is too old. '
'(You have %s, but the application requires %s)' % (
__version__, version))
def kivy_configure():
'''Call post-configuration of Kivy.
This function must be called if you create the window yourself.
'''
for callback in __kivy_post_configuration:
callback()
def get_includes():
'''Retrieves the directories containing includes needed to build new Cython
modules with Kivy as a dependency. Currently returns the location of the
kivy.graphics module.
.. versionadded:: 1.9.1
'''
root_dir = dirname(__file__)
return [join(root_dir, 'graphics'), join(root_dir, 'tools', 'gles_compat'),
join(root_dir, 'include')]
def kivy_register_post_configuration(callback):
'''Register a function to be called when kivy_configure() is called.
.. warning::
Internal use only.
'''
__kivy_post_configuration.append(callback)
def kivy_usage():
'''Kivy Usage: %s [OPTION...]::
-h, --help
Prints this help message.
-d, --debug
Shows debug log.
-a, --auto-fullscreen
Force 'auto' fullscreen mode (no resolution change).
Uses your display's resolution. This is most likely what you want.
-c, --config section:key[:value]
Set a custom [section] key=value in the configuration object.
-f, --fullscreen
Force running in fullscreen mode.
-k, --fake-fullscreen
Force 'fake' fullscreen mode (no window border/decoration).
Uses the resolution specified by width and height in your config.
-w, --windowed
Force running in a window.
-p, --provider id:provider[,options]
Add an input provider (eg: ccvtable1:tuio,192.168.0.1:3333).
-m mod, --module=mod
Activate a module (use "list" to get a list of available modules).
-r, --rotation
Rotate the window's contents (0, 90, 180, 270).
-s, --save
Save current Kivy configuration.
--size=640x480
Size of window geometry.
--dpi=96
Manually overload the Window DPI (for testing only.)
'''
print(kivy_usage.__doc__ % (basename(sys.argv[0])))
#: Global settings options for kivy
kivy_options = {
'window': ('egl_rpi', 'sdl2', 'pygame', 'sdl', 'x11'),
'text': ('pil', 'sdl2', 'pygame', 'sdlttf'),
'video': (
'gstplayer', 'ffmpeg', 'ffpyplayer', 'null'),
'audio': (
'gstplayer', 'pygame', 'ffpyplayer', 'sdl2',
'avplayer'),
'image': ('tex', 'imageio', 'dds', 'sdl2', 'pygame', 'pil', 'ffpy', 'gif'),
'camera': ('opencv', 'gi', 'avfoundation',
'android'),
'spelling': ('enchant', 'osxappkit', ),
'clipboard': (
'android', 'winctypes', 'xsel', 'xclip', 'dbusklipper', 'nspaste',
'sdl2', 'pygame', 'dummy', 'gtk3', )}
# Read environment
for option in kivy_options:
key = 'KIVY_%s' % option.upper()
if key in environ:
try:
if type(kivy_options[option]) in (list, tuple):
kivy_options[option] = environ[key].split(',')
else:
kivy_options[option] = environ[key].lower() in \
('true', '1', 'yes')
except Exception:
Logger.warning('Core: Wrong value for %s environment key' % key)
Logger.exception('')
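# For example, KIVY_TEXT=pil restricts the text providers to ['pil'], and
# KIVY_AUDIO=sdl2,ffpyplayer narrows the audio providers to that ordered pair.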
# Extract all needed path in kivy
#: Kivy directory
kivy_base_dir = dirname(sys.modules[__name__].__file__)
#: Kivy modules directory
kivy_modules_dir = environ.get('KIVY_MODULES_DIR',
join(kivy_base_dir, 'modules'))
#: Kivy data directory
kivy_data_dir = environ.get('KIVY_DATA_DIR',
join(kivy_base_dir, 'data'))
#: Kivy binary deps directory
kivy_binary_deps_dir = environ.get('KIVY_BINARY_DEPS',
join(kivy_base_dir, 'binary_deps'))
#: Kivy glsl shader directory
kivy_shader_dir = join(kivy_data_dir, 'glsl')
#: Kivy icons config path (don't remove the last '')
kivy_icons_dir = join(kivy_data_dir, 'icons', '')
#: Kivy user-home storage directory
kivy_home_dir = ''
#: Kivy configuration filename
kivy_config_fn = ''
#: Kivy user modules directory
kivy_usermodules_dir = ''
# if there are deps, import them so they can do their magic.
import kivy.deps
_packages = []
for importer, modname, ispkg in pkgutil.iter_modules(kivy.deps.__path__):
if not ispkg:
continue
if modname.startswith('gst'):
_packages.insert(0, (importer, modname))
else:
_packages.append((importer, modname))
for importer, modname in _packages:
try:
importer.find_module(modname).load_module(modname)
except ImportError as e:
Logger.warning("deps: Error importing dependency: {}".format(str(e)))
# Don't go further if we generate documentation
if any(name in sys.argv[0] for name in ('sphinx-build', 'autobuild.py')):
environ['KIVY_DOC'] = '1'
if 'sphinx-build' in sys.argv[0]:
environ['KIVY_DOC_INCLUDE'] = '1'
if any('nosetests' in arg for arg in sys.argv):
environ['KIVY_UNITTEST'] = '1'
if any('pyinstaller' in arg.lower() for arg in sys.argv):
environ['KIVY_PACKAGING'] = '1'
if not environ.get('KIVY_DOC_INCLUDE'):
# Configuration management
if 'KIVY_HOME' in environ:
kivy_home_dir = expanduser(environ['KIVY_HOME'])
else:
user_home_dir = expanduser('~')
if platform == 'android':
user_home_dir = environ['ANDROID_APP_PATH']
elif platform == 'ios':
user_home_dir = join(expanduser('~'), 'Documents')
kivy_home_dir = join(user_home_dir, '.kivy')
if PY2:
kivy_home_dir = kivy_home_dir.decode(sys.getfilesystemencoding())
kivy_config_fn = join(kivy_home_dir, 'config.ini')
kivy_usermodules_dir = join(kivy_home_dir, 'mods')
icon_dir = join(kivy_home_dir, 'icon')
if 'KIVY_NO_CONFIG' not in environ:
if not exists(kivy_home_dir):
mkdir(kivy_home_dir)
if not exists(kivy_usermodules_dir):
mkdir(kivy_usermodules_dir)
if not exists(icon_dir):
try:
shutil.copytree(join(kivy_data_dir, 'logo'), icon_dir)
except:
Logger.exception('Error when copying logo directory')
# configuration
from kivy.config import Config
# Set level of logger
level = LOG_LEVELS.get(Config.get('kivy', 'log_level'))
Logger.setLevel(level=level)
    # Can be overridden on the command line
if ('KIVY_UNITTEST' not in environ and
'KIVY_PACKAGING' not in environ and
'KIVY_NO_ARGS' not in environ):
        # save sys argv, otherwise gstreamer uses it and displays its help..
sys_argv = sys.argv
sys.argv = sys.argv[:1]
try:
opts, args = getopt(sys_argv[1:], 'hp:fkawFem:sr:dc:', [
'help', 'fullscreen', 'windowed', 'fps', 'event',
'module=', 'save', 'fake-fullscreen', 'auto-fullscreen',
'multiprocessing-fork', 'display=', 'size=', 'rotate=',
'config=', 'debug', 'dpi='])
except GetoptError as err:
Logger.error('Core: %s' % str(err))
kivy_usage()
sys.exit(2)
mp_fork = None
try:
for opt, arg in opts:
if opt == '--multiprocessing-fork':
mp_fork = True
break
except:
pass
# set argv to the non-read args
sys.argv = sys_argv[0:1] + args
if mp_fork is not None:
# Needs to be first opt for support_freeze to work
sys.argv.insert(1, '--multiprocessing-fork')
else:
opts = []
args = []
need_save = False
for opt, arg in opts:
if opt in ('-h', '--help'):
kivy_usage()
sys.exit(0)
elif opt in ('-p', '--provider'):
try:
pid, args = arg.split(':', 1)
Config.set('input', pid, args)
except ValueError:
# when we are doing an executable on macosx with
# pyinstaller, they are passing information with -p. so
# it will conflict with our current -p option. since the
# format is not the same, just avoid it.
pass
elif opt in ('-a', '--auto-fullscreen'):
Config.set('graphics', 'fullscreen', 'auto')
elif opt in ('-c', '--config'):
ol = arg.split(':', 2)
if len(ol) == 2:
Config.set(ol[0], ol[1], '')
elif len(ol) == 3:
Config.set(ol[0], ol[1], ol[2])
else:
raise Exception('Invalid --config value')
if ol[0] == 'kivy' and ol[1] == 'log_level':
level = LOG_LEVELS.get(Config.get('kivy', 'log_level'))
Logger.setLevel(level=level)
elif opt in ('-k', '--fake-fullscreen'):
Config.set('graphics', 'fullscreen', 'fake')
elif opt in ('-f', '--fullscreen'):
Config.set('graphics', 'fullscreen', '1')
elif opt in ('-w', '--windowed'):
Config.set('graphics', 'fullscreen', '0')
elif opt in ('--size', ):
w, h = str(arg).split('x')
Config.set('graphics', 'width', w)
Config.set('graphics', 'height', h)
elif opt in ('--display', ):
Config.set('graphics', 'display', str(arg))
elif opt in ('-m', '--module'):
if str(arg) == 'list':
from kivy.modules import Modules
Modules.usage_list()
sys.exit(0)
args = arg.split(':', 1)
if len(args) == 1:
args += ['']
Config.set('modules', args[0], args[1])
elif opt in ('-s', '--save'):
need_save = True
elif opt in ('-r', '--rotation'):
Config.set('graphics', 'rotation', arg)
elif opt in ('-d', '--debug'):
level = LOG_LEVELS.get('debug')
Logger.setLevel(level=level)
elif opt == '--dpi':
environ['KIVY_DPI'] = arg
if need_save and 'KIVY_NO_CONFIG' not in environ:
try:
with open(kivy_config_fn, 'w') as fd:
Config.write(fd)
except Exception as e:
            Logger.exception('Core: error while saving default '
                             'configuration file: ' + str(e))
Logger.info('Core: Kivy configuration saved.')
sys.exit(0)
# configure all activated modules
from kivy.modules import Modules
Modules.configure()
# android hooks: force fullscreen and add android touch input provider
if platform in ('android', 'ios'):
from kivy.config import Config
Config.set('graphics', 'fullscreen', 'auto')
Config.remove_section('input')
Config.add_section('input')
if platform == 'android':
Config.set('input', 'androidtouch', 'android')
if RELEASE:
Logger.info('Kivy: v%s' % (__version__))
elif not RELEASE and __hash__ and __date__:
Logger.info('Kivy: v%s, git-%s, %s' % (__version__, __hash__, __date__))
Logger.info('Python: v{}'.format(sys.version))
|
|
from __future__ import print_function, unicode_literals
import os
import random
import re
import select
import socket
import ssl
import string
import subprocess
import sys
import time
from itertools import count
from threading import Event, Lock, Thread
import paho.mqtt.client as mqtt
import ttfw_idf
DEFAULT_MSG_SIZE = 16
def _path(f):
return os.path.join(os.path.dirname(os.path.realpath(__file__)),f)
def set_server_cert_cn(ip):
arg_list = [
['openssl', 'req', '-out', _path('srv.csr'), '-key', _path('server.key'),'-subj', '/CN={}'.format(ip), '-new'],
['openssl', 'x509', '-req', '-in', _path('srv.csr'), '-CA', _path('ca.crt'),
'-CAkey', _path('ca.key'), '-CAcreateserial', '-out', _path('srv.crt'), '-days', '360']]
for args in arg_list:
if subprocess.check_call(args) != 0:
            raise RuntimeError('openssl command {} failed'.format(args))
def get_my_ip():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
# doesn't even have to be reachable
s.connect(('10.255.255.255', 1))
IP = s.getsockname()[0]
except Exception:
IP = '127.0.0.1'
finally:
s.close()
return IP
# Publisher class creating a python client to send/receive published data from esp-mqtt client
class MqttPublisher:
def __init__(self, dut, transport, qos, repeat, published, queue, publish_cfg, log_details=False):
# instance variables used as parameters of the publish test
self.event_stop_client = Event()
self.sample_string = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(DEFAULT_MSG_SIZE))
self.client = None
self.dut = dut
self.log_details = log_details
self.repeat = repeat
self.publish_cfg = publish_cfg
self.publish_cfg['qos'] = qos
self.publish_cfg['queue'] = queue
self.publish_cfg['transport'] = transport
self.lock = Lock()
# static variables used to pass options to and from static callbacks of paho-mqtt client
MqttPublisher.event_client_connected = Event()
MqttPublisher.event_client_got_all = Event()
MqttPublisher.published = published
MqttPublisher.event_client_connected.clear()
MqttPublisher.event_client_got_all.clear()
MqttPublisher.expected_data = self.sample_string * self.repeat
def print_details(self, text):
if self.log_details:
print(text)
def mqtt_client_task(self, client, lock):
while not self.event_stop_client.is_set():
with lock:
client.loop()
time.sleep(0.001) # yield to other threads
# The callback for when the client receives a CONNACK response from the server (needs to be static)
@staticmethod
def on_connect(_client, _userdata, _flags, _rc):
MqttPublisher.event_client_connected.set()
# The callback for when a PUBLISH message is received from the server (needs to be static)
@staticmethod
def on_message(client, userdata, msg):
payload = msg.payload.decode()
if payload == MqttPublisher.expected_data:
userdata += 1
client.user_data_set(userdata)
if userdata == MqttPublisher.published:
MqttPublisher.event_client_got_all.set()
def __enter__(self):
qos = self.publish_cfg['qos']
queue = self.publish_cfg['queue']
transport = self.publish_cfg['transport']
broker_host = self.publish_cfg['broker_host_' + transport]
broker_port = self.publish_cfg['broker_port_' + transport]
# Start the test
self.print_details("PUBLISH TEST: transport:{}, qos:{}, sequence:{}, enqueue:{}, sample msg:'{}'"
.format(transport, qos, MqttPublisher.published, queue, MqttPublisher.expected_data))
try:
if transport in ['ws', 'wss']:
self.client = mqtt.Client(transport='websockets')
else:
self.client = mqtt.Client()
self.client.on_connect = MqttPublisher.on_connect
self.client.on_message = MqttPublisher.on_message
self.client.user_data_set(0)
if transport in ['ssl', 'wss']:
self.client.tls_set(None, None, None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
self.client.tls_insecure_set(True)
self.print_details('Connecting...')
self.client.connect(broker_host, broker_port, 60)
except Exception:
self.print_details('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}'.format(broker_host))
raise
# Starting a py-client in a separate thread
thread1 = Thread(target=self.mqtt_client_task, args=(self.client, self.lock))
thread1.start()
self.print_details('Connecting py-client to broker {}:{}...'.format(broker_host, broker_port))
if not MqttPublisher.event_client_connected.wait(timeout=30):
raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_host))
with self.lock:
self.client.subscribe(self.publish_cfg['subscribe_topic'], qos)
self.dut.write(' '.join(str(x) for x in (transport, self.sample_string, self.repeat, MqttPublisher.published, qos, queue)), eol='\n')
try:
# waiting till subscribed to defined topic
self.dut.expect(re.compile(r'MQTT_EVENT_SUBSCRIBED'), timeout=30)
for _ in range(MqttPublisher.published):
with self.lock:
self.client.publish(self.publish_cfg['publish_topic'], self.sample_string * self.repeat, qos)
self.print_details('Publishing...')
self.print_details('Checking esp-client received msg published from py-client...')
self.dut.expect(re.compile(r'Correct pattern received exactly x times'), timeout=60)
if not MqttPublisher.event_client_got_all.wait(timeout=60):
raise ValueError('Not all data received from ESP32')
print(' - all data received from ESP32')
finally:
self.event_stop_client.set()
thread1.join()
def __exit__(self, exc_type, exc_value, traceback):
self.client.disconnect()
self.event_stop_client.clear()
# Simple server for mqtt over TLS connection
class TlsServer:
def __init__(self, port, client_cert=False, refuse_connection=False, use_alpn=False):
self.port = port
self.socket = socket.socket()
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.settimeout(10.0)
self.shutdown = Event()
self.client_cert = client_cert
self.refuse_connection = refuse_connection
self.ssl_error = None
self.use_alpn = use_alpn
        self.negotiated_protocol = None
        self.conn = None  # set in run_server(); pre-initialised so __exit__ is safe if accept fails
def __enter__(self):
try:
self.socket.bind(('', self.port))
except socket.error as e:
print('Bind failed:{}'.format(e))
raise
self.socket.listen(1)
self.server_thread = Thread(target=self.run_server)
self.server_thread.start()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.shutdown.set()
self.server_thread.join()
self.socket.close()
if (self.conn is not None):
self.conn.close()
def get_last_ssl_error(self):
return self.ssl_error
def get_negotiated_protocol(self):
return self.negotiated_protocol
def run_server(self):
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if self.client_cert:
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(cafile=_path('ca.crt'))
context.load_cert_chain(certfile=_path('srv.crt'), keyfile=_path('server.key'))
if self.use_alpn:
context.set_alpn_protocols(['mymqtt', 'http/1.1'])
self.socket = context.wrap_socket(self.socket, server_side=True)
try:
self.conn, address = self.socket.accept() # accept new connection
self.socket.settimeout(10.0)
print(' - connection from: {}'.format(address))
if self.use_alpn:
self.negotiated_protocol = self.conn.selected_alpn_protocol()
print(' - negotiated_protocol: {}'.format(self.negotiated_protocol))
self.handle_conn()
except ssl.SSLError as e:
self.conn = None
self.ssl_error = str(e)
print(' - SSLError: {}'.format(str(e)))
def handle_conn(self):
while not self.shutdown.is_set():
r,w,e = select.select([self.conn], [], [], 1)
try:
if self.conn in r:
self.process_mqtt_connect()
except socket.error as err:
print(' - error: {}'.format(err))
raise
def process_mqtt_connect(self):
try:
data = bytearray(self.conn.recv(1024))
message = ''.join(format(x, '02x') for x in data)
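            # The expected prefix is the start of an MQTT CONNECT packet:
            # 0x10 (CONNECT), 0x18 (remaining length 24), 0x0004 (protocol name length)
            # followed by 0x4d515454, which spells 'MQTT' in ASCII.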
if message[0:16] == '101800044d515454':
if self.refuse_connection is False:
print(' - received mqtt connect, sending ACK')
self.conn.send(bytearray.fromhex('20020000'))
else:
# injecting connection not authorized error
print(' - received mqtt connect, sending NAK')
self.conn.send(bytearray.fromhex('20020005'))
else:
raise Exception(' - error process_mqtt_connect unexpected connect received: {}'.format(message))
finally:
            # stop the server after the connect message in the happy flow, or if any exception occurs
self.shutdown.set()
def connection_tests(dut, cases):
ip = get_my_ip()
set_server_cert_cn(ip)
server_port = 2222
def teardown_connection_suite():
dut.write('conn teardown 0 0')
def start_connection_case(case, desc):
print('Starting {}: {}'.format(case, desc))
case_id = cases[case]
dut.write('conn {} {} {}'.format(ip, server_port, case_id))
dut.expect('Test case:{} started'.format(case_id))
return case_id
for case in ['CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT', 'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_CERT', 'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_DER_CERT']:
# All these cases connect to the server with no server verification or with server only verification
with TlsServer(server_port):
test_nr = start_connection_case(case, 'default server - expect to connect normally')
dut.expect('MQTT_EVENT_CONNECTED: Test={}'.format(test_nr), timeout=30)
with TlsServer(server_port, refuse_connection=True):
test_nr = start_connection_case(case, 'ssl shall connect, but mqtt sends connect refusal')
dut.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30)
dut.expect('MQTT ERROR: 0x5') # expecting 0x5 ... connection not authorized error
with TlsServer(server_port, client_cert=True) as s:
test_nr = start_connection_case(case, 'server with client verification - handshake error since client presents no client certificate')
dut.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30)
dut.expect('ESP-TLS ERROR: ESP_ERR_MBEDTLS_SSL_HANDSHAKE_FAILED') # expect ... handshake error (PEER_DID_NOT_RETURN_A_CERTIFICATE)
if 'PEER_DID_NOT_RETURN_A_CERTIFICATE' not in s.get_last_ssl_error():
                raise Exception('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error()))
for case in ['CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH', 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD']:
# These cases connect to server with both server and client verification (client key might be password protected)
with TlsServer(server_port, client_cert=True):
test_nr = start_connection_case(case, 'server with client verification - expect to connect normally')
dut.expect('MQTT_EVENT_CONNECTED: Test={}'.format(test_nr), timeout=30)
case = 'CONFIG_EXAMPLE_CONNECT_CASE_INVALID_SERVER_CERT'
with TlsServer(server_port) as s:
test_nr = start_connection_case(case, 'invalid server certificate on default server - expect ssl handshake error')
dut.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30)
dut.expect('ESP-TLS ERROR: ESP_ERR_MBEDTLS_SSL_HANDSHAKE_FAILED') # expect ... handshake error (TLSV1_ALERT_UNKNOWN_CA)
if 'alert unknown ca' not in s.get_last_ssl_error():
raise Exception('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error()))
case = 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_BAD_CRT'
with TlsServer(server_port, client_cert=True) as s:
test_nr = start_connection_case(case, 'Invalid client certificate on server with client verification - expect ssl handshake error')
dut.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30)
dut.expect('ESP-TLS ERROR: ESP_ERR_MBEDTLS_SSL_HANDSHAKE_FAILED') # expect ... handshake error (CERTIFICATE_VERIFY_FAILED)
if 'CERTIFICATE_VERIFY_FAILED' not in s.get_last_ssl_error():
raise Exception('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error()))
for case in ['CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT', 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN']:
with TlsServer(server_port, use_alpn=True) as s:
test_nr = start_connection_case(case, 'server with alpn - expect connect, check resolved protocol')
dut.expect('MQTT_EVENT_CONNECTED: Test={}'.format(test_nr), timeout=30)
if case == 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT' and s.get_negotiated_protocol() is None:
print(' - client with alpn off, no negotiated protocol: OK')
elif case == 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN' and s.get_negotiated_protocol() == 'mymqtt':
print(' - client with alpn on, negotiated protocol resolved: OK')
else:
raise Exception('Unexpected negotiated protocol {}'.format(s.get_negotiated_protocol()))
teardown_connection_suite()
@ttfw_idf.idf_custom_test(env_tag='Example_EthKitV1', group='test-apps')
def test_app_protocol_mqtt_publish_connect(env, extra_data):
"""
steps:
1. join AP
2. connect to uri specified in the config
3. send and receive data
"""
dut1 = env.get_dut('mqtt_publish_connect_test', 'tools/test_apps/protocols/mqtt/publish_connect_test')
# check and log bin size
binary_file = os.path.join(dut1.app.binary_path, 'mqtt_publish_connect_test.bin')
bin_size = os.path.getsize(binary_file)
ttfw_idf.log_performance('mqtt_publish_connect_test_bin_size', '{}KB'.format(bin_size // 1024))
# Look for test case symbolic names and publish configs
cases = {}
publish_cfg = {}
try:
# Get connection test cases configuration: symbolic names for test cases
for case in ['CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT',
'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_CERT',
'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH',
'CONFIG_EXAMPLE_CONNECT_CASE_INVALID_SERVER_CERT',
'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_DER_CERT',
'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD',
'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_BAD_CRT',
'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN']:
cases[case] = dut1.app.get_sdkconfig()[case]
except Exception:
print('ENV_TEST_FAILURE: Some mandatory CONNECTION test case not found in sdkconfig')
raise
dut1.start_app()
esp_ip = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)
print('Got IP={}'.format(esp_ip[0]))
if not os.getenv('MQTT_SKIP_CONNECT_TEST'):
connection_tests(dut1,cases)
#
# start publish tests only if enabled in the environment (for weekend tests only)
if not os.getenv('MQTT_PUBLISH_TEST'):
return
# Get publish test configuration
try:
def get_host_port_from_dut(dut1, config_option):
value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()[config_option])
if value is None:
return None, None
return value.group(1), int(value.group(2))
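        # e.g. a config value such as '"mqtts://broker.local:8883"' resolves to
        # ('broker.local', 8883); entries without a host:port part give (None, None).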
publish_cfg['publish_topic'] = dut1.app.get_sdkconfig()['CONFIG_EXAMPLE_SUBSCIBE_TOPIC'].replace('"','')
publish_cfg['subscribe_topic'] = dut1.app.get_sdkconfig()['CONFIG_EXAMPLE_PUBLISH_TOPIC'].replace('"','')
publish_cfg['broker_host_ssl'], publish_cfg['broker_port_ssl'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_SSL_URI')
publish_cfg['broker_host_tcp'], publish_cfg['broker_port_tcp'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_TCP_URI')
publish_cfg['broker_host_ws'], publish_cfg['broker_port_ws'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_WS_URI')
publish_cfg['broker_host_wss'], publish_cfg['broker_port_wss'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_WSS_URI')
except Exception:
print('ENV_TEST_FAILURE: Some mandatory PUBLISH test case not found in sdkconfig')
raise
def start_publish_case(transport, qos, repeat, published, queue):
print('Starting Publish test: transport:{}, qos:{}, nr_of_msgs:{}, msg_size:{}, enqueue:{}'
.format(transport, qos, published, repeat * DEFAULT_MSG_SIZE, queue))
with MqttPublisher(dut1, transport, qos, repeat, published, queue, publish_cfg):
pass
# Initialize message sizes and repeat counts (if defined in the environment)
messages = []
for i in count(0):
# Check env variable: MQTT_PUBLISH_MSG_{len|repeat}_{x}
env_dict = {var:'MQTT_PUBLISH_MSG_' + var + '_' + str(i) for var in ['len', 'repeat']}
if os.getenv(env_dict['len']) and os.getenv(env_dict['repeat']):
messages.append({var: int(os.getenv(env_dict[var])) for var in ['len', 'repeat']})
continue
break
if not messages: # No message sizes present in the env - set defaults
messages = [{'len':0, 'repeat':5}, # zero-sized messages
{'len':2, 'repeat':10}, # short messages
{'len':200, 'repeat':3}, # long messages
{'len':20, 'repeat':50} # many medium sized
]
# Iterate over all publish message properties
for qos in [0, 1, 2]:
for transport in ['tcp', 'ssl', 'ws', 'wss']:
for q in [0, 1]:
if publish_cfg['broker_host_' + transport] is None:
print('Skipping transport: {}...'.format(transport))
continue
for msg in messages:
start_publish_case(transport, qos, msg['len'], msg['repeat'], q)
if __name__ == '__main__':
test_app_protocol_mqtt_publish_connect(dut=ttfw_idf.ESP32QEMUDUT if sys.argv[1:] == ['qemu'] else ttfw_idf.ESP32DUT)
|
|
"""Test HTTP API application
"""
import datetime
import json
import os
from unittest.mock import Mock
import pytest
import smif
from flask import current_app
from smif.data_layer.store import Store
from smif.exception import SmifDataNotFoundError
from smif.http_api import create_app
@pytest.fixture
def mock_scheduler():
def get_status(arg):
if arg == 'model_never_started':
return {
'status': 'unstarted'
}
elif arg == 'model_started_and_running':
return {
'status': 'running',
}
elif arg == 'model_started_and_done':
return {
'status': 'done',
}
        elif arg == 'unique_model_run_name':
return {
'status': 'running',
}
attrs = {
'get_status.side_effect': get_status
}
return Mock(**attrs)
@pytest.fixture
def mock_data_interface(model_run, get_sos_model, get_sector_model,
get_scenario, get_narrative, get_dimension):
def read_model_run(arg):
_check_exist('model_run', arg)
return model_run
def read_sos_model(arg):
_check_exist('sos_model', arg)
return get_sos_model
def read_model(arg, skip_coords=False):
_check_exist('sector_model', arg)
return get_sector_model
def read_scenario(arg, skip_coords=False):
_check_exist('scenario', arg)
return get_scenario
def read_dimension(arg, skip_coords=False):
_check_exist('dimension', arg)
return get_dimension
def _check_exist(config, name):
if name == 'does_not_exist':
raise SmifDataNotFoundError("%s '%s' not found" % (config, name))
attrs = {
'read_model_runs.side_effect': [[model_run]],
'read_model_run.side_effect': read_model_run,
'read_sos_models.side_effect': [[get_sos_model]],
'read_sos_model.side_effect': read_sos_model,
'read_models.side_effect': [[get_sector_model]],
'read_model.side_effect': read_model,
'read_scenarios.side_effect': [[get_scenario]],
'read_scenario.side_effect': read_scenario,
'read_dimensions.side_effect': [[get_dimension]],
'read_dimension.side_effect': read_dimension
}
return Mock(spec=Store, **attrs)
@pytest.fixture
def app(request, mock_scheduler, mock_data_interface):
"""Return an app
"""
test_app = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'fixtures', 'http'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'fixtures', 'http'),
data_interface=mock_data_interface,
scheduler=mock_scheduler
)
with test_app.app_context():
yield test_app
@pytest.fixture
def app_fail(request, mock_scheduler, mock_data_interface):
"""Return an app which will fail to find templates
"""
test_app = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'fixtures', '404'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'fixtures', '404'),
data_interface=mock_data_interface,
scheduler=mock_scheduler
)
with test_app.app_context():
yield test_app
@pytest.fixture
def client(request, app):
"""Return an API client
"""
test_client = app.test_client()
def teardown():
pass
request.addfinalizer(teardown)
return test_client
@pytest.fixture
def client_fail(request, app_fail):
"""Return an API client which will fail on request for home page
"""
test_client = app_fail.test_client()
def teardown():
pass
request.addfinalizer(teardown)
return test_client
def parse_json(response):
"""Parse response data
"""
return json.loads(response.data.decode('utf-8'))
def serialise_json(data):
return json.dumps(data, default=timestamp_serialiser)
def timestamp_serialiser(obj):
"""Serialist datetime
"""
if isinstance(obj, datetime.datetime):
return obj.isoformat()
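# For example, serialise_json({'at': datetime.datetime(2020, 1, 1)}) yields
# '{"at": "2020-01-01T00:00:00"}'; values json.dumps already understands pass through unchanged.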
def test_hello(client):
"""Start with a welcome message
"""
response = client.get('/')
assert "Welcome to smif" in str(response.data)
def test_template_not_found(client_fail):
"""Clear error if template not found
"""
response = client_fail.get('/')
assert "Error: smif app template not found" in str(response.data)
def test_get_smif(client):
"""GET smif details
"""
response = client.get('/api/v1/smif/')
data = parse_json(response)
assert data['data']['version'] == smif.__version__
def test_get_smif_version(client):
"""GET smif version
"""
response = client.get('/api/v1/smif/version')
data = parse_json(response)
assert data['data'] == smif.__version__
def test_model_runs(client, model_run):
"""GET all model runs
"""
response = client.get('/api/v1/model_runs/')
assert current_app.config.data_interface.read_model_runs.call_count == 1
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == [model_run]
def test_model_runs_filtered_running(client, model_run):
"""GET all model runs
"""
response = client.get('/api/v1/model_runs/?status=running')
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == [model_run]
def test_model_run(client, model_run):
"""GET single model run
"""
name = model_run['name']
response = client.get('/api/v1/model_runs/{}'.format(name))
current_app.config.data_interface.read_model_run.assert_called_with(name)
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == model_run
def test_model_run_missing(client):
"""GET missing system-of-systems model run
"""
response = client.get('/api/v1/model_runs/does_not_exist')
data = parse_json(response)
assert data['error']['SmifDataNotFoundError'] == ["model_run 'does_not_exist' not found"]
def test_post_model_run(client, model_run):
"""POST model run
"""
name = 'test_post_model_run'
model_run['name'] = name
send = serialise_json(model_run)
response = client.post(
'/api/v1/model_runs/',
data=send,
content_type='application/json')
current_app.config.data_interface.write_model_run.assert_called_with(model_run)
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
def test_put_model_run(client, model_run):
"""PUT model run
"""
send = serialise_json(model_run)
response = client.put(
'/api/v1/model_runs/' + model_run['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.update_model_run.assert_called_with(
model_run['name'], model_run)
assert response.status_code == 200
def test_delete_model_run(client, model_run):
"""DELETE model_run
"""
send = serialise_json(model_run)
response = client.delete(
'/api/v1/model_runs/' + model_run['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.delete_model_run.assert_called_with(model_run['name'])
assert response.status_code == 200
def test_start_model_run(client):
"""POST model run START
"""
# Start a model_run
send = serialise_json({
'args': {
'verbosity': 0,
'warm_start': False,
'output_format': 'local_csv'
}})
response = client.post(
'/api/v1/model_runs/20170918_energy_water/start',
data=send,
content_type='application/json')
call = (current_app.config.scheduler.add.call_args)
assert call[0][0] == '20170918_energy_water'
assert call[0][1]['verbosity'] == 0
assert call[0][1]['warm_start'] is False
assert call[0][1]['output_format'] == 'local_csv'
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
def test_kill_model_run(client):
"""POST model run START
"""
# Kill a model_run
response = client.post(
'/api/v1/model_runs/20170918_energy_water/kill',
data={},
content_type='application/json')
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
current_app.config.scheduler.kill.assert_called_with('20170918_energy_water')
def test_get_modelrun_status_modelrun_never_started(client):
"""GET model run STATUS
"""
# Check if the modelrun is running
response = client.get(
'/api/v1/model_runs/model_never_started/status'
)
data = parse_json(response)
assert response.status_code == 200
assert data['data']['status'] == 'unstarted'
def test_get_modelrun_status_modelrun_running(client):
"""GET model run STATUS
"""
# Check if the modelrun is running
response = client.get(
'/api/v1/model_runs/model_started_and_running/status'
)
data = parse_json(response)
assert response.status_code == 200
assert data['data']['status'] == 'running'
def test_get_modelrun_status_modelrun_done(client):
"""GET model run STATUS
"""
# Check if the modelrun was successful
response = client.get(
'/api/v1/model_runs/model_started_and_done/status'
)
data = parse_json(response)
assert response.status_code == 200
assert data['data']['status'] == 'done'
def test_get_sos_models(client, get_sos_model):
"""GET all system-of-systems models
"""
response = client.get('/api/v1/sos_models/')
assert current_app.config.data_interface.read_sos_models.called == 1
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == [get_sos_model]
def test_get_sos_model(client, get_sos_model):
"""GET single system-of-systems model
"""
name = get_sos_model['name']
response = client.get('/api/v1/sos_models/{}'.format(name))
current_app.config.data_interface.read_sos_model.assert_called_with(name)
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == get_sos_model
def test_get_sos_model_missing(client):
"""GET missing system-of-systems model
"""
response = client.get('/api/v1/sos_models/does_not_exist')
data = parse_json(response)
assert data['error']['SmifDataNotFoundError'] == ["sos_model 'does_not_exist' not found"]
def test_post_sos_model(client, get_sos_model):
"""POST system-of-systems model
"""
name = 'test_post_sos_model'
get_sos_model['name'] = name
send = serialise_json(get_sos_model)
response = client.post(
'/api/v1/sos_models/',
data=send,
content_type='application/json')
assert current_app.config.data_interface.write_sos_model.called == 1
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
def test_put_sos_model(client, get_sos_model):
"""PUT sos_model
"""
send = serialise_json(get_sos_model)
response = client.put(
'/api/v1/sos_models/' + get_sos_model['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.update_sos_model.assert_called_with(
get_sos_model['name'], get_sos_model)
assert response.status_code == 200
def test_delete_sos_model(client, get_sos_model):
"""DELETE sos_model
"""
send = serialise_json(get_sos_model)
response = client.delete(
'/api/v1/sos_models/' + get_sos_model['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.delete_sos_model.assert_called_with(
get_sos_model['name'])
assert response.status_code == 200
def test_get_sector_models(client, get_sector_model):
"""GET all model runs
"""
response = client.get('/api/v1/sector_models/')
assert current_app.config.data_interface.read_models.called == 1
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == [get_sector_model]
def test_get_sector_model(client, get_sector_model):
"""GET single model run
"""
name = get_sector_model['name']
response = client.get('/api/v1/sector_models/{}'.format(name))
current_app.config.data_interface.read_model.assert_called_with(
name, skip_coords=True)
data = parse_json(response)
assert data['data'] == get_sector_model
def test_get_sector_model_missing(client):
"""GET missing model run
"""
response = client.get('/api/v1/sector_models/does_not_exist')
data = parse_json(response)
assert data['error']['SmifDataNotFoundError'] == \
["sector_model 'does_not_exist' not found"]
def test_post_sector_model(client, get_sector_model):
"""POST sector model
"""
name = 'test_post_sector_model'
get_sector_model['name'] = name
send = serialise_json(get_sector_model)
response = client.post(
'/api/v1/sector_models/',
data=send,
content_type='application/json')
current_app.config.data_interface.write_model.assert_called_with(get_sector_model)
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
def test_put_sector_model(client, get_sector_model):
"""PUT sector_model
"""
send = serialise_json(get_sector_model)
response = client.put(
'/api/v1/sector_models/' + get_sector_model['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.update_model.assert_called_with(
get_sector_model['name'], get_sector_model)
assert response.status_code == 200
def test_delete_sector_model(client, get_sector_model):
"""DELETE sector_model
"""
send = serialise_json(get_sector_model)
response = client.delete(
'/api/v1/sector_models/' + get_sector_model['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.delete_model.assert_called_with(
get_sector_model['name'])
assert response.status_code == 200
def test_get_scenarios(client, get_scenario):
"""GET all scenarios
"""
response = client.get('/api/v1/scenarios/')
assert current_app.config.data_interface.read_scenarios.called == 1
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == [get_scenario]
def test_get_scenario(client, get_scenario):
"""GET single system-of-systems model
"""
name = get_scenario['name']
response = client.get('/api/v1/scenarios/{}'.format(name))
current_app.config.data_interface.read_scenario.assert_called_with(name, skip_coords=True)
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == get_scenario
def test_get_scenario_missing(client):
"""GET missing system-of-systems model
"""
response = client.get('/api/v1/scenarios/does_not_exist')
data = parse_json(response)
assert data['error']['SmifDataNotFoundError'] == ["scenario 'does_not_exist' not found"]
def test_post_scenario(client, get_scenario):
"""POST system-of-systems model
"""
name = 'test_post_scenario'
get_scenario['name'] = name
send = serialise_json(get_scenario)
response = client.post(
'/api/v1/scenarios/',
data=send,
content_type='application/json')
current_app.config.data_interface.write_scenario.assert_called_with(get_scenario)
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
def test_delete_scenario(client, get_scenario):
"""DELETE scenario
"""
send = serialise_json(get_scenario)
response = client.delete(
'/api/v1/scenarios/' + get_scenario['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.delete_scenario.assert_called_with(get_scenario['name'])
assert response.status_code == 200
def test_put_scenario(client, get_scenario):
"""PUT scenario
"""
send = serialise_json(get_scenario)
response = client.put(
'/api/v1/scenarios/' + get_scenario['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.update_scenario.assert_called_with(
get_scenario['name'], get_scenario)
assert response.status_code == 200
def test_get_dimensions(client, get_dimension):
"""GET all dimensions
"""
response = client.get('/api/v1/dimensions/')
assert current_app.config.data_interface.read_dimensions.called == 1
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == [get_dimension]
def test_get_dimension(client, get_dimension):
"""GET single system-of-systems model
"""
name = get_dimension['name']
response = client.get('/api/v1/dimensions/{}'.format(name))
current_app.config.data_interface.read_dimension.assert_called_with(name, skip_coords=True)
assert response.status_code == 200
data = parse_json(response)
assert data['data'] == get_dimension
def test_get_dimension_missing(client):
"""GET missing system-of-systems model
"""
response = client.get('/api/v1/dimensions/does_not_exist')
data = parse_json(response)
assert data['error']['SmifDataNotFoundError'] == ["dimension 'does_not_exist' not found"]
def test_post_dimension(client, get_dimension):
"""POST system-of-systems model
"""
name = 'test_post_dimension'
get_dimension['name'] = name
send = serialise_json(get_dimension)
response = client.post(
'/api/v1/dimensions/',
data=send,
content_type='application/json')
current_app.config.data_interface.write_dimension.assert_called_with(
get_dimension)
data = parse_json(response)
assert response.status_code == 201
assert data['message'] == 'success'
def test_put_dimension(client, get_dimension):
"""PUT dimension
"""
send = serialise_json(get_dimension)
response = client.put(
'/api/v1/dimensions/' + get_dimension['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.update_dimension.assert_called_with(
get_dimension['name'], get_dimension)
assert response.status_code == 200
def test_delete_dimension(client, get_dimension):
"""DELETE dimension
"""
send = serialise_json(get_dimension)
response = client.delete(
'/api/v1/dimensions/' + get_dimension['name'],
data=send,
content_type='application/json')
current_app.config.data_interface.delete_dimension.assert_called_with(
get_dimension['name'])
assert response.status_code == 200
|
|
# utils/SwiftBuildSupport.py - Utilities for Swift build scripts -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import print_function
try:
# Python 2
import ConfigParser
except ImportError:
# Python 3
import configparser as ConfigParser
import os
import pipes
import subprocess
import sys
HOME = os.environ.get("HOME", "/")
def _get_default_source_root():
result = ""
# Are we in a Swift checkout? Start from this file and check its parent
# directories.
#
# $SWIFT_SOURCE_ROOT/swift/utils/SwiftBuildSupport.py
(swift_path, parent_dirname) = os.path.split(os.path.dirname(__file__))
if parent_dirname != "utils":
return result
if not os.path.exists(os.path.join(swift_path, 'CMakeLists.txt')):
return result
result = os.path.dirname(swift_path)
# Are we in an LLVM checkout? Start from the Swift checkout and check /its/
# parent directories.
#
# $SWIFT_SOURCE_ROOT/llvm/tools/swift/utils/SwiftBuildSupport.py
(llvm_path, parent_dirname) = os.path.split(result)
if parent_dirname != "tools":
return result
if not os.path.exists(os.path.join(llvm_path, 'CMakeLists.txt')):
return result
result = os.path.dirname(llvm_path)
return result
# Set SWIFT_SOURCE_ROOT in your environment to control where the sources
# are found.
SWIFT_SOURCE_ROOT = os.environ.get(
"SWIFT_SOURCE_ROOT", _get_default_source_root())
# Set SWIFT_BUILD_ROOT to a directory that will contain a subdirectory
# for each build configuration
SWIFT_BUILD_ROOT = os.environ.get(
"SWIFT_BUILD_ROOT", os.path.join(SWIFT_SOURCE_ROOT, "build"))
def print_with_argv0(message):
print(sys.argv[0] + ": " + message)
def quote_shell_command(args):
return " ".join([pipes.quote(a) for a in args])
def check_call(args, print_command=False, verbose=False):
if print_command:
print(os.getcwd() + "$ " + quote_shell_command(args))
try:
return subprocess.check_call(args)
except subprocess.CalledProcessError as e:
if verbose:
            # CalledProcessError does not define strerror; report the exception itself.
            print_with_argv0(str(e))
else:
print_with_argv0(
"command terminated with a non-zero exit status " +
str(e.returncode) + ", aborting")
sys.stdout.flush()
sys.exit(1)
except OSError as e:
print_with_argv0("could not execute '" + quote_shell_command(args) +
"': " + e.strerror)
sys.stdout.flush()
sys.exit(1)
def check_output(args, print_command=False, verbose=False):
if print_command:
print(os.getcwd() + "$ " + quote_shell_command(args))
try:
return subprocess.check_output(args)
except subprocess.CalledProcessError as e:
if verbose:
            # CalledProcessError does not define strerror; report the exception itself.
            print_with_argv0(str(e))
else:
print_with_argv0(
"command terminated with a non-zero exit status " +
str(e.returncode) + ", aborting")
sys.stdout.flush()
sys.exit(1)
except OSError as e:
print_with_argv0("could not execute '" + quote_shell_command(args) +
"': " + e.strerror)
sys.stdout.flush()
sys.exit(1)
def _load_preset_files_impl(preset_file_names, substitutions={}):
config = ConfigParser.SafeConfigParser(substitutions, allow_no_value=True)
if config.read(preset_file_names) == []:
print_with_argv0(
"preset file not found (tried " + str(preset_file_names) + ")")
sys.exit(1)
return config
_PRESET_PREFIX = "preset: "
def _get_preset_options_impl(config, substitutions, preset_name):
section_name = _PRESET_PREFIX + preset_name
if section_name not in config.sections():
return (None, None, None)
build_script_opts = []
build_script_impl_opts = []
missing_opts = []
dash_dash_seen = False
for o in config.options(section_name):
try:
a = config.get(section_name, o)
except ConfigParser.InterpolationMissingOptionError as e:
# e.reference contains the correctly formatted option
missing_opts.append(e.reference)
continue
if not a:
a = ""
if o in substitutions:
continue
opt = None
if o == "mixin-preset":
# Split on newlines and filter out empty lines.
mixins = filter(None, [m.strip() for m in a.splitlines()])
for mixin in mixins:
(base_build_script_opts,
base_build_script_impl_opts,
base_missing_opts) = \
_get_preset_options_impl(config, substitutions, mixin)
build_script_opts += base_build_script_opts
build_script_impl_opts += base_build_script_impl_opts
missing_opts += base_missing_opts
elif o == "dash-dash":
dash_dash_seen = True
elif a == "":
opt = "--" + o
else:
opt = "--" + o + "=" + a
if opt:
if not dash_dash_seen:
build_script_opts.append(opt)
else:
build_script_impl_opts.append(opt)
return (build_script_opts, build_script_impl_opts, missing_opts)
def get_preset_options(substitutions, preset_file_names, preset_name):
config = _load_preset_files_impl(preset_file_names, substitutions)
(build_script_opts, build_script_impl_opts, missing_opts) = \
_get_preset_options_impl(config, substitutions, preset_name)
if not build_script_opts:
print_with_argv0("preset '" + preset_name + "' not found")
sys.exit(1)
if missing_opts:
print_with_argv0("missing option(s) for preset '" + preset_name +
"': " + ", ".join(missing_opts))
sys.exit(1)
return build_script_opts + ["--"] + build_script_impl_opts
def get_all_preset_names(preset_file_names):
config = _load_preset_files_impl(preset_file_names)
return [name[len(_PRESET_PREFIX):] for name in config.sections()
if name.startswith(_PRESET_PREFIX)]
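# Illustrative sketch (hypothetical preset file, not part of the original
# sources): given a section such as
#
#   [preset: buildbot_incremental]
#   test
#   dash-dash
#   swift-enable-ast-verifier=1
#
# get_preset_options({}, [path], 'buildbot_incremental') would return
# ['--test', '--', '--swift-enable-ast-verifier=1']: options before the
# "dash-dash" marker become build-script flags, and everything after it is
# forwarded to build-script-impl behind the "--" separator.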
# A context manager for changing the current working directory.
#
# with WorkingDirectory('/tmp'):
# ... do work in /tmp...
class WorkingDirectory(object):
def __init__(self, new_cwd):
self.new_cwd = new_cwd
def __enter__(self):
self.old_cwd = os.getcwd()
os.chdir(self.new_cwd)
def __exit__(self, type, value, traceback):
os.chdir(self.old_cwd)
|
|
# -*- coding: utf-8 -*-
"""Parser for .customDestinations-ms files."""
import logging
import os
import construct
from dfvfs.lib import definitions
from dfvfs.lib import errors as dfvfs_errors
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver
from plaso.lib import errors
from plaso.parsers import interface
from plaso.parsers import manager
from plaso.parsers import winlnk
class CustomDestinationsParser(interface.SingleFileBaseParser):
"""Parses .customDestinations-ms files."""
_INITIAL_FILE_OFFSET = None
NAME = u'custom_destinations'
DESCRIPTION = u'Parser for *.customDestinations-ms files.'
# We cannot use the parser registry here since winlnk could be disabled.
# TODO: see if there is a more elegant solution for this.
_WINLNK_PARSER = winlnk.WinLnkParser()
_LNK_GUID = (
b'\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x46')
_FOOTER_SIGNATURE = 0xbabffbab
_FILE_HEADER = construct.Struct(
u'file_header',
construct.ULInt32(u'unknown1'),
construct.ULInt32(u'unknown2'),
construct.ULInt32(u'unknown3'),
construct.ULInt32(u'header_values_type'))
_HEADER_VALUE_TYPE_0 = construct.Struct(
u'header_value_type_0',
construct.ULInt32(u'number_of_characters'),
construct.String(u'string', lambda ctx: ctx.number_of_characters * 2),
construct.ULInt32(u'unknown1'))
_HEADER_VALUE_TYPE_1_OR_2 = construct.Struct(
u'header_value_type_1_or_2',
construct.ULInt32(u'unknown1'))
_ENTRY_HEADER = construct.Struct(
u'entry_header',
construct.String(u'guid', 16))
_FILE_FOOTER = construct.Struct(
u'file_footer',
construct.ULInt32(u'signature'))
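  # Rough file layout implied by the structures above (a sketch derived from
  # this parser, not from a format specification): a 16-byte header, one
  # header value block (type 0, 1 or 2), then a sequence of entries each
  # starting with the 16-byte LNK class identifier GUID followed by embedded
  # LNK data, and finally a 4-byte footer with signature 0xbabffbab.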
def _ParseLNKFile(
self, parser_mediator, file_entry, file_offset, remaining_file_size):
"""Parses a LNK file stored within the .customDestinations-ms file.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
file_entry: A file entry object (instance of dfvfs.FileEntry).
file_offset: The offset of the LNK file data.
remaining_file_size: The size of the data remaining in the
.customDestinations-ms file.
Returns:
The size of the LNK file data or 0 if the LNK file could not be read.
"""
path_spec = path_spec_factory.Factory.NewPathSpec(
definitions.TYPE_INDICATOR_DATA_RANGE, range_offset=file_offset,
range_size=remaining_file_size, parent=file_entry.path_spec)
display_name = u'{0:s} # 0x{1:08x}'.format(file_entry.name, file_offset)
try:
lnk_file_object = resolver.Resolver.OpenFileObject(path_spec)
except (dfvfs_errors.BackEndError, RuntimeError) as exception:
message = (
u'Unable to open LNK file: {0:s} with error {1:s}').format(
display_name, exception)
parser_mediator.ProduceParseError(message)
return 0
self._WINLNK_PARSER.UpdateChainAndParseFileObject(
parser_mediator, lnk_file_object, display_name=display_name)
# We cannot trust the file size in the LNK data so we get the last offset
# that was read instead.
lnk_file_size = lnk_file_object.get_offset()
lnk_file_object.close()
return lnk_file_size
def ParseFileObject(self, parser_mediator, file_object, **kwargs):
"""Parses a .customDestinations-ms file-like object.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
file_object: A file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
file_entry = parser_mediator.GetFileEntry()
display_name = parser_mediator.GetDisplayName()
file_object.seek(0, os.SEEK_SET)
try:
file_header = self._FILE_HEADER.parse_stream(file_object)
except (IOError, construct.FieldError) as exception:
raise errors.UnableToParseFile((
u'Invalid Custom Destination: {0:s} - unable to parse '
u'file header with error: {1:s}').format(display_name, exception))
if file_header.unknown1 != 2:
raise errors.UnableToParseFile((
u'Unsupported Custom Destination file: {0:s} - invalid unknown1: '
u'{1:d}.').format(display_name, file_header.unknown1))
if file_header.header_values_type > 2:
raise errors.UnableToParseFile((
u'Unsupported Custom Destination file: {0:s} - invalid header value '
u'type: {1:d}.').format(display_name, file_header.header_values_type))
if file_header.header_values_type == 0:
data_structure = self._HEADER_VALUE_TYPE_0
else:
data_structure = self._HEADER_VALUE_TYPE_1_OR_2
try:
_ = data_structure.parse_stream(file_object)
except (IOError, construct.FieldError) as exception:
raise errors.UnableToParseFile((
u'Invalid Custom Destination file: {0:s} - unable to parse '
u'header value with error: {1:s}').format(
display_name, exception))
file_size = file_object.get_size()
file_offset = file_object.get_offset()
remaining_file_size = file_size - file_offset
    # The Custom Destination file does not have a unique signature in
    # the file header, which is why the first LNK class identifier (GUID)
    # is used as a signature instead.
first_guid_checked = False
while remaining_file_size > 4:
try:
entry_header = self._ENTRY_HEADER.parse_stream(file_object)
except (IOError, construct.FieldError) as exception:
error_message = (
u'Invalid Custom Destination file: {0:s} - unable to parse '
u'entry header with error: {1:s}').format(
display_name, exception)
if not first_guid_checked:
raise errors.UnableToParseFile(error_message)
logging.warning(error_message)
break
if entry_header.guid != self._LNK_GUID:
error_message = (
u'Unsupported Custom Destination file: {0:s} - invalid entry '
u'header.').format(display_name)
if not first_guid_checked:
raise errors.UnableToParseFile(error_message)
file_object.seek(-16, os.SEEK_CUR)
try:
file_footer = self._FILE_FOOTER.parse_stream(file_object)
except (IOError, construct.FieldError) as exception:
raise IOError((
u'Unable to parse file footer at offset: 0x{0:08x} '
u'with error: {1:s}').format(file_offset, exception))
if file_footer.signature != self._FOOTER_SIGNATURE:
logging.warning(error_message)
file_object.seek(-4, os.SEEK_CUR)
# TODO: add support for Jump List LNK file recovery.
break
first_guid_checked = True
file_offset += 16
remaining_file_size -= 16
lnk_file_size = self._ParseLNKFile(
parser_mediator, file_entry, file_offset, remaining_file_size)
file_offset += lnk_file_size
remaining_file_size -= lnk_file_size
file_object.seek(file_offset, os.SEEK_SET)
try:
file_footer = self._FILE_FOOTER.parse_stream(file_object)
except (IOError, construct.FieldError) as exception:
logging.warning((
u'Invalid Custom Destination file: {0:s} - unable to parse '
u'footer with error: {1:s}').format(display_name, exception))
if file_footer.signature != self._FOOTER_SIGNATURE:
logging.warning((
u'Unsupported Custom Destination file: {0:s} - invalid footer '
u'signature.').format(display_name))
manager.ParsersManager.RegisterParser(CustomDestinationsParser)
|
|
from django.conf import settings
# noinspection PyProtectedMember
from django.core.cache import caches
from django.utils.six import iteritems
from logging import getLogger
import requests
import requests.exceptions
import string
import random
import socket
logger = getLogger(__name__)
def random_password(minlength=20, maxlength=30):
"""
Generate random string used as password.
"""
length = random.randint(minlength, maxlength)
letters = string.ascii_letters + string.digits
return ''.join([random.choice(letters) for _ in range(length)])
def _test_vnc(host, port, timeout=3):
"""
Test VNC connection.
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(timeout)
try:
sock.connect((host, port))
        if sock.recv(1024).startswith(b'RFB'):  # recv() returns bytes on Python 3
return True
except (socket.error, socket.timeout, socket.herror, socket.gaierror) as err:
logger.warning('Error "%s" when testing VNC on "%s:%s"', err, host, port)
finally:
sock.close()
return False
class Guacamole(object):
"""
Manipulate guacamole authorization from django.
"""
def __init__(self, request, vm=None, username=None, password=None, save_password=False, load_password=False):
"""
:param request: django request object.
:param vm: vm object or list of vm objects (queryset). If it's a object
it will be turned into a list.
:param username: if not specified it will be set to the username
attribute of request.user object.
:param password: if not specified it will be auto generated.
:param save_password: if True, then save the password in the
request.session object.
:param load_password: if True, then load the password from the
request.session object.
"""
self.auth = None
self.tree = None
self.request = request
self.vm = vm
if self.vm and not hasattr(self.vm, '__iter__'):
self.vm = [vm]
self.usr = username
if not self.usr:
self.usr = request.user.username
self.key = settings.GUACAMOLE_KEY + self.usr
self.pwd = password
if not self.pwd:
if load_password:
self.pwd = self.request.session.get(self.key, random_password())
else:
self.pwd = random_password()
if save_password:
self.request.session[self.key] = self.pwd
def __set_tree(self):
self.tree = {}
def __set_auth(self):
self.tree['password'] = self.pwd
def __set_vm(self):
for i in self.vm:
self.tree[i.hostname] = {
'protocol': 'vnc',
'hostname': i.node.address,
'port': i.vnc_port
}
@classmethod
def test_vnc(cls, vm, timeout=2):
"""
Test VNC connection on VM.
"""
return _test_vnc(vm.node.address, vm.vnc_port, timeout=timeout)
def usermap(self):
"""
        Generate the user-mapping configuration and return it along with the key string.
"""
logger.debug('Creating guacamole user-mapping for user %s.', self.usr)
self.__set_tree()
self.__set_auth()
if self.vm:
self.__set_vm()
return self.key, self.tree
def login(self, save_cookie=True):
"""
Perform a login to guacamole by issuing a POST request to /api/tokens.
"""
logger.info('Performing guacamole login of user %s.', self.usr)
exc = None
r = None
try:
r = requests.post(
settings.GUACAMOLE_URI + '/api/tokens',
data={'username': self.usr, 'password': self.pwd},
headers={'User-Agent': settings.GUACAMOLE_USERAGENT},
timeout=settings.GUACAMOLE_TIMEOUT,
allow_redirects=False
)
        except requests.exceptions.RequestException as e:
            # Keep a reference: the "as" target is cleared when the except
            # block ends on Python 3, but exc is reported again further down.
            exc = e
            logger.exception(exc)
            status = None
else:
status = r.status_code
if status == 200 and settings.GUACAMOLE_COOKIE in r.cookies:
token = r.json().get('authToken', '')
cookie = r.cookies[settings.GUACAMOLE_COOKIE]
logger.info('User %s got guacamole cookie=%s and token=%s.', self.usr, cookie, token)
if save_cookie:
self.request.session[settings.GUACAMOLE_COOKIE] = cookie
self.request.session[settings.GUACAMOLE_TOKEN] = token
res = {
'token': token,
'cookie': {
'key': settings.GUACAMOLE_COOKIE,
'value': cookie,
'path': settings.GUACAMOLE_COOKIEPATH,
'domain': settings.GUACAMOLE_COOKIEDOMAIN,
'httponly': False
}
}
else:
logger.error('User %s could not login to guacamole, status=%s, response="%r".',
self.usr, status, exc or r.text)
res = {}
return res
def logout(self):
"""
Perform a logout from guacamole by issuing a DELETE request to /api/tokens/<token>.
"""
session = self.request.session
token = ''
logger.info('Performing guacamole logout of user %s.', self.usr)
if settings.GUACAMOLE_COOKIE in session and settings.GUACAMOLE_TOKEN in session:
token = session[settings.GUACAMOLE_TOKEN]
try:
r = requests.delete(
settings.GUACAMOLE_URI + '/api/tokens/' + token,
cookies={settings.GUACAMOLE_COOKIE: session[settings.GUACAMOLE_COOKIE]},
headers={'User-Agent': settings.GUACAMOLE_USERAGENT},
timeout=settings.GUACAMOLE_TIMEOUT,
allow_redirects=False
)
r.raise_for_status()
except requests.exceptions.RequestException as exc:
if exc.response and exc.response.status_code == 404:
logger.warning('User %s could not logout from guacamole because the token "%s" '
'does not exist anymore', self.usr, token)
else:
logger.exception(exc)
logger.error('User %s could not logout from guacamole (%r).', self.usr, exc)
else:
logger.info('User %s has no guacamole cookie and/or token.', self.usr)
return {
'token': token,
'cookie': {
'key': settings.GUACAMOLE_COOKIE,
'path': settings.GUACAMOLE_COOKIEPATH,
}
}
class GuacamoleAuth(Guacamole):
"""
Manipulate guacamole-auth-redis keys.
"""
redis = caches['redis'].master_client
def set_auth(self):
"""
Create Guacamole usermap and store it in redis.
"""
username, configs = self.usermap()
pipe = self.redis.pipeline()
pipe.hset(username, 'password', configs.pop('password', None))
for key, cfg in iteritems(configs):
val = '\n'.join([str(i) + '=' + str(j) for i, j in iteritems(cfg)])
pipe.hset(username, key, val)
return pipe.execute()
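    # Illustrative sketch (hypothetical values, not from the original source):
    # for user "alice" with a single VM, usermap() yields something like
    #   ('<GUACAMOLE_KEY>alice',
    #    {'password': 'Xy3...',
    #     'vm1.example.com': {'protocol': 'vnc', 'hostname': '10.0.0.5',
    #                         'port': 5901}})
    # and set_auth() above stores it as a redis hash: the 'password' field
    # holds the generated password, and each connection field holds
    # newline-separated "key=value" pairs such as
    # "protocol=vnc\nhostname=10.0.0.5\nport=5901".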
def del_auth(self):
"""
Remove Guacamole usermap from redis.
"""
return self.redis.delete(self.key)
|
|
#!/usr/bin/python2.4
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Browse and retrieve builds from a chrome build repository."""
# Standard imports
import cStringIO as StringIO
import datetime
import httplib
import optparse
import os
import posixpath
import re
import shutil
import socket
import sys
import urllib2
import urlparse
import zipfile
# Local imports
import log_helper
# The default regular expression to use when searching for build ids.
DEFAULT_BUILD_ID_PATTERN = r'\d+\.\d+\.\d+\.\d+'
# The list of files we're interested in.
FILE_LIST = [
'chrome-win32-syms.zip',
'chrome-win32.zip',
'chrome-win32.test/automated_ui_tests.exe',
'chrome-win32.test/reliability_tests.exe',
]
# The set of build subdirs we're interested in. There has been some flakiness
# in building the latest whole-program-optimized official binaries in 'win',
# so an unoptimized build has been introduced in 'win_unopt'. We can try them
# in priority order when looking for a successful build, giving preference
# to the optimized build.
SUBDIRS = [ 'win', 'win_unopt' ]
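# Illustrative sketch (assumed repository layout, not from the original file):
# combining the pieces above, a single build artifact lives at a URL path such
# as
#   <repo_url>/<build_id>/win/chrome-win32.zip
# which is what ChromeRepo._GetFilePath() constructs before checking for or
# downloading it.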
# The logger object used by this module
_LOGGER = log_helper.GetLogger(__file__)
class Error(Exception):
"""Base class for all exception thrown by this module."""
pass
class DownloadError(Error):
"""Raised on errors when downloading from the repository."""
pass
class FormatError(Error):
"""Raised on errors parsing a response from the repository."""
pass
class NotFoundError(Error):
"""Raised on errors searching the repository for a build id."""
pass
class ChromeRepo(object):
"""Browses and retrieves builds from a chrome build repository."""
# Python's date parsing utilities depend on the locale ... decouple that.
_MONTHS = {
'jan' : 1, 'feb' : 2, 'mar' : 3, 'apr' : 4, 'may' : 5, 'jun' : 6,
'jul' : 7, 'aug' : 8, 'sep' : 9, 'oct' : 10, 'nov' : 11, 'dec' : 12,
}
# Used to extract the date from an HTML directory listing.
_BUILD_DATE_REGEX = re.compile(
r'(?P<day>[0-3]\d)-(?P<month>(%s))-(?P<year>\d{4})\s+'
r'(?P<hours>[0-2]\d):(?P<minutes>[0-5]\d)' % '|'.join(_MONTHS.keys()),
re.IGNORECASE | re.VERBOSE)
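  # Illustrative sketch (assumed directory-listing format, not from the
  # original file): a typical Apache-style index line such as
  #   <a href="10.0.648.127/">10.0.648.127/</a>  23-Feb-2011 17:20  -
  # is matched by _build_id_regex (the build id inside href="...") and by
  # _BUILD_DATE_REGEX (the day-month-year and hour:minute timestamp), which
  # is all that GetBuildIndex() needs to build its index.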
def __init__(self, repo_url,
build_id_pattern=DEFAULT_BUILD_ID_PATTERN,
proxy_server=None):
"""Initialize a ChromeRepo instance.
Args:
repo_url: The root url which returns the contents of the repository
as a directory listing.
build_id_pattern: The regular expression pattern to use for identifying
build id strings. This allows you to be more specific in your
searches. For example you can specify "10\.\d+\.\d+\.\d+" to get
all version 10 builds.
proxy_server: The URL to the HTTP(s) proxy server to use, or None, if no
proxy server is to be explicitly set.
"""
# pylint: disable=E1103
# --> pylint can't infer the named properties of a SplitResult.
url_parts = urlparse.urlsplit(repo_url)
self._scheme = url_parts.scheme.lower()
self._netloc = url_parts.netloc
self._root_dir = url_parts.path
self._query = url_parts.query
self._fragment = url_parts.fragment
# pylint: enable=E1103
if self._scheme not in ('http', 'https'):
raise ValueError('Unsupported URL scheme (%s)' % self._scheme)
if proxy_server:
custom_handlers = [urllib2.ProxyHandler({self._scheme:proxy_server})]
else:
custom_handlers = []
self._url_opener = urllib2.build_opener(*custom_handlers)
self._build_id_pattern = build_id_pattern
self._build_id_regex = re.compile(r'href="(?P<id>%s)/"' % build_id_pattern)
def _PerformRequest(self, method, path, out_stream, body=None, headers=None,
max_attempts=3):
"""Carries out an HTTP request.
The server used will be that given in the repo_url parameter when this
ChromeRepo object was initialized.
Args:
method: The HTTP method.
path: The path of the request (including query params, fragments, etc).
out_stream: A file object to which the response body will be written.
body: The optional body to include in the request.
headers: The optional HTTP headers to include in the request.
max_attempts: The maximum number of times to attempt the request if it
fails due to a server or network error (default: 3).
Returns:
A triple containing the HTTP status code, the headers of the response,
and the complete URL of the request. The body of the response will have
been written to the out_stream parameter.
"""
chunk_size = 32768
url = '%s://%s%s' % (self._scheme, self._netloc, path)
error_result = -1, {}, url
_LOGGER.debug('Performing %s to %s', method, url)
for attempt in xrange(1, max_attempts + 1):
try:
request = urllib2.Request(url, body, headers or {})
response = self._url_opener.open(request)
while out_stream is not None:
chunk = response.read(chunk_size)
if not chunk:
break
out_stream.write(chunk)
return 200, response.info(), response.geturl()
except (IOError, socket.error, httplib.HTTPException), error:
_LOGGER.error('[%d/%d] %s', attempt, max_attempts, error)
status = (error.code if hasattr(error, 'code') else 500)
error_result = status, {}, url
if status >= 400 and status < 500:
break
      if out_stream is not None:
        # Rewind any partially written output before retrying.
        out_stream.seek(0)
return error_result
def GetBuildIndex(self):
"""Retrieve the list of build (id, timestamp) pairs from the build repo.
The returned list will be sorted from most recently to least recently
modified. Note that it's possible that a build is in progress, so you
may not want to take the most recently modified build.
"""
build_index = list()
response_buffer = StringIO.StringIO()
url_parts = (None, None, self._root_dir, self._query, self._fragment)
path = urlparse.urlunsplit(url_parts)
status, _headers, url = self._PerformRequest('GET', path, response_buffer)
if status != 200:
message = '(%s) Failed to download index [%s]' % (status, url)
_LOGGER.error('%s', message)
raise DownloadError(message)
for line in response_buffer.getvalue().split('\n'):
id_match = self._build_id_regex.search(line)
if not id_match:
continue
date_match = self._BUILD_DATE_REGEX.search(line)
if not date_match:
raise FormatError('Found build id but no date!: %s' % line)
build_id = id_match.group('id')
timestamp = datetime.datetime(
year=int(date_match.group('year')),
month=self._MONTHS[date_match.group('month').lower()],
day=int(date_match.group('day')),
hour=int(date_match.group('hours')),
minute=int(date_match.group('minutes')))
sort_key = (timestamp,) + tuple(int(x) for x in build_id.split('.'))
build_index.append((build_id, timestamp, sort_key))
return sorted(build_index, key=lambda x: x[2], reverse=True)
def _GetFilePath(self, build_id, subdir, relative_path):
"""Generates the path in the repo to a given file for a given build.
Args:
build_id: The identifier for the build
subdir: The build sub-directory for the file.
relative_path: The path to the file, relative to the windows build
root for build_id and the subdir.
Returns:
The absolute path (a string) to the file in the repository.
"""
return posixpath.join(self._root_dir, build_id, subdir, relative_path)
def _FileExists(self, path):
"""Checks if the build artifact given by path exists in the build repo.
Args:
path: The path to the build artifact. Use _GetFilePath to construct
an appropriate path.
Returns:
true if the artifact exists.
"""
status, _headers, _url = self._PerformRequest('HEAD', path, None,
max_attempts=2)
return status == 200
def GetLatestBuildId(self, build_index=None):
"""Pulls out the id and timestamp of the lastest build.
Searches through the (already sorted by date) build_index for the
first build archive that contains all of the required files (i.e.,
that's not a build in progress).
Args:
build_index: The index to search, if you've already downloaded it.
If None, it will be downloaded automatically.
    Returns:
      A (build-id (string), timestamp (datetime), subdir (string)) triple
      for the most recent build that has all of the required files.

    Raises:
      NotFoundError: if no such build can be found.
"""
if build_index is None:
build_index = self.GetBuildIndex()
for build_id, timestamp, _sort_key in build_index:
      for subdir in SUBDIRS:
        # Reset per subdirectory so that a miss in 'win' does not prevent a
        # complete 'win_unopt' build from being accepted.
        found = True
        for file_name in FILE_LIST:
path = self._GetFilePath(build_id, subdir, file_name)
if not self._FileExists(path):
_LOGGER.debug('Build %s is missing %s', build_id, file_name)
found = False
break
if found:
_LOGGER.info('Build %s has all required files', build_id)
return build_id, timestamp, subdir
raise NotFoundError(
'No latest build found matching %s' % self._build_id_pattern)
def DownloadBuild(self, work_dir, build_id=None, subdir=None):
"""Download a build (by id or latest) into work_dir/build_id.
Args:
work_dir: The directory in which to place the downloaded files
build_id: the (optional) id of the build to fetch. If not
specified, this will download the "latest" build.
subdir: The build sub-directory for the files.
Returns:
The final path to the extracted chrome directory; for
example, work_dir/build_id. Under that directory will be the
      chrome-win32 and chrome-win32-syms directories.
"""
if build_id is None:
build_id, dummy_timestamp, subdir = self.GetLatestBuildId()
elif subdir is None:
for ddir in SUBDIRS:
if self._FileExists(self._GetFilePath(build_id, ddir, FILE_LIST[0])):
subdir = ddir
break
if subdir is None:
raise NotFoundError(
'Could not find build artifacts for build %s' % build_id)
build_dir = os.path.abspath(os.path.join(work_dir, build_id))
chrome_dir = os.path.abspath(os.path.join(build_dir, 'chrome-win32'))
if not os.path.exists(build_dir):
os.makedirs(build_dir)
for file_name in FILE_LIST:
_LOGGER.info('Downloading %s', file_name)
name = os.path.basename(file_name)
dest = os.path.join(build_dir, name)
with open(dest, 'wb') as out_stream:
status, headers, url = self._PerformRequest(
'GET', self._GetFilePath(build_id, subdir, file_name), out_stream)
if status == 404:
os.remove(dest)
raise NotFoundError('(%s) Not Found - %s' % (status, file_name))
if status != 200 \
or int(headers['Content-Length']) != os.stat(dest).st_size:
os.remove(dest)
raise DownloadError('(%s) Failed to download %s' % (status, url))
if file_name.lower().endswith('.zip'):
_LOGGER.info('Extracting files from %s', dest)
zipfile.ZipFile(dest, 'r', allowZip64=True).extractall(build_dir)
_LOGGER.info('Extraction complete.')
os.remove(dest)
else:
shutil.move(dest, os.path.join(chrome_dir, name))
return build_dir
def AddCommandLineOptions(option_parser):
"""Adds the group of repository related options to the given option_parser.
Args:
option_parser: the option parser object to update. This is expected
to be an instance of optparse.OptionParser.
"""
group = optparse.OptionGroup(option_parser, 'Build Repository Options')
group.add_option(
'--repo-url', metavar='URL',
help='The root url where builds are archived')
group.add_option(
      '--repo-build-id', metavar='ID', help='The id of the build to download')
group.add_option(
'--repo-work-dir', metavar='DIR', default='.',
help='Where to put downloaded builds')
group.add_option(
'--repo-build-id-pattern', metavar='PATTERN',
default=DEFAULT_BUILD_ID_PATTERN,
help='Regular expression for recognizing build ids (default: %default)')
group.add_option(
'--repo-build-subdir', metavar='DIR',
help='The subdirectory in which the unoptimized build resides.')
group.add_option(
'--repo-proxy', metavar='URL',
help='The proxy server to use when accessing the repository')
option_parser.add_option_group(group)
return group
def ParseArgs():
"""Parse the command line options, returning an options object."""
usage = 'Usage: %prog [options] LIST|GET|LATEST'
option_parser = optparse.OptionParser(usage)
AddCommandLineOptions(option_parser)
log_helper.AddCommandLineOptions(option_parser)
options, args = option_parser.parse_args()
if not options.repo_url:
option_parser.error('--repo-url is required')
if len(args) == 1:
action = args[0].lower()
if action in ('list', 'latest', 'get'):
return options, action
option_parser.error(
'A single repository action (LIST, GET, or LATEST) is required')
def main():
"""Main script function."""
options, action = ParseArgs()
log_helper.InitLogger(options)
repo = ChromeRepo(options.repo_url, options.repo_build_id_pattern,
proxy_server=options.repo_proxy)
try:
if action == 'list':
build_index = repo.GetBuildIndex()
format_str = '%20s %30s'
print format_str % ('Build ID', 'Last Modified')
print format_str % ('-' * 16, '-' * 22)
for build_id, timestamp, _sort_key in build_index:
print format_str % (build_id, timestamp)
elif action == 'latest':
build_id, timestamp, subdir = repo.GetLatestBuildId()
print '%s (%s, %s)' % (build_id, timestamp, subdir)
elif action == 'get':
print repo.DownloadBuild(options.repo_work_dir,
options.repo_build_id,
options.repo_build_subdir)
except (NotFoundError, DownloadError), error:
_LOGGER.error('%s', error)
sys.exit(1)
if __name__ == '__main__':
main()
|
|
from __future__ import unicode_literals
import os
import unittest
from ship.tuflow import tuflowfilepart as tfp
from ship.tuflow.tuflowfilepart import TuflowPart
from ship.tuflow import FILEPART_TYPES as ft
from ship.tuflow import tuflowfactory as f
class TuflowFilePartTests(unittest.TestCase):
'''Tests TuflowPart's and subclasses.
Note:
        These tests look at the general behaviour of TuflowPart's, including the
methods within TuflowPart itself and the main subclasses, like
ATuflowVariable and TuflowFile.
Tests within test_tuflowfactor.py provide decent coverage of creating
new instances of specific TuflowPart's and checking that they have
        been instantiated properly. If you need to add tests to check that
they have been created properly they should probably go in the factory
tests.
'''
def setUp(self):
# Setup a main .tcf file
self.prefix = '/'
if os.name != 'posix':
self.prefix = 'c:' + os.sep
self.fake_root = os.path.join(self.prefix, 'path', 'to', 'fake')
self.tcf = tfp.ModelFile(None, **{'path': 'tcffile.tcf', 'command': None,
'comment': None, 'model_type': 'TCF',
'root': self.fake_root})
# Setup a tgc file with tcf parent
tgc_line = "Geometry Control File == {} ! A tgc comment".format(
os.path.join('..', 'model', 'tgcfile.tgc')
)
self.tgc = f.TuflowFactory.getTuflowPart(tgc_line, self.tcf)[0]
# Setup a gis file with tgc parent
gis_line = "Read Gis Z Shape == {} ! A gis comment".format(
os.path.join('gis', 'gisfile.shp')
)
self.gis = f.TuflowFactory.getTuflowPart(gis_line, self.tgc)[0]
var_line = "Timestep == 2 ! A var comment"
self.var = f.TuflowFactory.getTuflowPart(var_line, self.tgc)[0]
gis_line2 = "Read Gis Z Shape == {} ! A gis 2 comment".format(
os.path.join('gis', 'gisfile2.shp')
)
self.gis2 = f.TuflowFactory.getTuflowPart(gis_line2, self.tgc)[0]
# For the evt testing stuff
line_evt = "Read Gis Z Shape == {} ! A gis 3 comment".format(
os.path.join('gis', 'gisfile_evt.shp')
)
self.gis_evt = f.TuflowFactory.getTuflowPart(line_evt, self.tgc)[0]
linevar_evt = "Timestep == 6 ! A var evt comment"
self.var_evt = f.TuflowFactory.getTuflowPart(linevar_evt, self.tgc)[0]
# For the scenario testing stuff
line_scen = "Read Gis Z Shape == {} ! A gis 3 comment".format(
os.path.join('gis', 'gisfile_evt.shp')
)
self.gis_scen = f.TuflowFactory.getTuflowPart(line_scen, self.tgc)[0]
linevar_scen = "Timestep == 6 ! A var scen comment"
self.var_scen = f.TuflowFactory.getTuflowPart(linevar_scen, self.tgc)[0]
if_args = {
'commands': ['If Scenario', 'Else'], 'terms': [['scen1', 'scen2'], []],
'comments': ['', '']
}
self.iflogic = f.TuflowFactory.createIfLogic(self.tgc, if_args['commands'],
if_args['terms'],
if_args['comments'])
self.iflogic.add_callback = self.fakeCallbackfunc
self.iflogic.remove_callback = self.fakeCallbackfunc
self.iflogic.addPart(self.gis, 0)
self.iflogic.addPart(self.var, 0)
self.iflogic.addPart(self.gis2, 1)
evt_args = {
'commands': 'Define Event', 'terms': ['event1', 'event2'],
'comments': ''
}
self.evtlogic = f.TuflowFactory.createBlockLogic(self.tgc, evt_args['commands'],
evt_args['terms'],
evt_args['comments'])
self.evtlogic.add_callback = self.fakeCallbackfunc
self.evtlogic.remove_callback = self.fakeCallbackfunc
# domain_args = {
# 'commands': 'Start 1D Domain', 'terms': ['domain1d'],
# 'comments': ''
# }
# self.domainlogic = f.TuflowFactory.createSectionLogic(
# self.tgc, evt_args['commands'], evt_args['terms'], evt_args['comments']
# )
def test_TPallParents(self):
"""Check that it returns parents properly.
TuflowPart method.
"""
tgc_parents = self.tgc.allParents([])
gis_parents = self.gis.allParents([])
tgc_hashes = [self.tcf.hash]
for i, t in enumerate(tgc_parents):
self.assertEqual(t, tgc_hashes[i])
gis_hashes = [self.tgc.hash, self.tcf.hash]
for i, t in enumerate(gis_parents):
self.assertEqual(t, gis_hashes[i])
def test_TPisInSeVals(self):
"""Check that parts show up with correct scenario/event values.
TuflowPart method.
"""
se_vals = {
'scenario': ['scen1']
}
self.assertTrue(self.gis.isInSeVals(se_vals))
self.assertTrue(self.var.isInSeVals(se_vals))
self.assertFalse(self.gis2.isInSeVals(se_vals))
se_vals = {
'scenario': ['whatever']
}
self.assertFalse(self.gis.isInSeVals(se_vals))
self.assertFalse(self.var.isInSeVals(se_vals))
self.assertTrue(self.gis2.isInSeVals(se_vals))
def test_TPresolvePlaceholder(self):
"""Test return value of resolvePlaceholder in TuflowPart."""
data_line = "Read Materials File == Materials_<<s1>>.tmf ! A tmf comment"
vardata = f.TuflowFactory.getTuflowPart(data_line, self.tgc)[0]
var_line = "Cell Size == <<size>> ! a cell size comment"
varvar = f.TuflowFactory.getTuflowPart(var_line, self.tgc)[0]
user_vars = {
's1': 'scen1', 'size': '10', 'e1': 'event1', 'anothervar': '2.5'
}
path = vardata.absolutePath(user_vars=user_vars)
pth = os.path.join(self.prefix, 'path', 'to', 'model', 'Materials_scen1.tmf')
self.assertEqual(pth, path)
var = TuflowPart.resolvePlaceholder(varvar.variable, user_vars)
var2 = varvar.resolvedVariable(user_vars)
var3 = varvar.resolvePlaceholder(varvar.variable, user_vars=user_vars)
self.assertEqual(var, '10')
self.assertEqual(var2, '10')
self.assertEqual(var3, '10')
def test_TFabsolutePath(self):
"""Test return value of absolutePath in TuflowFile."""
path1 = os.path.join(self.prefix, 'path', 'to', 'model', 'tgcfile.tgc')
path2 = os.path.join(self.prefix, 'path', 'to', 'model', 'gis', 'gisfile.shp')
self.assertEqual(path1, self.tgc.absolutePath())
self.assertEqual(path2, self.gis.absolutePath())
def test_TFabsolutePathAllTypes(self):
"""Test return all types of absolute path in TuflowFile.
This will return the same as absolutePath if there is only one associated
file extension. Otherwise it will return one path for each type.
"""
paths = [
os.path.join(self.prefix, 'path', 'to', 'model', 'gis', 'gisfile.shp'),
os.path.join(self.prefix, 'path', 'to', 'model', 'gis', 'gisfile.shx'),
os.path.join(self.prefix, 'path', 'to', 'model', 'gis', 'gisfile.dbf')]
self.assertListEqual(paths, self.gis.absolutePathAllTypes())
def test_TFrelativePath(self):
"""Test return value of relativePaths in TuflowFile."""
relpath = os.path.join('..', 'model')
path1 = [relpath]
path2 = [relpath, 'gis']
self.assertListEqual(path1, self.tgc.getRelativeRoots([]))
self.assertListEqual(path2, self.gis.getRelativeRoots([]))
def test_TLaddPart(self):
"""Test adding a new part to TuflowLogic."""
self.evtlogic.addPart(self.gis_evt)
self.assertIn(self.gis_evt, self.evtlogic.group_parts[0])
self.assertIn(self.gis_evt.hash, self.evtlogic.parts)
self.iflogic.addPart(self.gis_scen, 1)
self.assertIn(self.gis_scen, self.iflogic.group_parts[1])
self.assertIn(self.gis_scen.hash, self.iflogic.parts)
def test_TLinsertPart(self):
"""Test inserting a new part to TuflowLogic."""
self.iflogic.insertPart(self.gis_scen, self.gis)
self.assertIn(self.gis_scen, self.iflogic.group_parts[0])
self.iflogic.insertPart(self.var_scen, self.gis2)
self.assertIn(self.var_scen, self.iflogic.group_parts[1])
def test_removePart(self):
self.evtlogic.addPart(self.gis_evt)
self.evtlogic.removePart(self.gis_evt)
self.assertEqual(len(self.evtlogic.group_parts[0]), 0)
self.assertEqual(len(self.evtlogic.parts), 0)
def test_getAllParts(self):
self.evtlogic.addPart(self.gis_evt)
parts = self.evtlogic.getAllParts(hash_only=True)
self.assertEqual(len(parts), 1)
self.assertEqual(parts[0], self.gis_evt.hash)
parts = self.iflogic.getAllParts(hash_only=True)
self.assertEqual(len(parts), 3)
testp = [self.gis.hash, self.gis2.hash, self.var.hash]
self.assertEqual(set(parts), set(testp))
def test_getGroup(self):
self.evtlogic.addPart(self.gis_evt)
g1 = self.evtlogic.getGroup(self.gis_evt)
g2 = self.evtlogic.getGroup(self.gis)
self.assertEqual(g1, 0)
self.assertEqual(g2, -1)
g1 = self.iflogic.getGroup(self.gis)
g2 = self.iflogic.getGroup(self.gis2)
self.assertEqual(g1, 0)
self.assertEqual(g2, 1)
def test_isInClause(self):
self.assertTrue(self.iflogic.isInClause(self.gis, 'scen1'))
self.assertFalse(self.iflogic.isInClause(self.gis2, 'scen1'))
def test_allTerms(self):
self.assertListEqual(self.evtlogic.allTerms(), ['event1', 'event2'])
self.assertListEqual(self.iflogic.allTerms(), ['scen1', 'scen2'])
def test_isInTerms(self):
se_vals = {'scenario': ['scen1'],
'event': ['event1'],
'variable': {}
}
self.assertTrue(self.iflogic.isInTerms(self.gis, se_vals))
self.assertFalse(self.iflogic.isInTerms(self.gis2, se_vals))
se_vals = {'scenario': ['scen122'],
'event': ['event122'],
'variable': {}
}
self.assertFalse(self.iflogic.isInTerms(self.gis, se_vals))
self.assertTrue(self.iflogic.isInTerms(self.gis2, se_vals))
def fakeCallbackfunc(self, new_part, old_part):
"""Used to contain the callback functions in the TuflowLogic."""
pass
|
|
# Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
from opencensus.common.version import __version__
from opencensus.ext.stackdriver import trace_exporter
from opencensus.trace import span_context
from opencensus.trace import span_data as span_data_module
class _Client(object):
def __init__(self, project=None):
if project is None:
project = 'PROJECT'
self.project = project
class TestStackdriverExporter(unittest.TestCase):
def test_constructor_default(self):
patch = mock.patch(
'opencensus.ext.stackdriver.trace_exporter.Client',
new=_Client)
with patch:
exporter = trace_exporter.StackdriverExporter()
project_id = 'PROJECT'
self.assertEqual(exporter.project_id, project_id)
def test_constructor_explicit(self):
client = mock.Mock()
project_id = 'PROJECT'
client.project = project_id
transport = mock.Mock()
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id, transport=transport)
self.assertIs(exporter.client, client)
self.assertEqual(exporter.project_id, project_id)
def test_export(self):
client = mock.Mock()
project_id = 'PROJECT'
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id, transport=MockTransport)
exporter.export({})
self.assertTrue(exporter.transport.export_called)
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance',
return_value=None)
def test_emit(self, mr_mock):
trace_id = '6e0c63257de34c92bf9efcd03927272e'
span_datas = [
span_data_module.SpanData(
name='span',
context=span_context.SpanContext(trace_id=trace_id),
span_id='1111',
parent_span_id=None,
attributes=None,
start_time=None,
end_time=None,
child_span_count=None,
stack_trace=None,
annotations=None,
message_events=None,
links=None,
status=None,
same_process_as_parent_span=None,
span_kind=0,
)
]
stackdriver_spans = {
'spans': [{
'status':
None,
'childSpanCount':
None,
'links':
None,
'startTime':
None,
'spanId':
'1111',
'attributes': {
'attributeMap': {
'g.co/agent': {
'string_value': {
'truncated_byte_count':
0,
'value':
'opencensus-python [{}]'.format(__version__)
}
}
}
},
'stackTrace':
None,
'displayName': {
'truncated_byte_count': 0,
'value': 'span'
},
'name':
'projects/PROJECT/traces/{}/spans/1111'.format(trace_id),
'timeEvents':
None,
'endTime':
None,
'sameProcessAsParentSpan':
None
}]
}
client = mock.Mock()
project_id = 'PROJECT'
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id)
exporter.emit(span_datas)
name = 'projects/{}'.format(project_id)
client.batch_write_spans.assert_called_with(name, stackdriver_spans)
self.assertTrue(client.batch_write_spans.called)
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance',
return_value=None)
def test_translate_to_stackdriver(self, mr_mock):
project_id = 'PROJECT'
trace_id = '6e0c63257de34c92bf9efcd03927272e'
span_name = 'test span'
span_id = '6e0c63257de34c92'
attributes = {
'attributeMap': {
'key': {
'string_value': {
'truncated_byte_count': 0,
'value': 'value'
}
},
'key_double': {
'double_value': {
'value': 123.45
}
},
'http.host': {
'string_value': {
'truncated_byte_count': 0,
'value': 'host'
}
}
}
}
parent_span_id = '6e0c63257de34c93'
start_time = 'test start time'
end_time = 'test end time'
trace = {
'spans': [{
'displayName': {
'value': span_name,
'truncated_byte_count': 0
},
'spanId':
span_id,
'startTime':
start_time,
'endTime':
end_time,
'parentSpanId':
parent_span_id,
'attributes':
attributes,
'someRandomKey':
'this should not be included in result',
'childSpanCount':
0
}],
'traceId':
trace_id
}
client = mock.Mock()
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id)
spans = list(exporter.translate_to_stackdriver(trace))
expected_traces = [{
'name': 'projects/{}/traces/{}/spans/{}'.format(
project_id, trace_id, span_id),
'displayName': {
'value': span_name,
'truncated_byte_count': 0
},
'attributes': {
'attributeMap': {
'g.co/agent': {
'string_value': {
'truncated_byte_count': 0,
'value':
'opencensus-python [{}]'.format(__version__)
}
},
'key': {
'string_value': {
'truncated_byte_count': 0,
'value': 'value'
}
},
'key_double': {
'double_value': {
'value': 123.45
}
},
'/http/host': {
'string_value': {
'truncated_byte_count': 0,
'value': 'host'
}
}
}
},
'spanId': str(span_id),
'startTime': start_time,
'endTime': end_time,
'parentSpanId': str(parent_span_id),
'status': None,
'links': None,
'stackTrace': None,
'timeEvents': None,
'childSpanCount': 0,
'sameProcessAsParentSpan': None
}]
self.assertEqual(spans, expected_traces)
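    # Note (added for clarity): the expected span above illustrates the
    # transformations exercised by this test - keys the exporter does not
    # recognise ('someRandomKey') are dropped, the 'g.co/agent' attribute is
    # added, and attribute keys such as 'http.host' are rewritten to the
    # Stackdriver form '/http/host' by map_attributes().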
def test_translate_common_attributes_to_stackdriver_no_attribute_map(self):
project_id = 'PROJECT'
client = mock.Mock()
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id)
attributes = {'outer key': 'some value'}
expected_attributes = {'outer key': 'some value'}
exporter.map_attributes(attributes)
self.assertEqual(attributes, expected_attributes)
def test_translate_common_attributes_to_stackdriver_none(self):
project_id = 'PROJECT'
client = mock.Mock()
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id)
# does not throw
self.assertIsNone(exporter.map_attributes(None))
def test_translate_common_attributes_to_stackdriver(self):
project_id = 'PROJECT'
client = mock.Mock()
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id)
attributes = {
'outer key': 'some value',
'attributeMap': {
'key': {
'string_value': {
'truncated_byte_count': 0,
'value': 'value'
}
},
'component': {
'string_value': {
'truncated_byte_count': 0,
'value': 'http'
}
},
'error.message': {
'string_value': {
'truncated_byte_count': 0,
'value': 'error message'
}
},
'error.name': {
'string_value': {
'truncated_byte_count': 0,
'value': 'error name'
}
},
'http.host': {
'string_value': {
'truncated_byte_count': 0,
'value': 'host'
}
},
'http.method': {
'string_value': {
'truncated_byte_count': 0,
'value': 'GET'
}
},
'http.status_code': {
'int_value': {
'value': 200
}
},
'http.url': {
'string_value': {
'truncated_byte_count': 0,
'value': 'http://host:port/path?query'
}
},
'http.user_agent': {
'string_value': {
'truncated_byte_count': 0,
'value': 'some user agent'
}
},
'http.client_city': {
'string_value': {
'truncated_byte_count': 0,
'value': 'Redmond'
}
},
'http.client_country': {
'string_value': {
'truncated_byte_count': 0,
'value': 'USA'
}
},
'http.client_protocol': {
'string_value': {
'truncated_byte_count': 0,
'value': 'HTTP 1.1'
}
},
'http.client_region': {
'string_value': {
'truncated_byte_count': 0,
'value': 'WA'
}
},
'http.request_size': {
'int_value': {
'value': 100
}
},
'http.response_size': {
'int_value': {
'value': 10
}
},
'pid': {
'int_value': {
'value': 123456789
}
},
'tid': {
'int_value': {
'value': 987654321
}
},
'stacktrace': {
'string_value': {
'truncated_byte_count': 0,
'value': 'at unknown'
}
},
'grpc.host_port': {
'string_value': {
'truncated_byte_count': 0,
'value': 'localhost:50051'
}
},
'grpc.method': {
'string_value': {
'truncated_byte_count': 0,
'value': 'post'
}
}
}
}
expected_attributes = {
'outer key': 'some value',
'attributeMap': {
'key': {
'string_value': {
'truncated_byte_count': 0,
'value': 'value'
}
},
'/component': {
'string_value': {
'truncated_byte_count': 0,
'value': 'http'
}
},
'/error/message': {
'string_value': {
'truncated_byte_count': 0,
'value': 'error message'
}
},
'/error/name': {
'string_value': {
'truncated_byte_count': 0,
'value': 'error name'
}
},
'/http/host': {
'string_value': {
'truncated_byte_count': 0,
'value': 'host'
}
},
'/http/method': {
'string_value': {
'truncated_byte_count': 0,
'value': 'GET'
}
},
'/http/status_code': {
'string_value': {
'truncated_byte_count': 0,
'value': '200'
}
},
'/http/url': {
'string_value': {
'truncated_byte_count': 0,
'value': 'http://host:port/path?query'
}
},
'/http/user_agent': {
'string_value': {
'truncated_byte_count': 0,
'value': 'some user agent'
}
},
'/http/client_city': {
'string_value': {
'truncated_byte_count': 0,
'value': 'Redmond'
}
},
'/http/client_country': {
'string_value': {
'truncated_byte_count': 0,
'value': 'USA'
}
},
'/http/client_protocol': {
'string_value': {
'truncated_byte_count': 0,
'value': 'HTTP 1.1'
}
},
'/http/client_region': {
'string_value': {
'truncated_byte_count': 0,
'value': 'WA'
}
},
'/http/request/size': {
'int_value': {
'value': 100
}
},
'/http/response/size': {
'int_value': {
'value': 10
}
},
'/pid': {
'int_value': {
'value': 123456789
}
},
'/tid': {
'int_value': {
'value': 987654321
}
},
'/stacktrace': {
'string_value': {
'truncated_byte_count': 0,
'value': 'at unknown'
}
},
'/grpc/host_port': {
'string_value': {
'truncated_byte_count': 0,
'value': 'localhost:50051'
}
},
'/grpc/method': {
'string_value': {
'truncated_byte_count': 0,
'value': 'post'
}
}
}
}
exporter.map_attributes(attributes)
self.assertEqual(attributes, expected_attributes)
def test_translate_common_attributes_status_code(self):
project_id = 'PROJECT'
client = mock.Mock()
client.project = project_id
exporter = trace_exporter.StackdriverExporter(
client=client, project_id=project_id)
attributes = {
'outer key': 'some value',
'attributeMap': {
'http.status_code': {
'int_value': 200
}
}
}
expected_attributes = {
'outer key': 'some value',
'attributeMap': {
'/http/status_code': {
'string_value': {
'truncated_byte_count': 0,
'value': '200'
}
}
}
}
exporter.map_attributes(attributes)
self.assertEqual(attributes, expected_attributes)
class Test_set_attributes_gae(unittest.TestCase):
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance',
return_value=None)
def test_set_attributes_gae(self, mr_mock):
import os
trace = {'spans': [{'attributes': {}}]}
expected = {
'attributes': {
'attributeMap': {
'g.co/gae/app/module': {
'string_value': {
'truncated_byte_count': 0,
'value': 'service'
}
},
'g.co/gae/app/instance': {
'string_value': {
'truncated_byte_count': 0,
'value': 'flex'
}
},
'g.co/gae/app/version': {
'string_value': {
'truncated_byte_count': 0,
'value': 'version'
}
},
'g.co/gae/app/project': {
'string_value': {
'truncated_byte_count': 0,
'value': 'project'
}
},
'g.co/agent': {
'string_value': {
'truncated_byte_count': 0,
'value':
'opencensus-python [{}]'.format(__version__)
}
},
}
}
}
with mock.patch.dict(
os.environ, {
trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm',
trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex',
'GOOGLE_CLOUD_PROJECT': 'project',
'GAE_SERVICE': 'service',
'GAE_VERSION': 'version'
}):
self.assertTrue(trace_exporter.is_gae_environment())
trace_exporter.set_attributes(trace)
span = trace.get('spans')[0]
self.assertEqual(span, expected)
class TestMonitoredResourceAttributes(unittest.TestCase):
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance')
def test_monitored_resource_attributes_gke(self, gmr_mock):
import os
trace = {'spans': [{'attributes': {}}]}
expected = {
'attributes': {
'attributeMap': {
'g.co/gae/app/module': {
'string_value': {
'truncated_byte_count': 0,
'value': 'service'
}
},
'g.co/gae/app/instance': {
'string_value': {
'truncated_byte_count': 0,
'value': 'flex'
}
},
'g.co/gae/app/version': {
'string_value': {
'truncated_byte_count': 0,
'value': 'version'
}
},
'g.co/gae/app/project': {
'string_value': {
'truncated_byte_count': 0,
'value': 'project'
}
},
'g.co/agent': {
'string_value': {
'truncated_byte_count': 0,
'value':
'opencensus-python [{}]'.format(__version__)
}
},
'g.co/r/k8s_container/project_id': {
'string_value': {
'truncated_byte_count': 0,
'value': 'my_project'
}
},
'g.co/r/k8s_container/location': {
'string_value': {
'truncated_byte_count': 0,
'value': 'zone1'
}
},
'g.co/r/k8s_container/namespace_name': {
'string_value': {
'truncated_byte_count': 0,
'value': 'namespace'
}
},
'g.co/r/k8s_container/pod_name': {
'string_value': {
'truncated_byte_count': 0,
'value': 'pod'
}
},
'g.co/r/k8s_container/cluster_name': {
'string_value': {
'truncated_byte_count': 0,
'value': 'cluster'
}
},
'g.co/r/k8s_container/container_name': {
'string_value': {
'truncated_byte_count': 0,
'value': 'c1'
}
},
}
}
}
mock_resource = mock.Mock()
mock_resource.get_type.return_value = 'k8s_container'
mock_resource.get_labels.return_value = {
'k8s.io/pod/name': 'pod',
'k8s.io/cluster/name': 'cluster',
'k8s.io/namespace/name': 'namespace',
'k8s.io/container/name': 'c1',
'project_id': 'my_project',
'zone': 'zone1'
}
gmr_mock.return_value = mock_resource
with mock.patch.dict(
os.environ, {
trace_exporter._APPENGINE_FLEXIBLE_ENV_VM: 'vm',
trace_exporter._APPENGINE_FLEXIBLE_ENV_FLEX: 'flex',
'GOOGLE_CLOUD_PROJECT': 'project',
'GAE_SERVICE': 'service',
'GAE_VERSION': 'version'
}):
self.assertTrue(trace_exporter.is_gae_environment())
trace_exporter.set_attributes(trace)
span = trace.get('spans')[0]
self.assertEqual(span, expected)
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance')
def test_monitored_resource_attributes_gce(self, gmr_mock):
trace = {'spans': [{'attributes': {}}]}
expected = {
'attributes': {
'attributeMap': {
'g.co/agent': {
'string_value': {
'truncated_byte_count': 0,
'value':
'opencensus-python [{}]'.format(__version__)
}
},
'g.co/r/gce_instance/project_id': {
'string_value': {
'truncated_byte_count': 0,
'value': 'my_project'
}
},
'g.co/r/gce_instance/instance_id': {
'string_value': {
'truncated_byte_count': 0,
'value': '12345'
}
},
'g.co/r/gce_instance/zone': {
'string_value': {
'truncated_byte_count': 0,
'value': 'zone1'
}
},
}
}
}
mock_resource = mock.Mock()
mock_resource.get_type.return_value = 'gce_instance'
mock_resource.get_labels.return_value = {
'project_id': 'my_project',
'instance_id': '12345',
'zone': 'zone1'
}
gmr_mock.return_value = mock_resource
trace_exporter.set_attributes(trace)
span = trace.get('spans')[0]
self.assertEqual(span, expected)
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance')
def test_monitored_resource_attributes_aws(self, amr_mock):
trace = {'spans': [{'attributes': {}}]}
expected = {
'attributes': {
'attributeMap': {
'g.co/agent': {
'string_value': {
'truncated_byte_count': 0,
'value':
'opencensus-python [{}]'.format(__version__)
}
},
'g.co/r/aws_ec2_instance/aws_account': {
'string_value': {
'truncated_byte_count': 0,
'value': '123456789012'
}
},
'g.co/r/aws_ec2_instance/region': {
'string_value': {
'truncated_byte_count': 0,
'value': 'aws:us-west-2'
}
},
}
}
}
mock_resource = mock.Mock()
mock_resource.get_type.return_value = 'aws_ec2_instance'
mock_resource.get_labels.return_value = {
'aws_account': '123456789012',
'region': 'us-west-2'
}
amr_mock.return_value = mock_resource
trace_exporter.set_attributes(trace)
span = trace.get('spans')[0]
self.assertEqual(span, expected)
@mock.patch('opencensus.ext.stackdriver.trace_exporter.'
'monitored_resource.get_instance')
def test_monitored_resource_attributes_None(self, mr_mock):
trace = {'spans': [{'attributes': {}}]}
expected = {
'attributes': {
'attributeMap': {
'g.co/agent': {
'string_value': {
'truncated_byte_count': 0,
'value':
'opencensus-python [{}]'.format(__version__)
}
}
}
}
}
mr_mock.return_value = None
trace_exporter.set_attributes(trace)
span = trace.get('spans')[0]
self.assertEqual(span, expected)
mock_resource = mock.Mock()
mock_resource.get_type.return_value = mock.Mock()
mock_resource.get_labels.return_value = mock.Mock()
mr_mock.return_value = mock_resource
trace_exporter.set_attributes(trace)
span = trace.get('spans')[0]
self.assertEqual(span, expected)
class MockTransport(object):
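"""Stub transport that records whether export() was called, for exporter tests."""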
def __init__(self, exporter=None):
self.export_called = False
self.exporter = exporter
def export(self, trace):
self.export_called = True
|
|
"""
Main application window for starcheat GUI
"""
import sys
import logging
import json
from PyQt5 import QtWidgets
from PyQt5 import QtCore
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QMainWindow
from PyQt5.QtWidgets import QTableWidgetItem
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtWidgets import QAction
from PyQt5.QtWidgets import QProgressDialog
from PyQt5.QtWidgets import QColorDialog
from PyQt5.QtGui import QColor
from PyQt5.QtGui import QPixmap
from PIL.ImageQt import ImageQt
from threading import Thread
import saves
import qt_mainwindow
from assets.core import Assets
from config import Config
from gui.common import ItemWidget
from gui.common import empty_slot
from gui.openplayer import CharacterSelectDialog
from gui.utils import OptionsDialog
from gui.utils import AboutDialog
from gui.utils import ModsDialog
from gui.utils import save_modified_dialog
from gui.utils import new_setup_dialog
from gui.utils import check_index_valid
from gui.utils import update_check_worker
from gui.utils import update_check_dialog
from gui.itemedit import ItemEdit
from gui.itemedit import ImageBrowser
from gui.itemedit import import_json
from gui.itemedit import ItemEditOptions
from gui.blueprints import BlueprintLib
from gui.itembrowser import ItemBrowser
from gui.appearance import Appearance
from gui.techs import Techs
from gui.quests import Quests
from gui.ship import Ship
class StarcheatMainWindow(QMainWindow):
"""Overrides closeEvent on the main window to allow "want to save changes?" dialog"""
def __init__(self, parent):
super(QMainWindow, self).__init__()
self.parent = parent
def closeEvent(self, event):
if not self.isWindowModified():
event.accept()
return
button = save_modified_dialog(self.parent.window)
if button == QMessageBox.Save:
self.parent.save()
event.accept()
elif button == QMessageBox.Cancel:
event.ignore()
elif button == QMessageBox.Discard:
event.accept()
class MainWindow():
def __init__(self):
"""Display the main starcheat window."""
# check for new starcheat version online in a separate thread
update_result = [None]
update_thread = Thread(target=update_check_worker, args=[update_result], daemon=True)
update_thread.start()
self.app = QApplication(sys.argv)
self.window = StarcheatMainWindow(self)
self.ui = qt_mainwindow.Ui_MainWindow()
self.ui.setupUi(self.window)
logging.info("Main window init")
self.players = None
self.filename = None
self.item_browser = None
# remember the last selected item browser category
self.remember_browser = "<all>"
self.options_dialog = None
self.preview_armor = True
self.preview_bg = "#ffffff"
# connect action menu
self.ui.actionSave.triggered.connect(self.save)
self.ui.actionReload.triggered.connect(self.reload)
self.ui.actionOpen.triggered.connect(self.open_file)
self.ui.actionQuit.triggered.connect(self.app.closeAllWindows)
self.ui.actionOptions.triggered.connect(self.new_options_dialog)
self.ui.actionItemBrowser.triggered.connect(self.new_item_browser)
self.ui.actionAbout.triggered.connect(self.new_about_dialog)
self.ui.actionMods.triggered.connect(self.new_mods_dialog)
self.ui.actionImageBrowser.triggered.connect(self.new_image_browser_dialog)
self.ui.actionExportPlayerBinary.triggered.connect(self.export_save)
self.ui.actionExportPlayerJSON.triggered.connect(self.export_json)
self.ui.actionExportMetadataBinary.triggered.connect(lambda: self.export_save("metadata"))
self.ui.actionExportMetadataJSON.triggered.connect(lambda: self.export_json("metadata"))
self.ui.actionImportPlayerBinary.triggered.connect(self.import_save)
self.ui.actionImportPlayerJSON.triggered.connect(self.import_json)
self.ui.actionImportMetadataBinary.triggered.connect(lambda: self.import_save("metadata"))
self.ui.actionImportMetadataJSON.triggered.connect(lambda: self.import_json("metadata"))
# set up bag tables
bags = ("wieldable", "head", "chest", "legs", "back", "main_bag",
"action_bar", "tile_bag", "essentials", "mouse")
for bag in bags:
logging.debug("Setting up %s bag", bag)
self.bag_setup(getattr(self.ui, bag), bag)
self.preview_setup()
# signals
self.ui.blueprints_button.clicked.connect(self.new_blueprint_edit)
self.ui.appearance_button.clicked.connect(self.new_appearance_dialog)
self.ui.techs_button.clicked.connect(self.new_techs_dialog)
self.ui.quests_button.clicked.connect(self.new_quests_dialog)
self.ui.ship_button.clicked.connect(self.new_ship_dialog)
self.ui.name.textChanged.connect(self.set_name)
self.ui.male.clicked.connect(self.set_gender)
self.ui.female.clicked.connect(self.set_gender)
self.ui.description.textChanged.connect(self.set_description)
self.ui.pixels.valueChanged.connect(self.set_pixels)
self.ui.health.valueChanged.connect(lambda: self.set_stat_slider("health"))
self.ui.energy.valueChanged.connect(lambda: self.set_stat_slider("energy"))
self.ui.health_button.clicked.connect(lambda: self.max_stat("health"))
self.ui.energy_button.clicked.connect(lambda: self.max_stat("energy"))
self.ui.copy_uuid_button.clicked.connect(self.copy_uuid)
self.window.setWindowModified(False)
logging.debug("Showing main window")
self.window.show()
# launch first setup if we need to
if not new_setup_dialog(self.window):
logging.error("Config/index creation failed")
return
logging.info("Starbound folder: %s", Config().read("starbound_folder"))
logging.info("Checking assets hash")
if not check_index_valid(self.window):
logging.error("Index creation failed")
return
logging.info("Loading assets database")
self.assets = Assets(Config().read("assets_db"),
Config().read("starbound_folder"))
self.items = self.assets.items()
# populate species combobox
for species in self.assets.species().get_species_list():
self.ui.race.addItem(species)
self.ui.race.currentTextChanged.connect(self.update_species)
# populate game mode combobox
for mode in sorted(self.assets.player().mode_types.values()):
self.ui.game_mode.addItem(mode)
self.ui.game_mode.currentTextChanged.connect(self.set_game_mode)
# launch open file dialog
self.player = None
logging.debug("Open file dialog")
open_player = self.open_file()
# we *need* at least an initial save file
if not open_player:
logging.warning("No player file selected")
return
self.ui.name.setFocus()
# block for update check result (should be ready now)
update_thread.join()
if update_result[0]:
update_check_dialog(self.window, update_result[0])
sys.exit(self.app.exec_())
def update(self):
"""Update all GUI widgets with values from PlayerSave instance."""
logging.info("Updating main window")
# uuid / save version
self.ui.uuid_label.setText(self.player.get_uuid())
self.ui.ver_label.setText(self.player.get_header())
# name
self.ui.name.setText(self.player.get_name())
# race
self.ui.race.setCurrentText(self.player.get_race(pretty=True))
# pixels
try:
self.ui.pixels.setValue(self.player.get_pixels())
except TypeError:
logging.exception("Unable to set pixels widget")
# description
self.ui.description.setPlainText(self.player.get_description())
# gender
getattr(self.ui, self.player.get_gender()).toggle()
# game mode
game_mode = self.player.get_game_mode()
try:
self.ui.game_mode.setCurrentText(self.assets.player().mode_types[game_mode])
except KeyError:
logging.exception("No game mode set on player")
# stats
self.update_stat("health")
self.update_stat("energy")
# quests
can_edit_quests = (self.player.metadata is not None and
"quests" in self.player.metadata.metadata)
self.ui.quests_button.setEnabled(can_edit_quests)
# ship
can_edit_ship = (self.player.metadata is not None and
"shipUpgrades" in self.player.metadata.metadata and
"ai" in self.player.metadata.metadata)
self.ui.ship_button.setEnabled(can_edit_ship)
# items
total = 0
progress = QProgressDialog("Updating item slots...",
None, 0, 10, self.window)
progress.setWindowTitle("Updating...")
progress.setWindowModality(QtCore.Qt.ApplicationModal)
progress.forceShow()
progress.setValue(total)
# equipment
equip_bags = "head", "chest", "legs", "back"
for bag in equip_bags:
logging.debug("Updating %s", bag)
items = []
for x in getattr(self.player, "get_" + bag)():
if x is not None:
items.append(ItemWidget(x["__content"], self.assets))
else:
items.append(ItemWidget(None, self.assets))
getattr(self.ui, bag).setItem(0, 0, items[0])
getattr(self.ui, bag).setItem(0, 1, items[1])
total += 1
progress.setValue(total)
for bag in "wieldable", "main_bag", "tile_bag", "action_bar", "essentials", "mouse":
self.update_bag(bag)
total += 1
progress.setValue(total)
self.update_player_preview()
def bag_setup(self, widget, name):
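"""Set up context menu actions (edit, import, trash, and sort/clear where
applicable) and drag-and-drop behaviour for an inventory table widget."""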
widget.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
# TODO: still issues with drag drop between tables
widget.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
widget.cellChanged.connect(self.set_edited)
item_edit = getattr(self, "new_" + name + "_item_edit")
widget.cellDoubleClicked.connect(lambda: item_edit(False))
sortable = ("main_bag", "tile_bag")
clearable = ("wieldable", "action_bar", "essentials")
edit_action = QAction("Edit...", widget)
edit_action.triggered.connect(lambda: item_edit(False))
widget.addAction(edit_action)
edit_json_action = QAction("Edit JSON...", widget)
edit_json_action.triggered.connect(lambda: item_edit(False, True))
widget.addAction(edit_json_action)
import_json = QAction("Import...", widget)
import_json.triggered.connect(lambda: item_edit(True))
widget.addAction(import_json)
trash_action = QAction("Trash", widget)
trash_slot = lambda: self.trash_slot(self.window, widget, True)
trash_action.triggered.connect(trash_slot)
widget.addAction(trash_action)
if name in sortable or name in clearable:
sep_action = QAction(widget)
sep_action.setSeparator(True)
widget.addAction(sep_action)
if name in clearable:
clear_action = QAction("Clear Held Items", widget)
clear_action.triggered.connect(self.clear_held_slots)
widget.addAction(clear_action)
if name in sortable:
sort_name = QAction("Sort By Name", widget)
sort_name.triggered.connect(lambda: self.sort_bag(name, "name"))
widget.addAction(sort_name)
sort_type = QAction("Sort By Type", widget)
sort_type.triggered.connect(lambda: self.sort_bag(name, "category"))
widget.addAction(sort_type)
sort_count = QAction("Sort By Count", widget)
sort_count.triggered.connect(lambda: self.sort_bag(name, "count"))
widget.addAction(sort_count)
def toggle_preview_armor(self):
self.preview_armor = not self.preview_armor
self.update_player_preview()
def change_preview_background(self):
qcolor = QColorDialog().getColor(QColor(self.preview_bg),
self.window)
if qcolor.isValid():
self.preview_bg = qcolor.name()
self.update_player_preview()
def preview_setup(self):
button = self.ui.preview_config_button
toggle_armor = QAction("Toggle Armor", button)
toggle_armor.triggered.connect(self.toggle_preview_armor)
button.addAction(toggle_armor)
change_bg = QAction("Change Background...", button)
change_bg.triggered.connect(self.change_preview_background)
button.addAction(change_bg)
def update_title(self):
"""Update window title with player name."""
self.window.setWindowTitle("starcheat - " + self.player.get_name() + "[*]")
def save(self):
"""Update internal player dict with GUI values and export to file."""
logging.info("Saving player file %s", self.player.filename)
self.set_bags()
# save and show status
logging.info("Writing file to disk")
self.player.export_save(self.player.filename)
self.player.metadata.export_metadata(self.player.metadata.filename)
self.update_title()
self.ui.statusbar.showMessage("Saved " + self.player.filename, 3000)
self.window.setWindowModified(False)
self.players[self.player.get_uuid()] = self.player
def new_item_edit(self, bag, do_import, json_edit=False):
"""Display a new item edit dialog using the select cell in a given bag."""
logging.debug("New item edit dialog")
row = bag.currentRow()
column = bag.currentColumn()
current = bag.currentItem()
item = saves.empty_slot()
valid_slot = (type(current) is not QTableWidgetItem and
current is not None and
current.item is not None)
if do_import:
imported = import_json(self.window)
if imported is False:
self.ui.statusbar.showMessage("Error importing item, see starcheat log for details", 3000)
return
elif imported is None:
return
else:
item = imported
# cells don't retain ItemSlot widget when they've been dragged away
if valid_slot:
item.update(current.item)
if not json_edit:
item_edit = ItemEdit(self.window, item,
self.player, self.assets,
self.remember_browser)
else:
item_edit = ItemEditOptions(self.window,
item["name"],
item,
"Edit Item Data")
def update_slot():
logging.debug("Writing changes to slot")
try:
if not json_edit:
data = item_edit.get_item()
else:
name, data = item_edit.get_option()
new_slot = ItemWidget(data, self.assets)
if new_slot.item["name"] != "":
bag.setItem(row, column, new_slot)
if not json_edit:
self.remember_browser = item_edit.remember_browser
self.set_bags()
self.update_player_preview()
self.set_edited()
except (TypeError, KeyError):
logging.exception("Error updating item slot")
self.ui.statusbar.showMessage("Error updating item slot, see starcheat log for details", 3000)
item_edit.dialog.accepted.connect(update_slot)
if not json_edit:
trash_slot = lambda: self.trash_slot(item_edit.dialog, bag)
item_edit.ui.trash_button.clicked.connect(trash_slot)
got_item = item_edit.launch()
if got_item:
item_edit.dialog.exec()
else:
item_edit.dialog.exec()
def trash_slot(self, dialog, bag, standalone=False):
row = bag.currentRow()
column = bag.currentColumn()
ask_dialog = QMessageBox(dialog)
ask_dialog.setWindowTitle("Trash Item")
ask_dialog.setText("Are you sure?")
ask_dialog.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
ask_dialog.setDefaultButton(QMessageBox.No)
ask_dialog.setIcon(QMessageBox.Question)
if ask_dialog.exec() == QMessageBox.Yes:
bag.setItem(row, column, empty_slot())
if not standalone:
dialog.close()
self.set_bags()
self.update_player_preview()
self.set_edited()
def set_edited(self):
self.window.setWindowModified(True)
def sort_bag(self, bag_name, sort_by):
self.set_bags()
bag = getattr(self.player, "get_" + bag_name)()
sorted_bag = self.assets.player().sort_bag(bag, sort_by)
getattr(self.player, "set_" + bag_name)(sorted_bag)
self.ui.statusbar.showMessage("Sorting by " + sort_by + "...", 3000)
self.update()
self.ui.statusbar.clearMessage()
def clear_held_slots(self):
self.player.clear_held_slots()
self.set_edited()
self.ui.statusbar.showMessage("All held items have been cleared", 3000)
def new_blueprint_edit(self):
"""Launch a new blueprint management dialog."""
logging.debug("New blueprint dialog")
blueprint_lib = BlueprintLib(self.window,
self.player.get_blueprints(),
self.player.get_new_blueprints())
def update_blueprints():
logging.debug("Writing blueprints")
self.player.set_blueprints(blueprint_lib.get_known_list())
self.player.set_new_blueprints(blueprint_lib.new_blueprints)
blueprint_lib.dialog.close()
self.set_edited()
blueprint_lib.ui.buttonBox.rejected.connect(update_blueprints)
blueprint_lib.dialog.exec()
def copy_uuid(self):
clipboard = self.app.clipboard()
clipboard.setText(self.player.get_uuid())
self.ui.statusbar.showMessage("UUID copied to clipboard", 3000)
def new_item_browser(self):
"""Launch a standalone item browser dialog that does write any changes."""
self.item_browser = ItemBrowser(self.window, True)
self.item_browser.dialog.show()
def new_options_dialog(self):
"""Launch a new options config dialog."""
logging.debug("New options dialog")
self.options_dialog = OptionsDialog(self.window)
def write_options():
logging.info("Writing options to disk")
self.options_dialog.write()
self.update()
self.options_dialog.dialog.rejected.connect(write_options)
self.options_dialog.dialog.exec()
def new_about_dialog(self):
"""Launch a new about dialog."""
about_dialog = AboutDialog(self.window)
about_dialog.dialog.exec()
def new_appearance_dialog(self):
appearance_dialog = Appearance(self)
appearance_dialog.dialog.exec()
appearance_dialog.write_appearance_values()
self.update_player_preview()
def new_techs_dialog(self):
techs_dialog = Techs(self)
techs_dialog.dialog.rejected.connect(techs_dialog.write_techs)
techs_dialog.dialog.exec()
def new_quests_dialog(self):
quests_dialog = Quests(self)
quests_dialog.dialog.rejected.connect(quests_dialog.write_quests)
quests_dialog.dialog.exec()
def new_ship_dialog(self):
ship_dialog = Ship(self)
ship_dialog.dialog.rejected.connect(ship_dialog.write_ship)
ship_dialog.dialog.exec()
def new_mods_dialog(self):
mods_dialog = ModsDialog(self.window)
mods_dialog.dialog.exec()
def new_image_browser_dialog(self):
self.image_browser = ImageBrowser(self.window, self.assets)
self.image_browser.dialog.show()
def reload(self):
"""Reload the currently open save file and update GUI values."""
logging.info("Reloading file %s", self.player.filename)
self.player = saves.PlayerSave(self.player.filename)
self.update()
self.update_title()
self.ui.statusbar.showMessage("Reloaded " + self.player.filename, 3000)
self.window.setWindowModified(False)
def open_file(self):
"""Display open file dialog and load selected save."""
if self.window.isWindowModified():
button = save_modified_dialog(self.window)
if button == QMessageBox.Cancel:
return False
elif button == QMessageBox.Save:
self.save()
character_select = CharacterSelectDialog(self, self.assets)
character_select.show()
self.players = character_select.players
if character_select.selected is None:
logging.warning("No player selected")
return False
else:
self.player = character_select.selected
self.update()
self.update_title()
self.ui.statusbar.showMessage("Opened " + self.player.filename, 3000)
self.window.setWindowModified(False)
return True
# export save stuff
def export_save(self, kind="player"):
"""Save a copy of the current metadata/player file to another location.
Doesn't change the current filename."""
if kind == "metadata":
export_func = lambda: self.player.metadata.export_metadata(filename[0])
title = "Export Metadata File As"
filetype = "Player (*.metadata);;All Files (*)"
status = "Exported metadata file to "
else:
export_func = lambda: self.player.export_save(filename[0])
title = "Export Player File As"
filetype = "Player (*.player);;All Files (*)"
status = "Exported player file to "
filename = QFileDialog.getSaveFileName(self.window, title, filter=filetype)
if filename[0] != "":
self.set_bags()
export_func()
self.ui.statusbar.showMessage(status + filename[0], 3000)
def export_json(self, kind="player"):
"""Export player entity as json."""
if kind == "metadata":
data = self.player.metadata.metadata
title = "Export Metadata JSON File As"
filetype = "JSON (*.json);;All Files (*)"
status = "Exported metadata JSON file to "
else:
data = self.player.entity
title = "Export Player JSON File As"
filetype = "JSON (*.json);;All Files (*)"
status = "Exported player JSON file to "
filename = QFileDialog.getSaveFileName(self.window, title, filter=filetype)
if filename[0] != "":
self.set_bags()
json_data = json.dumps(data, sort_keys=True,
indent=4, separators=(',', ': '))
json_file = open(filename[0], "w")
json_file.write(json_data)
json_file.close()
self.ui.statusbar.showMessage(status + filename[0], 3000)
# import save stuff
def import_save(self, kind="player"):
"""Import a .player file over the top of current player."""
if kind == "metadata":
import_func = self.player.metadata.import_metadata
title = "Import Metadata File"
filetype = "Player (*.metadata);;All Files (*)"
status = "Imported metadata file from "
else:
import_func = self.player.import_save
title = "Import Player File"
filetype = "Player (*.player);;All Files (*)"
status = "Imported player file from "
filename = QFileDialog.getOpenFileName(self.window, title, filter=filetype)
if filename[0] == "":
return
try:
import_func(filename[0])
self.update()
self.ui.statusbar.showMessage(status + filename[0], 3000)
except Exception:
logging.exception("Error reading file: %s", filename[0])
self.ui.statusbar.showMessage("Error reading file, see starcheat log for details", 3000)
def import_json(self, kind="player"):
"""Import an exported JSON file and merge/update with open player/metadata."""
if kind == "metadata":
update_func = lambda: self.player.metadata.metadata.update(data)
title = "Import JSON Metadata File"
status = "Imported metadata file "
else:
update_func = lambda: self.player.entity.update(data)
title = "Import JSON Player File"
status = "Imported player file "
filename = QFileDialog.getOpenFileName(self.window, title,
filter="JSON (*.json);;All Files (*)")
if filename[0] == "":
logging.debug("No file selected to import")
return
try:
data = json.load(open(filename[0], "r"))
update_func()
self.update()
self.ui.statusbar.showMessage(status + filename[0], 3000)
except Exception:
logging.exception("Error reading file: %s", filename[0])
self.ui.statusbar.showMessage("Error importing file, see starcheat log for details", 3000)
def get_gender(self):
if self.ui.male.isChecked():
return "male"
else:
return "female"
def get_bag(self, name):
"""Return the entire contents of a given non-equipment bag as raw values."""
logging.debug("Getting %s contents", name)
row = column = 0
bag = getattr(self.player, "get_" + name)()
for i in range(len(bag)):
item = getattr(self.ui, name).item(row, column)
empty_item = (item is None or
type(item) is QTableWidgetItem or
item.item is None)
if empty_item:
item = None
else:
widget = item.item
item = saves.new_item(widget["name"],
widget["count"],
widget["parameters"])
bag[i] = item
# so far all non-equip bags are 10 cols long
column += 1
if (column % 10) == 0:
row += 1
column = 0
return bag
def get_equip(self, name):
"""Return the raw values of both slots in a given equipment bag."""
logging.debug("Getting %s contents", name)
equip = getattr(self.ui, name)
main_cell = equip.item(0, 0)
glamor_cell = equip.item(0, 1)
# when you drag itemwidgets around the cell will become empty so just
# pretend it had an empty slot value
empty_main = (main_cell is None or
type(main_cell) is QTableWidgetItem or
main_cell.item is None)
if empty_main:
main = None
else:
widget = main_cell.item
main = saves.new_item(widget["name"],
widget["count"],
widget["parameters"])
empty_glamor = (glamor_cell is None or
type(glamor_cell) is QTableWidgetItem or
glamor_cell.item is None)
if empty_glamor:
glamor = None
else:
widget = glamor_cell.item
glamor = saves.new_item(widget["name"],
widget["count"],
widget["parameters"])
return main, glamor
def update_bag(self, bag_name):
"""Set the entire contents of any given bag with ItemWidgets based off player data."""
logging.debug("Updating %s contents", bag_name)
row = column = 0
bag = getattr(self.player, "get_" + bag_name)()
for slot in range(len(bag)):
item = bag[slot]
if item is not None and "__content" in item:
widget = ItemWidget(item["__content"], self.assets)
else:
widget = ItemWidget(None, self.assets)
getattr(self.ui, bag_name).setItem(row, column, widget)
column += 1
if (column % 10) == 0:
row += 1
column = 0
def update_player_preview(self):
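"""Render the player sprite from the assets database and show it in the
preview pane, falling back to an empty pixmap if rendering fails."""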
try:
image = self.assets.species().render_player(self.player,
self.preview_armor)
pixmap = QPixmap.fromImage(ImageQt(image))
except (OSError, TypeError, AttributeError):
# TODO: more specific error handling; for now catch the exceptions
# rendering is known to raise
logging.exception("Couldn't load species images")
pixmap = QPixmap()
self.ui.player_preview.setStyleSheet("background-color: %s;" % self.preview_bg)
self.ui.player_preview.setPixmap(pixmap)
self.window.setWindowModified(True)
def update_species(self):
species = self.ui.race.currentText()
if self.player.get_race(pretty=True) == species:
# don't overwrite appearance values if it didn't really change
return
self.player.set_race(species)
defaults = self.assets.species().get_default_colors(species)
for key in defaults:
getattr(self.player, "set_%s_directives" % key)(defaults[key][0])
self.update_player_preview()
self.window.setWindowModified(True)
def set_pixels(self):
self.player.set_pixels(self.ui.pixels.value())
self.set_edited()
def set_name(self):
self.player.set_name(self.ui.name.text())
self.set_edited()
def set_description(self):
self.player.set_description(self.ui.description.toPlainText())
self.set_edited()
def set_gender(self):
self.player.set_gender(self.get_gender())
self.update_player_preview()
self.set_edited()
def set_game_mode(self):
self.player.set_game_mode(self.assets.player().get_mode_type(self.ui.game_mode.currentText()))
self.set_edited()
def set_bags(self):
# this function mostly exists to work around drag-and-drop not updating
# the player entity; it reads the current table view contents back into
# the player
# equipment
equip_bags = "head", "chest", "legs", "back"
for b in equip_bags:
bag = self.get_equip(b)
getattr(self.player, "set_" + b)(bag[0], bag[1])
# bags
bags = "wieldable", "main_bag", "tile_bag", "action_bar", "essentials", "mouse"
for b in bags:
getattr(self.player, "set_" + b)(self.get_bag(b))
def max_stat(self, name):
"""Set a stat's current value to its max value."""
getattr(self.player, "set_"+name)(100)
self.update_stat(name)
def set_stat(self, name):
max = getattr(self.ui, "max_"+name).value()
getattr(self.player, "set_max_"+name)(float(max))
self.update_stat(name)
def set_stat_slider(self, name):
current = getattr(self.ui, name).value()
getattr(self.player, "set_"+name)(current)
self.update_stat(name)
def update_stat(self, name):
try:
current = int(getattr(self.player, "get_"+name)())
button = getattr(self.ui, name+"_button")
getattr(self.ui, name).setValue(current)
button.setEnabled(current != 100)
self.set_edited()
except TypeError:
logging.exception("Unable to set stat %s", name)
# these are used for connecting the item edit dialog to bag tables
def new_main_bag_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.main_bag, do_import, json_edit)
def new_tile_bag_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.tile_bag, do_import, json_edit)
def new_action_bar_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.action_bar, do_import, json_edit)
def new_head_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.head, do_import, json_edit)
def new_chest_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.chest, do_import, json_edit)
def new_legs_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.legs, do_import, json_edit)
def new_back_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.back, do_import, json_edit)
def new_wieldable_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.wieldable, do_import, json_edit)
def new_essentials_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.essentials, do_import, json_edit)
def new_mouse_item_edit(self, do_import, json_edit=False):
self.new_item_edit(self.ui.mouse, do_import, json_edit)
|
|
from __future__ import nested_scopes # Backward compat for 2.1
from unittest import TestCase
from wsgiref.util import setup_testing_defaults
from wsgiref.headers import Headers
from wsgiref.handlers import BaseHandler, BaseCGIHandler
from wsgiref import util
from wsgiref.validate import validator
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler, demo_app
from wsgiref.simple_server import make_server
from StringIO import StringIO
from SocketServer import BaseServer
import os
import re
import sys
from test import test_support
class MockServer(WSGIServer):
"""Non-socket HTTP server"""
def __init__(self, server_address, RequestHandlerClass):
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.server_bind()
def server_bind(self):
host, port = self.server_address
self.server_name = host
self.server_port = port
self.setup_environ()
class MockHandler(WSGIRequestHandler):
"""Non-socket HTTP handler"""
def setup(self):
self.connection = self.request
self.rfile, self.wfile = self.connection
def finish(self):
pass
def hello_app(environ,start_response):
start_response("200 OK", [
('Content-Type','text/plain'),
('Date','Mon, 05 Jun 2006 18:49:54 GMT')
])
return ["Hello, world!"]
def run_amock(app=hello_app, data="GET / HTTP/1.0\n\n"):
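"""Run `app` through the in-memory MockServer/MockHandler pair, feeding it
`data` as the raw request, and return the captured output and error log."""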
server = make_server("", 80, app, MockServer, MockHandler)
inp, out, err, olderr = StringIO(data), StringIO(), StringIO(), sys.stderr
sys.stderr = err
try:
server.finish_request((inp,out), ("127.0.0.1",8888))
finally:
sys.stderr = olderr
return out.getvalue(), err.getvalue()
def compare_generic_iter(make_it,match):
"""Utility to compare a generic 2.1/2.2+ iterator with an iterable
If running under Python 2.2+, this tests the iterator using iter()/next(),
as well as __getitem__. 'make_it' must be a function returning a fresh
iterator to be tested (since this may test the iterator twice)."""
it = make_it()
n = 0
for item in match:
if not it[n]==item: raise AssertionError
n+=1
try:
it[n]
except IndexError:
pass
else:
raise AssertionError("Too many items from __getitem__",it)
try:
iter, StopIteration
except NameError:
pass
else:
# Only test iter mode under 2.2+
it = make_it()
if not iter(it) is it: raise AssertionError
for item in match:
if not it.next()==item: raise AssertionError
try:
it.next()
except StopIteration:
pass
else:
raise AssertionError("Too many items from .next()",it)
class IntegrationTests(TestCase):
def check_hello(self, out, has_length=True):
self.assertEqual(out,
"HTTP/1.0 200 OK\r\n"
"Server: WSGIServer/0.1 Python/"+sys.version.split()[0]+"\r\n"
"Content-Type: text/plain\r\n"
"Date: Mon, 05 Jun 2006 18:49:54 GMT\r\n" +
(has_length and "Content-Length: 13\r\n" or "") +
"\r\n"
"Hello, world!"
)
def test_plain_hello(self):
out, err = run_amock()
self.check_hello(out)
def test_validated_hello(self):
out, err = run_amock(validator(hello_app))
# the middleware doesn't support len(), so content-length isn't there
self.check_hello(out, has_length=False)
def test_simple_validation_error(self):
def bad_app(environ,start_response):
start_response("200 OK", ('Content-Type','text/plain'))
return ["Hello, world!"]
out, err = run_amock(validator(bad_app))
self.assertTrue(out.endswith(
"A server error occurred. Please contact the administrator."
))
self.assertEqual(
err.splitlines()[-2],
"AssertionError: Headers (('Content-Type', 'text/plain')) must"
" be of type list: <type 'tuple'>"
)
class UtilityTests(TestCase):
def checkShift(self,sn_in,pi_in,part,sn_out,pi_out):
env = {'SCRIPT_NAME':sn_in,'PATH_INFO':pi_in}
util.setup_testing_defaults(env)
self.assertEqual(util.shift_path_info(env),part)
self.assertEqual(env['PATH_INFO'],pi_out)
self.assertEqual(env['SCRIPT_NAME'],sn_out)
return env
def checkDefault(self, key, value, alt=None):
# Check defaulting when empty
env = {}
util.setup_testing_defaults(env)
if isinstance(value, StringIO):
self.assertIsInstance(env[key], StringIO)
else:
self.assertEqual(env[key], value)
# Check existing value
env = {key:alt}
util.setup_testing_defaults(env)
self.assertIs(env[key], alt)
def checkCrossDefault(self,key,value,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(kw[key],value)
def checkAppURI(self,uri,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(util.application_uri(kw),uri)
def checkReqURI(self,uri,query=1,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(util.request_uri(kw,query),uri)
def checkFW(self,text,size,match):
def make_it(text=text,size=size):
return util.FileWrapper(StringIO(text),size)
compare_generic_iter(make_it,match)
it = make_it()
self.assertFalse(it.filelike.closed)
for item in it:
pass
self.assertFalse(it.filelike.closed)
it.close()
self.assertTrue(it.filelike.closed)
def testSimpleShifts(self):
self.checkShift('','/', '', '/', '')
self.checkShift('','/x', 'x', '/x', '')
self.checkShift('/','', None, '/', '')
self.checkShift('/a','/x/y', 'x', '/a/x', '/y')
self.checkShift('/a','/x/', 'x', '/a/x', '/')
def testNormalizedShifts(self):
self.checkShift('/a/b', '/../y', '..', '/a', '/y')
self.checkShift('', '/../y', '..', '', '/y')
self.checkShift('/a/b', '//y', 'y', '/a/b/y', '')
self.checkShift('/a/b', '//y/', 'y', '/a/b/y', '/')
self.checkShift('/a/b', '/./y', 'y', '/a/b/y', '')
self.checkShift('/a/b', '/./y/', 'y', '/a/b/y', '/')
self.checkShift('/a/b', '///./..//y/.//', '..', '/a', '/y/')
self.checkShift('/a/b', '///', '', '/a/b/', '')
self.checkShift('/a/b', '/.//', '', '/a/b/', '')
self.checkShift('/a/b', '/x//', 'x', '/a/b/x', '/')
self.checkShift('/a/b', '/.', None, '/a/b', '')
def testDefaults(self):
for key, value in [
('SERVER_NAME','127.0.0.1'),
('SERVER_PORT', '80'),
('SERVER_PROTOCOL','HTTP/1.0'),
('HTTP_HOST','127.0.0.1'),
('REQUEST_METHOD','GET'),
('SCRIPT_NAME',''),
('PATH_INFO','/'),
('wsgi.version', (1,0)),
('wsgi.run_once', 0),
('wsgi.multithread', 0),
('wsgi.multiprocess', 0),
('wsgi.input', StringIO("")),
('wsgi.errors', StringIO()),
('wsgi.url_scheme','http'),
]:
self.checkDefault(key,value)
def testCrossDefaults(self):
self.checkCrossDefault('HTTP_HOST',"foo.bar",SERVER_NAME="foo.bar")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="on")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="1")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="yes")
self.checkCrossDefault('wsgi.url_scheme',"http",HTTPS="foo")
self.checkCrossDefault('SERVER_PORT',"80",HTTPS="foo")
self.checkCrossDefault('SERVER_PORT',"443",HTTPS="on")
def testGuessScheme(self):
self.assertEqual(util.guess_scheme({}), "http")
self.assertEqual(util.guess_scheme({'HTTPS':"foo"}), "http")
self.assertEqual(util.guess_scheme({'HTTPS':"on"}), "https")
self.assertEqual(util.guess_scheme({'HTTPS':"yes"}), "https")
self.assertEqual(util.guess_scheme({'HTTPS':"1"}), "https")
def testAppURIs(self):
self.checkAppURI("http://127.0.0.1/")
self.checkAppURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
self.checkAppURI("http://127.0.0.1/sp%E4m", SCRIPT_NAME="/sp\xe4m")
self.checkAppURI("http://spam.example.com:2071/",
HTTP_HOST="spam.example.com:2071", SERVER_PORT="2071")
self.checkAppURI("http://spam.example.com/",
SERVER_NAME="spam.example.com")
self.checkAppURI("http://127.0.0.1/",
HTTP_HOST="127.0.0.1", SERVER_NAME="spam.example.com")
self.checkAppURI("https://127.0.0.1/", HTTPS="on")
self.checkAppURI("http://127.0.0.1:8000/", SERVER_PORT="8000",
HTTP_HOST=None)
def testReqURIs(self):
self.checkReqURI("http://127.0.0.1/")
self.checkReqURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
self.checkReqURI("http://127.0.0.1/sp%E4m", SCRIPT_NAME="/sp\xe4m")
self.checkReqURI("http://127.0.0.1/spammity/spam",
SCRIPT_NAME="/spammity", PATH_INFO="/spam")
self.checkReqURI("http://127.0.0.1/spammity/sp%E4m",
SCRIPT_NAME="/spammity", PATH_INFO="/sp\xe4m")
self.checkReqURI("http://127.0.0.1/spammity/spam;ham",
SCRIPT_NAME="/spammity", PATH_INFO="/spam;ham")
self.checkReqURI("http://127.0.0.1/spammity/spam;cookie=1234,5678",
SCRIPT_NAME="/spammity", PATH_INFO="/spam;cookie=1234,5678")
self.checkReqURI("http://127.0.0.1/spammity/spam?say=ni",
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
self.checkReqURI("http://127.0.0.1/spammity/spam?s%E4y=ni",
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="s%E4y=ni")
self.checkReqURI("http://127.0.0.1/spammity/spam", 0,
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
def testFileWrapper(self):
self.checkFW("xyz"*50, 120, ["xyz"*40,"xyz"*10])
def testHopByHop(self):
for hop in (
"Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
"TE Trailers Transfer-Encoding Upgrade"
).split():
for alt in hop, hop.title(), hop.upper(), hop.lower():
self.assertTrue(util.is_hop_by_hop(alt))
# Not comprehensive, just a few random header names
for hop in (
"Accept Cache-Control Date Pragma Trailer Via Warning"
).split():
for alt in hop, hop.title(), hop.upper(), hop.lower():
self.assertFalse(util.is_hop_by_hop(alt))
class HeaderTests(TestCase):
def testMappingInterface(self):
test = [('x','y')]
self.assertEqual(len(Headers([])),0)
self.assertEqual(len(Headers(test[:])),1)
self.assertEqual(Headers(test[:]).keys(), ['x'])
self.assertEqual(Headers(test[:]).values(), ['y'])
self.assertEqual(Headers(test[:]).items(), test)
self.assertIsNot(Headers(test).items(), test) # must be copy!
h=Headers([])
del h['foo'] # should not raise an error
h['Foo'] = 'bar'
for m in h.has_key, h.__contains__, h.get, h.get_all, h.__getitem__:
self.assertTrue(m('foo'))
self.assertTrue(m('Foo'))
self.assertTrue(m('FOO'))
self.assertFalse(m('bar'))
self.assertEqual(h['foo'],'bar')
h['foo'] = 'baz'
self.assertEqual(h['FOO'],'baz')
self.assertEqual(h.get_all('foo'),['baz'])
self.assertEqual(h.get("foo","whee"), "baz")
self.assertEqual(h.get("zoo","whee"), "whee")
self.assertEqual(h.setdefault("foo","whee"), "baz")
self.assertEqual(h.setdefault("zoo","whee"), "whee")
self.assertEqual(h["foo"],"baz")
self.assertEqual(h["zoo"],"whee")
def testRequireList(self):
self.assertRaises(TypeError, Headers, "foo")
def testExtras(self):
h = Headers([])
self.assertEqual(str(h),'\r\n')
h.add_header('foo','bar',baz="spam")
self.assertEqual(h['foo'], 'bar; baz="spam"')
self.assertEqual(str(h),'foo: bar; baz="spam"\r\n\r\n')
h.add_header('Foo','bar',cheese=None)
self.assertEqual(h.get_all('foo'),
['bar; baz="spam"', 'bar; cheese'])
self.assertEqual(str(h),
'foo: bar; baz="spam"\r\n'
'Foo: bar; cheese\r\n'
'\r\n'
)
class ErrorHandler(BaseCGIHandler):
"""Simple handler subclass for testing BaseHandler"""
# BaseHandler records the OS environment at import time, but envvars
# might have been changed later by other tests, which trips up
# HandlerTests.testEnviron().
os_environ = dict(os.environ.items())
def __init__(self,**kw):
setup_testing_defaults(kw)
BaseCGIHandler.__init__(
self, StringIO(''), StringIO(), StringIO(), kw,
multithread=True, multiprocess=True
)
class TestHandler(ErrorHandler):
"""Simple handler subclass for testing BaseHandler, w/error passthru"""
def handle_error(self):
raise # for testing, we want to see what's happening
class HandlerTests(TestCase):
def checkEnvironAttrs(self, handler):
env = handler.environ
for attr in [
'version','multithread','multiprocess','run_once','file_wrapper'
]:
if attr=='file_wrapper' and handler.wsgi_file_wrapper is None:
continue
self.assertEqual(getattr(handler,'wsgi_'+attr),env['wsgi.'+attr])
def checkOSEnviron(self,handler):
empty = {}; setup_testing_defaults(empty)
env = handler.environ
from os import environ
for k,v in environ.items():
if k not in empty:
self.assertEqual(env[k],v)
for k,v in empty.items():
self.assertIn(k, env)
def testEnviron(self):
h = TestHandler(X="Y")
h.setup_environ()
self.checkEnvironAttrs(h)
self.checkOSEnviron(h)
self.assertEqual(h.environ["X"],"Y")
def testCGIEnviron(self):
h = BaseCGIHandler(None,None,None,{})
h.setup_environ()
for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors':
self.assertIn(key, h.environ)
def testScheme(self):
h=TestHandler(HTTPS="on"); h.setup_environ()
self.assertEqual(h.environ['wsgi.url_scheme'],'https')
h=TestHandler(); h.setup_environ()
self.assertEqual(h.environ['wsgi.url_scheme'],'http')
def testAbstractMethods(self):
h = BaseHandler()
for name in [
'_flush','get_stdin','get_stderr','add_cgi_vars'
]:
self.assertRaises(NotImplementedError, getattr(h,name))
self.assertRaises(NotImplementedError, h._write, "test")
def testContentLength(self):
# Demo one reason iteration is better than write()... ;)
def trivial_app1(e,s):
s('200 OK',[])
return [e['wsgi.url_scheme']]
def trivial_app2(e,s):
s('200 OK',[])(e['wsgi.url_scheme'])
return []
def trivial_app4(e,s):
# Simulate a response to a HEAD request
s('200 OK',[('Content-Length', '12345')])
return []
h = TestHandler()
h.run(trivial_app1)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"Content-Length: 4\r\n"
"\r\n"
"http")
h = TestHandler()
h.run(trivial_app2)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"\r\n"
"http")
h = TestHandler()
h.run(trivial_app4)
self.assertEqual(h.stdout.getvalue(),
b'Status: 200 OK\r\n'
b'Content-Length: 12345\r\n'
b'\r\n')
def testBasicErrorOutput(self):
def non_error_app(e,s):
s('200 OK',[])
return []
def error_app(e,s):
raise AssertionError("This should be caught by handler")
h = ErrorHandler()
h.run(non_error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"Content-Length: 0\r\n"
"\r\n")
self.assertEqual(h.stderr.getvalue(),"")
h = ErrorHandler()
h.run(error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: %s\r\n"
"Content-Type: text/plain\r\n"
"Content-Length: %d\r\n"
"\r\n%s" % (h.error_status,len(h.error_body),h.error_body))
self.assertNotEqual(h.stderr.getvalue().find("AssertionError"), -1)
def testErrorAfterOutput(self):
MSG = "Some output has been sent"
def error_app(e,s):
s("200 OK",[])(MSG)
raise AssertionError("This should be caught by handler")
h = ErrorHandler()
h.run(error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"\r\n"+MSG)
self.assertNotEqual(h.stderr.getvalue().find("AssertionError"), -1)
def testHeaderFormats(self):
def non_error_app(e,s):
s('200 OK',[])
return []
stdpat = (
r"HTTP/%s 200 OK\r\n"
r"Date: \w{3}, [ 0123]\d \w{3} \d{4} \d\d:\d\d:\d\d GMT\r\n"
r"%s" r"Content-Length: 0\r\n" r"\r\n"
)
shortpat = (
"Status: 200 OK\r\n" "Content-Length: 0\r\n" "\r\n"
)
for ssw in "FooBar/1.0", None:
sw = ssw and "Server: %s\r\n" % ssw or ""
for version in "1.0", "1.1":
for proto in "HTTP/0.9", "HTTP/1.0", "HTTP/1.1":
h = TestHandler(SERVER_PROTOCOL=proto)
h.origin_server = False
h.http_version = version
h.server_software = ssw
h.run(non_error_app)
self.assertEqual(shortpat,h.stdout.getvalue())
h = TestHandler(SERVER_PROTOCOL=proto)
h.origin_server = True
h.http_version = version
h.server_software = ssw
h.run(non_error_app)
if proto=="HTTP/0.9":
self.assertEqual(h.stdout.getvalue(),"")
else:
self.assertTrue(
re.match(stdpat%(version,sw), h.stdout.getvalue()),
(stdpat%(version,sw), h.stdout.getvalue())
)
def testCloseOnError(self):
side_effects = {'close_called': False}
MSG = b"Some output has been sent"
def error_app(e,s):
s("200 OK",[])(MSG)
class CrashyIterable(object):
def __iter__(self):
while True:
yield b'blah'
raise AssertionError("This should be caught by handler")
def close(self):
side_effects['close_called'] = True
return CrashyIterable()
h = ErrorHandler()
h.run(error_app)
self.assertEqual(side_effects['close_called'], True)
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
|
|
import re
from dateutil.relativedelta import relativedelta
from rest_framework.decorators import api_view
from rest_framework.exceptions import APIException
from rest_framework.response import Response
from frontend.models import ImportLog
from frontend.models import Measure
from frontend.models import MeasureGlobal
from frontend.models import MeasureValue
from frontend.models import MEASURE_TAGS
import view_utils as utils
class MissingParameter(APIException):
status_code = 400
default_detail = 'You are missing a required parameter.'
class InvalidMultiParameter(APIException):
status_code = 400
default_detail = ('You can specify one org and many measures, '
'or one measure and many orgs, but not many of both')
@api_view(['GET'])
def measure_global(request, format=None):
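"""Return global measure data, grouped by measure and ordered by month,
optionally filtered by measure ID and tag."""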
measures = utils.param_to_list(request.query_params.get('measure', None))
tags = utils.param_to_list(request.query_params.get('tags', None))
qs = MeasureGlobal.objects.select_related('measure')
if measures:
qs = qs.filter(measure_id__in=measures)
if tags:
qs = qs.filter(measure__tags__overlap=tags)
qs = qs.order_by('measure_id', 'month')
rolled = {}
for mg in qs:
id = mg.measure_id
d_copy = {
'date': mg.month,
'numerator': mg.numerator,
'denominator': mg.denominator,
'calc_value': mg.calc_value,
'percentiles': mg.percentiles,
'cost_savings': mg.cost_savings
}
if id in rolled:
rolled[id]['data'].append(d_copy)
else:
measure = mg.measure
if measure.tags_focus:
tags_focus = ','.join(measure.tags_focus)
else:
tags_focus = ''
rolled[id] = {
'id': id,
'name': measure.name,
'title': measure.title,
'description': measure.description,
'why_it_matters': measure.why_it_matters,
'numerator_short': measure.numerator_short,
'denominator_short': measure.denominator_short,
'url': measure.url,
'is_cost_based': measure.is_cost_based,
'is_percentage': measure.is_percentage,
'low_is_good': measure.low_is_good,
'tags_focus': tags_focus,
'numerator_is_list_of_bnf_codes': measure.numerator_is_list_of_bnf_codes,
'tags': _hydrate_tags(measure.tags),
'data': [d_copy]
}
d = {
'measures': [rolled[k] for k in rolled]
}
return Response(d)
@api_view(['GET'])
def measure_numerators_by_org(request, format=None):
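"""Return the presentations contributing most to a measure's numerator
for a single organisation over the most recent three months of data."""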
measure = request.query_params.get('measure', None)
org = utils.param_to_list(request.query_params.get('org', []))[0]
if 'org_type' in request.query_params:
org_selector = request.query_params['org_type'] + '_id'
if org_selector == 'ccg_id':
org_selector = 'pct_id'
else:
# This is here for backwards compatibility, in case anybody else is
# using the API. Now we have measures for regional teams, we cannot
# guess the type of an org by the length of its code, as both CCGs and
# regional teams have codes of length 3.
if len(org) == 3:
org_selector = 'pct_id'
elif len(org) == 6:
org_selector = 'practice_id'
else:
assert False, 'Unexpected org: {}'.format(org)
this_month = ImportLog.objects.latest_in_category('prescribing').current_at
three_months_ago = (
this_month - relativedelta(months=2)).strftime('%Y-%m-01')
m = Measure.objects.get(pk=measure)
if m.numerator_is_list_of_bnf_codes:
if org_selector in ['stp_id', 'regional_team_id']:
extra_join = '''
INNER JOIN frontend_pct
ON frontend_pct.code = p.pct_id
'''
else:
extra_join = ''
# For measures whose numerator sums one of the columns in the
# prescribing table, we order the presentations by that column.
# For other measures, the columns used to calculate the numerator are
# not available here (they're in BQ) so we order by total_items, which
# is the best we can do.
#
# But because the columns in BQ don't match the columns in PG (items vs
# total_items), and because we alias a column in the query below
# (actual_cost vs cost) we need to translate the name of the column we
# use for ordering the results.
match = re.match(
r'SUM\((items|quantity|actual_cost)\) AS numerator',
m.numerator_columns
)
if match:
order_col = {
'items': 'total_items',
'actual_cost': 'cost',
'quantity': 'quantity',
}[match.groups()[0]]
else:
order_col = 'total_items'
# The redundancy in the following column names is so we can
# support various flavours of `WHERE` clause from the measure
# definitions that may use a subset of any of these column
# names
query = '''
SELECT
{org_selector} AS entity,
presentation_code AS bnf_code,
pn.name AS presentation_name,
SUM(total_items) AS total_items,
SUM(actual_cost) AS cost,
SUM(quantity) AS quantity
FROM
frontend_prescription p
INNER JOIN
frontend_presentation pn
ON p.presentation_code = pn.bnf_code
{extra_join}
WHERE
{org_selector} = %(org)s
AND
processing_date >= %(three_months_ago)s
AND
pn.bnf_code = ANY(%(numerator_bnf_codes)s)
GROUP BY
{org_selector}, presentation_code, pn.name
ORDER BY {order_col} DESC
LIMIT 50
'''.format(
org_selector=org_selector,
three_months_ago=three_months_ago,
extra_join=extra_join,
order_col=order_col,
)
params = {
'numerator_bnf_codes': m.numerator_bnf_codes,
'org': org,
'three_months_ago': three_months_ago,
}
data = utils.execute_query(query, params)
else:
data = []
response = Response(data)
filename = "%s-%s-breakdown.csv" % (measure, org)
if request.accepted_renderer.format == 'csv':
response['content-disposition'] = "attachment; filename=%s" % filename
return response
@api_view(['GET'])
def measure_by_regional_team(request, format=None):
return _measure_by_org(request, 'regional_team')
@api_view(['GET'])
def measure_by_stp(request, format=None):
return _measure_by_org(request, 'stp')
@api_view(['GET'])
def measure_by_ccg(request, format=None):
return _measure_by_org(request, 'ccg')
@api_view(['GET'])
def measure_by_practice(request, format=None):
return _measure_by_org(request, 'practice')
def _measure_by_org(request, org_type):
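"""Shared implementation for the measure_by_* endpoints: fetch measure
values for the given org type (optionally aggregated) and roll them up
by measure."""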
measure_ids = utils.param_to_list(request.query_params.get('measure', None))
tags = utils.param_to_list(request.query_params.get('tags', []))
org_ids = utils.param_to_list(request.query_params.get('org', []))
parent_org_type = request.query_params.get('parent_org_type', None)
aggregate = bool(request.query_params.get('aggregate'))
if org_type == 'practice' and not (org_ids or aggregate):
raise MissingParameter
if len(org_ids) > 1 and len(measure_ids) > 1:
raise InvalidMultiParameter
if parent_org_type is None:
if org_type == 'practice' and org_ids:
l = len(org_ids[0])
assert all(len(org_id) == l for org_id in org_ids)
if l == 3:
parent_org_type = 'pct'
elif l == 6:
parent_org_type = 'practice'
else:
assert False, l
else:
parent_org_type = org_type
measure_values = MeasureValue.objects.by_org(
org_type,
parent_org_type,
org_ids,
measure_ids,
tags,
)
# Because we access the `name` of the related org for each MeasureValue
# during the roll-up process below we need to prefetch them to avoid doing
# N+1 db queries
org_field = org_type if org_type != 'ccg' else 'pct'
measure_values = measure_values.prefetch_related(org_field)
if aggregate:
measure_values = measure_values.aggregate_by_measure_and_month()
rsp_data = {
'measures': _roll_up_measure_values(measure_values, org_type)
}
return Response(rsp_data)
def _roll_up_measure_values(measure_values, org_type):
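"""Group measure values into one entry per measure, each carrying the
measure metadata and a list of monthly data points."""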
rolled = {}
for measure_value in measure_values:
measure_id = measure_value.measure_id
measure_value_data = {
'date': measure_value.month,
'numerator': measure_value.numerator,
'denominator': measure_value.denominator,
'calc_value': measure_value.calc_value,
'percentile': measure_value.percentile,
'cost_savings': measure_value.cost_savings,
}
if org_type == 'practice':
if measure_value.practice_id:
measure_value_data.update({
'practice_id': measure_value.practice_id,
'practice_name': measure_value.practice.name,
})
elif org_type == 'ccg':
if measure_value.pct_id:
measure_value_data.update({
'pct_id': measure_value.pct_id,
'pct_name': measure_value.pct.name,
})
elif org_type == 'stp':
if measure_value.stp_id:
measure_value_data.update({
'stp_id': measure_value.stp_id,
'stp_name': measure_value.stp.name,
})
elif org_type == 'regional_team':
if measure_value.regional_team_id:
measure_value_data.update({
'regional_team_id': measure_value.regional_team_id,
'regional_team_name': measure_value.regional_team.name,
})
else:
assert False
if measure_id in rolled:
rolled[measure_id]['data'].append(measure_value_data)
else:
measure = measure_value.measure
rolled[measure_id] = {
'id': measure_id,
'name': measure.name,
'title': measure.title,
'description': measure.description,
'why_it_matters': measure.why_it_matters,
'numerator_short': measure.numerator_short,
'denominator_short': measure.denominator_short,
'url': measure.url,
'is_cost_based': measure.is_cost_based,
'is_percentage': measure.is_percentage,
'low_is_good': measure.low_is_good,
'tags': _hydrate_tags(measure.tags),
'data': [measure_value_data],
}
return rolled.values()
def _hydrate_tags(tag_ids):
return [
{'id': tag_id, 'name': MEASURE_TAGS[tag_id]['name']}
for tag_id in tag_ids
]
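# Illustrative note (not part of the original module): the response returned by
# _measure_by_org has the shape sketched below; all values are hypothetical, and
# the org fields ('pct_id'/'pct_name' here) vary with org_type as handled in
# _roll_up_measure_values above.
#
# {'measures': [
#     {'id': '<measure_id>', 'name': ..., 'title': ..., 'description': ...,
#      'why_it_matters': ..., 'numerator_short': ..., 'denominator_short': ...,
#      'url': ..., 'is_cost_based': ..., 'is_percentage': ..., 'low_is_good': ...,
#      'tags': [{'id': '<tag_id>', 'name': '<tag name>'}],
#      'data': [{'date': ..., 'numerator': ..., 'denominator': ...,
#                'calc_value': ..., 'percentile': ..., 'cost_savings': ...,
#                'pct_id': ..., 'pct_name': ...}]}
# ]}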
|
|
#/****************************************************************************
#* *
#* OpenNI 2.x Alpha *
#* Copyright (C) 2012 PrimeSense Ltd. *
#* *
#* This file is part of OpenNI. *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#****************************************************************************/
import os
import re
import sys
import shutil
import subprocess
import platform
import argparse
import stat
from commands import getoutput as gop
import UpdateVersion
origDir = os.getcwd()
if platform.system() == 'Windows':
import win32con,pywintypes,win32api
def get_reg_values(reg_key, value_list):
# open the reg key
try:
reg_key = win32api.RegOpenKeyEx(*reg_key)
except pywintypes.error as e:
raise Exception("Failed to open registry key!")
# Get the values
try:
values = [(win32api.RegQueryValueEx(reg_key, name), data_type) for name, data_type in value_list]
# values list of ((value, type), expected_type)
for (value, data_type), expected in values:
if data_type != expected:
raise Exception("Bad registry value type! Expected %d, got %d instead." % (expected, data_type))
# values okay, leave only values
values = [value for ((value, data_type), expected) in values]
except pywintypes.error as e:
raise Exception("Failed to get registry value!")
finally:
try:
win32api.RegCloseKey(reg_key)
except pywintypes.error as e:
# We don't care if reg key close failed...
pass
return tuple(values)
class OS:
#def __init__():
def setConfiguration(self, config):
self.config = config
def getExportedDrivers(self):
return ['PS1080', 'OniFile']
def getExportedTools(self):
return ['NiViewer']
def getExportedSamples(self):
return ['ClosestPointViewer', 'EventBasedRead', 'MultiDepthViewer', 'MultipleStreamRead', 'MWClosestPoint', 'MWClosestPointApp', 'SimpleRead', 'SimpleViewer']
def cleanOutput(self):
# Delete the redist directory and create it again
if os.path.isdir(self.config.output_dir):
shutil.rmtree(self.config.output_dir)
os.mkdir(self.config.output_dir)
return self.config.output_dir
def compileAll(self):
# Clean
if os.path.isdir('./Bin') and self.config.compile == 'Rebuild':
shutil.rmtree('Bin')
for platform in self.config.getPlatforms():
# Build
if self.config.compile != 'None':
rc = self.compile("Release", platform.getPlatformString(), self.config.compile)
if rc != 0:
return rc
return 0
def copyOpenNI(self, where):
# Copy config content to Redist
for r, d, f in os.walk('Config'):
for file in f:
shutil.copy(r+'/'+file, where)
return
def createGeneralFiles(self):
shutil.copy('LICENSE', self.config.output_dir)
shutil.copy('NOTICE', self.config.output_dir)
def createRedist(self):
# Create Redist directory
if not os.path.isdir(self.config.output_dir+'/Redist'):
os.mkdir(self.config.output_dir+'/Redist')
# Copy OpenNI to Redist
self.copyOpenNI(self.config.output_dir+'/Redist')
def createInclude(self):
shutil.copytree('Include', self.config.output_dir+'/Include')
def createLib(self):
return
def createTools(self):
# Create Tools directory
if not os.path.isdir(self.config.output_dir+'/Tools'):
os.mkdir(self.config.output_dir+'/Tools')
if not self.config.supplyTools:
return False
# Copy Source/Tools to Redist
os.mkdir(self.config.output_dir+'/Source/')
shutil.copytree('Source/Tools', self.config.output_dir+'/Source/Tools')
for r, d, f in os.walk(self.config.output_dir+'/Source/Tools'):
for file in f:
if self.isIllegalSampleFile(file):
os.remove(r+'/'+file)
# Copy ThirdParty/PSCommon/XnLib/Include to Redist
os.makedirs(self.config.output_dir+'/ThirdParty/PSCommon/XnLib')
shutil.copytree('ThirdParty/PSCommon/XnLib/Include', self.config.output_dir+'/ThirdParty/PSCommon/XnLib/Include')
return True
def createSamples(self):
## Copy entire Samples dir to Redist
shutil.copytree('Samples', self.config.output_dir+'/Samples')
## Delete user files from samples dir on Redist
for r, d, f in os.walk(self.config.output_dir+'/Samples'):
for file in f:
if self.isIllegalSampleFile(file):
os.remove(r+'/'+file)
## Copy ThirdParty/GL dir to Redist
shutil.copytree('ThirdParty/GL', self.config.output_dir+'/Samples/GL')
def createDocumentation(self):
return
def getBinDir(self):
for platform in self.config.getPlatforms():
return 'Bin/'+platform.getBinDirString()+'-Release'
return ""
def isBaseDirValid(self, path):
return False
def regx_replace(self,findStr,repStr,filePath):
"replaces all findStr by repStr in file filePath using regualr expression"
findStrRegx = re.compile(findStr)
tempName=filePath+'~~~'
fileMode = os.stat(filePath).st_mode
os.chmod(filePath, fileMode | stat.S_IWRITE)
input = open(filePath)
output = open(tempName,'w')
for s in input:
output.write(findStrRegx.sub(repStr,s))
output.close()
input.close()
os.remove(filePath)
os.rename(tempName,filePath)
class OSWin(OS):
def __init__(self):
self.supportsBoth = True
MSVC_KEY = (win32con.HKEY_LOCAL_MACHINE, r"SOFTWARE\Wow6432Node\Microsoft\VisualStudio\10.0")
MSVC_VALUES = [("InstallDir", win32con.REG_SZ)]
self.VS_INST_DIR = get_reg_values(MSVC_KEY, MSVC_VALUES)[0]
self.PROJECT_SLN = "OpenNI.sln"
def getExportedDrivers(self):
drivers = OS.getExportedDrivers(self)
drivers.append('Kinect')
return drivers
def copyOpenNI(self, where):
OS.copyOpenNI(self, where)
# Copy the OpenNI binaries
binDir = self.getBinDir()
for r, d, f in os.walk(binDir):
for file in f:
if not self.isIllegalBinFile(file) and (file.startswith('OpenNI') and not file.endswith('.lib')):
shutil.copy(r+'/'+file, where)
# Copy the OpenNI driver binaries
if not os.path.isdir(where+'/OpenNI2/Drivers'):
if not os.path.isdir(where+'/OpenNI2'):
os.mkdir(where+'/OpenNI2')
os.mkdir(where+'/OpenNI2/Drivers')
binDir = self.getBinDir()
for r, d, f in os.walk(binDir+'/OpenNI2/Drivers'):
for file in f:
if not self.isIllegalBinDriverFile(file) and not self.isIllegalBinFile(file):
shutil.copy(r+'/'+file, where+'/OpenNI2/Drivers')
def createGeneralFiles(self):
OS.createGeneralFiles(self)
## Copy Driver
if not os.path.isdir(self.config.output_dir+'/Driver'):
shutil.copytree('ThirdParty/PSCommon/XnLib/Driver/Win32/Bin', self.config.output_dir+'/Driver')
def createLib(self):
# Copy the libraries to the Libs directory
if not os.path.isdir(self.config.output_dir+'/Lib'):
os.mkdir(self.config.output_dir+'/Lib/')
binDir = self.getBinDir()
shutil.copy(binDir+'/OpenNI2.lib', self.config.output_dir+'/Lib/')
def createTools(self):
supplyTools = OS.createTools(self)
# Copy tool files
binDir = self.getBinDir()
for r, d, f in os.walk(binDir):
for file in f:
if not self.isIllegalBinFile(file) and not self.isIllegalToolFile(file):
shutil.copy(r+'/'+file, self.config.output_dir+'/Tools/')
# Copy OpenNI files
self.copyOpenNI(self.config.output_dir+'/Tools/')
# Copy GLUT .dll file
shutil.copy('ThirdParty/GL/glut'+self.config.bits+'.dll', self.config.output_dir+'/Tools/')
if not supplyTools:
return
def createSamples(self):
OS.createSamples(self)
# Create the samples bin directory
if not os.path.isdir(self.config.output_dir+'/Samples/Bin'):
os.mkdir(self.config.output_dir+'/Samples/Bin/')
# Copy sample files
binDir = self.getBinDir()
for r, d, f in os.walk(binDir):
for file in f:
if not self.isIllegalBinFile(file) and not self.isIllegalSampleBinFile(file):
shutil.copy(r+'/'+file, self.config.output_dir+'/Samples/Bin/')
# Copy OpenNI files
self.copyOpenNI(self.config.output_dir+'/Samples/Bin/')
# Copy GLUT .dll file
shutil.copy('ThirdParty/GL/glut'+self.config.bits+'.dll', self.config.output_dir+'/Samples/Bin/')
# replace location of OutDir in project files to relative path of Bin
outDir = '<OutDir>\$\(SolutionDir\)Bin\\\\\$\(Platform\)-\$\(Configuration\)'
newOutDir = '<OutDir>$(ProjectDir)..\\Bin'
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/ClosestPointViewer/ClosestPointViewer.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/EventBasedRead/EventBasedRead.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/MultiDepthViewer/MultiDepthViewer.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/MultipleStreamRead/MultipleStreamRead.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/MWClosestPoint/MWClosestPoint.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/SimpleRead/SimpleRead.vcxproj")
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/SimpleViewer/SimpleViewer.vcxproj")
# replace location of IntDir in project files to relative path of Bin
intDir = '<IntDir>\$\(SolutionDir\)Bin'
newIntDir = '<IntDir>$(ProjectDir)..\\Bin'
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/ClosestPointViewer/ClosestPointViewer.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/EventBasedRead/EventBasedRead.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/MultiDepthViewer/MultiDepthViewer.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/MultipleStreamRead/MultipleStreamRead.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/MWClosestPoint/MWClosestPoint.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/SimpleRead/SimpleRead.vcxproj")
self.regx_replace(intDir, newIntDir, self.config.output_dir + "/Samples/SimpleViewer/SimpleViewer.vcxproj")
# replace location of OpenNI2.lib in project files to environment variable of OPENNI2_LIB[64]
platform_suffix = ''
if self.config.bits == '64':
platform_suffix = '64'
libDir = '<AdditionalLibraryDirectories>\$\(OutDir\)'
newLibDir = '<AdditionalLibraryDirectories>$(OutDir);$(OPENNI2_LIB' + platform_suffix + ')'
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/ClosestPointViewer/ClosestPointViewer.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/EventBasedRead/EventBasedRead.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/MultiDepthViewer/MultiDepthViewer.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/MultipleStreamRead/MultipleStreamRead.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/MWClosestPoint/MWClosestPoint.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/SimpleRead/SimpleRead.vcxproj")
self.regx_replace(libDir, newLibDir, self.config.output_dir + "/Samples/SimpleViewer/SimpleViewer.vcxproj")
# replace location of OpenNI include path from ..\..\Include to environment variable OPENNI2_INCLUDE
incDir = '..\\\\..\\\\Include'
newIncDir = '$(OPENNI2_INCLUDE' + platform_suffix + ')'
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/ClosestPointViewer/ClosestPointViewer.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/EventBasedRead/EventBasedRead.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MultiDepthViewer/MultiDepthViewer.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MultipleStreamRead/MultipleStreamRead.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MWClosestPoint/MWClosestPoint.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/SimpleRead/SimpleRead.vcxproj")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/SimpleViewer/SimpleViewer.vcxproj")
# replace location GL files
glDir = '..\\\\..\\\\ThirdParty\\\\GL'
newGlDir = '..\\GL'
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/ClosestPointViewer/ClosestPointViewer.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/EventBasedRead/EventBasedRead.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/MultiDepthViewer/MultiDepthViewer.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/MultipleStreamRead/MultipleStreamRead.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/MWClosestPoint/MWClosestPoint.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/MWClosestPointApp/MWClosestPointApp.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/SimpleRead/SimpleRead.vcxproj")
self.regx_replace(glDir, newGlDir, self.config.output_dir + "/Samples/SimpleViewer/SimpleViewer.vcxproj")
def createDocumentation(self):
if not self.config.createDocs:
return
## Run doxygen
beforeDir = os.getcwd()
os.chdir('Source/Documentation')
if os.path.isdir('index'):
shutil.rmtree('index')
subprocess.call(["python", "Runme.py"])
## Create documentation dir in redist
os.chdir(beforeDir)
if not os.path.isdir(self.config.output_dir+'/Documentation'):
os.mkdir(self.config.output_dir+'/Documentation')
## copy doxygen output (html?) to doc dir in redist
shutil.copy('Source/Documentation/html/OpenNI.chm', self.config.output_dir+'/Documentation/')
def compile(self, configuration, platform, compilationMode):
outfile = origDir+'/build.'+configuration+'.'+platform+'.txt'
devenv_cmd = '\"'+self.VS_INST_DIR + 'devenv\" '+self.PROJECT_SLN + ' /' + compilationMode + ' \"'+configuration+'|'+platform+'\" /out '+outfile
print(devenv_cmd)
rc = subprocess.call(devenv_cmd, close_fds=True)
print(compilationMode + ', RC: %d'%rc)
if rc == 0:
os.remove(outfile)
return rc
def isBaseDirValid(self, dir):
if os.path.isdir(dir) and os.path.exists(dir+'/OpenNI.sln'):
return True
return False
def isIllegalSampleFile(self, file):
return file.endswith('.user') or file == 'Makefile' or file == 'Android.mk'
def isIllegalBinFile(self, file):
return not file.endswith('.exe') and not file.endswith('.dll') and not file.endswith('.pdb') and not file.endswith('.lib') or (not self.config.supplyTools and file == 'XnLib.lib')
def isIllegalBinDriverFile(self, file):
return not any(file.startswith(driver) for driver in self.getExportedDrivers()) or file.endswith('.lib')
def isIllegalToolFile(self, file):
return not any(file.startswith(tool) for tool in self.getExportedTools()) or file.endswith('.lib')
def isIllegalSampleBinFile(self, file):
return not any(file.startswith(sample) for sample in self.getExportedSamples())
class OSLinux(OS):
def __init__(self):
self.supportsBoth = False
def copyOpenNI(self, where):
OS.copyOpenNI(self, where)
# Copy the OpenNI binaries
binDir = self.getBinDir()
for r, d, f in os.walk(binDir):
for file in f:
if not self.isIllegalBinFile(file) and file.startswith('libOpenNI'):
shutil.copy(r+'/'+file, where)
# Copy the OpenNI driver binaries
if not os.path.isdir(where+'/OpenNI2/Drivers'):
if not os.path.isdir(where+'/OpenNI2'):
os.mkdir(where+'/OpenNI2')
os.mkdir(where+'/OpenNI2/Drivers')
binDir = self.getBinDir()
for r, d, f in os.walk(binDir+'/OpenNI2/Drivers'):
for file in f:
if not self.isIllegalBinDriverFile(file) and not self.isIllegalBinFile(file):
shutil.copy(r+'/'+file, where+'/OpenNI2/Drivers')
def createTools(self):
# Arm redist does not provide Tools:
if isinstance(self.config.getPlatforms()[0], PlatformArm):
return
supplyTools = OS.createTools(self)
# Copy NiViewer required files.
binDir = self.getBinDir()
for r, d, f in os.walk(binDir):
for file in f:
if not self.isIllegalBinFile(file) and not self.isIllegalToolFile(file):
shutil.copy(r+'/'+file, self.config.output_dir+'/Tools/')
# Copy OpenNI files
self.copyOpenNI(self.config.output_dir+'/Tools/')
if not supplyTools:
return
#'Bin/'+platform.getBinDirString()+'-Release'
#os.makedirs(self.config.output_dir+'/ThirdParty/PSCommon/XnLib/Bin')
shutil.copytree('ThirdParty/PSCommon/XnLib/Bin/'+self.config.getPlatforms()[0].getBinDirString()+'-Release', self.config.output_dir+'/ThirdParty/PSCommon/XnLib/Bin')
def createSamples(self):
OS.createSamples(self)
# Delete GLUT libs (they are windows libraries)
if os.path.isfile(self.config.output_dir+'/Samples/GL/glut32.lib'):
os.remove(self.config.output_dir+'/Samples/GL/glut32.lib')
if os.path.isfile(self.config.output_dir+'/Samples/GL/glut64.lib'):
os.remove(self.config.output_dir+'/Samples/GL/glut64.lib')
# Create the samples bin directory
if not os.path.isdir(self.config.output_dir+'/Samples/Bin'):
os.mkdir(self.config.output_dir+'/Samples/Bin/')
# Copy sample files
binDir = self.getBinDir()
for r, d, f in os.walk(binDir):
for file in f:
if not self.isIllegalBinFile(file) and not self.isIllegalSampleBinFile(file):
shutil.copy(r+'/'+file, self.config.output_dir+'/Samples/Bin/')
# Copy OpenNI files
self.copyOpenNI(self.config.output_dir+'/Samples/Bin/')
## Copy BuildSystem, which is needed to compile the samples - TEMPORARY
shutil.copytree('ThirdParty/PSCommon/BuildSystem', self.config.output_dir+'/Samples/BuildSystem')
# Change location of make file output directory
outDir = 'OUT_DIR = \$\(BIN_DIR\)/\$\(PLATFORM\)-\$\(CFG\)'
newOutDir = 'OUT_DIR = $(BIN_DIR)'
self.regx_replace(outDir, newOutDir, self.config.output_dir + "/Samples/BuildSystem/CommonDefs.mak")
# Change location of make files bin directory
binDir = 'BIN_DIR = ../../Bin'
newBinDir = 'BIN_DIR = ../Bin'
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/ClosestPointViewer/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/EventBasedRead/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/MultiDepthViewer/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/MultipleStreamRead/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/MWClosestPoint/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/MWClosestPointApp/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/SimpleRead/Makefile")
self.regx_replace(binDir, newBinDir, self.config.output_dir + "/Samples/SimpleViewer/Makefile")
# Change makefile build system reference
incDir = '../../ThirdParty/PSCommon'
newIncDir = '..'
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/ClosestPointViewer/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/EventBasedRead/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MultiDepthViewer/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MultipleStreamRead/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MWClosestPoint/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MWClosestPointApp/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/SimpleRead/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/SimpleViewer/Makefile")
# Change GL include dir
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/ClosestPointViewer/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/EventBasedRead/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MultiDepthViewer/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MultipleStreamRead/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MWClosestPoint/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/MWClosestPointApp/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/SimpleRead/Makefile")
self.regx_replace(incDir, newIncDir, self.config.output_dir + "/Samples/SimpleViewer/Makefile")
def createDocumentation(self):
if not self.config.createDocs:
return
## Run doxygen
beforeDir = os.getcwd()
os.chdir('Source/Documentation')
if os.path.isdir('index'):
shutil.rmtree('index')
if subprocess.call(["python", "Runme.py"]) != 0:
print "Couldn't run doxygen!";
os.chdir(beforeDir)
sys.exit(3)
## Create documentation dir in redist
os.chdir(beforeDir)
if not os.path.isdir(self.config.output_dir+'/Documentation'):
os.mkdir(self.config.output_dir+'/Documentation')
## copy doxygen output (html?) to doc dir in redist
shutil.copytree('Source/Documentation/html', self.config.output_dir+'/Documentation/html')
for r, d, f in os.walk(self.config.output_dir+'/Documentation/html/'):
for file in f:
if file.endswith('md5'):
os.remove(r+'/'+file)
def compile(self, configuration, platform, compilationMode):
def calc_jobs_number():
cores = 1
try:
if isinstance(self, OSMac):
txt = gop('sysctl -n hw.physicalcpu')
else:
txt = gop('grep "processor\W:" /proc/cpuinfo | wc -l')
cores = int(txt)
except:
pass
return str(cores * 2)
# make sure platform is valid (linux compilation can only be done on platform machine's type).
if self.config.machine == 'x86_64' and platform == 'x86':
print('Error: Building x86 platform requires 32bit operating system')
return 1
outfile = origDir+'/build.'+configuration+'.'+platform+'.txt'
compilation_cmd = "make -j" + calc_jobs_number() + " CFG=" + configuration + " PLATFORM=" + platform + " > " + outfile + " 2>&1"
if compilationMode == 'Rebuild':
compilation_cmd = "make CFG=" + configuration + " PLATFORM=" + platform + " clean > /dev/null && " + compilation_cmd
print(compilation_cmd)
rc = os.system(compilation_cmd)
print('RC: %d'%rc)
if rc == 0:
os.remove(outfile)
return rc
def isBaseDirValid(self, dir):
if os.path.isdir(dir) and os.path.exists(dir+'/Makefile'):
return True
return False
def isIllegalBinFile(self, file):
return False
def isIllegalBinDriverFile(self, file):
return not any(file=="lib"+driver+".so" for driver in self.getExportedDrivers())
def isIllegalSampleFile(self, file):
return any(file.endswith(ext) for ext in ['.vcxproj', '.vcxproj.filters', 'Android.mk'])
def isIllegalToolFile(self, file):
return not any(file.startswith(tool) for tool in self.getExportedTools())
def isIllegalSampleBinFile(self, file):
return not any((file.startswith(sample) or file.startswith('lib'+sample)) for sample in self.getExportedSamples())
class OSMac(OSLinux):
def isIllegalBinDriverFile(self, file):
return not any(file=="lib"+driver+".dylib" for driver in self.getExportedDrivers())
class Platform:
def __init__(self):
print "Bla"
def getBinDirString(self):
return self.platformString
def getPlatformString(self):
return self.generalPlatformString
def getBits(self):
return self.bits
class Platform32(Platform):
def __init__(self):
if platform.system() == 'Windows':
self.platformString = 'Win32'
else:
self.platformString = 'x86'
self.generalPlatformString = 'x86'
self.bits = '32'
class Platform64(Platform):
def __init__(self):
self.platformString = 'x64'
self.generalPlatformString = 'x64'
self.bits = '64'
class PlatformArm(Platform):
def __init__(self):
self.platformString = 'Arm'
self.generalPlatformString = 'Arm'
self.bits = 'arm'
def boolean(string):
string = string.lower()
if string in ['0', 'f', 'false', 'no', 'off']:
return False
elif string in ['1', 't', 'true', 'yes', 'on']:
return True
raise ValueError()
class Config:
def __init__(self):
self.bits = '32'
self.path = '..'
self.output_dir = ''
self.compile = 'Rebuild'
self.createDocs = True
self.supplyTools = False
self.machine = platform.machine()
def parseArgs(self, args):
parser = argparse.ArgumentParser(prog=sys.argv[0])
parser.add_argument('-path', default='..')
parser.add_argument('-output', default='')
parser.add_argument('-platform', default='32', choices=['32', '64', 'both', 'x86', 'x64', 'arm'])
parser.add_argument('-compile', default='Rebuild', choices=['Rebuild', 'Build', 'None'])
parser.add_argument('-docs', default = True, const=True, nargs='?', type=boolean)
parser.add_argument('-tools', default = False, const=True, nargs='?', type=boolean)
options = parser.parse_args(args)
self.path = options.path
self.compile = options.compile
self.createDocs = options.docs
self.supplyTools = options.tools
self.bits = options.platform
if self.bits == 'x86':
self.bits = '32'
elif self.bits == 'x64':
self.bits = '64'
if options.output != '':
self.output_dir = options.output
else:
self.output_dir = 'Redist/Output'
if (len(self.getPlatforms()) == 1):
self.output_dir = self.output_dir + self.getPlatforms()[0].getBits()
return True
def getPlatforms(self):
platforms = []
if self.bits == '32':
platforms.append(Platform32())
elif self.bits == '64':
platforms.append(Platform64())
elif self.bits == 'arm':
platforms.append(PlatformArm())
elif self.bits == 'both':
platforms.append(Platform32())
platforms.append(Platform64())
return platforms
def printState(self):
print "Path: " + self.path
print "Output: " + self.output_dir
print "Platform: " + self.bits
print "Compile: " + str(self.compile)
print "Docs: " + str(self.createDocs)
print "Tools: " + str(self.supplyTools)
def Redist(myConfig):
# Check operating system.
plat = platform.system()
if plat == 'Windows':
myOS = OSWin()
elif plat == 'Linux':
myOS = OSLinux()
elif plat == 'Darwin':
myOS = OSMac()
else:
print "Unsupported OS: " + platform.system()
sys.exit(1)
myConfig.printState()
myOS.setConfiguration(myConfig)
origDir = os.getcwd()
if not myOS.isBaseDirValid(myConfig.path):
print 'Directory '+myConfig.path+' not valid'
sys.exit(1)
os.chdir(myConfig.path)
output_dir = myOS.cleanOutput()
if myOS.compileAll() != 0:
print 'Compilation failure'
sys.exit(2)
# Create file structure
myOS.createGeneralFiles()
myOS.createRedist()
myOS.createInclude()
myOS.createTools()
myOS.createLib()
myOS.createSamples()
myOS.createDocumentation()
## Done
os.chdir(origDir)
###### main
if __name__ == '__main__':
# Parse configuration.
myConfig = Config()
if not myConfig.parseArgs(sys.argv[1:]):
sys.exit(1)
# Run
Redist(myConfig)
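# Illustrative invocation (the script filename and flag values are examples
# only; the flags themselves come from Config.parseArgs above):
#   python <this_script>.py -path .. -platform 64 -compile Rebuild -docs false -tools true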
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""File-based sink."""
from __future__ import absolute_import
import logging
import os
import re
import time
import uuid
from builtins import range
from builtins import zip
from future.utils import iteritems
from past.builtins import unicode
from apache_beam.internal import util
from apache_beam.io import iobase
from apache_beam.io.filesystem import BeamIOError
from apache_beam.io.filesystem import CompressionTypes
from apache_beam.io.filesystems import FileSystems
from apache_beam.options.value_provider import StaticValueProvider
from apache_beam.options.value_provider import ValueProvider
from apache_beam.options.value_provider import check_accessible
from apache_beam.transforms.display import DisplayDataItem
DEFAULT_SHARD_NAME_TEMPLATE = '-SSSSS-of-NNNNN'
__all__ = ['FileBasedSink']
class FileBasedSink(iobase.Sink):
"""A sink to a GCS or local files.
To implement a file-based sink, extend this class and override
either :meth:`.write_record()` or :meth:`.write_encoded_record()`.
If needed, also overwrite :meth:`.open()` and/or :meth:`.close()` to customize
the file handling or write headers and footers.
The output of this write is a :class:`~apache_beam.pvalue.PCollection` of
all written shards.
"""
# Max number of threads to be used for renaming.
_MAX_RENAME_THREADS = 64
__hash__ = None
def __init__(self,
file_path_prefix,
coder,
file_name_suffix='',
num_shards=0,
shard_name_template=None,
mime_type='application/octet-stream',
compression_type=CompressionTypes.AUTO):
"""
Raises:
~exceptions.TypeError: if file path parameters are not a :class:`str` or
:class:`~apache_beam.options.value_provider.ValueProvider`, or if
**compression_type** is not member of
:class:`~apache_beam.io.filesystem.CompressionTypes`.
~exceptions.ValueError: if **shard_name_template** is not of expected
format.
"""
if not isinstance(file_path_prefix, ((str, unicode), ValueProvider)):
raise TypeError('file_path_prefix must be a string or ValueProvider; '
'got %r instead' % file_path_prefix)
if not isinstance(file_name_suffix, ((str, unicode), ValueProvider)):
raise TypeError('file_name_suffix must be a string or ValueProvider; '
'got %r instead' % file_name_suffix)
if not CompressionTypes.is_valid_compression_type(compression_type):
raise TypeError('compression_type must be a CompressionType object but '
'was %s' % type(compression_type))
if shard_name_template is None:
shard_name_template = DEFAULT_SHARD_NAME_TEMPLATE
elif shard_name_template == '':
num_shards = 1
if isinstance(file_path_prefix, (str, unicode)):
file_path_prefix = StaticValueProvider(str, file_path_prefix)
if isinstance(file_name_suffix, (str, unicode)):
file_name_suffix = StaticValueProvider(str, file_name_suffix)
self.file_path_prefix = file_path_prefix
self.file_name_suffix = file_name_suffix
self.num_shards = num_shards
self.coder = coder
self.shard_name_format = self._template_to_format(shard_name_template)
self.shard_name_glob_format = self._template_to_glob_format(
shard_name_template)
self.compression_type = compression_type
self.mime_type = mime_type
def display_data(self):
return {'shards':
DisplayDataItem(self.num_shards,
label='Number of Shards').drop_if_default(0),
'compression':
DisplayDataItem(str(self.compression_type)),
'file_pattern':
DisplayDataItem('{}{}{}'.format(self.file_path_prefix,
self.shard_name_format,
self.file_name_suffix),
label='File Pattern')}
@check_accessible(['file_path_prefix'])
def open(self, temp_path):
"""Opens ``temp_path``, returning an opaque file handle object.
The returned file handle is passed to ``write_[encoded_]record`` and
``close``.
"""
return FileSystems.create(temp_path, self.mime_type, self.compression_type)
def write_record(self, file_handle, value):
"""Writes a single record go the file handle returned by ``open()``.
By default, calls ``write_encoded_record`` after encoding the record with
this sink's Coder.
"""
self.write_encoded_record(file_handle, self.coder.encode(value))
def write_encoded_record(self, file_handle, encoded_value):
"""Writes a single encoded record to the file handle returned by ``open()``.
"""
raise NotImplementedError
def close(self, file_handle):
"""Finalize and close the file handle returned from ``open()``.
Called after all records are written.
By default, calls ``file_handle.close()`` iff it is not None.
"""
if file_handle is not None:
file_handle.close()
@check_accessible(['file_path_prefix', 'file_name_suffix'])
def initialize_write(self):
file_path_prefix = self.file_path_prefix.get()
tmp_dir = self._create_temp_dir(file_path_prefix)
FileSystems.mkdirs(tmp_dir)
return tmp_dir
def _create_temp_dir(self, file_path_prefix):
base_path, last_component = FileSystems.split(file_path_prefix)
if not last_component:
# Trying to re-split the base_path to check if it's a root.
new_base_path, _ = FileSystems.split(base_path)
if base_path == new_base_path:
raise ValueError('Cannot create a temporary directory for root path '
'prefix %s. Please specify a file path prefix with '
'at least two components.' % file_path_prefix)
path_components = [base_path,
'beam-temp-' + last_component + '-' + uuid.uuid1().hex]
return FileSystems.join(*path_components)
@check_accessible(['file_path_prefix', 'file_name_suffix'])
def open_writer(self, init_result, uid):
# A proper suffix is needed for AUTO compression detection.
# We also ensure there will be no collisions with uid and a
# (possibly unsharded) file_path_prefix and a (possibly empty)
# file_name_suffix.
file_path_prefix = self.file_path_prefix.get()
file_name_suffix = self.file_name_suffix.get()
suffix = (
'.' + os.path.basename(file_path_prefix) + file_name_suffix)
writer_path = FileSystems.join(init_result, uid) + suffix
return FileBasedSinkWriter(self, writer_path)
@check_accessible(['file_path_prefix', 'file_name_suffix'])
def _get_final_name(self, shard_num, num_shards):
return ''.join([
self.file_path_prefix.get(),
self.shard_name_format % dict(shard_num=shard_num,
num_shards=num_shards),
self.file_name_suffix.get()
])
@check_accessible(['file_path_prefix', 'file_name_suffix'])
def _get_final_name_glob(self, num_shards):
return ''.join([
self.file_path_prefix.get(),
self.shard_name_glob_format % dict(num_shards=num_shards),
self.file_name_suffix.get()
])
def pre_finalize(self, init_result, writer_results):
num_shards = len(list(writer_results))
dst_glob = self._get_final_name_glob(num_shards)
dst_glob_files = [file_metadata.path
for mr in FileSystems.match([dst_glob])
for file_metadata in mr.metadata_list]
if dst_glob_files:
logging.warn('Deleting %d existing files in target path matching: %s',
len(dst_glob_files), self.shard_name_glob_format)
FileSystems.delete(dst_glob_files)
def _check_state_for_finalize_write(self, writer_results, num_shards):
"""Checks writer output files' states.
Returns:
src_files, dst_files: Lists of files to rename. For each i, finalize_write
should rename(src_files[i], dst_files[i]).
delete_files: Src files to delete. These could be leftovers from an
incomplete (non-atomic) rename operation.
num_skipped: Count of writer result files that were already renamed, e.g.
by a previous run of finalize_write().
"""
if not writer_results:
return [], [], [], 0
src_glob = FileSystems.join(FileSystems.split(writer_results[0])[0], '*')
dst_glob = self._get_final_name_glob(num_shards)
src_glob_files = set(file_metadata.path
for mr in FileSystems.match([src_glob])
for file_metadata in mr.metadata_list)
dst_glob_files = set(file_metadata.path
for mr in FileSystems.match([dst_glob])
for file_metadata in mr.metadata_list)
src_files = []
dst_files = []
delete_files = []
num_skipped = 0
for shard_num, src in enumerate(writer_results):
final_name = self._get_final_name(shard_num, num_shards)
dst = final_name
src_exists = src in src_glob_files
dst_exists = dst in dst_glob_files
if not src_exists and not dst_exists:
raise BeamIOError('src and dst files do not exist. src: %s, dst: %s' % (
src, dst))
if not src_exists and dst_exists:
logging.debug('src: %s -> dst: %s already renamed, skipping', src, dst)
num_skipped += 1
continue
if (src_exists and dst_exists and
FileSystems.checksum(src) == FileSystems.checksum(dst)):
logging.debug('src: %s == dst: %s, deleting src', src, dst)
delete_files.append(src)
continue
src_files.append(src)
dst_files.append(dst)
return src_files, dst_files, delete_files, num_skipped
@check_accessible(['file_path_prefix'])
def finalize_write(self, init_result, writer_results,
unused_pre_finalize_results):
writer_results = sorted(writer_results)
num_shards = len(writer_results)
src_files, dst_files, delete_files, num_skipped = (
self._check_state_for_finalize_write(writer_results, num_shards))
num_skipped += len(delete_files)
FileSystems.delete(delete_files)
num_shards_to_finalize = len(src_files)
min_threads = min(num_shards_to_finalize, FileBasedSink._MAX_RENAME_THREADS)
num_threads = max(1, min_threads)
chunk_size = FileSystems.get_chunk_size(self.file_path_prefix.get())
source_file_batch = [src_files[i:i + chunk_size]
for i in range(0, len(src_files), chunk_size)]
destination_file_batch = [dst_files[i:i + chunk_size]
for i in range(0, len(dst_files), chunk_size)]
if num_shards_to_finalize:
logging.info(
'Starting finalize_write threads with num_shards: %d (skipped: %d), '
'batches: %d, num_threads: %d',
num_shards_to_finalize, num_skipped, len(source_file_batch),
num_threads)
start_time = time.time()
# Use a thread pool for renaming operations.
def _rename_batch(batch):
"""_rename_batch executes batch rename operations."""
source_files, destination_files = batch
exceptions = []
try:
FileSystems.rename(source_files, destination_files)
return exceptions
except BeamIOError as exp:
if exp.exception_details is None:
raise
for (src, dst), exception in iteritems(exp.exception_details):
if exception:
logging.error(('Exception in _rename_batch. src: %s, '
'dst: %s, err: %s'), src, dst, exception)
exceptions.append(exception)
else:
logging.debug('Rename successful: %s -> %s', src, dst)
return exceptions
exception_batches = util.run_using_threadpool(
_rename_batch, list(zip(source_file_batch, destination_file_batch)),
num_threads)
all_exceptions = [e for exception_batch in exception_batches
for e in exception_batch]
if all_exceptions:
raise Exception(
'Encountered exceptions in finalize_write: %s' % all_exceptions)
for final_name in dst_files:
yield final_name
logging.info('Renamed %d shards in %.2f seconds.', num_shards_to_finalize,
time.time() - start_time)
else:
logging.warning(
'No shards found to finalize. num_shards: %d, skipped: %d',
num_shards, num_skipped)
try:
FileSystems.delete([init_result])
except IOError:
# May have already been removed.
pass
@staticmethod
def _template_replace_num_shards(shard_name_template):
match = re.search('N+', shard_name_template)
if match:
shard_name_template = shard_name_template.replace(
match.group(0), '%%(num_shards)0%dd' % len(match.group(0)))
return shard_name_template
@staticmethod
def _template_to_format(shard_name_template):
if not shard_name_template:
return ''
match = re.search('S+', shard_name_template)
if match is None:
raise ValueError(
"Shard number pattern S+ not found in shard_name_template: %s" %
shard_name_template)
shard_name_format = shard_name_template.replace(
match.group(0), '%%(shard_num)0%dd' % len(match.group(0)))
return FileBasedSink._template_replace_num_shards(shard_name_format)
@staticmethod
def _template_to_glob_format(shard_name_template):
if not shard_name_template:
return ''
match = re.search('S+', shard_name_template)
if match is None:
raise ValueError(
"Shard number pattern S+ not found in shard_name_template: %s" %
shard_name_template)
shard_name_format = shard_name_template.replace(match.group(0), '*')
return FileBasedSink._template_replace_num_shards(shard_name_format)
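# Worked example (illustrative, not part of the original code): the default
# template '-SSSSS-of-NNNNN' becomes '-%(shard_num)05d-of-%(num_shards)05d'
# via _template_to_format, so shard 3 of 10 is rendered as '-00003-of-00010';
# _template_to_glob_format yields '-*-of-%(num_shards)05d', which matches all
# shards for a given shard count.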
def __eq__(self, other):
# TODO: Clean up workitem_test which uses this.
# pylint: disable=unidiomatic-typecheck
return type(self) == type(other) and self.__dict__ == other.__dict__
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
class FileBasedSinkWriter(iobase.Writer):
"""The writer for FileBasedSink.
"""
def __init__(self, sink, temp_shard_path):
self.sink = sink
self.temp_shard_path = temp_shard_path
self.temp_handle = self.sink.open(temp_shard_path)
def write(self, value):
self.sink.write_record(self.temp_handle, value)
def close(self):
self.sink.close(self.temp_handle)
return self.temp_shard_path
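# Minimal sketch (an assumption added for illustration, not part of the
# original module): a concrete sink that writes one encoded record per line,
# showing the subclassing contract described in the FileBasedSink docstring.
# A real sink could also override open()/close() to add headers and footers.
class _ExampleLineSink(FileBasedSink):
  """Illustrative only: writes each encoded record followed by a newline."""

  def write_encoded_record(self, file_handle, encoded_value):
    # encoded_value is the byte string produced by this sink's coder.
    file_handle.write(encoded_value)
    file_handle.write(b'\n')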
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkSecurityGroupsOperations:
"""NetworkSecurityGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkSecurityGroup":
"""Gets the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkSecurityGroup, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_06_01.models.NetworkSecurityGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_security_group_name: str,
parameters: "_models.NetworkSecurityGroup",
**kwargs: Any
) -> "_models.NetworkSecurityGroup":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkSecurityGroup')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
network_security_group_name: str,
parameters: "_models.NetworkSecurityGroup",
**kwargs: Any
) -> AsyncLROPoller["_models.NetworkSecurityGroup"]:
"""Creates or updates a network security group in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param parameters: Parameters supplied to the create or update network security group
operation.
:type parameters: ~azure.mgmt.network.v2017_06_01.models.NetworkSecurityGroup
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkSecurityGroup or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_06_01.models.NetworkSecurityGroup]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroup"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
"""Gets all network security groups in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_06_01.models.NetworkSecurityGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkSecurityGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkSecurityGroups'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
"""Gets all network security groups in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_06_01.models.NetworkSecurityGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-06-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkSecurityGroupListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups'} # type: ignore
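# Usage sketch (illustrative only, not part of the generated client): the
# operations above are normally reached through an authenticated
# ``NetworkManagementClient`` from ``azure.mgmt.network.aio``; the client,
# resource group, NSG name and body below are assumed example values.
#
#     async def example(client):
#         # begin_create_or_update returns an AsyncLROPoller; await .result()
#         # to block until the network security group is provisioned.
#         poller = await client.network_security_groups.begin_create_or_update(
#             "example-rg", "example-nsg", {"location": "westus"})
#         nsg = await poller.result()
#         # list_all()/list() return AsyncItemPaged; consume with ``async for``.
#         async for group in client.network_security_groups.list_all():
#             print(group.name)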
|
|
from __future__ import unicode_literals
from django import forms
from django.contrib.admin.util import (flatten_fieldsets, lookup_field,
display_for_field, label_for_field, help_text_for_field)
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.fields.related import ManyToManyRel
from django.forms.util import flatatt
from django.template.defaultfilters import capfirst
from django.utils.encoding import force_unicode, smart_unicode
from django.utils.html import conditional_escape, format_html
from django.utils.safestring import mark_safe
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
ACTION_CHECKBOX_NAME = '_selected_action'
class ActionForm(forms.Form):
action = forms.ChoiceField(label=_('Action:'))
select_across = forms.BooleanField(label='', required=False, initial=0,
widget=forms.HiddenInput({'class': 'select-across'}))
checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)
class AdminForm(object):
def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None):
self.form, self.fieldsets = form, normalize_fieldsets(fieldsets)
self.prepopulated_fields = [{
'field': form[field_name],
'dependencies': [form[f] for f in dependencies]
} for field_name, dependencies in prepopulated_fields.items()]
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
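# For illustration: with ``prepopulated_fields={'slug': ('title',)}`` the
# comprehension above yields ``[{'field': form['slug'],
# 'dependencies': [form['title']]}]``, i.e. each prepopulated bound field
# paired with the bound fields whose values it is derived from.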
def __iter__(self):
for name, options in self.fieldsets:
yield Fieldset(self.form, name,
readonly_fields=self.readonly_fields,
model_admin=self.model_admin,
**options
)
def first_field(self):
try:
fieldset_name, fieldset_options = self.fieldsets[0]
field_name = fieldset_options['fields'][0]
if not isinstance(field_name, six.string_types):
field_name = field_name[0]
return self.form[field_name]
except (KeyError, IndexError):
pass
try:
return next(iter(self.form))
except StopIteration:
return None
def _media(self):
media = self.form.media
for fs in self:
media = media + fs.media
return media
media = property(_media)
class Fieldset(object):
def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(),
description=None, model_admin=None):
self.form = form
self.name, self.fields = name, fields
self.classes = ' '.join(classes)
self.description = description
self.model_admin = model_admin
self.readonly_fields = readonly_fields
def _media(self):
if 'collapse' in self.classes:
extra = '' if settings.DEBUG else '.min'
js = ['jquery%s.js' % extra,
'jquery.init.js',
'collapse%s.js' % extra]
return forms.Media(js=[static('admin/js/%s' % url) for url in js])
return forms.Media()
media = property(_media)
def __iter__(self):
for field in self.fields:
yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
class Fieldline(object):
def __init__(self, form, field, readonly_fields=None, model_admin=None):
self.form = form # A django.forms.Form instance
if not hasattr(field, "__iter__") or isinstance(field, str):
self.fields = [field]
else:
self.fields = field
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
def __iter__(self):
for i, field in enumerate(self.fields):
if field in self.readonly_fields:
yield AdminReadonlyField(self.form, field, is_first=(i == 0),
model_admin=self.model_admin)
else:
yield AdminField(self.form, field, is_first=(i == 0))
def errors(self):
return mark_safe('\n'.join([self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields]).strip('\n'))
class AdminField(object):
def __init__(self, form, field, is_first):
self.field = form[field] # A django.forms.BoundField instance
self.is_first = is_first # Whether this field is first on the line
self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput)
def label_tag(self):
classes = []
contents = conditional_escape(force_unicode(self.field.label))
if self.is_checkbox:
classes.append('vCheckboxLabel')
else:
contents += ':'
if self.field.field.required:
classes.append('required')
if not self.is_first:
classes.append('inline')
attrs = classes and {'class': ' '.join(classes)} or {}
return self.field.label_tag(contents=mark_safe(contents), attrs=attrs)
def errors(self):
return mark_safe(self.field.errors.as_ul())
class AdminReadonlyField(object):
def __init__(self, form, field, is_first, model_admin=None):
label = label_for_field(field, form._meta.model, model_admin)
# Make self.field look a little bit like a field. This means that
# {{ field.name }} must be a useful class name to identify the field.
# For convenience, store other field-related data here too.
if callable(field):
class_name = field.__name__ != '<lambda>' and field.__name__ or ''
else:
class_name = field
self.field = {
'name': class_name,
'label': label,
'field': field,
'help_text': help_text_for_field(class_name, form._meta.model)
}
self.form = form
self.model_admin = model_admin
self.is_first = is_first
self.is_checkbox = False
self.is_readonly = True
def label_tag(self):
attrs = {}
if not self.is_first:
attrs["class"] = "inline"
label = self.field['label']
return format_html('<label{0}>{1}:</label>',
flatatt(attrs),
capfirst(force_unicode(label)))
def contents(self):
from django.contrib.admin.templatetags.admin_list import _boolean_icon
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin
try:
f, attr, value = lookup_field(field, obj, model_admin)
except (AttributeError, ValueError, ObjectDoesNotExist):
result_repr = EMPTY_CHANGELIST_VALUE
else:
if f is None:
boolean = getattr(attr, "boolean", False)
if boolean:
result_repr = _boolean_icon(value)
else:
result_repr = smart_unicode(value)
if getattr(attr, "allow_tags", False):
result_repr = mark_safe(result_repr)
else:
if value is None:
result_repr = EMPTY_CHANGELIST_VALUE
elif isinstance(f.rel, ManyToManyRel):
result_repr = ", ".join(map(six.text_type, value.all()))
else:
result_repr = display_for_field(value, f)
return conditional_escape(result_repr)
class InlineAdminFormSet(object):
"""
A wrapper around an inline formset for use in the admin system.
"""
def __init__(self, inline, formset, fieldsets, prepopulated_fields=None,
readonly_fields=None, model_admin=None):
self.opts = inline
self.formset = formset
self.fieldsets = fieldsets
self.model_admin = model_admin
if readonly_fields is None:
readonly_fields = ()
self.readonly_fields = readonly_fields
if prepopulated_fields is None:
prepopulated_fields = {}
self.prepopulated_fields = prepopulated_fields
def __iter__(self):
for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()):
yield InlineAdminForm(self.formset, form, self.fieldsets,
self.prepopulated_fields, original, self.readonly_fields,
model_admin=self.opts)
for form in self.formset.extra_forms:
yield InlineAdminForm(self.formset, form, self.fieldsets,
self.prepopulated_fields, None, self.readonly_fields,
model_admin=self.opts)
yield InlineAdminForm(self.formset, self.formset.empty_form,
self.fieldsets, self.prepopulated_fields, None,
self.readonly_fields, model_admin=self.opts)
def fields(self):
fk = getattr(self.formset, "fk", None)
for i, field in enumerate(flatten_fieldsets(self.fieldsets)):
if fk and fk.name == field:
continue
if field in self.readonly_fields:
yield {
'label': label_for_field(field, self.opts.model, self.opts),
'widget': {
'is_hidden': False
},
'required': False
}
else:
yield self.formset.form.base_fields[field]
def _media(self):
media = self.opts.media + self.formset.media
for fs in self:
media = media + fs.media
return media
media = property(_media)
class InlineAdminForm(AdminForm):
"""
A wrapper around an inline form for use in the admin system.
"""
def __init__(self, formset, form, fieldsets, prepopulated_fields, original,
readonly_fields=None, model_admin=None):
self.formset = formset
self.model_admin = model_admin
self.original = original
if original is not None:
self.original_content_type_id = ContentType.objects.get_for_model(original).pk
self.show_url = original and hasattr(original, 'get_absolute_url')
super(InlineAdminForm, self).__init__(form, fieldsets, prepopulated_fields,
readonly_fields, model_admin)
def __iter__(self):
for name, options in self.fieldsets:
yield InlineFieldset(self.formset, self.form, name,
self.readonly_fields, model_admin=self.model_admin, **options)
def has_auto_field(self):
if self.form._meta.model._meta.has_auto_field:
return True
# Also search any parents for an auto field.
for parent in self.form._meta.model._meta.get_parent_list():
if parent._meta.has_auto_field:
return True
return False
def field_count(self):
# tabular.html uses this function for colspan value.
num_of_fields = 0
if self.has_auto_field():
num_of_fields += 1
num_of_fields += len(self.fieldsets[0][1]["fields"])
if self.formset.can_order:
num_of_fields += 1
if self.formset.can_delete:
num_of_fields += 1
return num_of_fields
def pk_field(self):
return AdminField(self.form, self.formset._pk_field.name, False)
def fk_field(self):
fk = getattr(self.formset, "fk", None)
if fk:
return AdminField(self.form, fk.name, False)
else:
return ""
def deletion_field(self):
from django.forms.formsets import DELETION_FIELD_NAME
return AdminField(self.form, DELETION_FIELD_NAME, False)
def ordering_field(self):
from django.forms.formsets import ORDERING_FIELD_NAME
return AdminField(self.form, ORDERING_FIELD_NAME, False)
class InlineFieldset(Fieldset):
def __init__(self, formset, *args, **kwargs):
self.formset = formset
super(InlineFieldset, self).__init__(*args, **kwargs)
def __iter__(self):
fk = getattr(self.formset, "fk", None)
for field in self.fields:
if fk and fk.name == field:
continue
yield Fieldline(self.form, field, self.readonly_fields,
model_admin=self.model_admin)
class AdminErrorList(forms.util.ErrorList):
"""
Stores all errors for the form/formsets in an add/change stage view.
"""
def __init__(self, form, inline_formsets):
if form.is_bound:
self.extend(form.errors.values())
for inline_formset in inline_formsets:
self.extend(inline_formset.non_form_errors())
for errors_in_inline_form in inline_formset.errors:
self.extend(errors_in_inline_form.values())
def normalize_fieldsets(fieldsets):
"""
Make sure the keys in fieldset dictionaries are strings. Returns the
normalized data.
"""
result = []
for name, options in fieldsets:
result.append((name, normalize_dictionary(options)))
return result
def normalize_dictionary(data_dict):
"""
Converts all the keys in "data_dict" to strings. The keys must be
convertible using str().
"""
for key, value in data_dict.items():
if not isinstance(key, str):
del data_dict[key]
data_dict[str(key)] = value
return data_dict
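# Illustrative sketch (not part of Django): how the two helpers above are
# typically exercised. The fieldsets tuple below is a made-up example.
def _example_normalize_fieldsets():  # pragma: no cover
    example_fieldsets = (
        (None, {u'fields': ('name',)}),
        (u'Advanced options', {u'classes': ('collapse',), u'fields': ('slug',)}),
    )
    # Unicode keys such as u'fields' are replaced in place with plain ``str``
    # keys; the option values themselves are left untouched.
    return normalize_fieldsets(example_fieldsets)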
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Cloud Datastore query splitter test."""
import unittest
from mock import MagicMock
from mock import call
from apache_beam.io.gcp.datastore.v1 import fake_datastore
from apache_beam.io.gcp.datastore.v1 import query_splitter
# Protect against environments where datastore library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from google.cloud.proto.datastore.v1 import datastore_pb2
from google.cloud.proto.datastore.v1 import query_pb2
from google.cloud.proto.datastore.v1.query_pb2 import PropertyFilter
except ImportError:
datastore_pb2 = None
# pylint: enable=wrong-import-order, wrong-import-position
@unittest.skipIf(datastore_pb2 is None, 'GCP dependencies are not installed')
class QuerySplitterTest(unittest.TestCase):
def test_get_splits_query_with_multiple_kinds(self):
query = query_pb2.Query()
query.kind.add()
query.kind.add()
self.assertRaises(ValueError, query_splitter.get_splits, None, query, 4)
def test_get_splits_query_with_order(self):
query = query_pb2.Query()
query.kind.add()
query.order.add()
self.assertRaises(ValueError, query_splitter.get_splits, None, query, 3)
def test_get_splits_query_with_unsupported_filter(self):
query = query_pb2.Query()
query.kind.add()
test_filter = query.filter.composite_filter.filters.add()
test_filter.property_filter.op = PropertyFilter.GREATER_THAN
self.assertRaises(ValueError, query_splitter.get_splits, None, query, 2)
def test_get_splits_query_with_limit(self):
query = query_pb2.Query()
query.kind.add()
query.limit.value = 10
self.assertRaises(ValueError, query_splitter.get_splits, None, query, 2)
def test_get_splits_query_with_offset(self):
query = query_pb2.Query()
query.kind.add()
query.offset = 10
self.assertRaises(ValueError, query_splitter.get_splits, None, query, 2)
def test_create_scatter_query(self):
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 10
scatter_query = query_splitter._create_scatter_query(query, num_splits)
self.assertEqual(scatter_query.kind[0], kind)
self.assertEqual(scatter_query.limit.value,
(num_splits - 1) * query_splitter.KEYS_PER_SPLIT)
self.assertEqual(scatter_query.order[0].direction,
query_pb2.PropertyOrder.ASCENDING)
self.assertEqual(scatter_query.projection[0].property.name,
query_splitter.KEY_PROPERTY_NAME)
def test_get_splits_with_two_splits(self):
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 2
num_entities = 97
batch_size = 9
self.check_get_splits(query, num_splits, num_entities, batch_size)
def test_get_splits_with_multiple_splits(self):
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 4
num_entities = 369
batch_size = 12
self.check_get_splits(query, num_splits, num_entities, batch_size)
def test_get_splits_with_large_num_splits(self):
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 10
num_entities = 4
batch_size = 10
self.check_get_splits(query, num_splits, num_entities, batch_size)
def test_get_splits_with_small_num_entities(self):
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 4
num_entities = 50
batch_size = 10
self.check_get_splits(query, num_splits, num_entities, batch_size)
def test_get_splits_with_batch_size_exact_multiple(self):
"""Test get_splits when num scatter keys is a multiple of batch size."""
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 4
num_entities = 400
batch_size = 32
self.check_get_splits(query, num_splits, num_entities, batch_size)
def test_get_splits_with_large_batch_size(self):
"""Test get_splits when all scatter keys are retured in a single req."""
query = query_pb2.Query()
kind = query.kind.add()
kind.name = 'shakespeare-demo'
num_splits = 4
num_entities = 400
batch_size = 500
self.check_get_splits(query, num_splits, num_entities, batch_size)
def check_get_splits(self, query, num_splits, num_entities, batch_size):
"""A helper method to test the query_splitter get_splits method.
Args:
query: the query to be split
num_splits: number of splits
num_entities: number of scatter entities contained in the fake datastore.
batch_size: the number of entities returned by fake datastore in one req.
"""
# Test for both random long ids and string ids.
id_or_name = [True, False]
for id_type in id_or_name:
entities = fake_datastore.create_entities(num_entities, id_type)
mock_datastore = MagicMock()
# Assign a fake run_query method as a side_effect to the mock.
mock_datastore.run_query.side_effect = \
fake_datastore.create_run_query(entities, batch_size)
split_queries = query_splitter.get_splits(
mock_datastore, query, num_splits)
# If the requested num_splits is greater than num_entities, the best the
# splitter can do is one entity per split.
expected_num_splits = min(num_splits, num_entities + 1)
self.assertEqual(len(split_queries), expected_num_splits)
expected_requests = QuerySplitterTest.create_scatter_requests(
query, num_splits, batch_size, num_entities)
expected_calls = []
for req in expected_requests:
expected_calls.append(call(req))
self.assertEqual(expected_calls, mock_datastore.run_query.call_args_list)
@staticmethod
def create_scatter_requests(query, num_splits, batch_size, num_entities):
"""Creates a list of expected scatter requests from the query splitter.
This list of requests returned is used to verify that the query splitter
made the same number of requests in the same order to datastore.
"""
requests = []
count = (num_splits - 1) * query_splitter.KEYS_PER_SPLIT
start_cursor = ''
i = 0
scatter_query = query_splitter._create_scatter_query(query, count)
while i < count and i < num_entities:
request = datastore_pb2.RunQueryRequest()
request.query.CopyFrom(scatter_query)
request.query.start_cursor = start_cursor
request.query.limit.value = count - i
requests.append(request)
i += batch_size
start_cursor = str(i)
return requests
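# Worked example of the request arithmetic above (assuming KEYS_PER_SPLIT is
# 32, its value in query_splitter at the time of writing): for num_splits=4
# the splitter asks for (4 - 1) * 32 = 96 scatter keys; with batch_size=12
# and num_entities=369 (as in test_get_splits_with_multiple_splits) that is
# served in 96 / 12 = 8 RunQueryRequests whose limits are 96, 84, ..., 12 and
# whose start cursors advance by batch_size on each request.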
if __name__ == '__main__':
unittest.main()
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import http.client
from oslo_serialization import jsonutils
from keystone.common.policies import base as bp
from keystone.common import provider_api
import keystone.conf
from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import base_classes
from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class _SystemUserProjectEndpointTests(object):
"""Common default functionality for all system users."""
def test_user_can_list_projects_for_endpoint(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
r = c.get('/v3/OS-EP-FILTER/endpoints/%s/projects'
% endpoint['id'],
headers=self.headers)
for project_itr in r.json['projects']:
self.assertIn(project['id'], project_itr['id'])
def test_user_can_check_endpoint_in_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT)
def test_user_can_list_endpoints_for_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
r = c.get('/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'],
headers=self.headers)
for endpoint_itr in r.json['endpoints']:
self.assertIn(endpoint['id'], endpoint_itr['id'])
class _SystemReaderAndMemberProjectEndpointTests(object):
def test_user_cannot_add_endpoint_to_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
with self.test_client() as c:
c.put('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
def test_user_cannot_remove_endpoint_from_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
with self.test_client() as c:
c.delete('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
class _DomainAndProjectUserProjectEndpointTests(object):
def test_user_cannot_list_projects_for_endpoint(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/endpoints/%s/projects' % endpoint['id'],
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
def test_user_cannot_check_endpoint_in_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
def test_user_cannot_list_endpoints_for_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'],
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
class SystemReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_reader = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_reader
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.reader_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_reader['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_member = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_member
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.member_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_member['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserProjectEndpointTests):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
# Reuse the system administrator account created during
# ``keystone-manage bootstrap``
self.user_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.user_id,
password=self.bootstrapper.admin_password,
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def test_user_can_add_endpoint_to_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
with self.test_client() as c:
c.put('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT)
def test_user_can_remove_endpoint_from_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.delete('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT)
class DomainUserTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(DomainUserTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
domain_admin = unit.new_user_ref(domain_id=self.domain_id)
self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.admin_role_id, user_id=self.user_id,
domain_id=self.domain_id
)
auth = self.build_authentication_request(
user_id=self.user_id,
password=domain_admin['password'],
domain_id=self.domain_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectUserTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(ProjectUserTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
self.user_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.user_id,
password=self.bootstrapper.admin_password,
project_id=self.bootstrapper.project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectUserTestsWithoutEnforceScope(
base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def _override_policy(self):
# TODO(cmurphy): Remove this once the deprecated policies in
# keystone.common.policies.project_endpoint have been removed. This is
# only here to make sure we test the new policies instead of the
# deprecated ones. Oslo.policy will OR deprecated policies with new
# policies to maintain compatibility and give operators a chance to
# update permissions or update policies without breaking users. This
# will cause these specific tests to fail since we're trying to correct
# this broken behavior with better scope checking.
with open(self.policy_file_name, 'w') as f:
overridden_policies = {
'identity:list_projects_for_endpoint': bp.SYSTEM_READER,
'identity:add_endpoint_to_project': bp.SYSTEM_ADMIN,
'identity:check_endpoint_in_project': bp.SYSTEM_READER,
'identity:list_endpoints_for_project': bp.SYSTEM_READER,
'identity:remove_endpoint_from_project': bp.SYSTEM_ADMIN
}
f.write(jsonutils.dumps(overridden_policies))
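# For reference (assuming keystone.common.policies.base defines
# SYSTEM_READER as 'role:reader and system_scope:all' and SYSTEM_ADMIN as
# 'role:admin and system_scope:all'), the policy file written above looks
# roughly like:
#
#     {"identity:list_projects_for_endpoint": "role:reader and system_scope:all",
#      "identity:add_endpoint_to_project": "role:admin and system_scope:all",
#      ...}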
def setUp(self):
super(ProjectUserTestsWithoutEnforceScope, self).setUp()
self.loadapp()
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
self.policy_file_name = self.policy_file.file_name
self.useFixture(
ksfixtures.Policy(
self.config_fixture, policy_file=self.policy_file_name
)
)
self._override_policy()
# Explicitly set enforce_scope to False to make sure we maintain
# backwards compatibility with project users.
self.config_fixture.config(group='oslo_policy', enforce_scope=False)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
user = unit.new_user_ref(domain_id=domain['id'])
self.user_id = PROVIDERS.identity_api.create_user(user)['id']
self.project_id = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=domain['id'])
)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.admin_role_id, user_id=self.user_id,
project_id=self.project_id
)
auth = self.build_authentication_request(
user_id=self.user_id,
password=user['password'],
project_id=self.project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
|
|
# Copyright 2012 VMware, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import contextlib
import copy
import mock
import netaddr
from oslo.config import cfg
from webob import exc
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.rpc.handlers import l3_rpc
from neutron.api.v2 import attributes
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.db import common_db_mixin
from neutron.db import db_base_plugin_v2
from neutron.db import external_net_db
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_attrs_db
from neutron.db import l3_db
from neutron.db import l3_dvr_db
from neutron.extensions import external_net
from neutron.extensions import l3
from neutron.extensions import portbindings
from neutron import manager
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants as service_constants
from neutron.tests import base
from neutron.tests import fake_notifier
from neutron.tests.unit import test_agent_ext_plugin
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_api_v2_extension
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import testlib_plugin
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
_get_path = test_api_v2._get_path
class L3TestExtensionManager(object):
def get_resources(self):
# Add the resources to the global attribute map
# This is done here as the setup process won't
# initialize the main API router which extends
# the global attribute map
attributes.RESOURCE_ATTRIBUTE_MAP.update(
l3.RESOURCE_ATTRIBUTE_MAP)
return l3.L3.get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []
class L3NatExtensionTestCase(test_api_v2_extension.ExtensionTestCase):
fmt = 'json'
def setUp(self):
super(L3NatExtensionTestCase, self).setUp()
self._setUpExtension(
'neutron.extensions.l3.RouterPluginBase', None,
l3.RESOURCE_ATTRIBUTE_MAP, l3.L3, '',
allow_pagination=True, allow_sorting=True,
supported_extension_aliases=['router'],
use_quota=True)
def test_router_create(self):
router_id = _uuid()
data = {'router': {'name': 'router1', 'admin_state_up': True,
'tenant_id': _uuid(),
'external_gateway_info': None}}
return_value = copy.deepcopy(data['router'])
return_value.update({'status': "ACTIVE", 'id': router_id})
instance = self.plugin.return_value
instance.create_router.return_value = return_value
instance.get_routers_count.return_value = 0
res = self.api.post(_get_path('routers', fmt=self.fmt),
self.serialize(data),
content_type='application/%s' % self.fmt)
instance.create_router.assert_called_with(mock.ANY,
router=data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
res = self.deserialize(res)
self.assertIn('router', res)
router = res['router']
self.assertEqual(router['id'], router_id)
self.assertEqual(router['status'], "ACTIVE")
self.assertEqual(router['admin_state_up'], True)
def test_router_list(self):
router_id = _uuid()
return_value = [{'name': 'router1', 'admin_state_up': True,
'tenant_id': _uuid(), 'id': router_id}]
instance = self.plugin.return_value
instance.get_routers.return_value = return_value
res = self.api.get(_get_path('routers', fmt=self.fmt))
instance.get_routers.assert_called_with(mock.ANY, fields=mock.ANY,
filters=mock.ANY,
sorts=mock.ANY,
limit=mock.ANY,
marker=mock.ANY,
page_reverse=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertIn('routers', res)
self.assertEqual(1, len(res['routers']))
self.assertEqual(router_id, res['routers'][0]['id'])
def test_router_update(self):
router_id = _uuid()
update_data = {'router': {'admin_state_up': False}}
return_value = {'name': 'router1', 'admin_state_up': False,
'tenant_id': _uuid(),
'status': "ACTIVE", 'id': router_id}
instance = self.plugin.return_value
instance.update_router.return_value = return_value
res = self.api.put(_get_path('routers', id=router_id,
fmt=self.fmt),
self.serialize(update_data))
instance.update_router.assert_called_with(mock.ANY, router_id,
router=update_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertIn('router', res)
router = res['router']
self.assertEqual(router['id'], router_id)
self.assertEqual(router['status'], "ACTIVE")
self.assertEqual(router['admin_state_up'], False)
def test_router_get(self):
router_id = _uuid()
return_value = {'name': 'router1', 'admin_state_up': False,
'tenant_id': _uuid(),
'status': "ACTIVE", 'id': router_id}
instance = self.plugin.return_value
instance.get_router.return_value = return_value
res = self.api.get(_get_path('routers', id=router_id,
fmt=self.fmt))
instance.get_router.assert_called_with(mock.ANY, router_id,
fields=mock.ANY)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertIn('router', res)
router = res['router']
self.assertEqual(router['id'], router_id)
self.assertEqual(router['status'], "ACTIVE")
self.assertEqual(router['admin_state_up'], False)
def test_router_delete(self):
router_id = _uuid()
res = self.api.delete(_get_path('routers', id=router_id))
instance = self.plugin.return_value
instance.delete_router.assert_called_with(mock.ANY, router_id)
self.assertEqual(res.status_int, exc.HTTPNoContent.code)
def test_router_add_interface(self):
router_id = _uuid()
subnet_id = _uuid()
port_id = _uuid()
interface_data = {'subnet_id': subnet_id}
return_value = copy.deepcopy(interface_data)
return_value['port_id'] = port_id
instance = self.plugin.return_value
instance.add_router_interface.return_value = return_value
path = _get_path('routers', id=router_id,
action="add_router_interface",
fmt=self.fmt)
res = self.api.put(path, self.serialize(interface_data))
instance.add_router_interface.assert_called_with(mock.ANY, router_id,
interface_data)
self.assertEqual(res.status_int, exc.HTTPOk.code)
res = self.deserialize(res)
self.assertIn('port_id', res)
self.assertEqual(res['port_id'], port_id)
self.assertEqual(res['subnet_id'], subnet_id)
class L3NatExtensionTestCaseXML(L3NatExtensionTestCase):
fmt = 'xml'
# This base plugin class is for tests.
class TestL3NatBasePlugin(db_base_plugin_v2.NeutronDbPluginV2,
external_net_db.External_net_db_mixin):
__native_pagination_support = True
__native_sorting_support = True
def create_network(self, context, network):
session = context.session
with session.begin(subtransactions=True):
net = super(TestL3NatBasePlugin, self).create_network(context,
network)
self._process_l3_create(context, net, network['network'])
return net
def update_network(self, context, id, network):
session = context.session
with session.begin(subtransactions=True):
net = super(TestL3NatBasePlugin, self).update_network(context, id,
network)
self._process_l3_update(context, net, network['network'])
return net
def delete_network(self, context, id):
with context.session.begin(subtransactions=True):
self._process_l3_delete(context, id)
super(TestL3NatBasePlugin, self).delete_network(context, id)
def delete_port(self, context, id, l3_port_check=True):
plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
if plugin:
if l3_port_check:
plugin.prevent_l3_port_deletion(context, id)
plugin.disassociate_floatingips(context, id)
return super(TestL3NatBasePlugin, self).delete_port(context, id)
# This plugin class is for tests with plugin that integrates L3.
class TestL3NatIntPlugin(TestL3NatBasePlugin,
l3_db.L3_NAT_db_mixin):
supported_extension_aliases = ["external-net", "router"]
# This plugin class is for tests with plugin that integrates L3 and L3 agent
# scheduling.
class TestL3NatIntAgentSchedulingPlugin(TestL3NatIntPlugin,
l3_agentschedulers_db.
L3AgentSchedulerDbMixin):
supported_extension_aliases = ["external-net", "router",
"l3_agent_scheduler"]
router_scheduler = importutils.import_object(
cfg.CONF.router_scheduler_driver)
# This plugin class is for tests with plugin not supporting L3.
class TestNoL3NatPlugin(TestL3NatBasePlugin):
__native_pagination_support = True
__native_sorting_support = True
supported_extension_aliases = ["external-net"]
# An L3 routing service plugin class for tests with plugins that
# delegate away L3 routing functionality
class TestL3NatServicePlugin(common_db_mixin.CommonDbMixin,
l3_dvr_db.L3_NAT_with_dvr_db_mixin,
l3_db.L3_NAT_db_mixin):
supported_extension_aliases = ["router"]
def get_plugin_type(self):
return service_constants.L3_ROUTER_NAT
def get_plugin_description(self):
return "L3 Routing Service Plugin for testing"
# An L3 routing with L3 agent scheduling service plugin class for tests with
# plugins that delegate away L3 routing functionality
class TestL3NatAgentSchedulingServicePlugin(TestL3NatServicePlugin,
l3_agentschedulers_db.
L3AgentSchedulerDbMixin):
supported_extension_aliases = ["router", "l3_agent_scheduler"]
def __init__(self):
super(TestL3NatAgentSchedulingServicePlugin, self).__init__()
self.router_scheduler = importutils.import_object(
cfg.CONF.router_scheduler_driver)
self.agent_notifiers.update(
{l3_constants.AGENT_TYPE_L3: l3_rpc_agent_api.L3AgentNotifyAPI()})
class L3NATdbonlyMixinTestCase(base.BaseTestCase):
def setUp(self):
super(L3NATdbonlyMixinTestCase, self).setUp()
self.mixin = l3_db.L3_NAT_dbonly_mixin()
def test_build_routers_list_with_gw_port_mismatch(self):
routers = [{'gw_port_id': 'foo_gw_port_id', 'id': 'foo_router_id'}]
gw_ports = {}
routers = self.mixin._build_routers_list(mock.ANY, routers, gw_ports)
self.assertIsNone(routers[0].get('gw_port'))
class L3NatTestCaseMixin(object):
def _create_router(self, fmt, tenant_id, name=None,
admin_state_up=None, set_context=False,
arg_list=None, **kwargs):
data = {'router': {'tenant_id': tenant_id}}
if name:
data['router']['name'] = name
if admin_state_up:
data['router']['admin_state_up'] = admin_state_up
for arg in (('admin_state_up', 'tenant_id') + (arg_list or ())):
# Arg must be present and not empty
if arg in kwargs and kwargs[arg]:
data['router'][arg] = kwargs[arg]
router_req = self.new_create_request('routers', data, fmt)
if set_context and tenant_id:
# create a specific auth context for this request
router_req.environ['neutron.context'] = context.Context(
'', tenant_id)
return router_req.get_response(self.ext_api)
def _make_router(self, fmt, tenant_id, name=None, admin_state_up=None,
external_gateway_info=None, set_context=False,
arg_list=None, **kwargs):
if external_gateway_info:
arg_list = ('external_gateway_info', ) + (arg_list or ())
res = self._create_router(fmt, tenant_id, name,
admin_state_up, set_context,
arg_list=arg_list,
external_gateway_info=external_gateway_info,
**kwargs)
return self.deserialize(fmt, res)
def _add_external_gateway_to_router(self, router_id, network_id,
expected_code=exc.HTTPOk.code,
neutron_context=None):
return self._update('routers', router_id,
{'router': {'external_gateway_info':
{'network_id': network_id}}},
expected_code=expected_code,
neutron_context=neutron_context)
def _remove_external_gateway_from_router(self, router_id, network_id,
expected_code=exc.HTTPOk.code,
external_gw_info=None):
return self._update('routers', router_id,
{'router': {'external_gateway_info':
external_gw_info}},
expected_code=expected_code)
def _router_interface_action(self, action, router_id, subnet_id, port_id,
expected_code=exc.HTTPOk.code,
expected_body=None,
tenant_id=None):
interface_data = {}
if subnet_id:
interface_data.update({'subnet_id': subnet_id})
if port_id and (action != 'add' or not subnet_id):
interface_data.update({'port_id': port_id})
req = self.new_action_request('routers', interface_data, router_id,
"%s_router_interface" % action)
# if tenant_id was specified, create a tenant context for this request
if tenant_id:
req.environ['neutron.context'] = context.Context(
'', tenant_id)
res = req.get_response(self.ext_api)
self.assertEqual(res.status_int, expected_code)
response = self.deserialize(self.fmt, res)
if expected_body:
self.assertEqual(response, expected_body)
return response
@contextlib.contextmanager
def router(self, name='router1', admin_state_up=True,
fmt=None, tenant_id=_uuid(),
external_gateway_info=None, set_context=False,
**kwargs):
router = self._make_router(fmt or self.fmt, tenant_id, name,
admin_state_up, external_gateway_info,
set_context, **kwargs)
yield router
self._delete('routers', router['router']['id'])
def _set_net_external(self, net_id):
self._update('networks', net_id,
{'network': {external_net.EXTERNAL: True}})
def _create_floatingip(self, fmt, network_id, port_id=None,
fixed_ip=None, set_context=False):
data = {'floatingip': {'floating_network_id': network_id,
'tenant_id': self._tenant_id}}
if port_id:
data['floatingip']['port_id'] = port_id
if fixed_ip:
data['floatingip']['fixed_ip_address'] = fixed_ip
floatingip_req = self.new_create_request('floatingips', data, fmt)
if set_context and self._tenant_id:
# create a specific auth context for this request
floatingip_req.environ['neutron.context'] = context.Context(
'', self._tenant_id)
return floatingip_req.get_response(self.ext_api)
def _make_floatingip(self, fmt, network_id, port_id=None,
fixed_ip=None, set_context=False):
res = self._create_floatingip(fmt, network_id, port_id,
fixed_ip, set_context)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
return self.deserialize(fmt, res)
def _validate_floating_ip(self, fip):
body = self._list('floatingips')
self.assertEqual(len(body['floatingips']), 1)
self.assertEqual(body['floatingips'][0]['id'],
fip['floatingip']['id'])
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['id'],
fip['floatingip']['id'])
@contextlib.contextmanager
def floatingip_with_assoc(self, port_id=None, fmt=None, fixed_ip=None,
set_context=False):
with self.subnet(cidr='11.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
private_port = None
if port_id:
private_port = self._show('ports', port_id)
with test_db_plugin.optional_ctx(private_port,
self.port) as private_port:
with self.router() as r:
sid = private_port['port']['fixed_ips'][0]['subnet_id']
private_sub = {'subnet': {'id': sid}}
floatingip = None
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action(
'add', r['router']['id'],
private_sub['subnet']['id'], None)
floatingip = self._make_floatingip(
fmt or self.fmt,
public_sub['subnet']['network_id'],
port_id=private_port['port']['id'],
fixed_ip=fixed_ip,
set_context=set_context)
yield floatingip
if floatingip:
self._delete('floatingips',
floatingip['floatingip']['id'])
self._router_interface_action(
'remove', r['router']['id'],
private_sub['subnet']['id'], None)
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
@contextlib.contextmanager
def floatingip_no_assoc_with_public_sub(
self, private_sub, fmt=None, set_context=False, public_sub=None):
self._set_net_external(public_sub['subnet']['network_id'])
with self.router() as r:
floatingip = None
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
floatingip = self._make_floatingip(
fmt or self.fmt,
public_sub['subnet']['network_id'],
set_context=set_context)
yield floatingip, r
if floatingip:
self._delete('floatingips',
floatingip['floatingip']['id'])
self._router_interface_action('remove', r['router']['id'],
private_sub['subnet']['id'],
None)
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
@contextlib.contextmanager
def floatingip_no_assoc(self, private_sub, fmt=None, set_context=False):
with self.subnet(cidr='12.0.0.0/24') as public_sub:
with self.floatingip_no_assoc_with_public_sub(
private_sub, fmt, set_context, public_sub) as (f, r):
# Yield only the floating ip object
yield f
class ExtraAttributesMixinTestCase(base.BaseTestCase):
def setUp(self):
super(ExtraAttributesMixinTestCase, self).setUp()
self.mixin = l3_attrs_db.ExtraAttributesMixin()
def _test__extend_extra_router_dict(
self, extra_attributes, attributes, expected_attributes):
self.mixin._extend_extra_router_dict(
attributes, {'extra_attributes': extra_attributes})
self.assertEqual(expected_attributes, attributes)
def test__extend_extra_router_dict_string_default(self):
self.mixin.extra_attributes = [{
'name': "foo_key",
'default': 'foo_default'
}]
extension_attributes = {'foo_key': 'my_fancy_value'}
self._test__extend_extra_router_dict(
extension_attributes, {}, extension_attributes)
def test__extend_extra_router_dict_booleans_false_default(self):
self.mixin.extra_attributes = [{
'name': "foo_key",
'default': False
}]
extension_attributes = {'foo_key': True}
self._test__extend_extra_router_dict(
extension_attributes, {}, extension_attributes)
def test__extend_extra_router_dict_booleans_true_default(self):
self.mixin.extra_attributes = [{
'name': "foo_key",
'default': True
}]
# Test that the default is overridden
extension_attributes = {'foo_key': False}
self._test__extend_extra_router_dict(
extension_attributes, {}, extension_attributes)
def test__extend_extra_router_dict_no_extension_attributes(self):
self.mixin.extra_attributes = [{
'name': "foo_key",
'default': 'foo_value'
}]
self._test__extend_extra_router_dict({}, {}, {'foo_key': 'foo_value'})
def test__extend_extra_router_dict_none_extension_attributes(self):
self._test__extend_extra_router_dict(None, {}, {})
class L3NatTestCaseBase(L3NatTestCaseMixin):
def test_router_create(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
with self.router(name='router1', admin_state_up=True,
tenant_id=tenant_id) as router:
for k, v in expected_value:
self.assertEqual(router['router'][k], v)
def test_router_create_call_extensions(self):
self.extension_called = False
def _extend_router_dict_test_attr(*args, **kwargs):
self.extension_called = True
db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs(
l3.ROUTERS, [_extend_router_dict_test_attr])
self.assertFalse(self.extension_called)
with self.router():
self.assertTrue(self.extension_called)
def test_router_create_with_gwinfo(self):
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
data = {'router': {'tenant_id': _uuid()}}
data['router']['name'] = 'router1'
data['router']['external_gateway_info'] = {
'network_id': s['subnet']['network_id']}
router_req = self.new_create_request('routers', data, self.fmt)
res = router_req.get_response(self.ext_api)
router = self.deserialize(self.fmt, res)
self.assertEqual(
s['subnet']['network_id'],
router['router']['external_gateway_info']['network_id'])
self._delete('routers', router['router']['id'])
def test_router_list(self):
with contextlib.nested(self.router(),
self.router(),
self.router()
) as routers:
self._test_list_resources('router', routers)
def test_router_list_with_parameters(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
) as (router1, router2):
query_params = 'name=router1'
self._test_list_resources('router', [router1],
query_params=query_params)
query_params = 'name=router2'
self._test_list_resources('router', [router2],
query_params=query_params)
query_params = 'name=router3'
self._test_list_resources('router', [],
query_params=query_params)
def test_router_list_with_sort(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
self.router(name='router3')
) as (router1, router2, router3):
self._test_list_with_sort('router', (router3, router2, router1),
[('name', 'desc')])
def test_router_list_with_pagination(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
self.router(name='router3')
) as (router1, router2, router3):
self._test_list_with_pagination('router',
(router1, router2, router3),
('name', 'asc'), 2, 2)
def test_router_list_with_pagination_reverse(self):
with contextlib.nested(self.router(name='router1'),
self.router(name='router2'),
self.router(name='router3')
) as (router1, router2, router3):
self._test_list_with_pagination_reverse('router',
(router1, router2,
router3),
('name', 'asc'), 2, 2)
def test_router_update(self):
rname1 = "yourrouter"
rname2 = "nachorouter"
with self.router(name=rname1) as r:
body = self._show('routers', r['router']['id'])
self.assertEqual(body['router']['name'], rname1)
body = self._update('routers', r['router']['id'],
{'router': {'name': rname2}})
body = self._show('routers', r['router']['id'])
self.assertEqual(body['router']['name'], rname2)
def test_router_update_gateway(self):
with self.router() as r:
with self.subnet() as s1:
with self.subnet() as s2:
self._set_net_external(s1['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
net_id = (body['router']
['external_gateway_info']['network_id'])
self.assertEqual(net_id, s1['subnet']['network_id'])
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s2['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
net_id = (body['router']
['external_gateway_info']['network_id'])
self.assertEqual(net_id, s2['subnet']['network_id'])
# Validate that we can clear the gateway with
# an empty dict, in any other case, we fall back
# on None as default value
self._remove_external_gateway_from_router(
r['router']['id'],
s2['subnet']['network_id'],
external_gw_info={})
def test_router_update_gateway_with_existed_floatingip(self):
with self.subnet() as subnet:
self._set_net_external(subnet['subnet']['network_id'])
with self.floatingip_with_assoc() as fip:
self._add_external_gateway_to_router(
fip['floatingip']['router_id'],
subnet['subnet']['network_id'],
expected_code=exc.HTTPConflict.code)
def test_router_update_gateway_to_empty_with_existed_floatingip(self):
with self.floatingip_with_assoc() as fip:
self._remove_external_gateway_from_router(
fip['floatingip']['router_id'], None,
expected_code=exc.HTTPConflict.code)
def test_router_add_interface_subnet(self):
exp_notifications = ['router.create.start',
'router.create.end',
'network.create.start',
'network.create.end',
'subnet.create.start',
'subnet.create.end',
'router.interface.create',
'router.interface.delete']
fake_notifier.reset()
with self.router() as r:
with self.subnet() as s:
body = self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self.assertIn('port_id', body)
# fetch port and confirm device_id
r_port_id = body['port_id']
body = self._show('ports', r_port_id)
self.assertEqual(body['port']['device_id'], r['router']['id'])
body = self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
body = self._show('ports', r_port_id,
expected_code=exc.HTTPNotFound.code)
self.assertEqual(
set(exp_notifications),
set(n['event_type'] for n in fake_notifier.NOTIFICATIONS))
for n in fake_notifier.NOTIFICATIONS:
if n['event_type'].startswith('router.interface.'):
payload = n['payload']['router_interface']
self.assertIn('id', payload)
self.assertEqual(payload['id'], r['router']['id'])
self.assertIn('tenant_id', payload)
stid = s['subnet']['tenant_id']
                        # tolerate subnet tenant deliberately set to '' in the
                        # nsx metadata access case
self.assertIn(payload['tenant_id'], [stid, ''])
def test_router_add_interface_subnet_with_bad_tenant_returns_404(self):
with mock.patch('neutron.context.Context.to_dict') as tdict:
tenant_id = _uuid()
admin_context = {'roles': ['admin']}
tenant_context = {'tenant_id': 'bad_tenant',
'roles': []}
tdict.return_value = admin_context
with self.router(tenant_id=tenant_id) as r:
with self.network(tenant_id=tenant_id) as n:
with self.subnet(network=n) as s:
tdict.return_value = tenant_context
err_code = exc.HTTPNotFound.code
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
err_code)
tdict.return_value = admin_context
body = self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self.assertIn('port_id', body)
tdict.return_value = tenant_context
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None,
err_code)
tdict.return_value = admin_context
body = self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_interface_subnet_with_port_from_other_tenant(self):
tenant_id = _uuid()
other_tenant_id = _uuid()
with contextlib.nested(
self.router(tenant_id=tenant_id),
self.network(tenant_id=tenant_id),
self.network(tenant_id=other_tenant_id)) as (r, n1, n2):
with contextlib.nested(
self.subnet(network=n1, cidr='10.0.0.0/24'),
self.subnet(network=n2, cidr='10.1.0.0/24')) as (s1, s2):
body = self._router_interface_action(
'add',
r['router']['id'],
s2['subnet']['id'],
None)
self.assertIn('port_id', body)
self._router_interface_action(
'add',
r['router']['id'],
s1['subnet']['id'],
None,
tenant_id=tenant_id)
self.assertIn('port_id', body)
self._router_interface_action(
'remove',
r['router']['id'],
s1['subnet']['id'],
None,
tenant_id=tenant_id)
body = self._router_interface_action(
'remove',
r['router']['id'],
s2['subnet']['id'],
None)
def test_router_add_interface_port(self):
with self.router() as r:
with self.port() as p:
body = self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
self.assertIn('port_id', body)
self.assertEqual(body['port_id'], p['port']['id'])
# fetch port and confirm device_id
body = self._show('ports', p['port']['id'])
self.assertEqual(body['port']['device_id'], r['router']['id'])
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_router_add_interface_empty_port_and_subnet_ids(self):
with self.router() as r:
self._router_interface_action('add', r['router']['id'],
None, None,
expected_code=exc.
HTTPBadRequest.code)
def test_router_add_interface_port_bad_tenant_returns_404(self):
with mock.patch('neutron.context.Context.to_dict') as tdict:
admin_context = {'roles': ['admin']}
tenant_context = {'tenant_id': 'bad_tenant',
'roles': []}
tdict.return_value = admin_context
with self.router() as r:
with self.port() as p:
tdict.return_value = tenant_context
err_code = exc.HTTPNotFound.code
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'],
err_code)
tdict.return_value = admin_context
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
tdict.return_value = tenant_context
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'],
err_code)
tdict.return_value = admin_context
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_router_add_interface_dup_subnet1_returns_400(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
expected_code=exc.
HTTPBadRequest.code)
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_interface_dup_subnet2_returns_400(self):
with self.router() as r:
with self.subnet() as s:
with self.port(subnet=s) as p1:
with self.port(subnet=s) as p2:
self._router_interface_action('add',
r['router']['id'],
None,
p1['port']['id'])
self._router_interface_action('add',
r['router']['id'],
None,
p2['port']['id'],
expected_code=exc.
HTTPBadRequest.code)
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p1['port']['id'])
def test_router_add_interface_overlapped_cidr_returns_400(self):
with self.router() as r:
with self.subnet(cidr='10.0.1.0/24') as s1:
self._router_interface_action('add',
r['router']['id'],
s1['subnet']['id'],
None)
def try_overlapped_cidr(cidr):
with self.subnet(cidr=cidr) as s2:
self._router_interface_action('add',
r['router']['id'],
s2['subnet']['id'],
None,
expected_code=exc.
HTTPBadRequest.code)
# another subnet with same cidr
try_overlapped_cidr('10.0.1.0/24')
# another subnet with overlapped cidr including s1
try_overlapped_cidr('10.0.0.0/16')
# clean-up
self._router_interface_action('remove',
r['router']['id'],
s1['subnet']['id'],
None)
def test_router_add_interface_no_data_returns_400(self):
with self.router() as r:
self._router_interface_action('add',
r['router']['id'],
None,
None,
expected_code=exc.
HTTPBadRequest.code)
def test_router_add_gateway_dup_subnet1_returns_400(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'],
expected_code=exc.HTTPBadRequest.code)
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_add_gateway_dup_subnet2_returns_400(self):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None,
expected_code=exc.
HTTPBadRequest.code)
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
def test_router_add_gateway(self):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
net_id = body['router']['external_gateway_info']['network_id']
self.assertEqual(net_id, s['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
gw_info = body['router']['external_gateway_info']
self.assertIsNone(gw_info)
def test_router_add_gateway_tenant_ctx(self):
with self.router(tenant_id='noadmin',
set_context=True) as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
ctx = context.Context('', 'noadmin')
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'],
neutron_context=ctx)
body = self._show('routers', r['router']['id'])
net_id = body['router']['external_gateway_info']['network_id']
self.assertEqual(net_id, s['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
body = self._show('routers', r['router']['id'])
gw_info = body['router']['external_gateway_info']
self.assertIsNone(gw_info)
    def test_create_router_port_with_device_id_of_other_tenants_router(self):
with self.router() as admin_router:
with self.network(tenant_id='tenant_a',
set_context=True) as n:
with self.subnet(network=n):
self._create_port(
self.fmt, n['network']['id'],
tenant_id='tenant_a',
device_id=admin_router['router']['id'],
device_owner='network:router_interface',
set_context=True,
expected_res_status=exc.HTTPConflict.code)
    def test_create_non_router_port_device_id_of_other_tenants_router_update(
self):
# This tests that HTTPConflict is raised if we create a non-router
# port that matches the device_id of another tenants router and then
# we change the device_owner to be network:router_interface.
with self.router() as admin_router:
with self.network(tenant_id='tenant_a',
set_context=True) as n:
with self.subnet(network=n):
port_res = self._create_port(
self.fmt, n['network']['id'],
tenant_id='tenant_a',
device_id=admin_router['router']['id'],
set_context=True)
port = self.deserialize(self.fmt, port_res)
neutron_context = context.Context('', 'tenant_a')
data = {'port': {'device_owner':
'network:router_interface'}}
self._update('ports', port['port']['id'], data,
neutron_context=neutron_context,
expected_code=exc.HTTPConflict.code)
self._delete('ports', port['port']['id'])
def test_update_port_device_id_to_different_tenants_router(self):
with self.router() as admin_router:
with self.router(tenant_id='tenant_a',
set_context=True) as tenant_router:
with self.network(tenant_id='tenant_a',
set_context=True) as n:
with self.subnet(network=n) as s:
port = self._router_interface_action(
'add', tenant_router['router']['id'],
s['subnet']['id'], None, tenant_id='tenant_a')
neutron_context = context.Context('', 'tenant_a')
data = {'port':
{'device_id': admin_router['router']['id']}}
self._update('ports', port['port_id'], data,
neutron_context=neutron_context,
expected_code=exc.HTTPConflict.code)
self._router_interface_action(
'remove', tenant_router['router']['id'],
s['subnet']['id'], None, tenant_id='tenant_a')
def test_router_add_gateway_invalid_network_returns_400(self):
with self.router() as r:
self._add_external_gateway_to_router(
r['router']['id'],
"foobar", expected_code=exc.HTTPBadRequest.code)
def test_router_add_gateway_non_existent_network_returns_404(self):
with self.router() as r:
self._add_external_gateway_to_router(
r['router']['id'],
_uuid(), expected_code=exc.HTTPNotFound.code)
def test_router_add_gateway_net_not_external_returns_400(self):
with self.router() as r:
with self.subnet() as s:
# intentionally do not set net as external
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'],
expected_code=exc.HTTPBadRequest.code)
def test_router_add_gateway_no_subnet_returns_400(self):
with self.router() as r:
with self.network() as n:
self._set_net_external(n['network']['id'])
self._add_external_gateway_to_router(
r['router']['id'],
n['network']['id'], expected_code=exc.HTTPBadRequest.code)
def test_router_remove_interface_inuse_returns_409(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
self._delete('routers', r['router']['id'],
expected_code=exc.HTTPConflict.code)
# remove interface so test can exit without errors
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_router_remove_interface_wrong_subnet_returns_400(self):
with self.router() as r:
with self.subnet() as s:
with self.port() as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
p['port']['id'],
exc.HTTPBadRequest.code)
#remove properly to clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_router_remove_interface_returns_200(self):
with self.router() as r:
with self.port() as p:
body = self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'],
expected_body=body)
def test_router_remove_interface_wrong_port_returns_404(self):
with self.router() as r:
with self.subnet():
with self.port() as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
# create another port for testing failure case
res = self._create_port(self.fmt, p['port']['network_id'])
p2 = self.deserialize(self.fmt, res)
self._router_interface_action('remove',
r['router']['id'],
None,
p2['port']['id'],
exc.HTTPNotFound.code)
# remove correct interface to cleanup
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
# remove extra port created
self._delete('ports', p2['port']['id'])
def test_router_delete(self):
with self.router() as router:
router_id = router['router']['id']
req = self.new_show_request('router', router_id)
res = req.get_response(self._api_for_resource('router'))
self.assertEqual(res.status_int, 404)
def test_router_delete_with_port_existed_returns_409(self):
with self.subnet() as subnet:
res = self._create_router(self.fmt, _uuid())
router = self.deserialize(self.fmt, res)
self._router_interface_action('add',
router['router']['id'],
subnet['subnet']['id'],
None)
self._delete('routers', router['router']['id'],
exc.HTTPConflict.code)
self._router_interface_action('remove',
router['router']['id'],
subnet['subnet']['id'],
None)
self._delete('routers', router['router']['id'])
def test_router_delete_with_floatingip_existed_returns_409(self):
with self.port() as p:
private_sub = {'subnet': {'id':
p['port']['fixed_ips'][0]['subnet_id']}}
with self.subnet(cidr='12.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
res = self._create_router(self.fmt, _uuid())
r = self.deserialize(self.fmt, res)
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
res = self._create_floatingip(
self.fmt, public_sub['subnet']['network_id'],
port_id=p['port']['id'])
self.assertEqual(res.status_int, exc.HTTPCreated.code)
floatingip = self.deserialize(self.fmt, res)
self._delete('routers', r['router']['id'],
expected_code=exc.HTTPConflict.code)
# Cleanup
self._delete('floatingips', floatingip['floatingip']['id'])
self._router_interface_action('remove', r['router']['id'],
private_sub['subnet']['id'],
None)
self._delete('routers', r['router']['id'])
def test_router_show(self):
name = 'router1'
tenant_id = _uuid()
expected_value = [('name', name), ('tenant_id', tenant_id),
('admin_state_up', True), ('status', 'ACTIVE'),
('external_gateway_info', None)]
with self.router(name='router1', admin_state_up=True,
tenant_id=tenant_id) as router:
res = self._show('routers', router['router']['id'])
for k, v in expected_value:
self.assertEqual(res['router'][k], v)
def test_network_update_external_failure(self):
with self.router() as r:
with self.subnet() as s1:
self._set_net_external(s1['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._update('networks', s1['subnet']['network_id'],
{'network': {external_net.EXTERNAL: False}},
expected_code=exc.HTTPConflict.code)
self._remove_external_gateway_from_router(
r['router']['id'],
s1['subnet']['network_id'])
def test_network_update_external(self):
with self.router() as r:
with self.network('test_net') as testnet:
self._set_net_external(testnet['network']['id'])
with self.subnet() as s1:
self._set_net_external(s1['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._update('networks', testnet['network']['id'],
{'network': {external_net.EXTERNAL: False}})
self._remove_external_gateway_from_router(
r['router']['id'],
s1['subnet']['network_id'])
def test_floatingip_crd_ops(self):
with self.floatingip_with_assoc() as fip:
self._validate_floating_ip(fip)
# post-delete, check that it is really gone
body = self._list('floatingips')
self.assertEqual(len(body['floatingips']), 0)
self._show('floatingips', fip['floatingip']['id'],
expected_code=exc.HTTPNotFound.code)
def _test_floatingip_with_assoc_fails(self, plugin_method):
with self.subnet(cidr='200.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
with self.port() as private_port:
with self.router() as r:
sid = private_port['port']['fixed_ips'][0]['subnet_id']
private_sub = {'subnet': {'id': sid}}
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
with mock.patch(plugin_method) as pl:
pl.side_effect = n_exc.BadRequest(
resource='floatingip',
msg='fake_error')
res = self._create_floatingip(
self.fmt,
public_sub['subnet']['network_id'],
port_id=private_port['port']['id'])
self.assertEqual(res.status_int, 400)
for p in self._list('ports')['ports']:
if (p['device_owner'] ==
l3_constants.DEVICE_OWNER_FLOATINGIP):
self.fail('garbage port is not deleted')
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action('remove',
r['router']['id'],
private_sub['subnet']['id'],
None)
def test_floatingip_with_assoc_fails(self):
self._test_floatingip_with_assoc_fails(
'neutron.db.l3_db.L3_NAT_db_mixin._check_and_get_fip_assoc')
def test_create_floatingip_with_assoc(
self, expected_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
with self.floatingip_with_assoc() as fip:
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['id'],
fip['floatingip']['id'])
self.assertEqual(body['floatingip']['port_id'],
fip['floatingip']['port_id'])
self.assertEqual(expected_status, body['floatingip']['status'])
self.assertIsNotNone(body['floatingip']['fixed_ip_address'])
self.assertIsNotNone(body['floatingip']['router_id'])
def test_floatingip_update(
self, expected_status=l3_constants.FLOATINGIP_STATUS_ACTIVE):
with self.port() as p:
private_sub = {'subnet': {'id':
p['port']['fixed_ips'][0]['subnet_id']}}
with self.floatingip_no_assoc(private_sub) as fip:
body = self._show('floatingips', fip['floatingip']['id'])
self.assertIsNone(body['floatingip']['port_id'])
self.assertIsNone(body['floatingip']['fixed_ip_address'])
self.assertEqual(body['floatingip']['status'], expected_status)
port_id = p['port']['id']
ip_address = p['port']['fixed_ips'][0]['ip_address']
body = self._update('floatingips', fip['floatingip']['id'],
{'floatingip': {'port_id': port_id}})
self.assertEqual(body['floatingip']['port_id'], port_id)
self.assertEqual(body['floatingip']['fixed_ip_address'],
ip_address)
def test_floatingip_create_different_fixed_ip_same_port(self):
'''This tests that it is possible to delete a port that has
multiple floating ip addresses associated with it (each floating
address associated with a unique fixed address).
'''
with self.router() as r:
with self.subnet(cidr='11.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
with self.subnet() as private_sub:
ip_range = list(netaddr.IPNetwork(
private_sub['subnet']['cidr']))
fixed_ips = [{'ip_address': str(ip_range[-3])},
{'ip_address': str(ip_range[-2])}]
self._router_interface_action(
'add', r['router']['id'],
private_sub['subnet']['id'], None)
with self.port(subnet=private_sub,
fixed_ips=fixed_ips) as p:
fip1 = self._make_floatingip(
self.fmt,
public_sub['subnet']['network_id'],
p['port']['id'],
fixed_ip=str(ip_range[-2]))
fip2 = self._make_floatingip(
self.fmt,
public_sub['subnet']['network_id'],
p['port']['id'],
fixed_ip=str(ip_range[-3]))
# Test that floating ips are assigned successfully.
body = self._show('floatingips',
fip1['floatingip']['id'])
self.assertEqual(
body['floatingip']['port_id'],
fip1['floatingip']['port_id'])
body = self._show('floatingips',
fip2['floatingip']['id'])
self.assertEqual(
body['floatingip']['port_id'],
fip2['floatingip']['port_id'])
self._delete('ports', p['port']['id'])
# Test that port has been successfully deleted.
body = self._show('ports', p['port']['id'],
expected_code=exc.HTTPNotFound.code)
for fip in [fip1, fip2]:
self._delete('floatingips',
fip['floatingip']['id'])
self._router_interface_action(
'remove', r['router']['id'],
private_sub['subnet']['id'], None)
self._remove_external_gateway_from_router(
r['router']['id'],
public_sub['subnet']['network_id'])
def test_floatingip_update_different_fixed_ip_same_port(self):
with self.subnet() as s:
ip_range = list(netaddr.IPNetwork(s['subnet']['cidr']))
fixed_ips = [{'ip_address': str(ip_range[-3])},
{'ip_address': str(ip_range[-2])}]
with self.port(subnet=s, fixed_ips=fixed_ips) as p:
with self.floatingip_with_assoc(
port_id=p['port']['id'],
fixed_ip=str(ip_range[-3])) as fip:
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(fip['floatingip']['id'],
body['floatingip']['id'])
self.assertEqual(fip['floatingip']['port_id'],
body['floatingip']['port_id'])
self.assertEqual(str(ip_range[-3]),
body['floatingip']['fixed_ip_address'])
self.assertIsNotNone(body['floatingip']['router_id'])
body_2 = self._update(
'floatingips', fip['floatingip']['id'],
{'floatingip': {'port_id': p['port']['id'],
'fixed_ip_address': str(ip_range[-2])}
})
self.assertEqual(fip['floatingip']['port_id'],
body_2['floatingip']['port_id'])
self.assertEqual(str(ip_range[-2]),
body_2['floatingip']['fixed_ip_address'])
def test_floatingip_update_different_router(self):
        # Create subnets with different CIDRs to account for plugins which
        # do not support overlapping IPs
with contextlib.nested(self.subnet(cidr='10.0.0.0/24'),
self.subnet(cidr='10.0.1.0/24')) as (
s1, s2):
with contextlib.nested(self.port(subnet=s1),
self.port(subnet=s2)) as (p1, p2):
private_sub1 = {'subnet':
{'id':
p1['port']['fixed_ips'][0]['subnet_id']}}
private_sub2 = {'subnet':
{'id':
p2['port']['fixed_ips'][0]['subnet_id']}}
with self.subnet(cidr='12.0.0.0/24') as public_sub:
with contextlib.nested(
self.floatingip_no_assoc_with_public_sub(
private_sub1, public_sub=public_sub),
self.floatingip_no_assoc_with_public_sub(
private_sub2, public_sub=public_sub)) as (
(fip1, r1), (fip2, r2)):
def assert_no_assoc(fip):
body = self._show('floatingips',
fip['floatingip']['id'])
self.assertIsNone(body['floatingip']['port_id'])
self.assertIsNone(
body['floatingip']['fixed_ip_address'])
assert_no_assoc(fip1)
assert_no_assoc(fip2)
def associate_and_assert(fip, port):
port_id = port['port']['id']
ip_address = (port['port']['fixed_ips']
[0]['ip_address'])
body = self._update(
'floatingips', fip['floatingip']['id'],
{'floatingip': {'port_id': port_id}})
self.assertEqual(body['floatingip']['port_id'],
port_id)
self.assertEqual(
body['floatingip']['fixed_ip_address'],
ip_address)
return body['floatingip']['router_id']
fip1_r1_res = associate_and_assert(fip1, p1)
self.assertEqual(fip1_r1_res, r1['router']['id'])
# The following operation will associate the floating
# ip to a different router
fip1_r2_res = associate_and_assert(fip1, p2)
self.assertEqual(fip1_r2_res, r2['router']['id'])
fip2_r1_res = associate_and_assert(fip2, p1)
self.assertEqual(fip2_r1_res, r1['router']['id'])
# disassociate fip1
self._update(
'floatingips', fip1['floatingip']['id'],
{'floatingip': {'port_id': None}})
fip2_r2_res = associate_and_assert(fip2, p2)
self.assertEqual(fip2_r2_res, r2['router']['id'])
def test_floatingip_port_delete(self):
with self.subnet() as private_sub:
with self.floatingip_no_assoc(private_sub) as fip:
with self.port(subnet=private_sub) as p:
body = self._update('floatingips', fip['floatingip']['id'],
{'floatingip':
{'port_id': p['port']['id']}})
# note: once this port goes out of scope, the port will be
# deleted, which is what we want to test. We want to confirm
# that the fields are set back to None
self._delete('ports', p['port']['id'])
body = self._show('floatingips', fip['floatingip']['id'])
self.assertEqual(body['floatingip']['id'],
fip['floatingip']['id'])
self.assertIsNone(body['floatingip']['port_id'])
self.assertIsNone(body['floatingip']['fixed_ip_address'])
self.assertIsNone(body['floatingip']['router_id'])
def test_two_fips_one_port_invalid_return_409(self):
with self.floatingip_with_assoc() as fip1:
res = self._create_floatingip(
self.fmt,
fip1['floatingip']['floating_network_id'],
fip1['floatingip']['port_id'])
self.assertEqual(res.status_int, exc.HTTPConflict.code)
def test_floating_ip_direct_port_delete_returns_409(self):
found = False
with self.floatingip_with_assoc():
for p in self._list('ports')['ports']:
if p['device_owner'] == l3_constants.DEVICE_OWNER_FLOATINGIP:
self._delete('ports', p['id'],
expected_code=exc.HTTPConflict.code)
found = True
self.assertTrue(found)
def _test_floatingip_with_invalid_create_port(self, plugin_class):
with self.port() as p:
private_sub = {'subnet': {'id':
p['port']['fixed_ips'][0]['subnet_id']}}
with self.subnet(cidr='12.0.0.0/24') as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
res = self._create_router(self.fmt, _uuid())
r = self.deserialize(self.fmt, res)
self._add_external_gateway_to_router(
r['router']['id'],
public_sub['subnet']['network_id'])
self._router_interface_action(
'add', r['router']['id'],
private_sub['subnet']['id'],
None)
with mock.patch(plugin_class + '.create_port') as createport:
createport.return_value = {'fixed_ips': []}
res = self._create_floatingip(
self.fmt, public_sub['subnet']['network_id'],
port_id=p['port']['id'])
self.assertEqual(res.status_int,
exc.HTTPBadRequest.code)
self._router_interface_action('remove',
r['router']['id'],
private_sub
['subnet']['id'],
None)
self._delete('routers', r['router']['id'])
def test_floatingip_with_invalid_create_port(self):
self._test_floatingip_with_invalid_create_port(
'neutron.db.db_base_plugin_v2.NeutronDbPluginV2')
def test_create_floatingip_no_ext_gateway_return_404(self):
with self.subnet() as public_sub:
self._set_net_external(public_sub['subnet']['network_id'])
with self.port() as private_port:
with self.router():
res = self._create_floatingip(
self.fmt,
public_sub['subnet']['network_id'],
port_id=private_port['port']['id'])
# this should be some kind of error
self.assertEqual(res.status_int, exc.HTTPNotFound.code)
def test_create_floating_non_ext_network_returns_400(self):
with self.subnet() as public_sub:
# normally we would set the network of public_sub to be
# external, but the point of this test is to handle when
# that is not the case
with self.router():
res = self._create_floatingip(
self.fmt,
public_sub['subnet']['network_id'])
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_create_floatingip_no_public_subnet_returns_400(self):
with self.network() as public_network:
with self.port() as private_port:
with self.router() as r:
sid = private_port['port']['fixed_ips'][0]['subnet_id']
private_sub = {'subnet': {'id': sid}}
self._router_interface_action('add', r['router']['id'],
private_sub['subnet']['id'],
None)
res = self._create_floatingip(
self.fmt,
public_network['network']['id'],
port_id=private_port['port']['id'])
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
# cleanup
self._router_interface_action('remove',
r['router']['id'],
private_sub['subnet']['id'],
None)
def test_create_floatingip_invalid_floating_network_id_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, 'iamnotanuuid',
uuidutils.generate_uuid(), '192.168.0.1')
self.assertEqual(res.status_int, 400)
def test_create_floatingip_invalid_floating_port_id_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
'iamnotanuuid', '192.168.0.1')
self.assertEqual(res.status_int, 400)
def test_create_floatingip_invalid_fixed_ip_address_returns_400(self):
# API-level test - no need to create all objects for l3 plugin
res = self._create_floatingip(self.fmt, uuidutils.generate_uuid(),
uuidutils.generate_uuid(), 'iamnotnanip')
self.assertEqual(res.status_int, 400)
def test_floatingip_list_with_sort(self):
with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
self.subnet(cidr="11.0.0.0/24"),
self.subnet(cidr="12.0.0.0/24")
) as (s1, s2, s3):
network_id1 = s1['subnet']['network_id']
network_id2 = s2['subnet']['network_id']
network_id3 = s3['subnet']['network_id']
self._set_net_external(network_id1)
self._set_net_external(network_id2)
self._set_net_external(network_id3)
fp1 = self._make_floatingip(self.fmt, network_id1)
fp2 = self._make_floatingip(self.fmt, network_id2)
fp3 = self._make_floatingip(self.fmt, network_id3)
try:
self._test_list_with_sort('floatingip', (fp3, fp2, fp1),
[('floating_ip_address', 'desc')])
finally:
self._delete('floatingips', fp1['floatingip']['id'])
self._delete('floatingips', fp2['floatingip']['id'])
self._delete('floatingips', fp3['floatingip']['id'])
def test_floatingip_list_with_port_id(self):
with self.floatingip_with_assoc() as fip:
port_id = fip['floatingip']['port_id']
res = self._list('floatingips',
query_params="port_id=%s" % port_id)
self.assertEqual(len(res['floatingips']), 1)
res = self._list('floatingips', query_params="port_id=aaa")
self.assertEqual(len(res['floatingips']), 0)
def test_floatingip_list_with_pagination(self):
with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
self.subnet(cidr="11.0.0.0/24"),
self.subnet(cidr="12.0.0.0/24")
) as (s1, s2, s3):
network_id1 = s1['subnet']['network_id']
network_id2 = s2['subnet']['network_id']
network_id3 = s3['subnet']['network_id']
self._set_net_external(network_id1)
self._set_net_external(network_id2)
self._set_net_external(network_id3)
fp1 = self._make_floatingip(self.fmt, network_id1)
fp2 = self._make_floatingip(self.fmt, network_id2)
fp3 = self._make_floatingip(self.fmt, network_id3)
try:
self._test_list_with_pagination(
'floatingip', (fp1, fp2, fp3),
('floating_ip_address', 'asc'), 2, 2)
finally:
self._delete('floatingips', fp1['floatingip']['id'])
self._delete('floatingips', fp2['floatingip']['id'])
self._delete('floatingips', fp3['floatingip']['id'])
def test_floatingip_list_with_pagination_reverse(self):
with contextlib.nested(self.subnet(cidr="10.0.0.0/24"),
self.subnet(cidr="11.0.0.0/24"),
self.subnet(cidr="12.0.0.0/24")
) as (s1, s2, s3):
network_id1 = s1['subnet']['network_id']
network_id2 = s2['subnet']['network_id']
network_id3 = s3['subnet']['network_id']
self._set_net_external(network_id1)
self._set_net_external(network_id2)
self._set_net_external(network_id3)
fp1 = self._make_floatingip(self.fmt, network_id1)
fp2 = self._make_floatingip(self.fmt, network_id2)
fp3 = self._make_floatingip(self.fmt, network_id3)
try:
self._test_list_with_pagination_reverse(
'floatingip', (fp1, fp2, fp3),
('floating_ip_address', 'asc'), 2, 2)
finally:
self._delete('floatingips', fp1['floatingip']['id'])
self._delete('floatingips', fp2['floatingip']['id'])
self._delete('floatingips', fp3['floatingip']['id'])
def test_floatingip_delete_router_intf_with_subnet_id_returns_409(self):
found = False
with self.floatingip_with_assoc():
for p in self._list('ports')['ports']:
if p['device_owner'] == l3_constants.DEVICE_OWNER_ROUTER_INTF:
subnet_id = p['fixed_ips'][0]['subnet_id']
router_id = p['device_id']
self._router_interface_action(
'remove', router_id, subnet_id, None,
expected_code=exc.HTTPConflict.code)
found = True
break
self.assertTrue(found)
def test_floatingip_delete_router_intf_with_port_id_returns_409(self):
found = False
with self.floatingip_with_assoc():
for p in self._list('ports')['ports']:
if p['device_owner'] == l3_constants.DEVICE_OWNER_ROUTER_INTF:
router_id = p['device_id']
self._router_interface_action(
'remove', router_id, None, p['id'],
expected_code=exc.HTTPConflict.code)
found = True
break
self.assertTrue(found)
def test_router_delete_subnet_inuse_returns_409(self):
with self.router() as r:
with self.subnet() as s:
self._router_interface_action('add',
r['router']['id'],
s['subnet']['id'],
None)
                # subnet cannot be deleted as it's attached to a router
self._delete('subnets', s['subnet']['id'],
expected_code=exc.HTTPConflict.code)
# remove interface so test can exit without errors
self._router_interface_action('remove',
r['router']['id'],
s['subnet']['id'],
None)
def test_delete_ext_net_with_disassociated_floating_ips(self):
with self.network() as net:
net_id = net['network']['id']
self._set_net_external(net_id)
with self.subnet(network=net):
self._make_floatingip(self.fmt, net_id)
class L3AgentDbTestCaseBase(L3NatTestCaseMixin):
"""Unit tests for methods called by the L3 agent."""
def test_l3_agent_routers_query_interfaces(self):
with self.router() as r:
with self.port() as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
routers = self.plugin.get_sync_data(
context.get_admin_context(), None)
self.assertEqual(1, len(routers))
interfaces = routers[0][l3_constants.INTERFACE_KEY]
self.assertEqual(1, len(interfaces))
subnet_id = interfaces[0]['subnet']['id']
wanted_subnetid = p['port']['fixed_ips'][0]['subnet_id']
self.assertEqual(wanted_subnetid, subnet_id)
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_l3_agent_routers_query_ignore_interfaces_with_moreThanOneIp(self):
with self.router() as r:
with self.subnet(cidr='9.0.1.0/24') as subnet:
with self.port(subnet=subnet,
fixed_ips=[{'ip_address': '9.0.1.3'}]) as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
port = {'port': {'fixed_ips':
[{'ip_address': '9.0.1.4',
'subnet_id': subnet['subnet']['id']},
{'ip_address': '9.0.1.5',
'subnet_id': subnet['subnet']['id']}]}}
ctx = context.get_admin_context()
self.core_plugin.update_port(ctx, p['port']['id'], port)
routers = self.plugin.get_sync_data(ctx, None)
self.assertEqual(1, len(routers))
interfaces = routers[0].get(l3_constants.INTERFACE_KEY, [])
self.assertEqual(1, len(interfaces))
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
def test_l3_agent_routers_query_gateway(self):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
routers = self.plugin.get_sync_data(
context.get_admin_context(), [r['router']['id']])
self.assertEqual(1, len(routers))
gw_port = routers[0]['gw_port']
self.assertEqual(s['subnet']['id'], gw_port['subnet']['id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
def test_l3_agent_routers_query_floatingips(self):
with self.floatingip_with_assoc() as fip:
routers = self.plugin.get_sync_data(
context.get_admin_context(), [fip['floatingip']['router_id']])
self.assertEqual(1, len(routers))
floatingips = routers[0][l3_constants.FLOATINGIP_KEY]
self.assertEqual(1, len(floatingips))
self.assertEqual(floatingips[0]['id'],
fip['floatingip']['id'])
self.assertEqual(floatingips[0]['port_id'],
fip['floatingip']['port_id'])
self.assertIsNotNone(floatingips[0]['fixed_ip_address'])
self.assertIsNotNone(floatingips[0]['router_id'])
def _test_notify_op_agent(self, target_func, *args):
l3_rpc_agent_api_str = (
'neutron.api.rpc.agentnotifiers.l3_rpc_agent_api.L3AgentNotifyAPI')
plugin = manager.NeutronManager.get_service_plugins()[
service_constants.L3_ROUTER_NAT]
oldNotify = plugin.l3_rpc_notifier
try:
with mock.patch(l3_rpc_agent_api_str) as notifyApi:
plugin.l3_rpc_notifier = notifyApi
kargs = [item for item in args]
kargs.append(notifyApi)
target_func(*kargs)
except Exception:
plugin.l3_rpc_notifier = oldNotify
raise
else:
plugin.l3_rpc_notifier = oldNotify
def _test_router_gateway_op_agent(self, notifyApi):
with self.router() as r:
with self.subnet() as s:
self._set_net_external(s['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s['subnet']['network_id'])
self._remove_external_gateway_from_router(
r['router']['id'],
s['subnet']['network_id'])
self.assertEqual(
2, notifyApi.routers_updated.call_count)
def test_router_gateway_op_agent(self):
self._test_notify_op_agent(self._test_router_gateway_op_agent)
def _test_interfaces_op_agent(self, r, notifyApi):
with self.port() as p:
self._router_interface_action('add',
r['router']['id'],
None,
p['port']['id'])
# clean-up
self._router_interface_action('remove',
r['router']['id'],
None,
p['port']['id'])
self.assertEqual(2, notifyApi.routers_updated.call_count)
def test_interfaces_op_agent(self):
with self.router() as r:
self._test_notify_op_agent(
self._test_interfaces_op_agent, r)
def _test_floatingips_op_agent(self, notifyApi):
with self.floatingip_with_assoc():
pass
# add gateway, add interface, associate, deletion of floatingip,
# delete gateway, delete interface
self.assertEqual(6, notifyApi.routers_updated.call_count)
def test_floatingips_op_agent(self):
self._test_notify_op_agent(self._test_floatingips_op_agent)
class L3BaseForIntTests(test_db_plugin.NeutronDbPluginV2TestCase,
testlib_plugin.NotificationSetupHelper):
mock_rescheduling = True
def setUp(self, plugin=None, ext_mgr=None, service_plugins=None):
if not plugin:
plugin = 'neutron.tests.unit.test_l3_plugin.TestL3NatIntPlugin'
# for these tests we need to enable overlapping ips
cfg.CONF.set_default('allow_overlapping_ips', True)
ext_mgr = ext_mgr or L3TestExtensionManager()
if self.mock_rescheduling:
mock.patch('%s._check_router_needs_rescheduling' % plugin,
new=lambda *a: False).start()
super(L3BaseForIntTests, self).setUp(plugin=plugin, ext_mgr=ext_mgr,
service_plugins=service_plugins)
self.setup_notification_driver()
class L3BaseForSepTests(test_db_plugin.NeutronDbPluginV2TestCase,
testlib_plugin.NotificationSetupHelper):
def setUp(self, plugin=None, ext_mgr=None):
# the plugin without L3 support
if not plugin:
plugin = 'neutron.tests.unit.test_l3_plugin.TestNoL3NatPlugin'
# the L3 service plugin
l3_plugin = ('neutron.tests.unit.test_l3_plugin.'
'TestL3NatServicePlugin')
service_plugins = {'l3_plugin_name': l3_plugin}
# for these tests we need to enable overlapping ips
cfg.CONF.set_default('allow_overlapping_ips', True)
if not ext_mgr:
ext_mgr = L3TestExtensionManager()
super(L3BaseForSepTests, self).setUp(plugin=plugin, ext_mgr=ext_mgr,
service_plugins=service_plugins)
self.setup_notification_driver()
class L3NatDBIntAgentSchedulingTestCase(L3BaseForIntTests,
L3NatTestCaseMixin,
test_agent_ext_plugin.
AgentDBTestMixIn):
"""Unit tests for core plugin with L3 routing and scheduling integrated."""
def setUp(self, plugin='neutron.tests.unit.test_l3_plugin.'
'TestL3NatIntAgentSchedulingPlugin',
ext_mgr=None, service_plugins=None):
self.mock_rescheduling = False
super(L3NatDBIntAgentSchedulingTestCase, self).setUp(
plugin, ext_mgr, service_plugins)
self.adminContext = context.get_admin_context()
def _assert_router_on_agent(self, router_id, agent_host):
plugin = manager.NeutronManager.get_service_plugins().get(
service_constants.L3_ROUTER_NAT)
agents = plugin.list_l3_agents_hosting_router(
self.adminContext, router_id)['agents']
self.assertEqual(len(agents), 1)
self.assertEqual(agents[0]['host'], agent_host)
def test_update_gateway_agent_exists_supporting_network(self):
with contextlib.nested(self.router(),
self.subnet(),
self.subnet()) as (r, s1, s2):
self._set_net_external(s1['subnet']['network_id'])
l3_rpc_cb = l3_rpc.L3RpcCallback()
self._register_one_l3_agent(
host='host1',
ext_net_id=s1['subnet']['network_id'])
self._register_one_l3_agent(
host='host2', internal_only=False,
ext_net_id=s2['subnet']['network_id'])
l3_rpc_cb.sync_routers(self.adminContext,
host='host1')
self._assert_router_on_agent(r['router']['id'], 'host1')
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host1')
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s2['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host2')
self._remove_external_gateway_from_router(
r['router']['id'],
s2['subnet']['network_id'])
def test_update_gateway_agent_exists_supporting_multiple_network(self):
with contextlib.nested(self.router(),
self.subnet(),
self.subnet()) as (r, s1, s2):
self._set_net_external(s1['subnet']['network_id'])
l3_rpc_cb = l3_rpc.L3RpcCallback()
self._register_one_l3_agent(
host='host1',
ext_net_id=s1['subnet']['network_id'])
self._register_one_l3_agent(
host='host2', internal_only=False,
ext_net_id='', ext_bridge='')
l3_rpc_cb.sync_routers(self.adminContext,
host='host1')
self._assert_router_on_agent(r['router']['id'], 'host1')
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host1')
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s2['subnet']['network_id'])
self._assert_router_on_agent(r['router']['id'], 'host2')
self._remove_external_gateway_from_router(
r['router']['id'],
s2['subnet']['network_id'])
def test_router_update_gateway_no_eligible_l3_agent(self):
with self.router() as r:
with self.subnet() as s1:
with self.subnet() as s2:
self._set_net_external(s1['subnet']['network_id'])
self._set_net_external(s2['subnet']['network_id'])
self._add_external_gateway_to_router(
r['router']['id'],
s1['subnet']['network_id'],
expected_code=exc.HTTPBadRequest.code)
class L3RpcCallbackTestCase(base.BaseTestCase):
def setUp(self):
super(L3RpcCallbackTestCase, self).setUp()
self.mock_plugin = mock.patch.object(
l3_rpc.L3RpcCallback,
'plugin', new_callable=mock.PropertyMock).start()
self.mock_l3plugin = mock.patch.object(
l3_rpc.L3RpcCallback,
'l3plugin', new_callable=mock.PropertyMock).start()
self.l3_rpc_cb = l3_rpc.L3RpcCallback()
def test__ensure_host_set_on_port_update_on_concurrent_delete(self):
port_id = 'foo_port_id'
port = {
'id': port_id,
'device_owner': 'compute:None',
portbindings.HOST_ID: '',
portbindings.VIF_TYPE: portbindings.VIF_TYPE_BINDING_FAILED
}
router_id = 'foo_router_id'
self.l3_rpc_cb.plugin.update_port.side_effect = n_exc.PortNotFound(
port_id=port_id)
with mock.patch.object(l3_rpc.LOG, 'debug') as mock_log:
self.l3_rpc_cb._ensure_host_set_on_port(
mock.ANY, mock.ANY, port, router_id)
self.l3_rpc_cb.plugin.update_port.assert_called_once_with(
mock.ANY, port_id, {'port': {'binding:host_id': mock.ANY}})
self.assertTrue(mock_log.call_count)
expected_message = ('Port foo_port_id not found while updating '
'agent binding for router foo_router_id.')
actual_message = mock_log.call_args[0][0]
self.assertEqual(expected_message, actual_message)
class L3AgentDbIntTestCase(L3BaseForIntTests, L3AgentDbTestCaseBase):
"""Unit tests for methods called by the L3 agent for
the case where core plugin implements L3 routing.
"""
def setUp(self):
super(L3AgentDbIntTestCase, self).setUp()
self.core_plugin = TestL3NatIntPlugin()
self.plugin = self.core_plugin
class L3AgentDbSepTestCase(L3BaseForSepTests, L3AgentDbTestCaseBase):
"""Unit tests for methods called by the L3 agent for the
case where separate service plugin implements L3 routing.
"""
def setUp(self):
super(L3AgentDbSepTestCase, self).setUp()
self.core_plugin = TestNoL3NatPlugin()
self.plugin = TestL3NatServicePlugin()
class L3NatDBIntTestCase(L3BaseForIntTests, L3NatTestCaseBase):
"""Unit tests for core plugin with L3 routing integrated."""
pass
class L3NatDBSepTestCase(L3BaseForSepTests, L3NatTestCaseBase):
"""Unit tests for a separate L3 routing service plugin."""
pass
class L3NatDBIntTestCaseXML(L3NatDBIntTestCase):
fmt = 'xml'
class L3NatDBSepTestCaseXML(L3NatDBSepTestCase):
fmt = 'xml'
# -*- coding: utf-8 -*-
import os
from pydocgen.model import ListStyleProperty, AlignmentProperty, FontEffectProperty, Image, Style, Table
from pydocgen.builders.common import Builder
class DitaBuilder(Builder):
"""Class responsible for creating a DITA V1.1 document.
It inherits from base Builder class shared between all builder classes.
"""
def __init__(self):
super(DitaBuilder, self).__init__()
self.extension = "dita"
def generate_document(self, document):
"""Main method for generating DITA document. Generates DITA frame for content and fills that frame with data.
Args:
document (Document): Stores a document representation independent of a particular builder class.
"""
body = ''
for element in document.content:
body += self.generate(element)
result = ''
result += '<?xml version="1.0" encoding="utf-8"?>\n'
result += '<!DOCTYPE topic PUBLIC "-//OASIS//DTD DITA Composite//EN" "http://docs.oasis-open.org/dita/v1.1/CD01/dtd/ditabase.dtd">\n'
if 'language' in document.properties:
result += '<topic xml:lang=\"' + document.properties['language'] + '\" id="main_topic" >'
else:
result += '<topic xml:lang="en" id="main_topic">\n'
title = ''
if 'title' in document.properties:
title = document.properties['title']
result += '\t<title>' + title + '</title>\n'
result += '\t<shortdesc>' + title + '</shortdesc>\n'
result += '<body>\n' + body + '\n</body>\n</topic>\n'
return result
def generate_paragraph(self, paragraph):
"""Generates a DITA paragraph and fills it with data.
Args:
paragraph (Paragraph): Stores information about the paragraph. Information is independent of the output file format.
"""
p, tmp = '', None
if paragraph.content:
for element in paragraph.content:
tmp = self.generate(element)
if tmp:
p += tmp
return '\n<p' + \
'>\n\t' + p + '\n</p>\n'
def generate_span(self, span):
"""Generates a DITA span.
Args:
            span (Span): Stores information about the span. Information is independent of the output file format.
"""
css = self.fontType(span)
open1 = ''
close1 = ''
if css == 'b':
open1 = '<b>'
close1 = '</b>'
if css == 'u':
open1 = '<u>'
close1 = '</u>'
        if css == 'i':
            open1 = '<i>'
            close1 = '</i>'
return open1 + span.text + close1
def generate_header(self, header):
"""Generates a DITA header and fills it with data.
Args:
header (Header): Stores information about the header. Information is independent of the output file format.
"""
content = ''
if header.content:
for element in header.content:
if element:
                    content += self.generate(element)
seq_number = ''
if header.sequence is not None:
if header.is_style_property_set('header-numbered'):
if header.effective_style['header-numbered']:
                    if header.is_style_property_set("seq-number-sep"):
                        seq_number = header.sequence.to_str(header.effective_style['seq-number-sep'])
else:
seq_number = str(header.sequence)
header.sequence.advance()
else:
header.sequence.advance()
return '<section>\n\t<title>' + seq_number + ' ' + content + '</title>\n</section>'
def generate_list(self, lst):
"""Generates a DITA list and fills it with content.
Args:
lst(List): Stores information about the list. Information is independent of the output file format.
"""
result, tmp = '', None
for item in lst.content:
tmp = self.generate(item)
if tmp:
result += '\n<li' + '>' + tmp + '</li>\n'
if 'list-style' in lst.style.keys() and lst.style['list-style'] == ListStyleProperty.NUMBER:
return '\n<ol' + '>\n' + result + '\n</ol>\n'
elif 'list-style' in lst.style.keys() and lst.style['list-style'] == ListStyleProperty.BULLET:
return '\n<ul' + '>\n' + result + '\n</ul>\n'
else:
return '\n<ul' + '>\n' + result + '\n</ul>\n'
def generate_table(self, table):
"""Generates a DITA table and fills the table with content.
Args:
table (Table): Stores information about the table. Information is independent of the output file format.
"""
result = '\n\n<table' + '>'
caption = ''
if table.sequence is not None:
            caption += str(table.sequence) + ' '
for c in table.caption:
caption += self.generate(c)
result += '\n<title>' + caption + '</title>\n'
colCount = 0
for j in xrange(0, table.cols_num):
colCount += 1
result += '<tgroup cols=\"' + str(colCount) + '\">\n'
for j in xrange(0, table.cols_num):
result += '<colspec colnum=\"' + str(j + 1) + '\" colname=\"col' + str(j + 1) + '\" />'
i = 0
skip_cols = 0
result += '\n<thead>\n<row>'
for j in xrange(0, table.cols_num):
if skip_cols > 0:
skip_cols -= 1
continue
colspan_code = ''
if table.get_cell(i, j).colspan is not None and table.get_cell(i, j).colspan > 1:
skip_cols = table.get_cell(i, j).colspan - 1
colspan_code = ' namest=\"col' + str(j + 1) + '\" nameend=\"col' + str(
j + table.get_cell(i, j).colspan) + '\" '
result += '\n<entry ' + colspan_code + self.alignmentFun(table.get_cell(i, j)) + '>'
for k in table.get_cell(i, j).content:
result += self.generate(k)
result += '</entry>'
result += '\n</row>\n</thead>\n<tbody>'
skip_cols = 0
for i in xrange(1, table.rows_num):
result += '\n<row>\n' #style? no!
for j in xrange(0, table.cols_num):
if skip_cols > 0:
skip_cols -= 1
continue
colspan_code = ''
if table.get_cell(i, j).colspan is not None and table.get_cell(i, j).colspan > 1:
skip_cols = table.get_cell(i, j).colspan - 1
colspan_code = ' namest=\"col' + str(j + 1) + '\" nameend=\"col' + str(
j + table.get_cell(i, j).colspan) + '\" '
result += '\n<entry ' + colspan_code + self.alignmentFun(table.get_cell(i, j)) + '>'
for k in table.get_cell(i, j).content:
result += self.generate(k)
result += '</entry>'
result += '\n</row>\n'
return result + '\n</tbody>\n</tgroup>\n</table>\n\n'
def generate_image(self, image):
"""Generates a DITA image.
Args:
image (Image): Stores information about the image. Information is independent of the output file format.
"""
image_caption = ''
        if image.sequence is not None:
            image_caption += str(image.sequence) + ' '
for c in image.caption:
image_caption += self.generate(c)
return '<image href=\"' + os.path.basename(image.path) +\
'\" placement=\"break\" ' + self.alignmentFun(image) +\
'></image>\n'
def alignmentFun(self, elem):
"""Sets text alignment for the element. Alignment is among left, center, right and justify.
Args:
elem (Element): Stores information about content of particular part of text.
"""
style = elem.style
css = ''
        if style is not None:
for key in style.keys():
if key == 'alignment':
css += 'align=\"' + {AlignmentProperty.LEFT: 'left\"', \
AlignmentProperty.CENTER: 'center\"', \
AlignmentProperty.RIGHT: 'right\"', \
AlignmentProperty.JUSTIFY: 'justify\"' \
}.get(style[key])
return css
def widthFun(self, elem):
"""Sets width and height for part of text stored in elem.
Args:
elem (Element): Stores information about content of particular part of text.
"""
style = elem.style
css = ''
if style is not None:
for key in style.keys():
if key == 'width':
css += 'colwidth=\" ' + str(style[key]) + '*\"'
elif key == 'height':
css += 'colheight=\"' + str(style[key]) + '*\"'
return css
def fontType(self, elem):
"""Sets font effect such as bold, italic underline and strike for the element.
Args:
elem (Element): Stores information about particular part of text.
"""
if isinstance(elem, str) or isinstance(elem, unicode):
return ''
style = elem.style
css = ''
if style is not None:
for key in style.keys():
if key == 'font-effect':
font_effects = style['font-effect']
if FontEffectProperty.BOLD in font_effects:
css = 'b'
if FontEffectProperty.ITALIC in font_effects:
css = 'i'
elif FontEffectProperty.UNDERLINE in font_effects:
css = 'u'
return css
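# A minimal, hypothetical sketch of driving DitaBuilder directly; it is not
# part of pydocgen.  generate_document() only reads `properties` and `content`
# from the document object, so a small stub with an empty body is enough to
# see the generated DITA frame.  The stub class below is an assumption made
# purely for illustration.
if __name__ == '__main__':
    class _StubDocument(object):
        properties = {'title': 'Example topic', 'language': 'en'}
        content = []  # no body elements in this sketch
    print(DitaBuilder().generate_document(_StubDocument()))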
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys, re, json, time
import platform
import shutil
from collections import defaultdict
from datetime import datetime
from decimal import Decimal
import traceback
import urlparse
import urllib
import threading
from i18n import _
base_units = {'DGB':8, 'mDGB':5, 'uDGB':2}
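# Decimal point position used when formatting amounts in each unit:
# 1 DGB corresponds to 10**8 base units, 1 mDGB to 10**5 and 1 uDGB to 10**2.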
def normalize_version(v):
return [int(x) for x in re.sub(r'(\.0+)*$','', v).split(".")]
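# e.g. normalize_version("2.7.1") -> [2, 7, 1]; trailing ".0" components are
# stripped first, so "2.7" and "2.7.0" normalize to the same list and compare
# equal.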
class NotEnoughFunds(Exception): pass
class InvalidPassword(Exception):
def __str__(self):
return _("Incorrect password")
# Throw this exception to unwind the stack, as when an error occurs;
# however, unlike other exceptions, the user won't be informed.
class UserCancelled(Exception):
'''An exception that is suppressed from the user'''
pass
class MyEncoder(json.JSONEncoder):
def default(self, obj):
from transaction import Transaction
if isinstance(obj, Transaction):
return obj.as_dict()
return super(MyEncoder, self).default(obj)
class PrintError(object):
'''A handy base class'''
def diagnostic_name(self):
return self.__class__.__name__
def print_error(self, *msg):
print_error("[%s]" % self.diagnostic_name(), *msg)
def print_msg(self, *msg):
print_msg("[%s]" % self.diagnostic_name(), *msg)
class ThreadJob(PrintError):
"""A job that is run periodically from a thread's main loop. run() is
called from that thread's context.
"""
def run(self):
"""Called periodically from the thread"""
pass
class DebugMem(ThreadJob):
'''A handy class for debugging GC memory leaks'''
def __init__(self, classes, interval=30):
self.next_time = 0
self.classes = classes
self.interval = interval
def mem_stats(self):
import gc
self.print_error("Start memscan")
gc.collect()
objmap = defaultdict(list)
for obj in gc.get_objects():
for class_ in self.classes:
if isinstance(obj, class_):
objmap[class_].append(obj)
for class_, objs in objmap.items():
self.print_error("%s: %d" % (class_.__name__, len(objs)))
self.print_error("Finish memscan")
def run(self):
if time.time() > self.next_time:
self.mem_stats()
self.next_time = time.time() + self.interval
class DaemonThread(threading.Thread, PrintError):
""" daemon thread that terminates cleanly """
def __init__(self):
threading.Thread.__init__(self)
self.parent_thread = threading.currentThread()
self.running = False
self.running_lock = threading.Lock()
self.job_lock = threading.Lock()
self.jobs = []
def add_jobs(self, jobs):
with self.job_lock:
self.jobs.extend(jobs)
def run_jobs(self):
# Don't let a throwing job disrupt the thread, future runs of
# itself, or other jobs. This is useful protection against
# malformed or malicious server responses
with self.job_lock:
for job in self.jobs:
try:
job.run()
except:
traceback.print_exc(file=sys.stderr)
def remove_jobs(self, jobs):
with self.job_lock:
for job in jobs:
self.jobs.remove(job)
def start(self):
with self.running_lock:
self.running = True
return threading.Thread.start(self)
def is_running(self):
with self.running_lock:
return self.running and self.parent_thread.is_alive()
def stop(self):
with self.running_lock:
self.running = False
def on_stop(self):
if 'ANDROID_DATA' in os.environ:
import jnius
jnius.detach()
self.print_error("jnius detach")
self.print_error("stopped")
is_verbose = False
def set_verbosity(b):
global is_verbose
is_verbose = b
def print_error(*args):
if not is_verbose: return
print_stderr(*args)
def print_stderr(*args):
args = [str(item) for item in args]
sys.stderr.write(" ".join(args) + "\n")
sys.stderr.flush()
def print_msg(*args):
# Stringify args
args = [str(item) for item in args]
sys.stdout.write(" ".join(args) + "\n")
sys.stdout.flush()
def json_encode(obj):
try:
s = json.dumps(obj, sort_keys = True, indent = 4, cls=MyEncoder)
except TypeError:
s = repr(obj)
return s
def json_decode(x):
try:
        return json.loads(x, parse_float=Decimal)
except:
return x
# decorator that prints execution time
def profiler(func):
def do_profile(func, args, kw_args):
n = func.func_name
t0 = time.time()
o = func(*args, **kw_args)
t = time.time() - t0
print_error("[profiler]", n, "%.4f"%t)
return o
return lambda *args, **kw_args: do_profile(func, args, kw_args)
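# Illustrative usage (not part of the original module):
#   @profiler
#   def fetch_history():
#       ...
# Each call then prints something like "[profiler] fetch_history 0.1234" to
# stderr, provided verbosity has been enabled with set_verbosity(True).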
def android_ext_dir():
import jnius
env = jnius.autoclass('android.os.Environment')
return env.getExternalStorageDirectory().getPath()
def android_data_dir():
import jnius
PythonActivity = jnius.autoclass('org.kivy.android.PythonActivity')
return PythonActivity.mActivity.getFilesDir().getPath() + '/data'
def android_headers_path():
path = android_ext_dir() + '/org.electrum_dgb.electrum_dgb/blockchain_headers'
d = os.path.dirname(path)
if not os.path.exists(d):
os.mkdir(d)
return path
def android_check_data_dir():
""" if needed, move old directory to sandbox """
ext_dir = android_ext_dir()
data_dir = android_data_dir()
old_electrum_dir = ext_dir + '/digielectrum'
if not os.path.exists(data_dir) and os.path.exists(old_electrum_dir):
import shutil
new_headers_path = android_headers_path()
old_headers_path = old_electrum_dir + '/blockchain_headers'
if not os.path.exists(new_headers_path) and os.path.exists(old_headers_path):
print_error("Moving headers file to", new_headers_path)
shutil.move(old_headers_path, new_headers_path)
print_error("Moving data to", data_dir)
shutil.move(old_electrum_dir, data_dir)
return data_dir
def get_headers_path(config):
if 'ANDROID_DATA' in os.environ:
return android_headers_path()
else:
return os.path.join(config.path, 'blockchain_headers')
def user_dir():
if 'ANDROID_DATA' in os.environ:
return android_check_data_dir()
elif os.name == 'posix':
return os.path.join(os.environ["HOME"], ".digielectrum")
elif "APPDATA" in os.environ:
return os.path.join(os.environ["APPDATA"], "DigiElectrum")
elif "LOCALAPPDATA" in os.environ:
return os.path.join(os.environ["LOCALAPPDATA"], "DigiElectrum")
else:
#raise Exception("No home directory found in environment variables.")
return
def format_satoshis_plain(x, decimal_point = 8):
    '''Display a satoshi amount scaled down by the given number of decimal
    places. Always uses a '.' as the decimal point and has no thousands
    separator.'''
scale_factor = pow(10, decimal_point)
return "{:.8f}".format(Decimal(x) / scale_factor).rstrip('0').rstrip('.')
def format_satoshis(x, is_diff=False, num_zeros = 0, decimal_point = 8, whitespaces=False):
from locale import localeconv
if x is None:
return 'unknown'
x = int(x) # Some callers pass Decimal
    scale_factor = pow(10, decimal_point)
integer_part = "{:n}".format(int(abs(x) / scale_factor))
if x < 0:
integer_part = '-' + integer_part
elif is_diff:
integer_part = '+' + integer_part
dp = localeconv()['decimal_point']
fract_part = ("{:0" + str(decimal_point) + "}").format(abs(x) % scale_factor)
fract_part = fract_part.rstrip('0')
if len(fract_part) < num_zeros:
fract_part += "0" * (num_zeros - len(fract_part))
result = integer_part + dp + fract_part
if whitespaces:
result += " " * (decimal_point - len(fract_part))
result = " " * (15 - len(result)) + result
return result.decode('utf8')
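# Illustrative examples (assuming a locale whose decimal point is '.'):
#   format_satoshis(150000000)                -> u"1.5"
#   format_satoshis(150000000, is_diff=True)  -> u"+1.5"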
def timestamp_to_datetime(timestamp):
try:
return datetime.fromtimestamp(timestamp)
except:
return None
def format_time(timestamp):
date = timestamp_to_datetime(timestamp)
return date.isoformat(' ')[:-3] if date else _("Unknown")
# Takes a timestamp and returns a string with an approximation of the age
def age(from_date, since_date = None, target_tz=None, include_seconds=False):
if from_date is None:
return "Unknown"
from_date = datetime.fromtimestamp(from_date)
if since_date is None:
since_date = datetime.now(target_tz)
td = time_difference(from_date - since_date, include_seconds)
return td + " ago" if from_date < since_date else "in " + td
def time_difference(distance_in_time, include_seconds):
#distance_in_time = since_date - from_date
distance_in_seconds = int(round(abs(distance_in_time.days * 86400 + distance_in_time.seconds)))
distance_in_minutes = int(round(distance_in_seconds/60))
if distance_in_minutes <= 1:
if include_seconds:
for remainder in [5, 10, 20]:
if distance_in_seconds < remainder:
return "less than %s seconds" % remainder
if distance_in_seconds < 40:
return "half a minute"
elif distance_in_seconds < 60:
return "less than a minute"
else:
return "1 minute"
else:
if distance_in_minutes == 0:
return "less than a minute"
else:
return "1 minute"
elif distance_in_minutes < 45:
return "%s minutes" % distance_in_minutes
elif distance_in_minutes < 90:
return "about 1 hour"
elif distance_in_minutes < 1440:
return "about %d hours" % (round(distance_in_minutes / 60.0))
elif distance_in_minutes < 2880:
return "1 day"
elif distance_in_minutes < 43220:
return "%d days" % (round(distance_in_minutes / 1440))
elif distance_in_minutes < 86400:
return "about 1 month"
elif distance_in_minutes < 525600:
return "%d months" % (round(distance_in_minutes / 43200))
elif distance_in_minutes < 1051200:
return "about 1 year"
else:
return "over %d years" % (round(distance_in_minutes / 525600))
block_explorer_info = {
'DigiExplorer': ('http://www.digiexplorer.info',
{'tx': 'tx', 'addr': 'address'}),
'system default': ('http://www.digiexplorer.info',
{'tx': 'tx', 'addr': 'address'}),
}
def block_explorer(config):
return config.get('block_explorer', 'DigiExplorer')
def block_explorer_tuple(config):
return block_explorer_info.get(block_explorer(config))
def block_explorer_URL(config, kind, item):
be_tuple = block_explorer_tuple(config)
if not be_tuple:
return
kind_str = be_tuple[1].get(kind)
if not kind_str:
return
url_parts = [be_tuple[0], kind_str, item]
return "/".join(url_parts)
# URL decode
#_ud = re.compile('%([0-9a-hA-H]{2})', re.MULTILINE)
#urldecode = lambda x: _ud.sub(lambda m: chr(int(m.group(1), 16)), x)
def parse_URI(uri, on_pr=None):
import bitcoin
from bitcoin import COIN
if ':' not in uri:
if not bitcoin.is_address(uri):
raise BaseException("Not a digibyte address")
return {'address': uri}
u = urlparse.urlparse(uri)
if u.scheme != 'digibyte':
raise BaseException("Not a digibyte URI")
address = u.path
# python for android fails to parse query
if address.find('?') > 0:
address, query = u.path.split('?')
pq = urlparse.parse_qs(query)
else:
pq = urlparse.parse_qs(u.query)
for k, v in pq.items():
if len(v)!=1:
raise Exception('Duplicate Key', k)
out = {k: v[0] for k, v in pq.items()}
if address:
if not bitcoin.is_address(address):
raise BaseException("Invalid digibyte address:" + address)
out['address'] = address
if 'amount' in out:
am = out['amount']
m = re.match('([0-9\.]+)X([0-9])', am)
if m:
k = int(m.group(2)) - 8
amount = Decimal(m.group(1)) * pow( Decimal(10) , k)
else:
amount = Decimal(am) * COIN
out['amount'] = int(amount)
if 'message' in out:
out['message'] = out['message'].decode('utf8')
out['memo'] = out['message']
if 'time' in out:
out['time'] = int(out['time'])
if 'exp' in out:
out['exp'] = int(out['exp'])
if 'sig' in out:
out['sig'] = bitcoin.base_decode(out['sig'], None, base=58).encode('hex')
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if r or (name and sig):
def get_payment_request_thread():
import paymentrequest as pr
if name and sig:
s = pr.serialize_request(out).SerializeToString()
request = pr.PaymentRequest(s)
else:
request = pr.get_payment_request(r)
on_pr(request)
t = threading.Thread(target=get_payment_request_thread)
t.setDaemon(True)
t.start()
return out
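# Illustrative example (assuming '<addr>' stands for a valid DigiByte address):
#   parse_URI('digibyte:<addr>?amount=1.5&message=donation')
#     -> {'address': '<addr>', 'amount': 150000000,
#         'message': u'donation', 'memo': u'donation'}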
def create_URI(addr, amount, message):
import bitcoin
if not bitcoin.is_address(addr):
return ""
query = []
if amount:
query.append('amount=%s'%format_satoshis_plain(amount))
if message:
if type(message) == unicode:
message = message.encode('utf8')
query.append('message=%s'%urllib.quote(message))
p = urlparse.ParseResult(scheme='digibyte', netloc='', path=addr, params='', query='&'.join(query), fragment='')
return urlparse.urlunparse(p)
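# Illustrative example (again with '<addr>' as a placeholder for a valid address):
#   create_URI('<addr>', 150000000, 'donation')
#     -> 'digibyte:<addr>?amount=1.5&message=donation'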
# Python bug (http://bugs.python.org/issue1927) causes raw_input
# to be redirected improperly between stdin/stderr on Unix systems
def raw_input(prompt=None):
if prompt:
sys.stdout.write(prompt)
return builtin_raw_input()
import __builtin__
builtin_raw_input = __builtin__.raw_input
__builtin__.raw_input = raw_input
def parse_json(message):
n = message.find('\n')
if n==-1:
return None, message
try:
j = json.loads( message[0:n] )
except:
j = None
return j, message[n+1:]
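# Illustrative example (not part of the original module):
#   parse_json('{"id": 1}\n{"id": 2}\n')
#     -> ({u'id': 1}, '{"id": 2}\n')
# i.e. the first newline-terminated JSON object is decoded and the remainder
# of the buffer is returned untouched.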
class timeout(Exception):
pass
import socket
import errno
import json
import ssl
import time
class SocketPipe:
def __init__(self, socket):
self.socket = socket
self.message = ''
self.set_timeout(0.1)
self.recv_time = time.time()
def set_timeout(self, t):
self.socket.settimeout(t)
def idle_time(self):
return time.time() - self.recv_time
def get(self):
while True:
response, self.message = parse_json(self.message)
if response is not None:
return response
try:
data = self.socket.recv(1024)
except socket.timeout:
raise timeout
except ssl.SSLError:
raise timeout
except socket.error, err:
if err.errno == 60:
raise timeout
elif err.errno in [11, 35, 10035]:
print_error("socket errno %d (resource temporarily unavailable)"% err.errno)
time.sleep(0.2)
raise timeout
else:
print_error("pipe: socket error", err)
data = ''
except:
traceback.print_exc(file=sys.stderr)
data = ''
if not data: # Connection closed remotely
return None
self.message += data
self.recv_time = time.time()
def send(self, request):
out = json.dumps(request) + '\n'
self._send(out)
def send_all(self, requests):
out = ''.join(map(lambda x: json.dumps(x) + '\n', requests))
self._send(out)
def _send(self, out):
while out:
try:
sent = self.socket.send(out)
out = out[sent:]
except ssl.SSLError as e:
print_error("SSLError:", e)
time.sleep(0.1)
continue
except socket.error as e:
if e[0] in (errno.EWOULDBLOCK,errno.EAGAIN):
print_error("EAGAIN: retrying")
time.sleep(0.1)
continue
elif e[0] in ['timed out', 'The write operation timed out']:
print_error("socket timeout, retry")
time.sleep(0.1)
continue
else:
traceback.print_exc(file=sys.stdout)
raise e
import Queue
class QueuePipe:
def __init__(self, send_queue=None, get_queue=None):
self.send_queue = send_queue if send_queue else Queue.Queue()
self.get_queue = get_queue if get_queue else Queue.Queue()
self.set_timeout(0.1)
def get(self):
try:
return self.get_queue.get(timeout=self.timeout)
except Queue.Empty:
raise timeout
def get_all(self):
responses = []
while True:
try:
r = self.get_queue.get_nowait()
responses.append(r)
except Queue.Empty:
break
return responses
def set_timeout(self, t):
self.timeout = t
def send(self, request):
self.send_queue.put(request)
def send_all(self, requests):
for request in requests:
self.send(request)
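# Illustrative sketch (an assumption about intended use, not original code):
# two QueuePipes can be cross-wired so that one end's send() feeds the other
# end's get():
#   client = QueuePipe()
#   server = QueuePipe(send_queue=client.get_queue, get_queue=client.send_queue)
#   client.send({'method': 'ping'})
#   server.get()   -> {'method': 'ping'}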
class StoreDict(dict):
def __init__(self, config, name):
self.config = config
self.path = os.path.join(self.config.path, name)
self.load()
def load(self):
try:
with open(self.path, 'r') as f:
self.update(json.loads(f.read()))
except:
pass
def save(self):
with open(self.path, 'w') as f:
s = json.dumps(self, indent=4, sort_keys=True)
r = f.write(s)
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
self.save()
def pop(self, key):
if key in self.keys():
dict.pop(self, key)
self.save()
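# Illustrative usage (assuming `config.path` points at the wallet directory):
#   contacts = StoreDict(config, 'contacts')
#   contacts['alias'] = '<addr>'   # written through to <config.path>/contacts
#   contacts.pop('alias')          # removed and the file re-saved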
def check_www_dir(rdir):
import urllib, urlparse, shutil, os
if not os.path.exists(rdir):
os.mkdir(rdir)
index = os.path.join(rdir, 'index.html')
if not os.path.exists(index):
print_error("copying index.html")
src = os.path.join(os.path.dirname(__file__), 'www', 'index.html')
shutil.copy(src, index)
files = [
"https://code.jquery.com/jquery-1.9.1.min.js",
"https://raw.githubusercontent.com/davidshimjs/qrcodejs/master/qrcode.js",
"https://code.jquery.com/ui/1.10.3/jquery-ui.js",
"https://code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css"
]
for URL in files:
path = urlparse.urlsplit(URL).path
filename = os.path.basename(path)
path = os.path.join(rdir, filename)
if not os.path.exists(path):
print_error("downloading ", URL)
urllib.urlretrieve(URL, path)
|
|
import collections
import mock
from babel import dates, Locale
from schema import Schema, And, Use, Or
from django.utils import timezone
from nose.tools import * # noqa PEP8 asserts
from framework.auth import Auth
from osf.models import Comment, NotificationDigest, NotificationSubscription, Guid, OSFUser
from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications
from website.notifications import constants
from website.notifications import emails
from website.notifications import utils
from website import mails, settings
from website.project.signals import contributor_removed, node_deleted
from website.reviews import listeners
from website.util import api_url_for
from website.util import web_url_for
from website import settings
from osf_tests import factories
from tests.base import capture_signals
from tests.base import OsfTestCase, NotificationTestCase
class TestNotificationsModels(OsfTestCase):
def setUp(self):
super(TestNotificationsModels, self).setUp()
# Create project with component
self.user = factories.UserFactory()
self.consolidate_auth = Auth(user=self.user)
self.parent = factories.ProjectFactory(creator=self.user)
self.node = factories.NodeFactory(creator=self.user, parent=self.parent)
def test_has_permission_on_children(self):
non_admin_user = factories.UserFactory()
parent = factories.ProjectFactory()
parent.add_contributor(contributor=non_admin_user, permissions=['read'])
parent.save()
node = factories.NodeFactory(parent=parent, category='project')
sub_component = factories.NodeFactory(parent=node)
sub_component.add_contributor(contributor=non_admin_user)
sub_component.save()
sub_component2 = factories.NodeFactory(parent=node)
assert_true(
node.has_permission_on_children(non_admin_user, 'read')
)
def test_check_user_has_permission_excludes_deleted_components(self):
non_admin_user = factories.UserFactory()
parent = factories.ProjectFactory()
parent.add_contributor(contributor=non_admin_user, permissions=['read'])
parent.save()
node = factories.NodeFactory(parent=parent, category='project')
sub_component = factories.NodeFactory(parent=node)
sub_component.add_contributor(contributor=non_admin_user)
sub_component.is_deleted = True
sub_component.save()
sub_component2 = factories.NodeFactory(parent=node)
assert_false(
node.has_permission_on_children(non_admin_user,'read')
)
def test_check_user_does_not_have_permission_on_private_node_child(self):
non_admin_user = factories.UserFactory()
parent = factories.ProjectFactory()
parent.add_contributor(contributor=non_admin_user, permissions=['read'])
parent.save()
node = factories.NodeFactory(parent=parent, category='project')
sub_component = factories.NodeFactory(parent=node)
assert_false(
node.has_permission_on_children(non_admin_user,'read')
)
def test_check_user_child_node_permissions_false_if_no_children(self):
non_admin_user = factories.UserFactory()
parent = factories.ProjectFactory()
parent.add_contributor(contributor=non_admin_user, permissions=['read'])
parent.save()
node = factories.NodeFactory(parent=parent, category='project')
assert_false(
node.has_permission_on_children(non_admin_user,'read')
)
def test_check_admin_has_permissions_on_private_component(self):
parent = factories.ProjectFactory()
node = factories.NodeFactory(parent=parent, category='project')
sub_component = factories.NodeFactory(parent=node)
assert_true(
node.has_permission_on_children(parent.creator,'read')
)
def test_check_user_private_node_child_permissions_excludes_pointers(self):
user = factories.UserFactory()
parent = factories.ProjectFactory()
pointed = factories.ProjectFactory(creator=user)
parent.add_pointer(pointed, Auth(parent.creator))
parent.save()
assert_false(
parent.has_permission_on_children(user,'read')
)
def test_new_project_creator_is_subscribed(self):
user = factories.UserFactory()
factories.ProjectFactory(creator=user)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
event_types = [sub.event_name for sub in user_subscriptions]
assert_equal(len(user_subscriptions), 2) # subscribed to both file_updated and comments
assert_in('file_updated', event_types)
assert_in('comments', event_types)
def test_new_node_creator_is_not_subscribed(self):
user = factories.UserFactory()
factories.NodeFactory(creator=user)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
assert_equal(len(user_subscriptions), 0)
def test_new_project_creator_is_subscribed_with_global_settings(self):
user = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comments',
user=user,
event_name='global_comments'
).add_user_to_subscription(user, 'email_digest')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_file_updated',
user=user,
event_name='global_file_updated'
).add_user_to_subscription(user, 'none')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_mentions',
user=user,
event_name='global_mentions'
).add_user_to_subscription(user, 'email_digest')
node = factories.ProjectFactory(creator=user)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
event_types = [sub.event_name for sub in user_subscriptions]
file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated')
comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments')
assert_equal(len(user_subscriptions), 5) # subscribed to both node and user settings
assert_in('file_updated', event_types)
assert_in('comments', event_types)
assert_in('global_file_updated', event_types)
assert_in('global_comments', event_types)
assert_in('global_mentions', event_types)
assert_equal(file_updated_subscription.none.count(), 1)
assert_equal(file_updated_subscription.email_transactional.count(), 0)
assert_equal(comments_subscription.email_digest.count(), 1)
assert_equal(comments_subscription.email_transactional.count(), 0)
def test_new_node_creator_is_not_subscribed_with_global_settings(self):
user = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comments',
user=user,
event_name='global_comments'
).add_user_to_subscription(user, 'email_digest')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_file_updated',
user=user,
event_name='global_file_updated'
).add_user_to_subscription(user, 'none')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comment_replies',
user=user,
event_name='global_comment_replies'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_mentions',
user=user,
event_name='global_mentions'
).add_user_to_subscription(user, 'email_transactional')
node = factories.NodeFactory(creator=user)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
event_types = [sub.event_name for sub in user_subscriptions]
assert_equal(len(user_subscriptions), 4) # subscribed to only user settings
assert_in('global_file_updated', event_types)
assert_in('global_comments', event_types)
assert_in('global_comment_replies', event_types)
assert_in('global_mentions', event_types)
    def test_subscribe_user_to_global_notifications(self):
user = factories.UserFactory()
utils.subscribe_user_to_global_notifications(user)
subscription_event_names = list(user.notification_subscriptions.values_list('event_name', flat=True))
for event_name in constants.USER_SUBSCRIPTIONS_AVAILABLE:
assert_in(event_name, subscription_event_names)
def test_new_project_creator_is_subscribed_with_default_global_settings(self):
user = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comments',
user=user,
event_name='global_comments'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_file_updated',
user=user,
event_name='global_file_updated'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comment_replies',
user=user,
event_name='global_comment_replies'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_mentions',
user=user,
event_name='global_mentions'
).add_user_to_subscription(user, 'email_transactional')
node = factories.ProjectFactory(creator=user)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
event_types = [sub.event_name for sub in user_subscriptions]
file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated')
comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments')
assert_equal(len(user_subscriptions), 6) # subscribed to both node and user settings
assert_in('file_updated', event_types)
assert_in('comments', event_types)
assert_in('global_file_updated', event_types)
assert_in('global_comments', event_types)
assert_in('global_comment_replies', event_types)
assert_in('global_mentions', event_types)
assert_equal(file_updated_subscription.email_transactional.count(), 1)
assert_equal(comments_subscription.email_transactional.count(), 1)
def test_new_fork_creator_is_subscribed_with_default_global_settings(self):
user = factories.UserFactory()
project = factories.ProjectFactory(creator=user)
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comments',
user=user,
event_name='global_comments'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_file_updated',
user=user,
event_name='global_file_updated'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_mentions',
user=user,
event_name='global_mentions'
).add_user_to_subscription(user, 'email_transactional')
node = factories.ForkFactory(project=project)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
event_types = [sub.event_name for sub in user_subscriptions]
node_file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated')
node_comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments')
project_file_updated_subscription = NotificationSubscription.objects.get(_id=project._id + '_file_updated')
project_comments_subscription = NotificationSubscription.objects.get(_id=project._id + '_comments')
assert_equal(len(user_subscriptions), 7) # subscribed to project, fork, and user settings
assert_in('file_updated', event_types)
assert_in('comments', event_types)
assert_in('global_file_updated', event_types)
assert_in('global_comments', event_types)
assert_in('global_mentions', event_types)
assert_equal(node_file_updated_subscription.email_transactional.count(), 1)
assert_equal(node_comments_subscription.email_transactional.count(), 1)
assert_equal(project_file_updated_subscription.email_transactional.count(), 1)
assert_equal(project_comments_subscription.email_transactional.count(), 1)
def test_new_node_creator_is_not_subscribed_with_default_global_settings(self):
user = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comments',
user=user,
event_name='global_comments'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_file_updated',
user=user,
event_name='global_file_updated'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_comment_replies',
user=user,
event_name='global_comment_replies'
).add_user_to_subscription(user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=user._id + '_' + 'global_mentions',
user=user,
event_name='global_mentions'
).add_user_to_subscription(user, 'email_transactional')
node = factories.NodeFactory(creator=user)
user_subscriptions = list(utils.get_all_user_subscriptions(user))
event_types = [sub.event_name for sub in user_subscriptions]
assert_equal(len(user_subscriptions), 4) # subscribed to only user settings
assert_in('global_file_updated', event_types)
assert_in('global_comments', event_types)
assert_in('global_comment_replies', event_types)
assert_in('global_mentions', event_types)
def test_contributor_subscribed_when_added_to_project(self):
user = factories.UserFactory()
contributor = factories.UserFactory()
project = factories.ProjectFactory(creator=user)
project.add_contributor(contributor=contributor)
contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor))
event_types = [sub.event_name for sub in contributor_subscriptions]
assert_equal(len(contributor_subscriptions), 2)
assert_in('file_updated', event_types)
assert_in('comments', event_types)
def test_contributor_subscribed_when_added_to_component(self):
user = factories.UserFactory()
contributor = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=contributor._id + '_' + 'global_comments',
user=contributor,
event_name='global_comments'
).add_user_to_subscription(contributor, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=contributor._id + '_' + 'global_file_updated',
user=contributor,
event_name='global_file_updated'
).add_user_to_subscription(contributor, 'email_transactional')
node = factories.NodeFactory(creator=user)
node.add_contributor(contributor=contributor)
contributor_subscriptions = list(utils.get_all_user_subscriptions(contributor))
event_types = [sub.event_name for sub in contributor_subscriptions]
file_updated_subscription = NotificationSubscription.objects.get(_id=node._id + '_file_updated')
comments_subscription = NotificationSubscription.objects.get(_id=node._id + '_comments')
assert_equal(len(contributor_subscriptions), 4) # subscribed to both node and user settings
assert_in('file_updated', event_types)
assert_in('comments', event_types)
assert_in('global_file_updated', event_types)
assert_in('global_comments', event_types)
assert_equal(file_updated_subscription.email_transactional.count(), 1)
assert_equal(comments_subscription.email_transactional.count(), 1)
def test_unregistered_contributor_not_subscribed_when_added_to_project(self):
user = factories.UserFactory()
unregistered_contributor = factories.UnregUserFactory()
project = factories.ProjectFactory(creator=user)
project.add_contributor(contributor=unregistered_contributor)
contributor_subscriptions = list(utils.get_all_user_subscriptions(unregistered_contributor))
assert_equal(len(contributor_subscriptions), 0)
class TestSubscriptionView(OsfTestCase):
def setUp(self):
super(TestSubscriptionView, self).setUp()
self.node = factories.NodeFactory()
self.user = self.node.creator
def test_create_new_subscription(self):
payload = {
'id': self.node._id,
'event': 'comments',
'notification_type': 'email_transactional'
}
url = api_url_for('configure_subscription')
self.app.post_json(url, payload, auth=self.node.creator.auth)
# check that subscription was created
event_id = self.node._id + '_' + 'comments'
s = NotificationSubscription.objects.get(_id=event_id)
# check that user was added to notification_type field
assert_equal(payload['id'], s.owner._id)
assert_equal(payload['event'], s.event_name)
assert_in(self.node.creator, getattr(s, payload['notification_type']).all())
# change subscription
new_payload = {
'id': self.node._id,
'event': 'comments',
'notification_type': 'email_digest'
}
url = api_url_for('configure_subscription')
self.app.post_json(url, new_payload, auth=self.node.creator.auth)
s.reload()
assert_false(self.node.creator in getattr(s, payload['notification_type']).all())
assert_in(self.node.creator, getattr(s, new_payload['notification_type']).all())
def test_adopt_parent_subscription_default(self):
payload = {
'id': self.node._id,
'event': 'comments',
'notification_type': 'adopt_parent'
}
url = api_url_for('configure_subscription')
self.app.post_json(url, payload, auth=self.node.creator.auth)
event_id = self.node._id + '_' + 'comments'
        # confirm that no subscription was created, since the parent's default subscription is adopted
s = NotificationSubscription.objects.filter(_id=event_id).count()
assert_equal(0, s)
def test_change_subscription_to_adopt_parent_subscription_removes_user(self):
payload = {
'id': self.node._id,
'event': 'comments',
'notification_type': 'email_transactional'
}
url = api_url_for('configure_subscription')
self.app.post_json(url, payload, auth=self.node.creator.auth)
# check that subscription was created
event_id = self.node._id + '_' + 'comments'
s = NotificationSubscription.objects.get(_id=event_id)
# change subscription to adopt_parent
new_payload = {
'id': self.node._id,
'event': 'comments',
'notification_type': 'adopt_parent'
}
url = api_url_for('configure_subscription')
self.app.post_json(url, new_payload, auth=self.node.creator.auth)
s.reload()
# assert that user is removed from the subscription entirely
for n in constants.NOTIFICATION_TYPES:
assert_false(self.node.creator in getattr(s, n).all())
def test_configure_subscription_adds_node_id_to_notifications_configured(self):
project = factories.ProjectFactory(creator=self.user)
assert_false(project._id in self.user.notifications_configured)
payload = {
'id': project._id,
'event': 'comments',
'notification_type': 'email_digest'
}
url = api_url_for('configure_subscription')
self.app.post_json(url, payload, auth=project.creator.auth)
self.user.reload()
assert_true(project._id in self.user.notifications_configured)
class TestRemoveContributor(OsfTestCase):
def setUp(self):
super(OsfTestCase, self).setUp()
self.project = factories.ProjectFactory()
self.contributor = factories.UserFactory()
self.project.add_contributor(contributor=self.contributor, permissions=['read'])
self.project.save()
self.subscription = NotificationSubscription.objects.get(
node=self.project,
_id=self.project._id + '_comments'
)
self.node = factories.NodeFactory(parent=self.project)
self.node.add_contributor(contributor=self.project.creator, permissions=['read', 'write', 'admin'])
self.node.save()
self.node_subscription = NotificationSubscription.objects.get(
_id=self.node._id + '_comments',
node=self.node
)
self.node_subscription.add_user_to_subscription(self.node.creator, 'email_transactional')
def test_removed_non_admin_contributor_is_removed_from_subscriptions(self):
assert_in(self.contributor, self.subscription.email_transactional.all())
self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator))
assert_not_in(self.contributor, self.project.contributors.all())
self.subscription.reload()
assert_not_in(self.contributor, self.subscription.email_transactional.all())
def test_removed_non_parent_admin_contributor_is_removed_from_subscriptions(self):
assert_in(self.node.creator, self.node_subscription.email_transactional.all())
self.node.remove_contributor(self.node.creator, auth=Auth(self.node.creator))
assert_not_in(self.node.creator, self.node.contributors.all())
self.node_subscription.reload()
assert_not_in(self.node.creator, self.node_subscription.email_transactional.all())
def test_removed_contributor_admin_on_parent_not_removed_from_node_subscription(self):
# Admin on parent project is removed as a contributor on a component. Check
# that admin is not removed from component subscriptions, as the admin
# now has read-only access.
assert_in(self.project.creator, self.node_subscription.email_transactional.all())
self.node.remove_contributor(self.project.creator, auth=Auth(self.project.creator))
assert_not_in(self.project.creator, self.node.contributors.all())
assert_in(self.project.creator, self.node_subscription.email_transactional.all())
def test_remove_contributor_signal_called_when_contributor_is_removed(self):
with capture_signals() as mock_signals:
self.project.remove_contributor(self.contributor, auth=Auth(self.project.creator))
assert_equal(mock_signals.signals_sent(), set([contributor_removed]))
class TestRemoveNodeSignal(OsfTestCase):
def test_node_subscriptions_and_backrefs_removed_when_node_is_deleted(self):
project = factories.ProjectFactory()
s = NotificationSubscription.objects.filter(email_transactional=project.creator)
assert_equal(s.count(), 2)
with capture_signals() as mock_signals:
project.remove_node(auth=Auth(project.creator))
assert_true(project.is_deleted)
assert_equal(mock_signals.signals_sent(), set([node_deleted]))
s = NotificationSubscription.objects.filter(email_transactional=project.creator)
assert_equal(s.count(), 0)
with assert_raises(NotificationSubscription.DoesNotExist):
NotificationSubscription.objects.get(node=project)
def list_or_dict(data):
    # Generator that yields only the lists or dicts contained in a list or dict
if isinstance(data, dict):
for key in data:
if isinstance(data[key], dict) or isinstance(data[key], list):
yield data[key]
elif isinstance(data, list):
for item in data:
if isinstance(item, dict) or isinstance(item, list):
yield item
def has(data, sub_data):
# Recursive approach to look for a subset of data in data.
# WARNING: Don't use on huge structures
# :param data: Data structure
# :param sub_data: subset being checked for
# :return: True or False
try:
(item for item in data if item == sub_data).next()
return True
except StopIteration:
lists_and_dicts = list_or_dict(data)
for item in lists_and_dicts:
if has(item, sub_data):
return True
return False
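# Illustrative examples (not part of the original tests):
#   has({'a': [{'b': 1}], 'c': 2}, {'b': 1})  -> True
#   has({'a': [{'b': 1}], 'c': 2}, {'d': 3})  -> False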
def subscription_schema(project, structure, level=0):
# builds a schema from a list of nodes and events
# :param project: validation type
# :param structure: list of nodes (another list) and events
# :return: schema
sub_list = []
for item in list_or_dict(structure):
sub_list.append(subscription_schema(project, item, level=level+1))
sub_list.append(event_schema(level))
node_schema = {
'node': {
'id': Use(type(project._id), error="node_id{}".format(level)),
'title': Use(type(project.title), error="node_title{}".format(level)),
'url': Use(type(project.url), error="node_{}".format(level))
},
'kind': And(str, Use(lambda s: s in ('node', 'folder'),
error="kind didn't match node or folder {}".format(level))),
'nodeType': Use(lambda s: s in ('project', 'component'), error='nodeType not project or component'),
'category': Use(lambda s: s in settings.NODE_CATEGORY_MAP, error='category not in settings.NODE_CATEGORY_MAP'),
'permissions': {
'view': Use(lambda s: s in (True, False), error='view permissions is not True/False')
},
'children': sub_list
}
if level == 0:
return Schema([node_schema])
return node_schema
def event_schema(level=None):
return {
'event': {
'title': And(Use(str, error="event_title{} not a string".format(level)),
Use(lambda s: s in constants.NOTIFICATION_TYPES,
error="event_title{} not in list".format(level))),
'description': And(Use(str, error="event_desc{} not a string".format(level)),
Use(lambda s: s in constants.NODE_SUBSCRIPTIONS_AVAILABLE,
error="event_desc{} not in list".format(level))),
'notificationType': And(str, Or('adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)),
'parent_notification_type': Or(None, 'adopt_parent', lambda s: s in constants.NOTIFICATION_TYPES)
},
'kind': 'event',
'children': And(list, lambda l: len(l) == 0)
}
class TestNotificationUtils(OsfTestCase):
def setUp(self):
super(TestNotificationUtils, self).setUp()
self.user = factories.UserFactory()
self.project = factories.ProjectFactory(creator=self.user)
self.project_subscription = NotificationSubscription.objects.get(
node=self.project,
_id=self.project._id + '_comments',
event_name='comments'
)
self.user.notifications_configured[self.project._id] = True
self.user.save()
self.node = factories.NodeFactory(parent=self.project, creator=self.user)
self.node_comments_subscription = factories.NotificationSubscriptionFactory(
_id=self.node._id + '_' + 'comments',
node=self.node,
event_name='comments'
)
self.node_comments_subscription.save()
self.node_comments_subscription.email_transactional.add(self.user)
self.node_comments_subscription.save()
self.node_subscription = list(NotificationSubscription.objects.filter(node=self.node))
self.user_subscription = [factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'comment_replies',
user=self.user,
event_name='comment_replies'
),
factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'global_comment',
user=self.user,
event_name='global_comment'
),
factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'global_file_updated',
user=self.user,
event_name='global_file_updated'
)]
for x in self.user_subscription:
x.save()
for x in self.user_subscription:
x.email_transactional.add(self.user)
for x in self.user_subscription:
x.save()
def test_to_subscription_key(self):
key = utils.to_subscription_key('xyz', 'comments')
assert_equal(key, 'xyz_comments')
def test_from_subscription_key(self):
parsed_key = utils.from_subscription_key('xyz_comment_replies')
assert_equal(parsed_key, {
'uid': 'xyz',
'event': 'comment_replies'
})
def test_get_all_user_subscriptions(self):
user_subscriptions = list(utils.get_all_user_subscriptions(self.user))
assert_in(self.project_subscription, user_subscriptions)
assert_in(self.node_comments_subscription, user_subscriptions)
for x in self.user_subscription:
assert_in(x, user_subscriptions)
assert_equal(len(user_subscriptions), 6)
def test_get_all_node_subscriptions_given_user_subscriptions(self):
user_subscriptions = utils.get_all_user_subscriptions(self.user)
node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node,
user_subscriptions=user_subscriptions)]
expected_node_subscription_ids = [x._id for x in self.node_subscription]
assert_items_equal(node_subscription_ids, expected_node_subscription_ids)
def test_get_all_node_subscriptions_given_user_and_node(self):
node_subscription_ids = [x._id for x in utils.get_all_node_subscriptions(self.user, self.node)]
expected_node_subscription_ids = [x._id for x in self.node_subscription]
assert_items_equal(node_subscription_ids, expected_node_subscription_ids)
def test_get_configured_project_ids_does_not_return_user_or_node_ids(self):
configured_nodes = utils.get_configured_projects(self.user)
configured_ids = [n._id for n in configured_nodes]
# No duplicates!
assert_equal(len(configured_nodes), 1)
assert_in(self.project._id, configured_ids)
assert_not_in(self.node._id, configured_ids)
assert_not_in(self.user._id, configured_ids)
def test_get_configured_project_ids_excludes_deleted_projects(self):
project = factories.ProjectFactory()
project.is_deleted = True
project.save()
assert_not_in(project, utils.get_configured_projects(self.user))
def test_get_configured_project_ids_excludes_node_with_project_category(self):
node = factories.NodeFactory(parent=self.project, category='project')
assert_not_in(node, utils.get_configured_projects(self.user))
def test_get_configured_project_ids_includes_top_level_private_projects_if_subscriptions_on_node(self):
private_project = factories.ProjectFactory()
node = factories.NodeFactory(parent=private_project)
node_comments_subscription = factories.NotificationSubscriptionFactory(
_id=node._id + '_' + 'comments',
node=node,
event_name='comments'
)
node_comments_subscription.save()
node_comments_subscription.email_transactional.add(node.creator)
node_comments_subscription.save()
node.creator.notifications_configured[node._id] = True
node.creator.save()
configured_project_nodes = utils.get_configured_projects(node.creator)
assert_in(private_project, configured_project_nodes)
def test_get_configured_project_ids_excludes_private_projects_if_no_subscriptions_on_node(self):
user = factories.UserFactory()
private_project = factories.ProjectFactory()
node = factories.NodeFactory(parent=private_project)
node.add_contributor(user)
utils.remove_contributor_from_subscriptions(node, user)
configured_project_nodes = utils.get_configured_projects(user)
assert_not_in(private_project, configured_project_nodes)
def test_get_parent_notification_type(self):
nt = utils.get_parent_notification_type(self.node, 'comments', self.user)
assert_equal(nt, 'email_transactional')
def test_get_parent_notification_type_no_parent_subscriptions(self):
node = factories.NodeFactory()
nt = utils.get_parent_notification_type(node._id, 'comments', self.user)
assert_equal(nt, None)
def test_get_parent_notification_type_no_parent(self):
project = factories.ProjectFactory()
nt = utils.get_parent_notification_type(project._id, 'comments', self.user)
assert_equal(nt, None)
def test_get_parent_notification_type_handles_user_id(self):
nt = utils.get_parent_notification_type(self.user._id, 'comments', self.user)
assert_equal(nt, None)
def test_format_data_project_settings(self):
data = utils.format_data(self.user, [self.project])
parent_event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': []
}
child_event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': 'email_transactional'
},
'kind': 'event',
'children': []
}
expected_new = [['event'], 'event']
schema = subscription_schema(self.project, expected_new)
assert schema.validate(data)
assert has(data, parent_event)
assert has(data, child_event)
def test_format_data_node_settings(self):
data = utils.format_data(self.user, [self.node])
event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': 'email_transactional'
},
'kind': 'event',
'children': []
}
schema = subscription_schema(self.project, ['event'])
assert schema.validate(data)
assert has(data, event)
def test_format_includes_admin_view_only_component_subscriptions(self):
# Test private components in which parent project admins are not contributors still appear in their
# notifications settings.
node = factories.NodeFactory(parent=self.project)
data = utils.format_data(self.user, [self.project])
event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'adopt_parent',
'parent_notification_type': 'email_transactional'
},
'kind': 'event',
'children': [],
}
schema = subscription_schema(self.project, ['event', ['event'], ['event']])
assert schema.validate(data)
assert has(data, event)
def test_format_data_excludes_pointers(self):
project = factories.ProjectFactory()
pointed = factories.ProjectFactory()
project.add_pointer(pointed, Auth(project.creator))
project.creator.notifications_configured[project._id] = True
project.creator.save()
configured_project_nodes = utils.get_configured_projects(project.creator)
data = utils.format_data(project.creator, configured_project_nodes)
event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': [],
}
schema = subscription_schema(self.project, ['event'])
assert schema.validate(data)
assert has(data, event)
def test_format_data_user_subscriptions_includes_private_parent_if_configured_children(self):
private_project = factories.ProjectFactory()
node = factories.NodeFactory(parent=private_project)
node_comments_subscription = factories.NotificationSubscriptionFactory(
_id=node._id + '_' + 'comments',
node=node,
event_name='comments'
)
node_comments_subscription.save()
node_comments_subscription.email_transactional.add(node.creator)
node_comments_subscription.save()
node.creator.notifications_configured[node._id] = True
node.creator.save()
configured_project_nodes = utils.get_configured_projects(node.creator)
data = utils.format_data(node.creator, configured_project_nodes)
event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': [],
}
schema = subscription_schema(self.project, ['event', ['event']])
assert schema.validate(data)
assert has(data, event)
def test_format_data_user_subscriptions_if_children_points_to_parent(self):
private_project = factories.ProjectFactory(creator=self.user)
node = factories.NodeFactory(parent=private_project, creator=self.user)
node.add_pointer(private_project, Auth(self.user))
node.save()
node_comments_subscription = factories.NotificationSubscriptionFactory(
_id=node._id + '_' + 'comments',
node=node,
event_name='comments'
)
node_comments_subscription.save()
node_comments_subscription.email_transactional.add(node.creator)
node_comments_subscription.save()
node.creator.notifications_configured[node._id] = True
node.creator.save()
configured_project_nodes = utils.get_configured_projects(node.creator)
data = utils.format_data(node.creator, configured_project_nodes)
event = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': [],
}
schema = subscription_schema(self.project, ['event', ['event']])
assert schema.validate(data)
assert has(data, event)
def test_format_user_subscriptions(self):
data = utils.format_user_subscriptions(self.user)
expected = [
{
'event': {
'title': 'global_file_updated',
'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_file_updated'],
'notificationType': 'email_transactional',
'parent_notification_type': None,
},
'kind': 'event',
'children': []
}, {
'event': {
'title': 'global_comment_replies',
'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': []
}, {
'event': {
'title': 'global_mentions',
'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_mentions'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': []
}, {
'event': {
'title': 'global_comments',
'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comments'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': []
}, {
'event': {
'title': 'global_reviews',
'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_reviews'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': []
}
]
assert_items_equal(data, expected)
def test_get_global_notification_type(self):
        notification_type = utils.get_global_notification_type(self.user_subscription[1], self.user)
assert_equal('email_transactional', notification_type)
def test_check_if_all_global_subscriptions_are_none_false(self):
all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user)
assert_false(all_global_subscriptions_none)
def test_check_if_all_global_subscriptions_are_none_true(self):
for x in self.user_subscription:
x.none.add(self.user)
x.email_transactional.remove(self.user)
for x in self.user_subscription:
x.save()
all_global_subscriptions_none = utils.check_if_all_global_subscriptions_are_none(self.user)
assert_true(all_global_subscriptions_none)
def test_format_data_user_settings(self):
data = utils.format_user_and_project_subscriptions(self.user)
expected = [
{
'node': {
'id': self.user._id,
'title': 'Default Notification Settings',
'help': 'These are default settings for new projects you create or are added to. Modifying these settings will not modify settings on existing projects.'
},
'kind': 'heading',
'children': utils.format_user_subscriptions(self.user)
},
{
'node': {
'help': 'These are settings for each of your projects. Modifying these settings will only modify the settings for the selected project.',
'id': '',
'title': 'Project Notifications'
},
'kind': 'heading',
'children': utils.format_data(self.user, utils.get_configured_projects(self.user))
}]
assert_equal(data, expected)
def test_serialize_user_level_event(self):
user_subscriptions = [x for x in utils.get_all_user_subscriptions(self.user)]
user_subscription = None
for subscription in user_subscriptions:
if 'global_comment_replies' in getattr(subscription, 'event_name'):
user_subscription = subscription
data = utils.serialize_event(self.user, event_description='global_comment_replies',
subscription=user_subscription)
expected = {
'event': {
'title': 'global_comment_replies',
'description': constants.USER_SUBSCRIPTIONS_AVAILABLE['global_comment_replies'],
'notificationType': 'email_transactional',
'parent_notification_type': None
},
'kind': 'event',
'children': []
}
assert_equal(data, expected)
def test_serialize_node_level_event(self):
node_subscriptions = [x for x in utils.get_all_node_subscriptions(self.user, self.node)]
data = utils.serialize_event(user=self.user, event_description='comments',
subscription=node_subscriptions[0], node=self.node)
expected = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'email_transactional',
'parent_notification_type': 'email_transactional'
},
'kind': 'event',
'children': [],
}
assert_equal(data, expected)
def test_serialize_node_level_event_that_adopts_parent_settings(self):
user = factories.UserFactory()
self.project.add_contributor(contributor=user, permissions=['read'])
self.project.save()
self.node.add_contributor(contributor=user, permissions=['read'])
self.node.save()
# set up how it was in original test - remove existing subscriptions
utils.remove_contributor_from_subscriptions(self.node, user)
node_subscriptions = utils.get_all_node_subscriptions(user, self.node)
data = utils.serialize_event(user=user, event_description='comments',
subscription=node_subscriptions, node=self.node)
expected = {
'event': {
'title': 'comments',
'description': constants.NODE_SUBSCRIPTIONS_AVAILABLE['comments'],
'notificationType': 'adopt_parent',
'parent_notification_type': 'email_transactional'
},
'kind': 'event',
'children': [],
}
assert_equal(data, expected)
class TestNotificationsDict(OsfTestCase):
def test_notifications_dict_add_message_returns_proper_format(self):
d = utils.NotificationsDict()
message = {
'message': 'Freddie commented on your project',
'timestamp': timezone.now()
}
message2 = {
'message': 'Mercury commented on your component',
'timestamp': timezone.now()
}
d.add_message(['project'], message)
d.add_message(['project', 'node'], message2)
expected = {
'messages': [],
'children': collections.defaultdict(
utils.NotificationsDict, {
'project': {
'messages': [message],
'children': collections.defaultdict(utils.NotificationsDict, {
'node': {
'messages': [message2],
'children': collections.defaultdict(utils.NotificationsDict, {})
}
})
}
}
)}
assert_equal(d, expected)
class TestCompileSubscriptions(NotificationTestCase):
def setUp(self):
super(TestCompileSubscriptions, self).setUp()
self.user_1 = factories.UserFactory()
self.user_2 = factories.UserFactory()
self.user_3 = factories.UserFactory()
self.user_4 = factories.UserFactory()
# Base project + 1 project shared with 3 + 1 project shared with 2
self.base_project = factories.ProjectFactory(is_public=False, creator=self.user_1)
self.shared_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1)
self.private_node = factories.NodeFactory(parent=self.base_project, is_public=False, creator=self.user_1)
# Adding contributors
for node in [self.base_project, self.shared_node, self.private_node]:
node.add_contributor(self.user_2, permissions='admin')
self.base_project.add_contributor(self.user_3, permissions='write')
self.shared_node.add_contributor(self.user_3, permissions='write')
# Setting basic subscriptions
self.base_sub = factories.NotificationSubscriptionFactory(
_id=self.base_project._id + '_file_updated',
node=self.base_project,
event_name='file_updated'
)
self.base_sub.save()
self.shared_sub = factories.NotificationSubscriptionFactory(
_id=self.shared_node._id + '_file_updated',
node=self.shared_node,
event_name='file_updated'
)
self.shared_sub.save()
self.private_sub = factories.NotificationSubscriptionFactory(
_id=self.private_node._id + '_file_updated',
node=self.private_node,
event_name='file_updated'
)
self.private_sub.save()
def test_no_subscription(self):
node = factories.NodeFactory()
result = emails.compile_subscriptions(node, 'file_updated')
assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result)
def test_no_subscribers(self):
node = factories.NodeFactory()
node_sub = factories.NotificationSubscriptionFactory(
_id=node._id + '_file_updated',
node=node,
event_name='file_updated'
)
node_sub.save()
result = emails.compile_subscriptions(node, 'file_updated')
assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result)
def test_creator_subbed_parent(self):
# Basic sub check
self.base_sub.email_transactional.add(self.user_1)
self.base_sub.save()
result = emails.compile_subscriptions(self.base_project, 'file_updated')
assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result)
def test_creator_subbed_to_parent_from_child(self):
# checks the parent sub is the one to appear without a child sub
self.base_sub.email_transactional.add(self.user_1)
self.base_sub.save()
result = emails.compile_subscriptions(self.shared_node, 'file_updated')
assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result)
def test_creator_subbed_to_both_from_child(self):
# checks that only one sub is in the list.
self.base_sub.email_transactional.add(self.user_1)
self.base_sub.save()
self.shared_sub.email_transactional.add(self.user_1)
self.shared_sub.save()
result = emails.compile_subscriptions(self.shared_node, 'file_updated')
assert_equal({'email_transactional': [self.user_1._id], 'none': [], 'email_digest': []}, result)
def test_creator_diff_subs_to_both_from_child(self):
# Check that the child node sub overrides the parent node sub
self.base_sub.email_transactional.add(self.user_1)
self.base_sub.save()
self.shared_sub.none.add(self.user_1)
self.shared_sub.save()
result = emails.compile_subscriptions(self.shared_node, 'file_updated')
assert_equal({'email_transactional': [], 'none': [self.user_1._id], 'email_digest': []}, result)
def test_user_wo_permission_on_child_node_not_listed(self):
        # Tests whether a user without permission gets an email about a node they cannot see.
self.base_sub.email_transactional.add(self.user_3)
self.base_sub.save()
result = emails.compile_subscriptions(self.private_node, 'file_updated')
assert_equal({'email_transactional': [], 'none': [], 'email_digest': []}, result)
def test_several_nodes_deep(self):
self.base_sub.email_transactional.add(self.user_1)
self.base_sub.save()
node2 = factories.NodeFactory(parent=self.shared_node)
node3 = factories.NodeFactory(parent=node2)
node4 = factories.NodeFactory(parent=node3)
node5 = factories.NodeFactory(parent=node4)
subs = emails.compile_subscriptions(node5, 'file_updated')
assert_equal(subs, {'email_transactional': [self.user_1._id], 'email_digest': [], 'none': []})
def test_several_nodes_deep_precedence(self):
self.base_sub.email_transactional.add(self.user_1)
self.base_sub.save()
node2 = factories.NodeFactory(parent=self.shared_node)
node3 = factories.NodeFactory(parent=node2)
node4 = factories.NodeFactory(parent=node3)
node4_subscription = factories.NotificationSubscriptionFactory(
_id=node4._id + '_file_updated',
node=node4,
event_name='file_updated'
)
node4_subscription.save()
node4_subscription.email_digest.add(self.user_1)
node4_subscription.save()
node5 = factories.NodeFactory(parent=node4)
subs = emails.compile_subscriptions(node5, 'file_updated')
assert_equal(subs, {'email_transactional': [], 'email_digest': [self.user_1._id], 'none': []})
class TestMoveSubscription(NotificationTestCase):
def setUp(self):
super(TestMoveSubscription, self).setUp()
self.blank = {key: [] for key in constants.NOTIFICATION_TYPES} # For use where it is blank.
self.user_1 = factories.AuthUserFactory()
self.auth = Auth(user=self.user_1)
self.user_2 = factories.AuthUserFactory()
self.user_3 = factories.AuthUserFactory()
self.user_4 = factories.AuthUserFactory()
self.project = factories.ProjectFactory(creator=self.user_1)
self.private_node = factories.NodeFactory(parent=self.project, is_public=False, creator=self.user_1)
self.sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_file_updated',
node=self.project,
event_name='file_updated'
)
self.sub.email_transactional.add(self.user_1)
self.sub.save()
self.file_sub = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_xyz42_file_updated',
node=self.project,
event_name='xyz42_file_updated'
)
self.file_sub.save()
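    # The tests below exercise utils.separate_users, utils.users_to_remove and
    # utils.move_subscription, which together relocate a file subscription from
    # one node to another and report the users who lose access in the process.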
def test_separate_users(self):
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.private_node.save()
subbed, removed = utils.separate_users(
self.private_node, [self.user_2._id, self.user_3._id, self.user_4._id]
)
assert_equal([self.user_2._id, self.user_3._id], subbed)
assert_equal([self.user_4._id], removed)
def test_event_subs_same(self):
self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4)
self.file_sub.save()
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.private_node.save()
results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []}, results)
def test_event_nodes_same(self):
self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4)
self.file_sub.save()
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.private_node.save()
results = utils.users_to_remove('xyz42_file_updated', self.project, self.project)
assert_equal({'email_transactional': [], 'email_digest': [], 'none': []}, results)
def test_move_sub(self):
        # Tests that the old subscription is replaced with the new one.
utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
self.file_sub.reload()
assert_equal('abc42_file_updated', self.file_sub.event_name)
assert_equal(self.private_node, self.file_sub.owner)
assert_equal(self.private_node._id + '_abc42_file_updated', self.file_sub._id)
def test_move_sub_with_none(self):
# Attempt to reproduce an error that is seen when moving files
self.project.add_contributor(self.user_2, permissions=['write', 'read'], auth=self.auth)
self.project.save()
self.file_sub.none.add(self.user_2)
self.file_sub.save()
results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
assert_equal({'email_transactional': [], 'email_digest': [], 'none': [self.user_2._id]}, results)
def test_remove_one_user(self):
# One user doesn't have permissions on the node the sub is moved to. Should be listed.
self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4)
self.file_sub.save()
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.private_node.save()
results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [], 'none': []}, results)
def test_remove_one_user_warn_another(self):
# Two users do not have permissions on new node, but one has a project sub. Both should be listed.
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.save()
self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.project.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
self.file_sub.email_transactional.add(self.user_2, self.user_4)
results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
assert_equal({'email_transactional': [self.user_4._id], 'email_digest': [self.user_3._id], 'none': []}, results)
assert_true(self.sub.email_digest.filter(id=self.user_3.id).exists()) # Is not removed from the project subscription.
def test_warn_user(self):
# One user with a project sub does not have permission on new node. User should be listed.
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.save()
self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.project.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
self.file_sub.email_transactional.add(self.user_2)
results = utils.users_to_remove('xyz42_file_updated', self.project, self.private_node)
utils.move_subscription(results, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
assert_equal({'email_transactional': [], 'email_digest': [self.user_3._id], 'none': []}, results)
assert_in(self.user_3, self.sub.email_digest.all()) # Is not removed from the project subscription.
def test_user_node_subbed_and_not_removed(self):
self.project.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.project.save()
self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.private_node.save()
self.sub.email_digest.add(self.user_3)
self.sub.save()
utils.move_subscription(self.blank, 'xyz42_file_updated', self.project, 'abc42_file_updated', self.private_node)
assert_false(self.file_sub.email_digest.filter().exists())
# Regression test for commit ea15186
def test_garrulous_event_name(self):
self.file_sub.email_transactional.add(self.user_2, self.user_3, self.user_4)
self.file_sub.save()
self.private_node.add_contributor(self.user_2, permissions=['admin', 'write', 'read'], auth=self.auth)
self.private_node.add_contributor(self.user_3, permissions=['write', 'read'], auth=self.auth)
self.private_node.save()
results = utils.users_to_remove('complicated/path_to/some/file/ASDFASDF.txt_file_updated', self.project, self.private_node)
assert_equal({'email_transactional': [], 'email_digest': [], 'none': []}, results)
class TestSendEmails(NotificationTestCase):
def setUp(self):
super(TestSendEmails, self).setUp()
self.user = factories.AuthUserFactory()
self.project = factories.ProjectFactory()
self.project_subscription = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_' + 'comments',
node=self.project,
event_name='comments'
)
self.project_subscription.save()
self.project_subscription.email_transactional.add(self.project.creator)
self.project_subscription.save()
self.node = factories.NodeFactory(parent=self.project)
self.node_subscription = factories.NotificationSubscriptionFactory(
_id=self.node._id + '_comments',
node=self.node,
event_name='comments'
)
self.node_subscription.save()
self.user_subscription = factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'global_comment_replies',
node=self.node,
event_name='global_comment_replies'
)
self.user_subscription.email_transactional.add(self.user)
self.user_subscription.save()
@mock.patch('website.notifications.emails.store_emails')
def test_notify_no_subscription(self, mock_store):
node = factories.ProjectFactory()
user = factories.AuthUserFactory()
emails.notify('comments', user=user, node=node, timestamp=timezone.now())
assert_false(mock_store.called)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_no_subscribers(self, mock_store):
node = factories.NodeFactory()
node_subscription = factories.NotificationSubscriptionFactory(
_id=node._id + '_comments',
node=node,
event_name='comments'
)
node_subscription.save()
emails.notify('comments', user=self.user, node=node, timestamp=timezone.now())
assert_false(mock_store.called)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_sends_with_correct_args(self, mock_store):
time_now = timezone.now()
emails.notify('comments', user=self.user, node=self.node, timestamp=time_now)
assert_true(mock_store.called)
mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', self.user,
self.node, time_now)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_does_not_send_to_exclude(self, mock_store):
time_now = timezone.now()
        context = {'exclude': [self.project.creator._id]}
emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, **context)
assert_equal(mock_store.call_count, 0)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_does_not_send_to_users_subscribed_to_none(self, mock_store):
node = factories.NodeFactory()
user = factories.UserFactory()
node_subscription = factories.NotificationSubscriptionFactory(
_id=node._id + '_comments',
node=node,
event_name='comments'
)
node_subscription.save()
node_subscription.none.add(user)
node_subscription.save()
sent = emails.notify('comments', user=user, node=node, timestamp=timezone.now())
assert_false(mock_store.called)
assert_equal(sent, [])
@mock.patch('website.notifications.emails.store_emails')
def test_notify_mentions_does_not_send_to_mentioned_users_subscribed_to_none(self, mock_store):
node = factories.NodeFactory()
user = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=user._id + '_global_mentions',
node=self.node,
event_name='global_mentions'
).add_user_to_subscription(user, 'none')
time_now = timezone.now()
sent = emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id])
assert_false(mock_store.called)
assert_equal(sent, [])
@mock.patch('website.notifications.emails.store_emails')
def test_notify_mentions_does_send_to_mentioned_users(self, mock_store):
user = factories.UserFactory()
factories.NotificationSubscriptionFactory(
_id=user._id + '_global_mentions',
node=self.node,
event_name='global_mentions'
).add_user_to_subscription(user, 'email_transactional')
node = factories.ProjectFactory(creator=user)
time_now = timezone.now()
emails.notify_mentions('global_mentions', user=user, node=node, timestamp=time_now, new_mentions=[user._id])
assert_true(mock_store.called)
mock_store.assert_called_with([node.creator._id], 'email_transactional', 'global_mentions', user,
node, time_now, None, new_mentions=[node.creator._id])
@mock.patch('website.notifications.emails.store_emails')
def test_notify_sends_comment_reply_event_if_comment_is_direct_reply(self, mock_store):
time_now = timezone.now()
emails.notify('comments', user=self.user, node=self.node, timestamp=time_now, target_user=self.project.creator)
mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comment_replies',
self.user, self.node, time_now, target_user=self.project.creator)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_sends_comment_reply_when_target_user_is_subscribed_via_user_settings(self, mock_store):
time_now = timezone.now()
emails.notify('global_comment_replies', user=self.project.creator, node=self.node, timestamp=time_now, target_user=self.user)
mock_store.assert_called_with([self.user._id], 'email_transactional', 'comment_replies',
self.project.creator, self.node, time_now, target_user=self.user)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply(self, mock_store):
user = factories.UserFactory()
time_now = timezone.now()
emails.notify('comments', user=user, node=self.node, timestamp=time_now, target_user=user)
mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user,
self.node, time_now, target_user=user)
@mock.patch('website.mails.send_mail')
@mock.patch('website.notifications.emails.store_emails')
def test_notify_does_not_send_comment_if_they_reply_to_their_own_comment(self, mock_store, mock_send_mail):
time_now = timezone.now()
emails.notify('comments', user=self.project.creator, node=self.project, timestamp=time_now,
target_user=self.project.creator)
assert_false(mock_store.called)
assert_false(mock_send_mail.called)
@mock.patch('website.notifications.emails.store_emails')
def test_notify_sends_comment_event_if_comment_reply_is_not_direct_reply_on_component(self, mock_store):
# Test that comment replies on components that are not direct replies to the subscriber use the
# "comments" email template.
user = factories.UserFactory()
time_now = timezone.now()
emails.notify('comments', user, self.node, time_now, target_user=user)
mock_store.assert_called_with([self.project.creator._id], 'email_transactional', 'comments', user,
self.node, time_now, target_user=user)
def test_check_node_node_none(self):
subs = emails.check_node(None, 'comments')
assert_equal(subs, {'email_transactional': [], 'email_digest': [], 'none': []})
def test_check_node_one(self):
subs = emails.check_node(self.project, 'comments')
assert_equal(subs, {'email_transactional': [self.project.creator._id], 'email_digest': [], 'none': []})
@mock.patch('website.project.views.comment.notify')
def test_check_user_comment_reply_subscription_if_email_not_sent_to_target_user(self, mock_notify):
# user subscribed to comment replies
user = factories.UserFactory()
user_subscription = factories.NotificationSubscriptionFactory(
_id=user._id + '_comments',
user=user,
event_name='comment_replies'
)
user_subscription.email_transactional.add(user)
user_subscription.save()
# user is not subscribed to project comment notifications
project = factories.ProjectFactory()
# user comments on project
target = factories.CommentFactory(node=project, user=user)
content = 'hammer to fall'
# reply to user (note: notify is called from Comment.create)
reply = Comment.create(
auth=Auth(project.creator),
user=project.creator,
node=project,
content=content,
target=Guid.load(target._id),
root_target=Guid.load(project._id),
)
assert_true(mock_notify.called)
assert_equal(mock_notify.call_count, 2)
@mock.patch('website.project.views.comment.notify')
def test_check_user_comment_reply_only_calls_once(self, mock_notify):
# user subscribed to comment replies
user = factories.UserFactory()
user_subscription = factories.NotificationSubscriptionFactory(
_id=user._id + '_comments',
user=user,
event_name='comment_replies'
)
user_subscription.email_transactional.add(user)
user_subscription.save()
project = factories.ProjectFactory()
# user comments on project
target = factories.CommentFactory(node=project, user=user)
content = 'P-Hacking: A user guide'
mock_notify.return_value = [user._id]
# reply to user (note: notify is called from Comment.create)
reply = Comment.create(
auth=Auth(project.creator),
user=project.creator,
node=project,
content=content,
target=Guid.load(target._id),
root_target=Guid.load(project._id),
)
assert_true(mock_notify.called)
assert_equal(mock_notify.call_count, 1)
def test_get_settings_url_for_node(self):
url = emails.get_settings_url(self.project._id, self.user)
assert_equal(url, self.project.absolute_url + 'settings/')
def test_get_settings_url_for_user(self):
url = emails.get_settings_url(self.user._id, self.user)
assert_equal(url, web_url_for('user_notifications', _absolute=True))
def test_get_node_lineage(self):
node_lineage = emails.get_node_lineage(self.node)
assert_equal(node_lineage, [self.project._id, self.node._id])
def test_fix_locale(self):
assert emails.fix_locale('en') == 'en'
assert emails.fix_locale('de_DE') == 'de_DE'
assert emails.fix_locale('de_de') == 'de_DE'
def test_localize_timestamp(self):
timestamp = timezone.now()
self.user.timezone = 'America/New_York'
self.user.locale = 'en_US'
self.user.save()
tz = dates.get_timezone(self.user.timezone)
locale = Locale(self.user.locale)
formatted_date = dates.format_date(timestamp, format='full', locale=locale)
formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
def test_localize_timestamp_empty_timezone(self):
timestamp = timezone.now()
self.user.timezone = ''
self.user.locale = 'en_US'
self.user.save()
tz = dates.get_timezone('Etc/UTC')
locale = Locale(self.user.locale)
formatted_date = dates.format_date(timestamp, format='full', locale=locale)
formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
def test_localize_timestamp_empty_locale(self):
timestamp = timezone.now()
self.user.timezone = 'America/New_York'
self.user.locale = ''
self.user.save()
tz = dates.get_timezone(self.user.timezone)
locale = Locale('en')
formatted_date = dates.format_date(timestamp, format='full', locale=locale)
formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
def test_localize_timestamp_handles_unicode(self):
timestamp = timezone.now()
self.user.timezone = 'Europe/Moscow'
self.user.locale = 'ru_RU'
self.user.save()
tz = dates.get_timezone(self.user.timezone)
locale = Locale(self.user.locale)
formatted_date = dates.format_date(timestamp, format='full', locale=locale)
formatted_time = dates.format_time(timestamp, format='short', tzinfo=tz, locale=locale)
formatted_datetime = u'{time} on {date}'.format(time=formatted_time, date=formatted_date)
assert_equal(emails.localize_timestamp(timestamp, self.user), formatted_datetime)
class TestSendDigest(OsfTestCase):
def setUp(self):
super(TestSendDigest, self).setUp()
self.user_1 = factories.UserFactory()
self.user_2 = factories.UserFactory()
self.project = factories.ProjectFactory()
self.timestamp = timezone.now()
def test_group_notifications_by_user_transactional(self):
send_type = 'email_transactional'
d = factories.NotificationDigestFactory(
user=self.user_1,
send_type=send_type,
timestamp=self.timestamp,
message='Hello',
node_lineage=[self.project._id]
)
d.save()
d2 = factories.NotificationDigestFactory(
user=self.user_2,
send_type=send_type,
timestamp=self.timestamp,
message='Hello',
node_lineage=[self.project._id]
)
d2.save()
d3 = factories.NotificationDigestFactory(
user=self.user_2,
send_type='email_digest',
timestamp=self.timestamp,
message='Hello, but this should not appear (this is a digest)',
node_lineage=[self.project._id]
)
d3.save()
user_groups = list(get_users_emails(send_type))
expected = [
{
u'user_id': self.user_1._id,
u'info': [{
u'message': u'Hello',
u'node_lineage': [unicode(self.project._id)],
u'_id': d._id
}]
},
{
u'user_id': self.user_2._id,
u'info': [{
u'message': u'Hello',
u'node_lineage': [unicode(self.project._id)],
u'_id': d2._id
}]
}
]
assert_equal(len(user_groups), 2)
assert_equal(user_groups, expected)
digest_ids = [d._id, d2._id, d3._id]
remove_notifications(email_notification_ids=digest_ids)
def test_group_notifications_by_user_digest(self):
send_type = 'email_digest'
d = factories.NotificationDigestFactory(
user=self.user_1,
send_type=send_type,
event='comment_replies',
timestamp=self.timestamp,
message='Hello',
node_lineage=[self.project._id]
)
d.save()
d2 = factories.NotificationDigestFactory(
user=self.user_2,
send_type=send_type,
timestamp=self.timestamp,
message='Hello',
node_lineage=[self.project._id]
)
d2.save()
d3 = factories.NotificationDigestFactory(
user=self.user_2,
send_type='email_transactional',
timestamp=self.timestamp,
message='Hello, but this should not appear (this is transactional)',
node_lineage=[self.project._id]
)
d3.save()
user_groups = list(get_users_emails(send_type))
expected = [
{
u'user_id': unicode(self.user_1._id),
u'info': [{
u'message': u'Hello',
u'node_lineage': [unicode(self.project._id)],
u'_id': unicode(d._id)
}]
},
{
u'user_id': unicode(self.user_2._id),
u'info': [{
u'message': u'Hello',
u'node_lineage': [unicode(self.project._id)],
u'_id': unicode(d2._id)
}]
}
]
assert_equal(len(user_groups), 2)
assert_equal(user_groups, expected)
digest_ids = [d._id, d2._id, d3._id]
remove_notifications(email_notification_ids=digest_ids)
@mock.patch('website.mails.send_mail')
def test_send_users_email_called_with_correct_args(self, mock_send_mail):
send_type = 'email_transactional'
d = factories.NotificationDigestFactory(
send_type=send_type,
event='comment_replies',
timestamp=timezone.now(),
message='Hello',
node_lineage=[factories.ProjectFactory()._id]
)
d.save()
user_groups = list(get_users_emails(send_type))
send_users_email(send_type)
assert_true(mock_send_mail.called)
assert_equals(mock_send_mail.call_count, len(user_groups))
last_user_index = len(user_groups) - 1
user = OSFUser.load(user_groups[last_user_index]['user_id'])
args, kwargs = mock_send_mail.call_args
assert_equal(kwargs['to_addr'], user.username)
assert_equal(kwargs['mimetype'], 'html')
assert_equal(kwargs['mail'], mails.DIGEST)
assert_equal(kwargs['name'], user.fullname)
assert_equal(kwargs['can_change_node_preferences'], True)
message = group_by_node(user_groups[last_user_index]['info'])
assert_equal(kwargs['message'], message)
@mock.patch('website.mails.send_mail')
def test_send_users_email_ignores_disabled_users(self, mock_send_mail):
send_type = 'email_transactional'
d = factories.NotificationDigestFactory(
send_type=send_type,
event='comment_replies',
timestamp=timezone.now(),
message='Hello',
node_lineage=[factories.ProjectFactory()._id]
)
d.save()
user_groups = list(get_users_emails(send_type))
last_user_index = len(user_groups) - 1
user = OSFUser.load(user_groups[last_user_index]['user_id'])
user.is_disabled = True
user.save()
send_users_email(send_type)
assert_false(mock_send_mail.called)
def test_remove_sent_digest_notifications(self):
d = factories.NotificationDigestFactory(
event='comment_replies',
timestamp=timezone.now(),
message='Hello',
node_lineage=[factories.ProjectFactory()._id]
)
digest_id = d._id
remove_notifications(email_notification_ids=[digest_id])
with assert_raises(NotificationDigest.DoesNotExist):
NotificationDigest.objects.get(_id=digest_id)
class TestNotificationsReviews(OsfTestCase):
def setUp(self):
super(TestNotificationsReviews, self).setUp()
self.provider = factories.PreprintProviderFactory(_id='engrxiv')
self.preprint = factories.PreprintFactory(provider=self.provider)
self.user = factories.UserFactory()
self.sender = factories.UserFactory()
self.context_info = {
'email_sender': self.sender,
'domain': 'osf.io',
'reviewable': self.preprint,
'workflow': 'pre-moderation',
'provider_contact_email': settings.OSF_CONTACT_EMAIL,
'provider_support_email': settings.OSF_SUPPORT_EMAIL,
}
self.action = factories.ReviewActionFactory()
factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'global_comments',
user=self.user,
event_name='global_comments'
).add_user_to_subscription(self.user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'global_file_updated',
user=self.user,
event_name='global_file_updated'
).add_user_to_subscription(self.user, 'email_transactional')
factories.NotificationSubscriptionFactory(
_id=self.user._id + '_' + 'global_reviews',
user=self.user,
event_name='global_reviews'
).add_user_to_subscription(self.user, 'email_transactional')
def test_reviews_base_notification(self):
contributor_subscriptions = list(utils.get_all_user_subscriptions(self.user))
event_types = [sub.event_name for sub in contributor_subscriptions]
assert_in('global_reviews', event_types)
@mock.patch('website.mails.mails.send_mail')
def test_reviews_submit_notification(self, mock_send_email):
listeners.reviews_submit_notification(self, context=self.context_info, recipients=[self.sender, self.user])
assert_true(mock_send_email.called)
@mock.patch('website.notifications.emails.notify_global_event')
def test_reviews_notification(self, mock_notify):
listeners.reviews_notification(self, creator=self.sender, context=self.context_info, action=self.action, template='test.html.mako')
assert_true(mock_notify.called)
|
|
"""
sentry.web.frontend.accounts
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import itertools
from django.contrib import messages
from django.contrib.auth import login as login_user, authenticate
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import HttpResponseRedirect
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.utils import timezone
from sentry.constants import MEMBER_USER
from sentry.models import Project, UserOption, LostPasswordHash
from sentry.plugins import plugins
from sentry.web.decorators import login_required
from sentry.web.forms.accounts import (
AccountSettingsForm, NotificationSettingsForm, AppearanceSettingsForm,
RegistrationForm, RecoverPasswordForm, ChangePasswordRecoverForm,
ProjectEmailOptionsForm, AuthenticationForm)
from sentry.web.helpers import render_to_response
from sentry.utils.auth import get_auth_providers
from sentry.utils.safe import safe_execute
@csrf_protect
@never_cache
def login(request):
from django.conf import settings
if request.user.is_authenticated():
return login_redirect(request)
form = AuthenticationForm(request, request.POST or None)
if form.is_valid():
login_user(request, form.get_user())
return login_redirect(request)
request.session.set_test_cookie()
context = csrf(request)
context.update({
'form': form,
'next': request.session.get('_next'),
'CAN_REGISTER': settings.SENTRY_ALLOW_REGISTRATION or request.session.get('can_register'),
'AUTH_PROVIDERS': get_auth_providers(),
'SOCIAL_AUTH_CREATE_USERS': settings.SOCIAL_AUTH_CREATE_USERS,
})
return render_to_response('sentry/login.html', context, request)
@csrf_protect
@never_cache
@transaction.commit_on_success
def register(request):
from django.conf import settings
if not (settings.SENTRY_ALLOW_REGISTRATION or request.session.get('can_register')):
return HttpResponseRedirect(reverse('sentry'))
form = RegistrationForm(request.POST or None)
if form.is_valid():
user = form.save()
# can_register should only allow a single registration
request.session.pop('can_register', None)
# HACK: grab whatever the first backend is and assume it works
user.backend = settings.AUTHENTICATION_BACKENDS[0]
login_user(request, user)
return login_redirect(request)
return render_to_response('sentry/register.html', {
'form': form,
'AUTH_PROVIDERS': get_auth_providers(),
'SOCIAL_AUTH_CREATE_USERS': settings.SOCIAL_AUTH_CREATE_USERS,
}, request)
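# Post-login redirect helper: falls back to the Sentry root when the stored
# '_next' URL is missing, contains '//' (absolute or protocol-relative), or
# points back at the login view, which avoids open redirects and redirect loops.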
@login_required
def login_redirect(request):
default = reverse('sentry')
login_url = request.session.pop('_next', None) or default
if '//' in login_url:
login_url = default
elif login_url.startswith(reverse('sentry-login')):
login_url = default
return HttpResponseRedirect(login_url)
@never_cache
def logout(request):
from django.contrib.auth import logout
logout(request)
return HttpResponseRedirect(reverse('sentry'))
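# Password recovery: reuse an existing LostPasswordHash for the user if it is
# still valid; otherwise refresh its timestamp and hash before mailing the
# recovery link.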
def recover(request):
form = RecoverPasswordForm(request.POST or None)
if form.is_valid():
password_hash, created = LostPasswordHash.objects.get_or_create(
user=form.cleaned_data['user']
)
if not password_hash.is_valid():
password_hash.date_added = timezone.now()
            password_hash.set_hash()
            password_hash.save()  # persist the refreshed hash before mailing it
if form.is_valid():
password_hash.send_recover_mail()
return render_to_response('sentry/account/recover/sent.html', {
'email': password_hash.user.email,
}, request)
context = {
'form': form,
}
return render_to_response('sentry/account/recover/index.html', context, request)
def recover_confirm(request, user_id, hash):
try:
password_hash = LostPasswordHash.objects.get(user=user_id, hash=hash)
if not password_hash.is_valid():
password_hash.delete()
raise LostPasswordHash.DoesNotExist
user = password_hash.user
except LostPasswordHash.DoesNotExist:
context = {}
tpl = 'sentry/account/recover/failure.html'
else:
tpl = 'sentry/account/recover/confirm.html'
if request.method == 'POST':
form = ChangePasswordRecoverForm(request.POST)
if form.is_valid():
user.set_password(form.cleaned_data['password'])
user.save()
# Ugly way of doing this, but Django requires the backend be set
user = authenticate(
username=user.username,
password=form.cleaned_data['password'],
)
login_user(request, user)
password_hash.delete()
return login_redirect(request)
else:
form = ChangePasswordRecoverForm()
context = {
'form': form,
}
return render_to_response(tpl, context, request)
@csrf_protect
@never_cache
@login_required
@transaction.commit_on_success
def settings(request):
form = AccountSettingsForm(request.user, request.POST or None, initial={
'email': request.user.email,
'first_name': request.user.first_name,
})
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')
return HttpResponseRedirect(request.path)
context = csrf(request)
context.update({
'form': form,
'page': 'settings',
})
return render_to_response('sentry/account/settings.html', context, request)
@csrf_protect
@never_cache
@login_required
@transaction.commit_on_success
def appearance_settings(request):
from django.conf import settings
options = UserOption.objects.get_all_values(user=request.user, project=None)
form = AppearanceSettingsForm(request.user, request.POST or None, initial={
'language': options.get('language') or request.LANGUAGE_CODE,
'stacktrace_order': int(options.get('stacktrace_order', -1) or -1),
'timezone': options.get('timezone') or settings.TIME_ZONE,
})
if form.is_valid():
form.save()
messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')
return HttpResponseRedirect(request.path)
context = csrf(request)
context.update({
'form': form,
'page': 'appearance',
})
return render_to_response('sentry/account/appearance.html', context, request)
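# Notification preferences combine a global settings form, one e-mail options
# form per accessible project, and any forms contributed by plugins; all of
# them must validate before anything is saved.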
@csrf_protect
@never_cache
@login_required
@transaction.commit_on_success
def notification_settings(request):
settings_form = NotificationSettingsForm(request.user, request.POST or None)
project_list = Project.objects.get_for_user(request.user, access=MEMBER_USER)
project_forms = [
(project, ProjectEmailOptionsForm(
project, request.user,
request.POST or None,
prefix='project-%s' % (project.id,)
))
for project in sorted(project_list, key=lambda x: (x.team.name, x.name))
]
ext_forms = []
for plugin in plugins.all():
for form in safe_execute(plugin.get_notification_forms) or ():
form = safe_execute(form, plugin, request.user, request.POST or None, prefix=plugin.slug)
if not form:
continue
ext_forms.append(form)
if request.POST:
all_forms = list(itertools.chain(
[settings_form], ext_forms, (f for _, f in project_forms)
))
if all(f.is_valid() for f in all_forms):
for form in all_forms:
form.save()
messages.add_message(request, messages.SUCCESS, 'Your settings were saved.')
return HttpResponseRedirect(request.path)
context = csrf(request)
context.update({
'settings_form': settings_form,
'project_forms': project_forms,
'ext_forms': ext_forms,
'page': 'notifications',
})
return render_to_response('sentry/account/notifications.html', context, request)
@csrf_protect
@never_cache
@login_required
def list_identities(request):
from social_auth.models import UserSocialAuth
identity_list = list(UserSocialAuth.objects.filter(user=request.user))
AUTH_PROVIDERS = get_auth_providers()
context = csrf(request)
context.update({
'identity_list': identity_list,
'page': 'identities',
'AUTH_PROVIDERS': AUTH_PROVIDERS,
})
return render_to_response('sentry/account/identities.html', context, request)
|
|
from collections import OrderedDict
import copy
import re
from mangrove.datastore.cache_manager import get_cache_manager
from mangrove.form_model.validator_factory import validator_factory
from mangrove.datastore.database import DatabaseManager, DataObject
from mangrove.datastore.documents import FormModelDocument, EntityFormModelDocument
from mangrove.errors.MangroveException import FormModelDoesNotExistsException, QuestionCodeAlreadyExistsException, \
DataObjectAlreadyExists, QuestionAlreadyExistsException, NoDocumentError
from mangrove.form_model.field import UniqueIdField, ShortCodeField, FieldSet, SelectField, MediaField
from mangrove.form_model.validators import MandatoryValidator
from mangrove.utils.types import is_sequence, is_string, is_empty, is_not_empty
from mangrove.form_model import field
ARPT_SHORT_CODE = "dummy"
REGISTRATION_FORM_CODE = "reg"
ENTITY_TYPE_FIELD_CODE = "t"
ENTITY_TYPE_FIELD_NAME = "entity_type"
LOCATION_TYPE_FIELD_NAME = "location"
LOCATION_TYPE_FIELD_CODE = "l"
GEO_CODE = "g"
GEO_CODE_FIELD_NAME = "geo_code"
NAME_FIELD = "name"
FORM_MODEL_ID = "form_model_id"
FORM_CODE = "form_code"
NAME_FIELD_CODE = "n"
SHORT_CODE_FIELD = "short_code"
SHORT_CODE = "s"
DESCRIPTION_FIELD = "description"
DESCRIPTION_FIELD_CODE = "d"
MOBILE_NUMBER_FIELD = "mobile_number"
MOBILE_NUMBER_FIELD_CODE = "m"
EMAIL_FIELD = "email"
EMAIL_FIELD_CODE = "email"
REPORTER = "reporter"
TAG = "tag"
TAG_FIELD_CODE = "tg"
GLOBAL_REGISTRATION_FORM_ENTITY_TYPE = "registration"
FORM_MODEL_EXPIRY_TIME_IN_SEC = 2 * 60 * 60
ENTITY_DELETION_FORM_CODE = "delete"
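# Form models are cached per (database, form code) pair for
# FORM_MODEL_EXPIRY_TIME_IN_SEC; save(), delete() and void() on FormModel
# invalidate the cached entry.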
def get_form_model_by_code(dbm, code):
cache_manger = get_cache_manager()
key_as_str = get_form_model_cache_key(code, dbm)
row_value = cache_manger.get(key_as_str)
if row_value is None:
row_value = _load_questionnaire(code, dbm)
cache_manger.set(key_as_str, row_value, time=FORM_MODEL_EXPIRY_TIME_IN_SEC)
if row_value.get('is_registration_model') or row_value.get('form_code') == ENTITY_DELETION_FORM_CODE:
return EntityFormModel.new_from_doc(dbm, EntityFormModelDocument.wrap(row_value))
return FormModel.new_from_doc(dbm, FormModelDocument.wrap(row_value))
def _load_questionnaire(form_code, dbm):
assert isinstance(dbm, DatabaseManager)
assert is_string(form_code)
rows = dbm.load_all_rows_in_view('questionnaire', key=form_code)
if not len(rows):
raise FormModelDoesNotExistsException(form_code)
return rows[0]['value']
def list_form_models_by_code(dbm, codes):
assert isinstance(dbm, DatabaseManager)
assert is_sequence(codes)
rows = dbm.load_all_rows_in_view('questionnaire', keys=codes)
def _row_to_form_model(row):
doc = FormModelDocument.wrap(row['value'])
return FormModel.new_from_doc(dbm, doc)
return map(_row_to_form_model, rows)
def get_form_model_cache_key(form_code, dbm):
assert isinstance(dbm, DatabaseManager)
assert form_code is not None
if type(form_code) == unicode:
return "%s_%s" % (dbm.database.name.encode('utf-8'), form_code.encode('utf-8'))
return str("%s_%s" % (dbm.database.name, form_code))
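# Illustration with hypothetical values: a database named "hni_testorg" and the
# form code "cli001" yield the cache key "hni_testorg_cli001".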
def header_fields(form_model, key_attribute="name", ref_header_dict=None):
header_dict = ref_header_dict or OrderedDict()
_header_fields(form_model.fields, key_attribute, header_dict)
return header_dict
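# Group fields are flattened recursively; a child question's key is prefixed
# with its parent group's code ("<group_code>-<key>") so headers stay unique.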
def _header_fields(fields, key_attribute, header_dict, parent_field_name=None):
for field in fields:
if isinstance(field, FieldSet) and field.is_group():
_header_fields(field.fields, key_attribute, header_dict, field.code)
continue
key = field.__getattribute__(key_attribute) if type(key_attribute) == str else key_attribute(field)
key = "%s-%s" % (parent_field_name, key) if parent_field_name else key
if not header_dict.get(key):
header_dict.update({key: field.label})
def header_fields_for_mobile(form_model, key_attribute="name"):
header_dict = OrderedDict()
header_dict.update(_header_fields_for_mobile(form_model.fields, key_attribute))
return header_dict
def _header_fields_for_mobile(fields, key_attribute):
header_dict = OrderedDict()
for field in fields:
key = field.__getattribute__(key_attribute) if type(key_attribute) == str else key_attribute(field)
if isinstance(field, FieldSet) and (field.is_group() or field.is_repeat()):
header_dict.update({key: _header_fields_for_mobile(field.fields, key_attribute)})
if not header_dict.get(key):
header_dict.update({key: field.label})
return header_dict
def get_field_by_attribute_value(form_model, key_attribute, attribute_label):
# ex: field1.name='first_name' field1.code='q1'
# field2.name='location' field2.code='q3'
# and both field1 and field2 are form_model fields,
# get_field_by_attribute_value(form_model,'name','location') will give back field2
for field in form_model.fields:
if field.__getattribute__(key_attribute) == attribute_label:
return field
return None
def get_form_model_by_entity_type(dbm, entity_type):
assert isinstance(dbm, DatabaseManager)
assert is_sequence(entity_type)
rows = dbm.view.registration_form_model_by_entity_type(key=entity_type, include_docs=True)
if len(rows):
doc = EntityFormModelDocument.wrap(rows[0]['doc'])
return EntityFormModel.new_from_doc(dbm, doc)
return None
def get_form_code_by_entity_type(dbm, entity_type):
form_model = get_form_model_by_entity_type(dbm, entity_type)
return form_model.form_code if form_model else None
class FormModel(DataObject):
__document_class__ = FormModelDocument
@classmethod
def new_from_doc(cls, dbm, doc):
form_model = super(FormModel, cls).new_from_doc(dbm, doc)
form_model._old_doc = copy.deepcopy(form_model._doc)
return form_model
def _set_doc(self, form_code, is_registration_model, label, language, name):
doc = FormModelDocument()
doc.name = name
doc.set_label(label)
doc.form_code = form_code
doc.active_languages = [language]
doc.is_registration_model = is_registration_model
DataObject._set_document(self, doc)
def __init__(self, dbm, name=None, label=None, form_code=None, fields=None,
language="en", is_registration_model=False, validators=None,
enforce_unique_labels=True):
if not validators: validators = [MandatoryValidator()]
assert isinstance(dbm, DatabaseManager)
assert name is None or is_not_empty(name)
assert fields is None or is_sequence(fields)
assert form_code is None or (is_string(form_code) and is_not_empty(form_code))
# assert type is None or is_not_empty(type)
DataObject.__init__(self, dbm)
self._old_doc = None
self._snapshots = {}
self._form_fields = []
self.errors = []
self.validators = validators
self._enforce_unique_labels = enforce_unique_labels
self._validation_exception = []
# Are we being constructed from scratch or existing doc?
if name is None:
return
# Not made from existing doc, so build ourselves up
self._validate_fields(fields)
self._form_fields = fields
self._set_doc(form_code, is_registration_model, label, language, name)
@property
def name(self):
"""
Returns the name of the FormModel
"""
return self._doc.name
@property
def id(self):
return self._doc.id
@name.setter
def name(self, value):
"""
Sets the name of the FormModel
"""
self._doc.name = value
@property
def entity_questions(self):
ef = []
for f in self._form_fields:
if isinstance(f, UniqueIdField):
ef.append(f)
return ef
@property
def is_media_type_fields_present(self):
is_media = self._doc.is_media_type_fields_present
return False if is_media is None else is_media
def update_media_field_flag(self):
if self.media_fields:
self._doc.is_media_type_fields_present = True
def has_tag_field(self):
return self.get_field_by_code(TAG) is not None
@property
def xform(self):
return self._doc.xform
@property
def created_using(self):
return self._doc.created_using
@property
def mobile_main_fields(self):
return self._doc.mobile_main_fields
@mobile_main_fields.setter
def mobile_main_fields(self, value):
self._doc.mobile_main_fields = value
@created_using.setter
def created_using(self, value):
self._doc.created_using = value
def update_xform_with_questionnaire_name(self, questionnaire_name):
self.xform = re.sub(r"<html:title>.+</html:", "<html:title>%s</html:" % questionnaire_name, self.xform)
@xform.setter
def xform(self, value):
self._doc.xform = value
@property
def entity_type(self):
unique_id_fields = self.entity_questions
if unique_id_fields:
            # There can be multiple fields with the same unique id type; we need the set of distinct unique id types.
entity_types = OrderedDict()
for unique_id_field in unique_id_fields:
entity_types.update({unique_id_field.unique_id_type: None})
return entity_types.keys()
else:
return []
@property
def form_code(self):
return self._doc.form_code
@form_code.setter
def form_code(self, value):
self._doc.form_code = value
@property
def fields(self):
return self._form_fields
@property
def has_nested_fields(self):
return filter(lambda f: f.is_field_set, self.fields)
@property
def choice_fields(self):
return [field for field in self._form_fields if field.type in ("select", "select1")]
@property
def date_fields(self):
return [field for field in self._form_fields if field.type == 'date']
@property
def media_fields(self):
return self._get_media_fields(self._form_fields)
def _get_media_fields(self, fields):
media_fields = []
for field in fields:
if isinstance(field, MediaField):
media_fields.append(field)
if isinstance(field, FieldSet):
media_fields_from_field_set = self._get_media_fields(field.fields)
if media_fields_from_field_set:
media_fields.extend(media_fields_from_field_set)
return media_fields
@property
def label(self):
return self._doc.label
@property
def activeLanguages(self):
return self._doc.active_languages
@activeLanguages.setter
def activeLanguages(self, value):
self._doc.active_languages = value
def get_entity_type(self, values):
entity_type = self._case_insensitive_lookup(values, ENTITY_TYPE_FIELD_CODE)
return entity_type.lower() if is_not_empty(entity_type) else None
def delete(self):
self._delete_form_model_from_cache()
super(FormModel, self).delete()
def _delete_form_model_from_cache(self):
cache_manger = get_cache_manager()
cache_key = get_form_model_cache_key(self.form_code, self._dbm)
cache_manger.delete(cache_key)
def void(self, void=True):
self._delete_form_model_from_cache()
super(FormModel, self).void(void=void)
def save(self, process_post_update=True):
# convert fields and validators to json fields before save
self.check_if_form_model_unique()
return self.update_doc_and_save(process_post_update)
def check_if_form_model_unique(self):
if not self.is_form_code_unique():
raise DataObjectAlreadyExists('Form Model', 'Form Code', self.form_code)
def update_doc_and_save(self, process_post_update=True):
self._doc.json_fields = [f._to_json() for f in self._form_fields]
self._doc.validators = [validator.to_json() for validator in self.validators]
json_snapshots = {}
for key, value in self._snapshots.items():
json_snapshots[key] = [each._to_json() for each in value]
self._doc.snapshots = json_snapshots
self._delete_form_model_from_cache()
if self._doc is None:
raise NoDocumentError('No document to save')
return self._dbm._save_document(self._doc, prev_doc=self._old_doc, process_post_update=process_post_update)
def update_attachments(self, attachments, attachment_name=None):
return self.put_attachment(self._doc, attachments, filename=attachment_name)
def add_attachments(self, attachments, attachment_name=None):
return self.put_attachment(self._doc, attachments, filename=attachment_name)
def get_attachments(self, attachment_name=None):
return self.get_attachment(self._doc.id, filename=attachment_name)
def get_field_by_name(self, name):
for field in self._form_fields:
if field.name == name:
return field
return None
def get_field_by_code(self, code):
return self._get_by_code(self._form_fields, code)
def _get_by_code(self, fields, code):
for field in fields:
if code is not None and field.code.lower() == code.lower():
return field
if isinstance(field, FieldSet):
field_by_code = self._get_by_code(field.fields, code)
if field_by_code:
return field_by_code
return None
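    # Snapshot keys are CouchDB revision strings of the form "<n>-<hash>"; when
    # no revision is given, the oldest snapshot (lowest numeric prefix) is used.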
def get_field_by_code_and_rev(self, code, revision=None):
if self.revision == revision or not self._snapshots:
return self.get_field_by_code(code)
if revision is None:
revision = min(self._snapshots, key=lambda x: int(x.split('-')[0]))
snapshot = self._snapshots.get(revision, [])
for field in snapshot:
if field.code.lower() == code.lower(): return field
return None
def get_field_code_label_dict(self):
field_code_label_dict = {}
for form_field in self._form_fields:
            quoted_label = "'" + form_field.label + "'"
field_code_label_dict.update({form_field.code: quoted_label})
return field_code_label_dict
def add_field(self, field):
self._validate_fields(self._form_fields + [field])
self._form_fields.append(field)
return self._form_fields
def delete_field(self, code):
self._form_fields = [f for f in self._form_fields if f.code != code]
self._validate_fields(self._form_fields)
def delete_all_fields(self):
self._form_fields = []
def create_snapshot(self):
if self._form_fields:
self._snapshots[self._doc.rev] = self._form_fields
@property
def snapshots(self):
return self._snapshots
@property
def revision(self):
return self._doc.rev
@property
def form_fields(self):
return self._doc["json_fields"]
def field_names(self):
return [field['name'] for field in self._doc["json_fields"]]
def field_codes(self):
return [field['code'] for field in self._doc["json_fields"]]
@revision.setter
def revision(self, rev):
self._doc.rev = rev
def is_entity_registration_form(self):
return False
def bind(self, submission):
self.submission = submission
for field in self.fields:
answer = self._lookup_answer_for_field_code(self.submission, field.code)
field.set_value(answer)
def bound_values(self):
values = {}
for field in self.fields:
values.update({field.code: field.value})
return values
def _validate_fields(self, fields):
self._validate_uniqueness_of_field_codes(fields)
self._validate_uniqueness_of_field_labels(fields)
def _validate_uniqueness_of_field_labels(self, fields):
""" Validate all question labels are unique
"""
if self._enforce_unique_labels:
label_list = [f.label.lower() for f in fields]
label_list_without_duplicates = list(set(label_list))
if len(label_list) != len(label_list_without_duplicates):
raise QuestionAlreadyExistsException("All questions must be unique")
def _validate_uniqueness_of_field_codes(self, fields):
""" Validate all question codes are unique
"""
code_list = [f.code.lower() for f in fields]
code_list_without_duplicates = list(set(code_list))
if len(code_list) != len(code_list_without_duplicates):
raise QuestionCodeAlreadyExistsException("All question codes must be unique")
def _validate_answer_for_field(self, answer, field):
try:
value = field.validate(answer)
return True, value
except Exception as e:
field.errors.append(e.message)
self._validation_exception.append(e)
return False, e.message
def _set_document(self, document):
DataObject._set_document(self, document)
        # create form_model-level Field objects for each json field in the document
for json_field in document.json_fields:
f = field.create_question_from(json_field, self._dbm)
self._form_fields.append(f)
for validator_json in document.validators:
validator = validator_factory(validator_json)
if validator not in self.validators:
self.validators.append(validator)
if hasattr(document, 'snapshots'):
for key, value in document.snapshots.items():
self._snapshots[key] = []
for each in value:
f = field.create_question_from(each, self._dbm)
self._snapshots[key].append(f)
def is_form_code_unique(self):
try:
form = get_form_model_by_code(self._dbm, self.form_code)
is_form_code_same_as_existing_form_code = True if form.id == self.id else False
return is_form_code_same_as_existing_form_code
except FormModelDoesNotExistsException:
return True
def _find_code(self, answers, code):
for key in answers:
if key.lower() == code.lower():
return answers[key]
return None
def _remove_empty_values(self, answers):
return OrderedDict([(k, v) for k, v in answers.items() if not is_empty(v)])
def _remove_unknown_fields(self, answers):
key_value_items = OrderedDict([(k, v) for k, v in answers.items() if self.get_field_by_code(k) is not None])
return key_value_items
# TODO : does not handle value errors. eg. Text for Number. Done outside the service right now.
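    # Form-level validators run first (only for non-xform models); empty and
    # unknown answers are dropped, then each remaining answer is validated
    # against its field.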
def validate_submission(self, values):
assert values is not None
cleaned_values = OrderedDict()
errors = OrderedDict()
if not self.xform:
for validator in self.validators:
validator_error = validator.validate(values, self.fields, self._dbm)
if hasattr(validator, 'exception'):
self._validation_exception.extend(getattr(validator, 'exception'))
errors.update(validator_error)
values = self._remove_empty_values(values)
values = self._remove_unknown_fields(values)
for key in values:
field = self.get_field_by_code(key)
is_valid, result = self._validate_answer_for_field(values[key], field)
if is_valid:
cleaned_values[field.code] = result
else:
errors[field.code] = result
return cleaned_values, errors
def _case_insensitive_lookup(self, values, code):
for fieldcode in values:
if fieldcode.lower() == code.lower():
return values[fieldcode]
return None
def _lookup_answer_for_field_code(self, values, code):
value = self._case_insensitive_lookup(values, code)
return value
def stringify(self, values):
self.bind(values)
dict = OrderedDict()
for field in self.fields:
dict[field.code] = field.stringify()
return dict
def add_validator(self, validator_class):
if validator_class not in [validator.__class__ for validator in self.validators]:
self.validators.append(validator_class())
def remove_validator(self, validator_class):
for validator in self.validators:
if isinstance(validator, validator_class):
self.validators.remove(validator)
return
@property
def data_senders(self):
return self._doc._data.get('data_senders')
@property
def validation_exception(self):
return self._validation_exception
@property
def is_open_survey(self):
return self._doc.get('is_open_survey', False)
class EntityFormModel(FormModel):
__document_class__ = EntityFormModelDocument
def __init__(self, dbm, name=None, label=None, form_code=None, fields=None,
language="en", is_registration_model=False, validators=None,
enforce_unique_labels=True, entity_type=None):
super(EntityFormModel, self).__init__(dbm, name, label, form_code, fields,
language, is_registration_model, validators,
enforce_unique_labels)
assert entity_type is None or is_sequence(entity_type)
if self._doc:
self._doc.entity_type = entity_type
@property
def entity_type(self):
return self._doc.entity_type
@entity_type.setter
def entity_type(self, value):
self._doc.entity_type = value
def is_entity_registration_form(self):
return True
def is_global_registration_form(self):
return GLOBAL_REGISTRATION_FORM_ENTITY_TYPE in self.entity_type
@property
def entity_questions(self):
eq = []
for f in self._form_fields:
if isinstance(f, ShortCodeField):
eq.append(f)
return eq
def get_short_code(self, values):
return self._case_insensitive_lookup(values, self.entity_questions[0].code)
def _set_doc(self, form_code, is_registration_model, label, language, name):
doc = EntityFormModelDocument()
doc.name = name
doc.set_label(label)
doc.form_code = form_code
doc.active_languages = [language]
doc.is_registration_model = is_registration_model
DataObject._set_document(self, doc)
def get_entity_name_question_code(self):
for f in self._form_fields:
if f.name == 'name':
return f.code
|
|
# -*- coding: utf-8 -*-
import datetime
from lxml import etree
from requests import session as requests_session
from requests.exceptions import HTTPError
from .base import Item, Storage
from .http import HTTP_STORAGE_PARAMETERS, USERAGENT, prepare_auth, \
prepare_verify
from .. import exceptions, log, utils
from ..utils.compat import text_type
dav_logger = log.get(__name__)
CALDAV_DT_FORMAT = '%Y%m%dT%H%M%SZ'
def _normalize_href(base, href):
    '''Normalize the href to be a path only, relative to hostname and
    scheme.'''
if not href:
raise ValueError(href)
x = utils.urlparse.urljoin(base, href)
x = utils.urlparse.urlsplit(x).path
return x
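# Illustration with hypothetical URLs: _normalize_href('https://example.com/dav/',
# 'calendar/item.ics') returns '/dav/calendar/item.ics' -- only the path
# component relative to the host is kept.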
def _encode_href(x):
return utils.compat.urlquote(x, '/@')
def _decode_href(x):
return utils.compat.urlunquote(x)
class InvalidXMLResponse(exceptions.InvalidResponse):
pass
def _parse_xml(content):
try:
return etree.XML(content)
except etree.Error as e:
raise InvalidXMLResponse('Invalid XML encountered: {}\n'
'Double-check the URLs in your config.'
.format(e))
def _merge_xml(items):
rv = items[0]
rv.extend(items[1:])
return rv
def _fuzzy_matches_mimetype(strict, weak):
# different servers give different getcontenttypes:
# "text/vcard", "text/x-vcard", "text/x-vcard; charset=utf-8",
# "text/directory;profile=vCard", "text/directory",
# "text/vcard; charset=utf-8"
if strict is None or weak is None:
return True
mediatype, subtype = strict.split('/')
if subtype in weak:
return True
return False
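# e.g. _fuzzy_matches_mimetype('text/vcard', 'text/x-vcard; charset=utf-8') is
# True because the strict subtype 'vcard' occurs in the weak value.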
def _get_collection_from_url(url):
_, collection = url.rstrip('/').rsplit('/', 1)
return collection
class Discover(object):
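    # Discovery chain: well-known URI -> current-user-principal -> calendar/
    # addressbook home set -> individual collections, each step a request
    # against the URL found in the previous one.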
_resourcetype = None
_homeset_xml = None
_homeset_tag = None
_well_known_uri = None
_collection_xml = """
<d:propfind xmlns:d="DAV:">
<d:prop>
<d:resourcetype />
<d:displayname />
</d:prop>
</d:propfind>
"""
def __init__(self, session):
self.session = session
def find_dav(self):
try:
response = self.session.request('GET', self._well_known_uri,
allow_redirects=False)
return response.headers.get('Location', '')
except (HTTPError, exceptions.Error):
# The user might not have well-known URLs set up and instead points
# vdirsyncer directly to the DAV server.
dav_logger.debug('Server does not support well-known URIs.')
return ''
def find_principal(self, url=None):
if url is None:
url = self.find_dav()
headers = self.session.get_default_headers()
headers['Depth'] = 0
body = """
<d:propfind xmlns:d="DAV:">
<d:prop>
<d:current-user-principal />
</d:prop>
</d:propfind>
"""
response = self.session.request('PROPFIND', url, headers=headers,
data=body)
root = _parse_xml(response.content)
rv = root.find('.//{DAV:}current-user-principal/{DAV:}href')
if rv is None:
raise InvalidXMLResponse()
return utils.urlparse.urljoin(response.url, rv.text)
def find_home(self, url=None):
if url is None:
url = self.find_principal()
headers = self.session.get_default_headers()
headers['Depth'] = 0
response = self.session.request('PROPFIND', url,
headers=headers,
data=self._homeset_xml)
root = etree.fromstring(response.content)
        # Avoid string formatting here because of the XML namespaces.
rv = root.find('.//' + self._homeset_tag + '/{*}href')
if rv is None:
raise InvalidXMLResponse()
return utils.urlparse.urljoin(response.url, rv.text)
def find_collections(self, url=None):
if url is None:
url = self.find_home()
headers = self.session.get_default_headers()
headers['Depth'] = 1
r = self.session.request('PROPFIND', url, headers=headers,
data=self._collection_xml)
root = _parse_xml(r.content)
done = set()
for response in root.findall('{DAV:}response'):
props = _merge_xml(response.findall('{*}propstat/{*}prop'))
if props.find('{*}resourcetype/{*}' + self._resourcetype) is None:
continue
displayname = getattr(props.find('{*}displayname'), 'text', '')
href = response.find('{*}href')
if href is None:
raise InvalidXMLResponse()
href = utils.urlparse.urljoin(r.url, href.text)
if href not in done:
done.add(href)
yield {'href': href, 'displayname': displayname}
def discover(self):
for x in self.find_collections():
yield x
# Another one of Radicale's specialties: Discovery is broken (returning
# completely wrong URLs at every stage) as of version 0.9.
# https://github.com/Kozea/Radicale/issues/196
try:
for x in self.find_collections(''):
yield x
except (InvalidXMLResponse, HTTPError, exceptions.Error):
pass
class CalDiscover(Discover):
_resourcetype = 'calendar'
_homeset_xml = """
<d:propfind xmlns:d="DAV:" xmlns:c="urn:ietf:params:xml:ns:caldav">
<d:prop>
<c:calendar-home-set />
</d:prop>
</d:propfind>
"""
_homeset_tag = '{*}calendar-home-set'
_well_known_uri = '/.well-known/caldav/'
class CardDiscover(Discover):
_resourcetype = 'addressbook'
_homeset_xml = """
<d:propfind xmlns:d="DAV:" xmlns:c="urn:ietf:params:xml:ns:carddav">
<d:prop>
<c:addressbook-home-set />
</d:prop>
</d:propfind>
"""
_homeset_tag = '{*}addressbook-home-set'
_well_known_uri = '/.well-known/carddav/'
class DavSession(object):
'''
A helper class to connect to DAV servers.
'''
def __init__(self, url, username='', password='', verify=True, auth=None,
useragent=USERAGENT, verify_fingerprint=None):
if username and not password:
password = utils.get_password(username, url)
self._settings = {
'verify': prepare_verify(verify),
'auth': prepare_auth(auth, username, password),
'verify_fingerprint': verify_fingerprint,
}
self.useragent = useragent
self.url = url.rstrip('/') + '/'
self.parsed_url = utils.urlparse.urlparse(self.url)
self._session = None
def request(self, method, path, **kwargs):
url = self.url
if path:
url = utils.urlparse.urljoin(self.url, path)
if self._session is None:
self._session = requests_session()
more = dict(self._settings)
more.update(kwargs)
return utils.request(method, url, session=self._session, **more)
def get_default_headers(self):
return {
'User-Agent': self.useragent,
'Content-Type': 'application/xml; charset=UTF-8'
}
class DavStorage(Storage):
__doc__ = '''
    :param url: Base URL or a URL to a collection.
''' + HTTP_STORAGE_PARAMETERS + '''
:param unsafe_href_chars: Replace the given characters when generating
hrefs. Defaults to ``'@'``.
.. note::
Please also see :ref:`supported-servers`, as some servers may not work
well.
'''
    # The file extension of items. Useful for testing against Radicale.
fileext = None
    # The MIME type of items.
item_mimetype = None
# XML to use when fetching multiple hrefs.
get_multi_template = None
# The LXML query for extracting results in get_multi
get_multi_data_query = None
# The Discover subclass to use
discovery_class = None
_session = None
_repr_attributes = ('username', 'url')
def __init__(self, url, username='', password='', verify=True, auth=None,
useragent=USERAGENT, unsafe_href_chars='@',
verify_fingerprint=None, **kwargs):
super(DavStorage, self).__init__(**kwargs)
url = url.rstrip('/') + '/'
self.session = DavSession(url, username, password, verify, auth,
useragent, verify_fingerprint)
self.unsafe_href_chars = unsafe_href_chars
# defined for _repr_attributes
self.username = username
self.url = url
@classmethod
def _get_session(cls, **kwargs):
discover_args, _ = utils.split_dict(kwargs, lambda key: key in (
'url', 'username', 'password', 'verify', 'auth', 'useragent',
'verify_fingerprint',
))
return DavSession(**discover_args)
@classmethod
def discover(cls, **kwargs):
if kwargs.pop('collection', None) is not None:
raise TypeError('collection argument must not be given.')
d = cls.discovery_class(cls._get_session(**kwargs))
for c in d.discover():
url = c['href']
collection = _get_collection_from_url(url)
storage_args = dict(kwargs)
storage_args.update({'url': url, 'collection': collection,
'collection_human': c['displayname']})
yield storage_args
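    # Each dict yielded by discover() is a complete set of constructor
    # arguments; a purely hypothetical example:
    #
    #     {'url': 'https://dav.example.com/calendars/user/work/',
    #      'collection': 'work', 'collection_human': 'Work',
    #      'username': 'jane', 'password': '...'}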
@classmethod
def create_collection(cls, collection, **kwargs):
if collection is None:
collection = _get_collection_from_url(kwargs['url'])
session = cls._get_session(**kwargs)
d = cls.discovery_class(session)
for c in cls.discover(**kwargs):
if c['collection'] == collection:
return c
home = d.find_home()
collection_url = '{}/{}'.format(home.rstrip('/'), collection)
data = '''<?xml version="1.0" encoding="utf-8" ?>
<D:mkcol xmlns:D="DAV:" xmlns:C="{}">
<D:set>
<D:prop>
<D:resourcetype>
<D:collection/>
<C:{}/>
</D:resourcetype>
</D:prop>
</D:set>
</D:mkcol>
'''.format(cls._dav_namespace, cls._dav_resourcetype)
headers = d.session.get_default_headers()
try:
response = d.session.request('MKCOL', collection_url, data=data,
headers=headers)
except HTTPError as e:
raise NotImplementedError(e)
else:
rv = dict(kwargs)
rv['collection'] = collection
rv['url'] = response.url
return rv
def _normalize_href(self, *args, **kwargs):
return _normalize_href(self.session.url, *args, **kwargs)
def _get_href(self, item):
href = item.ident
for char in self.unsafe_href_chars:
href = href.replace(char, '_')
return self._normalize_href(href + self.fileext)
def _is_item_mimetype(self, mimetype):
return _fuzzy_matches_mimetype(self.item_mimetype, mimetype)
def get(self, href):
((actual_href, item, etag),) = self.get_multi([href])
assert href == actual_href
return item, etag
def get_multi(self, hrefs):
hrefs = set(hrefs)
href_xml = []
for href in hrefs:
if href != self._normalize_href(href):
raise exceptions.NotFoundError(href)
href_xml.append('<D:href>{}</D:href>'.format(_encode_href(href)))
if not href_xml:
return ()
data = self.get_multi_template.format(hrefs='\n'.join(href_xml))
response = self.session.request(
'REPORT',
'',
data=data,
headers=self.session.get_default_headers()
)
        root = _parse_xml(response.content)  # etree can only handle bytes
rv = []
hrefs_left = set(hrefs)
for href, etag, prop in self._parse_prop_responses(root):
raw = prop.find(self.get_multi_data_query)
if raw is None:
dav_logger.warning('Skipping {}, the item content is missing.'
.format(href))
continue
else:
raw = raw.text
if isinstance(raw, bytes):
raw = raw.decode(response.encoding)
if isinstance(etag, bytes):
etag = etag.decode(response.encoding)
try:
hrefs_left.remove(href)
except KeyError:
if href in hrefs:
dav_logger.warning('Server sent item twice: {}'
.format(href))
else:
dav_logger.warning('Server sent unsolicited item: {}'
.format(href))
else:
rv.append((href, Item(raw), etag))
for href in hrefs_left:
raise exceptions.NotFoundError(href)
return rv
def _put(self, href, item, etag):
headers = self.session.get_default_headers()
headers['Content-Type'] = self.item_mimetype
if etag is None:
headers['If-None-Match'] = '*'
else:
headers['If-Match'] = etag
response = self.session.request(
'PUT',
_encode_href(href),
data=item.raw.encode('utf-8'),
headers=headers
)
etag = response.headers.get('etag', None)
href = self._normalize_href(response.url)
if not etag:
item2, etag = self.get(href)
assert item2.uid == item.uid
return href, etag
def update(self, href, item, etag):
if etag is None:
raise ValueError('etag must be given and must not be None.')
href, etag = self._put(self._normalize_href(href), item, etag)
return etag
def upload(self, item):
href = self._get_href(item)
return self._put(href, item, None)
def delete(self, href, etag):
href = self._normalize_href(href)
headers = self.session.get_default_headers()
headers.update({
'If-Match': etag
})
self.session.request(
'DELETE',
_encode_href(href),
headers=headers
)
def _parse_prop_responses(self, root, decoding_rounds=1):
hrefs = set()
for response in root.iter('{DAV:}response'):
href = response.find('{DAV:}href')
if href is None:
dav_logger.error('Skipping response, href is missing.')
continue
href = self._normalize_href(href.text)
for i in range(decoding_rounds):
href = _decode_href(href)
if href in hrefs:
# Servers that send duplicate hrefs:
# - Zimbra
# https://github.com/untitaker/vdirsyncer/issues/88
# - Davmail
# https://github.com/untitaker/vdirsyncer/issues/144
dav_logger.warning('Skipping identical href: {!r}'
.format(href))
continue
props = response.findall('{DAV:}propstat/{DAV:}prop')
if not props:
dav_logger.warning('Skipping {!r}, properties are missing.'
.format(href))
continue
else:
props = _merge_xml(props)
if props.find('{DAV:}resourcetype/{DAV:}collection') is not None:
dav_logger.debug('Skipping {!r}, is collection.'.format(href))
continue
etag = getattr(props.find('{DAV:}getetag'), 'text', '')
if not etag:
dav_logger.warning('Skipping {!r}, etag property is missing.'
.format(href))
contenttype = getattr(props.find('{DAV:}getcontenttype'),
'text', None)
if not self._is_item_mimetype(contenttype):
dav_logger.debug('Skipping {!r}, {!r} != {!r}.'
.format(href, contenttype,
self.item_mimetype))
continue
hrefs.add(href)
yield href, etag, props
class CaldavStorage(DavStorage):
__doc__ = '''
CalDAV.
You can set a timerange to synchronize with the parameters ``start_date``
and ``end_date``. Inside those parameters, you can use any Python
expression to return a valid :py:class:`datetime.datetime` object. For
example, the following would synchronize the timerange from one year in the
past to one year in the future::
start_date = datetime.now() - timedelta(days=365)
end_date = datetime.now() + timedelta(days=365)
    Either both or neither must be specified. The default is to synchronize
    everything.
    You can set ``item_types`` to restrict the *kinds of items* you want to
    synchronize. For example, if you only want to synchronize events (but not
    download any tasks from the server), set ``item_types = ["VEVENT"]``. If
    you want to synchronize events and tasks, but have some ``VJOURNAL`` items
    on the server that you don't want to synchronize, use ``item_types =
    ["VEVENT", "VTODO"]``.
:param start_date: Start date of timerange to show, default -inf.
:param end_date: End date of timerange to show, default +inf.
    :param item_types: Kinds of items to show. The default, the empty list,
        shows everything. This depends on particular features of the server;
        the results are not validated.
''' + DavStorage.__doc__
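    # A configuration sketch (the storage name and values are made up; the
    # option names follow the docstring above):
    #
    #     [storage my_calendar]
    #     type = caldav
    #     url = https://dav.example.com/
    #     username = jane
    #     start_date = datetime.now() - timedelta(days=365)
    #     end_date = datetime.now() + timedelta(days=365)
    #     item_types = ["VEVENT", "VTODO"]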
storage_name = 'caldav'
fileext = '.ics'
item_mimetype = 'text/calendar'
discovery_class = CalDiscover
_dav_namespace = 'urn:ietf:params:xml:ns:caldav'
_dav_resourcetype = 'calendar'
start_date = None
end_date = None
get_multi_template = '''<?xml version="1.0" encoding="utf-8" ?>
<C:calendar-multiget xmlns:D="DAV:"
xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:prop>
<D:getetag/>
<C:calendar-data/>
</D:prop>
{hrefs}
</C:calendar-multiget>'''
get_multi_data_query = '{urn:ietf:params:xml:ns:caldav}calendar-data'
def __init__(self, start_date=None, end_date=None,
item_types=(), **kwargs):
super(CaldavStorage, self).__init__(**kwargs)
        if not isinstance(item_types, (list, tuple)):
            raise ValueError('item_types must be a list or a tuple.')
self.item_types = tuple(item_types)
if (start_date is None) != (end_date is None):
raise ValueError('If start_date is given, '
'end_date has to be given too.')
elif start_date is not None and end_date is not None:
namespace = dict(datetime.__dict__)
namespace['start_date'] = self.start_date = \
(eval(start_date, namespace)
if isinstance(start_date, (bytes, text_type))
else start_date)
self.end_date = \
(eval(end_date, namespace)
if isinstance(end_date, (bytes, text_type))
else end_date)
@staticmethod
def _get_list_filters(components, start, end):
if components:
caldavfilter = '''
<C:comp-filter name="VCALENDAR">
<C:comp-filter name="{component}">
{timefilter}
</C:comp-filter>
</C:comp-filter>
'''
if start is not None and end is not None:
start = start.strftime(CALDAV_DT_FORMAT)
end = end.strftime(CALDAV_DT_FORMAT)
timefilter = ('<C:time-range start="{start}" end="{end}"/>'
.format(start=start, end=end))
else:
timefilter = ''
for component in components:
yield caldavfilter.format(component=component,
timefilter=timefilter)
else:
if start is not None and end is not None:
for x in CaldavStorage._get_list_filters(('VTODO', 'VEVENT'),
start, end):
yield x
else:
yield '<C:comp-filter name="VCALENDAR"/>'
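    # For illustration, _get_list_filters(('VEVENT',), start, end) yields one
    # filter roughly like the following (whitespace added for readability):
    #
    #     <C:comp-filter name="VCALENDAR">
    #       <C:comp-filter name="VEVENT">
    #         <C:time-range start="..." end="..."/>
    #       </C:comp-filter>
    #     </C:comp-filter>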
def list(self):
data = '''<?xml version="1.0" encoding="utf-8" ?>
<C:calendar-query xmlns:D="DAV:"
xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:prop>
<D:getcontenttype/>
<D:getetag/>
</D:prop>
<C:filter>
{caldavfilter}
</C:filter>
</C:calendar-query>'''
headers = self.session.get_default_headers()
# https://github.com/untitaker/vdirsyncer/issues/166
        # The default Depth in CalDAV calendar-queries is 0, but the examples
        # use an explicit value of 1 for querying items. It is unclear from
        # the spec which WebDAV Depth values are actually allowed here.
headers['Depth'] = 1
caldavfilters = self._get_list_filters(self.item_types,
self.start_date, self.end_date)
for caldavfilter in caldavfilters:
xml = data.format(caldavfilter=caldavfilter)
response = self.session.request('REPORT', '', data=xml,
headers=headers)
root = _parse_xml(response.content)
rv = self._parse_prop_responses(root)
for href, etag, prop in rv:
yield href, etag
class CarddavStorage(DavStorage):
__doc__ = '''
CardDAV.
''' + DavStorage.__doc__
storage_name = 'carddav'
fileext = '.vcf'
item_mimetype = 'text/vcard'
discovery_class = CardDiscover
get_multi_template = '''<?xml version="1.0" encoding="utf-8" ?>
<C:addressbook-multiget xmlns:D="DAV:"
xmlns:C="urn:ietf:params:xml:ns:carddav">
<D:prop>
<D:getetag/>
<C:address-data/>
</D:prop>
{hrefs}
</C:addressbook-multiget>'''
_dav_namespace = 'urn:ietf:params:xml:ns:carddav'
_dav_resourcetype = 'addressbook'
get_multi_data_query = '{urn:ietf:params:xml:ns:carddav}address-data'
def list(self):
headers = self.session.get_default_headers()
headers['Depth'] = 1
data = '''<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:resourcetype/>
<D:getcontenttype/>
<D:getetag/>
</D:prop>
</D:propfind>
'''
# We use a PROPFIND request instead of addressbook-query due to issues
# with Zimbra. See https://github.com/untitaker/vdirsyncer/issues/83
response = self.session.request('PROPFIND', '', data=data,
headers=headers)
root = _parse_xml(response.content)
# Decode twice because ownCloud encodes twice.
# See https://github.com/owncloud/contacts/issues/581
rv = self._parse_prop_responses(root, decoding_rounds=2)
for href, etag, prop in rv:
yield href, etag
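        # Regarding decoding_rounds=2 above: servers that encode twice turn a
        # literal '@' into '%2540', which needs two unquoting passes
        # ('%2540' -> '%40' -> '@') before it matches locally generated hrefs.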
|
|
def test_000_basic_key_value_store(cfg):
# space kv dimensions k, v key k auto 0 1
# TODO: cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes v
''')
cfg.should().get('kv','k1').equals(None)
cfg.should().get('kv','k2').equals(None)
cfg.should().get('kv','k3').equals(None)
cfg.should().get('kv','k4').equals(None)
cfg.must().delete('kv','k1').equals(False)
cfg.must().delete('kv','k2').equals(False)
cfg.must().delete('kv','k3').equals(False)
cfg.must().delete('kv','k4').equals(False)
cfg.must().put('kv','k1',{'v': 'v1'}).equals(True)
cfg.should().get('kv','k1').equals({'v': 'v1'})
cfg.should().get('kv','k2').equals(None)
cfg.should().get('kv','k3').equals(None)
cfg.should().get('kv','k4').equals(None)
cfg.must().put('kv','k2',{'v': 'v2'}).equals(True)
cfg.should().get('kv','k1').equals({'v': 'v1'})
cfg.should().get('kv','k2').equals({'v': 'v2'})
cfg.should().get('kv','k3').equals(None)
cfg.should().get('kv','k4').equals(None)
cfg.must().put('kv','k3',{'v': 'v3'}).equals(True)
cfg.should().get('kv','k1').equals({'v': 'v1'})
cfg.should().get('kv','k2').equals({'v': 'v2'})
cfg.should().get('kv','k3').equals({'v': 'v3'})
cfg.should().get('kv','k4').equals(None)
cfg.must().put('kv','k4',{'v': 'v4'}).equals(True)
cfg.should().get('kv','k1').equals({'v': 'v1'})
cfg.should().get('kv','k2').equals({'v': 'v2'})
cfg.should().get('kv','k3').equals({'v': 'v3'})
cfg.should().get('kv','k4').equals({'v': 'v4'})
cfg.must().delete('kv','k1').equals(True)
cfg.must().delete('kv','k2').equals(True)
cfg.must().delete('kv','k3').equals(True)
cfg.must().delete('kv','k4').equals(True)
cfg.should().get('kv','k1').equals(None)
cfg.should().get('kv','k2').equals(None)
cfg.should().get('kv','k3').equals(None)
cfg.should().get('kv','k4').equals(None)
def test_000_error_unknownspace(cfg):
cfg.shouldnt().get('noexist','k')# HYPERCLIENT_UNKNOWNSPACE
def test_010_basic_multi_attribute_space(cfg):
# space kv dimensions k, v1, v2, v3, v4 key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes v1, v2, v3, v4
''')
cfg.should().get('kv','k').equals(None)
cfg.must().delete('kv','k').equals(False)
cfg.must().put('kv','k',{'v1': 'v1'}).equals(True)
cfg.should().get('kv','k').equals({'v1': 'v1', 'v2': '', 'v3': '', 'v4': ''})
cfg.must().put('kv','k',{'v2': 'v2', 'v3': 'v3'}).equals(True)
cfg.should().get('kv','k').equals({'v1': 'v1', 'v2': 'v2', 'v3': 'v3', 'v4': ''})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_020_basic_equality_search(cfg):
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes v
''')
cfg.should().get('kv','ka').equals(None)
cfg.should().get('kv',"ka'").equals(None)
cfg.should().get('kv','kb').equals(None)
cfg.should().get('kv','kc').equals(None)
cfg.should().get('kv','kd').equals(None)
cfg.must().put('kv','ka',{'v': 'a'}).equals(True)
cfg.must().put('kv',"ka'",{'v': 'a'}).equals(True)
cfg.must().put('kv','kb',{'v': 'b'}).equals(True)
cfg.must().put('kv','kc',{'v': 'c'}).equals(True)
cfg.must().put('kv','kd',{'v': 'd'}).equals(True)
cfg.should().search('kv',{}).equals([
{'k': 'ka', 'v': 'a'},
{'k': "ka'", 'v': 'a'},
{'k': "kb", 'v': 'b'},
{'k': "kc", 'v': 'c'},
{'k': "kd", 'v': 'd'}
])
cfg.should().search('kv',{'v':'b'}).equals([{'k': 'kb', 'v': 'b'}])
cfg.should().search('kv',{'v':'c'}).equals([{'k': 'kc', 'v': 'c'}])
cfg.should().search('kv',{'v':'d'}).equals([{'k': 'kd', 'v': 'd'}])
cfg.must().delete('kv','ka').equals(True)
cfg.must().delete('kv',"ka'").equals(True)
cfg.must().delete('kv','kb').equals(True)
cfg.must().delete('kv','kc').equals(True)
cfg.must().delete('kv','kd').equals(True)
cfg.should().get('kv','ka').equals(None)
cfg.should().get('kv',"ka'").equals(None)
cfg.should().get('kv','kb').equals(None)
cfg.should().get('kv','kc').equals(None)
cfg.should().get('kv','kd').equals(None)
def test_030_basic_cond_put(cfg):
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes v1, v2
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{'v1': '1', 'v2': '2'}).equals(True)
cfg.should().get('kv','k').equals({'v1': '1', 'v2': '2'})
cfg.should().cond_put('kv','k',{'v1':'1'}, {'v1': '3'}).equals(True)
cfg.should().get('kv','k').equals({'v1': '3', 'v2': '2'})
cfg.should().cond_put('kv','k',{'v1':'1'},{'v1': '4'}).equals(False)
cfg.should().get('kv','k').equals({'v1': '3', 'v2': '2'})
cfg.should().cond_put('kv','k',{'v2':'2'},{'v1': '4'}).equals(True)
cfg.should().get('kv','k').equals({'v1': '4', 'v2': '2'})
cfg.should().cond_put('kv','k',{'v2':'1'},{'v1': '5'}).equals(False)
cfg.should().get('kv','k').equals({'v1': '4', 'v2': '2'})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_040_basic_put_if_not_exist(cfg):
# space kv dimensions k, v1, v2 key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes v1, v2
''')
cfg.should().get('kv','k').equals(None)
cfg.should().put_if_not_exist('kv','k',{'v1': '1', 'v2': '2'}).equals(True)
cfg.should().get('kv','k').equals({'v1': '1', 'v2': '2'})
cfg.should().put_if_not_exist('kv','k',{'v1': 'a', 'v2': 'b'}).equals(False)
cfg.should().get('kv','k').equals({'v1': '1', 'v2': '2'})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_100_datatype_string(cfg):
# space kv dimensions k, v key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{}).equals(True)
cfg.must().string_prepend('kv','k',{'v': '5'}).equals(True)
cfg.should().get('kv','k').equals({'v': '5'})
cfg.must().string_append('kv','k',{'v': '6'}).equals(True)
cfg.should().get('kv','k').equals({'v': '56'})
cfg.must().string_prepend('kv','k',{'v': '4'}).equals(True)
cfg.should().get('kv','k').equals({'v': '456'})
cfg.must().string_append('kv','k',{'v': '7'}).equals(True)
cfg.should().get('kv','k').equals({'v': '4567'})
cfg.must().string_prepend('kv','k',{'v': '3'}).equals(True)
cfg.should().get('kv','k').equals({'v': '34567'})
cfg.must().string_append('kv','k',{'v': '8'}).equals(True)
cfg.should().get('kv','k').equals({'v': '345678'})
cfg.must().string_prepend('kv','k',{'v': '2'}).equals(True)
cfg.should().get('kv','k').equals({'v': '2345678'})
cfg.must().string_append('kv','k',{'v': '9'}).equals(True)
cfg.should().get('kv','k').equals({'v': '23456789'})
cfg.must().string_prepend('kv','k',{'v': '1'}).equals(True)
cfg.should().get('kv','k').equals({'v': '123456789'})
cfg.must().string_append('kv','k',{'v': '0'}).equals(True)
cfg.should().get('kv','k').equals({'v': '1234567890'})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_110_datatype_int64(cfg):
# space kv dimensions k, v (int64) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes int64 v
''')
cfg.should().get('kv','k').equals(None)
# Test signed-ness and the limits of a two's complement number
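    # (the signed 64-bit range is -2**63 .. 2**63 - 1, i.e.
    # -9223372036854775808 .. 9223372036854775807)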
cfg.must().put('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.must().put('kv','k',{'v': 1L}).equals(True)
cfg.should().get('kv','k').equals({'v': 1L})
cfg.must().put('kv','k',{'v': -1L}).equals(True)
cfg.should().get('kv','k').equals({'v': -1L})
cfg.must().put('kv','k',{'v': 9223372036854775807L}).equals(True)
cfg.should().get('kv','k').equals({'v': 9223372036854775807L})
cfg.must().put('kv','k',{'v': -9223372036854775808L}).equals(True)
cfg.should().get('kv','k').equals({'v': -9223372036854775808L})
# Test add
cfg.must().put('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.must().atomic_add('kv','k',{'v': 9223372036854775807L}).equals(True)
cfg.should().get('kv','k').equals({'v': 9223372036854775807L})
cfg.mustnt().atomic_add('kv','k',{'v': 1L})# HYPERCLIENT_OVERFLOW
cfg.should().get('kv','k').equals({'v': 9223372036854775807L})
cfg.must().atomic_add('kv','k',{'v': -9223372036854775807L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
# Test sub
cfg.must().put('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_sub('kv','k',{'v': 9223372036854775807L}).equals(True)
cfg.should().get('kv','k').equals({'v': -9223372036854775807L})
cfg.should().atomic_sub('kv','k',{'v': 1L}).equals(True)
cfg.should().get('kv','k').equals({'v': -9223372036854775808L})
cfg.shouldnt().atomic_sub('kv','k',{'v': 1L})# HYPERCLIENT_OVERFLOW
cfg.should().get('kv','k').equals({'v': -9223372036854775808L})
cfg.should().atomic_sub('kv','k',{'v': -9223372036854775808L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
# Test mul
cfg.must().put('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_mul('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_mul('kv','k',{'v': 1L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_mul('kv','k',{'v': 9223372036854775807L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_mul('kv','k',{'v': -9223372036854775808L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.must().put('kv','k',{'v': 1L}).equals(True)
cfg.should().get('kv','k').equals({'v': 1L})
cfg.should().atomic_mul('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 2L})
cfg.should().atomic_mul('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 4L})
cfg.should().atomic_mul('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 8L})
cfg.should().atomic_mul('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 16L})
cfg.should().atomic_mul('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 32L})
cfg.should().atomic_mul('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 64L})
cfg.should().atomic_mul('kv','k',{'v': 72057594037927936L}).equals(True)
cfg.should().get('kv','k').equals({'v': 4611686018427387904L})
cfg.shouldnt().atomic_mul('kv','k',{'v': 2L})# HYPERCLIENT_OVERFLOW
cfg.should().get('kv','k').equals({'v': 4611686018427387904L})
# Test div
cfg.must().put('kv','k',{'v': 4611686018427387904L}).equals(True)
cfg.should().get('kv','k').equals({'v': 4611686018427387904L})
cfg.should().atomic_div('kv','k',{'v': 72057594037927936L}).equals(True)
cfg.should().get('kv','k').equals({'v': 64L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 32L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 16L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 8L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 4L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 2L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 1L})
cfg.should().atomic_div('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.shouldnt().atomic_div('kv','k',{'v': 0L})# HYPERCLIENT_OVERFLOW
cfg.should().get('kv','k').equals({'v': 0L})
# Test mod
cfg.must().put('kv','k',{'v': 7L}).equals(True)
cfg.should().get('kv','k').equals({'v': 7L})
cfg.should().atomic_mod('kv','k',{'v': 3L}).equals(True)
cfg.should().get('kv','k').equals({'v': 1L})
cfg.should().atomic_mod('kv','k',{'v': 2L}).equals(True)
cfg.should().get('kv','k').equals({'v': 1L})
cfg.must().put('kv','k',{'v': 7L}).equals(True)
cfg.should().get('kv','k').equals({'v': 7L})
cfg.should().atomic_mod('kv','k',{'v': -3L}).equals(True)
cfg.should().get('kv','k').equals({'v': -2L})
cfg.must().put('kv','k',{'v': -7L}).equals(True)
cfg.should().get('kv','k').equals({'v': -7L})
cfg.should().atomic_mod('kv','k',{'v': 3L}).equals(True)
cfg.should().get('kv','k').equals({'v': 2L})
cfg.must().put('kv','k',{'v': -7L}).equals(True)
cfg.should().get('kv','k').equals({'v': -7L})
cfg.should().atomic_mod('kv','k',{'v': -3L}).equals(True)
cfg.should().get('kv','k').equals({'v': -1L})
# Test and
cfg.must().put('kv','k',{'v': -2401053089206453570L}).equals(True)
cfg.should().get('kv','k').equals({'v': -2401053089206453570L})
cfg.should().atomic_and('kv','k',{'v': -374081424649621491L}).equals(True)
cfg.should().get('kv','k').equals({'v': -2698572151406022644L})
# Test or
cfg.must().put('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_or('kv','k',{'v': -6148914691236517206L}).equals(True)
cfg.should().get('kv','k').equals({'v': -6148914691236517206L})
cfg.should().atomic_or('kv','k',{'v': 6148914691236517205L}).equals(True)
cfg.should().get('kv','k').equals({'v': -1L})
# Test xor
cfg.must().put('kv','k',{'v': 0L}).equals(True)
cfg.should().get('kv','k').equals({'v': 0L})
cfg.should().atomic_xor('kv','k',{'v': -6148914691236517206L}).equals(True)
cfg.should().get('kv','k').equals({'v': -6148914691236517206L})
cfg.should().atomic_xor('kv','k',{'v': 6148914691236517205L}).equals(True)
cfg.should().get('kv','k').equals({'v': -1L})
# Cleanup
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_120_datatype_float(cfg):
# space kv dimensions k, v (float) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes float v
''')
cfg.should().get('kv','k').equals(None)
# Test signed-ness and precise floating point numbers
cfg.must().put('kv','k',{'v': 0.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.must().put('kv','k',{'v': 1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 1.0})
cfg.must().put('kv','k',{'v': -1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': -1.0})
cfg.must().put('kv','k',{'v': 9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 9006104071832581.0})
# Test add
cfg.must().put('kv','k',{'v': 0.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.must().atomic_add('kv','k',{'v': 9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 9006104071832581.0})
cfg.must().atomic_add('kv','k',{'v': -9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
# Test sub
cfg.must().put('kv','k',{'v': 0.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.should().atomic_sub('kv','k',{'v': 9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': -9006104071832581.0})
cfg.should().atomic_sub('kv','k',{'v': -9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
# Test mul
cfg.must().put('kv','k',{'v': 0.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.should().atomic_mul('kv','k',{'v': 0.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.should().atomic_mul('kv','k',{'v': 1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.should().atomic_mul('kv','k',{'v': 9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.should().atomic_mul('kv','k',{'v': -9006104071832581.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.0})
cfg.must().put('kv','k',{'v': 1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 1.0})
cfg.should().atomic_mul('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 2.0})
cfg.should().atomic_mul('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 4.0})
cfg.should().atomic_mul('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 8.0})
cfg.should().atomic_mul('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 16.0})
cfg.should().atomic_mul('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 32.0})
cfg.should().atomic_mul('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 64.0})
cfg.should().atomic_mul('kv','k',{'v': 7.205759403792794e+16}).equals(True)
cfg.should().get('kv','k').equals({'v': 4.611686018427388e+18})
# Test div
cfg.must().put('kv','k',{'v': 4.611686018427388e+18}).equals(True)
cfg.should().get('kv','k').equals({'v': 4.611686018427388e+18})
cfg.should().atomic_div('kv','k',{'v': 7.205759403792794e+16}).equals(True)
cfg.should().get('kv','k').equals({'v': 64.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 32.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 16.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 8.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 4.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 2.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 1.0})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.5})
cfg.should().atomic_div('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 0.25})
cfg.must().put('kv','k',{'v': 1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': 1.0})
cfg.should().atomic_div('kv','k',{'v': 0.0}).equals(True)
# Cleanup
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_200_datatype_list_string(cfg):
# space kv dimensions k, v (list(string)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes list(string) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{'v': ['100', '200', '300']}).equals(True)
cfg.should().get('kv','k').equals({'v': ['100', '200', '300']})
cfg.must().put('kv','k',{'v': []}).equals(True)
cfg.should().get('kv','k').equals({'v': []})
cfg.should().list_lpush('kv','k',{'v': '5'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['5']})
cfg.should().list_rpush('kv','k',{'v': '6'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['5', '6']})
cfg.should().list_lpush('kv','k',{'v': '4'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['4', '5', '6']})
cfg.should().list_rpush('kv','k',{'v': '7'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['4', '5', '6', '7']})
cfg.should().list_lpush('kv','k',{'v': '3'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['3', '4', '5', '6', '7']})
cfg.should().list_rpush('kv','k',{'v': '8'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['3', '4', '5', '6', '7', '8']})
cfg.should().list_lpush('kv','k',{'v': '2'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['2', '3', '4', '5', '6', '7', '8']})
cfg.should().list_rpush('kv','k',{'v': '9'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['2', '3', '4', '5', '6', '7', '8', '9']})
cfg.should().list_lpush('kv','k',{'v': '1'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['1', '2', '3', '4', '5', '6', '7', '8', '9']})
cfg.should().list_rpush('kv','k',{'v': '0'}).equals(True)
cfg.should().get('kv','k').equals({'v': ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0']})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_210_datatype_list_int64(cfg):
# space kv dimensions k, v (list(int64)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes list(int64) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{'v': [100, 200, 300]}).equals(True)
cfg.should().get('kv','k').equals({'v': [100, 200, 300]})
cfg.must().put('kv','k',{'v': []}).equals(True)
cfg.should().get('kv','k').equals({'v': []})
cfg.should().list_lpush('kv','k',{'v': 5}).equals(True)
cfg.should().get('kv','k').equals({'v': [5]})
cfg.should().list_rpush('kv','k',{'v': 6}).equals(True)
cfg.should().get('kv','k').equals({'v': [5, 6]})
cfg.should().list_lpush('kv','k',{'v': 4}).equals(True)
cfg.should().get('kv','k').equals({'v': [4, 5, 6]})
cfg.should().list_rpush('kv','k',{'v': 7}).equals(True)
cfg.should().get('kv','k').equals({'v': [4, 5, 6, 7]})
cfg.should().list_lpush('kv','k',{'v': 3}).equals(True)
cfg.should().get('kv','k').equals({'v': [3, 4, 5, 6, 7]})
cfg.should().list_rpush('kv','k',{'v': 8}).equals(True)
cfg.should().get('kv','k').equals({'v': [3, 4, 5, 6, 7, 8]})
cfg.should().list_lpush('kv','k',{'v': 2}).equals(True)
cfg.should().get('kv','k').equals({'v': [2, 3, 4, 5, 6, 7, 8]})
cfg.should().list_rpush('kv','k',{'v': 9}).equals(True)
cfg.should().get('kv','k').equals({'v': [2, 3, 4, 5, 6, 7, 8, 9]})
cfg.should().list_lpush('kv','k',{'v': 1}).equals(True)
cfg.should().get('kv','k').equals({'v': [1, 2, 3, 4, 5, 6, 7, 8, 9]})
cfg.should().list_rpush('kv','k',{'v': 0}).equals(True)
cfg.should().get('kv','k').equals({'v': [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_220_datatype_list_float(cfg):
# space kv dimensions k, v (list(float)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes list(float) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{'v': [100.0, 200.0, 300.0]}).equals(True)
cfg.should().get('kv','k').equals({'v': [100.0, 200.0, 300.0]})
cfg.must().put('kv','k',{'v': []}).equals(True)
cfg.should().get('kv','k').equals({'v': []})
cfg.should().list_lpush('kv','k',{'v': 5.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [5.0]})
cfg.should().list_rpush('kv','k',{'v': 6.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [5.0, 6.0]})
cfg.should().list_lpush('kv','k',{'v': 4.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [4.0, 5.0, 6.0]})
cfg.should().list_rpush('kv','k',{'v': 7.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [4.0, 5.0, 6.0, 7.0]})
cfg.should().list_lpush('kv','k',{'v': 3.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [3.0, 4.0, 5.0, 6.0, 7.0]})
cfg.should().list_rpush('kv','k',{'v': 8.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [3.0, 4.0, 5.0, 6.0, 7.0, 8.0]})
cfg.should().list_lpush('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0]})
cfg.should().list_rpush('kv','k',{'v': 9.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]})
cfg.should().list_lpush('kv','k',{'v': 1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]})
cfg.should().list_rpush('kv','k',{'v': 0.0}).equals(True)
cfg.should().get('kv','k').equals({'v': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 0.0]})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_300_datatype_set_string(cfg):
# space kv dimensions k, v (set(string)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes set(string) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{}).equals(True)
cfg.must().set_add('kv','k',{'v': '4'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['4'])})
cfg.must().set_add('kv','k',{'v': '3'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['3', '4'])})
cfg.must().set_add('kv','k',{'v': '7'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['3', '4', '7'])})
cfg.must().set_add('kv','k',{'v': '5'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['3', '5', '4', '7'])})
cfg.must().set_add('kv','k',{'v': '2'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['3', '2', '5', '4', '7'])})
cfg.must().set_add('kv','k',{'v': '8'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['3', '2', '5', '4', '7', '8'])})
cfg.must().set_add('kv','k',{'v': '6'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['3', '2', '5', '4', '7', '6', '8'])})
cfg.must().set_add('kv','k',{'v': '1'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['1', '3', '2', '5', '4', '7', '6', '8'])})
cfg.must().set_add('kv','k',{'v': '9'}).equals(True)
cfg.should().get('kv','k').equals({'v': set(['1', '3', '2', '5', '4', '7', '6', '9', '8'])})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_310_datatype_set_int64(cfg):
# space kv dimensions k, v (set(int64)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes set(int64) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{}).equals(True)
cfg.must().set_add('kv','k',{'v': 4}).equals(True)
cfg.should().get('kv','k').equals({'v': set([4])})
cfg.must().set_add('kv','k',{'v': 3}).equals(True)
cfg.should().get('kv','k').equals({'v': set([3, 4])})
cfg.must().set_add('kv','k',{'v': 7}).equals(True)
cfg.should().get('kv','k').equals({'v': set([3, 4, 7])})
cfg.must().set_add('kv','k',{'v': 5}).equals(True)
cfg.should().get('kv','k').equals({'v': set([3, 4, 5, 7])})
cfg.must().set_add('kv','k',{'v': 2}).equals(True)
cfg.should().get('kv','k').equals({'v': set([2, 3, 4, 5, 7])})
cfg.must().set_add('kv','k',{'v': 8}).equals(True)
cfg.should().get('kv','k').equals({'v': set([2, 3, 4, 5, 7, 8])})
cfg.must().set_add('kv','k',{'v': 6}).equals(True)
cfg.should().get('kv','k').equals({'v': set([2, 3, 4, 5, 6, 7, 8])})
cfg.must().set_add('kv','k',{'v': 1}).equals(True)
cfg.should().get('kv','k').equals({'v': set([1, 2, 3, 4, 5, 6, 7, 8])})
cfg.must().set_add('kv','k',{'v': 9}).equals(True)
cfg.should().get('kv','k').equals({'v': set([1, 2, 3, 4, 5, 6, 7, 8, 9])})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_320_datatype_set_float(cfg):
# space kv dimensions k, v (set(float)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes set(float) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{}).equals(True)
cfg.must().set_add('kv','k',{'v': 4.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([4.0])})
cfg.must().set_add('kv','k',{'v': 3.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([3.0, 4.0])})
cfg.must().set_add('kv','k',{'v': 7.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([3.0, 4.0, 7.0])})
cfg.must().set_add('kv','k',{'v': 5.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([3.0, 4.0, 5.0, 7.0])})
cfg.must().set_add('kv','k',{'v': 2.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([2.0, 3.0, 4.0, 5.0, 7.0])})
cfg.must().set_add('kv','k',{'v': 8.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([2.0, 3.0, 4.0, 5.0, 7.0, 8.0])})
cfg.must().set_add('kv','k',{'v': 6.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])})
cfg.must().set_add('kv','k',{'v': 1.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])})
cfg.must().set_add('kv','k',{'v': 9.0}).equals(True)
cfg.should().get('kv','k').equals({'v': set([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0])})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
def test_410_datatype_map_string_string_microops(cfg):
# space kv dimensions k, v (map(string,string)) key k auto 0 1
cfg.should().rm_space('kv')
cfg.must().add_space('''
space kv
key k
attributes map(string,string) v
''')
cfg.should().get('kv','k').equals(None)
cfg.must().put('kv','k',{'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*'}})
cfg.must().put('kv','k',{'v': {}}).equals(True)
cfg.should().get('kv','k').equals({'v': {}})
cfg.must().put('kv','k',{'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*'}})
cfg.must().map_string_prepend('kv','k',{'v': {'KEY': '5'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '5'}})
cfg.must().map_string_append('kv','k',{'v': {'KEY': '6'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '56'}})
cfg.must().map_string_prepend('kv','k',{'v': {'KEY': '4'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '456'}})
cfg.must().map_string_append('kv','k',{'v': {'KEY': '7'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '4567'}})
cfg.must().map_string_prepend('kv','k',{'v': {'KEY': '3'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '34567'}})
cfg.must().map_string_append('kv','k',{'v': {'KEY': '8'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '345678'}})
cfg.must().map_string_prepend('kv','k',{'v': {'KEY': '2'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '2345678'}})
cfg.must().map_string_append('kv','k',{'v': {'KEY': '9'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '23456789'}})
cfg.must().map_string_prepend('kv','k',{'v': {'KEY': '1'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '123456789'}})
cfg.must().map_string_append('kv','k',{'v': {'KEY': '0'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '1234567890'}})
cfg.must().map_string_append('kv','k',{'v': {'1': 'nnn', '100': 'xzvwe', '50': '234*', 'KEY': '1234567890'}}).equals(True)
cfg.should().get('kv','k').equals({'v': {'1': 'nnnnnn', '100': 'xzvwexzvwe', '50': '234*234*', 'KEY': '12345678901234567890'}})
cfg.must().delete('kv','k').equals(True)
cfg.should().get('kv','k').equals(None)
|
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import six
import time
from taskflow.persistence.backends import impl_memory
from taskflow import task
ARGS_KEY = '__args__'
KWARGS_KEY = '__kwargs__'
ORDER_KEY = '__order__'
def make_reverting_task(token, blowup=False):
def do_revert(context, *args, **kwargs):
context[token] = 'reverted'
if blowup:
def blow_up(context, *args, **kwargs):
raise Exception("I blew up")
return task.FunctorTask(blow_up, name='blowup_%s' % token)
else:
def do_apply(context, *args, **kwargs):
context[token] = 'passed'
return task.FunctorTask(do_apply, revert=do_revert,
name='do_apply_%s' % token)
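# A minimal usage sketch (the flow wiring is illustrative and not part of this
# module): chain a normal reverting task with a blowup task and inspect the
# shared context afterwards.
#
#     context = {}
#     tasks = [make_reverting_task('a'), make_reverting_task('b', blowup=True)]
#     # after running these in a linear flow, context['a'] == 'reverted'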
class DummyTask(task.Task):
def execute(self, context, *args, **kwargs):
pass
if six.PY3:
RUNTIME_ERROR_CLASSES = ['RuntimeError', 'Exception',
'BaseException', 'object']
else:
RUNTIME_ERROR_CLASSES = ['RuntimeError', 'StandardError', 'Exception',
'BaseException', 'object']
class ProvidesRequiresTask(task.Task):
def __init__(self, name, provides, requires, return_tuple=True):
super(ProvidesRequiresTask, self).__init__(name=name,
provides=provides,
requires=requires)
self.return_tuple = isinstance(provides, (tuple, list))
def execute(self, *args, **kwargs):
if self.return_tuple:
return tuple(range(len(self.provides)))
else:
return dict((k, k) for k in self.provides)
class SaveOrderTask(task.Task):
def __init__(self, values=None, name=None, sleep=None,
*args, **kwargs):
super(SaveOrderTask, self).__init__(name=name, *args, **kwargs)
if values is None:
self.values = []
else:
self.values = values
self._sleep = sleep
def execute(self, **kwargs):
self.update_progress(0.0)
if self._sleep:
time.sleep(self._sleep)
self.values.append(self.name)
self.update_progress(1.0)
return 5
def revert(self, **kwargs):
self.update_progress(0)
if self._sleep:
time.sleep(self._sleep)
self.values.append(self.name + ' reverted(%s)'
% kwargs.get('result'))
self.update_progress(1.0)
class FailingTask(SaveOrderTask):
def execute(self, **kwargs):
self.update_progress(0)
if self._sleep:
time.sleep(self._sleep)
self.update_progress(0.99)
raise RuntimeError('Woot!')
class NastyTask(task.Task):
def execute(self, **kwargs):
pass
def revert(self, **kwargs):
raise RuntimeError('Gotcha!')
class TaskNoRequiresNoReturns(task.Task):
def execute(self, **kwargs):
pass
def revert(self, **kwargs):
pass
class TaskOneArg(task.Task):
def execute(self, x, **kwargs):
pass
def revert(self, x, **kwargs):
pass
class TaskMultiArg(task.Task):
def execute(self, x, y, z, **kwargs):
pass
def revert(self, x, y, z, **kwargs):
pass
class TaskOneReturn(task.Task):
def execute(self, **kwargs):
return 1
def revert(self, **kwargs):
pass
class TaskMultiReturn(task.Task):
def execute(self, **kwargs):
return 1, 3, 5
def revert(self, **kwargs):
pass
class TaskOneArgOneReturn(task.Task):
def execute(self, x, **kwargs):
return 1
def revert(self, x, **kwargs):
pass
class TaskMultiArgOneReturn(task.Task):
def execute(self, x, y, z, **kwargs):
return x + y + z
def revert(self, x, y, z, **kwargs):
pass
class TaskMultiArgMultiReturn(task.Task):
def execute(self, x, y, z, **kwargs):
return 1, 3, 5
def revert(self, x, y, z, **kwargs):
pass
class TaskMultiDictk(task.Task):
def execute(self):
output = {}
for i, k in enumerate(sorted(self.provides)):
output[k] = i
return output
class NeverRunningTask(task.Task):
def execute(self, **kwargs):
assert False, 'This method should not be called'
def revert(self, **kwargs):
assert False, 'This method should not be called'
class EngineTestBase(object):
def setUp(self):
super(EngineTestBase, self).setUp()
self.values = []
self.backend = impl_memory.MemoryBackend(conf={})
def tearDown(self):
super(EngineTestBase, self).tearDown()
with contextlib.closing(self.backend) as be:
with contextlib.closing(be.get_connection()) as conn:
conn.clear_all()
def _make_engine(self, flow, flow_detail=None):
raise NotImplementedError()
|
|
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from oslo_config import cfg
import six.moves.urllib.parse as urlparse
import webob
from webob import exc
import webtest
from neutron.api import api_common
from neutron.api import extensions
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.api.v2 import base as v2_base
from neutron.api.v2 import router
from neutron.common import exceptions as n_exc
from neutron import context
from neutron import manager
from neutron.openstack.common import policy as common_policy
from neutron.openstack.common import uuidutils
from neutron import policy
from neutron import quota
from neutron.tests import base
from neutron.tests import fake_notifier
from neutron.tests.unit import testlib_api
EXTDIR = os.path.join(base.ROOTDIR, 'unit/extensions')
_uuid = uuidutils.generate_uuid
def _get_path(resource, id=None, action=None, fmt=None):
path = '/%s' % resource
if id is not None:
path = path + '/%s' % id
if action is not None:
path = path + '/%s' % action
if fmt is not None:
path = path + '.%s' % fmt
return path
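# For example, _get_path('networks', id=_uuid(), action='dhcp', fmt='json')
# returns '/networks/<that uuid>/dhcp.json'.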
class ResourceIndexTestCase(base.BaseTestCase):
def test_index_json(self):
index = webtest.TestApp(router.Index({'foo': 'bar'}))
res = index.get('')
self.assertIn('resources', res.json)
self.assertEqual(len(res.json['resources']), 1)
resource = res.json['resources'][0]
self.assertIn('collection', resource)
self.assertEqual(resource['collection'], 'bar')
self.assertIn('name', resource)
self.assertEqual(resource['name'], 'foo')
self.assertIn('links', resource)
self.assertEqual(len(resource['links']), 1)
link = resource['links'][0]
self.assertIn('href', link)
self.assertEqual(link['href'], 'http://localhost/bar')
self.assertIn('rel', link)
self.assertEqual(link['rel'], 'self')
class APIv2TestBase(base.BaseTestCase):
def setUp(self):
super(APIv2TestBase, self).setUp()
plugin = 'neutron.neutron_plugin_base_v2.NeutronPluginBaseV2'
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
# Create the default configurations
self.config_parse()
# Update the plugin
self.setup_coreplugin(plugin)
cfg.CONF.set_override('allow_pagination', True)
cfg.CONF.set_override('allow_sorting', True)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = True
instance._NeutronPluginBaseV2__native_sorting_support = True
api = router.APIRouter()
self.api = webtest.TestApp(api)
quota.QUOTAS._driver = None
cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver',
group='QUOTAS')
        # APIRouter initialization resets the policy module, so re-initialize it
policy.init()
class _ArgMatcher(object):
"""An adapter to assist mock assertions, used to custom compare."""
def __init__(self, cmp, obj):
self.cmp = cmp
self.obj = obj
def __eq__(self, other):
return self.cmp(self.obj, other)
def _list_cmp(l1, l2):
return set(l1) == set(l2)
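# Together these allow order-insensitive list comparison inside mock
# assertions, e.g. (illustrative):
#
#     mocked.assert_called_with(fields=_ArgMatcher(_list_cmp, ['id', 'name']))
#
# which matches any call whose 'fields' argument has exactly those elements,
# regardless of order.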
class APIv2TestCase(APIv2TestBase):
def _do_field_list(self, resource, base_fields):
attr_info = attributes.RESOURCE_ATTRIBUTE_MAP[resource]
policy_attrs = [name for (name, info) in attr_info.items()
if info.get('required_by_policy')]
for name, info in attr_info.items():
if info.get('primary_key'):
policy_attrs.append(name)
fields = base_fields
fields.extend(policy_attrs)
return fields
def _get_collection_kwargs(self, skipargs=[], **kwargs):
args_list = ['filters', 'fields', 'sorts', 'limit', 'marker',
'page_reverse']
args_dict = dict(
(arg, mock.ANY) for arg in set(args_list) - set(skipargs))
args_dict.update(kwargs)
return args_dict
def test_fields(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'fields': 'foo'})
fields = self._do_field_list('networks', ['foo'])
kwargs = self._get_collection_kwargs(fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_multiple(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
fields = self._do_field_list('networks', ['foo', 'bar'])
self.api.get(_get_path('networks'), {'fields': ['foo', 'bar']})
kwargs = self._get_collection_kwargs(fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_multiple_with_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
fields = self._do_field_list('networks', ['foo'])
self.api.get(_get_path('networks'), {'fields': ['foo', '']})
kwargs = self._get_collection_kwargs(fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'fields': ''})
kwargs = self._get_collection_kwargs(fields=[])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_multiple_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'fields': ['', '']})
kwargs = self._get_collection_kwargs(fields=[])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': 'bar'})
filters = {'name': ['bar']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ''})
filters = {}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ['', '']})
filters = {}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple_with_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ['bar', '']})
filters = {'name': ['bar']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple_values(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ['bar', 'bar2']})
filters = {'name': ['bar', 'bar2']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': 'bar',
'tenant_id': 'bar2'})
filters = {'name': ['bar'], 'tenant_id': ['bar2']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_with_fields(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': 'bar', 'fields': 'foo'})
filters = {'name': ['bar']}
fields = self._do_field_list('networks', ['foo'])
kwargs = self._get_collection_kwargs(filters=filters, fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_with_convert_to(self):
instance = self.plugin.return_value
instance.get_ports.return_value = []
self.api.get(_get_path('ports'), {'admin_state_up': 'true'})
filters = {'admin_state_up': [True]}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_ports.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_with_convert_list_to(self):
instance = self.plugin.return_value
instance.get_ports.return_value = []
self.api.get(_get_path('ports'),
{'fixed_ips': ['ip_address=foo', 'subnet_id=bar']})
filters = {'fixed_ips': {'ip_address': ['foo'], 'subnet_id': ['bar']}}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_ports.assert_called_once_with(mock.ANY, **kwargs)
def test_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'limit': '10'})
kwargs = self._get_collection_kwargs(limit=10)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
    def test_limit_with_greater_than_max_limit(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'limit': '1001'})
kwargs = self._get_collection_kwargs(limit=1000)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_zero(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'limit': '0'})
kwargs = self._get_collection_kwargs(limit=1000)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_unspecific(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=1000)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_negative_value(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'), {'limit': -1},
expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_limit_with_non_integer(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'),
{'limit': 'abc'}, expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_limit_with_infinite_pagination_max_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
cfg.CONF.set_override('pagination_max_limit', 'Infinite')
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=None)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_negative_pagination_max_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
cfg.CONF.set_default('pagination_max_limit', '-1')
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=None)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_non_integer_pagination_max_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
cfg.CONF.set_default('pagination_max_limit', 'abc')
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=None)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_marker(self):
cfg.CONF.set_override('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
marker = _uuid()
self.api.get(_get_path('networks'),
{'marker': marker})
kwargs = self._get_collection_kwargs(limit=1000, marker=marker)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_page_reverse(self):
calls = []
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'page_reverse': 'True'})
kwargs = self._get_collection_kwargs(page_reverse=True)
calls.append(mock.call.get_networks(mock.ANY, **kwargs))
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
instance.get_networks.reset_mock()
self.api.get(_get_path('networks'),
{'page_reverse': 'False'})
kwargs = self._get_collection_kwargs(page_reverse=False)
calls.append(mock.call.get_networks(mock.ANY, **kwargs))
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_page_reverse_with_non_bool(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'page_reverse': 'abc'})
kwargs = self._get_collection_kwargs(page_reverse=False)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_page_reverse_with_unspecific(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(page_reverse=False)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_sort(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'sort_key': ['name', 'admin_state_up'],
'sort_dir': ['desc', 'asc']})
kwargs = self._get_collection_kwargs(sorts=[('name', False),
('admin_state_up', True),
('id', True)])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_sort_with_primary_key(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'sort_key': ['name', 'admin_state_up', 'id'],
'sort_dir': ['desc', 'asc', 'desc']})
kwargs = self._get_collection_kwargs(sorts=[('name', False),
('admin_state_up', True),
('id', False)])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_sort_without_direction(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'), {'sort_key': ['name']},
expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_sort_with_invalid_attribute(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'),
{'sort_key': 'abc',
'sort_dir': 'asc'},
expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_sort_with_invalid_dirs(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'),
{'sort_key': 'name',
'sort_dir': 'abc'},
expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_emulated_sort(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = False
instance._NeutronPluginBaseV2__native_sorting_support = False
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'), {'sort_key': ['name', 'status'],
'sort_dir': ['desc', 'asc']})
kwargs = self._get_collection_kwargs(
skipargs=['sorts', 'limit', 'marker', 'page_reverse'])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_emulated_sort_without_sort_field(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = False
instance._NeutronPluginBaseV2__native_sorting_support = False
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'), {'sort_key': ['name', 'status'],
'sort_dir': ['desc', 'asc'],
'fields': ['subnets']})
kwargs = self._get_collection_kwargs(
skipargs=['sorts', 'limit', 'marker', 'page_reverse'],
fields=_ArgMatcher(_list_cmp, ['name',
'status',
'id',
'subnets',
'shared',
'tenant_id']))
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_emulated_pagination(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = False
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'), {'limit': 10,
'marker': 'foo',
'page_reverse': False})
kwargs = self._get_collection_kwargs(skipargs=['limit',
'marker',
'page_reverse'])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_native_pagination_without_native_sorting(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_sorting_support = False
self.assertRaises(n_exc.Invalid, router.APIRouter)
def test_native_pagination_without_allow_sorting(self):
cfg.CONF.set_override('allow_sorting', False)
instance = self.plugin.return_value
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'),
{'sort_key': ['name', 'admin_state_up'],
'sort_dir': ['desc', 'asc']})
kwargs = self._get_collection_kwargs(sorts=[('name', False),
('admin_state_up', True),
('id', True)])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
# Note: since all resources use the same controller and validation
# logic, we actually get really good coverage from testing just networks.
class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
def _test_list(self, req_tenant_id, real_tenant_id):
env = {}
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
input_dict = {'id': uuidutils.generate_uuid(),
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': real_tenant_id,
'shared': False,
'subnets': []}
return_value = [input_dict]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
res = self.api.get(_get_path('networks',
fmt=self.fmt), extra_environ=env)
res = self.deserialize(res)
self.assertIn('networks', res)
if not req_tenant_id or req_tenant_id == real_tenant_id:
# expect full list returned
self.assertEqual(len(res['networks']), 1)
output_dict = res['networks'][0]
input_dict['shared'] = False
self.assertEqual(len(input_dict), len(output_dict))
for k, v in input_dict.iteritems():
self.assertEqual(v, output_dict[k])
else:
# expect no results
self.assertEqual(len(res['networks']), 0)
def test_list_noauth(self):
self._test_list(None, _uuid())
def test_list_keystone(self):
tenant_id = _uuid()
self._test_list(tenant_id, tenant_id)
def test_list_keystone_bad(self):
tenant_id = _uuid()
self._test_list(tenant_id + "bad", tenant_id)
def test_list_pagination(self):
id1 = str(_uuid())
id2 = str(_uuid())
input_dict1 = {'id': id1,
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
input_dict2 = {'id': id2,
'name': 'net2',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
return_value = [input_dict1, input_dict2]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': [str(_uuid())],
'sort_key': ['name'],
'sort_dir': ['asc']}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(len(res['networks']), 2)
self.assertEqual(sorted([id1, id2]),
sorted([res['networks'][0]['id'],
res['networks'][1]['id']]))
self.assertIn('networks_links', res)
next_links = []
previous_links = []
for r in res['networks_links']:
if r['rel'] == 'next':
next_links.append(r)
if r['rel'] == 'previous':
previous_links.append(r)
self.assertEqual(len(next_links), 1)
self.assertEqual(len(previous_links), 1)
url = urlparse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
params['marker'] = [id2]
self.assertEqual(urlparse.parse_qs(url.query), params)
url = urlparse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
params['marker'] = [id1]
params['page_reverse'] = ['True']
self.assertEqual(urlparse.parse_qs(url.query), params)
def test_list_pagination_with_last_page(self):
id = str(_uuid())
input_dict = {'id': id,
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
return_value = [input_dict]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': str(_uuid())}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(len(res['networks']), 1)
self.assertEqual(id, res['networks'][0]['id'])
self.assertIn('networks_links', res)
previous_links = []
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'next')
if r['rel'] == 'previous':
previous_links.append(r)
self.assertEqual(len(previous_links), 1)
url = urlparse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
expect_params['marker'] = [id]
expect_params['page_reverse'] = ['True']
self.assertEqual(urlparse.parse_qs(url.query), expect_params)
def test_list_pagination_with_empty_page(self):
return_value = []
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': str(_uuid())}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(res['networks'], [])
previous_links = []
if 'networks_links' in res:
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'next')
if r['rel'] == 'previous':
previous_links.append(r)
self.assertEqual(len(previous_links), 1)
url = urlparse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
del expect_params['marker']
expect_params['page_reverse'] = ['True']
self.assertEqual(urlparse.parse_qs(url.query), expect_params)
def test_list_pagination_reverse_with_last_page(self):
id = str(_uuid())
input_dict = {'id': id,
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
return_value = [input_dict]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': [str(_uuid())],
'page_reverse': ['True']}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(len(res['networks']), 1)
self.assertEqual(id, res['networks'][0]['id'])
self.assertIn('networks_links', res)
next_links = []
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'previous')
if r['rel'] == 'next':
next_links.append(r)
self.assertEqual(len(next_links), 1)
url = urlparse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expected_params = params.copy()
del expected_params['page_reverse']
expected_params['marker'] = [id]
self.assertEqual(urlparse.parse_qs(url.query),
expected_params)
def test_list_pagination_reverse_with_empty_page(self):
return_value = []
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': [str(_uuid())],
'page_reverse': ['True']}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(res['networks'], [])
next_links = []
if 'networks_links' in res:
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'previous')
if r['rel'] == 'next':
next_links.append(r)
self.assertEqual(len(next_links), 1)
url = urlparse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
del expect_params['marker']
del expect_params['page_reverse']
self.assertEqual(urlparse.parse_qs(url.query), expect_params)
def test_create(self):
net_id = _uuid()
data = {'network': {'name': 'net1', 'admin_state_up': True,
'tenant_id': _uuid()}}
return_value = {'subnets': [], 'status': "ACTIVE",
'id': net_id}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
res = self.deserialize(res)
self.assertIn('network', res)
net = res['network']
self.assertEqual(net['id'], net_id)
self.assertEqual(net['status'], "ACTIVE")
def test_create_use_defaults(self):
net_id = _uuid()
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
full_input = {'network': {'admin_state_up': True,
'shared': False}}
full_input['network'].update(initial_input['network'])
return_value = {'id': net_id, 'status': "ACTIVE"}
return_value.update(full_input['network'])
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(initial_input),
content_type='application/' + self.fmt)
instance.create_network.assert_called_with(mock.ANY,
network=full_input)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
res = self.deserialize(res)
self.assertIn('network', res)
net = res['network']
self.assertEqual(net['id'], net_id)
self.assertEqual(net['admin_state_up'], True)
self.assertEqual(net['status'], "ACTIVE")
def test_create_no_keystone_env(self):
data = {'name': 'net1'}
self._test_create_failure_bad_request('networks', data)
def test_create_with_keystone_env(self):
tenant_id = _uuid()
net_id = _uuid()
env = {'neutron.context': context.Context('', tenant_id)}
# tenant_id should be fetched from env
initial_input = {'network': {'name': 'net1'}}
full_input = {'network': {'admin_state_up': True,
'shared': False, 'tenant_id': tenant_id}}
full_input['network'].update(initial_input['network'])
return_value = {'id': net_id, 'status': "ACTIVE"}
return_value.update(full_input['network'])
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(initial_input),
content_type='application/' + self.fmt,
extra_environ=env)
instance.create_network.assert_called_with(mock.ANY,
network=full_input)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
def test_create_bad_keystone_tenant(self):
tenant_id = _uuid()
data = {'network': {'name': 'net1', 'tenant_id': tenant_id}}
env = {'neutron.context': context.Context('', tenant_id + "bad")}
self._test_create_failure_bad_request('networks', data,
extra_environ=env)
def test_create_no_body(self):
data = {'whoa': None}
self._test_create_failure_bad_request('networks', data)
def test_create_no_resource(self):
data = {}
self._test_create_failure_bad_request('networks', data)
def test_create_missing_attr(self):
data = {'port': {'what': 'who', 'tenant_id': _uuid()}}
self._test_create_failure_bad_request('ports', data)
def test_create_readonly_attr(self):
data = {'network': {'name': 'net1', 'tenant_id': _uuid(),
'status': "ACTIVE"}}
self._test_create_failure_bad_request('networks', data)
def test_create_with_too_long_name(self):
data = {'network': {'name': "12345678" * 32,
'admin_state_up': True,
'tenant_id': _uuid()}}
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_create_bulk(self):
data = {'networks': [{'name': 'net1',
'admin_state_up': True,
'tenant_id': _uuid()},
{'name': 'net2',
'admin_state_up': True,
'tenant_id': _uuid()}]}
def side_effect(context, network):
net = network.copy()
net['network'].update({'subnets': []})
return net['network']
instance = self.plugin.return_value
instance.create_network.side_effect = side_effect
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
def _test_create_failure_bad_request(self, resource, data, **kwargs):
res = self.api.post(_get_path(resource, fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True, **kwargs)
self.assertEqual(res.status_int, exc.HTTPBadRequest.code)
def test_create_bulk_networks_none(self):
self._test_create_failure_bad_request('networks', {'networks': None})
def test_create_bulk_networks_empty_list(self):
self._test_create_failure_bad_request('networks', {'networks': []})
def test_create_bulk_missing_attr(self):
data = {'ports': [{'what': 'who', 'tenant_id': _uuid()}]}
self._test_create_failure_bad_request('ports', data)
def test_create_bulk_partial_body(self):
data = {'ports': [{'device_id': 'device_1',
'tenant_id': _uuid()},
{'tenant_id': _uuid()}]}
self._test_create_failure_bad_request('ports', data)
def test_create_attr_not_specified(self):
net_id = _uuid()
tenant_id = _uuid()
device_id = _uuid()
initial_input = {'port': {'name': '', 'network_id': net_id,
'tenant_id': tenant_id,
'device_id': device_id,
'admin_state_up': True}}
full_input = {'port': {'admin_state_up': True,
'mac_address': attributes.ATTR_NOT_SPECIFIED,
'fixed_ips': attributes.ATTR_NOT_SPECIFIED,
'device_owner': ''}}
full_input['port'].update(initial_input['port'])
return_value = {'id': _uuid(), 'status': 'ACTIVE',
'admin_state_up': True,
'mac_address': 'ca:fe:de:ad:be:ef',
'device_id': device_id,
'device_owner': ''}
return_value.update(initial_input['port'])
instance = self.plugin.return_value
instance.get_network.return_value = {'tenant_id': unicode(tenant_id)}
instance.get_ports_count.return_value = 1
instance.create_port.return_value = return_value
res = self.api.post(_get_path('ports', fmt=self.fmt),
self.serialize(initial_input),
content_type='application/' + self.fmt)
instance.create_port.assert_called_with(mock.ANY, port=full_input)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
res = self.deserialize(res)
self.assertIn('port', res)
port = res['port']
self.assertEqual(port['network_id'], net_id)
self.assertEqual(port['mac_address'], 'ca:fe:de:ad:be:ef')
def test_create_return_extra_attr(self):
net_id = _uuid()
data = {'network': {'name': 'net1', 'admin_state_up': True,
'tenant_id': _uuid()}}
return_value = {'subnets': [], 'status': "ACTIVE",
'id': net_id, 'v2attrs:something': "123"}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
res = self.deserialize(res)
self.assertIn('network', res)
net = res['network']
self.assertEqual(net['id'], net_id)
self.assertEqual(net['status'], "ACTIVE")
self.assertNotIn('v2attrs:something', net)
def test_fields(self):
return_value = {'name': 'net1', 'admin_state_up': True,
'subnets': []}
instance = self.plugin.return_value
instance.get_network.return_value = return_value
self.api.get(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt))
def _test_delete(self, req_tenant_id, real_tenant_id, expected_code,
expect_errors=False):
env = {}
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
instance = self.plugin.return_value
instance.get_network.return_value = {'tenant_id': real_tenant_id,
'shared': False}
instance.delete_network.return_value = None
res = self.api.delete(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt),
extra_environ=env,
expect_errors=expect_errors)
self.assertEqual(res.status_int, expected_code)
def test_delete_noauth(self):
self._test_delete(None, _uuid(), exc.HTTPNoContent.code)
def test_delete_keystone(self):
tenant_id = _uuid()
self._test_delete(tenant_id, tenant_id, exc.HTTPNoContent.code)
def test_delete_keystone_bad_tenant(self):
tenant_id = _uuid()
self._test_delete(tenant_id + "bad", tenant_id,
exc.HTTPNotFound.code, expect_errors=True)
def _test_get(self, req_tenant_id, real_tenant_id, expected_code,
expect_errors=False):
env = {}
shared = False
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
if req_tenant_id.endswith('another'):
shared = True
env['neutron.context'].roles = ['tenant_admin']
data = {'tenant_id': real_tenant_id, 'shared': shared}
instance = self.plugin.return_value
instance.get_network.return_value = data
res = self.api.get(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt),
extra_environ=env,
expect_errors=expect_errors)
self.assertEqual(res.status_int, expected_code)
return res
def test_get_noauth(self):
self._test_get(None, _uuid(), 200)
def test_get_keystone(self):
tenant_id = _uuid()
self._test_get(tenant_id, tenant_id, 200)
def test_get_keystone_bad_tenant(self):
tenant_id = _uuid()
self._test_get(tenant_id + "bad", tenant_id,
exc.HTTPNotFound.code, expect_errors=True)
def test_get_keystone_shared_network(self):
tenant_id = _uuid()
self._test_get(tenant_id + "another", tenant_id, 200)
def test_get_keystone_strip_admin_only_attribute(self):
tenant_id = _uuid()
# Inject rule in policy engine
rules = {'get_network:name': common_policy.parse_rule(
"rule:admin_only")}
policy.set_rules(rules, overwrite=False)
res = self._test_get(tenant_id, tenant_id, 200)
res = self.deserialize(res)
self.assertNotIn('name', res['network'])
def _test_update(self, req_tenant_id, real_tenant_id, expected_code,
expect_errors=False):
env = {}
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
# leave out 'name' field intentionally
data = {'network': {'admin_state_up': True}}
return_value = {'subnets': []}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.get_network.return_value = {'tenant_id': real_tenant_id,
'shared': False}
instance.update_network.return_value = return_value
res = self.api.put(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt),
self.serialize(data),
extra_environ=env,
expect_errors=expect_errors)
# Ensure id attribute is included in fields returned by GET call
# in update procedure.
self.assertEqual(1, instance.get_network.call_count)
self.assertIn('id', instance.get_network.call_args[1]['fields'])
self.assertEqual(res.status_int, expected_code)
def test_update_noauth(self):
self._test_update(None, _uuid(), 200)
def test_update_keystone(self):
tenant_id = _uuid()
self._test_update(tenant_id, tenant_id, 200)
def test_update_keystone_bad_tenant(self):
tenant_id = _uuid()
self._test_update(tenant_id + "bad", tenant_id,
exc.HTTPNotFound.code, expect_errors=True)
def test_update_readonly_field(self):
data = {'network': {'status': "NANANA"}}
res = self.api.put(_get_path('networks', id=_uuid()),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True)
self.assertEqual(res.status_int, 400)
def test_invalid_attribute_field(self):
data = {'network': {'invalid_key1': "foo1", 'invalid_key2': "foo2"}}
res = self.api.put(_get_path('networks', id=_uuid()),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True)
self.assertEqual(res.status_int, 400)
class SubresourceTest(base.BaseTestCase):
def setUp(self):
super(SubresourceTest, self).setUp()
plugin = 'neutron.tests.unit.api.v2.test_base.TestSubresourcePlugin'
extensions.PluginAwareExtensionManager._instance = None
# Save the global RESOURCE_ATTRIBUTE_MAP
self.saved_attr_map = {}
for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
self.saved_attr_map[resource] = attrs.copy()
self.config_parse()
self.setup_coreplugin(plugin)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
router.SUB_RESOURCES['dummy'] = {
'collection_name': 'dummies',
'parent': {'collection_name': 'networks',
'member_name': 'network'}
}
attributes.RESOURCE_ATTRIBUTE_MAP['dummies'] = {
'foo': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': '', 'is_visible': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True}
}
api = router.APIRouter()
self.api = webtest.TestApp(api)
def tearDown(self):
router.SUB_RESOURCES = {}
# Restore the global RESOURCE_ATTRIBUTE_MAP
attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
super(SubresourceTest, self).tearDown()
def test_index_sub_resource(self):
instance = self.plugin.return_value
self.api.get('/networks/id1/dummies')
instance.get_network_dummies.assert_called_once_with(mock.ANY,
filters=mock.ANY,
fields=mock.ANY,
network_id='id1')
def test_show_sub_resource(self):
instance = self.plugin.return_value
dummy_id = _uuid()
self.api.get('/networks/id1' + _get_path('dummies', id=dummy_id))
instance.get_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1',
fields=mock.ANY)
def test_create_sub_resource(self):
instance = self.plugin.return_value
body = {'dummy': {'foo': 'bar', 'tenant_id': _uuid()}}
self.api.post_json('/networks/id1/dummies', body)
instance.create_network_dummy.assert_called_once_with(mock.ANY,
network_id='id1',
dummy=body)
def test_update_sub_resource(self):
instance = self.plugin.return_value
dummy_id = _uuid()
body = {'dummy': {'foo': 'bar'}}
self.api.put_json('/networks/id1' + _get_path('dummies', id=dummy_id),
body)
instance.update_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1',
dummy=body)
def test_update_subresource_to_none(self):
instance = self.plugin.return_value
dummy_id = _uuid()
body = {'dummy': {}}
self.api.put_json('/networks/id1' + _get_path('dummies', id=dummy_id),
body)
instance.update_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1',
dummy=body)
def test_delete_sub_resource(self):
instance = self.plugin.return_value
dummy_id = _uuid()
self.api.delete('/networks/id1' + _get_path('dummies', id=dummy_id))
instance.delete_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1')
# Note: since all resources use the same controller and validation
# logic, we actually get really good coverage from testing just networks.
class V2Views(base.BaseTestCase):
def _view(self, keys, collection, resource):
data = dict((key, 'value') for key in keys)
data['fake'] = 'value'
attr_info = attributes.RESOURCE_ATTRIBUTE_MAP[collection]
controller = v2_base.Controller(None, collection, resource, attr_info)
res = controller._view(context.get_admin_context(), data)
self.assertNotIn('fake', res)
for key in keys:
self.assertIn(key, res)
def test_network(self):
keys = ('id', 'name', 'subnets', 'admin_state_up', 'status',
'tenant_id')
self._view(keys, 'networks', 'network')
def test_port(self):
keys = ('id', 'network_id', 'mac_address', 'fixed_ips',
'device_id', 'admin_state_up', 'tenant_id', 'status')
self._view(keys, 'ports', 'port')
def test_subnet(self):
keys = ('id', 'network_id', 'tenant_id', 'gateway_ip',
'ip_version', 'cidr', 'enable_dhcp')
self._view(keys, 'subnets', 'subnet')
class NotificationTest(APIv2TestBase):
def setUp(self):
super(NotificationTest, self).setUp()
fake_notifier.reset()
def _resource_op_notifier(self, opname, resource, expected_errors=False):
initial_input = {resource: {'name': 'myname'}}
instance = self.plugin.return_value
instance.get_networks.return_value = initial_input
instance.get_networks_count.return_value = 0
expected_code = exc.HTTPCreated.code
if opname == 'create':
initial_input[resource]['tenant_id'] = _uuid()
res = self.api.post_json(
_get_path('networks'),
initial_input, expect_errors=expected_errors)
if opname == 'update':
res = self.api.put_json(
_get_path('networks', id=_uuid()),
initial_input, expect_errors=expected_errors)
expected_code = exc.HTTPOk.code
if opname == 'delete':
initial_input[resource]['tenant_id'] = _uuid()
res = self.api.delete(
_get_path('networks', id=_uuid()),
expect_errors=expected_errors)
expected_code = exc.HTTPNoContent.code
expected_events = ('.'.join([resource, opname, "start"]),
'.'.join([resource, opname, "end"]))
self.assertEqual(len(fake_notifier.NOTIFICATIONS),
len(expected_events))
for msg, event in zip(fake_notifier.NOTIFICATIONS, expected_events):
self.assertEqual('INFO', msg['priority'])
self.assertEqual(event, msg['event_type'])
self.assertEqual(res.status_int, expected_code)
    def test_network_create_notifier(self):
        self._resource_op_notifier('create', 'network')
    def test_network_delete_notifier(self):
        self._resource_op_notifier('delete', 'network')
    def test_network_update_notifier(self):
        self._resource_op_notifier('update', 'network')
class DHCPNotificationTest(APIv2TestBase):
def _test_dhcp_notifier(self, opname, resource, initial_input=None):
instance = self.plugin.return_value
instance.get_networks.return_value = initial_input
instance.get_networks_count.return_value = 0
expected_code = exc.HTTPCreated.code
with mock.patch.object(dhcp_rpc_agent_api.DhcpAgentNotifyAPI,
'notify') as dhcp_notifier:
if opname == 'create':
res = self.api.post_json(
_get_path('networks'),
initial_input)
if opname == 'update':
res = self.api.put_json(
_get_path('networks', id=_uuid()),
initial_input)
expected_code = exc.HTTPOk.code
if opname == 'delete':
res = self.api.delete(_get_path('networks', id=_uuid()))
expected_code = exc.HTTPNoContent.code
expected_item = mock.call(mock.ANY, mock.ANY,
resource + "." + opname + ".end")
if initial_input and resource not in initial_input:
resource += 's'
num = len(initial_input[resource]) if initial_input and isinstance(
initial_input[resource], list) else 1
expected = [expected_item for x in xrange(num)]
self.assertEqual(expected, dhcp_notifier.call_args_list)
self.assertEqual(num, dhcp_notifier.call_count)
self.assertEqual(expected_code, res.status_int)
    def test_network_create_dhcp_notifier(self):
input = {'network': {'name': 'net',
'tenant_id': _uuid()}}
self._test_dhcp_notifier('create', 'network', input)
    def test_network_delete_dhcp_notifier(self):
self._test_dhcp_notifier('delete', 'network')
    def test_network_update_dhcp_notifier(self):
input = {'network': {'name': 'net'}}
self._test_dhcp_notifier('update', 'network', input)
    def test_networks_create_bulk_dhcp_notifier(self):
input = {'networks': [{'name': 'net1',
'tenant_id': _uuid()},
{'name': 'net2',
'tenant_id': _uuid()}]}
self._test_dhcp_notifier('create', 'network', input)
class QuotaTest(APIv2TestBase):
def test_create_network_quota(self):
cfg.CONF.set_override('quota_network', 1, group='QUOTAS')
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
full_input = {'network': {'admin_state_up': True, 'subnets': []}}
full_input['network'].update(initial_input['network'])
instance = self.plugin.return_value
instance.get_networks_count.return_value = 1
res = self.api.post_json(
_get_path('networks'), initial_input, expect_errors=True)
instance.get_networks_count.assert_called_with(mock.ANY,
filters=mock.ANY)
self.assertIn("Quota exceeded for resources",
res.json['NeutronError']['message'])
def test_create_network_quota_no_counts(self):
cfg.CONF.set_override('quota_network', 1, group='QUOTAS')
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
full_input = {'network': {'admin_state_up': True, 'subnets': []}}
full_input['network'].update(initial_input['network'])
instance = self.plugin.return_value
instance.get_networks_count.side_effect = (
NotImplementedError())
instance.get_networks.return_value = ["foo"]
res = self.api.post_json(
_get_path('networks'), initial_input, expect_errors=True)
instance.get_networks_count.assert_called_with(mock.ANY,
filters=mock.ANY)
self.assertIn("Quota exceeded for resources",
res.json['NeutronError']['message'])
def test_create_network_quota_without_limit(self):
cfg.CONF.set_override('quota_network', -1, group='QUOTAS')
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
instance = self.plugin.return_value
instance.get_networks_count.return_value = 3
res = self.api.post_json(
_get_path('networks'), initial_input)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
class ExtensionTestCase(base.BaseTestCase):
def setUp(self):
super(ExtensionTestCase, self).setUp()
plugin = 'neutron.neutron_plugin_base_v2.NeutronPluginBaseV2'
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
# Save the global RESOURCE_ATTRIBUTE_MAP
self.saved_attr_map = {}
for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
self.saved_attr_map[resource] = attrs.copy()
# Create the default configurations
self.config_parse()
# Update the plugin and extensions path
self.setup_coreplugin(plugin)
cfg.CONF.set_override('api_extensions_path', EXTDIR)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
# Instantiate mock plugin and enable the V2attributes extension
manager.NeutronManager.get_plugin().supported_extension_aliases = (
["v2attrs"])
api = router.APIRouter()
self.api = webtest.TestApp(api)
quota.QUOTAS._driver = None
cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver',
group='QUOTAS')
def tearDown(self):
super(ExtensionTestCase, self).tearDown()
self.api = None
self.plugin = None
# Restore the global RESOURCE_ATTRIBUTE_MAP
attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map
def test_extended_create(self):
net_id = _uuid()
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid(),
'v2attrs:something_else': "abc"}}
data = {'network': {'admin_state_up': True, 'shared': False}}
data['network'].update(initial_input['network'])
return_value = {'subnets': [], 'status': "ACTIVE",
'id': net_id,
'v2attrs:something': "123"}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post_json(_get_path('networks'), initial_input)
instance.create_network.assert_called_with(mock.ANY,
network=data)
self.assertEqual(res.status_int, exc.HTTPCreated.code)
self.assertIn('network', res.json)
net = res.json['network']
self.assertEqual(net['id'], net_id)
self.assertEqual(net['status'], "ACTIVE")
self.assertEqual(net['v2attrs:something'], "123")
self.assertNotIn('v2attrs:something_else', net)
class TestSubresourcePlugin(object):
def get_network_dummies(self, context, network_id,
filters=None, fields=None):
return []
def get_network_dummy(self, context, id, network_id,
fields=None):
return {}
def create_network_dummy(self, context, network_id, dummy):
return {}
def update_network_dummy(self, context, id, network_id, dummy):
return {}
def delete_network_dummy(self, context, id, network_id):
return
class ListArgsTestCase(base.BaseTestCase):
def test_list_args(self):
path = '/?fields=4&foo=3&fields=2&bar=1'
request = webob.Request.blank(path)
expect_val = ['2', '4']
actual_val = api_common.list_args(request, 'fields')
self.assertEqual(sorted(actual_val), expect_val)
def test_list_args_with_empty(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
self.assertEqual([], api_common.list_args(request, 'fields'))
class FiltersTestCase(base.BaseTestCase):
def test_all_skip_args(self):
path = '/?fields=4&fields=3&fields=2&fields=1'
request = webob.Request.blank(path)
self.assertEqual({}, api_common.get_filters(request, None,
["fields"]))
def test_blank_values(self):
path = '/?foo=&bar=&baz=&qux='
request = webob.Request.blank(path)
self.assertEqual({}, api_common.get_filters(request, {}))
def test_no_attr_info(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
expect_val = {'foo': ['4'], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
actual_val = api_common.get_filters(request, {})
self.assertEqual(actual_val, expect_val)
def test_attr_info_without_conversion(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
attr_info = {'foo': {'key': 'val'}}
expect_val = {'foo': ['4'], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
actual_val = api_common.get_filters(request, attr_info)
self.assertEqual(actual_val, expect_val)
def test_attr_info_with_convert_list_to(self):
path = '/?foo=key=4&bar=3&foo=key=2&qux=1'
request = webob.Request.blank(path)
attr_info = {
'foo': {
'convert_list_to': attributes.convert_kvp_list_to_dict,
}
}
expect_val = {'foo': {'key': ['2', '4']}, 'bar': ['3'], 'qux': ['1']}
actual_val = api_common.get_filters(request, attr_info)
self.assertOrderedEqual(expect_val, actual_val)
def test_attr_info_with_convert_to(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
attr_info = {'foo': {'convert_to': attributes.convert_to_int}}
expect_val = {'foo': [4], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
actual_val = api_common.get_filters(request, attr_info)
self.assertEqual(actual_val, expect_val)
class CreateResourceTestCase(base.BaseTestCase):
def test_resource_creation(self):
resource = v2_base.create_resource('fakes', 'fake', None, {})
self.assertIsInstance(resource, webob.dec.wsgify)
|
|
from datetime import datetime, timedelta
import json
import os
import re
import pytz
from collections import deque
import typing
from urllib import parse, request
from django.conf import settings
from django.contrib.gis.geos import GeometryCollection, GEOSGeometry, Point
from django.contrib.gis.measure import D
from django.contrib.gis.geos.polygon import Polygon
import site_config
def normalize(s):
"""
Downcases string s, removes special characters and replaces spaces with _.
:s: a string
"""
s = re.sub(r"[',!@#$%^&*()-=[\]]+", "", s)
s = re.sub(r"\s+", "_", s)
return s.casefold()
def extension(path):
"Returns the last piece of the filename after the extension separator."
_, ext = os.path.splitext(os.path.basename(path))
return ext[1:] if ext else ""
def split_list(pred, items=None):
if items is None:
items = pred
pred = lambda item: bool(item)
yes = []
no = []
for item in items:
if pred(item):
yes.append(item)
else:
no.append(item)
return yes, no
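# A minimal usage sketch for split_list (the helper below and its sample
# values are illustrative only): with an explicit predicate it partitions
# items into (matching, non-matching); called with a single iterable it
# partitions on truthiness.
def _split_list_example():
    evens, odds = split_list(lambda n: n % 2 == 0, [1, 2, 3, 4])
    assert (evens, odds) == ([2, 4], [1, 3])
    truthy, falsy = split_list(["a", "", None, "b"])
    assert (truthy, falsy) == (["a", "b"], ["", None])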
class pushback_iter(object):
"""An iterator that implements a pushback() method, allowing values to
be added back to the 'stack' after consideration.
"""
def __init__(self, it):
"""
:it: an iterable object
"""
self.iterable = iter(it)
self.pushed = deque()
def pushback(self, v):
self.pushed.append(v)
def __iter__(self):
return self
def __next__(self):
if self.pushed:
return self.pushed.pop()
return next(self.iterable)
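# A small sketch of pushback_iter (illustrative helper and values): a value
# pushed back is yielded again before the wrapped iterator resumes.
def _pushback_iter_example():
    it = pushback_iter([1, 2, 3])
    first = next(it)    # -> 1
    it.pushback(first)  # reconsider 1 on the next iteration
    assert list(it) == [1, 2, 3]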
def decompose_coord(ll):
"""
:ll: degrees latitude or longitude.
Return a tuple of (degrees, minutes, seconds)
"""
degrees = int(ll)
minutes = ll % 1 * 60
seconds = minutes % 1 * 60
return (degrees, int(minutes), seconds)
prettify_format = "{d}\N{DEGREE SIGN} {m}' {s:.2f}\" {h}"
def prettify_lat(lat):
"""
:lat: degrees latitude
Returns a human-friendly string representing latitude
"""
d, m, s = decompose_coord(lat)
return prettify_format.\
format(d=d, m=m, s=s, h="N" if lat > 0 else "S")
def prettify_long(lng):
"""
:lng: degrees longitude
Returns a human-friendly string representing longitude
"""
d, m, s = decompose_coord(lng)
return prettify_format.\
format(d=d, m=m, s=s, h="E" if lng > 0 else "W")
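# Worked example (made-up coordinates): 42.3601 decomposes to 42 degrees,
# 21 minutes and roughly 36.36 seconds, and the prettify_* helpers append the
# hemisphere letter.
def _prettify_example():
    d, m, _s = decompose_coord(42.3601)
    assert (d, m) == (42, 21)
    assert prettify_lat(42.3601).endswith("N")
    assert prettify_long(-71.0589).endswith("W")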
def add_params(url, extra=None, remove=None):
"""Given a URL, add new query parameters by merging in the contents of the
`extra` dictionary.
:param url: (str)
:param extra: (dict)
:param remove: (list or set)
:returns: (str) URL including new parameters
"""
if not (extra or remove):
return url
parsed = parse.urlparse(url)._asdict()
params = parse.parse_qsl(parsed["query"])
if extra:
params += list(extra.items())
if remove:
params = [pair for pair in params if pair[0] not in remove]
parsed["query"] = parse.urlencode(params, doseq=True)
return parse.urlunparse(parse.ParseResult(**parsed))
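# Usage sketch for add_params (URL and parameters are made up): merge new
# query parameters and drop unwanted ones while leaving the rest of the URL
# untouched.
def _add_params_example():
    url = add_params("https://example.com/api?page=2&debug=1",
                     extra={"per_page": 50}, remove={"debug"})
    assert url == "https://example.com/api?page=2&per_page=50"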
def bounds_from_box(box):
"""Converts a `box` string parameter to a Polygon object. If given a Polygon,
it is returned unchanged.
:param box: (str) with the format latMin,longMin,latMax,longMax
"""
if isinstance(box, Polygon):
return box
coords = [float(coord) for coord in box.split(",")]
assert len(coords) == 4
# Coordinates are submitted to the server as
# latMin,longMin,latMax,longMax, but from_bbox wants its arguments in a
# different order:
return Polygon.from_bbox((coords[1], coords[0], coords[3], coords[2]))
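# Worked example (made-up coordinates): the query string is ordered
# latMin,longMin,latMax,longMax, whereas Polygon.from_bbox expects
# (xmin, ymin, xmax, ymax), i.e. longitudes first.
def _bounds_from_box_example():
    poly = bounds_from_box("42.35,-71.10,42.37,-71.05")
    return poly.extent  # (-71.10, 42.35, -71.05, 42.37)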
def point_from_str(coord):
"""Converts a `circle` string parameter to a center point and radius. If
given a Point, it is returned unchanged.
"""
if isinstance(coord, Point):
return coord
coords = coord.split(",")
return Point(float(coords[1]), float(coords[0]), srid=4326)
units_patt = "(" + "|".join(D.UNITS.keys()) + ")"
distance_patt = rf"(\d+(?:\.\d+)?)\s*{units_patt}?\s*$"
def distance_from_str(distance):
if isinstance(distance, str):
m = re.match(distance_patt, distance)
if m:
unit = m.group(2) or "m"
return D(**{unit: float(m.group(1))})
else:
return D(m=float(distance))
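# Usage sketch (made-up values): strings may carry any unit name known to
# django's D measure; bare numbers and unit-less strings are read as metres.
def _distance_from_str_example():
    assert distance_from_str("2.5 km").m == 2500
    assert distance_from_str(800).m == 800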
def _geometry(feat):
if feat["type"] == "FeatureCollection":
return sum([_geometry(f) for f in feat["features"]], [])
return [GEOSGeometry(json.dumps(feat["geometry"]))]
def geometry(feat):
"""Constructs a GEOSGeometryCollection from a GeoJSON dict.
"""
return GeometryCollection(_geometry(feat), srid=4326)
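# Usage sketch (made-up GeoJSON): a bare Feature or a whole FeatureCollection
# flattens into a single GeometryCollection with SRID 4326.
def _geometry_example():
    feature = {"type": "Feature",
               "geometry": {"type": "Point", "coordinates": [-71.06, 42.36]}}
    collection = geometry({"type": "FeatureCollection", "features": [feature]})
    assert len(collection) == 1
    assert collection.srid == 4326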
def geometry_from_url(url):
"""Constructs a GEOSGeometryCollection from a URL that points to a GeoJSON
resource.
"""
with request.urlopen(url) as resp:
raw = resp.read().decode("utf-8")
return geometry(json.loads(raw))
def utc_now():
return pytz.utc.localize(datetime.utcnow())
def lazy(fn):
memo = [None, False]
def wrapped():
if not memo[1]:
memo[0:2] = fn(), True
return memo[0]
return wrapped
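# Usage sketch for lazy (illustrative helper): the wrapped callable runs once
# and its result is memoized for every later call.
def _lazy_example():
    calls = []
    cached = lazy(lambda: calls.append(1) or len(calls))
    assert cached() == 1
    assert cached() == 1
    assert calls == [1]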
def add_locations(dicts, geocoder,
get_address=lambda d: d["all_addresses"][0],
region=lambda d: d.get("region_name", "")):
"""Alters an iterable of dictionaries in place, adding a "location" field
that contains the geocoded latitude and longitude of each
dictionary's address field.
    :param geocoder: a geocoder instance whose geocode method takes an
        iterable of address strings and returns location results (or None)
        in the same order
"""
get_region = region if callable(region) else (lambda _: region)
locations = geocoder.geocode(
f"{get_address(d)}, {get_region(d)}" for d in dicts)
for d, location in zip(dicts, locations):
if not location:
continue
d["location"] = {"lat": location["location"]["lat"],
"long": location["location"]["lng"],
"formatted": location["formatted_name"],
"score": location["properties"].get("score"),
"google_place_id": location["properties"].get("place_id")}
def fn_chain(val, *fns):
for fn in fns:
val = fn(val) if callable(fn) else val.get(fn)
if val is None:
return val
return val
def make_fn_chain(*fns):
return lambda x: fn_chain(x, *fns)
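# Usage sketch (made-up data): each step of fn_chain is either a callable
# applied to the current value or a key looked up with .get(); the chain
# short-circuits to None as soon as a step yields None.
def _fn_chain_example():
    doc = {"user": {"name": "ada"}}
    assert fn_chain(doc, "user", "name", str.upper) == "ADA"
    assert fn_chain(doc, "user", "missing", str.upper) is None
    assert make_fn_chain("user", "name", str.upper)(doc) == "ADA"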
def parse_duration(s):
h, m = s.split(":")
return timedelta(hours=int(h), minutes=int(m))
def read_n_from_end(fp: typing.IO, n,
split_chunk=lambda c: c.split(b"\n"),
chunksize=1000):
"""
Consume a file in reverse, splitting with function `split_chunk`. By
default, takes the last `n` lines from the reader.
:fp: file handle, must be opened in 'rb' mode
:n: the number of lines
:split_chunk: function to split chunks into lines
"""
start_pos = fp.tell()
lines = deque()
pos = fp.seek(0, 2)
current = b""
try:
while True:
last_pos = pos
pos = fp.seek(-chunksize, 1)
current = fp.read(last_pos - pos) + current
current, *found_lines = split_chunk(current)
lines.extendleft(reversed(found_lines[0:n-len(lines)]))
if len(lines) >= n:
break
except OSError as _err:
if len(lines) < n:
lines.appendleft(current)
fp.seek(start_pos, 0)
return lines
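# Usage sketch (the path argument is hypothetical): read the last two lines of
# a log without loading the whole file; the handle must be opened in 'rb' mode.
def _read_n_from_end_example(path):
    with open(path, "rb") as fp:
        tail = read_n_from_end(fp, 2)
    return [line.decode("utf-8") for line in tail]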
def make_absolute_url(path, site_name=None):
if re.match(r"^https?://", path):
return path
if site_name:
config = site_config.by_hostname(site_name)
if settings.BASE_URL:
url_base = settings.BASE_URL
path = add_params(path, {"_hostname": config.hostname})
else:
url_base = f"https://{config.hostname}"
else:
hostname = settings.SERVER_DOMAIN
scheme = "http" if hostname.startswith("localhost") else "https"
url_base = f"{scheme}://{hostname}"
return parse.urljoin(url_base, path)
def today():
return datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
def request_ip(req):
# TODO Make this function more robust
# if "HTTP_X_FORWARDED_FOR" in req.META:
# ips = req.META["HTTP_X_FORWARDED_FOR"].split(",")
# stop_at_ip = os.environ.get("CLUSTER_SUBNET_MASK", )
return req.META.get("HTTP_X_REAL_IP", req.META["REMOTE_ADDR"])
|
|
import nbt
import struct
import time
import random
import logging
import os
import traceback
from numpy import array
from uuid import UUID
from mclevelbase import PlayerNotFound
from level import MCLevel
log = logging.getLogger(__name__)
def TagProperty(tagName, tagType, default_or_func=None):
def getter(self):
if tagName not in self.root_tag["Data"]:
if hasattr(default_or_func, "__call__"):
default = default_or_func(self)
else:
default = default_or_func
self.root_tag["Data"][tagName] = tagType(default)
return self.root_tag["Data"][tagName].value
def setter(self, val):
self.root_tag["Data"][tagName] = tagType(value=val)
return property(getter, setter)
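# TagProperty returns a property whose getter reads root_tag["Data"][tagName],
# creating the tag from default_or_func on first access (calling it with the
# level instance when it is callable), and whose setter wraps the assigned
# value in tagType. The class attributes on PCMetadata below (SizeOnDisk,
# RandomSeed, Time, ...) are all defined this way.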
class SessionLockLost(IOError):
pass
class PCMetadata(MCLevel):
"""
Common super type of world types that have PC like metadata.
This is used by both MCInfdevOldLevel and TWLevel
"""
playersFolder = None
readonly = False
playerTagCache = {}
players = []
filename = None
# --- NBT Tag variables ---
VERSION_ANVIL = 19133
SizeOnDisk = TagProperty('SizeOnDisk', nbt.TAG_Long, 0)
RandomSeed = TagProperty('RandomSeed', nbt.TAG_Long, 0)
Time = TagProperty('Time', nbt.TAG_Long, 0) # Age of the world in ticks. 20 ticks per second; 24000 ticks per day.
LastPlayed = TagProperty('LastPlayed', nbt.TAG_Long, lambda self: long(time.time() * 1000))
LevelName = TagProperty('LevelName', nbt.TAG_String, lambda self: self.displayName)
GeneratorName = TagProperty('generatorName', nbt.TAG_String, 'default')
MapFeatures = TagProperty('MapFeatures', nbt.TAG_Byte, 1)
GameType = TagProperty('GameType', nbt.TAG_Int, 0) # 0 for survival, 1 for creative
version = TagProperty('version', nbt.TAG_Int, VERSION_ANVIL)
def getFilePath(self, filename):
pass
def getFolderPath(self, dirname):
pass
def initPlayers(self):
if os.path.exists(self.getFilePath("players")) and os.listdir(
self.getFolderPath("players")) != []:
self.playersFolder = self.getFolderPath("players")
self.oldPlayerFolderFormat = True
if os.path.exists(self.getFolderPath("playerdata")):
self.playersFolder = self.getFolderPath("playerdata")
self.oldPlayerFolderFormat = False
self.players = [x[:-4] for x in os.listdir(self.playersFolder) if x.endswith(".dat")]
for player in self.players:
try:
UUID(player, version=4)
except ValueError:
print "{0} does not seem to be in a valid UUID format".format(player)
self.players.remove(player)
if "Player" in self.root_tag["Data"]:
self.players.append("Player")
def acquireSessionLock(self):
lockfile = self.getFilePath("session.lock")
self.initTime = int(time.time() * 1000)
with file(lockfile, "wb") as f:
f.write(struct.pack(">q", self.initTime))
f.flush()
os.fsync(f.fileno())
logging.getLogger().info("Re-acquired session lock")
def checkSessionLock(self):
if self.readonly:
raise SessionLockLost("World is opened read only.")
lockfile = self.getFilePath("session.lock")
try:
(lock, ) = struct.unpack(">q", file(lockfile, "rb").read())
except struct.error:
lock = -1
if lock != self.initTime:
            # I should raise an error, but this seems to always fire the exception, so I will just try to acquire it instead
raise SessionLockLost("Session lock lost. This world is being accessed from another location.")
#self.acquireSessionLock()
def _create(self, filename, random_seed, last_played):
# create a new level
root_tag = nbt.TAG_Compound()
root_tag["Data"] = nbt.TAG_Compound()
root_tag["Data"]["SpawnX"] = nbt.TAG_Int(0)
root_tag["Data"]["SpawnY"] = nbt.TAG_Int(2)
root_tag["Data"]["SpawnZ"] = nbt.TAG_Int(0)
if last_played is None:
last_played = long(time.time() * 1000)
if random_seed is None:
random_seed = long(random.random() * 0xffffffffffffffffL) - 0x8000000000000000L
self.root_tag = root_tag
root_tag["Data"]['version'] = nbt.TAG_Int(self.VERSION_ANVIL)
self.LastPlayed = long(last_played)
self.RandomSeed = long(random_seed)
self.SizeOnDisk = 0
self.Time = 1
self.LevelName = os.path.basename(self.filename)
# ## if singleplayer:
self.createPlayer("Player")
def loadLevelDat(self, create=False, random_seed=None, last_played=None):
if create:
self._create(self.filename, random_seed, last_played)
self.saveInPlace()
else:
try:
self.root_tag = nbt.load(self.filename)
except Exception, e:
filename_old = self.getFilePath("level.dat_old")
log.info("Error loading level.dat, trying level.dat_old ({0})".format(e))
try:
self.root_tag = nbt.load(filename_old)
log.info("level.dat restored from backup.")
self.saveInPlace()
except Exception, e:
traceback.print_exc()
print repr(e)
log.info("Error loading level.dat_old. Initializing with defaults.")
self._create(self.filename, random_seed, last_played)
def save_metadata(self):
"""
Saves the metadata to file. The session lock should have already been checked.
"""
for path, tag in self.playerTagCache.iteritems():
tag.save(path)
if self.playersFolder is not None:
for file_ in os.listdir(self.playersFolder):
if file_.endswith(".dat") and file_[:-4] not in self.players:
os.remove(os.path.join(self.playersFolder, file_))
self.playerTagCache.clear()
self.root_tag.save(self.filename)
def init_scoreboard(self):
if os.path.exists(self.getFolderPath("data")):
if os.path.exists(self.getFolderPath("data")+"/scoreboard.dat"):
return nbt.load(self.getFolderPath("data")+"/scoreboard.dat")
else:
root_tag = nbt.TAG_Compound()
root_tag["data"] = nbt.TAG_Compound()
root_tag["data"]["Objectives"] = nbt.TAG_List()
root_tag["data"]["PlayerScores"] = nbt.TAG_List()
root_tag["data"]["Teams"] = nbt.TAG_List()
root_tag["data"]["DisplaySlots"] = nbt.TAG_List()
self.save_scoreboard(root_tag)
return root_tag
else:
self.getFolderPath("data")
root_tag = nbt.TAG_Compound()
root_tag["data"] = nbt.TAG_Compound()
root_tag["data"]["Objectives"] = nbt.TAG_List()
root_tag["data"]["PlayerScores"] = nbt.TAG_List()
root_tag["data"]["Teams"] = nbt.TAG_List()
root_tag["data"]["DisplaySlots"] = nbt.TAG_List()
self.save_scoreboard(root_tag)
return root_tag
def save_scoreboard(self, score):
score.save(self.getFolderPath("data")+"/scoreboard.dat")
def init_player_data(self):
player_data = {}
if self.oldPlayerFolderFormat:
for p in self.players:
if p != "Player":
player_data_file = os.path.join(self.getFolderPath("players"), p+".dat")
player_data[p] = nbt.load(player_data_file)
else:
data = nbt.load(self.getFilePath("level.dat"))
player_data[p] = data["Data"]["Player"]
else:
for p in self.players:
if p != "Player":
player_data_file = os.path.join(self.getFolderPath("playerdata"), p+".dat")
player_data[p] = nbt.load(player_data_file)
else:
data = nbt.load(self.getFilePath("level.dat"))
player_data[p] = data["Data"]["Player"]
#player_data = []
#for p in [x for x in os.listdir(self.playersFolder) if x.endswith(".dat")]:
#player_data.append(player.Player(self.playersFolder+"\\"+p))
return player_data
def save_player_data(self, player_data):
if self.oldPlayerFolderFormat:
for p in player_data.keys():
if p != "Player":
player_data[p].save(os.path.join(self.getFolderPath("players"), p+".dat"))
else:
for p in player_data.keys():
if p != "Player":
player_data[p].save(os.path.join(self.getFolderPath("playerdata"), p+".dat"))
# --- Player and spawn manipulation ---
def playerSpawnPosition(self, player=None):
"""
        If player is None, returns the world's default spawn position;
        if the player hasn't used a bed, missing spawn tags fall back to the world default.
"""
dataTag = self.root_tag["Data"]
if player is None:
playerSpawnTag = dataTag
else:
playerSpawnTag = self.getPlayerTag(player)
return [playerSpawnTag.get(i, dataTag[i]).value for i in ("SpawnX", "SpawnY", "SpawnZ")]
def setPlayerSpawnPosition(self, pos, player=None):
""" xxx if player is None then it sets the default spawn position for the world """
if player is None:
playerSpawnTag = self.root_tag["Data"]
else:
playerSpawnTag = self.getPlayerTag(player)
for name, val in zip(("SpawnX", "SpawnY", "SpawnZ"), pos):
playerSpawnTag[name] = nbt.TAG_Int(val)
def getPlayerPath(self, player, dim=0):
assert player != "Player"
if dim != 0:
return os.path.join(os.path.dirname(self.filename), "DIM%s" % dim, "playerdata", "%s.dat" % player)
else:
return os.path.join(self.playersFolder, "%s.dat" % player)
def getPlayerTag(self, player="Player"):
if player == "Player":
if player in self.root_tag["Data"]:
# single-player world
return self.root_tag["Data"]["Player"]
raise PlayerNotFound(player)
else:
playerFilePath = self.getPlayerPath(player)
playerTag = self.playerTagCache.get(playerFilePath)
if playerTag is None:
if os.path.exists(playerFilePath):
playerTag = nbt.load(playerFilePath)
self.playerTagCache[playerFilePath] = playerTag
else:
raise PlayerNotFound(player)
return playerTag
def getPlayerDimension(self, player="Player"):
playerTag = self.getPlayerTag(player)
if "Dimension" not in playerTag:
return 0
return playerTag["Dimension"].value
def setPlayerDimension(self, d, player="Player"):
playerTag = self.getPlayerTag(player)
if "Dimension" not in playerTag:
playerTag["Dimension"] = nbt.TAG_Int(0)
playerTag["Dimension"].value = d
def setPlayerPosition(self, (x, y, z), player="Player"):
posList = nbt.TAG_List([nbt.TAG_Double(p) for p in (x, y - 1.75, z)])
playerTag = self.getPlayerTag(player)
playerTag["Pos"] = posList
def getPlayerPosition(self, player="Player"):
playerTag = self.getPlayerTag(player)
posList = playerTag["Pos"]
x, y, z = map(lambda x: x.value, posList)
return x, y + 1.75, z
def setPlayerOrientation(self, yp, player="Player"):
self.getPlayerTag(player)["Rotation"] = nbt.TAG_List([nbt.TAG_Float(p) for p in yp])
def getPlayerOrientation(self, player="Player"):
""" returns (yaw, pitch) """
yp = map(lambda x: x.value, self.getPlayerTag(player)["Rotation"])
y, p = yp
if p == 0:
p = 0.000000001
if p == 180.0:
p -= 0.000000001
yp = y, p
return array(yp)
def setPlayerAbilities(self, gametype, player="Player"):
playerTag = self.getPlayerTag(player)
# Check for the Abilities tag. It will be missing in worlds from before
# Beta 1.9 Prerelease 5.
if 'abilities' not in playerTag:
playerTag['abilities'] = nbt.TAG_Compound()
# Assumes creative (1) is the only mode with these abilities set,
# which is true for now. Future game modes may not hold this to be
# true, however.
if gametype == 1:
playerTag['abilities']['instabuild'] = nbt.TAG_Byte(1)
playerTag['abilities']['mayfly'] = nbt.TAG_Byte(1)
playerTag['abilities']['invulnerable'] = nbt.TAG_Byte(1)
else:
playerTag['abilities']['flying'] = nbt.TAG_Byte(0)
playerTag['abilities']['instabuild'] = nbt.TAG_Byte(0)
playerTag['abilities']['mayfly'] = nbt.TAG_Byte(0)
playerTag['abilities']['invulnerable'] = nbt.TAG_Byte(0)
def setPlayerGameType(self, gametype, player="Player"):
playerTag = self.getPlayerTag(player)
# This annoyingly works differently between single- and multi-player.
if player == "Player":
self.GameType = gametype
self.setPlayerAbilities(gametype, player)
else:
playerTag['playerGameType'] = nbt.TAG_Int(gametype)
self.setPlayerAbilities(gametype, player)
def getPlayerGameType(self, player="Player"):
if player == "Player":
return self.GameType
else:
playerTag = self.getPlayerTag(player)
return playerTag["playerGameType"].value
def createPlayer(self, playerName):
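        # The single-player entry lives directly in level.dat's Data compound
        # under the key "Player"; any other name gets a fresh compound that is
        # cached in playerTagCache keyed by its .dat file path.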
if playerName == "Player":
playerTag = self.root_tag["Data"].setdefault(playerName, nbt.TAG_Compound())
else:
playerTag = nbt.TAG_Compound()
playerTag['Air'] = nbt.TAG_Short(300)
playerTag['AttackTime'] = nbt.TAG_Short(0)
playerTag['DeathTime'] = nbt.TAG_Short(0)
playerTag['Fire'] = nbt.TAG_Short(-20)
playerTag['Health'] = nbt.TAG_Short(20)
playerTag['HurtTime'] = nbt.TAG_Short(0)
playerTag['Score'] = nbt.TAG_Int(0)
playerTag['FallDistance'] = nbt.TAG_Float(0)
playerTag['OnGround'] = nbt.TAG_Byte(0)
playerTag["Inventory"] = nbt.TAG_List()
playerTag['Motion'] = nbt.TAG_List([nbt.TAG_Double(0) for i in range(3)])
playerTag['Pos'] = nbt.TAG_List([nbt.TAG_Double([0.5, 2.8, 0.5][i]) for i in range(3)])
playerTag['Rotation'] = nbt.TAG_List([nbt.TAG_Float(0), nbt.TAG_Float(0)])
if playerName != "Player":
self.playerTagCache[self.getPlayerPath(playerName)] = playerTag
|
|
#Standard library, PyQt4 and grSim/SSL-Vision protobuf imports.
import sys, random
import socket
import struct
import threading
import time
import math
from PyQt4 import QtGui, QtCore
import proto.messages_robocup_ssl_wrapper_pb2 as wrapper
from proto.grSim_Packet_pb2 import grSim_Packet
from proto.grSim_Commands_pb2 import grSim_Commands, grSim_Robot_Command
#TODO: Using RecvVision, save the data to be used.
# Use the data in computeAI.
MCAST_GRP = '224.5.23.2'
MCAST_PORT = 10020
SEND_ADDR = '127.0.0.1'
SEND_PORT = 20011
udpsocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('', MCAST_PORT)) # use MCAST_GRP instead of '' to listen only
# to MCAST_GRP, not all groups on MCAST_PORT
mreq = struct.pack("4sl", socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
shoot = False
fakeOri = 0
playAll = True
"""
fakeOri is a state variable. It tells the bot where to
look and where to go.
0: Look at the ball, go towards the ball.
1: Look towards angle (0) and try to position to reach
the ball from that angle.
2: Look towards angle (math.pi + math.pi / 2) and try
to reach the ball from that angle.
3: Look towards angle (math.pi) and try to reach the
ball from that angle.
4: Look towards angle (math.pi / 2) and try to reach
the ball from that angle.
5: Look at the ball and rotate around the ball clockwise.
6: Look at the ball and rotate around the ball anti-
clockwise.
7: Look at the ball and go towards it following a curve
clockwise.
8: Look at the ball and go towards it following a curve
anti-clockwise.
9: Look at one of the goals and go towards the ball.
10: Selected by pressing 'r'. Look at the ball and go backwards.
"""
def debugP(text):
#print (text)
    pass  # debug output disabled; uncomment the print above to re-enable
class WorldClass:
def __init__(self):
self.ball = None
self.teams = []
self.geo = None
wc = WorldClass()
class RecvVision(QtCore.QThread):
updated = QtCore.pyqtSignal()
name = "recv"
"""def __init__(self, name):
threading.Thread.__init__(self)
self.name = name"""
def run(self):
print ("Starting " + self.name)
self.recvData()
print ("Done with " + self.name)
def recvData(self):
global wc
wp = wrapper.SSL_WrapperPacket()
while playAll:
wp.ParseFromString(sock.recv(65536))
if wp.detection.IsInitialized():
debugP ("Frame number: {}".format(type(wp.detection.frame_number)))
debugP ("Time Capture: {}".format(wp.detection.t_capture))
debugP ("Time Sent: {}".format(wp.detection.t_sent))
debugP ("Camera ID: {}".format(wp.detection.camera_id))
if len(wc.teams) == 0:
wc.teams.append(wp.detection.robots_yellow)
wc.teams.append(wp.detection.robots_blue)
else:
wc.teams[0] = (wp.detection.robots_yellow)
wc.teams[1] = (wp.detection.robots_blue)
for i in wp.detection.balls:
#Assuming that there is only one ball.
#The last one will overwrite the other ones.
wc.ball = i
debugP ("Ball")
debugP ("\tConfidence: {}".format(i.confidence))
debugP ("\tx: {}".format(i.x))
debugP ("\ty: {}".format(i.y))
debugP ("\tz: {}".format(i.z))
for i in wp.detection.robots_yellow:
debugP ("Robot Yellow {}".format(i.robot_id))
debugP ("\tConfidence: {}".format(i.confidence))
debugP ("\tx: {}".format(i.x))
debugP ("\ty: {}".format(i.y))
debugP ("\tOrientation: {}".format(i.orientation))
for i in wp.detection.robots_blue:
debugP ("Robot Blue {}".format(i.robot_id))
debugP ("\tConfidence: {}".format(i.confidence))
debugP ("\tx: {}".format(i.x))
debugP ("\ty: {}".format(i.y))
debugP ("\tOrientation: {}".format(i.orientation))
#debugP (wp.detection)
pass
if wp.geometry.IsInitialized():
debugP (wp.geometry)
wc.geo = wp.geometry
pass
debugP ("************")
self.updated.emit()
def computeDistance(x1, y1, x2, y2):
xdis = x1 - x2
ydis = y1 - y2
distance = math.sqrt(xdis * xdis + ydis * ydis)
return distance
def slopeFromAngle(angle):
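    # Convert a heading angle into a line slope, nudging exactly vertical
    # headings (pi/2 and 3*pi/2) so that tan() stays finite.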
if angle == math.pi + math.pi / 2:
angle += 0.01
elif angle == math.pi / 2:
angle -= 0.01
return math.tan(angle - math.pi)
def pointsOnLine(slope, x, y, distance):
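    # Return the two points lying `distance` away from (x, y) along the line
    # through (x, y) with the given slope, one on either side.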
b = y - slope * x
r = math.sqrt(1 + slope * slope)
newX1 = (x + (distance / r))
newY1 = (y + ((distance * slope) / r))
newX2 = (x + ((-distance) / r))
newY2 = (y + (((-distance) * slope) / r))
return ((newX1, newY1), (newX2, newY2))
def followAngle(angle, x, y, distance):
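    # Step `distance` from (x, y) in the direction of `angle`, choosing the
    # correct candidate returned by pointsOnLine() for that heading.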
slope = slopeFromAngle(angle)
coord1, coord2 = pointsOnLine(slope, x, y, distance)
side = (angle - math.pi / 2) % (math.pi * 2)
if (side < math.pi):
return coord2
else:
return coord1
def rotatePoint(x1, y1, x2, y2, angle):
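    # Rotate the point (x1, y1) around the pivot (x2, y2) by `angle` radians.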
s = math.sin(angle)
c = math.cos(angle)
x1 -= x2
y1 -= y2
newX = x1 * c - y1 * s
newY = x1 * s + y1 * c
x1 = newX + x2
y1 = newY + y2
return (x1, y1)
#x1, y1: location of current destination
#x2, y2: direction to be looking at when at destination
#x3, y3: current position
def fetchAndRotate(x1, y1, x2, y2, x3, y3):
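    # Three-phase approach: once nearly aligned with the goal angle, drive
    # straight to the target ("Final"); when close but misaligned, orbit to
    # line up ("Mid"); otherwise head for a staging point offset from the
    # target away from the goal direction ("Far").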
destDist = computeDistance(x1, y1, x3, y3) - 100
feather = 200
angle = 0
goalAngle = math.atan2((y2 - y3), (x2 - x3))
x4, y4 = followAngle(goalAngle, x1, y1, - feather)
destX = x1
destY = y1
fromAngle = math.atan2((y4 - y3), (x4 - x3))
finalAngle = math.atan2((y1 - y3), (x1 - x3))
if (finalAngle - goalAngle) % (math.pi * 2) < (goalAngle - finalAngle) % (math.pi * 2):
angleT = (finalAngle - goalAngle) % (math.pi * 2)
diff = - wc.teams[0][0].orientation + math.pi / 2
bounded = 0.5
else:
angleT = (goalAngle - finalAngle) % (math.pi * 2)
diff = - wc.teams[0][0].orientation - math.pi / 2
bounded = -0.5
debugP ("Dest: {}".format(((destDist % 10) - 5) / 10))
if angleT <= math.pi / 5:# and destDist <= feather:
debugP ("\tFinal")
aimAngle = finalAngle + ida(goalAngle)
angle = goalAngle
destX = x1
destY = y1
elif destDist <= feather:
        #TODO: Make sure to rotate towards the closest angle.
debugP ("\tMid")
aimAngle = diff
angle = finalAngle
#aimAngle = diff
#angle = fromAngle
destX = x1
destY = y1
else:
debugP ("\tFar {}".format(bounded))
aimAngle = - wc.teams[0][0].orientation + bounded
angle = finalAngle# + math.sin(destDist)
destX = x4
destY = y4
return (destX, destY, angle, aimAngle)
def resetCommand(command, i_id):
command.id = i_id
command.wheelsspeed = False
command.wheel1 = 0
command.wheel2 = 0
command.wheel3 = 0
command.wheel4 = 0
command.veltangent = 0 #positive -> Go forward
command.velnormal = 0 #positive -> Go left side
command.velangular = 0 #Rotate by angle
command.kickspeedx = 0
command.kickspeedz = 0
command.spinner = False
#Inverts the rotation and doubles an angle.
def ida(angle):
return (math.pi * 2 - angle) * 2
#angle1: the target angle to face towards.
#angle2: the current orientation.
#Returns the signed difference wrapped into [-pi, pi).
def getAngleDiff(angle1, angle2):
return ((math.pi + (angle1 - angle2)) % (math.pi * 2)) - math.pi
class AposAI(threading.Thread):
def __init__(self, name):
threading.Thread.__init__(self)
self.name = name
def run(self):
print ("Starting " + self.name)
self.computeAI()
print ("Done with " + self.name)
def computeAI(self):
global udpsocket
global shoot
commandList = []
packet = grSim_Packet()
packet.commands.isteamyellow = True
packet.commands.timestamp = 0.0
commandList.append(packet.commands.robot_commands.add())
commandList.append(packet.commands.robot_commands.add())
resetCommand(commandList[0], 0)
resetCommand(commandList[1], 1)
while playAll and len(wc.teams) == 0:
pass
while playAll:
#goalX = -3100
#goalY = 0
goalX = wc.teams[0][1].x
goalY = wc.teams[0][1].y
if not shoot:
commandList[0].kickspeedx = 0
bX = wc.ball.x
bY = wc.ball.y
else:
commandList[0].kickspeedx = 5
bX = goalX
bY = goalY
shoot = False
pX = wc.teams[0][0].x
pY = wc.teams[0][0].y
angle = math.atan2((bY - pY), (bX - pX))
angle2 = math.atan2((bY - wc.teams[0][1].y), (bX - wc.teams[0][1].x))
if fakeOri == 0:
aimAngle = - wc.teams[0][0].orientation
angle = angle
elif fakeOri == 1:
aimAngle = angle + ida(0)
angle = 0
#aimAngle = angle + math.pi / 2
#angle = math.pi + math.pi / 2 + math.pi / 4
elif fakeOri == 2:
aimAngle = angle + ida(math.pi + math.pi / 2)
angle = math.pi + math.pi / 2
elif fakeOri == 3:
aimAngle = angle + ida(math.pi)
angle = math.pi
elif fakeOri == 4:
aimAngle = angle + ida(math.pi / 2)
angle = math.pi / 2
elif fakeOri == 5:
aimAngle = - wc.teams[0][0].orientation + math.pi / 2
angle = angle
elif fakeOri == 6:
aimAngle = - wc.teams[0][0].orientation - math.pi / 2
angle = angle
elif fakeOri == 7:
#You can adjust the factor. Lower means
#that it will go towards the destination
#using a smaller arc
aimAngle = - wc.teams[0][0].orientation + 0.5
angle = angle
elif fakeOri == 8:
#You can adjust the factor. Lower means
#that it will go towards the destination
#using a smaller arc
aimAngle = - wc.teams[0][0].orientation - 0.5
angle = angle
elif fakeOri == 9:
bX, bY, angle, aimAngle = fetchAndRotate(bX, bY, goalX, goalY, pX, pY)
elif fakeOri == 10:
aimAngle = - wc.teams[0][0].orientation
angle = angle
bX, bY = rotatePoint(bX, bY, pX, pY, aimAngle)
#angle = The angle to look towards
#pX, pY = rotatePoint(pX, pY, bX, bY, -angle)
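            # The motion command below is effectively the unit vector from the
            # robot to (bX, bY): 1/ratio equals (coordinate delta) / distance.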
tempD = computeDistance(bX, bY, pX, pY)
if (bX == pX):
bX += 1
if (bY == pY):
bY += 1
ratioX = tempD / (bX - pX)
ratioY = tempD / (bY - pY)
#offsetX = bX - (bX - tempD)
#offsetX = bY - (bY - tempD)
if fakeOri != 10:
commandList[0].velnormal = 1 / ratioY
commandList[0].veltangent = 1 / ratioX
else:
commandList[0].velnormal = -1 / ratioY
commandList[0].veltangent = -1 / ratioX
#angle = 0
angleDiff = getAngleDiff(angle, wc.teams[0][0].orientation)
angleDiff2 = getAngleDiff(angle2, wc.teams[0][1].orientation)
commandList[0].velangular = angleDiff * 10
commandList[1].velangular = angleDiff2 * 10
debugP ("Mode: {}".format(fakeOri))
debugP ("Angle: {}".format(angle))
debugP ("Diff: {}".format(angleDiff))
debugP ("RatioX: {}".format(ratioX))
debugP ("RatioY: {}".format(ratioY))
debugP ("Ball at:")
debugP ("\tx: {}".format(wc.ball.x))
debugP ("\ty: {}".format(wc.ball.y))
debugP ("Robot 0 of {}:".format(len(wc.teams[0])))
debugP ("\tx: {}".format(wc.teams[0][0].x))
debugP ("\ty: {}".format(wc.teams[0][0].y))
debugP ("\tOri: {}".format(wc.teams[0][0].orientation))
debugP ("************")
udpsocket.sendto(packet.SerializeToString(), (SEND_ADDR, SEND_PORT))
time.sleep(0.02)
pass
class InputCommands(threading.Thread):
def __init__(self, name):
threading.Thread.__init__(self)
self.name = name
def run(self):
print ("Starting " + self.name)
self.getCommands()
print ("Done with " + self.name)
def getCommands(self):
global playAll
global fakeOri
global shoot
txtInput = ""
while txtInput is not "q" and playAll:
txtInput = input()
if txtInput is "s":
shoot = True
elif txtInput is "0":
fakeOri = 0
elif txtInput is "1":
fakeOri = 1
elif txtInput is "2":
fakeOri = 2
elif txtInput is "3":
fakeOri = 3
elif txtInput is "4":
fakeOri = 4
elif txtInput is "5":
fakeOri = 5
elif txtInput is "6":
fakeOri = 6
elif txtInput is "7":
fakeOri = 7
elif txtInput is "8":
fakeOri = 8
elif txtInput is "9":
fakeOri = 9
elif txtInput is "r":
fakeOri = 10
playAll = False
class FieldDisplay(QtGui.QWidget):
#TODO: Make the gui be based on the current window size.
def __init__(self):
super(FieldDisplay, self).__init__()
self._thread = RecvVision(self)
self._thread.updated.connect(self.refresh)
self.ratio = 1.0
self.fieldOffsetX = 700
self.fieldOffsetY = 700
self._thread.start()
self.initUI()
def initUI(self):
self.setGeometry(200, 200, 1280, 720)
self.ratio = (6000 + self.fieldOffsetY * 2) / 720
self.setWindowTitle('SSL Visualizer')
self.show()
def closeEvent(self, e):
global playAll
playAll = False
def resizeEvent(self, e):
if (wc.geo is not None):
print ("Current new size: {}, {}".format(e.size().width(), e.size().height()))
print ("Field size: {}, {}".format(wc.geo.field.field_width, wc.geo.field.boundary_width))
ratioX = ((wc.geo.field.field_width + self.fieldOffsetX * 2) / (e.size().width()))
ratioY = ((wc.geo.field.goal_width + self.fieldOffsetY * 2) / (e.size().height()))
print ("RatioX: {}".format(ratioX))
print ("RatioY: {}".format(ratioY))
self.ratio = max(ratioX, ratioY)
pass
def mousePressEvent(self, e):
global udpsocket
packet = grSim_Packet()
packet.replacement.ball.x = e.x() * self.ratio / 1000 - 10400 / 1000 / 2
packet.replacement.ball.y = -e.y() * self.ratio / 1000 + 7400 / 1000 / 2
packet.replacement.ball.vx = 0
packet.replacement.ball.vy = 0
print ("Clicked! {}, {}".format(packet.replacement.ball.x, packet.replacement.ball.y))
udpsocket.sendto(packet.SerializeToString(), (SEND_ADDR, SEND_PORT))
def mouseMoveEvent(self, e):
global udpsocket
packet = grSim_Packet()
packet.replacement.ball.x = e.x() * self.ratio / 1000 - 10400 / 1000 / 2
packet.replacement.ball.y = -e.y() * self.ratio / 1000 + 7400 / 1000 / 2
packet.replacement.ball.vx = 0
packet.replacement.ball.vy = 0
print ("Clicked! {}, {}".format(packet.replacement.ball.x, packet.replacement.ball.y))
udpsocket.sendto(packet.SerializeToString(), (SEND_ADDR, SEND_PORT))
def paintEvent(self, e):
qp = QtGui.QPainter()
qp.begin(self)
self.drawField(qp)
qp.end()
def drawField(self, qp):
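        # Field geometry from SSL-Vision is in millimetres; dividing by
        # self.ratio (recomputed in resizeEvent) converts it to widget pixels.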
pen = QtGui.QPen(QtGui.QColor(0, 0, 0), 3, QtCore.Qt.SolidLine)
if (wc.geo is not None):
color = QtGui.QColor(0, 0, 0)
color.setNamedColor('#d4d4d4')
qp.setPen(pen)
width = wc.geo.field.field_width / self.ratio
height = wc.geo.field.goal_width / self.ratio
qp.setBrush(QtGui.QColor(0, 155, 0, 150))
qp.drawRect(0, 0, width + self.fieldOffsetX * 2 / self.ratio, height + self.fieldOffsetY * 2 / self.ratio)
qp.setBrush(QtGui.QColor(0, 155, 0, 200))
pen = QtGui.QPen(QtGui.QColor(0, 0, 0, 0), 3, QtCore.Qt.SolidLine)
qp.setPen(pen)
qp.drawRect(self.fieldOffsetX / self.ratio - 250 / self.ratio, self.fieldOffsetY / self.ratio - 250 / self.ratio, width + 500 / self.ratio, height + 500 / self.ratio)
pen = QtGui.QPen(QtGui.QColor(255, 255, 255), 3, QtCore.Qt.SolidLine)
qp.setPen(pen)
qp.drawRect(self.fieldOffsetX / self.ratio, self.fieldOffsetY / self.ratio, width, height)
pen = QtGui.QPen(QtGui.QColor(255, 255, 255), 3, QtCore.Qt.SolidLine)
qp.setPen(pen)
qp.drawLine(self.fieldOffsetX / self.ratio + width / 2, self.fieldOffsetY / self.ratio, self.fieldOffsetX / self.ratio + width / 2, self.fieldOffsetY / self.ratio + height)
qp.drawLine(self.fieldOffsetX / self.ratio, self.fieldOffsetY / self.ratio + height / 2, self.fieldOffsetX / self.ratio + width, self.fieldOffsetY / self.ratio + height / 2)
qp.setBrush(QtGui.QColor(255, 255, 255, 0))
circleSize = 500 / self.ratio
qp.drawEllipse(self.fieldOffsetX / self.ratio + width / 2 - circleSize, self.fieldOffsetY / self.ratio + height / 2 - circleSize, circleSize * 2, circleSize * 2)
#qp.drawEllipse(self.fieldOffsetX / self.ratio - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - circleSize * 2 - 250 / self.ratio, circleSize * 4, circleSize * 4)
#qp.drawEllipse(self.fieldOffsetX / self.ratio - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - circleSize * 2 + 250 / self.ratio, circleSize * 4, circleSize * 4)
qp.drawArc(self.fieldOffsetX / self.ratio - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - circleSize * 2 - 250 / self.ratio, circleSize * 4, circleSize * 4, 0, 90 * 16)
qp.drawArc(self.fieldOffsetX / self.ratio - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - circleSize * 2 + 250 / self.ratio, circleSize * 4, circleSize * 4, 0, -90 * 16)
qp.drawArc(self.fieldOffsetX / self.ratio + width - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - circleSize * 2 - 250 / self.ratio, circleSize * 4, circleSize * 4, 180 * 16, -90 * 16)
qp.drawArc(self.fieldOffsetX / self.ratio + width - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - circleSize * 2 + 250 / self.ratio, circleSize * 4, circleSize * 4, 180 * 16, 90 * 16)
qp.drawLine(self.fieldOffsetX / self.ratio + circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - 250 / self.ratio, self.fieldOffsetX / self.ratio + circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 + 250 / self.ratio)
qp.drawLine(self.fieldOffsetX / self.ratio + width - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 - 250 / self.ratio, self.fieldOffsetX / self.ratio + width - circleSize * 2, self.fieldOffsetY / self.ratio + height / 2 + 250 / self.ratio)
goalSize = 1000
pen = QtGui.QPen(QtGui.QColor(255, 0, 0), 3, QtCore.Qt.SolidLine)
qp.setPen(pen)
qp.drawLine(self.fieldOffsetX / self.ratio - 180 / self.ratio, self.fieldOffsetY / self.ratio - goalSize / 2 / self.ratio + height / 2, self.fieldOffsetX / self.ratio - 180 / self.ratio, self.fieldOffsetY / self.ratio + goalSize / 2 / self.ratio + height / 2)
qp.drawLine(self.fieldOffsetX / self.ratio - 180 / self.ratio, self.fieldOffsetY / self.ratio - goalSize / 2 / self.ratio + height / 2, self.fieldOffsetX / self.ratio, self.fieldOffsetY / self.ratio - goalSize / 2 / self.ratio + height / 2)
qp.drawLine(self.fieldOffsetX / self.ratio - 180 / self.ratio, self.fieldOffsetY / self.ratio + goalSize / 2 / self.ratio + height / 2, self.fieldOffsetX / self.ratio, self.fieldOffsetY / self.ratio + goalSize / 2 / self.ratio + height / 2)
qp.drawLine(self.fieldOffsetX / self.ratio + 180 / self.ratio + width, self.fieldOffsetY / self.ratio - goalSize / 2 / self.ratio + height / 2, self.fieldOffsetX / self.ratio + 180 / self.ratio + width, self.fieldOffsetY / self.ratio + goalSize / 2 / self.ratio + height / 2)
qp.drawLine(self.fieldOffsetX / self.ratio + 180 / self.ratio + width, self.fieldOffsetY / self.ratio - goalSize / 2 / self.ratio + height / 2, self.fieldOffsetX / self.ratio + width, self.fieldOffsetY / self.ratio - goalSize / 2 / self.ratio + height / 2)
qp.drawLine(self.fieldOffsetX / self.ratio + 180 / self.ratio + width, self.fieldOffsetY / self.ratio + goalSize / 2 / self.ratio + height / 2, self.fieldOffsetX / self.ratio + width, self.fieldOffsetY / self.ratio + goalSize / 2 / self.ratio + height / 2)
pen = QtGui.QPen(QtGui.QColor(0, 0, 0), 3, QtCore.Qt.SolidLine)
qp.setPen(pen)
robotSize = 180 / self.ratio
for i in wc.teams[0]:
centerX = i.x / self.ratio + (self.fieldOffsetX / self.ratio + width / 2)
centerY = -i.y / self.ratio + (self.fieldOffsetY / self.ratio + height / 2)
qp.setBrush(QtGui.QColor(255, 255, 0, 0))
qp.drawEllipse(centerX - robotSize, centerY - robotSize, robotSize * 2, robotSize * 2)
qp.setBrush(QtGui.QColor(255, 255, 0, 200))
qp.drawEllipse(centerX - robotSize / 2, centerY - robotSize / 2, robotSize, robotSize)
x2, y2 = followAngle(-i.orientation, centerX, centerY, robotSize)
qp.drawLine(centerX, centerY, x2, y2)
for i in wc.teams[1]:
centerX = i.x / self.ratio + (self.fieldOffsetX / self.ratio + width / 2)
centerY = -i.y / self.ratio + (self.fieldOffsetY / self.ratio + height / 2)
qp.setBrush(QtGui.QColor(0, 0, 255, 0))
qp.drawEllipse(centerX - robotSize, centerY - robotSize, robotSize * 2, robotSize * 2)
qp.setBrush(QtGui.QColor(0, 0, 255, 200))
qp.drawEllipse(centerX - robotSize / 2, centerY - robotSize / 2, robotSize, robotSize)
x2, y2 = followAngle(-i.orientation, centerX, centerY, robotSize)
qp.drawLine(centerX, centerY, x2, y2)
qp.setBrush(QtGui.QColor(255, 69, 0, 200))
ballSize = 10
ballX = wc.ball.x / self.ratio + (self.fieldOffsetX / self.ratio + width / 2) #(wc.ball.x + width) / self.ratio
ballY = -wc.ball.y / self.ratio + (self.fieldOffsetY / self.ratio + height / 2) #(-wc.ball.y + height) / self.ratio
#print ("Ball x: {} and y: {}".format(ballX, ballY))
qp.drawEllipse(ballX - (ballSize / 2), ballY - (ballSize / 2), ballSize, ballSize)
def drawPoints(self, qp):
qp.setPen(QtCore.Qt.red)
size = self.size()
for i in range(1000):
x = random.randint(1, size.width()-1)
y = random.randint(1, size.height()-1)
qp.drawPoint(x, y)
def refresh(self):
self.update()
#thread1 = RecvVision("recv")
thread2 = AposAI("send")
thread3 = InputCommands("input")
#thread1.start()
thread2.start()
thread3.start()
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
ex = FieldDisplay()
sys.exit(app.exec_())
print ("I had a good life. This is it though.")
|
|
# Copyright 2016-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
Unit tests for the arpignore module
"""
import os
import subprocess
import sys
import unittest
import mock
import moduletests.src.arpignore
try:
# Python 2.x
from cStringIO import StringIO
except ImportError:
# Python 3.x
from io import StringIO
if sys.hexversion >= 0x3040000:
# contextlib.redirect_stdout was introduced in Python 3.4
import contextlib
else:
# contextlib2 is a backport of contextlib from Python 3.5 and is compatible with Python2/3
import contextlib2 as contextlib
# builtins was named __builtin__ in Python 2 so accommodate the change for the purposes of mocking the open call
if sys.version_info >= (3,):
builtins_name = "builtins"
else:
builtins_name = "__builtin__"
class Testarpignore(unittest.TestCase):
config_file_path = "/etc/sysctl.d/55-arp-ignore.conf"
def setUp(self):
self.output = StringIO()
def tearDown(self):
self.output.close()
@mock.patch("subprocess.check_output")
def test_detect_noproblem(self, check_output_mock):
"""Test that no problem is detected with expected-good output."""
check_output_mock.return_value = "arp_ignore = 0"
self.assertFalse(moduletests.src.arpignore.detect())
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output")
def test_detect_problem(self, check_output_mock):
"""Test that the problem is detected with expected-bad output."""
check_output_mock.return_value = "arp_ignore = 1"
self.assertTrue(moduletests.src.arpignore.detect())
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output", side_effect=["net.ipv4.conf.all.arp_ignore = 1",
subprocess.CalledProcessError(1, "test")])
def test_fix_sysctlfail(self, check_output_mock):
with contextlib.redirect_stdout(self.output):
self.assertRaises(subprocess.CalledProcessError, moduletests.src.arpignore.fix, self.config_file_path)
self.assertTrue(check_output_mock.called)
self.assertTrue(self.output.getvalue().endswith(
"[UNFIXED] net.ipv4.conf.all.arp_ignore=0 failed for running system\n"))
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpignore.os.path.exists", side_effect=[False])
@mock.patch("moduletests.src.arpignore.open", side_effect=IOError)
def test_fix_write_new_fail(self, open_mock, exists_mock, check_output_mock):
check_output_mock.return_value = "net.ipv4.conf.lo.arp_announce = 0\nnet.ipv4.conf.all.arp_ignore = 1"
with contextlib.redirect_stdout(self.output):
self.assertRaises(IOError, moduletests.src.arpignore.fix, self.config_file_path)
self.assertTrue(open_mock.called)
self.assertTrue(exists_mock.called)
self.assertTrue(check_output_mock.called)
self.assertTrue(self.output.getvalue().endswith(
"[UNFIXED] Unable to open /etc/sysctl.d/55-arp-ignore.conf and write to it.\n"))
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpignore.os.path.exists", side_effect=[False])
@mock.patch("moduletests.src.arpignore.open", mock.mock_open())
def test_fix_write_new_success(self, exists_mock, check_output_mock):
check_output_mock.return_value = "net.ipv4.conf.lo.arp_announce = 0\nnet.ipv4.conf.all.arp_ignore = 1"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpignore.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith("[FIXED] /etc/sysctl.d/55-arp-ignore.conf written.\n"))
self.assertTrue(exists_mock.called)
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpignore.os.path.exists", side_effect=[True])
def test_fix_success(self, exists_mock, check_output_mock):
check_output_mock.return_value = "net.ipv4.conf.all.arp_ignore = 1\nsome_other = 0"
open_mock = mock.mock_open(read_data="#comment\n"
"net.ipv4.conf.all.arp_ignore = 1\n"
"net.ipv4.conf.lo.arp_ignore = 0\n"
"garbage\n")
        # mock_open does not support iteration, so add it manually:
        # readline() is called until an empty string (the sentinel) is returned.
def iter_func(self):
return iter(self.readline, "")
open_mock.return_value.__iter__ = iter_func
def py3_next_func(self):
return next(iter(self.readline, ""))
if sys.hexversion >= 0x3000000:
open_mock.return_value.__next__ = py3_next_func
with mock.patch("moduletests.src.arpignore.open", open_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpignore.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith("[FIXED] /etc/sysctl.d/55-arp-ignore.conf written.\n"))
self.assertEqual(str(open_mock.mock_calls), "[call('/etc/sysctl.d/55-arp-ignore.conf', 'r'),\n"
" call().__enter__(),\n call().readlines(),\n"
" call().__exit__(None, None, None),\n"
" call('/etc/sysctl.d/55-arp-ignore.conf', 'w'),\n"
" call().__enter__(),\n"
" call().write('#comment\\nnet.ipv4.conf.lo.arp_ignore = 0'),\n"
" call().write('\\n'),\n"
" call().write('net.ipv4.conf.all.arp_ignore = 0'),\n"
" call().write('\\n'),\n"
" call().__exit__(None, None, None)]")
self.assertTrue(exists_mock.called)
self.assertTrue(check_output_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict", return_value=dict())
@mock.patch("moduletests.src.arpignore.detect", return_value=False)
def test_run_success(self, detect_mock, config_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpignore.run())
self.assertEqual(self.output.getvalue(), "Determining if any interfaces are set to ignore arp requests\n"
"[SUCCESS] arp ignore is disabled for all interfaces.\n")
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict")
@mock.patch("moduletests.src.arpignore.detect", return_value=True)
def test_run_no_remediate(self, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": False,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpignore.run())
self.assertTrue("[UNFIXED] Remediation impossible without sudo and --remediate.\n"
"-- Running as root/sudo: True\n"
"-- Required --remediate flag specified: False\n"
"[FAILURE] arp ignore is enabled for one or more interfaces. Please see the module log\n"
in self.output.getvalue())
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict")
@mock.patch("moduletests.src.arpignore.detect", return_value=True)
@mock.patch("moduletests.src.arpignore.os.path.isfile", return_value=True)
@mock.patch("moduletests.src.arpignore.backup", return_value=True)
@mock.patch("moduletests.src.arpignore.fix", return_value=True)
@mock.patch("moduletests.src.arpignore.restore", return_value=True)
def test_run_failure_isfile(self,
restore_mock,
fix_mock,
backup_mock,
isfile_mock,
detect_mock,
config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {self.config_file_path: "/some/path"},
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpignore.run())
self.assertTrue("[FAILURE] arp ignore is enabled for one or more interfaces. "
"Please see the module log"
in self.output.getvalue())
self.assertTrue(restore_mock.called)
self.assertTrue(fix_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict")
@mock.patch("moduletests.src.arpignore.detect", return_value=True)
@mock.patch("moduletests.src.arpignore.os.path.isfile", return_value=False)
@mock.patch("moduletests.src.arpignore.fix", return_value=True)
def test_run_failure(self, fix_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpignore.run())
self.assertTrue("[FAILURE] arp ignore is enabled for one or more interfaces. "
"Please see the module log"
in self.output.getvalue())
self.assertTrue(fix_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict")
@mock.patch("moduletests.src.arpignore.detect", side_effect=(True, False))
@mock.patch("moduletests.src.arpignore.os.path.isfile", return_value=False)
@mock.patch("moduletests.src.arpignore.fix", return_value=True)
def test_run_fix(self, fix_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpignore.run())
self.assertEqual(self.output.getvalue(), "Determining if any interfaces are set to ignore arp requests\n"
"[SUCCESS] arp ignore is disabled for all interfaces "
"after remediation.\n")
self.assertTrue(fix_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict")
@mock.patch("moduletests.src.arpignore.detect", side_effect=Exception)
@mock.patch("moduletests.src.arpignore.restore", return_value=True)
def test_run_exception(self, restore_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {self.config_file_path: "/some/path"},
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpignore.run())
self.assertTrue(restore_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpignore.get_config_dict", side_effect=IOError)
def test_run_failure_config_exception(self, config_mock):
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpignore.run())
self.assertTrue(self.output.getvalue().endswith("Review the logs to determine the cause of the issue.\n"))
self.assertTrue(config_mock.called)
|
|
#===============================================================================
# Copyright (c) 2015, Max Zwiessele
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of GPy.plotting.matplot_dep.plot_definitions nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
import numpy as np
from ..abstract_plotting_library import AbstractPlottingLibrary
from .. import Tango
from . import defaults
import plotly
from plotly import tools
from plotly.graph_objs import Scatter, Scatter3d, Line,\
Marker, ErrorX, ErrorY, Bar, Heatmap, Trace,\
Annotations, Annotation, Contour, Font, Surface
from plotly.exceptions import PlotlyDictKeyError
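# Map matplotlib-style marker codes to the closest available plotly symbol names.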
SYMBOL_MAP = {
'o': 'dot',
'v': 'triangle-down',
'^': 'triangle-up',
'<': 'triangle-left',
'>': 'triangle-right',
's': 'square',
'+': 'cross',
'x': 'x',
'*': 'x', # no star yet in plotly!!
'D': 'diamond',
'd': 'diamond',
}
class PlotlyPlotsBase(AbstractPlottingLibrary):
def __init__(self):
super(PlotlyPlotsBase, self).__init__()
self._defaults = defaults.__dict__
self.current_states = dict()
def figure(self, rows=1, cols=1, specs=None, is_3d=False, **kwargs):
if specs is None:
specs = [[{'is_3d': is_3d}]*cols]*rows
figure = tools.make_subplots(rows, cols, specs=specs, **kwargs)
return figure
def new_canvas(self, figure=None, row=1, col=1, projection='2d',
xlabel=None, ylabel=None, zlabel=None,
title=None, xlim=None,
ylim=None, zlim=None, **kwargs):
#if 'filename' not in kwargs:
# print('PlotlyWarning: filename was not given, this may clutter your plotly workspace')
# filename = None
#else:
# filename = kwargs.pop('filename')
if figure is None:
figure = self.figure(is_3d=projection=='3d')
figure.layout.font = Font(family="Raleway, sans-serif")
if projection == '3d':
figure.layout.legend.x=.5
figure.layout.legend.bgcolor='#DCDCDC'
return (figure, row, col), kwargs
def add_to_canvas(self, canvas, traces, legend=False, **kwargs):
figure, row, col = canvas
def append_annotation(a, xref, yref):
if 'xref' not in a:
a['xref'] = xref
if 'yref' not in a:
a['yref'] = yref
figure.layout.annotations.append(a)
def append_trace(t, row, col):
figure.append_trace(t, row, col)
def recursive_append(traces):
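            # A traces argument may be a single plotly object, an Annotations
            # collection, a dict of named traces, or a nested list/tuple; walk
            # it and append every leaf to the subplot at (row, col).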
if isinstance(traces, Annotations):
xref, yref = figure._grid_ref[row-1][col-1]
for a in traces:
append_annotation(a, xref, yref)
# elif isinstance(traces, (Trace)): # doesn't work
# elif type(traces) in [v for k,v in go.__dict__.iteritems()]:
elif isinstance(traces, (Scatter, Scatter3d, ErrorX,
ErrorY, Bar, Heatmap, Trace, Contour, Surface)):
try:
append_trace(traces, row, col)
except PlotlyDictKeyError:
# Its a dictionary of plots:
for t in traces:
recursive_append(traces[t])
elif isinstance(traces, (dict)):
for t in traces:
recursive_append(traces[t])
elif isinstance(traces, (tuple, list)):
for t in traces:
recursive_append(t)
recursive_append(traces)
figure.layout['showlegend'] = legend
return canvas
def show_canvas(self, canvas, filename=None, **kwargs):
        raise NotImplementedError
def scatter(self, ax, X, Y, Z=None, color=Tango.colorsHex['mediumBlue'], cmap=None, label=None, marker='o', marker_kwargs=None, **kwargs):
try:
marker = SYMBOL_MAP[marker]
        except KeyError:
            # not a matplotlib marker code; pass the symbol through unchanged
            pass
marker_kwargs = marker_kwargs or {}
if 'symbol' not in marker_kwargs:
marker_kwargs['symbol'] = marker
X, Y = np.squeeze(X), np.squeeze(Y)
if Z is not None:
Z = np.squeeze(Z)
return Scatter3d(x=X, y=Y, z=Z, mode='markers',
showlegend=label is not None,
marker=Marker(color=color, colorscale=cmap, **marker_kwargs),
name=label, **kwargs)
return Scatter(x=X, y=Y, mode='markers', showlegend=label is not None,
marker=Marker(color=color, colorscale=cmap, **marker_kwargs),
name=label, **kwargs)
def plot(self, ax, X, Y, Z=None, color=None, label=None, line_kwargs=None, **kwargs):
if 'mode' not in kwargs:
kwargs['mode'] = 'lines'
X, Y = np.squeeze(X), np.squeeze(Y)
if Z is not None:
Z = np.squeeze(Z)
return Scatter3d(x=X, y=Y, z=Z, showlegend=label is not None, line=Line(color=color, **line_kwargs or {}), name=label, **kwargs)
return Scatter(x=X, y=Y, showlegend=label is not None, line=Line(color=color, **line_kwargs or {}), name=label, **kwargs)
def plot_axis_lines(self, ax, X, color=Tango.colorsHex['mediumBlue'], label=None, marker_kwargs=None, **kwargs):
if X.shape[1] == 1:
annotations = Annotations()
for i, row in enumerate(X):
annotations.append(
Annotation(
text='',
x=row[0], y=0,
yref='paper',
ax=0, ay=20,
arrowhead=2,
arrowsize=1,
arrowwidth=2,
arrowcolor=color,
showarrow=True,
#showlegend=i==0,
#label=label,
))
return annotations
elif X.shape[1] == 2:
            marker_kwargs = marker_kwargs or {}
            marker_kwargs.setdefault('symbol', 'diamond')
opacity = kwargs.pop('opacity', .8)
return Scatter3d(x=X[:, 0], y=X[:, 1], z=np.zeros(X.shape[0]),
mode='markers',
projection=dict(z=dict(show=True, opacity=opacity)),
marker=Marker(color=color, **marker_kwargs or {}),
opacity=0,
name=label,
showlegend=label is not None, **kwargs)
def barplot(self, canvas, x, height, width=0.8, bottom=0, color=Tango.colorsHex['mediumBlue'], label=None, **kwargs):
figure, _, _ = canvas
if 'barmode' in kwargs:
figure.layout['barmode'] = kwargs.pop('barmode')
return Bar(x=x, y=height, marker=Marker(color=color), name=label)
def xerrorbar(self, ax, X, Y, error, Z=None, color=Tango.colorsHex['mediumBlue'], label=None, error_kwargs=None, **kwargs):
error_kwargs = error_kwargs or {}
if (error.shape[0] == 2) and (error.ndim == 2):
error_kwargs.update(dict(array=error[1], arrayminus=error[0], symmetric=False))
else:
error_kwargs.update(dict(array=error, symmetric=True))
X, Y = np.squeeze(X), np.squeeze(Y)
if Z is not None:
Z = np.squeeze(Z)
return Scatter3d(x=X, y=Y, z=Z, mode='markers',
error_x=ErrorX(color=color, **error_kwargs or {}),
marker=Marker(size='0'), name=label,
showlegend=label is not None, **kwargs)
return Scatter(x=X, y=Y, mode='markers',
error_x=ErrorX(color=color, **error_kwargs or {}),
marker=Marker(size='0'), name=label,
showlegend=label is not None,
**kwargs)
def yerrorbar(self, ax, X, Y, error, Z=None, color=Tango.colorsHex['mediumBlue'], label=None, error_kwargs=None, **kwargs):
error_kwargs = error_kwargs or {}
if (error.shape[0] == 2) and (error.ndim == 2):
error_kwargs.update(dict(array=error[1], arrayminus=error[0], symmetric=False))
else:
error_kwargs.update(dict(array=error, symmetric=True))
X, Y = np.squeeze(X), np.squeeze(Y)
if Z is not None:
Z = np.squeeze(Z)
return Scatter3d(x=X, y=Y, z=Z, mode='markers',
error_y=ErrorY(color=color, **error_kwargs or {}),
marker=Marker(size='0'), name=label,
showlegend=label is not None, **kwargs)
return Scatter(x=X, y=Y, mode='markers',
error_y=ErrorY(color=color, **error_kwargs or {}),
marker=Marker(size='0'), name=label,
showlegend=label is not None,
**kwargs)
def imshow(self, ax, X, extent=None, label=None, vmin=None, vmax=None, **imshow_kwargs):
        if 'showscale' not in imshow_kwargs:
imshow_kwargs['showscale'] = False
return Heatmap(z=X, name=label,
x0=extent[0], dx=float(extent[1]-extent[0])/(X.shape[0]-1),
y0=extent[2], dy=float(extent[3]-extent[2])/(X.shape[1]-1),
zmin=vmin, zmax=vmax,
showlegend=label is not None,
hoverinfo='z',
**imshow_kwargs)
def imshow_interact(self, ax, plot_function, extent=None, label=None, resolution=None, vmin=None, vmax=None, **imshow_kwargs):
# TODO stream interaction?
super(PlotlyPlotsBase, self).imshow_interact(ax, plot_function)
def annotation_heatmap(self, ax, X, annotation, extent=None, label='Gradient', imshow_kwargs=None, **annotation_kwargs):
        imshow_kwargs = imshow_kwargs or {}
        imshow_kwargs.setdefault('label', label)
        imshow_kwargs.setdefault('showscale', True)
imshow = self.imshow(ax, X, extent, **imshow_kwargs)
X = X-X.min()
X /= X.max()/2.
X -= 1
x = np.linspace(extent[0], extent[1], X.shape[0])
y = np.linspace(extent[2], extent[3], X.shape[1])
annotations = Annotations()
for n, row in enumerate(annotation):
for m, val in enumerate(row):
var = X[n][m]
annotations.append(
Annotation(
text=str(val),
x=x[m], y=y[n],
xref='x1', yref='y1',
font=dict(color='white' if np.abs(var) > 0.8 else 'black', size=10),
opacity=.5,
showarrow=False,
))
return imshow, annotations
def annotation_heatmap_interact(self, ax, plot_function, extent, label=None, resolution=15, imshow_kwargs=None, **annotation_kwargs):
super(PlotlyPlotsBase, self).annotation_heatmap_interact(ax, plot_function, extent)
def contour(self, ax, X, Y, C, levels=20, label=None, **kwargs):
return Contour(x=X, y=Y, z=C,
#ncontours=levels, contours=Contours(start=C.min(), end=C.max(), size=(C.max()-C.min())/levels),
name=label, **kwargs)
def surface(self, ax, X, Y, Z, color=None, label=None, **kwargs):
return Surface(x=X, y=Y, z=Z, name=label, showlegend=label is not None, **kwargs)
def fill_between(self, ax, X, lower, upper, color=Tango.colorsHex['mediumBlue'], label=None, line_kwargs=None, **kwargs):
        if 'line' not in kwargs:
kwargs['line'] = Line(**line_kwargs or {})
else:
kwargs['line'].update(line_kwargs or {})
if color.startswith('#'):
fcolor = 'rgba({c[0]}, {c[1]}, {c[2]}, {alpha})'.format(c=Tango.hex2rgb(color), alpha=kwargs.get('opacity', 1.0))
else: fcolor = color
u = Scatter(x=X, y=upper, fillcolor=fcolor, showlegend=label is not None, name=label, fill='tonextx', legendgroup='{}_fill_({},{})'.format(label, ax[1], ax[2]), **kwargs)
#fcolor = '{}, {alpha})'.format(','.join(fcolor.split(',')[:-1]), alpha=0.0)
l = Scatter(x=X, y=lower, fillcolor=fcolor, showlegend=False, name=label, legendgroup='{}_fill_({},{})'.format(label, ax[1], ax[2]), **kwargs)
return l, u
def fill_gradient(self, canvas, X, percentiles, color=Tango.colorsHex['mediumBlue'], label=None, **kwargs):
if color.startswith('#'):
colarray = Tango.hex2rgb(color)
opacity = .9
        else:
            # Parse an 'rgb(r, g, b)' or 'rgba(r, g, b, a)' colour string.
            colarray = [float(c) for c in color.strip(')').split('(')[1].split(',')]
            opacity = .9
            if len(colarray) == 4:
                colarray, opacity = colarray[:3], colarray[3]
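        # Fade the band opacity linearly from the middle percentile towards the
        # outer ones so the plotted gradient is densest around the median.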
alpha = opacity*(1.-np.abs(np.linspace(-1,1,len(percentiles)-1)))
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
from itertools import tee
a, b = tee(iterable)
next(b, None)
return zip(a, b)
polycol = []
for i, y1, a in zip(range(len(percentiles)), percentiles, alpha):
fcolor = 'rgba({}, {}, {}, {alpha})'.format(*colarray, alpha=a)
if i == len(percentiles)/2:
polycol.append(Scatter(x=X, y=y1, fillcolor=fcolor, showlegend=True,
name=label, line=Line(width=0, smoothing=0), mode='none', fill='tonextx',
legendgroup='density', hoverinfo='none', **kwargs))
else:
polycol.append(Scatter(x=X, y=y1, fillcolor=fcolor, showlegend=False,
name=None, line=Line(width=1, smoothing=0, color=fcolor), mode='none', fill='tonextx',
legendgroup='density', hoverinfo='none', **kwargs))
return polycol
class PlotlyPlotsOnline(PlotlyPlotsBase):
def __init__(self):
super(PlotlyPlotsOnline, self).__init__()
def show_canvas(self, canvas, filename=None, **kwargs):
figure, _, _ = canvas
if len(figure.data) == 0:
# add mock data
figure.append_trace(Scatter(x=[], y=[], name='', showlegend=False), 1, 1)
from ..gpy_plot.plot_util import in_ipynb
if in_ipynb():
return plotly.plotly.iplot(figure, filename=filename, **kwargs)
else:
return plotly.plotly.plot(figure, filename=filename, **kwargs)#self.current_states[hex(id(figure))]['filename'])
class PlotlyPlotsOffline(PlotlyPlotsBase):
def __init__(self):
super(PlotlyPlotsOffline, self).__init__()
def show_canvas(self, canvas, filename=None, **kwargs):
figure, _, _ = canvas
if len(figure.data) == 0:
# add mock data
figure.append_trace(Scatter(x=[], y=[], name='', showlegend=False), 1, 1)
from ..gpy_plot.plot_util import in_ipynb
if in_ipynb():
plotly.offline.init_notebook_mode(connected=True)
return plotly.offline.iplot(figure, filename=filename, **kwargs)#self.current_states[hex(id(figure))]['filename'])
else:
return plotly.offline.plot(figure, filename=filename, **kwargs)
|
|
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import StringIO
from webkitpy.common.checkout.scm import CheckoutNeedsUpdate
from webkitpy.common.checkout.scm.scm_mock import MockSCM
from webkitpy.common.net.bugzilla import Attachment
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.models import test_results
from webkitpy.layout_tests.models import test_failures
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.commands.commandtest import CommandsTest
from webkitpy.tool.commands.queues import *
from webkitpy.tool.commands.queuestest import QueuesTest
from webkitpy.tool.commands.stepsequence import StepSequence
from webkitpy.common.net.statusserver_mock import MockStatusServer
from webkitpy.tool.mocktool import MockTool, MockOptions
class TestCommitQueue(CommitQueue):
def __init__(self, tool=None):
CommitQueue.__init__(self)
if tool:
self.bind_to_tool(tool)
self._options = MockOptions(confirm=False, parent_command="commit-queue", port=None)
def begin_work_queue(self):
output_capture = OutputCapture()
output_capture.capture_output()
CommitQueue.begin_work_queue(self)
output_capture.restore_output()
class TestQueue(AbstractPatchQueue):
name = "test-queue"
class TestReviewQueue(AbstractReviewQueue):
name = "test-review-queue"
class TestFeederQueue(FeederQueue):
_sleep_duration = 0
class AbstractQueueTest(CommandsTest):
def test_log_directory(self):
self.assertEqual(TestQueue()._log_directory(), os.path.join("..", "test-queue-logs"))
def _assert_run_webkit_patch(self, run_args, port=None):
queue = TestQueue()
tool = MockTool()
tool.status_server.bot_id = "gort"
tool.executive = Mock()
queue.bind_to_tool(tool)
queue._options = Mock()
queue._options.port = port
queue.run_webkit_patch(run_args)
expected_run_args = ["echo", "--status-host=example.com", "--bot-id=gort"]
if port:
expected_run_args.append("--port=%s" % port)
expected_run_args.extend(run_args)
tool.executive.run_command.assert_called_with(expected_run_args, cwd='/mock-checkout')
def test_run_webkit_patch(self):
self._assert_run_webkit_patch([1])
self._assert_run_webkit_patch(["one", 2])
self._assert_run_webkit_patch([1], port="mockport")
def test_iteration_count(self):
queue = TestQueue()
queue._options = Mock()
queue._options.iterations = 3
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertFalse(queue.should_continue_work_queue())
def test_no_iteration_count(self):
queue = TestQueue()
queue._options = Mock()
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
self.assertTrue(queue.should_continue_work_queue())
def _assert_log_message(self, script_error, log_message):
failure_log = AbstractQueue._log_from_script_error_for_upload(script_error, output_limit=10)
self.assertTrue(failure_log.read(), log_message)
def test_log_from_script_error_for_upload(self):
self._assert_log_message(ScriptError("test"), "test")
unicode_tor = u"WebKit \u2661 Tor Arne Vestb\u00F8!"
utf8_tor = unicode_tor.encode("utf-8")
self._assert_log_message(ScriptError(unicode_tor), utf8_tor)
script_error = ScriptError(unicode_tor, output=unicode_tor)
expected_output = "%s\nLast %s characters of output:\n%s" % (utf8_tor, 10, utf8_tor[-10:])
self._assert_log_message(script_error, expected_output)
class FeederQueueTest(QueuesTest):
def test_feeder_queue(self):
queue = TestFeederQueue()
tool = MockTool(log_executive=True)
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("feeder-queue"),
"process_work_item": """Warning, attachment 10001 on bug 50000 has invalid committer ([email protected])
Warning, attachment 10001 on bug 50000 has invalid committer ([email protected])
MOCK setting flag 'commit-queue' to '-' on attachment '10001' with comment 'Rejecting attachment 10001 from commit-queue.\n\[email protected] does not have committer permissions according to http://trac.webkit.org/browser/trunk/Tools/Scripts/webkitpy/common/config/committers.py.
- If you do not have committer rights please read http://webkit.org/coding/contributing.html for instructions on how to use bugzilla flags.
- If you have committer rights please correct the error in Tools/Scripts/webkitpy/common/config/committers.py by adding yourself to the file (no review needed). The commit-queue restarts itself every 2 hours. After restart the commit-queue will correctly respect your committer rights.'
MOCK: update_work_items: commit-queue [10005, 10000]
Feeding commit-queue items [10005, 10000]
Feeding EWS (1 r? patch, 1 new)
MOCK: submit_to_ews: 10002
""",
"handle_unexpected_error": "Mock error message\n",
}
self.assert_queue_outputs(queue, tool=tool, expected_logs=expected_logs)
class AbstractPatchQueueTest(CommandsTest):
def test_next_patch(self):
queue = AbstractPatchQueue()
tool = MockTool()
queue.bind_to_tool(tool)
queue._options = Mock()
queue._options.port = None
self.assertIsNone(queue._next_patch())
tool.status_server = MockStatusServer(work_items=[2, 10000, 10001])
expected_stdout = "MOCK: fetch_attachment: 2 is not a known attachment id\n" # A mock-only message to prevent us from making mistakes.
expected_logs = "MOCK: release_work_item: None 2\n"
patch = OutputCapture().assert_outputs(self, queue._next_patch, expected_stdout=expected_stdout, expected_logs=expected_logs)
# The patch.id() == 2 is ignored because it doesn't exist.
self.assertEqual(patch.id(), 10000)
self.assertEqual(queue._next_patch().id(), 10001)
self.assertEqual(queue._next_patch(), None) # When the queue is empty
class PatchProcessingQueueTest(CommandsTest):
def test_upload_results_archive_for_patch(self):
queue = PatchProcessingQueue()
queue.name = "mock-queue"
tool = MockTool()
queue.bind_to_tool(tool)
queue._options = Mock()
queue._options.port = None
patch = queue._tool.bugs.fetch_attachment(10001)
expected_logs = """MOCK add_attachment_to_bug: bug_id=50000, description=Archive of layout-test-results from bot for mac-snowleopard filename=layout-test-results.zip mimetype=None
-- Begin comment --
The attached test failures were seen while running run-webkit-tests on the mock-queue.
Port: mac-snowleopard Platform: MockPlatform 1.0
-- End comment --
"""
OutputCapture().assert_outputs(self, queue._upload_results_archive_for_patch, [patch, Mock()], expected_logs=expected_logs)
class NeedsUpdateSequence(StepSequence):
def _run(self, tool, options, state):
raise CheckoutNeedsUpdate([], 1, "", None)
class AlwaysCommitQueueTool(object):
def __init__(self):
self.status_server = MockStatusServer()
def command_by_name(self, name):
return CommitQueue
class SecondThoughtsCommitQueue(TestCommitQueue):
def __init__(self, tool=None):
self._reject_patch = False
TestCommitQueue.__init__(self, tool)
def run_command(self, command):
# We want to reject the patch after the first validation,
# so wait to reject it until after some other command has run.
self._reject_patch = True
return CommitQueue.run_command(self, command)
def refetch_patch(self, patch):
if not self._reject_patch:
return self._tool.bugs.fetch_attachment(patch.id())
attachment_dictionary = {
"id": patch.id(),
"bug_id": patch.bug_id(),
"name": "Rejected",
"is_obsolete": True,
"is_patch": False,
"review": "-",
"reviewer_email": "[email protected]",
"commit-queue": "-",
"committer_email": "[email protected]",
"attacher_email": "Contributer1",
}
return Attachment(attachment_dictionary, None)
class CommitQueueTest(QueuesTest):
def _mock_test_result(self, testname):
return test_results.TestResult(testname, [test_failures.FailureTextMismatch()])
def test_commit_queue(self):
tool = MockTool()
tool.filesystem.write_text_file('/tmp/layout-test-results/full_results.json', '') # Otherwise the commit-queue will hit a KeyError trying to read the results from the MockFileSystem.
tool.filesystem.write_text_file('/tmp/layout-test-results/webkit_unit_tests_output.xml', '')
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean --port=mac
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=mac
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com build --no-clean --no-update --build-style=release --port=mac
MOCK: update_status: commit-queue Built patch
Running: webkit-patch --status-host=example.com build-and-test --no-clean --no-update --test --non-interactive --port=mac
MOCK: update_status: commit-queue Passed tests
Running: webkit-patch --status-host=example.com land-attachment --force-clean --non-interactive --parent-command=commit-queue 10000 --port=mac
MOCK: update_status: commit-queue Landed patch
MOCK: update_status: commit-queue Pass
MOCK: release_work_item: commit-queue 10000
""",
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
self.assert_queue_outputs(CommitQueue(), tool=tool, expected_logs=expected_logs)
def test_commit_queue_failure(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """MOCK: update_status: commit-queue Cleaned working directory
MOCK: update_status: commit-queue Updated working directory
MOCK: update_status: commit-queue Patch does not apply
MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMOCK script error
Full output: http://dummy_url'
MOCK: update_status: commit-queue Fail
MOCK: release_work_item: commit-queue 10000
""",
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
queue = CommitQueue()
def mock_run_webkit_patch(command):
if command[0] == 'clean' or command[0] == 'update':
# We want cleaning to succeed so we can error out on a step
# that causes the commit-queue to reject the patch.
return
raise ScriptError('MOCK script error')
queue.run_webkit_patch = mock_run_webkit_patch
self.assert_queue_outputs(queue, expected_logs=expected_logs)
def test_commit_queue_failure_with_failing_tests(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """MOCK: update_status: commit-queue Cleaned working directory
MOCK: update_status: commit-queue Updated working directory
MOCK: update_status: commit-queue Patch does not apply
MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nNew failing tests:
mock_test_name.html
another_test_name.html
Full output: http://dummy_url'
MOCK: update_status: commit-queue Fail
MOCK: release_work_item: commit-queue 10000
""",
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
queue = CommitQueue()
def mock_run_webkit_patch(command):
if command[0] == 'clean' or command[0] == 'update':
# We want cleaning to succeed so we can error out on a step
# that causes the commit-queue to reject the patch.
return
queue._expected_failures.unexpected_failures_observed = lambda results: ["mock_test_name.html", "another_test_name.html"]
raise ScriptError('MOCK script error')
queue.run_webkit_patch = mock_run_webkit_patch
self.assert_queue_outputs(queue, expected_logs=expected_logs)
def test_rollout(self):
tool = MockTool()
tool.filesystem.write_text_file('/tmp/layout-test-results/full_results.json', '') # Otherwise the commit-queue will hit a KeyError trying to read the results from the MockFileSystem.
tool.filesystem.write_text_file('/tmp/layout-test-results/webkit_unit_tests_output.xml', '')
tool.buildbot.light_tree_on_fire()
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean --port=%(port)s
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=%(port)s
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000 --port=%(port)s
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10000 --port=%(port)s
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com build --no-clean --no-update --build-style=release --port=%(port)s
MOCK: update_status: commit-queue Built patch
Running: webkit-patch --status-host=example.com build-and-test --no-clean --no-update --test --non-interactive --port=%(port)s
MOCK: update_status: commit-queue Passed tests
Running: webkit-patch --status-host=example.com land-attachment --force-clean --non-interactive --parent-command=commit-queue 10000 --port=%(port)s
MOCK: update_status: commit-queue Landed patch
MOCK: update_status: commit-queue Pass
MOCK: release_work_item: commit-queue 10000
""" % {"port": "mac"},
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10000' with comment 'Rejecting attachment 10000 from commit-queue.\n\nMock error message'\n",
}
self.assert_queue_outputs(CommitQueue(), tool=tool, expected_logs=expected_logs)
def test_rollout_lands(self):
tool = MockTool()
tool.buildbot.light_tree_on_fire()
rollout_patch = tool.bugs.fetch_attachment(10005) # _patch6, a rollout patch.
assert(rollout_patch.is_rollout())
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("commit-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean --port=%(port)s
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=%(port)s
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10005 --port=%(port)s
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10005 --port=%(port)s
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com land-attachment --force-clean --non-interactive --parent-command=commit-queue 10005 --port=%(port)s
MOCK: update_status: commit-queue Landed patch
MOCK: update_status: commit-queue Pass
MOCK: release_work_item: commit-queue 10005
""" % {"port": "mac"},
"handle_script_error": "ScriptError error message\n\nMOCK output\n",
"handle_unexpected_error": "MOCK setting flag 'commit-queue' to '-' on attachment '10005' with comment 'Rejecting attachment 10005 from commit-queue.\n\nMock error message'\n",
}
self.assert_queue_outputs(CommitQueue(), tool=tool, work_item=rollout_patch, expected_logs=expected_logs)
def test_auto_retry(self):
queue = CommitQueue()
options = Mock()
options.parent_command = "commit-queue"
tool = AlwaysCommitQueueTool()
sequence = NeedsUpdateSequence(None)
expected_logs = """Commit failed because the checkout is out of date. Please update and try again.
MOCK: update_status: commit-queue Tests passed, but commit failed (checkout out of date). Updating, then landing without building or re-running tests.
"""
state = {'patch': None}
OutputCapture().assert_outputs(self, sequence.run_and_handle_errors, [tool, options, state], expected_exception=TryAgain, expected_logs=expected_logs)
self.assertTrue(options.update)
self.assertFalse(options.build)
self.assertFalse(options.test)
def test_manual_reject_during_processing(self):
queue = SecondThoughtsCommitQueue(MockTool())
queue.begin_work_queue()
queue._tool.filesystem.write_text_file('/tmp/layout-test-results/full_results.json', '') # Otherwise the commit-queue will hit a KeyError trying to read the results from the MockFileSystem.
queue._tool.filesystem.write_text_file('/tmp/layout-test-results/webkit_unit_tests_output.xml', '')
queue._options = Mock()
queue._options.port = None
expected_logs = """Running: webkit-patch --status-host=example.com clean --port=mac
MOCK: update_status: commit-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update --port=mac
MOCK: update_status: commit-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue Applied patch
Running: webkit-patch --status-host=example.com validate-changelog --check-oops --non-interactive 10000 --port=mac
MOCK: update_status: commit-queue ChangeLog validated
Running: webkit-patch --status-host=example.com build --no-clean --no-update --build-style=release --port=mac
MOCK: update_status: commit-queue Built patch
Running: webkit-patch --status-host=example.com build-and-test --no-clean --no-update --test --non-interactive --port=mac
MOCK: update_status: commit-queue Passed tests
MOCK: update_status: commit-queue Retry
MOCK: release_work_item: commit-queue 10000
"""
self.maxDiff = None
OutputCapture().assert_outputs(self, queue.process_work_item, [QueuesTest.mock_work_item], expected_logs=expected_logs)
def test_report_flaky_tests(self):
queue = TestCommitQueue(MockTool())
expected_logs = """MOCK bug comment: bug_id=50002, cc=None
--- Begin comment ---
The commit-queue just saw foo/bar.html flake (text diff) while processing attachment 10000 on bug 50000.
Port: MockPort Platform: MockPlatform 1.0
--- End comment ---
MOCK add_attachment_to_bug: bug_id=50002, description=Failure diff from bot filename=failure.diff mimetype=None
MOCK bug comment: bug_id=50002, cc=None
--- Begin comment ---
The commit-queue just saw bar/baz.html flake (text diff) while processing attachment 10000 on bug 50000.
Port: MockPort Platform: MockPlatform 1.0
--- End comment ---
bar/baz-diffs.txt does not exist in results archive, uploading entire archive.
MOCK add_attachment_to_bug: bug_id=50002, description=Archive of layout-test-results from bot filename=layout-test-results.zip mimetype=None
MOCK bug comment: bug_id=50000, cc=None
--- Begin comment ---
The commit-queue encountered the following flaky tests while processing attachment 10000:
foo/bar.html bug 50002 (author: [email protected])
bar/baz.html bug 50002 (author: [email protected])
The commit-queue is continuing to process your patch.
--- End comment ---
"""
test_names = ["foo/bar.html", "bar/baz.html"]
test_results = [self._mock_test_result(name) for name in test_names]
class MockZipFile(object):
def __init__(self):
self.fp = StringIO()
def read(self, path):
return ""
def namelist(self):
# This is intentionally missing one diffs.txt to exercise the "upload the whole zip" codepath.
return ['foo/bar-diffs.txt']
OutputCapture().assert_outputs(self, queue.report_flaky_tests, [QueuesTest.mock_work_item, test_results, MockZipFile()], expected_logs=expected_logs)
def test_did_pass_testing_ews(self):
tool = MockTool()
patch = tool.bugs.fetch_attachment(10000)
queue = TestCommitQueue(tool)
self.assertFalse(queue.did_pass_testing_ews(patch))
class StyleQueueTest(QueuesTest):
def test_style_queue_with_style_exception(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("style-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean
MOCK: update_status: style-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update
MOCK: update_status: style-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000
MOCK: update_status: style-queue Applied patch
Running: webkit-patch --status-host=example.com apply-watchlist-local 50000
MOCK: update_status: style-queue Watchlist applied
Running: webkit-patch --status-host=example.com check-style-local --non-interactive --quiet
MOCK: update_status: style-queue Style checked
MOCK: update_status: style-queue Pass
MOCK: release_work_item: style-queue 10000
""",
"handle_unexpected_error": "Mock error message\n",
"handle_script_error": "MOCK output\n",
}
tool = MockTool(executive_throws_when_run=set(['check-style']))
self.assert_queue_outputs(StyleQueue(), expected_logs=expected_logs, tool=tool)
def test_style_queue_with_watch_list_exception(self):
expected_logs = {
"begin_work_queue": self._default_begin_work_queue_logs("style-queue"),
"process_work_item": """Running: webkit-patch --status-host=example.com clean
MOCK: update_status: style-queue Cleaned working directory
Running: webkit-patch --status-host=example.com update
MOCK: update_status: style-queue Updated working directory
Running: webkit-patch --status-host=example.com apply-attachment --no-update --non-interactive 10000
MOCK: update_status: style-queue Applied patch
Running: webkit-patch --status-host=example.com apply-watchlist-local 50000
Exception for ['echo', '--status-host=example.com', 'apply-watchlist-local', 50000]
MOCK command output
MOCK: update_status: style-queue Unabled to apply watchlist
Running: webkit-patch --status-host=example.com check-style-local --non-interactive --quiet
MOCK: update_status: style-queue Style checked
MOCK: update_status: style-queue Pass
MOCK: release_work_item: style-queue 10000
""",
"handle_unexpected_error": "Mock error message\n",
"handle_script_error": "MOCK output\n",
}
tool = MockTool(executive_throws_when_run=set(['apply-watchlist-local']))
self.assert_queue_outputs(StyleQueue(), expected_logs=expected_logs, tool=tool)
|
|
#!/usr/bin/python2.7
"""
Copyright (c) 2014, ICFLIX Media FZ LLC All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
Desc: Generate Nagios configuration from a given file (e.g. one produced by check_multi).
"""
import logging
import logging.handlers
import json
import os.path
import re
import sys
import yaml
from nagios_to_yaml import NagiosToYaml
ICINGA_DIR = '/etc/icinga'
LOG_FORMAT = '%(asctime)s %(levelname)-10s %(message)s'
MACHINEDB_FILE = '/etc/icinga/machines.json'
NAGIOS_DEFS_FILE = '/etc/icinga/nagios.yml'
NAGIOS_TKEYS = [
'commands',
'contacts',
'contactgroups',
'datacenters',
'hostgroups',
'hosts',
'services'
]
STAGING_DOMAIN = 'icflix.io'
# 7 minutes
SVC_FRESHNESS_THRESHOLD = 420
# Make sure Nagios knows all Hosts in MachineDB
# -> <FQDN>.cfg must exist for each and every Host in MDB
# Re-Generate hostgroups
# Re-Generate contacts
# Re-Generate contactsgroups
# Re-Generate commands
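# Illustrative invocations only (the supported actions are parsed in main()
# and described by print_help() below; script name and paths are hypothetical):
#   ./nagios_config.py convert /tmp/nagios.cfg /tmp/nagios.yml
#   ./nagios_config.py import /var/spool/icinga/web01.icflix.com.cfg
#   ./nagios_config.py regen hostgroups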
class NagiosConfigGenerator(object):
"""Generate Nagios Configuration for *ONE* Host from given file."""
def __init__(self):
self.machine_db = None
self.nagios_db = None
self.load_machine_db()
self.load_nagios_definitions()
self.mdb_to_nagios()
def add_datacenter_to_nagios(self, dct_dict):
"""Add given Datacenter to Nagios. If given Datacenter is already
known, merge in attributes/values, but don't over-ride Nagios ones.
"""
nagios_dcs = self.nagios_db['datacenters']
dct_name = dct_dict.pop('host_name')
if dct_name not in nagios_dcs:
nagios_dcs[dct_name] = {}
nagios_dct = nagios_dcs[dct_name]
if 'hostgroups' not in nagios_dct:
nagios_dct['hostgroups'] = list()
nagios_dct['hostgroups'].append('datacenter')
if 'host' not in nagios_dct:
nagios_dct['host'] = {}
for attr in dct_dict.iterkeys():
if attr in nagios_dct['host']:
# Don't over-ride Nagios definitions
continue
nagios_dct['host'][attr] = dct_dict[attr]
def add_host_to_nagios(self, host_dict, is_lxc):
"""Add given Host to Nagios. If given Host is already known, merge in
values, but don't over-ride Nagios ones.
"""
nagios_hosts = self.nagios_db['hosts']
hostname = host_dict.pop('host_name')
if hostname not in nagios_hosts:
nagios_hosts[hostname] = {}
nagios_host = nagios_hosts[hostname]
if 'hostgroups' not in nagios_host:
nagios_host['hostgroups'] = list()
auto_hostgroup = self.get_auto_hostgroup(hostname)
nagios_host['hostgroups'].append(auto_hostgroup)
if is_lxc:
nagios_host['hostgroups'].append('lxc')
if ('_DOMAIN' in host_dict
and host_dict['_DOMAIN'] == STAGING_DOMAIN):
nagios_host['hostgroups'].append('stage')
if 'host' not in nagios_host:
nagios_host['host'] = {}
for attr in host_dict.iterkeys():
if attr in nagios_host['host']:
# Don't over-ride Nagios definitions
continue
nagios_host['host'][attr] = host_dict[attr]
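# Illustrative sketch (hypothetical host): calling add_host_to_nagios() with
# {'host_name': 'web01.icflix.io', '_DOMAIN': 'icflix.io', ...} and is_lxc=True
# leaves nagios_db['hosts']['web01.icflix.io'] with hostgroups
# ['web', 'lxc', 'stage'] and the remaining attributes merged under 'host'
# (attributes already defined in Nagios are kept as-is).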
def add_services_to_host(self, nagios_host, ext_svcs):
"""Add (external) service definition to Nagios."""
if 'services' not in nagios_host:
nagios_host['services'] = {}
nagios_svcs = nagios_host['services']
for svc_key in ext_svcs['services'].iterkeys():
if svc_key not in nagios_svcs:
nagios_svcs[svc_key] = {}
nagios_svc = nagios_svcs[svc_key]
for attr in ext_svcs['services'][svc_key].iterkeys():
if attr in nagios_svc:
continue
nagios_svc[attr] = ext_svcs['services'][svc_key][attr]
def ensure_host_definitions(self):
"""Ensure Nagios knows all Hosts defined in MDB. This is required in
order to re-generate Hostgroups, because it could easily happen that
Nagios wouldn't know about Host(s) referenced in hostgroups.
"""
for host_key in self.nagios_db['hosts'].iterkeys():
host_dict = self.nagios_db['hosts'][host_key]
host_dict['host']['host_name'] = host_key
self.ensure_host_definition(host_dict)
def ensure_host_definition(self, host_dict):
"""Ensure file with Host definition exists."""
if host_dict is None:
return (-1)
host_file = ('%s/objects/host_%s.cfg' %
(ICINGA_DIR, host_dict['host']['host_name']))
if os.path.exists(host_file):
#logging.debug("File '%s' exists.", host_file)
return 1
fhandle = open(host_file, 'w+')
self.write_definition(fhandle, 'host', host_dict['host'])
if 'services' not in host_dict:
host_dict['services'] = {}
dummy_svc = dict()
dummy_svc['active_checks_enabled'] = 1
dummy_svc['check_command'] = 'return-ok'
dummy_svc['check_interval'] = 20
dummy_svc['host_name'] = host_dict['host']['host_name']
dummy_svc['use'] = 'generic-service'
host_dict['services']['dummy-ok'] = dummy_svc
for service_key in host_dict['services'].iterkeys():
service_copy = host_dict['services'][service_key]
service_copy['service_description'] = service_key
self.write_definition(fhandle, 'service',
service_copy)
del service_copy
fhandle.close()
return 0
def finish_host_definition(self, host_dict, hostname):
"""Add/over-ride attributes in Host definition."""
if hostname not in self.nagios_db['hosts']:
return
if 'host' not in self.nagios_db['hosts'][hostname]:
return
for attr in self.nagios_db['hosts'][hostname]['host'].iterkeys():
host_dict[attr] = self.nagios_db['hosts'][hostname]['host'][attr]
def get_auto_hostgroup(self, hostname):
"""Determine automatic Nagios hostgroup."""
auto_hostgroup = hostname.split('.')[0]
auto_hostgroup = re.sub(r'(\d+$|\d+[a-z]+)$', r'', auto_hostgroup)
return auto_hostgroup
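# Illustrative examples (hypothetical hostnames):
#   'web01.icflix.com'   -> 'web'     (trailing digits stripped)
#   'db2a.icflix.com'    -> 'db'      (digits plus letter suffix stripped)
#   'gateway.icflix.com' -> 'gateway' (nothing to strip)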
def get_host_dict(self, hostname, machine_ip, ssh_port, parents):
"""Create Nagios 'host' as a dictionary from given params.
Parents is expected to be either None or a list.
"""
host_dict = {}
host_dict['use'] = 'generic-host'
host_dict['host_name'] = hostname
host_dict['address'] = machine_ip
if parents is not None:
host_dict['parents'] = ','.join(parents)
if ssh_port is not None:
host_dict['_SSH_PORT'] = ssh_port
splitted = hostname.split('.')
host_dict['_SHORTNAME'] = '.'.join(splitted[:len(splitted)-2])
host_dict['_DOMAIN'] = '.'.join(splitted[len(splitted)-2:])
return host_dict
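# Illustrative example (hypothetical values): for 'db1.stage.icflix.io' with
# machine_ip '10.0.0.5', ssh_port 2222 and parents ['dc1.provider'], the dict
# ends up with _SHORTNAME 'db1.stage', _DOMAIN 'icflix.io', parents
# 'dc1.provider' and _SSH_PORT 2222.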
def get_padding(self, padding_len):
"""Return padding :)"""
padding = ''
while padding_len > 0:
padding += ' '
padding_len -= 1
return padding
def get_ssh_port(self, machine_obj, is_lxc):
"""Determine SSH port for given Machine."""
ssh_port = 22
if not is_lxc:
return ssh_port
if 'ports' not in machine_obj:
# No port mappings defined for this container; fall back to the default SSH port.
return ssh_port
for port_cfg in machine_obj['ports']:
# dict is expected here
if 'private_port' not in port_cfg:
continue
if int(port_cfg['private_port']) == 22:
ssh_port = int(port_cfg['public_port'])
return ssh_port
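# Illustrative example (hypothetical machine entry): with is_lxc=True and
# {'ports': [{'private_port': 22, 'public_port': 2222}]} this returns 2222;
# non-LXC hosts and containers without a matching mapping fall back to 22.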
def load_machine_db(self):
"""Just loads machine DB from JSON."""
with open(MACHINEDB_FILE, 'r') as fhandle:
self.machine_db = json.load(fhandle)['machines']
def load_nagios_definitions(self):
"""Load Nagios definitions from YAML."""
with open(NAGIOS_DEFS_FILE, 'r') as fhandle:
self.nagios_db = yaml.load(fhandle)
# Make nagios_db sane
for top_key in NAGIOS_TKEYS:
if top_key in self.nagios_db:
continue
self.nagios_db[top_key] = {}
if 'passive' not in self.nagios_db['services']:
self.nagios_db['services']['passive'] = {}
if 'active' not in self.nagios_db['services']:
self.nagios_db['services']['active'] = {}
def import_config(self, services_cfg):
"""Import configuration file (sent) from remote Host."""
if not os.path.exists(services_cfg):
logging.error("Given file '%s' doesn't exist.", services_cfg)
return False
hostname = os.path.basename(services_cfg).replace('.cfg', '')
if hostname == '':
logging.error('I have empty hostname! :-(')
return False
nagios_host = None
for host_key in self.nagios_db['hosts'].iterkeys():
if hostname == host_key:
nagios_host = self.nagios_db['hosts'][host_key]
break
if nagios_host is None:
logging.error('Machine %s not found in Nagios/MDB.', hostname)
return False
logging.info('FQDN: %s', hostname)
logging.info('IP: %s', nagios_host['host']['address'])
logging.info('SSH: %s', nagios_host['host']['_SSH_PORT'])
logging.info('Hostgroups: %s', nagios_host['hostgroups'])
nag2yaml = NagiosToYaml()
nag2yaml.parse_nagios_config(services_cfg)
ext_services = nag2yaml.nagios_cfg
for extsvc_key in ext_services['services'].iterkeys():
ext_service = ext_services['services'][extsvc_key]
if 'stage' in nagios_host['hostgroups']:
ext_service['use'] = 'stage-service'
else:
ext_service['use'] = 'generic-service'
ext_service['check_freshness'] = 1
ext_service['active_checks_enabled'] = 0
ext_service['passive_checks_enabled'] = 1
ext_service['freshness_threshold'] = SVC_FRESHNESS_THRESHOLD
ext_service['check_command'] = 'check_dummy_4p!2 "check is stale"'
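# The stale-check command and its arguments are site-specific; presumably
# check_dummy_4p returns state 2 (CRITICAL) with the message "check is stale"
# once a passive result exceeds freshness_threshold.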
if extsvc_key not in self.nagios_db['services']['passive']:
continue
# Over-ride attributes from ['services']['passive']
svc_nagios = self.nagios_db['services']['passive'][extsvc_key]
for attr in svc_nagios.iterkeys():
ext_service[attr] = svc_nagios[attr]
self.add_services_to_host(nagios_host, ext_services)
host_file = '%s/objects/host_%s.cfg' % (ICINGA_DIR, hostname)
with open(host_file, 'w+') as fhandle:
host_copy = nagios_host['host'].copy()
host_copy['host_name'] = hostname
self.write_definition(fhandle, 'host', host_copy)
for svc_key in nagios_host['services'].iterkeys():
service_copy = nagios_host['services'][svc_key].copy()
service_copy['service_description'] = svc_key
self.write_definition(fhandle, 'service', service_copy)
return True
def mdb_to_nagios(self):
"""Sync Nagios YAML with MDB."""
for host_key in self.machine_db.iterkeys():
hostname = '%s.icflix.com' % (host_key)
mdb_host = self.machine_db[host_key]
if 'datacenter' in mdb_host and 'provider' in mdb_host:
dct_name = '%s.%s' % (mdb_host['datacenter'],
mdb_host['provider'])
dct_dict = self.get_host_dict(dct_name, 'localhost', None, None)
dct_dict['use'] = 'generic-datacenter'
dct_dict.pop('_SHORTNAME')
dct_dict.pop('_DOMAIN')
self.add_datacenter_to_nagios(dct_dict)
parents = [dct_name]
else:
parents = None
host_dict = self.get_host_dict(hostname, mdb_host['ip'], 22,
parents)
self.add_host_to_nagios(host_dict, False)
if 'lxc' not in mdb_host:
continue
for lxc_key in mdb_host['lxc'].iterkeys():
ssh_port = self.get_ssh_port(mdb_host['lxc'][lxc_key], True)
lxc_dict = self.get_host_dict(lxc_key, mdb_host['ip'],
ssh_port, [hostname])
self.add_host_to_nagios(lxc_dict, True)
def print_definition(self, definition_str, some_dict):
"""Print host definition."""
stuffing_len = 0
dict_keys = some_dict.keys()
dict_keys.sort()
# figure-out padding len
for attribute in dict_keys:
if len(attribute) > stuffing_len:
stuffing_len = len(attribute)
stuffing_len += 1
print 'define %s {' % (definition_str)
for attribute in dict_keys:
padding_len = stuffing_len - len(attribute)
padding = self.get_padding(padding_len)
print ' %s%s%s' % (attribute, padding, some_dict[attribute])
print '}\n'
def run(self, services_cfg):
""" Go, go, go!"""
if not self.import_config(services_cfg):
return False
self.ensure_host_definitions()
self.write_command_definitions()
self.write_contact_definitions()
self.write_contactgroup_definitions()
self.write_datacenter_definitions()
self.write_hostgroup_definitions()
self.write_service_definitions()
return True
def write_command_definitions(self):
"""Write definitions of all commands."""
if 'commands' not in self.nagios_db:
return
commands_file = '%s/objects/commands.cfg' % (ICINGA_DIR)
fhandle = open(commands_file, 'w+')
i = 0
for command in self.nagios_db['commands'].iterkeys():
cmd_dict = self.nagios_db['commands'][command]
cmd_dict['command_name'] = command
self.write_definition(fhandle, 'command', cmd_dict)
i += 1
fhandle.close()
logging.info("Written %i 'command' definitions.", i)
def write_contact_definitions(self):
"""Write definitions of all contacts."""
if 'contacts' not in self.nagios_db:
return
contacts_file = '%s/objects/contacts.cfg' % (ICINGA_DIR)
fhandle = open(contacts_file, 'w+')
i = 0
for contact in self.nagios_db['contacts'].iterkeys():
contact_dict = self.nagios_db['contacts'][contact]
contact_dict['contact_name'] = contact
self.write_definition(fhandle, 'contact', contact_dict)
i += 1
fhandle.close()
logging.info("Written %i 'contact' definitions.", i)
def write_contactgroup_definitions(self):
"""Write definitions of all contactgroups."""
cgroups_file = '%s/objects/contactgroups.cfg' % (ICINGA_DIR)
cgroups = self.nagios_db['contactgroups']
fhandle = open(cgroups_file, 'w+')
i = 0
for cgroup_key in cgroups.iterkeys():
cgroup_dict = cgroups[cgroup_key]
cgroup_dict['contactgroup_name'] = cgroup_key
self.write_definition(fhandle, 'contactgroup', cgroup_dict)
i += 1
fhandle.close()
logging.info("Written %i 'contactgroup' definitions.", i)
def write_datacenter_definitions(self):
"""Write definitions for all datacenters."""
dctrs_file = '%s/objects/datacenters.cfg' % (ICINGA_DIR)
dctrs = self.nagios_db['datacenters']
with open(dctrs_file, 'w+') as fhandle:
i = 0
for dctr_key in dctrs.iterkeys():
dct_dict = dctrs[dctr_key]['host'].copy()
dct_dict['host_name'] = dctr_key
self.write_definition(fhandle, 'host', dct_dict)
i += 1
logging.info("Written %i 'datacenter' definitions.", i)
def write_definition(self, fhandle, definition_str, some_dict):
"""Write Nagios definition into given file pointer."""
stuffing_len = 0
dict_keys = some_dict.keys()
dict_keys.sort()
# figure-out padding len
for attribute in dict_keys:
if len(attribute) > stuffing_len:
stuffing_len = len(attribute)
stuffing_len += 1
fhandle.write('define %s {\n' % (definition_str))
for attribute in dict_keys:
padding_len = stuffing_len - len(attribute)
padding = self.get_padding(padding_len)
fhandle.write(' %s%s%s\n' % (attribute, padding,
some_dict[attribute]))
fhandle.write('}\n\n')
def write_hostgroup_definitions(self):
"""Write hostgroup definitions."""
hosts = self.nagios_db['hosts']
hostgroups = self.nagios_db['hostgroups']
for host in hosts.iterkeys():
if 'hostgroups' not in hosts[host]:
continue
for hostgroup in hosts[host]['hostgroups']:
if hostgroup not in hostgroups:
hostgroups[hostgroup] = {}
# add 'members' attribute if hostgroup doesn't have any
if 'members' not in hostgroups[hostgroup]:
hostgroups[hostgroup]['members'] = list()
if host in hostgroups[hostgroup]['members']:
continue
hostgroups[hostgroup]['members'].append(host)
dctrs = self.nagios_db['datacenters']
for dctr in dctrs.iterkeys():
if 'hostgroups' not in dctrs[dctr]:
continue
for hostgroup in dctrs[dctr]['hostgroups']:
if hostgroup not in hostgroups:
hostgroups[hostgroup] = {}
# add 'members' attribute if hostgroup doesn't have any
if 'members' not in hostgroups[hostgroup]:
hostgroups[hostgroup]['members'] = list()
if dctr in hostgroups[hostgroup]['members']:
continue
hostgroups[hostgroup]['members'].append(dctr)
hgroups_file = '%s/objects/hostgroups.cfg' % (ICINGA_DIR)
fhandle = open(hgroups_file, 'w+')
i = 0
for hgrp_key in hostgroups.iterkeys():
hostgroup = hostgroups[hgrp_key]
if 'members' in hostgroup:
if len(hostgroup['members']) < 1:
# I guess Nagios wouldn't like empty members
hostgroup.pop('members')
else:
# Convert the 'members' list to a comma-separated string so it can
# be written out as a single attribute value
hostgroup['members'] = ','.join(hostgroup['members'])
hostgroup['hostgroup_name'] = hgrp_key
self.write_definition(fhandle, 'hostgroup', hostgroup)
i += 1
fhandle.close()
logging.info("Written %i 'hostgroup' definitions.", i)
def write_service_definitions(self):
"""Write service definitons."""
if 'active' not in self.nagios_db['services']:
return
services_file = '%s/objects/services.cfg' % (ICINGA_DIR)
fhandle = open(services_file, 'w+')
i = 0
for svc_key in self.nagios_db['services']['active'].iterkeys():
service = self.nagios_db['services']['active'][svc_key]
service['service_description'] = svc_key
if 'use' not in service:
service['use'] = 'generic-service'
self.write_definition(fhandle, 'service', service)
i += 1
fhandle.close()
logging.info("Written %i 'service' definitions.", i)
def convert_nagios_config():
"""Convert given Nagios config into YAML."""
if len(sys.argv) != 4:
logging.error('Expected %i arguments, %i given.', 3, len(sys.argv) - 1)
sys.exit(1)
nagios_to_yaml = NagiosToYaml()
nagios_to_yaml.parse_nagios_config(sys.argv[2])
nagios_to_yaml.write_to_yaml(sys.argv[3])
def import_remote_config():
"""Imports config sent from Remote Host."""
if len(sys.argv) < 3:
logging.error('Expected %i arguments, %i given.', 2, len(sys.argv) - 1)
sys.exit(1)
cfg_file = sys.argv[2]
config_generator = NagiosConfigGenerator()
retval = config_generator.run(cfg_file)
if retval:
logging.info("Will remove '%s'.", cfg_file)
os.remove(cfg_file)
os.remove('%s.ok' % (cfg_file))
print '* run % icinga -v /etc/icinga/icinga.cfg; before reload!'
print "* don't forget to commit your changes"
def main():
"""Main."""
logging.basicConfig(format=LOG_FORMAT)
logging.getLogger().setLevel(logging.INFO)
if len(sys.argv) < 2:
logging.error('Not enough arguments given.')
print_help()
sys.exit(1)
action = sys.argv[1]
if action == 'help':
print_help()
elif action == 'import':
import_remote_config()
elif action == 'regen':
regenerate_nagios_config()
elif action == 'convert':
convert_nagios_config()
else:
logging.error("Invalid parameter '%s'.", action)
sys.exit(1)
def print_help():
"""Print help."""
print '%s <action> [params]' % (sys.argv[0])
print ''
print 'Actions and params:'
print ' convert <src> <tgt> - convert Nagios config(src) to YAML(tgt)'
print ''
print ' import <path_to_cfg> - import configuration from remote Host'
print ''
print "NOTE: It's possible for 'regen' to create inconsistent Nagios"
print ' configuration! Use with care!'
print ' regen <what> - regenerates given definitions'
print ' commands - command definitons'
print ' contacts - contact definitions'
print ' contactgroups - contactgroup definitions'
print ' datacenters - datacenter definitions'
print ' hostgroups - hostgroup definitions'
print ' services - (active) service definitions'
def regenerate_nagios_config():
"""Regenerate part of Nagios config."""
if len(sys.argv) < 3:
logging.error('Expected %i parameters, %i given.', 2, len(sys.argv) - 1)
sys.exit(1)
config_generator = NagiosConfigGenerator()
config_generator.ensure_host_definitions()
what = sys.argv[2]
if what == 'commands':
config_generator.write_command_definitions()
elif what == 'contacts':
config_generator.write_contact_definitions()
elif what == 'contactgroups':
config_generator.write_contactgroup_definitions()
elif what == 'datacenters':
config_generator.write_datacenter_definitions()
elif what == 'hostgroups':
config_generator.write_hostgroup_definitions()
elif what == 'services':
config_generator.write_service_definitions()
else:
logging.error("Unknown parameter '%s'.", what)
sys.exit(1)
if __name__ == '__main__':
main()
|
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import optparse
import os
import shutil
import subprocess
import sys
from slave import build_directory
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
BLACKLIST = set((
'blacklist_test_dll_1.dll',
'crash_service64.exe',
'mini_installer.exe',
'nacl.exe',
'nacl64.exe',
'sql.dll',
))
SKIP_DIRS = [
'locales',
'obj',
'syzygy/asan',
]
class ASANitizer(object):
def __init__(self, instrument_exe, stopped, root):
self.instrument_exe = instrument_exe
self.stopped = stopped
self.root = os.path.abspath(root)
def __call__(self, job):
retval = 0
stdout = ''
pe_image, pdb = job
try:
if not self.stopped.is_set():
out_pe = GetInstrumentedFilepath(pe_image, self.root)
out_pdb = GetInstrumentedFilepath(pdb, self.root)
# Note that instrument.exe requires --foo=bar format (including the '=')
command = [
self.instrument_exe, '--mode=ASAN',
'--input-image=%s' % pe_image,
'--output-image=%s' % out_pe,
'--output-pdb=%s' % out_pdb,
]
for fname in filter(os.path.exists, (out_pe, out_pdb)):
os.remove(fname)
proc = subprocess.Popen(command,
stderr=subprocess.STDOUT,
stdout=subprocess.PIPE)
stdout, _ = proc.communicate()
retval = proc.returncode
return (retval, stdout, pe_image)
except Exception:
import traceback
return (1, stdout+'\n'+traceback.format_exc(), pe_image)
def GetInstrumentedFilepath(fname, root):
"""Returns the name of the instrumented file. Creates the output directory if
if doesn't exist.
>>> GetInstrumentedFilepath('C:/src/out/Release/foo/image.exe',
'src/out/Release')
'C:/src/out/Release/syzygy/asan/foo/image.exe'
TODO(sebmarchand): Separate the path computation from the side-effect of path
creation.
"""
asan_root = os.path.join(root, 'syzygy', 'asan')
asaned_file = fname.replace(root, asan_root)
out_path = os.path.dirname(asaned_file)
if not os.path.exists(out_path):
os.makedirs(out_path)
elif not os.path.isdir(out_path):
raise Exception('Invalid output directory for %s.' % fname)
return asaned_file
def UpdateAsanArtifact(full_directory, artifact_path):
"""Updates an ASAN artifact in the build directory, if it exists."""
artifact = os.path.join(full_directory, os.path.basename(artifact_path))
if os.path.exists(artifact):
print 'Removing %s' % artifact
os.remove(artifact)
print 'Copying %s -> %s' % (artifact_path, artifact)
shutil.copy2(artifact_path, artifact)
fname = os.path.basename(artifact_path)
print 'Blacklisting %s' % fname
BLACKLIST.add(fname)
def GetCompatiblePDB(pe_image, pdbfind_exe):
"""Returns <path to pdb> or None (if no good pdb exists)."""
try:
command = [pdbfind_exe, pe_image]
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
pdb_path, _ = proc.communicate()
pdb_path = pdb_path.splitlines()[0]
retval = proc.returncode
if retval == 0:
return os.path.abspath(pdb_path)
return None
except Exception:
return None
def FindFilesToAsan(directory, pdbfind_exe):
"""Finds eligible PE images in given directory.
A PE image is eligible if it has a corresponding pdb and doesn't already have
ASAN applied to it. Skips files which have an extra extension (like
foo.orig.exe).
"""
ret = []
def GoodExeOrDll(fname):
return (
'.' in fname and
fname not in BLACKLIST and
fname.split('.', 1)[-1].lower() in ('exe', 'dll'))
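# Illustrative examples (hypothetical filenames): 'chrome.dll' is eligible,
# 'foo.orig.exe' is skipped (its split extension is 'orig.exe'), and
# 'sql.dll' is skipped because it is in BLACKLIST.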
skip_dirs = set((os.path.abspath(os.path.join(directory, skip_dir))
for skip_dir in SKIP_DIRS))
for root, subdirs, files in os.walk(directory):
for path, sdir in [(os.path.join(root, s), s) for s in subdirs]:
if path in skip_dirs:
subdirs.remove(sdir)
for pe_image in (os.path.join(root, f) for f in files if GoodExeOrDll(f)):
pdb = GetCompatiblePDB(pe_image, pdbfind_exe)
if not pdb:
print >> sys.stderr, 'PDB for "%s" does not exist.' % pe_image
continue
ret.append((pe_image, pdb))
return ret
def ApplyAsanToBuild(full_directory, instrument_exe, pdbfind_exe, jobs):
"""Applies ASAN to all exe's/dll's in the build directory."""
to_asan = FindFilesToAsan(full_directory, pdbfind_exe)
if not to_asan:
print >> sys.stderr, 'No files to ASAN!'
return 1
manager = multiprocessing.Manager()
stopped = manager.Event()
sanitizer = ASANitizer(instrument_exe, stopped, full_directory)
pool = multiprocessing.Pool(jobs)
ret = 0
try:
generator = pool.imap_unordered(sanitizer, to_asan)
for retval, stdout, failed_image in generator:
ostream = (sys.stderr if retval else sys.stdout)
print >> ostream, stdout
sys.stdout.flush()
sys.stderr.flush()
if retval:
print 'Failed to ASAN %s. Stopping remaining jobs.' % failed_image
ret = retval
stopped.set()
except KeyboardInterrupt:
stopped.set()
pool.close()
pool.join()
return ret
def main():
# syzygy is relative to --build-dir, not relative to SCRIPT_DIR.
default_asan_dir = os.path.join(
os.pardir, 'third_party', 'syzygy', 'binaries', 'exe')
default_instrument_exe = os.path.join(default_asan_dir, 'instrument.exe')
default_agent_logger_exe = os.path.join(default_asan_dir, 'agent_logger.exe')
default_pdbfind_exe = os.path.join(default_asan_dir, 'pdbfind.exe')
default_runtime_path = os.path.join(default_asan_dir, 'syzyasan_rtl.dll')
parser = optparse.OptionParser()
parser.add_option('--build-dir', help='ignored')
parser.add_option(
'--target',
help='The target in the build directory to asan (required).')
parser.add_option(
'--jobs', type='int', default=multiprocessing.cpu_count(),
help='Specify the number of sub-tasks to use (%default).')
parser.add_option(
'--instrument_exe', default=default_instrument_exe,
help='Specify the path to the ASAN instrument.exe relative to '
'build-dir (%default).')
parser.add_option(
'--agent_logger_exe', default=default_agent_logger_exe,
help='Specify the path to the ASAN agent_logger.exe relative to '
'build-dir (%default).')
parser.add_option(
'--pdbfind_exe', default=default_pdbfind_exe,
help='Specify the path to the ASAN pdbfind.exe relative to '
'build-dir (%default).')
parser.add_option(
'--runtime_path', default=default_runtime_path,
help='Specify the path to the ASAN runtime DLL relative to '
'build-dir (%default).')
options, args = parser.parse_args()
options.build_dir = build_directory.GetBuildOutputDirectory()
options.build_dir = os.path.abspath(options.build_dir)
if not options.build_dir:
parser.error('Must specify --build-dir')
if not options.target:
parser.error('Must specify --target')
if args:
parser.error('Not expecting additional arguments')
# A 3-tuples list describing the different artifacts needed in a Win ASan
# build. The tuples values are:
# - Artifact name: The name of the parameter to add to the options for
# this artifact.
# - Artifact path: The path to this artifact. It is expected to be
# relative to build_dir or absolute.
# - should_update: Indicates if this artifact should be copied to the
# build directory.
artifacts = [
('full_directory', options.target, False),
('instrument_exe', options.instrument_exe, False),
('agent_logger_exe', options.agent_logger_exe, True),
('pdbfind_exe', options.pdbfind_exe, False),
('runtime_path', options.runtime_path, True),
]
for name, path, should_update in artifacts:
if not os.path.isabs(path):
path = os.path.abspath(os.path.join(options.build_dir, path))
setattr(options, name, path)
if not os.path.exists(path):
parser.error('Could not find %s : %s' % (name, path))
if should_update:
UpdateAsanArtifact(options.full_directory, path)
print 'Default BLACKLIST is: %r' % BLACKLIST
return ApplyAsanToBuild(options.full_directory,
options.instrument_exe,
options.pdbfind_exe,
options.jobs)
if __name__ == '__main__':
sys.exit(main())
|
|
# -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests related to content translation to multiple languages."""
__author__ = 'Pavel Simakov ([email protected])'
import json
import os
import yaml
import appengine_config
from common import schema_fields
from common import xcontent
from models import courses
from modules.dashboard.question_editor import McQuestionRESTHandler
from tests.functional import actions
COURSE_NAME = 'i18n_tests'
NAMESPACE = 'ns_%s' % COURSE_NAME
COURSE_TITLE = 'I18N Tests'
ADMIN_EMAIL = '[email protected]'
BASE_URL = '/' + COURSE_NAME
STUDENT_EMAIL = '[email protected]'
FIELD_FILTER = schema_fields.FieldFilter(
type_names=['string', 'html', 'url'],
hidden_values=[False],
i18n_values=[None, True],
editable_values=[True])
def _filter(binding):
"""Filter out translatable strings."""
return FIELD_FILTER.filter_value_to_type_binding(binding)
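# Illustrative behaviour (per FIELD_FILTER above): editable, non-hidden
# 'string'/'html'/'url' values whose i18n flag is unset or True are treated as
# translatable, e.g. 'course:title'; values such as 'course:now_available'
# (boolean) or 'course:locale' are excluded (see the tests below).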
class I18NCourseSettingsTests(actions.TestBase):
"""Tests for various course settings transformations I18N relies upon."""
def _build_mapping(self, translations=None, errors=None):
"""Build mapping of course.yaml properties to their translations."""
if translations is None:
translations = {}
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.course_yaml, self.schema)
desired = _filter(binding)
mappings = xcontent.SourceToTargetDiffMapping.map_source_to_target(
binding, existing_mappings=translations, allowed_names=desired,
errors=errors)
if errors:
self.assertEqual(len(mappings) + len(errors), len(desired))
else:
self.assertEqual(len(mappings), len(desired))
for mapping in mappings:
self.assertTrue(mapping.name in desired)
return binding, mappings
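# The returned mappings carry a verb describing the diff against any existing
# translation: VERB_NEW (not translated yet), VERB_CURRENT (source unchanged
# since it was translated) and VERB_CHANGED (source text changed since the
# translation was made); the tests below exercise each case.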
def setUp(self):
super(I18NCourseSettingsTests, self).setUp()
context = actions.simple_add_course(
COURSE_NAME, ADMIN_EMAIL, COURSE_TITLE)
self.course = courses.Course(None, context)
course_yaml = os.path.join(appengine_config.BUNDLE_ROOT, 'course.yaml')
self.schema = courses.Course.create_settings_schema(self.course)
self.schema.add_property(schema_fields.SchemaField(
'test:i18n_test', 'Test Text', 'url', i18n=True))
course_yaml_text = open(course_yaml).read()
course_yaml_text = '%s\ntest:i18n_test: \'Test!\'' % course_yaml_text
self.course_yaml = yaml.safe_load(course_yaml_text)
def tearDown(self):
super(I18NCourseSettingsTests, self).tearDown()
def test_course_yaml_schema_binding(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.course_yaml, self.schema)
expected_unmapped = [
'html_hooks:preview:after_main_content_ends',
'html_hooks:preview:after_top_content_ends',
'html_hooks:unit:after_content_begins',
'html_hooks:unit:after_leftnav_begins',
'html_hooks:unit:before_content_ends',
'html_hooks:unit:before_leftnav_ends',
'reg_form:whitelist',
]
self.assertEqual(expected_unmapped, sorted(binding.unmapped_names))
self.assertEqual(len(binding.name_to_field), len(binding.name_to_value))
value = binding.find_value('test:i18n_test')
self.assertTrue(value.field.i18n)
self.assertEqual('url', value.field.type)
self.assertEqual('Test!', value.value)
self.assertEqual(binding.name_to_field['test:i18n_test'], value.field)
value = binding.find_value('course:title')
self.assertTrue(value.field.i18n is None)
self.assertEqual('string', value.field.type)
self.assertEqual('Power Searching with Google', value.value)
self.assertEqual(binding.name_to_field['course:title'], value.field)
forum_url_field = binding.find_field('course:forum_url')
self.assertEquals('string', forum_url_field.type)
blurb_field = binding.find_field('course:blurb')
self.assertEquals('html', blurb_field.type)
now_avail_field = binding.find_field('course:now_available')
self.assertEquals('boolean', now_avail_field.type)
def test_extract_translatable_fields(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.course_yaml, self.schema)
value_names_to_translate = _filter(binding)
self.assertTrue('course:locale' in binding.name_to_value)
self.assertFalse('course:locale' in value_names_to_translate)
self.assertTrue('course:title' in binding.name_to_value)
self.assertTrue('course:title' in value_names_to_translate)
def test_translate_never_before_translated(self):
_, mappings = self._build_mapping()
for mapping in mappings:
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_NEW)
self.assertEqual(mapping.target_value, None)
def test_translations_must_have_same_type(self):
translation = xcontent.SourceToTargetMapping(
'course:title', 'Title', 'unknown_type',
'Power Searching with Google', 'POWER SEARCHING WITH Google')
errors = []
binding, _ = self._build_mapping(
translations=[translation], errors=errors)
error_at_index = None
for index, value_field in enumerate(binding.value_list):
if 'course:title' == value_field.name:
error_at_index = index
break
self.assertTrue(error_at_index is not None)
self.assertEqual(error_at_index, errors[0].index)
self.assertEqual(
'Source and target types don\'t match: '
'string, unknown_type.', errors[0].original_exception.message)
def test_retranslate_already_translated_verb_same(self):
translation = xcontent.SourceToTargetMapping(
'course:title', 'Title', 'string',
'Power Searching with Google', 'POWER SEARCHING WITH Google',)
translations = [translation]
_, mappings = self._build_mapping(translations)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'course:title')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_CURRENT)
self.assertEqual('Power Searching with Google', mapping.source_value)
self.assertEqual('POWER SEARCHING WITH Google', mapping.target_value)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'course:forum_url')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_NEW)
self.assertEqual(None, mapping.target_value)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'course:locale')
self.assertEqual(None, mapping)
def test_retranslate_already_translated_verb_changed(self):
translation = xcontent.SourceToTargetMapping(
'course:title', 'Title', 'string',
'Power Searching with Google (old)',
'POWER SEARCHING WITH Google (old)')
translations = [translation]
_, mappings = self._build_mapping(translations)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'course:title')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_CHANGED)
self.assertEqual('Power Searching with Google', mapping.source_value)
self.assertEqual(
'POWER SEARCHING WITH Google (old)', mapping.target_value)
def test_schema_with_array_element_type(self):
self.course_yaml['course']['extra_tabs'] = [
{
'label': 'FAQ',
'position': 'left',
'visibility': 'student',
'url': '',
'content': 'Frequently asked questions'},
{
'label': 'Resources',
'position': 'right',
'visibility': 'student',
'url': '',
'content': 'Links to resources'}]
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.course_yaml, self.schema)
expected_names = [
('course:extra_tabs', None),
('course:extra_tabs:[0]:label', 'FAQ'),
('course:extra_tabs:[0]:position', 'left'),
('course:extra_tabs:[0]:visibility', 'student'),
('course:extra_tabs:[0]:url', ''),
('course:extra_tabs:[0]:content', 'Frequently asked questions'),
('course:extra_tabs:[1]:label', 'Resources'),
('course:extra_tabs:[1]:position', 'right'),
('course:extra_tabs:[1]:visibility', 'student'),
('course:extra_tabs:[1]:url', ''),
('course:extra_tabs:[1]:content', 'Links to resources')]
for name, value in expected_names:
self.assertIn(name, binding.name_to_field.keys())
if value is not None:
self.assertEquals(value, binding.name_to_value[name].value)
class I18NMultipleChoiceQuestionTests(actions.TestBase):
"""Tests for multiple choice object transformations I18N relies upon."""
def setUp(self):
super(I18NMultipleChoiceQuestionTests, self).setUp()
self.schema = McQuestionRESTHandler.get_schema()
self.question = json.loads("""{
"description": "sky",
"multiple_selections": false,
"question": "What color is the sky?",
"choices": [
{"text": "red", "score": 0.0, "feedback": "Wrong!"},
{"text": "blue", "score": 1.0, "feedback": "Correct!"},
{"text": "green", "score": 0.0, "feedback": "Close..."}],
"version": "1.5",
"type": 0}
""")
def test_schema_with_array_element_type(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.question, self.schema)
expected_names = [
'choices',
'choices:[0]:feedback',
'choices:[0]:score',
'choices:[0]:text',
'choices:[1]:feedback',
'choices:[1]:score',
'choices:[1]:text',
'choices:[2]:feedback',
'choices:[2]:score',
'choices:[2]:text',
'description',
'multiple_selections',
'question',
'version']
self.assertEquals(
expected_names, sorted(binding.name_to_field.keys()))
self.assertEquals(
expected_names, sorted(binding.name_to_value.keys()))
self.assertEquals(set(['type']), binding.unmapped_names)
field = binding.find_field('choices')
self.assertEqual('array', field.type)
value = binding.find_value('choices')
self.assertEqual(3, len(value.value))
field = binding.find_field('choices:[0]:feedback')
self.assertEqual('html', field.type)
field = binding.find_field('choices:[0]:text')
self.assertEqual('html', field.type)
field = binding.find_field('choices:[0]:score')
self.assertEqual('string', field.type)
value = binding.find_value('choices:[1]:feedback')
self.assertEqual('Correct!', value.value)
value = binding.find_value('choices:[1]:text')
self.assertEqual('blue', value.value)
value = binding.find_value('choices:[1]:score')
self.assertEqual(1.0, value.value)
def test_translate_never_before_translated(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.question, self.schema)
desired = _filter(binding)
expected_desired = [
'choices:[0]:feedback',
'choices:[0]:text',
'choices:[1]:feedback',
'choices:[1]:text',
'choices:[2]:feedback',
'choices:[2]:text',
'description',
'question']
self.assertEqual(expected_desired, sorted(desired))
mappings = xcontent.SourceToTargetDiffMapping.map_source_to_target(
binding, allowed_names=desired)
expected_source_values = [
'sky',
'What color is the sky?',
'red',
'Wrong!',
'blue',
'Correct!',
'green',
'Close...']
self.assertEqual(
expected_source_values,
[mapping.source_value for mapping in mappings])
def test_retranslate_already_translated_verb_same(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.question, self.schema)
desired = _filter(binding)
translation = xcontent.SourceToTargetMapping(
'choices:[1]:feedback', 'Feedback', 'html',
'Correct!',
'CORRECT!')
mappings = xcontent.SourceToTargetDiffMapping.map_source_to_target(
binding, existing_mappings=[translation], allowed_names=desired)
expected_mappings = [
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW),
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW),
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW),
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW),
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW),
('CORRECT!', xcontent.SourceToTargetDiffMapping.VERB_CURRENT),
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW),
(None, xcontent.SourceToTargetDiffMapping.VERB_NEW)]
self.assertEqual(
expected_mappings,
[(mapping.target_value, mapping.verb) for mapping in mappings])
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'choices:[1]:feedback')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_CURRENT)
self.assertEqual('Correct!', mapping.source_value)
self.assertEqual('CORRECT!', translation.target_value)
def test_retranslate_already_translated_verb_changed(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.question, self.schema)
desired = _filter(binding)
translation = xcontent.SourceToTargetMapping(
'choices:[1]:feedback', 'Feedback', 'html',
'Correct (old)!',
'CORRECT (old)!')
mappings = xcontent.SourceToTargetDiffMapping.map_source_to_target(
binding, existing_mappings=[translation], allowed_names=desired)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'choices:[1]:feedback')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_CHANGED)
self.assertEqual('Correct!', mapping.source_value)
self.assertEqual('CORRECT (old)!', mapping.target_value)
def test_retranslate_already_translated_with_list_reordered(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.question, self.schema)
desired = _filter(binding)
translation = xcontent.SourceToTargetMapping(
'choices:[0]:feedback', 'Feedback', 'html',
'Correct (old)!',
'CORRECT!')
mappings = xcontent.SourceToTargetDiffMapping.map_source_to_target(
binding, existing_mappings=[translation], allowed_names=desired)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'choices:[0]:feedback')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_CHANGED)
self.assertEqual('Wrong!', mapping.source_value)
self.assertEqual('CORRECT!', mapping.target_value)
def test_retranslate_already_translated_with_list_reordered_matched(self):
binding = schema_fields.ValueToTypeBinding.bind_entity_to_schema(
self.question, self.schema)
desired = _filter(binding)
translation = xcontent.SourceToTargetMapping(
'choices:[0]:feedback', 'Feedback', 'html',
'Correct (old)!',
'CORRECT!')
try:
mappings = xcontent.SourceToTargetDiffMapping.map_source_to_target(
binding, existing_mappings=[translation], allowed_names=desired,
allow_list_reorder=True)
mapping = xcontent.SourceToTargetMapping.find_mapping(
mappings, 'choices:[0]:feedback')
self.assertEqual(
mapping.verb, xcontent.SourceToTargetDiffMapping.VERB_CHANGED)
self.assertEqual('Correct!', mapping.source_value)
self.assertEqual('CORRECT!', mapping.target_value)
raise Exception('Must have failed.')
# TODO(psimakov): fix allow_list_reorder=True to stop this failure
except NotImplementedError:
pass
|
|
from __future__ import unicode_literals
from operator import attrgetter
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import connection
from django.test import TestCase
from django.test.utils import CaptureQueriesContext
from django.utils import six
from .models import (
Chef, CommonInfo, ItalianRestaurant, ParkingLot, Place, Post,
Restaurant, Student, Supplier, Worker, MixinModel,
Title, Copy, Base, SubBase)
class ModelInheritanceTests(TestCase):
def test_abstract(self):
# The Student and Worker models both have 'name' and 'age' fields on
# them and inherit the __unicode__() method, just as with normal Python
# subclassing. This is useful if you want to factor out common
# information for programming purposes, but still completely
# independent separate models at the database level.
w1 = Worker.objects.create(name="Fred", age=35, job="Quarry worker")
Worker.objects.create(name="Barney", age=34, job="Quarry worker")
s = Student.objects.create(name="Pebbles", age=5, school_class="1B")
self.assertEqual(six.text_type(w1), "Worker Fred")
self.assertEqual(six.text_type(s), "Student Pebbles")
# The children inherit the Meta class of their parents (if they don't
# specify their own).
self.assertQuerysetEqual(
Worker.objects.values("name"), [
{"name": "Barney"},
{"name": "Fred"},
],
lambda o: o
)
# Since Student does not subclass CommonInfo's Meta, it has the effect
# of completely overriding it. So ordering by name doesn't take place
# for Students.
self.assertEqual(Student._meta.ordering, [])
# However, the CommonInfo class cannot be used as a normal model (it
# doesn't exist as a model).
self.assertRaises(AttributeError, lambda: CommonInfo.objects.all())
def test_multiple_table(self):
post = Post.objects.create(title="Lorem Ipsum")
# The Post model has distinct accessors for the Comment and Link models.
post.attached_comment_set.create(content="Save $ on V1agr@", is_spam=True)
post.attached_link_set.create(
content="The Web framework for perfections with deadlines.",
url="http://www.djangoproject.com/"
)
# The Post model doesn't have an attribute called
# 'attached_%(class)s_set'.
self.assertRaises(
AttributeError, getattr, post, "attached_%(class)s_set"
)
# The Place/Restaurant/ItalianRestaurant models all exist as
# independent models. However, the subclasses also have transparent
# access to the fields of their ancestors.
# Create a couple of Places.
Place.objects.create(name="Master Shakes", address="666 W. Jersey")
Place.objects.create(name="Ace Hardware", address="1013 N. Ashland")
# Test constructor for Restaurant.
r = Restaurant.objects.create(
name="Demon Dogs",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
rating=2
)
# Test the constructor for ItalianRestaurant.
c = Chef.objects.create(name="Albert")
ir = ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
chef=c
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Ash"), [
"Ristorante Miron",
],
attrgetter("name")
)
ir.address = "1234 W. Elm"
ir.save()
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
# Make sure Restaurant and ItalianRestaurant have the right fields in
# the right order.
self.assertEqual(
[f.name for f in Restaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
"serves_pizza", "chef"]
)
self.assertEqual(
[f.name for f in ItalianRestaurant._meta.fields],
["id", "name", "address", "place_ptr", "rating", "serves_hot_dogs",
"serves_pizza", "chef", "restaurant_ptr", "serves_gnocchi"],
)
self.assertEqual(Restaurant._meta.ordering, ["-rating"])
# Even though p.supplier exists for a Place 'p' (a parent of a Supplier),
# a Restaurant object cannot access that reverse relation, since it's not
# part of the Place-Supplier hierarchy.
self.assertQuerysetEqual(Place.objects.filter(supplier__name="foo"), [])
self.assertRaises(
FieldError, Restaurant.objects.filter, supplier__name="foo"
)
# Parent fields can be used directly in filters on the child model.
self.assertQuerysetEqual(
Restaurant.objects.filter(name="Demon Dogs"), [
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(address="1234 W. Elm"), [
"Ristorante Miron",
],
attrgetter("name")
)
# Filters against the parent model return objects of the parent's type.
p = Place.objects.get(name="Demon Dogs")
self.assertIs(type(p), Place)
# Since the parent and child are linked by an automatically created
# OneToOneField, you can get from the parent to the child by using the
# child's name.
self.assertEqual(
p.restaurant, Restaurant.objects.get(name="Demon Dogs")
)
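# A rough sketch (assumed; the real field is auto-created by Django) of the
# implicit parent link behind the p.restaurant access above:
#
#   class Restaurant(Place):
#       place_ptr = models.OneToOneField(
#           Place, parent_link=True, primary_key=True)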
self.assertEqual(
Place.objects.get(name="Ristorante Miron").restaurant.italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
self.assertEqual(
Restaurant.objects.get(name="Ristorante Miron").italianrestaurant,
ItalianRestaurant.objects.get(name="Ristorante Miron")
)
# This won't work because the Demon Dogs restaurant is not an Italian
# restaurant.
self.assertRaises(
ItalianRestaurant.DoesNotExist,
lambda: p.restaurant.italianrestaurant
)
# An ItalianRestaurant which does not exist is also a Place which does
# not exist.
self.assertRaises(
Place.DoesNotExist,
ItalianRestaurant.objects.get, name="The Noodle Void"
)
# MultipleObjectsReturned is also inherited.
self.assertRaises(
Place.MultipleObjectsReturned,
Restaurant.objects.get, id__lt=12321
)
# Related objects work just as they normally do.
s1 = Supplier.objects.create(name="Joe's Chickens", address="123 Sesame St")
s1.customers = [r, ir]
s2 = Supplier.objects.create(name="Luigi's Pasta", address="456 Sesame St")
s2.customers = [ir]
# This won't work because the Place we select is not a Restaurant (it's
# a Supplier).
p = Place.objects.get(name="Joe's Chickens")
self.assertRaises(
Restaurant.DoesNotExist, lambda: p.restaurant
)
self.assertEqual(p.supplier, s1)
self.assertQuerysetEqual(
ir.provider.order_by("-name"), [
"Luigi's Pasta",
"Joe's Chickens"
],
attrgetter("name")
)
self.assertQuerysetEqual(
Restaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
"Demon Dogs",
],
attrgetter("name")
)
self.assertQuerysetEqual(
ItalianRestaurant.objects.filter(provider__name__contains="Chickens"), [
"Ristorante Miron",
],
attrgetter("name"),
)
ParkingLot.objects.create(
name="Main St", address="111 Main St", main_site=s1
)
ParkingLot.objects.create(
name="Well Lit", address="124 Sesame St", main_site=ir
)
self.assertEqual(
Restaurant.objects.get(lot__name="Well Lit").name,
"Ristorante Miron"
)
# The update() command can update fields in parent and child classes at
# once (although it executes multiple SQL queries to do so).
rows = Restaurant.objects.filter(
serves_hot_dogs=True, name__contains="D"
).update(
name="Demon Puppies", serves_hot_dogs=False
)
self.assertEqual(rows, 1)
r1 = Restaurant.objects.get(pk=r.pk)
self.assertFalse(r1.serves_hot_dogs)
self.assertEqual(r1.name, "Demon Puppies")
# The values() command also works on fields from parent models.
self.assertQuerysetEqual(
ItalianRestaurant.objects.values("name", "rating"), [
{"rating": 4, "name": "Ristorante Miron"}
],
lambda o: o
)
# select_related works with fields from the parent object as if they
# were a normal part of the model.
self.assertNumQueries(
2, lambda: ItalianRestaurant.objects.all()[0].chef
)
self.assertNumQueries(
1, lambda: ItalianRestaurant.objects.select_related("chef")[0].chef
)
def test_select_related_defer(self):
"""
#23370 - Should be able to defer child fields when using
select_related() from parent to child.
"""
Restaurant.objects.create(
name="Demon Dogs",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
rating=2,
)
ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
)
qs = (Restaurant.objects
.select_related("italianrestaurant")
.defer("italianrestaurant__serves_gnocchi")
.order_by("rating"))
# Test that the field was actually deferred
with self.assertNumQueries(2):
objs = list(qs.all())
self.assertTrue(objs[1].italianrestaurant.serves_gnocchi)
# Test that model fields were assigned the correct values
self.assertEqual(qs[0].name, 'Demon Dogs')
self.assertEqual(qs[0].rating, 2)
self.assertEqual(qs[1].italianrestaurant.name, 'Ristorante Miron')
self.assertEqual(qs[1].italianrestaurant.rating, 4)
def test_mixin_init(self):
m = MixinModel()
self.assertEqual(m.other_attr, 1)
def test_update_query_counts(self):
"""
Test that update queries do not generate unnecessary queries.
Refs #18304.
"""
c = Chef.objects.create(name="Albert")
ir = ItalianRestaurant.objects.create(
name="Ristorante Miron",
address="1234 W. Ash",
serves_hot_dogs=False,
serves_pizza=False,
serves_gnocchi=True,
rating=4,
chef=c
)
with self.assertNumQueries(3):
ir.save()
def test_update_parent_filtering(self):
"""
Test that updating a field of a model subclass doesn't issue an UPDATE
query constrained by an inner query.
Refs #10399
"""
supplier = Supplier.objects.create(
name='Central market',
address='610 some street'
)
# Capture the expected query in a database-agnostic way
with CaptureQueriesContext(connection) as captured_queries:
Place.objects.filter(pk=supplier.pk).update(name=supplier.name)
expected_sql = captured_queries[0]['sql']
# Capture the queries executed when a subclassed model instance is saved.
with CaptureQueriesContext(connection) as captured_queries:
supplier.save(update_fields=('name',))
for query in captured_queries:
sql = query['sql']
if 'UPDATE' in sql:
self.assertEqual(expected_sql, sql)
def test_eq(self):
# Equality doesn't transfer in multitable inheritance.
self.assertNotEqual(Place(id=1), Restaurant(id=1))
self.assertNotEqual(Restaurant(id=1), Place(id=1))
def test_ticket_12567(self):
r = Restaurant.objects.create(name='n1', address='a1')
s = Supplier.objects.create(name='s1', address='a2')
self.assertQuerysetEqual(
Place.objects.filter(supplier__isnull=False),
[Place.objects.get(pk=s.pk)],
lambda x: x
)
self.assertQuerysetEqual(
Place.objects.filter(supplier__isnull=True),
[Place.objects.get(pk=r.pk)],
lambda x: x
)
self.assertQuerysetEqual(
Place.objects.exclude(supplier__isnull=False),
[Place.objects.get(pk=r.pk)],
lambda x: x
)
self.assertQuerysetEqual(
Place.objects.exclude(supplier__isnull=True),
[Place.objects.get(pk=s.pk)],
lambda x: x
)
def test_custompk_m2m(self):
b = Base.objects.create()
b.titles.add(Title.objects.create(title="foof"))
s = SubBase.objects.create(sub_id=b.id)
b = Base.objects.get(pk=s.id)
self.assertNotEqual(b.pk, s.pk)
# Low-level test for related_val
self.assertEqual(s.titles.related_val, (s.id,))
# Higher level test for correct query values (title foof not
# accidentally found).
self.assertQuerysetEqual(
s.titles.all(), [])
class InheritanceSameModelNameTests(TestCase):
def setUp(self):
# The Title model has distinct accessors for both
# model_inheritance.Copy and model_inheritance_same_model_name.Copy
# models.
self.title = Title.objects.create(title='Lorem Ipsum')
def test_inheritance_related_name(self):
self.assertEqual(
self.title.attached_model_inheritance_copy_set.create(
content='Save $ on V1agr@',
url='http://v1agra.com/',
title='V1agra is spam',
), Copy.objects.get(
content='Save $ on V1agr@',
))
def test_inheritance_with_same_model_name(self):
with self.modify_settings(
INSTALLED_APPS={'append': ['model_inheritance.same_model_name']}):
call_command('migrate', verbosity=0)
from .same_model_name.models import Copy
self.assertEqual(
self.title.attached_same_model_name_copy_set.create(
content='The Web framework for perfectionists with deadlines.',
url='http://www.djangoproject.com/',
title='Django Rocks'
), Copy.objects.get(
content='The Web framework for perfectionists with deadlines.',
))
def test_related_name_attribute_exists(self):
# The Post model doesn't have an attribute called 'attached_%(app_label)s_%(class)s_set'.
self.assertFalse(hasattr(self.title, 'attached_%(app_label)s_%(class)s_set'))
|
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from c7n.resources.elasticache import _cluster_eligible_for_snapshot
from .common import BaseTest
class TestElastiCacheCluster(BaseTest):
def test_eligibility_snapshot(self):
# So this is black-box testing, due to the use of a private interface.
self.assertTrue(
_cluster_eligible_for_snapshot(
{'Engine': 'redis', 'CacheNodeType': 'cache.t2.medium'}))
self.assertFalse(
_cluster_eligible_for_snapshot(
{'Engine': 'redis', 'CacheNodeType': 'cache.t1.medium'}))
self.assertFalse(
_cluster_eligible_for_snapshot(
{'Engine': 'memcached', 'CacheNodeType': 'cache.t2.medium'}))
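# A minimal sketch of the eligibility rule these assertions imply (the real
# implementation lives in c7n.resources.elasticache and may differ in detail):
# only redis engines on non-t1 node types are snapshot-eligible.
#
#   def _cluster_eligible_for_snapshot(cluster):
#       return (cluster.get('Engine') == 'redis'
#               and not cluster.get('CacheNodeType', '').startswith('cache.t1'))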
def test_elasticache_security_group(self):
session_factory = self.replay_flight_data("test_elasticache_security_group")
p = self.load_policy(
{
"name": "elasticache-cluster-simple",
"resource": "cache-cluster",
"filters": [
{"type": "security-group", "key": "GroupName", "value": "default"}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
self.assertEqual(
sorted([r["CacheClusterId"] for r in resources]),
["myec-001", "myec-002", "myec-003"],
)
def test_elasticache_subnet_filter(self):
session_factory = self.replay_flight_data(
"test_elasticache_subnet_group_filter"
)
p = self.load_policy(
{
"name": "elasticache-cluster-simple",
"resource": "cache-cluster",
"filters": [
{"type": "subnet", "key": "MapPublicIpOnLaunch", "value": False}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
self.assertEqual(
sorted([r["CacheClusterId"] for r in resources]),
["myec-001", "myec-002", "myec-003"],
)
def test_elasticache_cluster_simple(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_simple")
p = self.load_policy(
{"name": "elasticache-cluster-simple", "resource": "cache-cluster"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
def test_elasticache_cluster_simple_filter(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_simple")
p = self.load_policy(
{
"name": "elasticache-cluster-simple-filter",
"resource": "cache-cluster",
"filters": [{"type": "value", "key": "Engine", "value": "redis"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
def test_elasticache_sharded_snapshot_copy_tags(self):
factory = self.replay_flight_data("test_elasticache_sharded_copy_cluster_tags")
client = factory().client("elasticache")
snap_tags = {
t["Key"]: t["Value"]
for t in client.list_tags_for_resource(
ResourceName="arn:aws:elasticache:us-east-2:644160558196:snapshot:zero-bytes"
)[
"TagList"
]
}
self.assertEqual(snap_tags, {"App": "MegaCache"})
p = self.load_policy(
{
"name": "test-copy-cluster-tags",
"resource": "cache-snapshot",
"actions": [
{
"type": "copy-cluster-tags",
"tags": ["App", "Env", "Zone", "Color"],
}
],
},
config=dict(region="us-east-2"),
session_factory=factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["SnapshotName"], "zero-bytes")
arn = p.resource_manager.get_arns(resources)[0]
snap_tags = {
t["Key"]: t["Value"]
for t in client.list_tags_for_resource(ResourceName=arn)["TagList"]
}
self.assertEqual(
snap_tags, {"App": "MegaCache", "Color": "Blue", "Env": "Dev", "Zone": "12"}
)
def test_elasticache_snapshot_copy_cluster_tags(self):
session_factory = self.replay_flight_data("test_elasticache_copy_cluster_tags")
client = session_factory().client("elasticache")
results = client.list_tags_for_resource(
ResourceName="arn:aws:elasticache:us-east-1:644160558196:snapshot:myec-backup"
)[
"TagList"
]
tags = {t["Key"]: t["Value"] for t in results}
self.assertEqual(tags, {})
policy = self.load_policy(
{
"name": "test-copy-cluster-tags",
"resource": "cache-snapshot",
"actions": [{"type": "copy-cluster-tags", "tags": ["tagkey"]}],
},
config=dict(region="us-east-1"),
session_factory=session_factory,
)
resources = policy.run()
arn = policy.resource_manager.generate_arn(resources[0]["SnapshotName"])
results = client.list_tags_for_resource(ResourceName=arn)["TagList"]
tags = {t["Key"]: t["Value"] for t in results}
self.assertEqual(tags["tagkey"], "tagval")
def test_elasticache_cluster_available(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_available")
p = self.load_policy(
{
"name": "elasticache-cluster-available",
"resource": "cache-cluster",
"filters": [
{"type": "value", "key": "CacheClusterStatus", "value": "available"}
],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
self.assertEqual(resources[0]["CacheClusterStatus"], "available")
def test_elasticache_cluster_mark(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_mark")
client = session_factory().client("elasticache")
p = self.load_policy(
{
"name": "elasticache-cluster-mark",
"resource": "cache-cluster",
"filters": [{"type": "value", "key": "Engine", "value": "redis"}],
"actions": [{"type": "mark-for-op", "days": 30, "op": "delete"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
arn = p.resource_manager.generate_arn(resources[0]["CacheClusterId"])
tags = client.list_tags_for_resource(ResourceName=arn)
tag_map = {t["Key"]: t["Value"] for t in tags["TagList"]}
self.assertTrue("maid_status" in tag_map)
def test_elasticache_cluster_unmark(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_unmark")
client = session_factory().client("elasticache")
p = self.load_policy(
{
"name": "elasticache-cluster-unmark",
"resource": "cache-cluster",
"filters": [{"type": "value", "key": "Engine", "value": "redis"}],
"actions": [{"type": "unmark"}],
},
session_factory=session_factory,
)
resources = p.run()
arn = p.resource_manager.generate_arn(resources[0]["CacheClusterId"])
self.assertEqual(len(resources), 3)
tags = client.list_tags_for_resource(ResourceName=arn)
self.assertFalse("maid_status" in tags)
def test_elasticache_cluster_delete(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_delete")
p = self.load_policy(
{
"name": "elasticache-cluster-delete",
"resource": "cache-cluster",
"filters": [{"type": "value", "key": "Engine", "value": "redis"}],
"actions": [{"type": "delete"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
def test_elasticache_cluster_snapshot(self):
session_factory = self.replay_flight_data("test_elasticache_cluster_snapshot")
p = self.load_policy(
{
"name": "elasticache-cluster-snapshot",
"resource": "cache-cluster",
"actions": [{"type": "snapshot"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 3)
class TestElastiCacheSubnetGroup(BaseTest):
def test_elasticache_subnet_group(self):
session_factory = self.replay_flight_data("test_elasticache_subnet_group")
p = self.load_policy(
{"name": "elasticache-subnet-group", "resource": "cache-subnet-group"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
class TestElastiCacheSnapshot(BaseTest):
def test_elasticache_snapshot(self):
session_factory = self.replay_flight_data("test_elasticache_snapshot")
p = self.load_policy(
{"name": "elasticache-snapshot", "resource": "cache-snapshot"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 4)
def test_elasticache_snapshot_age_filter(self):
factory = self.replay_flight_data("test_elasticache_snapshot")
p = self.load_policy(
{
"name": "elasticache-snapshot-age-filter",
"resource": "cache-snapshot",
"filters": [{"type": "age", "days": 2, "op": "gt"}],
},
session_factory=factory,
)
resources = p.run()
self.assertEqual(len(resources), 4)
def test_elasticache_snapshot_mark(self):
session_factory = self.replay_flight_data("test_elasticache_snapshot_mark")
client = session_factory().client("elasticache")
p = self.load_policy(
{
"name": "elasticache-snapshot-mark",
"resource": "cache-snapshot",
"filters": [
{
"type": "value",
"key": "SnapshotName",
"value": "backup-myec-001-2017-06-23",
}
],
"actions": [{"type": "mark-for-op", "days": 30, "op": "delete"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
arn = p.resource_manager.generate_arn(resources[0]["SnapshotName"])
self.assertEqual(len(resources), 1)
tags = client.list_tags_for_resource(ResourceName=arn)
tag_map = {t["Key"]: t["Value"] for t in tags["TagList"]}
self.assertTrue("maid_status" in tag_map)
def test_elasticache_snapshot_unmark(self):
session_factory = self.replay_flight_data("test_elasticache_snapshot_unmark")
client = session_factory().client("elasticache")
p = self.load_policy(
{
"name": "elasticache-snapshot-unmark",
"resource": "cache-snapshot",
"filters": [
{
"type": "value",
"key": "SnapshotName",
"value": "backup-myec-001-2017-06-23",
}
],
"actions": [{"type": "unmark"}],
},
session_factory=session_factory,
)
resources = p.run()
arn = p.resource_manager.generate_arn(resources[0]["SnapshotName"])
self.assertEqual(len(resources), 1)
tags = client.list_tags_for_resource(ResourceName=arn)
self.assertFalse("maid_status" in tags)
def test_elasticache_snapshot_delete(self):
factory = self.replay_flight_data("test_elasticache_snapshot_delete")
p = self.load_policy(
{
"name": "elasticache-snapshot-delete",
"resource": "cache-snapshot",
"actions": ["delete"],
},
session_factory=factory,
)
resources = p.run()
self.assertEqual(len(resources), 4)
class TestModifyVpcSecurityGroupsAction(BaseTest):
def test_elasticache_remove_matched_security_groups(self):
#
# Test conditions:
# - running 2 Elasticache replication group in default VPC with 3 clusters
# - translates to 6 clusters
# - a default security group with id 'sg-7a3fcb13' exists
# - security group named PROD-ONLY-Test-Security-Group exists in VPC and is attached to
# one replication group
# - translates to 3 clusters marked non-compliant
#
# Results in 6 clusters with default Security Group attached
session_factory = self.replay_flight_data(
"test_elasticache_remove_matched_security_groups"
)
client = session_factory().client("elasticache", region_name="ca-central-1")
p = self.load_policy(
{
"name": "elasticache-remove-matched-security-groups",
"resource": "cache-cluster",
"filters": [
{
"type": "security-group",
"key": "GroupName",
"value": "(.*PROD-ONLY.*)",
"op": "regex",
}
],
"actions": [
{
"type": "modify-security-groups",
"remove": "matched",
"isolation-group": "sg-7a3fcb13",
}
],
},
session_factory=session_factory,
)
clean_p = self.load_policy(
{
"name": "elasticache-verifyremove-matched-security-groups",
"resource": "cache-cluster",
"filters": [
{"type": "security-group", "key": "GroupName", "value": "default"}
],
},
session_factory=session_factory,
)
resources = p.run()
waiter = client.get_waiter("replication_group_available")
waiter.wait()
clean_resources = clean_p.run()
# clusters autoscale across AZs, so they get -001, -002, etc appended
self.assertIn("sg-test-base", resources[0]["CacheClusterId"])
self.assertEqual(len(resources), 3)
self.assertEqual(len(resources[0]["SecurityGroups"]), 1)
# show that it was indeed a replacement of security groups
self.assertEqual(len(clean_resources[0]["SecurityGroups"]), 1)
self.assertEqual(len(clean_resources), 6)
def test_elasticache_add_security_group(self):
# Test conditions:
# - running Elasticache replication group in default VPC with 3 clusters
# - a default security group with id 'sg-7a3fcb13' exists
# - security group named PROD-ONLY-Test-Security-Group exists in VPC and is not attached
# - translates to 3 clusters marked to get new group attached
#
# Results in 3 clusters with default Security Group and PROD-ONLY-Test-Security-Group
session_factory = self.replay_flight_data("test_elasticache_add_security_group")
client = session_factory().client("elasticache", region_name="ca-central-1")
p = self.load_policy(
{
"name": "add-sg-to-prod-elasticache",
"resource": "cache-cluster",
"filters": [
{"type": "security-group", "key": "GroupName", "value": "default"}
],
"actions": [{"type": "modify-security-groups", "add": "sg-6360920a"}],
},
session_factory=session_factory,
)
clean_p = self.load_policy(
{
"name": "validate-add-sg-to-prod-elasticache",
"resource": "cache-cluster",
"filters": [
{"type": "security-group", "key": "GroupName", "value": "default"},
{
"type": "security-group",
"key": "GroupName",
"value": "PROD-ONLY-Test-Security-Group",
},
],
},
session_factory=session_factory,
)
resources = p.run()
waiter = client.get_waiter("replication_group_available")
waiter.wait()
clean_resources = clean_p.run()
self.assertEqual(len(resources), 3)
self.assertIn("sg-test-base", resources[0]["CacheClusterId"])
self.assertEqual(len(resources[0]["SecurityGroups"]), 1)
self.assertEqual(len(clean_resources[0]["SecurityGroups"]), 2)
self.assertEqual(len(clean_resources), 3)
class TestElastiCacheReplicationGroup(BaseTest):
def test_elasticache_replication_group(self):
session_factory = self.replay_flight_data("test_elasticache_replication_group")
p = self.load_policy(
{"name": "elasticache-rg", "resource": "elasticache-group"},
session_factory=session_factory,)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['ReplicationGroupId'], 'test-c7n-rg')
def test_elasticache_replication_group_delete(self):
session_factory = self.replay_flight_data("test_elasticache_replication_group_delete")
p = self.load_policy(
{
"name": "replication-group-enc-delete",
"resource": "elasticache-group",
"filters": [{"type": "value", "key": "AtRestEncryptionEnabled", "value": False}],
"actions": [{"type": "delete", "snapshot": True}],
},
session_factory=session_factory,)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['ReplicationGroupId'], 'c7n-delete')
client = session_factory().client("elasticache")
response = client.describe_replication_groups(ReplicationGroupId='c7n-delete')
self.assertEqual(response.get('ReplicationGroups')[0].get('Status'), 'deleting')
def test_elasticache_replication_group_tag(self):
# the elasticache resource uses the universal_taggable wrapper for the AWS
# resource tagging API - this test ensures that API works for RGs
session_factory = self.replay_flight_data(
"test_elasticache_replication_group_tag")
p = self.load_policy(
{
"name": "tag-ElastiCacheReplicationGroup",
"resource": "elasticache-group",
"filters": [{"tag:Tagging": "absent"}],
"actions": [{"type": "tag", "key": "Tagging", "value": "added"}],
},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 1)
client = session_factory().client("elasticache")
response = client.describe_replication_groups(ReplicationGroupId='c7n-tagging')
while(response.get('ReplicationGroups')[0].get('Status') == 'modifying'):
response = client.describe_replication_groups(ReplicationGroupId='c7n-tagging')
arn = p.resource_manager.get_arns(resources)[0]
tags = client.list_tags_for_resource(ResourceName=arn)["TagList"]
self.assertEqual(tags[0]["Value"], "added")
|
|
# -*- coding: utf8
"""Random Projection transformers
Random Projections are a simple and computationally efficient way to
reduce the dimensionality of the data by trading a controlled amount
of accuracy (as additional variance) for faster processing times and
smaller model sizes.
The dimensions and distribution of Random Projections matrices are
controlled so as to preserve the pairwise distances between any two
samples of the dataset.
The main theoretical result behind the efficiency of random projection is the
`Johnson-Lindenstrauss lemma (quoting Wikipedia)
<http://en.wikipedia.org/wiki/Johnson%E2%80%93Lindenstrauss_lemma>`_:
In mathematics, the Johnson-Lindenstrauss lemma is a result
concerning low-distortion embeddings of points from high-dimensional
into low-dimensional Euclidean space. The lemma states that a small set
of points in a high-dimensional space can be embedded into a space of
much lower dimension in such a way that distances between the points are
nearly preserved. The map used for the embedding is at least Lipschitz,
and can even be taken to be an orthogonal projection.
"""
# Authors: Olivier Grisel <[email protected]>,
# Arnaud Joly <[email protected]>
# License: BSD 3 clause
from __future__ import division
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
from numpy.testing import assert_equal
import scipy.sparse as sp
from .base import BaseEstimator, TransformerMixin
from .externals import six
from .externals.six.moves import xrange
from .utils import check_random_state
from .utils.extmath import safe_sparse_dot
from .utils.random import sample_without_replacement
from .utils.validation import check_array, NotFittedError
from .utils import DataDimensionalityWarning
__all__ = ["SparseRandomProjection",
"GaussianRandomProjection",
"johnson_lindenstrauss_min_dim"]
def johnson_lindenstrauss_min_dim(n_samples, eps=0.1):
"""Find a 'safe' number of components to randomly project to
The distortion introduced by a random projection `p` only changes the
distance between two points by a factor (1 +- eps) in an euclidean space
with good probability. The projection `p` is an eps-embedding as defined
by:
(1 - eps) ||u - v||^2 < ||p(u) - p(v)||^2 < (1 + eps) ||u - v||^2
Where u and v are any rows taken from a dataset of shape [n_samples,
n_features], eps is in ]0, 1[ and p is a projection by a random Gaussian
N(0, 1) matrix with shape [n_components, n_features] (or a sparse
Achlioptas matrix).
The minimum number of components to guarantee the eps-embedding is
given by:
n_components >= 4 log(n_samples) / (eps^2 / 2 - eps^3 / 3)
Note that the number of dimensions is independent of the original
number of features but instead depends on the size of the dataset:
the larger the dataset, the higher is the minimal dimensionality of
an eps-embedding.
Parameters
----------
n_samples : int or numpy array of int greater than 0,
Number of samples. If an array is given, it will compute
a safe number of components array-wise.
eps : float or numpy array of float in ]0,1[, optional (default=0.1)
Maximum distortion rate as defined by the Johnson-Lindenstrauss lemma.
If an array is given, it will compute a safe number of components
array-wise.
Returns
-------
n_components : int or numpy array of int,
The minimal number of components to guarantee with good probability
an eps-embedding with n_samples.
Examples
--------
>>> johnson_lindenstrauss_min_dim(1e6, eps=0.5)
663
>>> johnson_lindenstrauss_min_dim(1e6, eps=[0.5, 0.1, 0.01])
array([ 663, 11841, 1112658])
>>> johnson_lindenstrauss_min_dim([1e4, 1e5, 1e6], eps=0.1)
array([ 7894, 9868, 11841])
References
----------
.. [1] http://en.wikipedia.org/wiki/Johnson%E2%80%93Lindenstrauss_lemma
.. [2] Sanjoy Dasgupta and Anupam Gupta, 1999,
"An elementary proof of the Johnson-Lindenstrauss Lemma."
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.45.3654
"""
eps = np.asarray(eps)
n_samples = np.asarray(n_samples)
if np.any(eps <= 0.0) or np.any(eps >= 1):
raise ValueError(
"The JL bound is defined for eps in ]0, 1[, got %r" % eps)
if np.any(n_samples <= 0):
raise ValueError(
"The JL bound is defined for n_samples greater than zero, got %r"
% n_samples)
denominator = (eps ** 2 / 2) - (eps ** 3 / 3)
return (4 * np.log(n_samples) / denominator).astype(np.int)
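# Hedged worked example of the bound above (values rounded): for
# n_samples=1e6 and eps=0.5, the denominator is 0.5**2 / 2 - 0.5**3 / 3
# ~= 0.0833, so 4 * log(1e6) / 0.0833 ~= 663, matching the doctest.
#
#   4 * np.log(1e6) / (0.5 ** 2 / 2 - 0.5 ** 3 / 3)   # ~663.1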
def _check_density(density, n_features):
"""Factorize density check according to Li et al."""
if density == 'auto':
density = 1 / np.sqrt(n_features)
elif density <= 0 or density > 1:
raise ValueError("Expected density in range ]0, 1], got: %r"
% density)
return density
def _check_input_size(n_components, n_features):
"""Factorize argument checking for random matrix generation"""
if n_components <= 0:
raise ValueError("n_components must be strictly positive, got %d" %
n_components)
if n_features <= 0:
raise ValueError("n_features must be strictly positive, got %d" %
n_features)
def gaussian_random_matrix(n_components, n_features, random_state=None):
""" Generate a dense Gaussian random matrix.
The components of the random matrix are drawn from
N(0, 1.0 / n_components).
Parameters
----------
n_components : int,
Dimensionality of the target projection space.
n_features : int,
Dimensionality of the original source space.
random_state : int, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
matrix at fit time.
Returns
-------
components : numpy array of shape [n_components, n_features]
The generated Gaussian random matrix.
See Also
--------
GaussianRandomProjection
sparse_random_matrix
"""
_check_input_size(n_components, n_features)
rng = check_random_state(random_state)
components = rng.normal(loc=0.0,
scale=1.0 / np.sqrt(n_components),
size=(n_components, n_features))
return components
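# Illustrative usage sketch (not part of the module's API surface; numbers
# are only for demonstration): entries are drawn with standard deviation
# 1 / sqrt(n_components).
#
#   rp = gaussian_random_matrix(100, 10000, random_state=0)
#   rp.shape    # (100, 10000)
#   rp.std()    # close to 1 / sqrt(100) = 0.1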
def sparse_random_matrix(n_components, n_features, density='auto',
random_state=None):
"""Generalized Achlioptas random sparse matrix for random projection
Setting density to 1 / 3 will yield the original matrix by Dimitris
Achlioptas while setting a lower value will yield the generalization
by Ping Li et al.
If we note :math:`s = 1 / density`, the components of the random matrix are
drawn from:
- -sqrt(s) / sqrt(n_components) with probability 1 / 2s
- 0 with probability 1 - 1 / s
- +sqrt(s) / sqrt(n_components) with probability 1 / 2s
Parameters
----------
n_components : int,
Dimensionality of the target projection space.
n_features : int,
Dimensionality of the original source space.
density : float in range ]0, 1] or 'auto', optional (default='auto')
Ratio of non-zero component in the random projection matrix.
If density = 'auto', the value is set to the minimum density
as recommended by Ping Li et al.: 1 / sqrt(n_features).
Use density = 1 / 3.0 if you want to reproduce the results from
Achlioptas, 2001.
random_state : integer, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
matrix at fit time.
Returns
-------
components: numpy array or CSR matrix with shape [n_components, n_features]
The generated sparse random matrix.
See Also
--------
SparseRandomProjection
gaussian_random_matrix
References
----------
.. [1] Ping Li, T. Hastie and K. W. Church, 2006,
"Very Sparse Random Projections".
http://www.stanford.edu/~hastie/Papers/Ping/KDD06_rp.pdf
.. [2] D. Achlioptas, 2001, "Database-friendly random projections",
http://www.cs.ucsc.edu/~optas/papers/jl.pdf
"""
_check_input_size(n_components, n_features)
density = _check_density(density, n_features)
rng = check_random_state(random_state)
if density == 1:
# skip index generation if totally dense
components = rng.binomial(1, 0.5, (n_components, n_features)) * 2 - 1
return 1 / np.sqrt(n_components) * components
else:
# Generate location of non zero elements
indices = []
offset = 0
indptr = [offset]
for i in xrange(n_components):
# find the indices of the non-zero components for row i
n_nonzero_i = rng.binomial(n_features, density)
indices_i = sample_without_replacement(n_features, n_nonzero_i,
random_state=rng)
indices.append(indices_i)
offset += n_nonzero_i
indptr.append(offset)
indices = np.concatenate(indices)
# Among non zero components the probability of the sign is 50%/50%
data = rng.binomial(1, 0.5, size=np.size(indices)) * 2 - 1
# build the CSR structure by concatenating the rows
components = sp.csr_matrix((data, indices, indptr),
shape=(n_components, n_features))
return np.sqrt(1 / density) / np.sqrt(n_components) * components
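# Illustrative usage sketch: with density='auto' and n_features=10000 the
# density is 1 / sqrt(10000) = 0.01, so roughly 1% of the entries are
# non-zero in the returned CSR matrix.
#
#   M = sparse_random_matrix(100, 10000, density='auto', random_state=0)
#   M.nnz / float(100 * 10000)   # approximately 0.01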
class BaseRandomProjection(six.with_metaclass(ABCMeta, BaseEstimator,
TransformerMixin)):
"""Base class for random projections.
Warning: This class should not be used directly.
Use derived classes instead.
"""
@abstractmethod
def __init__(self, n_components='auto', eps=0.1, dense_output=False,
random_state=None):
self.n_components = n_components
self.eps = eps
self.dense_output = dense_output
self.random_state = random_state
self.components_ = None
self.n_components_ = None
@abstractmethod
def _make_random_matrix(self, n_components, n_features):
""" Generate the random projection matrix
Parameters
----------
n_components : int,
Dimensionality of the target projection space.
n_features : int,
Dimensionality of the original source space.
Returns
-------
components : numpy array or CSR matrix [n_components, n_features]
The generated random matrix.
"""
def fit(self, X, y=None):
"""Generate a sparse random projection matrix
Parameters
----------
X : numpy array or scipy.sparse of shape [n_samples, n_features]
Training set: only the shape is used to find optimal random
matrix dimensions based on the theory referenced in the
aforementioned papers.
y : is not used: placeholder to allow for usage in a Pipeline.
Returns
-------
self
"""
X = check_array(X, accept_sparse=['csr', 'csc'])
n_samples, n_features = X.shape
if self.n_components == 'auto':
self.n_components_ = johnson_lindenstrauss_min_dim(
n_samples=n_samples, eps=self.eps)
if self.n_components_ <= 0:
raise ValueError(
'eps=%f and n_samples=%d lead to a target dimension of '
'%d which is invalid' % (
self.eps, n_samples, self.n_components_))
elif self.n_components_ > n_features:
raise ValueError(
'eps=%f and n_samples=%d lead to a target dimension of '
'%d which is larger than the original space with '
'n_features=%d' % (self.eps, n_samples, self.n_components_,
n_features))
else:
if self.n_components <= 0:
raise ValueError("n_components must be greater than 0, got %s"
% self.n_components)
elif self.n_components > n_features:
warnings.warn(
"The number of components is higher than the number of"
" features: n_features < n_components (%s < %s)."
"The dimensionality of the problem will not be reduced."
% (n_features, self.n_components),
DataDimensionalityWarning)
self.n_components_ = self.n_components
# Generate a projection matrix of size [n_components, n_features]
self.components_ = self._make_random_matrix(self.n_components_,
n_features)
# Check contract
assert_equal(
self.components_.shape,
(self.n_components_, n_features),
err_msg=('An error has occurred: the self.components_ matrix does '
'not have the proper shape.'))
return self
def transform(self, X, y=None):
"""Project the data by using matrix product with the random matrix
Parameters
----------
X : numpy array or scipy.sparse of shape [n_samples, n_features]
The input data to project into a smaller dimensional space.
y : is not used: placeholder to allow for usage in a Pipeline.
Returns
-------
X_new : numpy array or scipy sparse of shape [n_samples, n_components]
Projected array.
"""
X = check_array(X, accept_sparse=['csr', 'csc'])
if self.components_ is None:
raise NotFittedError('No random projection matrix had been fit.')
if X.shape[1] != self.components_.shape[1]:
raise ValueError(
'Impossible to perform projection: '
'X at fit stage had a different number of features. '
'(%s != %s)' % (X.shape[1], self.components_.shape[1]))
X_new = safe_sparse_dot(X, self.components_.T,
dense_output=self.dense_output)
return X_new
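# End-to-end sketch of the fit/transform contract implemented above (the
# transformer classes are defined later in this module; the data is made up):
#
#   X = np.random.RandomState(0).rand(100, 10000)
#   transformer = SparseRandomProjection(random_state=0)
#   X_new = transformer.fit_transform(X)
#   X_new.shape   # (100, n_components_), with n_components_ set by the JL bound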
class GaussianRandomProjection(BaseRandomProjection):
"""Reduce dimensionality through Gaussian random projection
The components of the random matrix are drawn from N(0, 1 / n_components).
Parameters
----------
n_components : int or 'auto', optional (default = 'auto')
Dimensionality of the target projection space.
n_components can be automatically adjusted according to the
number of samples in the dataset and the bound given by the
Johnson-Lindenstrauss lemma. In that case the quality of the
embedding is controlled by the ``eps`` parameter.
It should be noted that the Johnson-Lindenstrauss lemma can yield
very conservative estimates of the required number of components,
as it makes no assumption on the structure of the dataset.
eps : strictly positive float, optional (default=0.1)
Parameter to control the quality of the embedding according to
the Johnson-Lindenstrauss lemma when n_components is set to
'auto'.
Smaller values lead to better embedding and higher number of
dimensions (n_components) in the target projection space.
random_state : integer, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
matrix at fit time.
Attributes
----------
n_components_ : int
Concrete number of components computed when n_components="auto".
components_ : numpy array of shape [n_components, n_features]
Random matrix used for the projection.
See Also
--------
SparseRandomProjection
"""
def __init__(self, n_components='auto', eps=0.1, random_state=None):
super(GaussianRandomProjection, self).__init__(
n_components=n_components,
eps=eps,
dense_output=True,
random_state=random_state)
def _make_random_matrix(self, n_components, n_features):
""" Generate the random projection matrix
Parameters
----------
n_components : int,
Dimensionality of the target projection space.
n_features : int,
Dimensionality of the original source space.
Returns
-------
components : numpy array or CSR matrix [n_components, n_features]
The generated random matrix.
"""
random_state = check_random_state(self.random_state)
return gaussian_random_matrix(n_components,
n_features,
random_state=random_state)
class SparseRandomProjection(BaseRandomProjection):
"""Reduce dimensionality through sparse random projection
Sparse random matrix is an alternative to dense random
projection matrix that guarantees similar embedding quality while being
much more memory efficient and allowing faster computation of the
projected data.
If we note `s = 1 / density` the components of the random matrix are
drawn from:
- -sqrt(s) / sqrt(n_components) with probability 1 / 2s
- 0 with probability 1 - 1 / s
- +sqrt(s) / sqrt(n_components) with probability 1 / 2s
Parameters
----------
n_components : int or 'auto', optional (default = 'auto')
Dimensionality of the target projection space.
n_components can be automatically adjusted according to the
number of samples in the dataset and the bound given by the
Johnson-Lindenstrauss lemma. In that case the quality of the
embedding is controlled by the ``eps`` parameter.
It should be noted that the Johnson-Lindenstrauss lemma can yield
very conservative estimates of the required number of components,
as it makes no assumption on the structure of the dataset.
density : float in range ]0, 1], optional (default='auto')
Ratio of non-zero component in the random projection matrix.
If density = 'auto', the value is set to the minimum density
as recommended by Ping Li et al.: 1 / sqrt(n_features).
Use density = 1 / 3.0 if you want to reproduce the results from
Achlioptas, 2001.
eps : strictly positive float, optional, (default=0.1)
Parameter to control the quality of the embedding according to
the Johnson-Lindenstrauss lemma when n_components is set to
'auto'.
Smaller values lead to better embedding and higher number of
dimensions (n_components) in the target projection space.
dense_output : boolean, optional (default=False)
If True, ensure that the output of the random projection is a
dense numpy array even if the input and random projection matrix
are both sparse. In practice, if the number of components is
small the number of zero components in the projected data will
be very small and it will be more CPU and memory efficient to
use a dense representation.
If False, the projected data uses a sparse representation if
the input is sparse.
random_state : integer, RandomState instance or None (default=None)
Control the pseudo random number generator used to generate the
matrix at fit time.
Attributes
----------
n_components_ : int
Concrete number of components computed when n_components="auto".
components_ : CSR matrix with shape [n_components, n_features]
Random matrix used for the projection.
density_ : float in range 0.0 - 1.0
Concrete density computed when density = "auto".
See Also
--------
GaussianRandomProjection
References
----------
.. [1] Ping Li, T. Hastie and K. W. Church, 2006,
"Very Sparse Random Projections".
http://www.stanford.edu/~hastie/Papers/Ping/KDD06_rp.pdf
.. [2] D. Achlioptas, 2001, "Database-friendly random projections",
http://www.cs.ucsc.edu/~optas/papers/jl.pdf
"""
def __init__(self, n_components='auto', density='auto', eps=0.1,
dense_output=False, random_state=None):
super(SparseRandomProjection, self).__init__(
n_components=n_components,
eps=eps,
dense_output=dense_output,
random_state=random_state)
self.density = density
self.density_ = None
def _make_random_matrix(self, n_components, n_features):
""" Generate the random projection matrix
Parameters
----------
n_components : int,
Dimensionality of the target projection space.
n_features : int,
Dimensionality of the original source space.
Returns
-------
components : numpy array or CSR matrix [n_components, n_features]
The generated random matrix.
"""
random_state = check_random_state(self.random_state)
self.density_ = _check_density(self.density, n_features)
return sparse_random_matrix(n_components,
n_features,
density=self.density_,
random_state=random_state)
|
|
#!/usr/bin/python
"""
HeaderID Extension for Python-Markdown
======================================
Auto-generate id attributes for HTML headers.
Basic usage:
>>> import markdown
>>> text = "# Some Header #"
>>> md = markdown.markdown(text, ['headerid'])
>>> print md
<h1 id="some-header">Some Header</h1>
All header IDs are unique:
>>> text = '''
... #Header
... #Header
... #Header'''
>>> md = markdown.markdown(text, ['headerid'])
>>> print md
<h1 id="header">Header</h1>
<h1 id="header_1">Header</h1>
<h1 id="header_2">Header</h1>
To fit within a html template's hierarchy, set the header base level:
>>> text = '''
... #Some Header
... ## Next Level'''
>>> md = markdown.markdown(text, ['headerid(level=3)'])
>>> print md
<h3 id="some-header">Some Header</h3>
<h4 id="next-level">Next Level</h4>
Works with inline markup.
>>> text = '#Some *Header* with [markup](http://example.com).'
>>> md = markdown.markdown(text, ['headerid'])
>>> print md
<h1 id="some-header-with-markup">Some <em>Header</em> with <a href="http://example.com">markup</a>.</h1>
Turn off auto generated IDs:
>>> text = '''
... # Some Header
... # Another Header'''
>>> md = markdown.markdown(text, ['headerid(forceid=False)'])
>>> print md
<h1>Some Header</h1>
<h1>Another Header</h1>
Use with MetaData extension:
>>> text = '''header_level: 2
... header_forceid: Off
...
... # A Header'''
>>> md = markdown.markdown(text, ['headerid', 'meta'])
>>> print md
<h2>A Header</h2>
Copyright 2007-2011 [Waylan Limberg](http://achinghead.com/).
Project website: <http://packages.python.org/Markdown/extensions/header_id.html>
Contact: [email protected]
License: BSD (see ../docs/LICENSE for details)
Dependencies:
* [Python 2.3+](http://python.org)
* [Markdown 2.0+](http://packages.python.org/Markdown/)
"""
import markdown
import re
import logging
import unicodedata
logger = logging.getLogger('MARKDOWN')
IDCOUNT_RE = re.compile(r'^(.*)_([0-9]+)$')
def slugify(value, separator):
""" Slugify a string, to make it URL friendly. """
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
value = re.sub('[^\w\s-]', '', value.decode('ascii')).strip().lower()
return re.sub('[%s\s]+' % separator, separator, value)
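# Hedged examples of the slug transformation above (ASCII-folding, stripping
# non-word characters, then collapsing whitespace into the separator):
#
#   slugify(u'Some Header', '-')           # 'some-header'
#   slugify(u'Schr\xf6dinger Eq.', '-')    # 'schrodinger-eq'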
def unique(id, ids):
""" Ensure id is unique in set of ids. Append '_1', '_2'... if not """
while id in ids or not id:
m = IDCOUNT_RE.match(id)
if m:
id = '%s_%d'% (m.group(1), int(m.group(2))+1)
else:
id = '%s_%d'% (id, 1)
ids.append(id)
return id
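# Hedged illustration of the collision handling above:
#
#   ids = []
#   unique('header', ids)   # 'header'
#   unique('header', ids)   # 'header_1'
#   unique('header', ids)   # 'header_2'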
def itertext(elem):
""" Loop through all children and return text only.
Reimplements method of same name added to ElementTree in Python 2.7
"""
if elem.text:
yield elem.text
for e in elem:
for s in itertext(e):
yield s
if e.tail:
yield e.tail
class HeaderIdTreeprocessor(markdown.treeprocessors.Treeprocessor):
""" Assign IDs to headers. """
IDs = []
def run(self, doc):
start_level, force_id = self._get_meta()
slugify = self.config['slugify']
sep = self.config['separator']
for elem in doc.getiterator():
if elem.tag in ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']:
if force_id:
if "id" in elem.attrib:
id = elem.get('id')
else:
id = slugify(u''.join(itertext(elem)), sep)
elem.set('id', unique(id, self.IDs))
if start_level:
level = int(elem.tag[-1]) + start_level
if level > 6:
level = 6
elem.tag = 'h%d' % level
def _get_meta(self):
""" Return meta data supported by this extension as a tuple """
level = int(self.config['level']) - 1
force = self._str2bool(self.config['forceid'])
if hasattr(self.md, 'Meta'):
if self.md.Meta.has_key('header_level'):
level = int(self.md.Meta['header_level'][0]) - 1
if self.md.Meta.has_key('header_forceid'):
force = self._str2bool(self.md.Meta['header_forceid'][0])
return level, force
def _str2bool(self, s, default=False):
""" Convert a string to a boolean value. """
s = str(s)
if s.lower() in ['0', 'f', 'false', 'off', 'no', 'n']:
return False
elif s.lower() in ['1', 't', 'true', 'on', 'yes', 'y']:
return True
return default
class HeaderIdExtension (markdown.Extension):
def __init__(self, configs):
# set defaults
self.config = {
'level' : ['1', 'Base level for headers.'],
'forceid' : ['True', 'Force all headers to have an id.'],
'separator' : ['-', 'Word separator.'],
'slugify' : [slugify, 'Callable to generate anchors'],
}
for key, value in configs:
self.setConfig(key, value)
def extendMarkdown(self, md, md_globals):
md.registerExtension(self)
self.processor = HeaderIdTreeprocessor()
self.processor.md = md
self.processor.config = self.getConfigs()
if 'attr_list' in md.treeprocessors.keys():
# insert after attr_list treeprocessor
md.treeprocessors.add('headerid', self.processor, '>attr_list')
else:
# insert after 'inline' treeprocessor.
md.treeprocessors.add('headerid', self.processor, '>inline')
def reset(self):
self.processor.IDs = []
def makeExtension(configs=None):
return HeaderIdExtension(configs=configs)
if __name__ == "__main__":
import doctest
doctest.testmod()
|
|
# coding=utf-8
# Copyright 2022 The ML Fairness Gym Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Fairness environment base classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import enum
from typing import Any, Callable, Dict, List, Mapping, Optional, Text, Tuple, TypeVar, Union
from absl import flags
from absl import logging
import attr
import gin
import gym
from gym.utils import seeding
import gym.utils.json_utils
import more_itertools
import networkx as nx
import numpy as np
from recsim.simulator import recsim_gym
import simplejson as json
# Values associated with this key within dictionaries are given
# special treatment as RandomState internals during JSON serialization /
# deserialization. This works around an issue where RandomState itself fails
# to serialize.
RANDOM_STATE_KEY = '__random_state__'
flags.DEFINE_bool(
'validate_history', False,
'If True, metrics check the validity of the history when measuring. '
'Can be turned off to save computation.')
class NotInitializedError(Exception):
"""Object is not fully initialized."""
pass
class InvalidObservationError(Exception):
"""Observation is not valid."""
pass
class InvalidRewardError(Exception):
"""Reward is not valid."""
pass
class BadFeatureFnError(Exception):
"""Featurization is not valid."""
pass
class InvalidHistoryError(Exception):
"""History is not valid."""
pass
class EpisodeDoneError(Exception):
"""Called act on a done episode."""
pass
class NotReproducibleError(Exception):
"""Simulation was run in a non-reproducible way."""
pass
def validate_reward(reward):
"""Raises InvalidRewardError if reward is not None or a scalar."""
if reward is None:
return True
try:
float(reward)
except TypeError:
raise InvalidRewardError
class GymEncoder(json.JSONEncoder):
"""Encoder to handle common gym and numpy objects."""
def default(self, obj):
# First check if the object has a to_jsonable() method which converts it to
# a representation that can be json encoded.
try:
return obj.to_jsonable()
except AttributeError:
pass
if callable(obj):
return {'callable': obj.__name__}
if isinstance(obj, (bool, np.bool_)):
return int(obj)
if isinstance(obj, enum.Enum):
return {'__enum__': str(obj)}
if isinstance(obj, recsim_gym.RecSimGymEnv):
# TODO(): We cannot serialize a full RecSimGymEnv but for now
# we can note its existence.
return 'RecSimGym'
if isinstance(obj, np.ndarray):
return obj.tolist()
if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16, np.int32,
np.int64, np.uint8, np.uint16, np.uint32, np.uint64)):
return int(obj)
if isinstance(obj, (np.float_, np.float16, np.float32, np.float64)):
return float(obj)
if isinstance(obj, nx.Graph):
return nx.readwrite.json_graph.node_link_data(obj)
if isinstance(obj, np.random.RandomState):
state = obj.get_state()
return {
RANDOM_STATE_KEY:
(state[0], state[1].tolist(), state[2], state[3], state[4])
}
if isinstance(obj, Params) or isinstance(obj, State):
return obj.asdict()
return json.JSONEncoder.default(self, obj)
def to_json(dictionary, sort_keys=True, **kw):
return json.dumps(dictionary, cls=GymEncoder, sort_keys=sort_keys, **kw)
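# Hedged usage sketch of the encoder above (values and output are
# illustrative only; exact spacing depends on simplejson defaults):
#
#   to_json({'weights': np.array([1, 2]), 'done': np.bool_(True)})
#   # roughly: '{"done": 1, "weights": [1, 2]}'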
@attr.s(cmp=False)
class State(object):
"""Simple mutable storage class for state variables."""
asdict = attr.asdict
def to_json(self):
return to_json(self)
def __eq__(self, other):
return self.to_json() == other.to_json()
def __ne__(self, other):
return self.to_json() != other.to_json()
# TODO(): Find a better type for actions than Any.
ActionType = Any # pylint: disable=invalid-name
@attr.s
class HistoryItem(object):
"""Data class for state, action pairs that make up a history."""
state = attr.ib() # type: State
action = attr.ib() # type: ActionType
def to_jsonable(self):
return attr.astuple(self)
# Allow HistoryItems to act like tuples for unpacking.
def __iter__(self):
return iter(attr.astuple(self, recurse=False))
HistoryType = List[HistoryItem]
@gin.configurable
@attr.s
class Params(object):
"""Simple mutable storage class for parameter variables."""
asdict = attr.asdict
ParamsType = TypeVar('ParamsType', bound=Params)
class RewardFn(object):
"""Base reward function.
A reward function describes how to extract a scalar reward from state or
changes in state.
Subclasses should override the __call__ function.
"""
# TODO(): Find a better type for observations than Any.
def __call__(self, observation):
raise NotImplementedError
DEFAULT_GROUP = np.ones(1)
NO_GROUP = np.zeros(1)
DEFAULT_GROUP_SPACE = gym.spaces.MultiBinary(1)
class StateUpdater(object):
"""An object used to update state."""
def update(self, state, action):
raise NotImplementedError
class NoUpdate(StateUpdater):
"""Applies no update."""
def update(self, state, action):
"""Does nothing."""
del state, action # Unused.
class FairnessEnv(gym.Env):
"""ML-fairness-gym Environment.
An ML-fairness-gym environment is an environment that additionally reports to
an oracle that can determine the potential outcomes for each action that the
agent takes.
The main API methods that users of this class need to know are:
Inherited from gym.Env (see gym/core.py for more documentation):
step
reset
render
close
seed
# TODO(): Add methods to save/restore state.
Extends gym.Env:
set_scalar_reward: Allows an agent to specify how the environment should
translate state or changes in state to a scalar reward.
Observations returned immediately after reset (initial observations) may not
be in the observation space. They can be used to establish some prior.
Subsequent observations are checked at each step to ensure they are contained.
When implementing a FairnessEnv, override `_step_impl` instead of overriding
the `step` method.
"""
observable_state_vars = {} # type: Mapping[Text, gym.Space]
# Should inherit from gym.Space
action_space = None # type: Optional[gym.Space]
# group_membership_var should be the name of an observable state variable.
group_membership_var = None # type: Optional[Text]
assert (not group_membership_var or
(group_membership_var in observable_state_vars))
def __init__(self,
params = None,
initialize_observation_space = True,
init_action_space_random_state = True):
self.history = [] # type: HistoryType
self.state = None # type: Optional[State]
self.reward_fn = None # type: Optional[RewardFn]
# Sometimes the action_space property is not ready here, e.g. RecsimWrapper
if init_action_space_random_state:
# gym.Space.np_random is created lazily, make sure it is created here.
_ = self.action_space.np_random
if initialize_observation_space:
self.observation_space = gym.spaces.Dict(self.observable_state_vars)
# Copy params so if environment mutates params it is contained to this
# environment instance.
self.initial_params = copy.deepcopy(params)
def get_group_identifier(observation):
return observation.get(self.group_membership_var, DEFAULT_GROUP)
self.group_identifier_fn = get_group_identifier # type: Callable
def step(
self,
action):
"""Run one timestep of the environment's dynamics.
This is part of the openAI gym interface and should not be overridden.
When writing a new ML fairness gym environment, users should override the
`_step_impl` method.
Args:
action: An action provided by the agent. A member of `action_space`.
Returns:
observation: Agent's observation of the current environment. A member
of `observation_space`.
reward: Scalar reward returned after previous action. This should be the
output of a `RewardFn` provided by the agent.
done: Whether the episode has ended, in which case further step() calls
will return undefined results.
info: A dictionary with auxiliary diagnostic information.
Raises:
NotInitializedError: If called before first reset().
gym.error.InvalidAction: If `action` is not in `self.action_space`.
"""
if self.state is None:
raise NotInitializedError(
'State is None. State must be initialized before taking a step. '
'If using core.FairnessEnv, subclass and implement necessary methods.'
)
if not self.action_space.contains(action):
raise gym.error.InvalidAction('Invalid action: %s' % action)
self._update_history(self.state, action)
self.state = self._step_impl(self.state, action)
observation = self._get_observable_state()
logging.debug('Observation: %s.', observation)
logging.debug('Observation space: %s.', self.observation_space)
assert self.observation_space.contains(
observation
), 'Observation %s is not contained in self.observation_space' % observation
# TODO(): Remove this completely.
# For compatibility, compute a reward_fn if one is given.
reward = self.reward_fn(observation) if self.reward_fn is not None else 0
return observation, reward, self._is_done(), {}
def seed(self, seed = None):
"""Sets the seed for this env's random number generator."""
rng, seed = seeding.np_random(seed)
self.state.rng = rng
return [seed]
def reset(self):
"""Resets the state of the environment and returns an initial observation.
Returns:
observation: The observable features for the first interaction.
"""
self._reset_history()
return self._get_observable_state()
# TODO(): Remove this.
def set_scalar_reward(self, reward_fn):
"""Sets the environment's reward_fn.
`reward_fn` describes how to extract a scalar reward from the environment's
state or changes in state.
The agent interacting with the environment is expected to call this function
if it intends to use the environment's reward response.
Args:
reward_fn: A `RewardFn` object.
"""
self.reward_fn = reward_fn
def serialize_history(self):
"""Serialize history to JSON.
Returns:
A string containing a serialized JSON representation of the environment's
history.
"""
# Sanitize history by handling non-json-serializable state.
sanitized_history = [(json.loads(history_item.state.to_json()),
history_item.action) for history_item in self.history]
return json.dumps(
{
'environment': repr(self.__class__),
'history': sanitized_history
},
cls=GymEncoder,
sort_keys=True)
####################################################################
# Methods to be overridden by each fairness environment. #
####################################################################
def _step_impl(self, state, action):
"""Run one timestep of the environment's dynamics.
This should be implemented when creating a new environment.
Args:
state: A `State` object.
action: An action provided by the agent. A member of `action_space`.
Returns:
An updated `State` object.
"""
raise NotImplementedError
def _get_observable_state(self):
"""Extracts observable state from `self.state`.
Returns:
A dictionary mapping variable name to a numpy array with that variable's
value.
"""
return {
var_name: np.array(getattr(self.state, var_name))
for var_name in self.observable_state_vars
}
def _get_reward(self):
"""Extracts a scalar reward from `self.state`."""
return
def _is_done(self):
"""Extracts whether the episode is done from `self.state`."""
return False
#####################
# Metric interface #
#####################
def _get_history(self):
"""This function should only be called by a Metric."""
return self.history
def _get_state(self):
"""This function should only be called by a Metric."""
return copy.deepcopy(self.state)
#################################
# Private convenience functions #
#################################
def _update_history(self, state, action):
"""Adds state and action to the environment's history."""
self.history.append(HistoryItem(state=copy.deepcopy(state), action=action))
def _set_history(self, history):
self.history = history
def _reset_history(self):
"""Resets the environment's history."""
self.history = []
def _set_state(self, state):
"""Sets the environment's state."""
self.state = state
return self
class Metric(object):
"""Base metric class.
A metric processes the history of interactions between an agent and an
environment and evaluates some measure of fairness of those interactions.
The main API method that users of this class need to know is:
measure: Takes a FairnessEnv as input and outputs a measure report. The
type of the measure report is not specified in the base class, but may
be specified for subclasses.
"""
def __init__(self, environment, realign_fn=None):
# A copy of the environment is used so that simulations do not affect
# the history of the environment being measured.
self._environment = copy.deepcopy(environment)
self._environment_setter = self._environment._set_state # pylint: disable=protected-access
self._realign_fn = realign_fn
def _simulate(self, state, action):
"""Simulates the effect of `action` on `state`.
Args:
state: A `State` object.
action: An action that is in the action space of `self.environment`.
Returns:
A new state.
"""
env = self._environment_setter(state)
env.step(action)
simulated_state = env._get_state() # pylint: disable=protected-access
return simulated_state
def _validate_history(self, history):
"""Checks that a history can be replayed using the metric's simulation.
Args:
history: an iterable of (state, action) pairs.
Raises:
ValueError if the metric's simulation and the history do not match.
"""
history = copy.deepcopy(history)
for idx, (step, next_step) in enumerate(more_itertools.pairwise(history)):
simulated_state = self._simulate(step.state, step.action)
if simulated_state != next_step.state:
raise ValueError('Invalid history at step %d %s != %s' %
(idx, step, next_step))
def _extract_history(self, env):
"""Gets and validates a history from an environment."""
history = env._get_history() # pylint: disable=protected-access
if flags.FLAGS.validate_history:
self._validate_history(history)
if self._realign_fn is not None:
return self._realign_fn(history)
return history
def measure(self, env):
"""Measures an agent's history of interactions with an environment."""
raise NotImplementedError
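# A minimal illustrative sketch (not part of the library) of a concrete
# Metric: it simply reports how many (state, action) interactions were
# recorded. A real metric would compute something fairness-related from
# those pairs instead.
class _ExampleStepCountMetric(Metric):
  """Hypothetical metric that counts the interactions in an env's history."""

  def measure(self, env):
    history = self._extract_history(env)
    return len(history)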
class Agent(object):
"""Base Agent class.
The main API method that users of this class need to know is:
act: Takes (observation, reward, done) from the environment and returns
an action in the action space of the environment.
"""
def __init__(self, action_space, reward_fn,
observation_space):
"""Initializes an Agent.
Args:
action_space: a `gym.Space` that contains valid actions.
reward_fn: a `RewardFn` object.
observation_space: a `gym.Space` that contains valid observations.
"""
self.action_space = action_space
self.reward_fn = reward_fn
self.observation_space = observation_space
self.rng = np.random.RandomState()
def initial_action(self):
"""Returns an action in action_space that is the initial default action."""
raise NotImplementedError
# TODO(): Find a better type for observations than Any.
def act(self, observation, done):
"""Returns an action in the action_space specified in the constructor.
Do not override this method. When implementing act for a child class,
override the _act_impl method instead.
Args:
observation: An observation in `self.observation_space`.
done: Boolean indicating whether the simulation has terminated.
"""
reward = self.reward_fn(observation)
return self._act_impl(observation, reward, done)
# TODO(): Find a better type for observations than Any.
def _act_impl(self, observation, reward, done):
"""The implementation of the agent's act method.
This should be overridden by any class inheriting from Agent. When calling
this function, the agent has already replaced the environment's reward
value with its own.
Args:
observation: An observation in `self.observation_space`.
reward: A scalar reward function that the agent has computed from
observation.
done: Boolean indicating whether the simulation has terminated.
"""
raise NotImplementedError
# TODO(): Find a better type for observations than Any.
def flatten_features(self, observation):
"""Flattens observation in `observation_space` into a vector for training.
Args:
observation: An observation in `observation_space`.
Returns:
A 1-d numpy array containing the values from the observation.
"""
return np.concatenate([
np.array(feat).reshape((-1,)) for _, feat in sorted(observation.items())
])
def seed(self, value):
rng, seed = seeding.np_random(value)
self.rng = rng
return [seed]
def sample_from(self, space):
"""Sample from a space using the agent's own state."""
space = copy.deepcopy(space)
space._np_random = self.rng # pylint: disable=protected-access
return space.sample()
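# A minimal illustrative sketch (not part of the library) of a concrete
# Agent: it ignores the observation and reward and samples a random action
# from its action space via the seeded `sample_from` helper above.
class _ExampleRandomAgent(Agent):
  """Hypothetical agent that acts uniformly at random."""

  def initial_action(self):
    return self.sample_from(self.action_space)

  def _act_impl(self, observation, reward, done):
    del observation, reward, done  # A random policy uses none of these.
    return self.sample_from(self.action_space)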
|
|
# Lint as: python3
# Copyright 2017 The Rudders Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Tests for dataset_utils.
Note that Pandas DataFrame lists have been manually aligned so that you can read
a "row" from the named-column representation by simply reading "down". This
makes it much easier to verify and write the intended behaviour.
blaze test --test_output=streamed \
third_party/py/jax_recommenders/datasets:dataset_utils_test
"""
from jax import random
from jax_recommenders.datasets.dataset_utils import DatasetsBuilder
import pandas as pd
import tensorflow_datasets as tfds
from absl.testing import absltest
# TODO(b/193004530): when/if JAX team supports a mock random generator, use that
# to avoid potential breakages if/when JAX team changes default random
# number generator.
class DatasetUtilsTest(absltest.TestCase):
def test_add_per_user_time_deltas(self):
# pyformat: disable
# pylint: disable=bad-whitespace
input_df = pd.DataFrame({
'user_id': [0, 1, 0, 0, 3, 2],
'item_id': [0, 1, 2, 0, 1, 3],
'timestamp': [1, 2, 2, 2, 5, 9],
})
expected_df = pd.DataFrame({
'user_id': [0, 1, 0, 0, 3, 2],
'item_id': [0, 1, 2, 0, 1, 3],
'timestamp': [1, 2, 2, 2, 5, 9],
# 'is_first' has 1 for the lowest timestamp of each user.
'is_first': [1, 1, 0, 0, 1, 1],
# 'time_delta' has the timestamp difference from the previous review
# by the same user.
'time_delta': [0, 0, 1, 1, 0, 0],
})
# pylint: enable=bad-whitespace
# pyformat: enable
actual_df = DatasetsBuilder.add_per_user_time_deltas(input_df)
pd.util.testing.assert_frame_equal(expected_df, actual_df)
def test_add_per_user_time_deltas_missing_column(self):
# pylint: disable=bad-whitespace
input_df = pd.DataFrame({
'user_id': [0, 1, 0, 3, 2],
'item_id': [0, 1, 2, 1, 3],
})
# pylint: enable=bad-whitespace
with self.assertRaises(KeyError):
DatasetsBuilder.add_per_user_time_deltas(input_df)
def test_leave_last_one_out_split(self):
# pylint: disable=bad-whitespace
# pyformat: disable
input_df = pd.DataFrame({
'user_id': [0, 1, 2, 0, 1, 2, 0, 0, 3, 1 ],
'timestamp': [1, 2, 2, 8, 15, 20, 21, 21, 25, 30],
'item_id': [0, 1, 2, 2, 2, 1, 0, 1, 2, 0 ],
})
# Expected Train is all but the last entry for each user.
expected_train_df = pd.DataFrame({
'user_id': [0, 1, 2, 0, 1, 0 ],
'timestamp': [1, 2, 2, 8, 15, 21],
'item_id': [0, 1, 2, 2, 2, 1 ],
})
# Test is the last entry for each user.
expected_test_df = pd.DataFrame({
'user_id': [2, 0, 3, 1 ],
'timestamp': [20, 21, 25, 30],
'item_id': [1, 0, 2, 0 ],
})
# pyformat: enable
# pylint: enable=bad-whitespace
train_df, test_df = DatasetsBuilder.leave_last_one_out_split(
input_df, [0, 1, 2, 3])
pd.util.testing.assert_frame_equal(expected_train_df,
train_df.reset_index(drop=True))
pd.util.testing.assert_frame_equal(expected_test_df,
test_df.reset_index(drop=True))
def test_leave_last_one_out_split_with_a_user_not_in_the_test(self):
# pylint: disable=bad-whitespace
# pyformat: disable
input_df = pd.DataFrame({
'user_id': [0, 1, 2, 0, 1, 2, 0, 0, 1 ],
'timestamp': [1, 2, 2, 8, 15, 20, 21, 21, 30],
'item_id': [0, 1, 2, 2, 2, 1, 0, 1, 0 ],
})
# Expected Train is all but the last entry for each user.
expected_train_df = pd.DataFrame({
'user_id': [0, 1, 2, 0, 1, 2, 0 ],
'timestamp': [1, 2, 2, 8, 15, 20, 21],
'item_id': [0, 1, 2, 2, 2, 1, 1 ],
})
# Test is the last entry for each user.
expected_test_df = pd.DataFrame({
'user_id': [0, 1 ],
'timestamp': [21, 30],
'item_id': [0, 0 ],
})
# pyformat: enable
# pylint: enable=bad-whitespace
train_df, test_df = DatasetsBuilder.leave_last_one_out_split(
input_df, [0, 1])
pd.util.testing.assert_frame_equal(expected_train_df,
train_df.reset_index(drop=True))
pd.util.testing.assert_frame_equal(expected_test_df,
test_df.reset_index(drop=True))
def test_make_datasets_from_reviews_dataframe(self):
"""Tests making Datasets from a DataFrame of reviews.
This is a fairly complex integration test that splits a dataset represented
as a DataFrame of reviews (contains columns: ['user_id', 'timestamp',
'user_rating', 'is_first', 'time_delta']) into a time separated train and
eval set.
"""
# pylint: disable=bad-whitespace
# pyformat: disable
all_items_df = pd.DataFrame({
'user_id': [10,11,12,10,11,12,10,10,14,11,13,13,15,15,16,16],
'timestamp': [1, 2, 2, 8, 15,20,21,21,25,30,33,34,34,36,38,40],
'item_id': [10,11,12,12,12,11,10,11,12,10,12,10,12,11,10,11],
'user_rating': [1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 5, 1, 3, 1, 1, 4 ],
})
# Note that Pandas' `assert_frame_equal` is sensitive to the order of
# columns, and our ordering is non-deterministic, so we sort the DataFrame
# columns explicitly for the expected-columns and actual-columns to make
# sure the names and their contents are equal; there isn't a simple default
# option for this in Pandas testing library.
expected_columns = sorted(['user_id', 'timestamp', 'item_id', 'user_rating',
'is_first', 'time_delta'])
expected_train_df = pd.DataFrame({
'user_id': [0, 1, 2, 0, 1, 0, 4, 1, 3, 3, 5, 6, 6 ],
'timestamp': [1, 2, 2, 8, 15,21,25,30,33,34,34,38,40],
'item_id': [0, 1, 2, 2, 2, 1, 2, 0, 2, 0, 2, 0, 1 ],
'user_rating': [1.,2.,3.,4.,5.,3.,4.,5.,5.,1.,3.,1.,4.],
'is_first': [1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0 ],
'time_delta': [0, 0, 0, 7,13, 13,0, 15,0, 1, 0, 0, 2 ],
}).reindex(expected_columns, axis=1)
expected_eval_df = pd.DataFrame({
'user_id': [2, 0, 5 ],
'timestamp': [20,21,36],
'item_id': [1, 0, 1 ],
'user_rating': [1.,2.,1.],
'is_first': [0, 0, 0 ],
'time_delta': [18,13, 2 ],
}).reindex(expected_columns, axis=1)
# pyformat: enable
# pylint: enable=bad-whitespace
rng = random.PRNGKey(7) # Results in picking test users: 2,5,0.
with self.assertRaises(ValueError):
datasets = DatasetsBuilder.make_datasets_from_reviews_dataframe(
all_items_df, 100, rng)
test_set_size = 3
datasets = DatasetsBuilder.make_datasets_from_reviews_dataframe(
all_items_df, test_set_size, rng)
self.assertLen(expected_train_df.user_id, datasets.num_train)
self.assertLen(expected_eval_df.user_id, datasets.num_eval)
# The user and item IDs lists relate each original ID to the corresponding
# "Dense" integer id (which is the position in the list).
self.assertSequenceEqual([10, 11, 12, 13, 14, 15, 16], datasets.users)
self.assertSequenceEqual([10, 11, 12], datasets.items)
actual_train = tfds.as_dataframe(datasets.train)
actual_train = actual_train.reindex(sorted(actual_train.columns), axis=1)
pd.util.testing.assert_frame_equal(expected_train_df, actual_train)
actual_eval = tfds.as_dataframe(datasets.eval)
actual_eval = actual_eval.reindex(sorted(actual_eval.columns), axis=1)
pd.util.testing.assert_frame_equal(expected_eval_df, actual_eval)
def test_make_datasets_from_string_id_reviews_dataframe(self):
"""Test on string-ID'd reviews DataFrame into a recommendation Dataset."""
# pylint: disable=bad-whitespace
# pyformat: disable
all_items_df = pd.DataFrame({
'user_id': ['b','a','c','a'],
'timestamp': [ 1, 2, 2, 8 ],
'item_id': ['x','z','y','y'],
'user_rating': [ 1., 2., 3., 4.],
})
expected_columns = ['user_id', 'timestamp', 'item_id', 'user_rating',
'is_first', 'time_delta']
expected_train_df = pd.DataFrame({
'user_id': [ 0, 2, ],
'timestamp': [ 2, 2, ],
'item_id': [ 2, 1, ],
'user_rating': [ 2., 3. ],
'is_first': [ 1, 1, ],
'time_delta': [ 0, 0, ],
}).reindex(sorted(expected_columns), axis=1)
expected_eval_df = pd.DataFrame({
'user_id': [ 1, 0 ],
'timestamp': [ 1, 8 ],
'item_id': [ 0, 1 ],
'user_rating': [ 1., 4.],
'is_first': [ 1, 0 ],
'time_delta': [ 0, 6 ],
}).reindex(sorted(expected_columns), axis=1)
# pyformat: enable
# pylint: enable=bad-whitespace
rng = random.PRNGKey(7) # Results in picking test users: 0, 1 = a, b
test_set_size = 2
datasets = DatasetsBuilder.make_datasets_from_reviews_dataframe(
all_items_df, test_set_size, rng)
self.assertLen(expected_train_df.user_id, datasets.num_train)
self.assertLen(expected_eval_df.user_id, datasets.num_eval)
# The user and item IDs lists relate each original ID to the corresponding
# "Dense" integer id (which is the position in the list).
self.assertSequenceEqual(['a', 'b', 'c'], datasets.users)
self.assertSequenceEqual(['x', 'y', 'z'], datasets.items)
actual_train = tfds.as_dataframe(datasets.train)
actual_train = actual_train.reindex(sorted(actual_train.columns), axis=1)
pd.util.testing.assert_frame_equal(expected_train_df, actual_train)
actual_eval = tfds.as_dataframe(datasets.eval)
actual_eval = actual_eval.reindex(sorted(actual_eval.columns), axis=1)
pd.util.testing.assert_frame_equal(expected_eval_df, actual_eval)
if __name__ == '__main__':
absltest.main()
|
|
#!/usr/bin/env python
r"""Gathering and processing the data required for predicting
daily temperatures from temperature, atmospheric pressure
and hygrometry time series.
Instead of gathering all daily time series into a single large
array, careful consideration is given to the possibility of
duplicated rows, missing measurements, etc.
This is accomplished by massaging the data into a multi-indexed
dataframe (in the case at hand a two-level index accessed by
specifying a day and a timestamp).
Indeed, it turns out that for the so-called Jena dataset there are
287 timestamps on '2010-01-07', instead of the expected 144
(on most days measurements are recorded every 10 minutes).
As another illustration, '2016-10-25' comes with only 64 timestamps;
for some unaccounted reason, measurements range only from 12:00:00 A.M.
to 10:30:00 A.M. on that day.
A related issue addressed in the code herewith is that of
calendar gaps. These would be overlooked by simply aggregating
all time series into an array, and they would undermine the goal
of making forecasts one or more days ahead.
"""
from __future__ import print_function
from builtins import enumerate, map, range, zip
import datetime
from itertools import permutations, product
import operator
from os import devnull, getcwd, path
import random
import shlex
import signal
import six
import subprocess
import sys
import tarfile
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
import warnings
import zipfile
import numpy as np
import pandas as pd
from sortedcontainers import SortedSet
import wget
__author__ = 'Gregory Giecold'
__copyright__ = 'Copyright 2017-2022 Gregory Giecold and contributors'
__credit__ = 'Gregory Giecold'
__status__ = 'beta'
__version__ = '0.1.0'
__all__ = ['data_generator', 'date_parser', 'fill_time', 'normalize',
'resample', 'store_data', 'timeout_handler',
'to_batchsize', 'to_dataframe', 'to_timedelta']
class TimeoutException(Exception):
pass
def timeout_handler(signalnum, frame):
raise TimeoutException
def to_timedelta(s, fmt='%H:%M:%S'):
try:
assert isinstance(s, six.string_types)
except AssertionError:
raise
return datetime.datetime.strptime(s, fmt) -\
datetime.datetime.strptime('00:00:00', fmt)
def store_data(url, odir=path.join(path.dirname(getcwd()), 'data')):
try:
assert path.isdir(odir)
except AssertionError:
raise ValueError('Incorrect directory provided\n')
fname = path.split(url)[-1]
try:
if fname.endswith('.gz'):
with tarfile.open(wget.download(url, out=odir)) as th:
th.extractall(odir)
elif fname.endswith('.zip'):
with zipfile.ZipFile(wget.download(url, out=odir)) as zh:
zh.extractall(odir)
else:
res = urlopen(url)
chunk_size = 64 * 1024
with open(path.join(odir, fname), 'wb') as fh:
while True:
chunk = res.read(chunk_size)
if not chunk:
break
fh.write(chunk)
except (tarfile.ReadError, zipfile.BadZipfile):
sys.exit(1)
except (tarfile.ExtractError, zipfile.error):
sys.exit(1)
if fname.endswith(('.gz', '.zip')):
cmd = 'rm {}'.format(fname)
DEVNULL = open(devnull, 'wb')
subprocess.Popen(shlex.split(cmd), stdout=DEVNULL,
stderr=subprocess.PIPE, cwd=odir)
# str.rstrip strips characters, not suffixes, so remove the archive
# extensions explicitly before joining the path.
for ext in ('.gz', '.tar', '.zip'):
    if fname.endswith(ext):
        fname = fname[:-len(ext)]
return path.join(odir, fname)
def date_parser(s):
try:
if isinstance(s, pd.Timestamp):
s = s.date().strftime('%d.%m.%Y')
elif isinstance(s, datetime.datetime):
s = s.strftime('%d.%m.%Y')
elif isinstance(s, datetime.date):
s = '.'.join(map(str, (s.day, s.month, s.year)))
assert isinstance(s, six.string_types)
except AssertionError:
raise
separators = ('.', '-', '/', ':', '')
for sep in separators:
formats = map(
lambda x: sep.join(x),
permutations(('%d', '%m', '%Y'), 3)
)
for fmt in formats:
try:
return datetime.datetime.strptime(s, fmt)
except ValueError:
pass
raise ValueError('Invalid date format\n')
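# Illustrative sketch (not part of the original module) of how `date_parser`
# behaves: several separators and day/month/year orderings are accepted and
# normalized to a `datetime.datetime`. Ambiguous strings resolve to the first
# format that happens to match.
def _example_date_parser():
    assert date_parser('07.01.2010') == datetime.datetime(2010, 1, 7)
    assert date_parser(datetime.date(2010, 1, 7)) == datetime.datetime(2010, 1, 7)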
def to_dataframe(fname, sep=None):
try:
assert path.isfile(fname)
except AssertionError:
raise ValueError('No such file\n')
try:
assert (sep is None) or isinstance(sep, six.string_types)
except AssertionError:
raise ValueError('Invalid separator provided\n')
columns = pd.read_table(fname, sep, header='infer', nrows=0)
columns = columns.columns.tolist()
for elt in ('Date Time', 'Date time', 'Datetime'):
try:
columns.remove(elt)
columns.remove(elt.lower())
except ValueError:
pass
if sep is None:
sep = r'\s+'
else:
sep = r'[\s' + sep + ']'
df = pd.read_table(
fname, sep, header=None, skiprows=1, engine='python',
names = ['Date', 'Time'] + columns,
converters={'Time': lambda x: datetime.time(*map(int, x.split(':'))),
'Date': lambda x: date_parser(x)},
index_col=('Date', 'Time'),
infer_datetime_format=True,
na_filter=True, skip_blank_lines=True,
dayfirst=False, compression=None,
comment='#', error_bad_lines=True
)
df.drop_duplicates(inplace=True)
df.index = pd.MultiIndex.from_tuples(
list(map(lambda tpl: (pd.to_datetime(tpl[0]), tpl[1]), df.index)),
names=['Date', 'Time']
)
df.sort_index(inplace=True)
print(df.head(), '\n\n')
sys.stdout.flush()
days = df.index.levels[0]
warnings.simplefilter('always', UserWarning)
for current_day, next_day in zip(days[:-1], days[1:]):
try:
delta = next_day - current_day
assert delta.days == 1
except AssertionError:
fmt = '%Y-%m-%d'
current_day = date_parser(current_day).strftime(fmt)
next_day = date_parser(next_day).strftime(fmt)
msg = "\n{} and {} ".format(current_day, next_day)
msg += "are consecutive in the first level of the multi-index "
msg += "but exhibit a calendar gap; this might have implications "
msg += "for your forecasts and training your models.\n\n"
warnings.warn(msg, UserWarning)
continue
return df
def normalize(df, training_days, with_mean=True, with_std=True):
try:
assert isinstance(df, pd.DataFrame)
assert isinstance(training_days, int) and training_days > 0
except AssertionError:
raise
training_days = min(training_days, df.index.levshape[0])
days = df.index.levels[0]
tmp = df.loc[(days[:training_days], slice(None)), :]
mean = tmp.mean(axis=0) if with_mean else 0.0
std = tmp.std(axis=0) if with_std else 1.0
df -= mean
df /= std
return df
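# Illustrative sketch (not part of the original module) of `normalize` on a
# tiny two-day, two-timestamp multi-indexed frame: only the first
# `training_days` day(s) contribute to the mean and standard deviation that
# are then applied to the whole frame.
def _example_normalize():
    index = pd.MultiIndex.from_product(
        [pd.to_datetime(['2010-01-01', '2010-01-02']),
         [datetime.time(0, 0), datetime.time(1, 0)]],
        names=['Date', 'Time'])
    df = pd.DataFrame({'T (degC)': [1.0, 2.0, 3.0, 4.0]}, index=index)
    return normalize(df, training_days=1)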
def data_generator(df, target_feature='T (degC)',
begin_idx=None, end_idx=None,
lookahead=1, lookback=5, sampling_rate='1h',
batch_size=128, shuffle=True, timeout=False):
"""
Parameters
----------
df: type pandas.DataFrame
A multi-index structure is assumed, i.e. a hierarchy of
rows with the first level corresponding to a date
and the second level to a timestamp.
target_feature: type six.string_types (default: 'T (degC)')
Refers to a column/feature name in 'df' for which
lookahead targets should be extracted.
begin_idx: type int (default: None)
Denotes an index in the 'df' dataframe corresponding to the earliest
date from which to begin drawing samples.
end_idx: type int (default: None)
Denotes an index in the 'df' dataframe corresponding to the latest
day from which to draw.
lookahead: type int (default: 1)
How many day(s) in the future the forecast target should be.
lookback: type int (default: 5)
How many days back to gather input data (one sample).
sampling_rate: type six.string_types (default: '1h')
Over which period to average data points.
batch_size: type int (default: 128)
Number of samples per batch.
shuffle: type bool (default: True)
Whether to shuffle the days or fetch them in order.
timeout: type bool (default: False)
Used to specify how long to wait for a batch of samples
to be processed. This flag must be set to False if the
generator is forked across multiple processes: only the main
thread can set or invoke signal handlers, and the timeout
notification here is implemented with an alarm signal handler.
Returns
-------
tuple of ('samples', 'targets'), where samples correspond to one
batch of input data ('batch_size' of them, each sample corresponding
to 'lookback' full days) and where 'targets' denotes an array of
target temperatures.
"""
begin_idx = 0 if begin_idx is None else begin_idx
if end_idx is None:
end_idx = df.index.levshape[0] - lookahead
try:
assert isinstance(target_feature, six.string_types)
assert isinstance(begin_idx, int)
assert isinstance(end_idx, int)
assert begin_idx < df.index.levshape[0]
assert begin_idx <= end_idx
except AssertionError:
raise
days = pd.to_datetime(df.index.levels[0])
idx = begin_idx + lookback
while True:
if shuffle:
day_indices = np.random.randint(idx, end_idx, batch_size)
else:
if idx + batch_size >= end_idx:
idx = begin_idx + lookback
day_indices = np.arange(idx, min(idx + batch_size, end_idx))
idx += day_indices.size
samples, targets = list(), list()
for day_idx in day_indices:
past_day = days[day_idx - lookback + 1]
current_day = days[day_idx]
next_day = days[day_idx + lookahead]
delta = next_day - current_day
if delta.days != lookahead:
# A calendar gap: the would-be target is not exactly `lookahead`
# day(s) ahead, so skip this day rather than emit a misaligned pair.
continue
sample = resample(df, sampling_rate, current_day, past_day)
tmp = resample(df, sampling_rate, next_day)
if timeout:
signal.signal(signal.SIGALRM, timeout_handler)
while True:
if timeout:
signal.alarm(batch_size // 5)
try:
time_idx, timestamp = random.choice(list(enumerate(tmp.index)))
timestamp = timestamp.time().isoformat()
try:
key = current_day.date().isoformat() + ' ' + timestamp
sample.loc[key, :]
break
except KeyError:
pass
except TimeoutException:
continue
else:
if timeout:
signal.alarm(0)
past_datetime = ' '.join([past_day.date().isoformat(),
timestamp])
current_datetime = ' '.join([current_day.date().isoformat(),
timestamp])
sample = fill_time(sample, sampling_rate)
sample = sample.loc[slice(past_datetime, current_datetime), :].values
target = tmp.loc[tmp.index[time_idx], target_feature]
samples.append(sample)
targets.append(target)
samples = np.asarray(samples)
targets = np.asarray(targets)
samples, targets = to_batchsize(samples, targets, batch_size)
yield samples, targets
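# Hedged usage sketch (not executed at import time): given a multi-indexed
# frame `df` as produced by `to_dataframe` and `normalize` above, the
# generator yields an endless stream of fixed-size batches.
def _example_data_generator(df):
    gen = data_generator(df, target_feature='T (degC)', lookback=5,
                         lookahead=1, batch_size=32, shuffle=True)
    samples, targets = next(gen)
    # `samples` holds 32 windows of hourly measurements, each spanning
    # `lookback` days; `targets` holds the 32 temperatures measured
    # `lookahead` day(s) later.
    return samples, targets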
def resample(df, sampling_rate, current_day, past_day=None):
if past_day is not None:
sample = df.loc[(slice(past_day, current_day), slice(None)), :]
newindex = map(lambda tpl: operator.add(*tpl),
zip(sample.index.get_level_values('Date'),
map(to_timedelta, sample.index.get_level_values('Time'))
)
)
sample.index = list(newindex)
sample = sample.resample(sampling_rate).mean()
else:
sample = df.xs(current_day, level='Date')
sample.index = pd.to_datetime(sample.index)
sample = sample.resample(sampling_rate).mean()
return sample
def fill_time(df, sampling_rate='1h'):
r"""This procedure ensures that if a batch of 'lookback' days
were to feature a day with fewer measurements than most
(say, only one afternoon of measurements, instead of the usual
full range - from 00:00:00 till 23:50:00), the batch size
will still share a common value across all batches. This is
simply achieved by filling in and interpolating missing
time measurements.
The possibility of a calendar gap within the measurements
held in dataframe 'df' is properly accounted for: we do not
fill in the intervening day(s) and timestamps.
"""
try:
assert isinstance(df.index, pd.DatetimeIndex)
except AssertionError:
raise TypeError("Provide a dataframe indexed by a DatetimeIndex; "
"multi-indexed dataframes are not supported in "
"the present version.\n")
try:
assert sampling_rate in ('1h',)
except AssertionError:
raise ValueError('No other sampling rate supported for now.\n')
# This could be easily addressed if the need to experiment
# with different resampling formats were to arise.
if not df.index.is_monotonic:
df.sort_index(inplace=True)
earliest_day = df.index[0].date().isoformat()
latest_day = df.index[-1].date().isoformat()
gap_pairs = list()
days = SortedSet(map(lambda x: x.date(), df.index))
for current_day, next_day in zip(days[:-1], days[1:]):
delta = next_day - current_day
if delta.days > 1:
gap_pairs.append((current_day, next_day))
gap_pairs.reverse()
# Would need to expand this if more options were introduced
# for the choice of resampling schedule.
if sampling_rate == '1h':
earliest_time = '00:00:00'
latest_time = '23:00:00'
if not gap_pairs:
idx = pd.date_range(' '.join([earliest_day, earliest_time]),
' '.join([latest_day, latest_time]), freq=sampling_rate)
else:
previous_beg_gap, previous_end_gap = gap_pairs.pop()
idx = pd.date_range(' '.join([earliest_day, earliest_time]),
' '.join([previous_beg_gap.isoformat(), latest_time]),
freq=sampling_rate)
while gap_pairs:
beg_gap, end_gap = gap_pairs.pop()
idx = idx.append(pd.date_range(' '.join([previous_end_gap.isoformat(), earliest_time]),
' '.join([beg_gap.isoformat(), latest_time]),
freq=sampling_rate))
previous_end_gap = end_gap
idx = idx.append(pd.date_range(' '.join([previous_end_gap.isoformat(), earliest_time]),
' '.join([latest_day, latest_time]),
freq=sampling_rate))
df = df.reindex(idx, fill_value=np.nan, copy=False)
df.ffill(axis=0, inplace=True)
df.bfill(axis=0, inplace=True)
return df
def to_batchsize(samples, targets, batch_size):
r"""This procedure expands samples and targets to the required
size, i.e. 'batch_size'.
Indeed, an array 'samples' could arise whose first dimension
has fewer than the required number of samples because
our data_generator does not create a ('sample', 'target') pair
when there exists a calendar gap of more than one 'lookahead'
day(s) between the day on which 'target' was measured and the
latest day recorded in 'sample'.
"""
try:
assert isinstance(batch_size, int) and batch_size > 0
assert isinstance(samples, np.ndarray)
assert isinstance(targets, np.ndarray)
assert targets.ndim == 1
targets = targets.reshape(targets.size)
assert samples.shape[0] == targets.size
except AssertionError:
raise
if targets.size < batch_size:
repeats = [1] * targets.size
i = -1
diff = batch_size - targets.size
while diff:
repeats[i % targets.size] += 1
i -= 1
diff -= 1
samples = np.repeat(samples, repeats, axis=0)
targets = np.repeat(targets, repeats)
elif targets.size > batch_size:
samples = samples[:batch_size]
targets = targets[:batch_size]
reshuffle_idx = np.random.permutation(batch_size)
samples = samples[reshuffle_idx]
targets = targets[reshuffle_idx]
return samples, targets
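# Illustrative sketch (not part of the original module) of `to_batchsize`:
# three samples are repeated (and reshuffled) to fill a batch of five.
def _example_to_batchsize():
    samples = np.arange(24, dtype=float).reshape(3, 2, 4)
    targets = np.array([0.1, 0.2, 0.3])
    padded_samples, padded_targets = to_batchsize(samples, targets, batch_size=5)
    assert padded_samples.shape == (5, 2, 4) and padded_targets.shape == (5,)
    return padded_samples, padded_targets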
|
|
import os
import torch
import torchvision.datasets as datasets
from torch.utils.data.distributed import DistributedSampler
from torch.utils.data.sampler import RandomSampler
from torch.utils.data import Subset
from torch._utils import _accumulate
from utils.regime import Regime
from utils.dataset import IndexedFileDataset
from preprocess import get_transform
from itertools import chain
from copy import deepcopy
import warnings
warnings.filterwarnings("ignore", "(Possibly )?corrupt EXIF data", UserWarning)
def get_dataset(name, split='train', transform=None,
target_transform=None, download=True, datasets_path='~/Datasets'):
train = (split == 'train')
root = os.path.join(os.path.expanduser(datasets_path), name)
if name == 'cifar10':
return datasets.CIFAR10(root=root,
train=train,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'cifar100':
return datasets.CIFAR100(root=root,
train=train,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'mnist':
return datasets.MNIST(root=root,
train=train,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'stl10':
return datasets.STL10(root=root,
split=split,
transform=transform,
target_transform=target_transform,
download=download)
elif name == 'imagenet':
if train:
root = os.path.join(root, 'train')
else:
root = os.path.join(root, 'val')
return datasets.ImageFolder(root=root,
transform=transform,
target_transform=target_transform)
elif name == 'imagenet_tar':
if train:
root = os.path.join(root, 'imagenet_train.tar')
else:
root = os.path.join(root, 'imagenet_validation.tar')
return IndexedFileDataset(root, extract_target_fn=(
lambda fname: fname.split('/')[0]),
transform=transform,
target_transform=target_transform)
_DATA_ARGS = {'name', 'split', 'transform',
'target_transform', 'download', 'datasets_path'}
_DATALOADER_ARGS = {'batch_size', 'shuffle', 'sampler', 'batch_sampler',
'num_workers', 'collate_fn', 'pin_memory', 'drop_last',
'timeout', 'worker_init_fn'}
_TRANSFORM_ARGS = {'transform_name', 'input_size', 'scale_size', 'normalize', 'augment',
'cutout', 'duplicates', 'num_crops', 'autoaugment'}
_OTHER_ARGS = {'distributed'}
class DataRegime(object):
def __init__(self, regime, defaults={}):
self.regime = Regime(regime, deepcopy(defaults))
self.epoch = 0
self.steps = None
self.get_loader(True)
def get_setting(self):
setting = self.regime.setting
loader_setting = {k: v for k,
v in setting.items() if k in _DATALOADER_ARGS}
data_setting = {k: v for k, v in setting.items() if k in _DATA_ARGS}
transform_setting = {
k: v for k, v in setting.items() if k in _TRANSFORM_ARGS}
other_setting = {k: v for k, v in setting.items() if k in _OTHER_ARGS}
transform_setting.setdefault('transform_name', data_setting['name'])
return {'data': data_setting, 'loader': loader_setting,
'transform': transform_setting, 'other': other_setting}
def get(self, key, default=None):
return self.regime.setting.get(key, default)
def get_loader(self, force_update=False, override_settings=None, subset_indices=None):
if force_update or self.regime.update(self.epoch, self.steps):
setting = self.get_setting()
if override_settings is not None:
setting.update(override_settings)
self._transform = get_transform(**setting['transform'])
setting['data'].setdefault('transform', self._transform)
self._data = get_dataset(**setting['data'])
if subset_indices is not None:
self._data = Subset(self._data, subset_indices)
if setting['other'].get('distributed', False):
setting['loader']['sampler'] = DistributedSampler(self._data)
setting['loader']['shuffle'] = None
# pin-memory currently broken for distributed
setting['loader']['pin_memory'] = False
self._sampler = setting['loader'].get('sampler', None)
self._loader = torch.utils.data.DataLoader(
self._data, **setting['loader'])
return self._loader
def set_epoch(self, epoch):
self.epoch = epoch
if self._sampler is not None and hasattr(self._sampler, 'set_epoch'):
self._sampler.set_epoch(epoch)
def __len__(self):
return len(self._data)
def __repr__(self):
return str(self.regime)
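# Hedged usage sketch (not executed on import), analogous to the __main__
# block at the bottom of this file but pointed at CIFAR-10: a `None` regime
# means the defaults are used for every epoch. The exact defaults here are
# illustrative assumptions, not a prescribed configuration.
def _example_data_regime():
    data = DataRegime(None, defaults={'name': 'cifar10', 'split': 'train',
                                      'batch_size': 64, 'shuffle': True,
                                      'num_workers': 2})
    data.set_epoch(0)
    inputs, targets = next(iter(data.get_loader()))
    return inputs.shape, targets.shape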
class SampledDataLoader(object):
def __init__(self, dl_list):
self.dl_list = dl_list
self.epoch = 0
def generate_order(self):
order = [[idx]*len(dl) for idx, dl in enumerate(self.dl_list)]
order = list(chain(*order))
g = torch.Generator()
g.manual_seed(self.epoch)
return torch.tensor(order)[torch.randperm(len(order), generator=g)].tolist()
def __len__(self):
return sum([len(dl) for dl in self.dl_list])
def __iter__(self):
order = self.generate_order()
iterators = [iter(dl) for dl in self.dl_list]
for idx in order:
yield next(iterators[idx])
return
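# Illustrative sketch (not part of the training flow): SampledDataLoader
# interleaves batches from several loaders in a deterministic, epoch-seeded
# shuffled order.
def _example_sampled_dataloader():
    from torch.utils.data import DataLoader, TensorDataset
    loader_a = DataLoader(TensorDataset(torch.zeros(8, 3)), batch_size=2)
    loader_b = DataLoader(TensorDataset(torch.ones(4, 3)), batch_size=2)
    mixed = SampledDataLoader([loader_a, loader_b])
    mixed.epoch = 0
    # Yields four all-zero batches and two all-one batches, interleaved.
    return [batch[0].mean().item() for batch in mixed]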
class SampledDataRegime(DataRegime):
def __init__(self, data_regime_list, probs, split_data=True):
self.probs = probs
self.data_regime_list = data_regime_list
self.split_data = split_data
self.epoch = 0
def get_setting(self):
return [data_regime.get_setting() for data_regime in self.data_regime_list]
def get(self, key, default=None):
return [data_regime.get(key, default) for data_regime in self.data_regime_list]
def get_loader(self, force_update=False):
settings = self.get_setting()
if self.split_data:
dset_sizes = [len(get_dataset(**s['data'])) for s in settings]
assert len(set(dset_sizes)) == 1, \
"all datasets should be same size"
dset_size = dset_sizes[0]
lengths = [int(prob * dset_size) for prob in self.probs]
lengths[-1] = dset_size - sum(lengths[:-1])
indices = torch.randperm(dset_size).tolist()
indices_split = [indices[offset - length:offset]
for offset, length in zip(_accumulate(lengths), lengths)]
loaders = [data_regime.get_loader(force_update=True, subset_indices=indices_split[i])
for i, data_regime in enumerate(self.data_regime_list)]
else:
loaders = [data_regime.get_loader(
force_update=force_update) for data_regime in self.data_regime_list]
self._loader = SampledDataLoader(loaders)
self._loader.epoch = self.epoch
return self._loader
def set_epoch(self, epoch):
self.epoch = epoch
if hasattr(self, '_loader'):
self._loader.epoch = epoch
for data_regime in self.data_regime_list:
if data_regime._sampler is not None and hasattr(data_regime._sampler, 'set_epoch'):
data_regime._sampler.set_epoch(epoch)
def __len__(self):
return sum([len(data_regime._data)
for data_regime in self.data_regime_list])
def __repr__(self):
print_str = 'Sampled Data Regime:\n'
for p, config in zip(self.probs, self.data_regime_list):
print_str += 'w.p. %s: %s\n' % (p, config)
return print_str
if __name__ == '__main__':
reg1 = DataRegime(None, {'name': 'imagenet', 'batch_size': 16})
reg2 = DataRegime(None, {'name': 'imagenet', 'batch_size': 32})
reg1.set_epoch(0)
reg2.set_epoch(0)
mreg = SampledDataRegime([reg1, reg2], probs=[0.5, 0.5])
for x, _ in mreg.get_loader():
print(x.shape)
|
|
"""Compressed Block Sparse Row matrix format"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['bsr_matrix', 'isspmatrix_bsr']
from warnings import warn
import numpy as np
from .data import _data_matrix, _minmax_mixin
from .compressed import _cs_matrix
from .base import isspmatrix, _formats
from .sputils import isshape, getdtype, to_native, upcast, get_index_dtype
from . import _sparsetools
from ._sparsetools import (bsr_matvec, bsr_matvecs, csr_matmat_pass1,
bsr_matmat_pass2, bsr_transpose, bsr_sort_indices)
class bsr_matrix(_cs_matrix, _minmax_mixin):
"""Block Sparse Row matrix
This can be instantiated in several ways:
bsr_matrix(D, [blocksize=(R,C)])
where D is a dense matrix or 2-D ndarray.
bsr_matrix(S, [blocksize=(R,C)])
with another sparse matrix S (equivalent to S.tobsr())
bsr_matrix((M, N), [blocksize=(R,C), dtype])
to construct an empty matrix with shape (M, N)
dtype is optional, defaulting to dtype='d'.
bsr_matrix((data, ij), [blocksize=(R,C), shape=(M, N)])
where ``data`` and ``ij`` satisfy ``a[ij[0, k], ij[1, k]] = data[k]``
bsr_matrix((data, indices, indptr), [shape=(M, N)])
is the standard BSR representation where the block column
indices for row i are stored in ``indices[indptr[i]:indptr[i+1]]``
and their corresponding block values are stored in
``data[ indptr[i]: indptr[i+1] ]``. If the shape parameter is not
supplied, the matrix dimensions are inferred from the index arrays.
Attributes
----------
dtype : dtype
Data type of the matrix
shape : 2-tuple
Shape of the matrix
ndim : int
Number of dimensions (this is always 2)
nnz
Number of nonzero elements
data
Data array of the matrix
indices
BSR format index array
indptr
BSR format index pointer array
blocksize
Block size of the matrix
has_sorted_indices
Whether indices are sorted
Notes
-----
Sparse matrices can be used in arithmetic operations: they support
addition, subtraction, multiplication, division, and matrix power.
**Summary of BSR format**
The Block Compressed Row (BSR) format is very similar to the Compressed
Sparse Row (CSR) format. BSR is appropriate for sparse matrices with dense
sub matrices like the last example below. Block matrices often arise in
vector-valued finite element discretizations. In such cases, BSR is
considerably more efficient than CSR and CSC for many sparse arithmetic
operations.
**Blocksize**
The blocksize (R,C) must evenly divide the shape of the matrix (M,N).
That is, R and C must satisfy the relationship ``M % R = 0`` and
``N % C = 0``.
If no blocksize is specified, a simple heuristic is applied to determine
an appropriate blocksize.
Examples
--------
>>> from scipy.sparse import bsr_matrix
>>> bsr_matrix((3, 4), dtype=np.int8).toarray()
array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], dtype=int8)
>>> row = np.array([0, 0, 1, 2, 2, 2])
>>> col = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3 ,4, 5, 6])
>>> bsr_matrix((data, (row, col)), shape=(3, 3)).toarray()
array([[1, 0, 2],
[0, 0, 3],
[4, 5, 6]])
>>> indptr = np.array([0, 2, 3, 6])
>>> indices = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6]).repeat(4).reshape(6, 2, 2)
>>> bsr_matrix((data,indices,indptr), shape=(6, 6)).toarray()
array([[1, 1, 0, 0, 2, 2],
[1, 1, 0, 0, 2, 2],
[0, 0, 0, 0, 3, 3],
[0, 0, 0, 0, 3, 3],
[4, 4, 5, 5, 6, 6],
[4, 4, 5, 5, 6, 6]])
"""
format = 'bsr'
def __init__(self, arg1, shape=None, dtype=None, copy=False, blocksize=None):
_data_matrix.__init__(self)
if isspmatrix(arg1):
if isspmatrix_bsr(arg1) and copy:
arg1 = arg1.copy()
else:
arg1 = arg1.tobsr(blocksize=blocksize)
self._set_self(arg1)
elif isinstance(arg1,tuple):
if isshape(arg1):
# it's a tuple of matrix dimensions (M,N)
self.shape = arg1
M,N = self.shape
# process blocksize
if blocksize is None:
blocksize = (1,1)
else:
if not isshape(blocksize):
raise ValueError('invalid blocksize=%s' % blocksize)
blocksize = tuple(blocksize)
self.data = np.zeros((0,) + blocksize, getdtype(dtype, default=float))
R,C = blocksize
if (M % R) != 0 or (N % C) != 0:
raise ValueError('shape must be multiple of blocksize')
idx_dtype = get_index_dtype(maxval=N//C)
self.indices = np.zeros(0, dtype=idx_dtype)
self.indptr = np.zeros(M//R + 1, dtype=idx_dtype)
elif len(arg1) == 2:
# (data,(row,col)) format
from .coo import coo_matrix
self._set_self(coo_matrix(arg1, dtype=dtype).tobsr(blocksize=blocksize))
elif len(arg1) == 3:
# (data,indices,indptr) format
(data, indices, indptr) = arg1
idx_dtype = get_index_dtype((indices, indptr), check_contents=True)
self.indices = np.array(indices, copy=copy, dtype=idx_dtype)
self.indptr = np.array(indptr, copy=copy, dtype=idx_dtype)
self.data = np.array(data, copy=copy, dtype=getdtype(dtype, data))
else:
raise ValueError('unrecognized bsr_matrix constructor usage')
else:
# must be dense
try:
arg1 = np.asarray(arg1)
except:
raise ValueError("unrecognized form for"
" %s_matrix constructor" % self.format)
from .coo import coo_matrix
arg1 = coo_matrix(arg1, dtype=dtype).tobsr(blocksize=blocksize)
self._set_self(arg1)
if shape is not None:
self.shape = shape # spmatrix will check for errors
else:
if self.shape is None:
# shape not already set, try to infer dimensions
try:
M = len(self.indptr) - 1
N = self.indices.max() + 1
except:
raise ValueError('unable to infer matrix dimensions')
else:
R,C = self.blocksize
self.shape = (M*R,N*C)
if self.shape is None:
if shape is None:
# TODO infer shape here
raise ValueError('need to infer shape')
else:
self.shape = shape
if dtype is not None:
self.data = self.data.astype(dtype)
self.check_format(full_check=False)
def check_format(self, full_check=True):
"""check whether the matrix format is valid
*Parameters*:
full_check:
True - rigorous check, O(N) operations : default
False - basic check, O(1) operations
"""
M,N = self.shape
R,C = self.blocksize
# index arrays should have integer data types
if self.indptr.dtype.kind != 'i':
warn("indptr array has non-integer dtype (%s)"
% self.indptr.dtype.name)
if self.indices.dtype.kind != 'i':
warn("indices array has non-integer dtype (%s)"
% self.indices.dtype.name)
idx_dtype = get_index_dtype((self.indices, self.indptr))
self.indptr = np.asarray(self.indptr, dtype=idx_dtype)
self.indices = np.asarray(self.indices, dtype=idx_dtype)
self.data = to_native(self.data)
# check array shapes
if self.indices.ndim != 1 or self.indptr.ndim != 1:
raise ValueError("indices, and indptr should be 1-D")
if self.data.ndim != 3:
raise ValueError("data should be 3-D")
# check index pointer
if (len(self.indptr) != M//R + 1):
raise ValueError("index pointer size (%d) should be (%d)" %
(len(self.indptr), M//R + 1))
if (self.indptr[0] != 0):
raise ValueError("index pointer should start with 0")
# check index and data arrays
if (len(self.indices) != len(self.data)):
raise ValueError("indices and data should have the same size")
if (self.indptr[-1] > len(self.indices)):
raise ValueError("Last value of index pointer should be less than "
"the size of index and data arrays")
self.prune()
if full_check:
# check format validity (more expensive)
if self.nnz > 0:
if self.indices.max() >= N//C:
raise ValueError("column index values must be < %d (now max %d)" % (N//C, self.indices.max()))
if self.indices.min() < 0:
raise ValueError("column index values must be >= 0")
if np.diff(self.indptr).min() < 0:
raise ValueError("index pointer values must form a "
"non-decreasing sequence")
# if not self.has_sorted_indices():
# warn('Indices were not in sorted order. Sorting indices.')
# self.sort_indices(check_first=False)
def _get_blocksize(self):
return self.data.shape[1:]
blocksize = property(fget=_get_blocksize)
def getnnz(self):
R,C = self.blocksize
return int(self.indptr[-1] * R * C)
nnz = property(fget=getnnz)
def __repr__(self):
nnz = self.getnnz()
format = self.getformat()
return "<%dx%d sparse matrix of type '%s'\n" \
"\twith %d stored elements (blocksize = %dx%d) in %s format>" % \
(self.shape + (self.dtype.type, nnz) + self.blocksize +
(_formats[format][1],))
def diagonal(self):
"""Returns the main diagonal of the matrix
"""
M,N = self.shape
R,C = self.blocksize
y = np.empty(min(M,N), dtype=upcast(self.dtype))
_sparsetools.bsr_diagonal(M//R, N//C, R, C,
self.indptr, self.indices,
np.ravel(self.data), y)
return y
##########################
# NotImplemented methods #
##########################
def getdata(self,ind):
raise NotImplementedError
def __getitem__(self,key):
raise NotImplementedError
def __setitem__(self,key,val):
raise NotImplementedError
######################
# Arithmetic methods #
######################
def matvec(self, other):
return self * other
def matmat(self, other):
return self * other
def _mul_vector(self, other):
M,N = self.shape
R,C = self.blocksize
result = np.zeros(self.shape[0], dtype=upcast(self.dtype, other.dtype))
bsr_matvec(M//R, N//C, R, C,
self.indptr, self.indices, self.data.ravel(),
other, result)
return result
def _mul_multivector(self,other):
R,C = self.blocksize
M,N = self.shape
n_vecs = other.shape[1] # number of column vectors
result = np.zeros((M,n_vecs), dtype=upcast(self.dtype,other.dtype))
bsr_matvecs(M//R, N//C, n_vecs, R, C,
self.indptr, self.indices, self.data.ravel(),
other.ravel(), result.ravel())
return result
def _mul_sparse_matrix(self, other):
M, K1 = self.shape
K2, N = other.shape
R,n = self.blocksize
# convert to this format
if isspmatrix_bsr(other):
C = other.blocksize[1]
else:
C = 1
from .csr import isspmatrix_csr
if isspmatrix_csr(other) and n == 1:
other = other.tobsr(blocksize=(n,C), copy=False) # lightweight conversion
else:
other = other.tobsr(blocksize=(n,C))
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=(M//R)*(N//C))
indptr = np.empty(self.indptr.shape, dtype=idx_dtype)
csr_matmat_pass1(M//R, N//C,
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
other.indptr.astype(idx_dtype),
other.indices.astype(idx_dtype),
indptr)
bnnz = indptr[-1]
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=bnnz)
indptr = indptr.astype(idx_dtype)
indices = np.empty(bnnz, dtype=idx_dtype)
data = np.empty(R*C*bnnz, dtype=upcast(self.dtype,other.dtype))
bsr_matmat_pass2(M//R, N//C, R, C, n,
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
np.ravel(self.data),
other.indptr.astype(idx_dtype),
other.indices.astype(idx_dtype),
np.ravel(other.data),
indptr,
indices,
data)
data = data.reshape(-1,R,C)
# TODO eliminate zeros
return bsr_matrix((data,indices,indptr),shape=(M,N),blocksize=(R,C))
######################
# Conversion methods #
######################
def tobsr(self,blocksize=None,copy=False):
if blocksize not in [None, self.blocksize]:
return self.tocsr().tobsr(blocksize=blocksize)
if copy:
return self.copy()
else:
return self
def tocsr(self):
return self.tocoo(copy=False).tocsr()
# TODO make this more efficient
def tocsc(self):
return self.tocoo(copy=False).tocsc()
def tocoo(self,copy=True):
"""Convert this matrix to COOrdinate format.
When copy=False the data array will be shared between
this matrix and the resultant coo_matrix.
"""
M,N = self.shape
R,C = self.blocksize
indptr_diff = np.diff(self.indptr)
if indptr_diff.dtype.itemsize > np.dtype(np.intp).itemsize:
# Check for potential overflow
indptr_diff_limited = indptr_diff.astype(np.intp)
if np.any(indptr_diff_limited != indptr_diff):
raise ValueError("Matrix too big to convert")
indptr_diff = indptr_diff_limited
row = (R * np.arange(M//R)).repeat(indptr_diff)
row = row.repeat(R*C).reshape(-1,R,C)
row += np.tile(np.arange(R).reshape(-1,1), (1,C))
row = row.reshape(-1)
col = (C * self.indices).repeat(R*C).reshape(-1,R,C)
col += np.tile(np.arange(C), (R,1))
col = col.reshape(-1)
data = self.data.reshape(-1)
if copy:
data = data.copy()
from .coo import coo_matrix
return coo_matrix((data,(row,col)), shape=self.shape)
def transpose(self):
R,C = self.blocksize
M,N = self.shape
NBLK = self.nnz//(R*C)
if self.nnz == 0:
return bsr_matrix((N,M), blocksize=(C,R),
dtype=self.dtype)
indptr = np.empty(N//C + 1, dtype=self.indptr.dtype)
indices = np.empty(NBLK, dtype=self.indices.dtype)
data = np.empty((NBLK,C,R), dtype=self.data.dtype)
bsr_transpose(M//R, N//C, R, C,
self.indptr, self.indices, self.data.ravel(),
indptr, indices, data.ravel())
return bsr_matrix((data,indices,indptr), shape=(N,M))
##############################################################
# methods that examine or modify the internal data structure #
##############################################################
def eliminate_zeros(self):
R,C = self.blocksize
M,N = self.shape
mask = (self.data != 0).reshape(-1,R*C).sum(axis=1) # nonzero blocks
nonzero_blocks = mask.nonzero()[0]
if len(nonzero_blocks) == 0:
return # nothing to do
self.data[:len(nonzero_blocks)] = self.data[nonzero_blocks]
# modifies self.indptr and self.indices *in place*
_sparsetools.csr_eliminate_zeros(M//R, N//C, self.indptr,
self.indices, mask)
self.prune()
def sum_duplicates(self):
"""Eliminate duplicate matrix entries by adding them together
This is an *in place* operation
"""
if self.has_canonical_format:
return
self.sort_indices()
R, C = self.blocksize
M, N = self.shape
# port of _sparsetools.csr_sum_duplicates
n_row = M // R
nnz = 0
row_end = 0
for i in range(n_row):
jj = row_end
row_end = self.indptr[i+1]
while jj < row_end:
j = self.indices[jj]
x = self.data[jj]
jj += 1
while jj < row_end and self.indices[jj] == j:
x += self.data[jj]
jj += 1
self.indices[nnz] = j
self.data[nnz] = x
nnz += 1
self.indptr[i+1] = nnz
self.prune() # nnz may have changed
self.has_canonical_format = True
def sort_indices(self):
"""Sort the indices of this matrix *in place*
"""
if self.has_sorted_indices:
return
R,C = self.blocksize
M,N = self.shape
bsr_sort_indices(M//R, N//C, R, C, self.indptr, self.indices, self.data.ravel())
self.has_sorted_indices = True
def prune(self):
""" Remove empty space after all non-zero elements.
"""
R,C = self.blocksize
M,N = self.shape
if len(self.indptr) != M//R + 1:
raise ValueError("index pointer has invalid length")
bnnz = self.indptr[-1]
if len(self.indices) < bnnz:
raise ValueError("indices array has too few elements")
if len(self.data) < bnnz:
raise ValueError("data array has too few elements")
self.data = self.data[:bnnz]
self.indices = self.indices[:bnnz]
# utility functions
def _binopt(self, other, op, in_shape=None, out_shape=None):
"""Apply the binary operation fn to two sparse matrices."""
# Ideally we'd take the GCDs of the blocksize dimensions
# and explode self and other to match.
other = self.__class__(other, blocksize=self.blocksize)
# e.g. bsr_plus_bsr, etc.
fn = getattr(_sparsetools, self.format + op + self.format)
R,C = self.blocksize
max_bnnz = len(self.data) + len(other.data)
idx_dtype = get_index_dtype((self.indptr, self.indices,
other.indptr, other.indices),
maxval=max_bnnz)
indptr = np.empty(self.indptr.shape, dtype=idx_dtype)
indices = np.empty(max_bnnz, dtype=idx_dtype)
bool_ops = ['_ne_', '_lt_', '_gt_', '_le_', '_ge_']
if op in bool_ops:
data = np.empty(R*C*max_bnnz, dtype=np.bool_)
else:
data = np.empty(R*C*max_bnnz, dtype=upcast(self.dtype,other.dtype))
fn(self.shape[0]//R, self.shape[1]//C, R, C,
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
self.data,
other.indptr.astype(idx_dtype),
other.indices.astype(idx_dtype),
np.ravel(other.data),
indptr,
indices,
data)
actual_bnnz = indptr[-1]
indices = indices[:actual_bnnz]
data = data[:R*C*actual_bnnz]
if actual_bnnz < max_bnnz/2:
indices = indices.copy()
data = data.copy()
data = data.reshape(-1,R,C)
return self.__class__((data, indices, indptr), shape=self.shape)
# needed by _data_matrix
def _with_data(self,data,copy=True):
"""Returns a matrix with the same sparsity structure as self,
but with different data. By default the structure arrays
(i.e. .indptr and .indices) are copied.
"""
if copy:
return self.__class__((data,self.indices.copy(),self.indptr.copy()),
shape=self.shape,dtype=data.dtype)
else:
return self.__class__((data,self.indices,self.indptr),
shape=self.shape,dtype=data.dtype)
# # these functions are used by the parent class
# # to remove redundancy between bsc_matrix and bsr_matrix
# def _swap(self,x):
# """swap the members of x if this is a column-oriented matrix
# """
# return (x[0],x[1])
def isspmatrix_bsr(x):
return isinstance(x, bsr_matrix)
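# Illustrative sketch (not part of scipy): the blocksize must evenly divide
# the matrix shape, otherwise construction from a dense array fails.
def _example_blocksize_constraint():
    dense = np.kron(np.eye(3), np.ones((2, 2)))   # 6x6 built from 2x2 blocks
    ok = bsr_matrix(dense, blocksize=(2, 2))      # 6 % 2 == 0: accepted
    assert isspmatrix_bsr(ok) and ok.blocksize == (2, 2)
    try:
        bsr_matrix(dense, blocksize=(4, 4))       # 6 % 4 != 0: rejected
    except ValueError:
        pass
    return ok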
|
|
from featureflow import Node, NotEnoughData
from zounds.core import ArrayWithUnits, IdentityDimension
import numpy as np
from multiprocessing.pool import ThreadPool
from os import cpu_count
class Reservoir(object):
def __init__(self, nsamples, dtype=None):
super(Reservoir, self).__init__()
if not isinstance(nsamples, int):
raise ValueError('nsamples must be an integer')
if nsamples <= 0:
raise ValueError('nsamples must be greater than zero')
self.nsamples = nsamples
self.arr = None
self.indices = set()
self.dtype = dtype
def __len__(self):
return len(self.indices)
def percent_full(self):
return float(len(self)) / self.nsamples
def _init_arr(self, samples):
if self.arr is not None:
return
shape = (self.nsamples,) + samples.shape[1:]
self.arr = np.zeros(shape, dtype=self.dtype or samples.dtype)
try:
self.arr = ArrayWithUnits(
self.arr, (IdentityDimension(),) + samples.dimensions[1:])
except AttributeError:
pass
def add(self, samples, indices=None):
self._init_arr(samples)
if indices is None:
indices = np.random.randint(0, self.nsamples, len(samples))
if len(indices) != len(samples):
raise ValueError(
'number of input samples and indices must match'
' but they were {samples} and {indices} respectively'
.format(samples=len(samples), indices=len(indices)))
self.arr[indices, ...] = samples
self.indices.update(indices)
def get(self):
if len(self.indices) == self.nsamples:
return self.arr
x = self.arr[sorted(self.indices), ...]
return x
def get_batch(self, batch_size):
if batch_size > self.nsamples:
raise ValueError(
'Requested {batch_size} samples, but this instance can provide '
'at maximum {nsamples}'
.format(batch_size=batch_size, nsamples=self.nsamples))
if batch_size > len(self.indices):
raise ValueError(
'Requested {batch_size} samples, but this instance only '
'currently has {n} samples, with a maximum of {nsamples}'
.format(
batch_size=batch_size,
n=len(self.indices),
nsamples=self.nsamples))
# TODO: this would be much more efficient for repeated calls if I
# instead maintained a sorted set
indices = np.random.choice(list(self.indices), batch_size)
return self.arr[indices, ...]
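# Illustrative usage sketch of Reservoir with plain numpy input (no
# ArrayWithUnits dimensions): many samples are funneled into a small,
# fixed-size pool from which random training batches can then be drawn.
def _example_reservoir():
    reservoir = Reservoir(nsamples=10)
    reservoir.add(np.random.normal(0, 1, (100, 3)))
    batch = reservoir.get_batch(min(4, len(reservoir)))
    return batch.shape  # (4, 3) once at least four slots have been filled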
class MultiplexedReservoir(object):
def __init__(self, nsamples, dtype=None):
super(MultiplexedReservoir, self).__init__()
self.dtype = dtype
self.reservoir = None
self.nsamples = nsamples
def _init_dict(self, samples):
if self.reservoir is not None:
return
if self.reservoir is None:
self.reservoir = dict(
(k, Reservoir(self.nsamples, dtype=self.dtype))
for k in samples.keys())
def _check_sample_keys(self, samples):
if set(self.reservoir.keys()) != set(samples.keys()):
raise ValueError(
'samples should have keys {keys}'
.format(keys=list(self.reservoir.keys())))
def add(self, samples):
self._init_dict(samples)
self._check_sample_keys(samples)
indices = None
for k, v in samples.items():
if indices is None:
indices = np.random.randint(0, self.nsamples, len(v))
self.reservoir[k].add(v, indices=indices)
def get(self):
return dict((k, v.get()) for k, v in self.reservoir.items())
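# Illustrative usage sketch of MultiplexedReservoir: each key gets its own
# Reservoir, but the same random slot indices are reused so that rows stay
# aligned across keys.
def _example_multiplexed_reservoir():
    reservoir = MultiplexedReservoir(nsamples=8)
    reservoir.add({
        'audio': np.random.rand(32, 16),
        'labels': np.random.rand(32, 2),
    })
    paired = reservoir.get()
    assert paired['audio'].shape[0] == paired['labels'].shape[0]
    return paired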
class ShuffledSamples(Node):
def __init__(
self,
nsamples=None,
multiplexed=False,
dtype=None,
needs=None):
super(ShuffledSamples, self).__init__(needs=needs)
self.reservoir = MultiplexedReservoir(nsamples, dtype=dtype) \
if multiplexed else Reservoir(nsamples, dtype=dtype)
def _enqueue(self, data, pusher):
self.reservoir.add(data)
def _dequeue(self):
if not self._finalized:
raise NotEnoughData()
return self.reservoir.get()
class InfiniteSampler(Node):
def __init__(
self,
nsamples=None,
multiplexed=False,
dtype=None,
needs=None,
feature_filter=lambda x: x,
parallel=True):
super(InfiniteSampler, self).__init__(needs=needs)
self.parallel = parallel
self.feature_filter = feature_filter
self.multiplexed = multiplexed
self.reservoir = MultiplexedReservoir(nsamples, dtype=dtype) \
if multiplexed else Reservoir(nsamples, dtype=dtype)
def _total_samples(self, cls, feature, _ids):
pool = ThreadPool(cpu_count())
feature_filter = self.feature_filter
def x(_id):
f = feature(_id=_id, persistence=cls)
filtered = feature_filter(f)
return len(filtered)
if self.parallel:
total_samples = sum(pool.imap_unordered(x, _ids))
else:
total_samples = sum(map(x, _ids))
return total_samples
def _update_reservoir(self, _id, cls, feature, total_samples):
# fetch the features from a single document
x = feature(_id=_id, persistence=cls)
x = self.feature_filter(x)
# compute the contribution this sample makes to the dataset at
# large
feature_size = len(x)
ratio = float(feature_size) / total_samples
# determine the appropriate number of samples to contribute to
# the reservoir
nsamples = max(1, int(self.reservoir.nsamples * ratio))
print('Contributing', feature_size, ratio, nsamples)
# select an appropriately-sized and random subset of the feature.
# this will be shuffled again as it is added to the reservoir,
# but this ensures that samples are drawn evenly from the
# duration of the sound
indices = np.random.randint(0, feature_size, nsamples)
self.reservoir.add(x[indices, ...])
return len(indices)
def _process(self, data):
cls, feature = data
# compute the total number of samples in our dataset
_ids = list(cls.database.iter_ids())
total_samples = self._total_samples(cls, feature, _ids)
print('Total samples', total_samples)
while True:
if self.parallel:
pool = ThreadPool(cpu_count())
list(pool.imap_unordered(
lambda _id: self._update_reservoir(
_id, cls, feature, total_samples),
_ids))
else:
for _id in _ids:
self._update_reservoir(_id, cls, feature, total_samples)
yield self.reservoir.get()
class ReservoirSampler(Node):
"""
Use reservoir sampling (http://en.wikipedia.org/wiki/Reservoir_sampling) to
draw a fixed-size set of random samples from a stream of unknown size.
This is useful when the samples can fit into memory, but the stream cannot.
"""
def __init__(self, nsamples=None, wrapper=None, needs=None):
super(ReservoirSampler, self).__init__(needs=needs)
if wrapper:
raise DeprecationWarning('wrapper is no longer used or needed')
self._nsamples = int(nsamples)
self._r = None
self._index = 0
# TODO: What happens if we have filled up all the sample slots and we run
# out of data?
def _enqueue(self, data, pusher):
if self._r is None:
shape = (self._nsamples,) + data.shape[1:]
self._r = np.zeros(shape, dtype=data.dtype)
try:
self._r = ArrayWithUnits(
self._r, (IdentityDimension(),) + data.dimensions[1:])
except AttributeError:
# samples were likely a plain numpy array, and not an
# ArrayWithUnits instance
pass
        diff = 0
        if self._index < self._nsamples:
            # fill phase: copy as much of this batch as fits into the empty
            # reservoir slots
            diff = self._nsamples - self._index
            available = len(data[:diff])
            self._r[self._index: self._index + available] = data[:diff]
            self._index += available
        remaining = len(data[diff:])
        if not remaining:
            return
        # replacement phase: np.random.random_integers is deprecated/removed in
        # modern NumPy; randint's upper bound is exclusive, so self._index + 1
        # preserves the original inclusive range before filtering to valid slots
        indices = np.random.randint(0, self._index + 1, size=remaining)
        indices = indices[indices < self._nsamples]
        self._r[indices, ...] = data[diff:][:len(indices)]
        self._index += remaining
def _dequeue(self):
if not self._finalized:
raise NotEnoughData()
if self._index <= self._nsamples:
arr = np.asarray(self._r[:self._index])
np.random.shuffle(arr)
if isinstance(self._r, ArrayWithUnits):
arr = ArrayWithUnits(arr, self._r.dimensions)
return arr
return self._r
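# A minimal, self-contained sketch of classic reservoir sampling ("Algorithm R",
# see the link in ReservoirSampler's docstring above). It is illustrative only
# and not used by the classes in this module; it shows the same
# fill-then-replace idea that ReservoirSampler._enqueue applies to whole numpy
# batches at once.
def _reservoir_sampling_sketch(stream, nsamples):
    import random
    reservoir = []
    for i, item in enumerate(stream):
        if i < nsamples:
            # fill phase: keep everything until the reservoir is full
            reservoir.append(item)
        else:
            # replacement phase: item survives with probability nsamples / (i + 1)
            j = random.randint(0, i)
            if j < nsamples:
                reservoir[j] = item
    return reservoir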
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `message` package."""
import copy
import aioxmpp
import aioxmpp.forms.xso as forms_xso
import pytest
from spade.message import Message, SPADE_X_METADATA
def test_prepare(message):
aiomsg = message.prepare()
assert aiomsg.to == aioxmpp.JID.fromstr("to@localhost")
assert aiomsg.from_ == aioxmpp.JID.fromstr("sender@localhost")
assert aiomsg.body[None] == "message body"
for data in aiomsg.xep0004_data:
if data.title == SPADE_X_METADATA:
for field in data.fields:
if field.var == "_thread_node":
assert field.values[0] == "thread-id"
else:
assert message.get_metadata(field.var) == field.values[0]
def test_make_reply(message):
reply = message.make_reply()
assert reply.to == aioxmpp.JID.fromstr("sender@localhost")
assert reply.sender == aioxmpp.JID.fromstr("to@localhost")
assert reply.body == "message body"
assert reply.thread == "thread-id"
assert reply.get_metadata("metadata1") == "value1"
assert reply.get_metadata("metadata2") == "value2"
def test_message_from_node_attribute_error():
    with pytest.raises(AttributeError):
Message.from_node(Message())
def test_body_with_languages():
msg = aioxmpp.Message(type_=aioxmpp.MessageType.CHAT)
msg.body["en"] = "Hello World"
msg.body["es"] = "Hola Mundo"
new_msg = Message.from_node(msg)
assert new_msg.body == "Hello World"
def test_message_from_node():
aiomsg = aioxmpp.Message(type_=aioxmpp.MessageType.CHAT)
data = forms_xso.Data(type_=forms_xso.DataType.FORM)
data.fields.append(
forms_xso.Field(
var="performative",
type_=forms_xso.FieldType.TEXT_SINGLE,
values=["request"],
)
)
data.fields.append(
forms_xso.Field(
var="_thread_node",
type_=forms_xso.FieldType.TEXT_SINGLE,
values=["thread-id"],
)
)
data.title = SPADE_X_METADATA
aiomsg.xep0004_data = [data]
msg = Message.from_node(aiomsg)
assert msg.thread == "thread-id"
assert msg.get_metadata("performative") == "request"
assert msg.metadata == {"performative": "request"}
def test_thread_empty():
msg = Message(thread=None)
assert msg.thread is None
assert msg.metadata == {}
aiomsg = msg.prepare()
for data in aiomsg.xep0004_data:
if data.title == SPADE_X_METADATA:
for field in data.fields:
assert field.var != "_thread_node"
def test_equal(message):
assert message == copy.copy(message)
def test_not_equal(message, message2):
assert message != message2
def test_id(message):
assert type(message.id) == int
def test_metadata_is_string():
Message(metadata={"key": "value"})
def test_metadata_is_not_string():
with pytest.raises(TypeError):
Message(metadata={"key": 1000})
def test_metadata_set_string():
msg = Message()
msg.set_metadata("key", "value")
def test_metadata_set_not_string():
msg = Message()
with pytest.raises(TypeError):
msg.set_metadata(1000, "value")
def test_body_is_string():
Message(body="body")
def test_body_is_not_string():
with pytest.raises(TypeError):
Message(body={})
def test_body_set_string():
msg = Message()
msg.body = "body"
def test_body_set_not_string():
msg = Message()
with pytest.raises(TypeError):
msg.body = 1000
def test_body_set_none():
msg = Message()
msg.body = None
def test_to_is_string():
Message(to="agent@fakeserver")
def test_to_is_not_string():
with pytest.raises(TypeError):
Message(to=aioxmpp.JID.fromstr("agent@fakeserver"))
def test_to_set_string():
msg = Message()
msg.to = "agent@fakeserver"
def test_to_set_not_string():
msg = Message()
with pytest.raises(TypeError):
msg.to = 1000
def test_to_set_none():
msg = Message()
msg.to = None
def test_sender_is_string():
Message(sender="agent@fakeserver")
def test_sender_is_not_string():
with pytest.raises(TypeError):
Message(sender=aioxmpp.JID.fromstr("agent@fakeserver"))
def test_sender_set_string():
msg = Message()
msg.sender = "agent@fakeserver"
def test_sender_set_not_string():
msg = Message()
with pytest.raises(TypeError):
msg.sender = 1000
def test_sender_set_none():
msg = Message()
msg.sender = None
def test_thread_is_string():
Message(thread="thread_id_001")
def test_thread_is_not_string():
with pytest.raises(TypeError):
Message(thread=1000)
def test_thread_set_string():
msg = Message()
msg.thread = "thread_id_001"
def test_thread_set_not_string():
msg = Message()
with pytest.raises(TypeError):
msg.thread = 1000
def test_thread_set_none():
msg = Message()
msg.thread = None
|
|
MAX_ROUNDS = 15
TOTAL_HONOR_TOKENS = 60.0
STARTING_RUNES = 4.0
STARTING_POWER = 1.0
# can't have more than 10 runes in one hand (5 mystics)
# not counting draw-card effects
RUNES_CAP = 10.0
# can't have more than 10 power in one hand (5 heavies)
POWER_CAP = 10.0
# cap on the draw-card multiplier: realistically, draw effects won't more than
# double the effective hand size (about 5 extra cards on a 5-card hand)
DRAW_CARDS_CAP = 2.0
def simulate_power_strategy():
honor_per_rune = 0.5 # buys only heavy infantry
# other values from calculator
honor_per_power = 0.700855
# each heavy adds about 0.429 power per draw in expectation
# normalizing around the expected contribution of the fifth heavy
# old deck: 10 power (4 heavy, 2 militia), 14 cards total
# new deck: 12 power (5 heavy, 2 militia), 15 cards total
# hand = 5 cards
power_per_rune = ((12.0 / 15.0) - (10.0 / 14.0)) * 5.0
assert power_per_rune > 0
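    # worked out: (12/15 - 10/14) * 5 = (0.8000 - 0.7143) * 5 ≈ 0.429, which
    # matches the ~0.429 power-per-draw figure quoted above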
runes = STARTING_RUNES
power = STARTING_POWER
total_honor_tokens = 0.0
total_honor_cards = 0.0
honor_history = []
card_honor_history = []
token_honor_history = []
for i in xrange(MAX_ROUNDS):
total_honor_cards += runes * honor_per_rune
total_honor_tokens += power * honor_per_power
# all runes used to purchase power
power += runes * power_per_rune
power = min(power, POWER_CAP)
total_honor = total_honor_tokens + total_honor_cards
card_honor_history.append(total_honor_cards)
token_honor_history.append(total_honor_tokens)
honor_history.append(total_honor)
print "**************************************************"
print "Power strategy honor history:"
print "cards: %s" % card_honor_history
print "tokens: %s" % token_honor_history
print "total: %s" % honor_history
print ""
return (card_honor_history, token_honor_history, honor_history)
# mystic_round_cutoff is the round number when the strategy
# stops buying mystics and starts buying normal cards
def simulate_mechana_strategy(mystic_round_cutoff):
assert mystic_round_cutoff <= MAX_ROUNDS
assert mystic_round_cutoff >= 0
honor_per_rune = 0.600000 # from calculator, when buying mix of cards
honor_per_power = 0.700855
runes = STARTING_RUNES
power = STARTING_POWER
runes_per_card = 2.357143 # from calculator
draw_card_effect_per_card = 1.038089 # from calculator
# each mystic adds about 0.455 runes per draw in expectation
# normalizing around the expected contribution of the second mystic
# old deck: 10 runes (1 mystics, 8 apprentice), 11 cards total
# new deck: 12 runes (2 mystics, 8 apprentice), 12 cards total
# hand = 5 cards
# this is higher than the power-per-heavy because fewer mystics are usually bought
runes_per_mystic = ((12.0/12.0) - (10.0/11.0))*5
assert runes_per_mystic > 0
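    # worked out: (12/12 - 10/11) * 5 = (1.0000 - 0.9091) * 5 ≈ 0.455, which
    # matches the ~0.455 runes-per-draw figure quoted above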
mystic_cost = 3.0
honor_per_mystic = 1.0
total_honor_tokens = 0.0
total_honor_cards = 0.0
honor_history = []
card_honor_history = []
token_honor_history = []
draw_card_multiplier = 1.0
for i in xrange(MAX_ROUNDS):
effective_runes = runes * draw_card_multiplier
effective_power = power * draw_card_multiplier
if i < mystic_round_cutoff:
# buying only mystics
mystics_bought = effective_runes / mystic_cost
total_honor_cards += mystics_bought * honor_per_mystic
total_honor_tokens += effective_power * honor_per_power
runes += mystics_bought * runes_per_mystic
runes = min(runes, RUNES_CAP)
else:
# buying only center deck cards
cards_bought = effective_runes / runes_per_card
total_honor_cards += effective_runes * honor_per_rune
total_honor_tokens += effective_power * honor_per_power
draw_card_multiplier *= draw_card_effect_per_card ** cards_bought # yes, that's an exponent
draw_card_multiplier = min(draw_card_multiplier, DRAW_CARDS_CAP)
total_honor = total_honor_tokens + total_honor_cards
card_honor_history.append(total_honor_cards)
token_honor_history.append(total_honor_tokens)
honor_history.append(total_honor)
print "**************************************************"
print "Mechana-mystic-%d strategy honor history:" % mystic_round_cutoff
print "cards: %s" % card_honor_history
print "tokens: %s" % token_honor_history
print "total: %s" % honor_history
print ""
return (card_honor_history, token_honor_history, honor_history)
def simulate_lifebound_strategy(mystic_round_cutoff):
assert mystic_round_cutoff <= MAX_ROUNDS
assert mystic_round_cutoff >= 0
# total card honor in honor-per-turn cards = 19
# total rune cost of honor-per-turn cards = 39
honor_per_rune = 19.0 / 39.0
honor_per_power = 0.700855 # from calculator
runes = STARTING_RUNES
power = STARTING_POWER
runes_per_card = 2.357143 # from calculator
draw_card_effect_per_card = 1.038089 # from calculator
honor_per_turn_per_card = 0.105714 # from calculator
# each mystic adds about 0.455 runes per draw in expectation
# normalizing around the expected contribution of the second mystic
# old deck: 10 runes (1 mystics, 8 apprentice), 11 cards total
# new deck: 12 runes (2 mystics, 8 apprentice), 12 cards total
# hand = 5 cards
# this is higher than the power-per-heavy because fewer mystics are usually bought
runes_per_mystic = ((12.0/12.0) - (10.0/11.0))*5
assert runes_per_mystic > 0
mystic_cost = 3.0
honor_per_mystic = 1.0
total_honor_tokens = 0.0
total_honor_cards = 0.0
honor_history = []
card_honor_history = []
token_honor_history = []
draw_card_multiplier = 1.0
honor_per_turn = 0.0
for i in xrange(MAX_ROUNDS):
effective_runes = runes * draw_card_multiplier
effective_power = power * draw_card_multiplier
effective_honor_per_turn = honor_per_turn * draw_card_multiplier
if i < mystic_round_cutoff:
# buying only mystics
mystics_bought = effective_runes / mystic_cost
total_honor_cards += mystics_bought * honor_per_mystic
total_honor_tokens += effective_power * honor_per_power
runes += mystics_bought * runes_per_mystic
runes = min(runes, RUNES_CAP)
else:
# buying only center deck cards
cards_bought = effective_runes / runes_per_card
total_honor_cards += effective_runes * honor_per_rune
total_honor_tokens += effective_power * honor_per_power + effective_honor_per_turn
honor_per_turn += cards_bought * honor_per_turn_per_card
draw_card_multiplier *= draw_card_effect_per_card ** cards_bought # yes, that's an exponent
draw_card_multiplier = min(draw_card_multiplier, DRAW_CARDS_CAP)
total_honor = total_honor_tokens + total_honor_cards
card_honor_history.append(total_honor_cards)
token_honor_history.append(total_honor_tokens)
honor_history.append(total_honor)
print "**************************************************"
print "Lifebound-mystic-%d strategy honor history:" % mystic_round_cutoff
print "cards: %s" % card_honor_history
print "tokens: %s" % token_honor_history
print "total: %s" % honor_history
print ""
return (card_honor_history, token_honor_history, honor_history)
def calculate_payoffs(a_strat, b_strat):
a_card_honor, a_token_honor, a_total_honor = a_strat
b_card_honor, b_token_honor, b_total_honor = b_strat
for i in xrange(MAX_ROUNDS):
total_honor_in_tokens = a_token_honor[i] + b_token_honor[i]
if total_honor_in_tokens >= TOTAL_HONOR_TOKENS:
# game over after round i, calculate payoffs
total_honor = a_total_honor[i] + b_total_honor[i]
return (a_total_honor[i] / total_honor, b_total_honor[i] / total_honor)
    # no real game will ever last more than MAX_ROUNDS, but because of the
    # simplifications in this model a mechana vs lifebound matchup can take
    # much longer to exhaust the honor tokens, so cut it off and score as-is
# print "Stopping game after %d rounds" % MAX_ROUNDS
total_honor = a_total_honor[-1] + b_total_honor[-1]
return (a_total_honor[-1] / total_honor, b_total_honor[-1] / total_honor)
def construct_payoff_matrix(first_strats, second_strats):
return [[calculate_payoffs(a, b) for b in second_strats] for a in first_strats]
# tries to find a deviation that makes either player better off
# compared to their payoffs at payoff_matrix[x][y]
def find_better_off_from(payoff_matrix, x, y):
p1_payoff, p2_payoff = payoff_matrix[x][y]
# print "Considering (%d, %d), payoffs %s" % (x, y, str(payoff_matrix[x][y]))
for i in xrange(len(payoff_matrix)):
if payoff_matrix[i][y][0] > p1_payoff:
# print "x = %d improves it for P1 to %s" % (i, str(payoff_matrix[i][y]))
return True
for i in xrange(len(payoff_matrix[0])):
if payoff_matrix[x][i][1] > p2_payoff:
# print "y = %d improves it for P2 to %s" % (i, str(payoff_matrix[x][i]))
return True
# print "Found Nash equilibrium!"
return False
def find_nash_equilibria(payoff_matrix):
equilibria = []
for i in xrange(len(payoff_matrix)):
for j in xrange(len(payoff_matrix[0])):
if not find_better_off_from(payoff_matrix, i, j):
equilibria.append((i, j))
return equilibria
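# Illustrative check (not part of the simulation): for a hypothetical 2x2
# payoff matrix such as
#   example = [[(0.60, 0.40), (0.70, 0.30)],
#              [(0.50, 0.50), (0.55, 0.45)]]
# neither player can improve unilaterally from cell (0, 0) (0.60 is the best
# P1 payoff in its column and 0.40 the best P2 payoff in its row), so
# find_nash_equilibria(example) would return [(0, 0)].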
def main():
round_cutoff = MAX_ROUNDS
power_strats = [simulate_power_strategy()]
mechana_strats = [simulate_mechana_strategy(i) for i in xrange(round_cutoff)]
lifebound_strats = [simulate_lifebound_strategy(i) for i in xrange(round_cutoff)]
power_vs_mechana = construct_payoff_matrix(power_strats, mechana_strats)
power_vs_mechana_equilibria = find_nash_equilibria(power_vs_mechana)
print "**************************************************"
print "Power vs Mechana"
# print " -> payoff matrix:"
# print power_vs_mechana
print " -> Nash equilibria:"
print power_vs_mechana_equilibria
print " -> Nash payoffs:"
for eq in power_vs_mechana_equilibria:
x, y = eq
print power_vs_mechana[x][y]
print ""
power_vs_lifebound = construct_payoff_matrix(power_strats, lifebound_strats)
power_vs_lifebound_equilibria = find_nash_equilibria(power_vs_lifebound)
print "**************************************************"
print "Power vs Lifebound"
# print " -> payoff matrix:"
# print power_vs_lifebound
print " -> Nash equilibria:"
print power_vs_lifebound_equilibria
print " -> Nash payoffs:"
for eq in power_vs_lifebound_equilibria:
x, y = eq
print power_vs_lifebound[x][y]
print ""
mechana_vs_lifebound = construct_payoff_matrix(mechana_strats, lifebound_strats)
mechana_vs_lifebound_equilibria = find_nash_equilibria(mechana_vs_lifebound)
print "**************************************************"
print "Mechana vs Lifebound"
# print " -> payoff matrix:"
# print mechana_vs_lifebound
print " -> Nash equilibria:"
print mechana_vs_lifebound_equilibria
print " -> Nash payoffs:"
for eq in mechana_vs_lifebound_equilibria:
x, y = eq
print mechana_vs_lifebound[x][y]
print ""
if __name__ == "__main__":
main()
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._kube_environments_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_update_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class KubeEnvironmentsOperations:
"""KubeEnvironmentsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2021_01_15.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_by_subscription(
self,
**kwargs: Any
) -> AsyncIterable["_models.KubeEnvironmentCollection"]:
"""Get all Kubernetes Environments for a subscription.
Description for Get all Kubernetes Environments for a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either KubeEnvironmentCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_15.models.KubeEnvironmentCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KubeEnvironmentCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
template_url=self.list_by_subscription.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_subscription_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("KubeEnvironmentCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/kubeEnvironments'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.KubeEnvironmentCollection"]:
"""Get all the Kubernetes Environments in a resource group.
Description for Get all the Kubernetes Environments in a resource group.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either KubeEnvironmentCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_15.models.KubeEnvironmentCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KubeEnvironmentCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("KubeEnvironmentCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> "_models.KubeEnvironment":
"""Get the properties of a Kubernetes Environment.
Description for Get the properties of a Kubernetes Environment.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the Kubernetes Environment.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KubeEnvironment, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_15.models.KubeEnvironment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KubeEnvironment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('KubeEnvironment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments/{name}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
name: str,
kube_environment_envelope: "_models.KubeEnvironment",
**kwargs: Any
) -> "_models.KubeEnvironment":
cls = kwargs.pop('cls', None) # type: ClsType["_models.KubeEnvironment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(kube_environment_envelope, 'KubeEnvironment')
request = build_create_or_update_request_initial(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('KubeEnvironment', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('KubeEnvironment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments/{name}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
name: str,
kube_environment_envelope: "_models.KubeEnvironment",
**kwargs: Any
) -> AsyncLROPoller["_models.KubeEnvironment"]:
"""Creates or updates a Kubernetes Environment.
Description for Creates or updates a Kubernetes Environment.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the Kubernetes Environment.
:type name: str
:param kube_environment_envelope: Configuration details of the Kubernetes Environment.
:type kube_environment_envelope: ~azure.mgmt.web.v2021_01_15.models.KubeEnvironment
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either KubeEnvironment or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.web.v2021_01_15.models.KubeEnvironment]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.KubeEnvironment"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
name=name,
kube_environment_envelope=kube_environment_envelope,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('KubeEnvironment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments/{name}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments/{name}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Delete a Kubernetes Environment.
Description for Delete a Kubernetes Environment.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the Kubernetes Environment.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
name=name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments/{name}'} # type: ignore
@distributed_trace_async
async def update(
self,
resource_group_name: str,
name: str,
kube_environment_envelope: "_models.KubeEnvironmentPatchResource",
**kwargs: Any
) -> "_models.KubeEnvironment":
"""Creates or updates a Kubernetes Environment.
Description for Creates or updates a Kubernetes Environment.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the Kubernetes Environment.
:type name: str
:param kube_environment_envelope: Configuration details of the Kubernetes Environment.
:type kube_environment_envelope:
~azure.mgmt.web.v2021_01_15.models.KubeEnvironmentPatchResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KubeEnvironment, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_15.models.KubeEnvironment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.KubeEnvironment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(kube_environment_envelope, 'KubeEnvironmentPatchResource')
request = build_update_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('KubeEnvironment', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('KubeEnvironment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/kubeEnvironments/{name}'} # type: ignore
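# Usage sketch (illustrative only): these operations are normally reached
# through a management client that attaches this group as an attribute, e.g.
# `client.kube_environments` on a WebSiteManagementClient-style async client;
# the resource group and environment names below are placeholders.
#
#     async for env in client.kube_environments.list_by_subscription():
#         print(env.name)
#
#     poller = await client.kube_environments.begin_create_or_update(
#         resource_group_name="my-rg",
#         name="my-kube-env",
#         kube_environment_envelope=envelope,  # a KubeEnvironment model instance
#     )
#     kube_env = await poller.result()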
|