from flexx import flx
# Associate Ace's assets with this module so that Flexx will load
# them when (things from) this module is used.
base_url = 'https://cdnjs.cloudflare.com/ajax/libs/ace/1.2.6/'
flx.assets.associate_asset(__name__, base_url + 'ace.js')
flx.assets.associate_asset(__name__, base_url + 'mode-python.js')
flx.assets.associate_asset(__name__, base_url + 'theme-solarized_dark.js')
class CodeEditor(flx.Widget):
""" A CodeEditor widget based on Ace.
"""
CSS = """
.flx-CodeEditor > .ace {
width: 100%;
height: 100%;
}
"""
def init(self):
global window
# https://ace.c9.io/#nav=api
self.ace = window.ace.edit(self.node, "editor")
self.ace.setValue("import os\n\ndirs = os.walk")
self.ace.navigateFileEnd() # otherwise all lines highlighted
self.ace.setTheme("ace/theme/solarized_dark")
self.ace.getSession().setMode("ace/mode/python")
@flx.reaction('size')
def __on_size(self, *events):
self.ace.resize()
if __name__ == '__main__':
flx.launch(CodeEditor, 'app')
flx.run()
|
import os
import time
from mlpatches import base
_stash = base._stash
def _get_status(exitcode, killer=0):
"""
calculates the exit status for a command.
see the documentation of os.wait for info about this.
"""
return (exitcode * 256) + killer
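# Illustrative note: with the (exit_code * 256) + signal layout used by
# os.wait(), a command that exits with code 1 and no signal yields
# _get_status(1) == 256, while _get_status(0) == 0 means clean termination.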
class VoidIO(object):
"""no-op I/O"""
def __init__(self):
pass
def write(self, *args):
pass
def writelines(self, *args):
pass
def read(self, *args):
return ""
def readline(self, *args):
return ""
def readlines(self, *args):
return []
def close(self):
pass
def flush(self):
pass
class _PipeEndpoint(object):
"""this class represents a pipe endpoint."""
def __init__(self, root, pipe):
self.__root = root
self.__pipe = pipe
def __getattr__(self, name):
"""return attribute name of the pipe."""
return getattr(self.__pipe, name)
def __hasattr__(self, name):
"""checks wether the pioe has a attribute called name."""
return hasattr(self.__pipe, name)
def __repr__(self):
"""returns the representation of the pipe."""
return repr(self.__pipe)
def __del__(self):
"""called on deletion."""
self.close()
def close(self):
"""closes the pipe."""
try:
os.close(self.__pipe.fileno())
except (OSError, IOError):
pass
ec = self.__root.get_exit_code(wait=True)
if ec / 256 == 0:
return None # see os.popen
else:
return ec
class _PopenCmd(object):
"""This class handles the command processing."""
# TODO: replace state mechanics with single bool and threading.Lock()
STATE_INIT = "INIT"
STATE_RUNNING = "RUNNING"
STATE_FINISHED = "FINISHED"
def __init__(self, cmd, mode, bufsize, shared_eo=False):
self.cmd = cmd
self.mode = mode
self.bufsize = bufsize
self.fds = []
self.worker = None
self.state = self.STATE_INIT
self.shared_eo = shared_eo
self.chinr, self.chinw = self.create_pipe(wbuf=bufsize)
self.choutr, self.choutw = self.create_pipe(rbuf=bufsize)
if shared_eo:
self.cherrr, self.cherrw = self.choutr, self.choutw
else:
self.cherrr, self.cherrw = self.create_pipe(rbuf=bufsize)
def get_pipes(self):
"""returns the pipes."""
if not self.shared_eo:
return (_PipeEndpoint(self, self.chinw), _PipeEndpoint(self, self.choutr), _PipeEndpoint(self, self.cherrr))
else:
return (_PipeEndpoint(self, self.chinw), _PipeEndpoint(self, self.choutr))
def close_fds(self):
"""close all fds."""
for fd in self.fds:
try:
os.close(fd)
except OSError:
pass
def create_pipe(self, rbuf=0, wbuf=0):
"""creates a pipe. returns (readpipe, writepipe)"""
rfd, wfd = os.pipe()
self.fds += [rfd, wfd]
rf, wf = os.fdopen(rfd, "rb", rbuf), os.fdopen(wfd, "wb", wbuf)
return rf, wf
def run(self):
"""runs the command."""
self.state = self.STATE_RUNNING
self.worker = _stash.runtime.run(
input_=self.cmd,
persistent_level=2,
is_background=False,
add_to_history=False,
final_ins=self.chinr,
final_outs=self.choutw,
final_errs=self.cherrw
)
if not self.worker.is_alive():
# sometimes stash is faster than the return
self.state = self.STATE_FINISHED
def get_exit_code(self, wait=True):
"""returns the exitcode.
If wait is False and the worker has not finished yet, return None."""
if self.state != self.STATE_INIT:
if self.worker is None:
# temp fix for pipes for fast commands
if not wait:
return 0
while self.worker is None:
time.sleep(0.01)
if wait and self.worker.is_alive():
self.worker.join()
self.state = self.STATE_FINISHED
elif self.worker.status() != self.worker.STOPPED:
return None
es = self.worker.state.return_value
return _get_status(es, self.worker.killer)
raise RuntimeError("get_exit_code() called before run()!")
def popen(patch, cmd, mode="r", bufsize=0):
"""Open a pipe to or from command. The return value is an open file object connected to the pipe, which can be read or written depending on whether mode is 'r' (default) or 'w'. The bufsize argument has the same meaning as the corresponding argument to the built-in open() function. The exit status of the command (encoded in the format specified for wait()) is available as the return value of the close() method of the file object, except that when the exit status is zero (termination without errors), None is returned."""
cmd = _PopenCmd(cmd, mode, bufsize, shared_eo=False)
pipes = cmd.get_pipes()
cmd.run()
if mode == "r":
return pipes[1]
elif mode == "w":
return pipes[0]
def popen2(patch, cmd, mode="r", bufsize=0):
"""Execute cmd as a sub-process and return the file objects (child_stdin, child_stdout)."""
cmd = _PopenCmd(cmd, mode, bufsize, shared_eo=False)
pipes = cmd.get_pipes()
cmd.run()
return pipes[0], pipes[1]
def popen3(patch, cmd, mode="r", bufsize=0):
"""Execute cmd as a sub-process and return the file objects (child_stdin, child_stdout, child_stderr)."""
cmd = _PopenCmd(cmd, mode, bufsize, shared_eo=False)
pipes = cmd.get_pipes()
cmd.run()
return pipes[0], pipes[1], pipes[2]
def popen4(patch, cmd, mode="r", bufsize=0):
"""Execute cmd as a sub-process and return the file objects (child_stdin, child_stdout_and_stderr)."""
cmd = _PopenCmd(cmd, mode, bufsize, shared_eo=True)
pipes = cmd.get_pipes()
cmd.run()
return pipes[0], pipes[1]
def system(patch, command):
"""Execute the command (a string) in a subshell. This is implemented by calling the Standard C function system(), and has the same limitations. Changes to sys.stdin, etc. are not reflected in the environment of the executed command.
On Unix, the return value is the exit status of the process encoded in the format specified for wait(). Note that POSIX does not specify the meaning of the return value of the C system() function, so the return value of the Python function is system-dependent.
On Windows, the return value is that returned by the system shell after running command, given by the Windows environment variable COMSPEC: on command.com systems (Windows 95, 98 and ME) this is always 0; on cmd.exe systems (Windows NT, 2000 and XP) this is the exit status of the command run; on systems using a non-native shell, consult your shell documentation.
The subprocess module provides more powerful facilities for spawning new processes and retrieving their results; using that module is preferable to using this function. See the Replacing Older Functions with the subprocess Module section in the subprocess documentation for some helpful recipes."""
io = VoidIO()
worker = _stash.runtime.run(
input_=command,
persistent_level=2,
is_background=False,
add_to_history=False,
final_ins=io,
final_outs=io,
final_errs=io,
)
worker.join() # wait for completion
es = worker.state.return_value
return _get_status(es, worker.killer)
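# Illustrative usage sketch, assuming these functions have been installed by
# mlpatches as replacements for the corresponding os-module functions (the
# command strings are only examples):
#
#     f = os.popen("ls")          # handled by popen() above
#     output = f.read()
#     status = f.close()          # None on success, encoded status otherwise
#     rc = os.system("echo hi")   # handled by system() above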
|
import itertools
import logging
import multiprocessing as mp
import sys
from collections import Counter
import numpy as np
import scipy.sparse as sps
from gensim import utils
from gensim.models.word2vec import Word2Vec
logger = logging.getLogger(__name__)
def _ids_to_words(ids, dictionary):
"""Convert an iterable of ids to their corresponding words using a dictionary.
Abstract away the differences between the HashDictionary and the standard one.
Parameters
----------
ids: dict
Dictionary of ids and their words.
dictionary: :class:`~gensim.corpora.dictionary.Dictionary`
Input gensim dictionary
Returns
-------
set
Corresponding words.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.corpora.dictionary import Dictionary
>>> from gensim.topic_coherence import text_analysis
>>>
>>> dictionary = Dictionary()
>>> ids = {1: 'fake', 4: 'cats'}
>>> dictionary.id2token = {1: 'fake', 2: 'tokens', 3: 'rabbids', 4: 'cats'}
>>>
>>> text_analysis._ids_to_words(ids, dictionary)
set(['cats', 'fake'])
"""
if not dictionary.id2token: # may not be initialized in the standard gensim.corpora.Dictionary
setattr(dictionary, 'id2token', {v: k for k, v in dictionary.token2id.items()})
top_words = set()
for word_id in ids:
word = dictionary.id2token[word_id]
if isinstance(word, set):
top_words = top_words.union(word)
else:
top_words.add(word)
return top_words
class BaseAnalyzer:
"""Base class for corpus and text analyzers.
Attributes
----------
relevant_ids : dict
Mapping of relevant word ids to their words.
_vocab_size : int
Size of vocabulary.
id2contiguous : dict
Mapping word_id -> number.
log_every : int
Interval for logging.
_num_docs : int
Number of documents.
"""
def __init__(self, relevant_ids):
"""
Parameters
----------
relevant_ids : dict
Mapping of relevant word ids to their words.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.topic_coherence import text_analysis
>>> ids = {1: 'fake', 4: 'cats'}
>>> base = text_analysis.BaseAnalyzer(ids)
>>> # should return {1: 'fake', 4: 'cats'} 2 {1: 0, 4: 1} 1000 0
>>> print(base.relevant_ids, base._vocab_size, base.id2contiguous, base.log_every, base._num_docs)
{1: 'fake', 4: 'cats'} 2 {1: 0, 4: 1} 1000 0
"""
self.relevant_ids = relevant_ids
self._vocab_size = len(self.relevant_ids)
self.id2contiguous = {word_id: n for n, word_id in enumerate(self.relevant_ids)}
self.log_every = 1000
self._num_docs = 0
@property
def num_docs(self):
return self._num_docs
@num_docs.setter
def num_docs(self, num):
self._num_docs = num
if self._num_docs % self.log_every == 0:
logger.info(
"%s accumulated stats from %d documents",
self.__class__.__name__, self._num_docs)
def analyze_text(self, text, doc_num=None):
raise NotImplementedError("Base classes should implement analyze_text.")
def __getitem__(self, word_or_words):
if isinstance(word_or_words, str) or not hasattr(word_or_words, '__iter__'):
return self.get_occurrences(word_or_words)
else:
return self.get_co_occurrences(*word_or_words)
def get_occurrences(self, word_id):
"""Return number of docs the word occurs in, once `accumulate` has been called."""
return self._get_occurrences(self.id2contiguous[word_id])
def _get_occurrences(self, word_id):
raise NotImplementedError("Base classes should implement occurrences")
def get_co_occurrences(self, word_id1, word_id2):
"""Return number of docs the words co-occur in, once `accumulate` has been called."""
return self._get_co_occurrences(self.id2contiguous[word_id1], self.id2contiguous[word_id2])
def _get_co_occurrences(self, word_id1, word_id2):
raise NotImplementedError("Base classes should implement co_occurrences")
class UsesDictionary(BaseAnalyzer):
"""A BaseAnalyzer that uses a Dictionary, hence can translate tokens to counts.
The standard BaseAnalyzer can only deal with token ids since it doesn't have the token2id
mapping.
Attributes
----------
relevant_words : set
Set of words that occurrences should be accumulated for.
dictionary : :class:`~gensim.corpora.dictionary.Dictionary`
Dictionary based on text
token2id : dict
Mapping from :class:`~gensim.corpora.dictionary.Dictionary`
"""
def __init__(self, relevant_ids, dictionary):
"""
Parameters
----------
relevant_ids : dict
Mapping of relevant word ids to their words.
dictionary : :class:`~gensim.corpora.dictionary.Dictionary`
Dictionary based on text
Examples
--------
.. sourcecode:: pycon
>>> from gensim.topic_coherence import text_analysis
>>> from gensim.corpora.dictionary import Dictionary
>>>
>>> ids = {1: 'foo', 2: 'bar'}
>>> dictionary = Dictionary([['foo', 'bar', 'baz'], ['foo', 'bar', 'bar', 'baz']])
>>> udict = text_analysis.UsesDictionary(ids, dictionary)
>>>
>>> print(udict.relevant_words)
set([u'foo', u'baz'])
"""
super(UsesDictionary, self).__init__(relevant_ids)
self.relevant_words = _ids_to_words(self.relevant_ids, dictionary)
self.dictionary = dictionary
self.token2id = dictionary.token2id
def get_occurrences(self, word):
"""Return number of docs the word occurs in, once `accumulate` has been called."""
try:
word_id = self.token2id[word]
except KeyError:
word_id = word
return self._get_occurrences(self.id2contiguous[word_id])
def _word2_contiguous_id(self, word):
try:
word_id = self.token2id[word]
except KeyError:
word_id = word
return self.id2contiguous[word_id]
def get_co_occurrences(self, word1, word2):
"""Return number of docs the words co-occur in, once `accumulate` has been called."""
word_id1 = self._word2_contiguous_id(word1)
word_id2 = self._word2_contiguous_id(word2)
return self._get_co_occurrences(word_id1, word_id2)
class InvertedIndexBased(BaseAnalyzer):
"""Analyzer that builds up an inverted index to accumulate stats."""
def __init__(self, *args):
"""
Parameters
----------
args : dict
Look at :class:`~gensim.topic_coherence.text_analysis.BaseAnalyzer`
Examples
--------
.. sourcecode:: pycon
>>> from gensim.topic_coherence import text_analysis
>>>
>>> ids = {1: 'fake', 4: 'cats'}
>>> ininb = text_analysis.InvertedIndexBased(ids)
>>>
>>> print(ininb._inverted_index)
[set([]) set([])]
"""
super(InvertedIndexBased, self).__init__(*args)
self._inverted_index = np.array([set() for _ in range(self._vocab_size)])
def _get_occurrences(self, word_id):
return len(self._inverted_index[word_id])
def _get_co_occurrences(self, word_id1, word_id2):
s1 = self._inverted_index[word_id1]
s2 = self._inverted_index[word_id2]
return len(s1.intersection(s2))
def index_to_dict(self):
contiguous2id = {n: word_id for word_id, n in self.id2contiguous.items()}
return {contiguous2id[n]: doc_id_set for n, doc_id_set in enumerate(self._inverted_index)}
class CorpusAccumulator(InvertedIndexBased):
"""Gather word occurrence stats from a corpus by iterating over its BoW representation."""
def analyze_text(self, text, doc_num=None):
"""Build an inverted index from a sequence of corpus texts."""
doc_words = frozenset(x[0] for x in text)
top_ids_in_doc = self.relevant_ids.intersection(doc_words)
for word_id in top_ids_in_doc:
self._inverted_index[self.id2contiguous[word_id]].add(self._num_docs)
def accumulate(self, corpus):
for document in corpus:
self.analyze_text(document)
self.num_docs += 1
return self
class WindowedTextsAnalyzer(UsesDictionary):
"""Gather some stats about relevant terms of a corpus by iterating over windows of texts."""
def __init__(self, relevant_ids, dictionary):
"""
Parameters
----------
relevant_ids : set of int
Relevant word ids.
dictionary : :class:`~gensim.corpora.dictionary.Dictionary`
Dictionary instance with mappings for the relevant_ids.
"""
super(WindowedTextsAnalyzer, self).__init__(relevant_ids, dictionary)
self._none_token = self._vocab_size # see _iter_texts for use of none token
def accumulate(self, texts, window_size):
relevant_texts = self._iter_texts(texts)
windows = utils.iter_windows(
relevant_texts, window_size, ignore_below_size=False, include_doc_num=True)
for doc_num, virtual_document in windows:
self.analyze_text(virtual_document, doc_num)
self.num_docs += 1
return self
def _iter_texts(self, texts):
dtype = np.uint16 if np.iinfo(np.uint16).max >= self._vocab_size else np.uint32
for text in texts:
if self.text_is_relevant(text):
yield np.fromiter((
self.id2contiguous[self.token2id[w]] if w in self.relevant_words
else self._none_token
for w in text), dtype=dtype, count=len(text))
def text_is_relevant(self, text):
"""Check if the text has any relevant words."""
for word in text:
if word in self.relevant_words:
return True
return False
class InvertedIndexAccumulator(WindowedTextsAnalyzer, InvertedIndexBased):
"""Build an inverted index from a sequence of corpus texts."""
def analyze_text(self, window, doc_num=None):
for word_id in window:
if word_id is not self._none_token:
self._inverted_index[word_id].add(self._num_docs)
class WordOccurrenceAccumulator(WindowedTextsAnalyzer):
"""Accumulate word occurrences and co-occurrences from a sequence of corpus texts."""
def __init__(self, *args):
super(WordOccurrenceAccumulator, self).__init__(*args)
self._occurrences = np.zeros(self._vocab_size, dtype='uint32')
self._co_occurrences = sps.lil_matrix((self._vocab_size, self._vocab_size), dtype='uint32')
self._uniq_words = np.zeros((self._vocab_size + 1,), dtype=bool) # add 1 for none token
self._counter = Counter()
def __str__(self):
return self.__class__.__name__
def accumulate(self, texts, window_size):
self._co_occurrences = self._co_occurrences.tolil()
self.partial_accumulate(texts, window_size)
self._symmetrize()
return self
def partial_accumulate(self, texts, window_size):
"""Meant to be called several times to accumulate partial results.
Notes
-----
The final accumulation should be performed with the `accumulate` method as opposed to this one.
This method does not ensure the co-occurrence matrix is in lil format and does not
symmetrize it after accumulation.
"""
self._current_doc_num = -1
self._token_at_edge = None
self._counter.clear()
super(WordOccurrenceAccumulator, self).accumulate(texts, window_size)
for combo, count in self._counter.items():
self._co_occurrences[combo] += count
return self
def analyze_text(self, window, doc_num=None):
self._slide_window(window, doc_num)
mask = self._uniq_words[:-1] # to exclude none token
if mask.any():
self._occurrences[mask] += 1
self._counter.update(itertools.combinations(np.nonzero(mask)[0], 2))
def _slide_window(self, window, doc_num):
if doc_num != self._current_doc_num:
self._uniq_words[:] = False
self._uniq_words[np.unique(window)] = True
self._current_doc_num = doc_num
else:
self._uniq_words[self._token_at_edge] = False
self._uniq_words[window[-1]] = True
self._token_at_edge = window[0]
def _symmetrize(self):
"""Word pairs may have been encountered in (i, j) and (j, i) order.
Notes
-----
Rather than enforcing a particular ordering during the update process,
we choose to symmetrize the co-occurrence matrix after accumulation has completed.
"""
co_occ = self._co_occurrences
co_occ.setdiag(self._occurrences) # diagonal should be equal to occurrence counts
self._co_occurrences = \
co_occ + co_occ.T - sps.diags(co_occ.diagonal(), offsets=0, dtype='uint32')
def _get_occurrences(self, word_id):
return self._occurrences[word_id]
def _get_co_occurrences(self, word_id1, word_id2):
return self._co_occurrences[word_id1, word_id2]
def merge(self, other):
self._occurrences += other._occurrences
self._co_occurrences += other._co_occurrences
self._num_docs += other._num_docs
class PatchedWordOccurrenceAccumulator(WordOccurrenceAccumulator):
"""Monkey patched for multiprocessing worker usage, to move some of the logic to the master process."""
def _iter_texts(self, texts):
return texts # master process will handle this
class ParallelWordOccurrenceAccumulator(WindowedTextsAnalyzer):
"""Accumulate word occurrences in parallel.
Attributes
----------
processes : int
Number of processes to use; must be at least two.
args :
Should include `relevant_ids` and `dictionary` (see :class:`~UsesDictionary.__init__`).
kwargs :
Can include `batch_size`, which is the number of docs to send to a worker at a time.
If not included, it defaults to 64.
"""
def __init__(self, processes, *args, **kwargs):
super(ParallelWordOccurrenceAccumulator, self).__init__(*args)
if processes < 2:
raise ValueError(
"Must have at least 2 processes to run in parallel; got %d" % processes)
self.processes = processes
self.batch_size = kwargs.get('batch_size', 64)
def __str__(self):
return "%s(processes=%s, batch_size=%s)" % (
self.__class__.__name__, self.processes, self.batch_size)
def accumulate(self, texts, window_size):
workers, input_q, output_q = self.start_workers(window_size)
try:
self.queue_all_texts(input_q, texts, window_size)
interrupted = False
except KeyboardInterrupt:
logger.warn("stats accumulation interrupted; <= %d documents processed", self._num_docs)
interrupted = True
accumulators = self.terminate_workers(input_q, output_q, workers, interrupted)
return self.merge_accumulators(accumulators)
def start_workers(self, window_size):
"""Set up an input and output queue and start processes for each worker.
Notes
-----
The input queue is used to transmit batches of documents to the workers.
The output queue is used by workers to transmit the WordOccurrenceAccumulator instances.
Parameters
----------
window_size : int
Returns
-------
tuple
Tuple of (list of workers, input queue, output queue).
"""
input_q = mp.Queue(maxsize=self.processes)
output_q = mp.Queue()
workers = []
for _ in range(self.processes):
accumulator = PatchedWordOccurrenceAccumulator(self.relevant_ids, self.dictionary)
worker = AccumulatingWorker(input_q, output_q, accumulator, window_size)
worker.start()
workers.append(worker)
return workers, input_q, output_q
def yield_batches(self, texts):
"""Return a generator over the given texts that yields batches of `batch_size` texts at a time."""
batch = []
for text in self._iter_texts(texts):
batch.append(text)
if len(batch) == self.batch_size:
yield batch
batch = []
if batch:
yield batch
def queue_all_texts(self, q, texts, window_size):
"""Sequentially place batches of texts on the given queue until `texts` is consumed.
The texts are filtered so that only those with at least one relevant token are queued.
"""
for batch_num, batch in enumerate(self.yield_batches(texts)):
q.put(batch, block=True)
before = self._num_docs / self.log_every
self._num_docs += sum(len(doc) - window_size + 1 for doc in batch)
if before < (self._num_docs / self.log_every):
logger.info(
"%d batches submitted to accumulate stats from %d documents (%d virtual)",
(batch_num + 1), (batch_num + 1) * self.batch_size, self._num_docs)
def terminate_workers(self, input_q, output_q, workers, interrupted=False):
"""Wait until all workers have transmitted their WordOccurrenceAccumulator instances, then terminate each.
Warnings
--------
We do not use join here because it has been shown to have some issues
in Python 2.7 (and even in later versions). This method also closes both the input and output queue.
If `interrupted` is False (normal execution), a None value is placed on the input queue for
each worker. The workers are looking for this sentinel value and interpret it as a signal to
terminate themselves. If `interrupted` is True, a KeyboardInterrupt occurred. The workers are
programmed to recover from this and continue on to transmit their results before terminating.
So in this instance, the sentinel values are not queued, but the rest of the execution
continues as usual.
"""
if not interrupted:
for _ in workers:
input_q.put(None, block=True)
accumulators = []
while len(accumulators) != len(workers):
accumulators.append(output_q.get())
logger.info("%d accumulators retrieved from output queue", len(accumulators))
for worker in workers:
if worker.is_alive():
worker.terminate()
input_q.close()
output_q.close()
return accumulators
def merge_accumulators(self, accumulators):
"""Merge the list of accumulators into a single `WordOccurrenceAccumulator` with all
occurrence and co-occurrence counts, and a `num_docs` that reflects the total observed
by all the individual accumulators.
"""
accumulator = WordOccurrenceAccumulator(self.relevant_ids, self.dictionary)
for other_accumulator in accumulators:
accumulator.merge(other_accumulator)
# Workers do partial accumulation, so none of the co-occurrence matrices are symmetrized.
# This is by design, to avoid unnecessary matrix additions/conversions during accumulation.
accumulator._symmetrize()
logger.info("accumulated word occurrence stats for %d virtual documents", accumulator.num_docs)
return accumulator
class AccumulatingWorker(mp.Process):
"""Accumulate stats from texts fed in from queue."""
def __init__(self, input_q, output_q, accumulator, window_size):
super(AccumulatingWorker, self).__init__()
self.input_q = input_q
self.output_q = output_q
self.accumulator = accumulator
self.accumulator.log_every = sys.maxsize # avoid logging in workers
self.window_size = window_size
def run(self):
try:
self._run()
except KeyboardInterrupt:
logger.info(
"%s interrupted after processing %d documents",
self.__class__.__name__, self.accumulator.num_docs)
except Exception:
logger.exception("worker encountered unexpected exception")
finally:
self.reply_to_master()
def _run(self):
batch_num = -1
n_docs = 0
while True:
batch_num += 1
docs = self.input_q.get(block=True)
if docs is None: # sentinel value
logger.debug("observed sentinel value; terminating")
break
self.accumulator.partial_accumulate(docs, self.window_size)
n_docs += len(docs)
logger.debug(
"completed batch %d; %d documents processed (%d virtual)",
batch_num, n_docs, self.accumulator.num_docs)
logger.debug(
"finished all batches; %d documents processed (%d virtual)",
n_docs, self.accumulator.num_docs)
def reply_to_master(self):
logger.info("serializing accumulator to return to master...")
self.output_q.put(self.accumulator, block=False)
logger.info("accumulator serialized")
class WordVectorsAccumulator(UsesDictionary):
"""Accumulate context vectors for words using word vector embeddings.
Attributes
----------
model: Word2Vec (:class:`~gensim.models.keyedvectors.KeyedVectors`)
If None, a new Word2Vec model is trained on the given text corpus. Otherwise,
it should be a set of pre-trained Word2Vec context vectors.
model_kwargs:
if model is None, these keyword arguments will be passed through to the Word2Vec constructor.
"""
def __init__(self, relevant_ids, dictionary, model=None, **model_kwargs):
super(WordVectorsAccumulator, self).__init__(relevant_ids, dictionary)
self.model = model
self.model_kwargs = model_kwargs
def not_in_vocab(self, words):
uniq_words = set(utils.flatten(words))
return set(word for word in uniq_words if word not in self.model)
def get_occurrences(self, word):
"""Return number of docs the word occurs in, once `accumulate` has been called."""
try:
self.token2id[word] # is this a token or an id?
except KeyError:
word = self.dictionary.id2token[word]
return self.model.get_vecattr(word, 'count')
def get_co_occurrences(self, word1, word2):
"""Return number of docs the words co-occur in, once `accumulate` has been called."""
raise NotImplementedError("Word2Vec model does not support co-occurrence counting")
def accumulate(self, texts, window_size):
if self.model is not None:
logger.debug("model is already trained; no accumulation necessary")
return self
kwargs = self.model_kwargs.copy()
if window_size is not None:
kwargs['window'] = window_size
kwargs['min_count'] = kwargs.get('min_count', 1)
kwargs['sg'] = kwargs.get('sg', 1)
kwargs['hs'] = kwargs.get('hs', 0)
self.model = Word2Vec(**kwargs)
self.model.build_vocab(texts)
self.model.train(texts, total_examples=self.model.corpus_count, epochs=self.model.epochs)
self.model = self.model.wv # retain KeyedVectors
return self
def ids_similarity(self, ids1, ids2):
words1 = self._words_with_embeddings(ids1)
words2 = self._words_with_embeddings(ids2)
return self.model.n_similarity(words1, words2)
def _words_with_embeddings(self, ids):
if not hasattr(ids, '__iter__'):
ids = [ids]
words = [self.dictionary.id2token[word_id] for word_id in ids]
return [word for word in words if word in self.model]
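# A minimal, illustrative sketch of driving one of the accumulators above
# (the toy corpus and window size are assumptions, not part of this module):
#
#     from gensim.corpora.dictionary import Dictionary
#     texts = [['human', 'interface', 'computer'], ['survey', 'user', 'computer']]
#     dictionary = Dictionary(texts)
#     relevant_ids = set(dictionary.token2id.values())
#     accumulator = WordOccurrenceAccumulator(relevant_ids, dictionary)
#     accumulator.accumulate(texts, window_size=2)
#     accumulator.get_occurrences('computer')   # windows/docs containing the word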
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# EC2 provides unique random hostnames.
def test_hostname(host):
pass
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import asyncio
import logging
from keba_kecontact.connection import KebaKeContact
import voluptuous as vol
from homeassistant.const import CONF_HOST
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "keba"
SUPPORTED_COMPONENTS = ["binary_sensor", "sensor", "lock", "notify"]
CONF_RFID = "rfid"
CONF_FS = "failsafe"
CONF_FS_TIMEOUT = "failsafe_timeout"
CONF_FS_FALLBACK = "failsafe_fallback"
CONF_FS_PERSIST = "failsafe_persist"
CONF_FS_INTERVAL = "refresh_interval"
MAX_POLLING_INTERVAL = 5 # in seconds
MAX_FAST_POLLING_COUNT = 4
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_RFID, default="00845500"): cv.string,
vol.Optional(CONF_FS, default=False): cv.boolean,
vol.Optional(CONF_FS_TIMEOUT, default=30): cv.positive_int,
vol.Optional(CONF_FS_FALLBACK, default=6): cv.positive_int,
vol.Optional(CONF_FS_PERSIST, default=0): cv.positive_int,
vol.Optional(CONF_FS_INTERVAL, default=5): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
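# Illustrative configuration.yaml entry matching the schema above (the host
# address and failsafe values are assumptions):
#
#     keba:
#       host: 192.168.1.25
#       rfid: "00845500"
#       failsafe: true
#       failsafe_timeout: 30
#       failsafe_fallback: 6
#       failsafe_persist: 0
#       refresh_interval: 5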
_SERVICE_MAP = {
"request_data": "async_request_data",
"set_energy": "async_set_energy",
"set_current": "async_set_current",
"authorize": "async_start",
"deauthorize": "async_stop",
"enable": "async_enable_ev",
"disable": "async_disable_ev",
"set_failsafe": "async_set_failsafe",
}
async def async_setup(hass, config):
"""Check connectivity and version of KEBA charging station."""
host = config[DOMAIN][CONF_HOST]
rfid = config[DOMAIN][CONF_RFID]
refresh_interval = config[DOMAIN][CONF_FS_INTERVAL]
keba = KebaHandler(hass, host, rfid, refresh_interval)
hass.data[DOMAIN] = keba
# Wait for KebaHandler setup complete (initial values loaded)
if not await keba.setup():
_LOGGER.error("Could not find a charging station at %s", host)
return False
# Set failsafe mode at start up of Home Assistant
failsafe = config[DOMAIN][CONF_FS]
timeout = config[DOMAIN][CONF_FS_TIMEOUT] if failsafe else 0
fallback = config[DOMAIN][CONF_FS_FALLBACK] if failsafe else 0
persist = config[DOMAIN][CONF_FS_PERSIST] if failsafe else 0
try:
hass.loop.create_task(keba.set_failsafe(timeout, fallback, persist))
except ValueError as ex:
_LOGGER.warning("Could not set failsafe mode %s", ex)
# Register services to hass
async def execute_service(call):
"""Execute a service to KEBA charging station.
This must be a member function as we need access to the keba
object here.
"""
function_name = _SERVICE_MAP[call.service]
function_call = getattr(keba, function_name)
await function_call(call.data)
for service in _SERVICE_MAP:
hass.services.async_register(DOMAIN, service, execute_service)
# Load components
for domain in SUPPORTED_COMPONENTS:
hass.async_create_task(
discovery.async_load_platform(hass, domain, DOMAIN, {}, config)
)
# Start periodic polling of charging station data
keba.start_periodic_request()
return True
class KebaHandler(KebaKeContact):
"""Representation of a KEBA charging station connection."""
def __init__(self, hass, host, rfid, refresh_interval):
"""Initialize charging station connection."""
super().__init__(host, self.hass_callback)
self._update_listeners = []
self._hass = hass
self.rfid = rfid
self.device_name = "keba" # correct device name will be set in setup()
self.device_id = "keba_wallbox_" # correct device id will be set in setup()
# Ensure at least MAX_POLLING_INTERVAL seconds delay
self._refresh_interval = max(MAX_POLLING_INTERVAL, refresh_interval)
self._fast_polling_count = MAX_FAST_POLLING_COUNT
self._polling_task = None
def start_periodic_request(self):
"""Start periodic data polling."""
self._polling_task = self._hass.loop.create_task(self._periodic_request())
async def _periodic_request(self):
"""Send periodic update requests."""
await self.request_data()
if self._fast_polling_count < MAX_FAST_POLLING_COUNT:
self._fast_polling_count += 1
_LOGGER.debug("Periodic data request executed, now wait for 2 seconds")
await asyncio.sleep(2)
else:
_LOGGER.debug(
"Periodic data request executed, now wait for %s seconds",
self._refresh_interval,
)
await asyncio.sleep(self._refresh_interval)
_LOGGER.debug("Periodic data request rescheduled")
self._polling_task = self._hass.loop.create_task(self._periodic_request())
async def setup(self, loop=None):
"""Initialize KebaHandler object."""
await super().setup(loop)
# Request initial values and extract serial number
await self.request_data()
if (
self.get_value("Serial") is not None
and self.get_value("Product") is not None
):
self.device_id = f"keba_wallbox_{self.get_value('Serial')}"
self.device_name = self.get_value("Product")
return True
return False
def hass_callback(self, data):
"""Handle component notification via callback."""
# Inform entities about updated values
for listener in self._update_listeners:
listener()
_LOGGER.debug("Notifying %d listeners", len(self._update_listeners))
def _set_fast_polling(self):
_LOGGER.debug("Fast polling enabled")
self._fast_polling_count = 0
self._polling_task.cancel()
self._polling_task = self._hass.loop.create_task(self._periodic_request())
def add_update_listener(self, listener):
"""Add a listener for update notifications."""
self._update_listeners.append(listener)
# initial data is already loaded, thus update the component
listener()
async def async_request_data(self, param):
"""Request new data in async way."""
await self.request_data()
_LOGGER.debug("New data from KEBA wallbox requested")
async def async_set_energy(self, param):
"""Set energy target in async way."""
try:
energy = param["energy"]
await self.set_energy(float(energy))
self._set_fast_polling()
except (KeyError, ValueError) as ex:
_LOGGER.warning("Energy value is not correct. %s", ex)
async def async_set_current(self, param):
"""Set current maximum in async way."""
try:
current = param["current"]
await self.set_current(float(current))
# No fast polling as this function might be called regularly
except (KeyError, ValueError) as ex:
_LOGGER.warning("Current value is not correct. %s", ex)
async def async_start(self, param=None):
"""Authorize EV in async way."""
await self.start(self.rfid)
self._set_fast_polling()
async def async_stop(self, param=None):
"""De-authorize EV in async way."""
await self.stop(self.rfid)
self._set_fast_polling()
async def async_enable_ev(self, param=None):
"""Enable EV in async way."""
await self.enable(True)
self._set_fast_polling()
async def async_disable_ev(self, param=None):
"""Disable EV in async way."""
await self.enable(False)
self._set_fast_polling()
async def async_set_failsafe(self, param=None):
"""Set failsafe mode in async way."""
try:
timeout = param[CONF_FS_TIMEOUT]
fallback = param[CONF_FS_FALLBACK]
persist = param[CONF_FS_PERSIST]
await self.set_failsafe(int(timeout), float(fallback), bool(persist))
self._set_fast_polling()
except (KeyError, ValueError) as ex:
_LOGGER.warning(
"failsafe_timeout, failsafe_fallback and/or "
"failsafe_persist value are not correct. %s",
ex,
)
|
from weblate.trans.management.commands import WeblateComponentCommand
class Command(WeblateComponentCommand):
help = "pushes all changes to upstream repository"
needs_repo = True
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--force-commit",
action="store_true",
dest="force_commit",
default=False,
help="Forces committing pending changes",
)
def handle(self, *args, **options):
for component in self.get_components(**options):
component.do_push(None, force_commit=options["force_commit"])
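# Illustrative invocation, assuming this module is installed as a Weblate
# management command named ``pushgit`` (the command name and component filter
# are assumptions):
#
#     ./manage.py pushgit --force-commit project/component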
|
import io
import os
import uuid
from urllib.parse import urlunsplit
import docutils.parsers.rst.directives.body
import docutils.parsers.rst.directives.misc
import pygments
import pygments.util
from docutils import core
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst.roles import set_classes
from docutils.parsers.rst.directives.misc import Include
from pygments.lexers import get_lexer_by_name
from nikola import utils
from nikola.plugin_categories import RestExtension
# A sanitized version of docutils.parsers.rst.directives.body.CodeBlock.
class CodeBlock(Directive):
"""Parse and mark up content of a code block."""
optional_arguments = 1
option_spec = {'class': directives.class_option,
'name': directives.unchanged,
'number-lines': directives.unchanged, # integer or None
'linenos': directives.unchanged,
'tab-width': directives.nonnegative_int}
has_content = True
def run(self):
"""Run code block directive."""
self.assert_has_content()
if 'linenos' in self.options:
self.options['number-lines'] = self.options['linenos']
if 'tab-width' in self.options:
self.content = [x.replace('\t', ' ' * self.options['tab-width']) for x in self.content]
if self.arguments:
language = self.arguments[0]
else:
language = 'text'
set_classes(self.options)
classes = ['code']
if language:
classes.append(language)
if 'classes' in self.options:
classes.extend(self.options['classes'])
code = '\n'.join(self.content)
try:
lexer = get_lexer_by_name(language)
except pygments.util.ClassNotFound:
raise self.error('Cannot find pygments lexer for language "{0}"'.format(language))
if 'number-lines' in self.options:
linenos = 'table'
# optional argument `startline`, defaults to 1
try:
linenostart = int(self.options['number-lines'] or 1)
except ValueError:
raise self.error(':number-lines: with non-integer start value')
else:
linenos = False
linenostart = 1 # actually unused
if self.site.invariant: # for testing purposes
anchor_ref = 'rest_code_' + 'fixedvaluethatisnotauuid'
else:
anchor_ref = 'rest_code_' + uuid.uuid4().hex
formatter = utils.NikolaPygmentsHTML(anchor_ref=anchor_ref, classes=classes, linenos=linenos, linenostart=linenostart)
out = pygments.highlight(code, lexer, formatter)
node = nodes.raw('', out, format='html')
self.add_name(node)
# if called from "include", set the source
if 'source' in self.options:
node.attributes['source'] = self.options['source']
return [node]
# Monkey-patch: replace insane docutils CodeBlock with our implementation.
docutils.parsers.rst.directives.body.CodeBlock = CodeBlock
docutils.parsers.rst.directives.misc.CodeBlock = CodeBlock
class Plugin(RestExtension):
"""Plugin for listing directive."""
name = "rest_listing"
def set_site(self, site):
"""Set Nikola site."""
self.site = site
# Even though listings don't use CodeBlock anymore, I am
# leaving these to make the code directive work with
# docutils < 0.9
CodeBlock.site = site
Listing.site = site
directives.register_directive('code', CodeBlock)
directives.register_directive('code-block', CodeBlock)
directives.register_directive('sourcecode', CodeBlock)
directives.register_directive('listing', Listing)
Listing.folders = site.config['LISTINGS_FOLDERS']
return super().set_site(site)
# Add sphinx compatibility option
listing_spec = Include.option_spec
listing_spec['linenos'] = directives.unchanged
class Listing(Include):
"""Create a highlighted block of code from a file in listings/.
Usage:
.. listing:: nikola.py python
:number-lines:
"""
has_content = False
required_arguments = 1
optional_arguments = 1
option_spec = listing_spec
def run(self):
"""Run listing directive."""
_fname = self.arguments.pop(0)
fname = _fname.replace('/', os.sep)
try:
lang = self.arguments.pop(0)
self.options['code'] = lang
except IndexError:
self.options['literal'] = True
if len(self.folders) == 1:
listings_folder = next(iter(self.folders.keys()))
if fname.startswith(listings_folder):
fpath = os.path.join(fname) # new syntax: specify folder name
else:
fpath = os.path.join(listings_folder, fname) # old syntax: don't specify folder name
else:
fpath = os.path.join(fname) # must be new syntax: specify folder name
self.arguments.insert(0, fpath)
if 'linenos' in self.options:
self.options['number-lines'] = self.options['linenos']
with io.open(fpath, 'r+', encoding='utf-8-sig') as fileobject:
self.content = fileobject.read().splitlines()
self.state.document.settings.record_dependencies.add(fpath)
target = urlunsplit(("link", 'listing', fpath.replace('\\', '/'), '', ''))
src_target = urlunsplit(("link", 'listing_source', fpath.replace('\\', '/'), '', ''))
src_label = self.site.MESSAGES('Source')
generated_nodes = (
[core.publish_doctree('`{0} <{1}>`_ `({2}) <{3}>`_' .format(
_fname, target, src_label, src_target))[0]])
generated_nodes += self.get_code_from_file(fileobject)
return generated_nodes
def get_code_from_file(self, data):
"""Create CodeBlock nodes from file object content."""
return super().run()
def assert_has_content(self):
"""Override check from superclass with nothing.
Listing has no content, override check from superclass.
"""
pass
|
import datetime as dt
from mock import create_autospec, call
from arctic.store.metadata_store import MetadataStore
def test_ensure_index():
ms = create_autospec(MetadataStore)
MetadataStore._ensure_index(ms)
assert ms.create_index.call_args_list == [call([('symbol', 1),
('start_time', -1)],
unique=True,
background=True)]
def test_list_symbols_simple():
ms = create_autospec(MetadataStore)
ms.distinct.return_value = []
MetadataStore.list_symbols(ms)
ms.distinct.assert_called_once_with('symbol')
def test_list_symbols_regex():
ms = create_autospec(MetadataStore)
ms.aggregate.return_value = []
expected_pipeline = [
{'$sort': {'symbol': 1, 'start_time': -1}},
{'$match': {'symbol': {'$regex': 'test.*'}}},
{'$group': {'_id': '$symbol', 'metadata': {'$first': '$metadata'}}},
{'$project': {'_id': 0, 'symbol': '$_id'}}
]
MetadataStore.list_symbols(ms, regex='test.*')
ms.aggregate.assert_called_once_with(expected_pipeline)
def test_list_symbols_as_of():
ms = create_autospec(MetadataStore)
ms.aggregate.return_value = []
expected_pipeline = [
{'$sort': {'symbol': 1, 'start_time': -1}},
{'$match': {'symbol': {'$regex': '^'},
'start_time': {'$lte': dt.datetime(2018, 5, 11)}}},
{'$group': {'_id': '$symbol', 'metadata': {'$first': '$metadata'}}},
{'$project': {'_id': 0, 'symbol': '$_id'}}
]
MetadataStore.list_symbols(ms, as_of=dt.datetime(2018, 5, 11))
ms.aggregate.assert_called_once_with(expected_pipeline)
def test_list_symbols_as_of_regex():
ms = create_autospec(MetadataStore)
ms.aggregate.return_value = []
expected_pipeline = [
{'$sort': {'symbol': 1, 'start_time': -1}},
{'$match': {'symbol': {'$regex': 'test.*'},
'start_time': {'$lte': dt.datetime(2018, 5, 11)}}},
{'$group': {'_id': '$symbol', 'metadata': {'$first': '$metadata'}}},
{'$project': {'_id': 0, 'symbol': '$_id'}}
]
MetadataStore.list_symbols(ms,
regex='test.*',
as_of=dt.datetime(2018, 5, 11))
ms.aggregate.assert_called_once_with(expected_pipeline)
def test_list_symbols_metadata_query():
ms = create_autospec(MetadataStore)
ms.aggregate.return_value = []
expected_pipeline = [
{'$sort': {'symbol': 1, 'start_time': -1}},
{'$group': {'_id': '$symbol', 'metadata': {'$first': '$metadata'}}},
{'$match': {'metadata.foo': 'bar'}},
{'$project': {'_id': 0, 'symbol': '$_id'}}
]
MetadataStore.list_symbols(ms, foo='bar')
ms.aggregate.assert_called_once_with(expected_pipeline)
def test_list_symbols_all_options():
ms = create_autospec(MetadataStore)
ms.aggregate.return_value = []
expected_pipeline = [
{'$sort': {'symbol': 1, 'start_time': -1}},
{'$match': {'symbol': {'$regex': 'test.*'},
'start_time': {'$lte': dt.datetime(2018, 5, 11)}}},
{'$group': {'_id': '$symbol', 'metadata': {'$first': '$metadata'}}},
{'$match': {'metadata.foo': 'bar'}},
{'$project': {'_id': 0, 'symbol': '$_id'}}
]
MetadataStore.list_symbols(ms,
regex='test.*',
as_of=dt.datetime(2018, 5, 11),
foo='bar')
ms.aggregate.assert_called_once_with(expected_pipeline)
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import DOMAIN
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {STATE_ON, STATE_OFF}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if cur_state.state == state.state:
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
if state.state == STATE_ON:
service = SERVICE_TURN_ON
elif state.state == STATE_OFF:
service = SERVICE_TURN_OFF
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Automation states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
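# Illustrative call, e.g. when restoring a stored scene (the entity id and
# target state are assumptions):
#
#     from homeassistant.core import State
#     await async_reproduce_states(hass, [State("automation.morning_routine", "on")])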
|
from matchzoo.engine import base_task
class Ranking(base_task.BaseTask):
"""Ranking Task.
Examples:
>>> ranking_task = Ranking()
>>> ranking_task.metrics = ['map', 'ndcg']
>>> ranking_task.output_shape
(1,)
>>> ranking_task.output_dtype
<class 'float'>
>>> print(ranking_task)
Ranking Task
"""
@classmethod
def list_available_losses(cls) -> list:
""":return: a list of available losses."""
return ['mse']
@classmethod
def list_available_metrics(cls) -> list:
""":return: a list of available metrics."""
return ['map']
@property
def output_shape(self) -> tuple:
""":return: output shape of a single sample of the task."""
return 1,
@property
def output_dtype(self):
""":return: target data type, expect `float` as output."""
return float
def __str__(self):
""":return: Task name as string."""
return 'Ranking Task'
|
import numpy as np
from itertools import tee
from keras.utils.generic_utils import slice_arrays
from keras.models import model_from_yaml
from keras.optimizers import get as get_optimizer
from .utils import subtract_params
from .parameter import SocketClient, HttpClient
class SparkWorker(object):
"""Synchronous Spark worker. This code will be executed on workers.
"""
def __init__(self, yaml, parameters, train_config, master_optimizer,
master_loss, master_metrics, custom_objects):
self.yaml = yaml
self.parameters = parameters
self.train_config = train_config
self.master_optimizer = master_optimizer
self.master_loss = master_loss
self.master_metrics = master_metrics
self.custom_objects = custom_objects
self.model = None
def train(self, data_iterator):
"""Train a keras model on a worker
"""
optimizer = get_optimizer(self.master_optimizer)
self.model = model_from_yaml(self.yaml, self.custom_objects)
self.model.compile(optimizer=optimizer,
loss=self.master_loss, metrics=self.master_metrics)
self.model.set_weights(self.parameters.value)
feature_iterator, label_iterator = tee(data_iterator, 2)
x_train = np.asarray([x for x, y in feature_iterator])
y_train = np.asarray([y for x, y in label_iterator])
self.model.compile(optimizer=get_optimizer(self.master_optimizer),
loss=self.master_loss,
metrics=self.master_metrics)
weights_before_training = self.model.get_weights()
if x_train.shape[0] > self.train_config.get('batch_size'):
self.model.fit(x_train, y_train, **self.train_config)
weights_after_training = self.model.get_weights()
deltas = subtract_params(
weights_before_training, weights_after_training)
yield deltas
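# Illustrative driver-side sketch (rdd is an assumed Spark RDD of
# (features, label) pairs; this only shows how the worker's train generator
# is typically consumed, not the full elephas workflow):
#
#     worker = SparkWorker(yaml, parameters, train_config, master_optimizer,
#                          master_loss, master_metrics, custom_objects)
#     deltas = rdd.mapPartitions(worker.train).collect()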
class AsynchronousSparkWorker(object):
"""Asynchronous Spark worker. This code will be executed on workers.
"""
def __init__(self, yaml, parameters, parameter_server_mode, train_config, frequency,
master_optimizer, master_loss, master_metrics, custom_objects):
if parameter_server_mode == 'http':
self.client = HttpClient()
elif parameter_server_mode == 'socket':
self.client = SocketClient()
else:
raise ValueError("Parameter server mode has to be either `http` or `socket`, "
"got {}".format(parameter_server_mode))
self.train_config = train_config
self.frequency = frequency
self.master_optimizer = master_optimizer
self.master_loss = master_loss
self.master_metrics = master_metrics
self.yaml = yaml
self.parameters = parameters
self.custom_objects = custom_objects
self.model = None
def train(self, data_iterator):
"""Train a keras model on a worker and send asynchronous updates
to parameter server
"""
feature_iterator, label_iterator = tee(data_iterator, 2)
x_train = np.asarray([x for x, y in feature_iterator])
y_train = np.asarray([y for x, y in label_iterator])
if x_train.size == 0:
return
optimizer = get_optimizer(self.master_optimizer)
self.model = model_from_yaml(self.yaml, self.custom_objects)
self.model.compile(optimizer=get_optimizer(self.master_optimizer),
loss=self.master_loss, metrics=self.master_metrics)
self.model.set_weights(self.parameters.value)
epochs = self.train_config['epochs']
batch_size = self.train_config.get('batch_size')
nb_train_sample = x_train.shape[0]
nb_batch = int(np.ceil(nb_train_sample / float(batch_size)))
index_array = np.arange(nb_train_sample)
batches = [
(i * batch_size, min(nb_train_sample, (i + 1) * batch_size))
for i in range(0, nb_batch)
]
if self.frequency == 'epoch':
for epoch in range(epochs):
weights_before_training = self.client.get_parameters()
self.model.set_weights(weights_before_training)
self.train_config['epochs'] = 1
if x_train.shape[0] > batch_size:
self.model.fit(x_train, y_train, **self.train_config)
self.train_config['epochs'] = epochs
weights_after_training = self.model.get_weights()
deltas = subtract_params(
weights_before_training, weights_after_training)
self.client.update_parameters(deltas)
elif self.frequency == 'batch':
for epoch in range(epochs):
if x_train.shape[0] > batch_size:
for (batch_start, batch_end) in batches:
weights_before_training = self.client.get_parameters()
self.model.set_weights(weights_before_training)
batch_ids = index_array[batch_start:batch_end]
x = slice_arrays(x_train, batch_ids)
y = slice_arrays(y_train, batch_ids)
self.model.train_on_batch(x, y)
weights_after_training = self.model.get_weights()
deltas = subtract_params(
weights_before_training, weights_after_training)
self.client.update_parameters(deltas)
else:
raise ValueError(
'frequency parameter can be `epoch` or `batch`, got {}'.format(self.frequency))
yield []
|
from aiohttp import web
import voluptuous as vol
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_NAME,
CONF_WEBHOOK_ID,
HTTP_OK,
HTTP_UNPROCESSABLE_ENTITY,
STATE_NOT_HOME,
)
from homeassistant.helpers import config_entry_flow
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util import slugify
from .const import DOMAIN
CONF_MOBILE_BEACONS = "mobile_beacons"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Optional(CONF_MOBILE_BEACONS, default=[]): vol.All(
cv.ensure_list, [cv.string]
)
}
)
},
extra=vol.ALLOW_EXTRA,
)
ATTR_ADDRESS = "address"
ATTR_BEACON_ID = "beaconUUID"
ATTR_CURRENT_LATITUDE = "currentLatitude"
ATTR_CURRENT_LONGITUDE = "currentLongitude"
ATTR_DEVICE = "device"
ATTR_ENTRY = "entry"
BEACON_DEV_PREFIX = "beacon"
LOCATION_ENTRY = "1"
LOCATION_EXIT = "0"
TRACKER_UPDATE = f"{DOMAIN}_tracker_update"
def _address(value: str) -> str:
r"""Coerce address by replacing '\n' with ' '."""
return value.replace("\n", " ")
WEBHOOK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ADDRESS): vol.All(cv.string, _address),
vol.Required(ATTR_DEVICE): vol.All(cv.string, slugify),
vol.Required(ATTR_ENTRY): vol.Any(LOCATION_ENTRY, LOCATION_EXIT),
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Required(ATTR_NAME): vol.All(cv.string, slugify),
vol.Optional(ATTR_CURRENT_LATITUDE): cv.latitude,
vol.Optional(ATTR_CURRENT_LONGITUDE): cv.longitude,
vol.Optional(ATTR_BEACON_ID): cv.string,
},
extra=vol.ALLOW_EXTRA,
)
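# Illustrative webhook payload accepted by the schema above (form-encoded by
# the Geofency app; the concrete values are assumptions):
#
#     {
#         "address": "Main Street 1",
#         "device": "my-phone",
#         "entry": "1",
#         "latitude": 48.2082,
#         "longitude": 16.3738,
#         "name": "Home",
#     }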
async def async_setup(hass, hass_config):
"""Set up the Geofency component."""
config = hass_config.get(DOMAIN, {})
mobile_beacons = config.get(CONF_MOBILE_BEACONS, [])
hass.data[DOMAIN] = {
"beacons": [slugify(beacon) for beacon in mobile_beacons],
"devices": set(),
"unsub_device_tracker": {},
}
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook from Geofency."""
try:
data = WEBHOOK_SCHEMA(dict(await request.post()))
except vol.MultipleInvalid as error:
return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY)
if _is_mobile_beacon(data, hass.data[DOMAIN]["beacons"]):
return _set_location(hass, data, None)
if data["entry"] == LOCATION_ENTRY:
location_name = data["name"]
else:
location_name = STATE_NOT_HOME
if ATTR_CURRENT_LATITUDE in data:
data[ATTR_LATITUDE] = data[ATTR_CURRENT_LATITUDE]
data[ATTR_LONGITUDE] = data[ATTR_CURRENT_LONGITUDE]
return _set_location(hass, data, location_name)
def _is_mobile_beacon(data, mobile_beacons):
"""Check if we have a mobile beacon."""
return ATTR_BEACON_ID in data and data["name"] in mobile_beacons
def _device_name(data):
"""Return name of device tracker."""
if ATTR_BEACON_ID in data:
return f"{BEACON_DEV_PREFIX}_{data['name']}"
return data["device"]
def _set_location(hass, data, location_name):
"""Fire HA event to set location."""
device = _device_name(data)
async_dispatcher_send(
hass,
TRACKER_UPDATE,
device,
(data[ATTR_LATITUDE], data[ATTR_LONGITUDE]),
location_name,
data,
)
return web.Response(text=f"Setting location for {device}", status=HTTP_OK)
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
hass.components.webhook.async_register(
DOMAIN, "Geofency", entry.data[CONF_WEBHOOK_ID], handle_webhook
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER)
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)()
await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER)
return True
async_remove_entry = config_entry_flow.webhook_async_remove_entry
|
import logging
from kalliope import SignalLauncher, Utils
from kalliope.core import NeuronModule
from kalliope.core.ConfigurationManager import SettingEditor, YAMLLoader
from kalliope.core.Models.settings.Stt import Stt
from kalliope.core.Models.settings.Trigger import Trigger
from kalliope.core.Models.settings.Tts import Tts
from kalliope.core.Models.settings.Player import Player
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Settings(NeuronModule):
"""
This neuron allows dynamically updating the settings of Kalliope.
Currently available settings parameters:
- default_tts
- default_stt
- default_trigger
- default_player
- text_to_speech
- speech_to_text
- triggers
- players
- hooks
- var_files
- variable
- deaf
- mute
- recognizer_multiplier
- recognizer_energy_ratio
- recognizer_recording_timeout
- recognizer_recording_timeout_with_silence
"""
def __init__(self, **kwargs):
super(Settings, self).__init__(**kwargs)
# Modules
self.default_tts = kwargs.get("default_tts", None)
self.text_to_speech = kwargs.get("text_to_speech", None)
self.default_stt = kwargs.get("default_stt", None)
self.speech_to_text = kwargs.get("speech_to_text", None)
self.default_trigger = kwargs.get("default_trigger", None)
self.triggers = kwargs.get("triggers", None)
self.default_player = kwargs.get("default_player", None)
self.players = kwargs.get("players", None)
# Options
self.deaf = kwargs.get("deaf", None)
self.mute = kwargs.get("mute", None)
self.recognizer_multiplier = kwargs.get("recognizer_multiplier", None)
self.recognizer_energy_ratio = kwargs.get("recognizer_energy_ratio", None)
self.recognizer_recording_timeout = kwargs.get("recognizer_recording_timeout", None)
self.recognizer_recording_timeout_with_silence = kwargs.get("recognizer_recording_timeout_with_silence", None)
# Hooks
self.hooks = kwargs.get("hooks", None)
# Variables
self.var_files = kwargs.get("var_files", None)
self.variable = kwargs.get("variable", None)
# Not applicable yet as variables are applied during brain loading.
# REST API
# RESOURCES: does it even make sense to update this one?
if self._is_parameters_ok():
self._set_settings()
def _is_parameters_ok(self):
"""
Check the validity for each parameter
:return: True if all parameters are set correctly, False otherwise.
"""
# Players
if self.default_player:
if not SettingEditor._check_name_in_list_settings_entry(self.default_player, self.settings.players):
logger.debug("[Settings] default_player %s is not defined in settings file ",
self.default_player)
return False
if self.players:
if not isinstance(self.players, list):
logger.debug("[Settings] players current type: %s. players should be a list", type(self.players))
return False
for player_el in self.players:
if not isinstance(player_el, dict):
logger.debug("[Settings] player current element type: %s. player element should be a dict",
type(player_el))
return False
# STT
if self.default_stt:
if not SettingEditor._check_name_in_list_settings_entry(self.default_stt, self.settings.stts):
logger.debug("[Settings] default_stt %s is not defined in settings file ", self.default_stt)
return False
if self.speech_to_text:
if not isinstance(self.speech_to_text, list):
logger.debug("[Settings] speech_to_text current type: %s. speech_to_text should be a list",
type(self.speech_to_text))
return False
for stt_el in self.speech_to_text:
if not isinstance(stt_el, dict):
logger.debug(
"[Settings] speech_to_text current element type: %s. speech_to_text element should be a dict",
type(stt_el))
return False
# TRIGGER
if self.default_trigger:
if not SettingEditor._check_name_in_list_settings_entry(self.default_trigger, self.settings.triggers):
logger.debug("[Settings] default_trigger %s is not defined in settings file ",
self.default_trigger)
return False
if self.triggers:
if not isinstance(self.triggers, list):
logger.debug("[Settings] triggers current type: %s. triggers should be a list", type(self.triggers))
return False
for trigger_el in self.triggers:
if not isinstance(trigger_el, dict):
logger.debug("[Settings] triggers current element type: %s. triggers element should be a dict",
type(trigger_el))
return False
# TTS
if self.default_tts:
if not SettingEditor._check_name_in_list_settings_entry(self.default_tts, self.settings.ttss):
logger.debug("[Settings] default_tts %s is not defined in settings file ", self.default_tts)
return False
if self.text_to_speech:
if not isinstance(self.text_to_speech, list):
logger.debug("[Settings] text_to_speech current type: %s. text_to_speech should be a list",
type(self.text_to_speech))
return False
for tts_el in self.text_to_speech:
if not isinstance(tts_el, dict):
logger.debug(
"[Settings] text_to_speech element current type: %s. text_to_speech element should be a dict",
type(tts_el))
return False
# Options
if self.deaf is not None:
if not isinstance(self.deaf, bool):
logger.debug("[Settings] deaf %s is not a correct value, you must define True or False", self.deaf)
return False
if self.mute is not None:
if not isinstance(self.mute, bool):
logger.debug("[Settings] mute %s is not a correct value, you must define True or False", self.mute)
return False
if self.recognizer_multiplier is not None:
if not isinstance(self.recognizer_multiplier, int):
logger.debug("[Settings] recognizer_multiplier %s is not a correct integer, you must define a number",
self.recognizer_multiplier)
return False
if self.recognizer_energy_ratio is not None:
if not isinstance(self.recognizer_energy_ratio, int):
logger.debug(
"[Settings] recognizer_energy_ratio %s is not a correct integer, you must define a number",
self.recognizer_energy_ratio)
return False
if self.recognizer_recording_timeout is not None:
if not isinstance(self.recognizer_recording_timeout, int):
logger.debug(
"[Settings] recognizer_recording_timeout %s is not a correct integer, you must define a number",
self.recognizer_recording_timeout)
return False
if self.recognizer_recording_timeout_with_silence is not None:
if not isinstance(self.recognizer_recording_timeout_with_silence, int):
logger.debug(
"[Settings] recognizer_recording_timeout_with_silence %s is not a correct integer, you must define a number",
self.recognizer_recording_timeout_with_silence)
return False
# Hooks
if self.hooks:
if not isinstance(self.hooks, dict):
logger.debug("[Settings] hooks property %s is not a dictionary as it should be.", type(self.hooks))
return False
for hook_name, synap in self.hooks.items():
if not isinstance(synap, str) and not isinstance(synap, list):
logger.debug(
"[Settings] for hook element %s the type %s is nor a string nor a list as it should be.",
hook_name, type(synap))
return False
# Variables
if self.var_files:
if not isinstance(self.var_files, list):
logger.debug("[Settings] var_files property %s is not a list as it should be.", type(self.var_files))
return False
for file_name in self.var_files:
var = Utils.get_real_file_path(file_name)
if var is None:
logger.debug("[Settings] Variables file %s not found", file_name)
return False
if self.variable:
if not isinstance(self.variable, dict):
logger.debug("[Settings] variable property %s is not a dict as it should be.", type(self.variable))
return False
return True
def _set_settings(self):
# PLAYERS
if self.default_player:
SettingEditor.set_default_player(self.default_player)
if self.players:
for player_el in self.players:
if isinstance(player_el, dict):
for player_name in player_el:
name = player_name
parameters = player_el[name]
new_player = Player(name=name, parameters=parameters)
SettingEditor.set_players(new_player)
# STT
if self.default_stt:
SettingEditor.set_default_stt(self.default_stt)
if self.speech_to_text:
for stt_el in self.speech_to_text:
if isinstance(stt_el, dict):
for stt_name in stt_el:
name = stt_name
parameters = stt_el[name]
new_stt = Stt(name=name, parameters=parameters)
SettingEditor.set_stts(new_stt)
# TRIGGER
if self.default_trigger:
SettingEditor.set_default_trigger(self.default_trigger)
if self.triggers:
for trigger_el in self.triggers:
if isinstance(trigger_el, dict):
for trigger_name in trigger_el:
name = trigger_name
parameters = trigger_el[name]
new_trigger = Trigger(name=name, parameters=parameters)
SettingEditor.set_trigger(new_trigger)
# TTS
if self.default_tts:
SettingEditor.set_default_tts(self.default_tts)
if self.text_to_speech:
for tts_el in self.text_to_speech:
if isinstance(tts_el, dict):
for tts_name in tts_el:
name = tts_name
parameters = tts_el[name]
new_tts = Tts(name=name, parameters=parameters)
SettingEditor.set_ttss(new_tts)
# Options
if self.deaf is not None:
signal_order = SignalLauncher.get_order_instance()
if signal_order is not None:
SettingEditor.set_deaf_status(signal_order.trigger_instance, self.deaf)
if self.mute is not None:
SettingEditor.set_mute_status(self.mute)
if self.recognizer_multiplier is not None:
SettingEditor.set_recognizer_multiplier(self.recognizer_multiplier)
if self.recognizer_energy_ratio is not None:
SettingEditor.set_recognizer_energy_ratio(self.recognizer_energy_ratio)
if self.recognizer_recording_timeout is not None:
SettingEditor.set_recognizer_recording_timeout(self.recognizer_recording_timeout)
if self.recognizer_recording_timeout_with_silence is not None:
SettingEditor.set_recognizer_recording_timeout_with_silence(self.recognizer_recording_timeout_with_silence)
# Hooks
if self.hooks:
SettingEditor.set_hooks(self.hooks)
# Variables
if self.var_files:
variables = dict()
for files in self.var_files:
var = Utils.get_real_file_path(files)
# var is None has been checked previously in _is_parameters_ok() method
variables.update(YAMLLoader.get_config(var))
SettingEditor.set_variables(variables)
if self.variable is not None:
SettingEditor.set_variables(self.variable)
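# Illustrative usage sketch (hypothetical brain.yml snippet, not taken from this
# module): a synapse could invoke this neuron to switch Kalliope to deaf mode and
# change the default TTS at runtime.
#
#   - name: "quiet-mode"
#     signals:
#       - order: "stop listening"
#     neurons:
#       - settings:
#           deaf: True
#           default_tts: "pico2wave"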
|
import numpy as np
from scipy import linalg
from ..forward import is_fixed_orient
from ..minimum_norm.inverse import _check_reference, _log_exp_var
from ..utils import logger, verbose, warn
from .mxne_inverse import (_check_ori, _make_sparse_stc, _prepare_gain,
_reapply_source_weighting, _compute_residual,
_make_dipoles_sparse)
@verbose
def _gamma_map_opt(M, G, alpha, maxit=10000, tol=1e-6, update_mode=1,
group_size=1, gammas=None, verbose=None):
"""Hierarchical Bayes (Gamma-MAP).
Parameters
----------
M : array, shape=(n_sensors, n_times)
Observation.
G : array, shape=(n_sensors, n_sources)
Forward operator.
alpha : float
Regularization parameter (noise variance).
maxit : int
Maximum number of iterations.
tol : float
Tolerance parameter for convergence.
group_size : int
Number of consecutive sources which use the same gamma.
update_mode : int
Update mode, 1: MacKay update (default), 2: Modified MacKay update.
gammas : array, shape=(n_sources,)
Initial values for posterior variances (gammas). If None, a
variance of 1.0 is used.
%(verbose)s
Returns
-------
X : array, shape=(n_active, n_times)
Estimated source time courses.
active_set : array, shape=(n_active,)
Indices of active sources.
"""
G = G.copy()
M = M.copy()
if gammas is None:
gammas = np.ones(G.shape[1], dtype=np.float64)
eps = np.finfo(float).eps
n_sources = G.shape[1]
n_sensors, n_times = M.shape
# apply normalization so the numerical values are sane
M_normalize_constant = linalg.norm(np.dot(M, M.T), ord='fro')
M /= np.sqrt(M_normalize_constant)
alpha /= M_normalize_constant
G_normalize_constant = linalg.norm(G, ord=np.inf)
G /= G_normalize_constant
if n_sources % group_size != 0:
raise ValueError('Number of sources has to be evenly divisible by the '
'group size')
n_active = n_sources
active_set = np.arange(n_sources)
gammas_full_old = gammas.copy()
if update_mode == 2:
denom_fun = np.sqrt
else:
# do nothing
def denom_fun(x):
return x
last_size = -1
for itno in range(maxit):
gammas[np.isnan(gammas)] = 0.0
gidx = (np.abs(gammas) > eps)
active_set = active_set[gidx]
gammas = gammas[gidx]
# update only active gammas (once set to zero it stays at zero)
if n_active > len(active_set):
n_active = active_set.size
G = G[:, gidx]
CM = np.dot(G * gammas[np.newaxis, :], G.T)
CM.flat[::n_sensors + 1] += alpha
# Invert CM keeping symmetry
U, S, V = linalg.svd(CM, full_matrices=False)
S = S[np.newaxis, :]
del CM
CMinv = np.dot(U / (S + eps), U.T)
CMinvG = np.dot(CMinv, G)
A = np.dot(CMinvG.T, M) # mult. w. Diag(gamma) in gamma update
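# Sketch of the fixed-point updates computed in the branches below (rewritten
# from this code, not quoted from the reference):
#   mode 1: gamma_i <- gamma_i * mean_t(|A_it|^2) / (G_i^T CM^{-1} G_i)
#   mode 2: gamma_i <- gamma_i * sqrt(mean_t(|A_it|^2) / (G_i^T CM^{-1} G_i))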
if update_mode == 1:
# MacKay fixed point update (10) in [1]
numer = gammas ** 2 * np.mean((A * A.conj()).real, axis=1)
denom = gammas * np.sum(G * CMinvG, axis=0)
elif update_mode == 2:
# modified MacKay fixed point update (11) in [1]
numer = gammas * np.sqrt(np.mean((A * A.conj()).real, axis=1))
denom = np.sum(G * CMinvG, axis=0) # sqrt is applied below
else:
raise ValueError('Invalid value for update_mode')
if group_size == 1:
if denom is None:
gammas = numer
else:
gammas = numer / np.maximum(denom_fun(denom),
np.finfo('float').eps)
else:
numer_comb = np.sum(numer.reshape(-1, group_size), axis=1)
if denom is None:
gammas_comb = numer_comb
else:
denom_comb = np.sum(denom.reshape(-1, group_size), axis=1)
gammas_comb = numer_comb / denom_fun(denom_comb)
gammas = np.repeat(gammas_comb / group_size, group_size)
# compute convergence criterion
gammas_full = np.zeros(n_sources, dtype=np.float64)
gammas_full[active_set] = gammas
err = (np.sum(np.abs(gammas_full - gammas_full_old)) /
np.sum(np.abs(gammas_full_old)))
gammas_full_old = gammas_full
breaking = (err < tol or n_active == 0)
if len(gammas) != last_size or breaking:
logger.info('Iteration: %d\t active set size: %d\t convergence: '
'%0.3e' % (itno, len(gammas), err))
last_size = len(gammas)
if breaking:
break
if itno < maxit - 1:
logger.info('\nConvergence reached !\n')
else:
warn('\nConvergence NOT reached !\n')
# undo normalization and compute final posterior mean
n_const = np.sqrt(M_normalize_constant) / G_normalize_constant
x_active = n_const * gammas[:, None] * A
return x_active, active_set
@verbose
def gamma_map(evoked, forward, noise_cov, alpha, loose="auto", depth=0.8,
xyz_same_gamma=True, maxit=10000, tol=1e-6, update_mode=1,
gammas=None, pca=True, return_residual=False,
return_as_dipoles=False, rank=None, pick_ori=None, verbose=None):
"""Hierarchical Bayes (Gamma-MAP) sparse source localization method.
Models each source time course using a zero-mean Gaussian prior with an
unknown variance (gamma) parameter. During estimation, most gammas are
driven to zero, resulting in a sparse source estimate, as in
:footcite:`WipfEtAl2007` and :footcite:`WipfNagarajan2009`.
For fixed-orientation forward operators, a separate gamma is used for each
source time course, while for free-orientation forward operators, the same
gamma is used for the three source time courses at each source space point
(separate gammas can be used in this case by using xyz_same_gamma=False).
Parameters
----------
evoked : instance of Evoked
Evoked data to invert.
forward : dict
Forward operator.
noise_cov : instance of Covariance
Noise covariance to compute whitener.
alpha : float
Regularization parameter (noise variance).
%(loose)s
%(depth)s
xyz_same_gamma : bool
Use same gamma for xyz current components at each source space point.
Recommended for free-orientation forward solutions.
maxit : int
Maximum number of iterations.
tol : float
Tolerance parameter for convergence.
update_mode : int
Update mode, 1: MacKay update (default), 2: Modified MacKay update.
gammas : array, shape=(n_sources,)
Initial values for posterior variances (gammas). If None, a
variance of 1.0 is used.
pca : bool
If True the rank of the data is reduced to the true dimension.
return_residual : bool
If True, the residual is returned as an Evoked instance.
return_as_dipoles : bool
If True, the sources are returned as a list of Dipole instances.
%(rank_None)s
.. versionadded:: 0.18
%(pick_ori)s
%(verbose)s
Returns
-------
stc : instance of SourceEstimate
Source time courses.
residual : instance of Evoked
The residual a.k.a. data not explained by the sources.
Only returned if return_residual is True.
References
----------
.. footbibliography::
"""
_check_reference(evoked)
forward, gain, gain_info, whitener, source_weighting, mask = _prepare_gain(
forward, evoked.info, noise_cov, pca, depth, loose, rank)
_check_ori(pick_ori, forward)
group_size = 1 if (is_fixed_orient(forward) or not xyz_same_gamma) else 3
# get the data
sel = [evoked.ch_names.index(name) for name in gain_info['ch_names']]
M = evoked.data[sel]
# whiten the data
logger.info('Whitening data matrix.')
M = np.dot(whitener, M)
# run the optimization
X, active_set = _gamma_map_opt(M, gain, alpha, maxit=maxit, tol=tol,
update_mode=update_mode, gammas=gammas,
group_size=group_size, verbose=verbose)
if len(active_set) == 0:
raise Exception("No active dipoles found. alpha is too big.")
M_estimate = gain[:, active_set] @ X
# Reapply weights to have correct unit
X = _reapply_source_weighting(X, source_weighting, active_set)
if return_residual:
residual = _compute_residual(forward, evoked, X, active_set,
gain_info)
if group_size == 1 and not is_fixed_orient(forward):
# make sure each source has 3 components
idx, offset = divmod(active_set, 3)
active_src = np.unique(idx)
if len(X) < 3 * len(active_src):
X_xyz = np.zeros((len(active_src), 3, X.shape[1]), dtype=X.dtype)
idx = np.searchsorted(active_src, idx)
X_xyz[idx, offset, :] = X
X_xyz.shape = (len(active_src) * 3, X.shape[1])
X = X_xyz
active_set = (active_src[:, np.newaxis] * 3 + np.arange(3)).ravel()
source_weighting[source_weighting == 0] = 1 # zeros
gain_active = gain[:, active_set] / source_weighting[active_set]
del source_weighting
tmin = evoked.times[0]
tstep = 1.0 / evoked.info['sfreq']
if return_as_dipoles:
out = _make_dipoles_sparse(X, active_set, forward, tmin, tstep, M,
gain_active, active_is_idx=True)
else:
out = _make_sparse_stc(X, active_set, forward, tmin, tstep,
active_is_idx=True, pick_ori=pick_ori,
verbose=verbose)
_log_exp_var(M, M_estimate, prefix='')
logger.info('[done]')
if return_residual:
out = out, residual
return out
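# Illustrative usage sketch (hypothetical objects; evoked, forward and noise_cov
# would come from an MNE pipeline, and alpha is an example value):
# stc, residual = gamma_map(evoked, forward, noise_cov, alpha=0.5,
#                           return_residual=True)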
|
from marshmallow import fields
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
from lemur.schemas import (
AssociatedRoleSchema,
AssociatedCertificateSchema,
AssociatedAuthoritySchema,
)
class UserInputSchema(LemurInputSchema):
id = fields.Integer()
username = fields.String(required=True)
email = fields.Email(required=True)
password = fields.String() # TODO add complexity requirements
active = fields.Boolean()
roles = fields.Nested(AssociatedRoleSchema, many=True, missing=[])
certificates = fields.Nested(AssociatedCertificateSchema, many=True, missing=[])
authorities = fields.Nested(AssociatedAuthoritySchema, many=True, missing=[])
class UserOutputSchema(LemurOutputSchema):
id = fields.Integer()
username = fields.String()
email = fields.Email()
active = fields.Boolean()
roles = fields.Nested(AssociatedRoleSchema, many=True)
profile_picture = fields.String()
user_input_schema = UserInputSchema()
user_output_schema = UserOutputSchema()
users_output_schema = UserOutputSchema(many=True)
class UserNestedOutputSchema(LemurOutputSchema):
__envelope__ = False
id = fields.Integer()
username = fields.String()
email = fields.Email()
active = fields.Boolean()
|
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import operator
import threading
import time
import requests
import six
import tenacity
from ._impl import PIL, piexif, json
class InstaDownloader(threading.Thread):
"""The background InstaLooter worker class.
"""
_tenacity_options = {
"stop": tenacity.stop_after_attempt(5),
"wait": tenacity.wait_exponential(1, 10),
}
def __init__(self,
queue,
destination,
namegen,
add_metadata=False,
dump_json=False,
dump_only=False,
pbar=None,
session=None):
super(InstaDownloader, self).__init__()
self.queue = queue
self.destination = destination
self.namegen = namegen
self.session = session or requests.Session()
self.pbar = pbar
self.dump_only = dump_only
self.dump_json = dump_json or dump_only
self.add_metadata = add_metadata
self._killed = False
self._downloading = None
retry = tenacity.retry(**self._tenacity_options)
self._DOWNLOAD_METHODS = {
"GraphImage": retry(self._download_image),
"GraphVideo": retry(self._download_video),
"GraphSidecar": self._download_sidecar,
}
def _download_image(self, media):
url = media['display_url']
filename = self.namegen.file(media)
if self.destination.exists(filename):
return
# FIXME: find a way to remove failed temporary downloads
with self.destination.open(filename, "wb") as f:
with self.session.get(url) as res:
f.write(res.content)
self._set_time(media, filename)
def _download_video(self, media):
url = media['video_url']
filename = self.namegen.file(media)
if self.destination.exists(filename):
return
# FIXME: find a way to remove failed temporary downloads
with self.destination.open(filename, "wb") as f:
with self.session.get(url) as res:
for chunk in res.iter_content(io.DEFAULT_BUFFER_SIZE):
f.write(chunk)
self._set_time(media, filename)
def _download_sidecar(self, media):
edges = media.pop('edge_sidecar_to_children')['edges']
for edge in six.moves.map(operator.itemgetter('node'), edges):
for key, value in six.iteritems(media):
edge.setdefault(key, value)
self._DOWNLOAD_METHODS[edge['__typename']](edge)
def _set_time(self, media, filename):
details = {}
details["modified"] = details["accessed"] = details["created"] = \
media.get('taken_at_timestamp') or media['date']
self.destination.setinfo(filename, {"details": details})
def _dump(self, media):
basename = self.namegen.base(media)
filename = "{}.json".format(basename)
mode = "w" if six.PY3 else "wb"
with self.destination.open(filename, mode) as dest:
json.dump(media, dest, indent=4, sort_keys=True)
self._set_time(media, filename)
def run(self):
while not self._killed:
try:
media = self.queue.get_nowait()
# Received a poison pill: break the loop
if media is None:
self._killed = True
else:
# Download media
if not self.dump_only:
self._DOWNLOAD_METHODS[media["__typename"]](media)
# Dump JSON metadata if needed
if self.dump_json:
self._dump(media)
# Update progress bar if any
if self.pbar is not None and not self._killed:
with self.pbar.get_lock():
self.pbar.update()
self.queue.task_done()
except six.moves.queue.Empty:
time.sleep(1)
def terminate(self):
self._killed = True
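# Illustrative usage sketch (hypothetical queue/destination/namegen objects):
# worker = InstaDownloader(queue, destination, namegen, dump_json=True)
# worker.start()
# ... enqueue media dicts ...
# queue.put(None)   # poison pill: the run() loop exits after receiving it
# worker.join()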
|
from __future__ import print_function
import datetime
import logging
import re
import shlex
import subprocess
import sys
import time
from absl import flags
import plot_sysbench_results
# GLOBAL STRINGS
PER_SECOND_GRAPHS = 'per_second_graphs'
MYSQL_SVC_DB_INSTANCE_CORES = 'mysql_svc_db_instance_cores'
MYSQL_SVC_OLTP_TABLES_COUNT = 'mysql_svc_oltp_tables_count'
MYSQL_SVC_OLTP_TABLE_SIZE = 'mysql_svc_oltp_table_size'
SYSBENCH_WARMUP_SECONDS = 'sysbench_warmup_seconds'
SYSBENCH_RUN_SECONDS = 'sysbench_run_seconds'
SYSBENCH_THREAD_COUNT = 'sysbench_thread_count'
SYSBENCH_REPORT_INTERVAL = 'sysbench_report_interval'
THREAD_COUNT_LIST = 'thread_count_list'
GCE_BOOT_DISK_SIZE = 'gce_boot_disk_size'
GCE_BOOT_DISK_TYPE = 'gce_boot_disk_type'
MACHINE_TYPE = 'machine_type'
RUN_URI = 'run_uri'
RUN_STAGE = 'run_stage'
MYSQL_INSTANCE_STORAGE_SIZE = 'mysql_instance_storage_size'
STDOUT = 'STDOUT'
STDERR = 'STDERR'
DATETIME_FORMAT = '{:%m_%d_%Y_%H_%M_}'
URI_REGEX = r'run_uri=([a-z0-9]{8})'
ADDITIONAL_FLAGS = 'additional_flags'
SLEEP_TIME_BETWEEN_RUNS = 20 # seconds
TAIL_LINE_NUM = '20'
PKB_TIMEOUT = 43200 # max wait time for a run in seconds
TIME_MIN = 1
# FLAG STRINGS
PKB = './pkb.py --benchmarks=mysql_service'
STAGE_FLAG = ' --run_stage='
URI_FLAG = ' --run_uri='
THREAD_FLAG = ' --sysbench_thread_count='
RUN_TIME = ' --sysbench_run_seconds='
WARMUP_FLAG = ' --sysbench_warmup_seconds='
BOOT_DISK_SIZE_FLAG = ' --gce_boot_disk_size='
BOOT_DISK_TYPE_FLAG = ' --gce_boot_disk_type='
MACHINE_TYPE_FLAG = ' --machine_type='
MYSQL_SVC_DB_CORES_FLAG = ' --mysql_svc_db_instance_cores='
MYSQL_SVC_DB_TABLES_COUNT_FLAG = ' --mysql_svc_oltp_tables_count='
MYSQL_SVC_OLTP_TABLE_SIZE_FLAG = ' --mysql_svc_oltp_table_size='
MYSQL_INSTANCE_STORAGE_SIZE_FLAG = ' --mysql_instance_storage_size='
PROVISION = 'provision'
PREPARE = 'prepare'
RUN = 'run'
CLEANUP = 'cleanup'
TEARDOWN = 'teardown'
FLAGS = flags.FLAGS
flags.DEFINE_bool(PER_SECOND_GRAPHS, False,
'Indicator for using per second data collection. '
'To enable, set True.')
flags.DEFINE_integer(SYSBENCH_RUN_SECONDS, 480,
'The duration, in seconds, of each run phase with varying '
'thread count.')
flags.DEFINE_integer(SYSBENCH_WARMUP_SECONDS, 0,
'The duration, in seconds, of the warmup run in which '
'results are discarded.')
flags.DEFINE_list(THREAD_COUNT_LIST, [1, 2, 4, 8, 16, 32, 64, 128, 256, 512],
'The number of test threads on the client side.')
flags.DEFINE_integer(SYSBENCH_REPORT_INTERVAL, 1,
'The interval, in seconds, at which sysbench is asked to report '
'results.')
flags.DEFINE_string(RUN_URI, None,
'Run identifier. If provided, only the run phase '
'will be completed.')
flags.DEFINE_string(RUN_STAGE, None,
'List of phases to be executed. For example: '
'"--run_stage=provision,prepare". Available phases: '
'prepare, provision, run, cleanup, teardown.')
flags.DEFINE_string(GCE_BOOT_DISK_SIZE, '1000',
'The boot disk size in GB for GCP VMs.')
flags.DEFINE_string(GCE_BOOT_DISK_TYPE, 'pd-ssd',
'The boot disk type for GCP VMs.')
flags.DEFINE_string(MACHINE_TYPE, 'n1-standard-16',
'Machine type for GCE Virtual machines.')
flags.DEFINE_enum(MYSQL_SVC_DB_INSTANCE_CORES, '4', ['1', '4', '8', '16'],
'The number of cores to be provisioned for the DB instance.')
flags.DEFINE_string(MYSQL_SVC_OLTP_TABLES_COUNT, '4',
'The number of tables used in sysbench oltp.lua tests')
flags.DEFINE_string(MYSQL_SVC_OLTP_TABLE_SIZE, '100000',
'The number of rows of each table used in the oltp tests')
flags.DEFINE_string(MYSQL_INSTANCE_STORAGE_SIZE, '300',
'Storage size (in GB) for SQL instance.')
flags.DEFINE_list(ADDITIONAL_FLAGS, None,
'List of additional valid PKB mysql_service flags (strings). '
'For example: "--cloud_storage_bucket=bucket_name".')
# TODO: Implement flag for STDOUT/STDERR file paths.
class UnexpectedFileOutputError(Exception):
pass
class OperationTimeoutError(Exception):
pass
class CallFailureError(Exception):
pass
def driver(argv):
"""Driver initiates sysbench run with different thread counts.
If running this wrapper module from a bash script, the print statement
can be used to capture the run_uri. This allows the user to provision and
prepare the database and client VM less frequently, which is advantageous
when the specifications contain larger values.
Args:
argv: system arguments (command line flags).
"""
try: # Parse command line flags
argv = FLAGS(argv)
except flags.Error as e:
logging.error('%s\nUsage: %s ARGS\n%s', e, sys.argv[0], FLAGS)
sys.exit(1)
run_uri = FLAGS.run_uri
run_stage = FLAGS.run_stage
if not run_uri:
if not run_stage:
logging.info('No run_uri given. Will run full mysql_service_benchmark '
'test.')
run_uri = _provision_prepare_pkb()
logging.info('Provision and prepare completed. Run uri assigned: %s',
run_uri)
if run_stage == 'provision,prepare':
return run_uri
if not run_stage or run_stage == RUN:
_run(run_uri)
if not run_stage or run_stage == 'cleanup,teardown':
_cleanup_teardown_pkb(run_uri)
print(run_uri)
return run_uri
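# Illustrative shell usage (hypothetical script name) showing how the printed
# run_uri can be captured from a bash wrapper:
#   RUN_URI=$(python sysbench_wrapper.py --run_stage=provision,prepare)
#   python sysbench_wrapper.py --run_uri="$RUN_URI" --run_stage=run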
def _provision_prepare_pkb():
"""Run provision and prepare stage of PKB benchmark.
Returns:
run_uri: (string)
"""
pkb_cmd = PKB + STAGE_FLAG + PROVISION + ',' + PREPARE
pkb_cmd += (
BOOT_DISK_SIZE_FLAG + FLAGS.gce_boot_disk_size + BOOT_DISK_TYPE_FLAG +
FLAGS.gce_boot_disk_type + MACHINE_TYPE_FLAG + FLAGS.machine_type +
MYSQL_SVC_DB_CORES_FLAG + FLAGS.mysql_svc_db_instance_cores +
MYSQL_SVC_OLTP_TABLE_SIZE_FLAG + FLAGS.mysql_svc_oltp_table_size +
MYSQL_SVC_DB_TABLES_COUNT_FLAG + FLAGS.mysql_svc_oltp_tables_count +
MYSQL_INSTANCE_STORAGE_SIZE_FLAG + FLAGS.mysql_instance_storage_size)
if FLAGS.additional_flags:
pkb_cmd = _append_additional_flags(pkb_cmd)
# PKB run with prepare,provision, wait
logging.info('Provision and prepare sysbench with the following command:\n%s',
pkb_cmd)
[stdout_filename, stderr_filename] = _generate_filenames(PROVISION, None)
_execute_pkb_cmd(pkb_cmd, stdout_filename, stderr_filename)
return _get_run_uri(stderr_filename)
def _run(run_uri):
"""Run stage of PKB benchmark.
Args:
run_uri: (string).
"""
if FLAGS.per_second_graphs:
logging.info('Will generate per second logs for this run.')
plotter = plot_sysbench_results.Plotter(FLAGS.sysbench_run_seconds,
FLAGS.sysbench_report_interval,
run_uri)
run_iterations = len(FLAGS.thread_count_list)
logging.info(
'Beginning run phase. Will execute runs with %d different thread counts.',
run_iterations)
for t in FLAGS.thread_count_list:
pkb_cmd = (PKB + STAGE_FLAG + RUN + URI_FLAG + run_uri + THREAD_FLAG +
str(t) + RUN_TIME + str(FLAGS.sysbench_run_seconds) + WARMUP_FLAG
+ str(FLAGS.sysbench_warmup_seconds))
if FLAGS.additional_flags:
pkb_cmd = _append_additional_flags(pkb_cmd)
stdout_filename, stderr_filename = _generate_filenames(RUN, t)
logging.info('Executing PKB run with thread count: %s', t)
logging.info('Run sysbench with the following command:\n%s', pkb_cmd)
try:
_execute_pkb_cmd(pkb_cmd, stdout_filename, stderr_filename)
except CallFailureError:
logging.info('Call Failed. Ending run phase.')
break
if FLAGS.per_second_graphs:
logging.info('Adding Sysbench STDERR to per second graph.')
plotter.add_file(stderr_filename)
logging.info('Finished executing PKB run.')
time.sleep(SLEEP_TIME_BETWEEN_RUNS)
if FLAGS.per_second_graphs:
logging.info('Plotting per second graph for this series of runs.')
plotter.plot()
def _cleanup_teardown_pkb(run_uri):
"""Run cleanup stage of PKB benchmark.
Args:
run_uri: (string)
"""
logging.info('Run phase complete. Starting cleanup/teardown.')
pkb_cmd = (PKB + STAGE_FLAG + CLEANUP + ',' + TEARDOWN + URI_FLAG + run_uri)
logging.info('Cleanup, teardown sysbench with the following command:'
'\n%s', pkb_cmd)
[stdout_filename, stderr_filename] = _generate_filenames(CLEANUP, None)
_execute_pkb_cmd(pkb_cmd, stdout_filename, stderr_filename)
logging.info('Finished executing PKB cleanup and teardown.')
def _execute_pkb_cmd(pkb_cmd, stdout_filename, stderr_filename):
"""Given pkb run command, execute.
Args:
pkb_cmd: (str)
stdout_filename: (str) filename string.
stderr_filename: (str) filename string.
Raises:
Exception (CallFailureError): Popen call failed.
"""
stdout_file = open(stdout_filename, 'w+')
stderr_file = open(stderr_filename, 'w+')
pkb_cmd_list = shlex.split(pkb_cmd)
logging.info('pkb command list: %s', str(pkb_cmd_list))
start_time = time.time()
p = subprocess.Popen(pkb_cmd_list, stdout=stdout_file, stderr=stderr_file)
logging.info('Waiting for PKB call to finish.')
# TODO: implement timeout. Currently this call will wait indefinitely.
# Will probably have to be implemented with threading.
p.wait()
elapsed_time = time.time() - start_time
retcode = p.returncode
if retcode != 0:
raise CallFailureError('The call failed (return code is {}). '
'Check stderr for traceback.'.format(retcode))
logging.info('PKB call finished in %i seconds.', int(elapsed_time))
def _get_run_uri(filename):
"""Grab the last lines of file and return the first match with URI_REGEX.
Args:
filename: (string)
Returns:
run_uri: (string) Run identifier from file.
Raises:
Exception: No match with regular expression. Unexpected output to filename.
"""
grab_file_tail_cmd = ['tail', '-n', TAIL_LINE_NUM, filename]
p = subprocess.Popen(
grab_file_tail_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
lines = p.stdout.readlines()
r = re.compile(URI_REGEX)
for line in lines:
matches = r.search(line)
if matches:
return matches.group(matches.lastindex)
raise UnexpectedFileOutputError('No regex match with {}.'.format(filename))
def _append_additional_flags(pkb_cmd):
"""Appends additional flags to the end of pkb_cmd.
Args:
pkb_cmd: (string) Current pkb command.
Returns:
pkb_cmd: (string) PKB command with additional flags.
"""
for flag in FLAGS.additional_flags:
pkb_cmd += ' ' + flag
return pkb_cmd
def _generate_filenames(run_stage, thread_number):
"""Generate filenames for STDOUT and STDERR based on phase and time.
Args:
run_stage: Current stage of sysbench.
thread_number: (int) Number of sysbench threads for run iteration.
Returns:
[stdout_filename, stderr_filename]: list of filename strings.
"""
date_string = DATETIME_FORMAT.format(datetime.datetime.now())
if run_stage == RUN:
stdout_filename = '{}{}_THREAD_RUN_PKB_STDOUT.txt'.format(
date_string, thread_number)
stderr_filename = '{}{}_THREAD_RUN_PKB_STDERR.txt'.format(
date_string, thread_number)
else:
stdout_filename = '{}{}_PKB_STDOUT.txt'.format(date_string, run_stage)
stderr_filename = '{}{}_PKB_STDERR.txt'.format(date_string, run_stage)
logging.info('STDOUT will be copied to: %s', stdout_filename)
logging.info('STDERR will be copied to: %s', stderr_filename)
return [stdout_filename, stderr_filename]
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
driver(sys.argv)
|
from pymfy.api.devices.blind import Blind
from pymfy.api.devices.category import Category
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
DEVICE_CLASS_BLIND,
DEVICE_CLASS_SHUTTER,
CoverEntity,
)
from homeassistant.const import STATE_CLOSED, STATE_OPEN
from homeassistant.helpers.restore_state import RestoreEntity
from . import SomfyEntity
from .const import API, CONF_OPTIMISTIC, COORDINATOR, DOMAIN
BLIND_DEVICE_CATEGORIES = {Category.INTERIOR_BLIND.value, Category.EXTERIOR_BLIND.value}
SHUTTER_DEVICE_CATEGORIES = {Category.ROLLER_SHUTTER.value}
SUPPORTED_CATEGORIES = {
Category.ROLLER_SHUTTER.value,
Category.INTERIOR_BLIND.value,
Category.EXTERIOR_BLIND.value,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Somfy cover platform."""
def get_covers():
"""Retrieve covers."""
domain_data = hass.data[DOMAIN]
coordinator = domain_data[COORDINATOR]
api = domain_data[API]
return [
SomfyCover(coordinator, device_id, api, domain_data[CONF_OPTIMISTIC])
for device_id, device in coordinator.data.items()
if SUPPORTED_CATEGORIES & set(device.categories)
]
async_add_entities(await hass.async_add_executor_job(get_covers))
class SomfyCover(SomfyEntity, RestoreEntity, CoverEntity):
"""Representation of a Somfy cover device."""
def __init__(self, coordinator, device_id, api, optimistic):
"""Initialize the Somfy device."""
super().__init__(coordinator, device_id, api)
self.categories = set(self.device.categories)
self.optimistic = optimistic
self._closed = None
self._is_opening = None
self._is_closing = None
self.cover = None
self._create_device()
def _create_device(self):
"""Update the device with the latest data."""
self.cover = Blind(self.device, self.api)
async def async_close_cover(self, **kwargs):
"""Close the cover."""
self._is_closing = True
self.async_write_ha_state()
try:
# Blocks until the close command is sent
await self.hass.async_add_executor_job(self.cover.close)
self._closed = True
finally:
self._is_closing = None
self.async_write_ha_state()
async def async_open_cover(self, **kwargs):
"""Open the cover."""
self._is_opening = True
self.async_write_ha_state()
try:
# Blocks until the open command is sent
await self.hass.async_add_executor_job(self.cover.open)
self._closed = False
finally:
self._is_opening = None
self.async_write_ha_state()
def stop_cover(self, **kwargs):
"""Stop the cover."""
self.cover.stop()
def set_cover_position(self, **kwargs):
"""Move the cover shutter to a specific position."""
self.cover.set_position(100 - kwargs[ATTR_POSITION])
@property
def device_class(self):
"""Return the device class."""
if self.categories & BLIND_DEVICE_CATEGORIES:
return DEVICE_CLASS_BLIND
if self.categories & SHUTTER_DEVICE_CATEGORIES:
return DEVICE_CLASS_SHUTTER
return None
@property
def current_cover_position(self):
"""Return the current position of cover shutter."""
position = None
if self.has_capability("position"):
position = 100 - self.cover.get_position()
return position
@property
def is_opening(self):
"""Return if the cover is opening."""
if not self.optimistic:
return None
return self._is_opening
@property
def is_closing(self):
"""Return if the cover is closing."""
if not self.optimistic:
return None
return self._is_closing
@property
def is_closed(self):
"""Return if the cover is closed."""
is_closed = None
if self.has_capability("position"):
is_closed = self.cover.is_closed()
elif self.optimistic:
is_closed = self._closed
return is_closed
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt.
None is unknown, 0 is closed, 100 is fully open.
"""
orientation = None
if self.has_capability("rotation"):
orientation = 100 - self.cover.orientation
return orientation
def set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
self.cover.orientation = 100 - kwargs[ATTR_TILT_POSITION]
def open_cover_tilt(self, **kwargs):
"""Open the cover tilt."""
self.cover.orientation = 0
def close_cover_tilt(self, **kwargs):
"""Close the cover tilt."""
self.cover.orientation = 100
def stop_cover_tilt(self, **kwargs):
"""Stop the cover."""
self.cover.stop()
async def async_added_to_hass(self):
"""Complete the initialization."""
await super().async_added_to_hass()
if not self.optimistic:
return
# Restore the last state if we use optimistic
last_state = await self.async_get_last_state()
if last_state is not None and last_state.state in (
STATE_OPEN,
STATE_CLOSED,
):
self._closed = last_state.state == STATE_CLOSED
|
import argparse
import pathlib
import sys
from time import monotonic
from . import (
codeowners,
config_flow,
coverage,
dependencies,
json,
manifest,
mqtt,
requirements,
services,
ssdp,
translations,
zeroconf,
)
from .model import Config, Integration
INTEGRATION_PLUGINS = [
json,
codeowners,
config_flow,
dependencies,
manifest,
mqtt,
services,
ssdp,
translations,
zeroconf,
]
HASS_PLUGINS = [
coverage,
]
def valid_integration_path(integration_path):
"""Test if it's a valid integration."""
path = pathlib.Path(integration_path)
if not path.is_dir():
raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.")
return path
def get_config() -> Config:
"""Return config."""
parser = argparse.ArgumentParser(description="Hassfest")
parser.add_argument(
"--action", type=str, choices=["validate", "generate"], default=None
)
parser.add_argument(
"--integration-path",
action="append",
type=valid_integration_path,
help="Validate a single integration",
)
parser.add_argument(
"--requirements",
action="store_true",
help="Validate requirements",
)
parsed = parser.parse_args()
if parsed.action is None:
parsed.action = "validate" if parsed.integration_path else "generate"
if parsed.action == "generate" and parsed.integration_path:
raise RuntimeError(
"Generate is not allowed when limiting to specific integrations"
)
if (
not parsed.integration_path
and not pathlib.Path("requirements_all.txt").is_file()
):
raise RuntimeError("Run from Home Assistant root")
return Config(
root=pathlib.Path(".").absolute(),
specific_integrations=parsed.integration_path,
action=parsed.action,
requirements=parsed.requirements,
)
def main():
"""Validate manifests."""
try:
config = get_config()
except RuntimeError as err:
print(err)
return 1
plugins = [*INTEGRATION_PLUGINS]
if config.requirements:
plugins.append(requirements)
if config.specific_integrations:
integrations = {}
for int_path in config.specific_integrations:
integration = Integration(int_path)
integration.load_manifest()
integrations[integration.domain] = integration
else:
integrations = Integration.load_dir(pathlib.Path("homeassistant/components"))
plugins += HASS_PLUGINS
for plugin in plugins:
try:
start = monotonic()
print(f"Validating {plugin.__name__.split('.')[-1]}...", end="", flush=True)
if plugin is requirements and not config.specific_integrations:
print()
plugin.validate(integrations, config)
print(" done in {:.2f}s".format(monotonic() - start))
except RuntimeError as err:
print()
print()
print("Error!")
print(err)
return 1
# When we generate, all fixable errors are ignored,
# as generating will fix them.
if config.action == "generate":
general_errors = [err for err in config.errors if not err.fixable]
invalid_itg = [
itg
for itg in integrations.values()
if any(not error.fixable for error in itg.errors)
]
else:
# action == validate
general_errors = config.errors
invalid_itg = [itg for itg in integrations.values() if itg.errors]
warnings_itg = [itg for itg in integrations.values() if itg.warnings]
print()
print("Integrations:", len(integrations))
print("Invalid integrations:", len(invalid_itg))
print()
if not invalid_itg and not general_errors:
print_integrations_status(config, warnings_itg, show_fixable_errors=False)
if config.action == "generate":
for plugin in plugins:
if hasattr(plugin, "generate"):
plugin.generate(integrations, config)
return 0
if config.action == "generate":
print("Found errors. Generating files canceled.")
print()
if general_errors:
print("General errors:")
for error in general_errors:
print("*", error)
print()
invalid_itg.extend(itg for itg in warnings_itg if itg not in invalid_itg)
print_integrations_status(config, invalid_itg, show_fixable_errors=False)
return 1
def print_integrations_status(config, integrations, *, show_fixable_errors=True):
"""Print integration status."""
for integration in sorted(integrations, key=lambda itg: itg.domain):
extra = f" - {integration.path}" if config.specific_integrations else ""
print(f"Integration {integration.domain}{extra}:")
for error in integration.errors:
if show_fixable_errors or not error.fixable:
print("*", error)
for warning in integration.warnings:
print("*", "[WARNING]", warning)
print()
if __name__ == "__main__":
sys.exit(main())
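# Illustrative invocation (assuming this module is run as a package entry point,
# e.g. from the Home Assistant repository root):
#   python -m script.hassfest --requirements
#   python -m script.hassfest --integration-path homeassistant/components/hue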
|
import diamond.collector
import re
import os
import multiprocessing
from diamond.collector import str_to_bool
class LoadAverageCollector(diamond.collector.Collector):
PROC_LOADAVG = '/proc/loadavg'
PROC_LOADAVG_RE = re.compile(r'([\d.]+) ([\d.]+) ([\d.]+) (\d+)/(\d+)')
def get_default_config_help(self):
config_help = super(LoadAverageCollector,
self).get_default_config_help()
config_help.update({
'simple': 'Only collect the 1 minute load average'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(LoadAverageCollector, self).get_default_config()
config.update({
'path': 'loadavg',
'simple': 'False'
})
return config
def collect(self):
load01, load05, load15 = os.getloadavg()
cpu_count = multiprocessing.cpu_count()
if not str_to_bool(self.config['simple']):
self.publish_gauge('01', load01, 2)
self.publish_gauge('05', load05, 2)
self.publish_gauge('15', load15, 2)
self.publish_gauge('01_normalized', load01 / cpu_count, 2)
self.publish_gauge('05_normalized', load05 / cpu_count, 2)
self.publish_gauge('15_normalized', load15 / cpu_count, 2)
else:
self.publish_gauge('load', load01, 2)
self.publish_gauge('load_normalized', load01 / cpu_count, 2)
# Legacy: add process/thread counters provided by
# /proc/loadavg (if available).
if os.access(self.PROC_LOADAVG, os.R_OK):
file = open(self.PROC_LOADAVG)
for line in file:
match = self.PROC_LOADAVG_RE.match(line)
if match:
self.publish_gauge('processes_running',
int(match.group(4)))
self.publish_gauge('processes_total', int(match.group(5)))
file.close()
|
import logging
import voluptuous as vol
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_DEVICES,
CONF_UNIT_OF_MEASUREMENT,
CONF_ZONE,
LENGTH_FEET,
LENGTH_KILOMETERS,
LENGTH_METERS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import track_state_change
from homeassistant.util.distance import convert
from homeassistant.util.location import distance
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
ATTR_DIR_OF_TRAVEL = "dir_of_travel"
ATTR_DIST_FROM = "dist_to_zone"
ATTR_NEAREST = "nearest"
CONF_IGNORED_ZONES = "ignored_zones"
CONF_TOLERANCE = "tolerance"
DEFAULT_DIR_OF_TRAVEL = "not set"
DEFAULT_DIST_TO_ZONE = "not set"
DEFAULT_NEAREST = "not set"
DEFAULT_PROXIMITY_ZONE = "home"
DEFAULT_TOLERANCE = 1
DOMAIN = "proximity"
UNITS = [LENGTH_KILOMETERS, LENGTH_METERS, "mi", LENGTH_FEET]
ZONE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_ZONE, default=DEFAULT_PROXIMITY_ZONE): cv.string,
vol.Optional(CONF_DEVICES, default=[]): vol.All(cv.ensure_list, [cv.entity_id]),
vol.Optional(CONF_IGNORED_ZONES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_TOLERANCE, default=DEFAULT_TOLERANCE): cv.positive_int,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.All(cv.string, vol.In(UNITS)),
}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: cv.schema_with_slug_keys(ZONE_SCHEMA)}, extra=vol.ALLOW_EXTRA
)
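# Illustrative configuration.yaml entry matching the schema above (example
# entity and zone names):
#   proximity:
#     home:
#       zone: home
#       devices:
#         - device_tracker.phone
#       ignored_zones:
#         - work
#       tolerance: 50
#       unit_of_measurement: km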
def setup_proximity_component(hass, name, config):
"""Set up the individual proximity component."""
ignored_zones = config.get(CONF_IGNORED_ZONES)
proximity_devices = config.get(CONF_DEVICES)
tolerance = config.get(CONF_TOLERANCE)
proximity_zone = name
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT, hass.config.units.length_unit
)
zone_id = f"zone.{config.get(CONF_ZONE)}"
proximity = Proximity(
hass,
proximity_zone,
DEFAULT_DIST_TO_ZONE,
DEFAULT_DIR_OF_TRAVEL,
DEFAULT_NEAREST,
ignored_zones,
proximity_devices,
tolerance,
zone_id,
unit_of_measurement,
)
proximity.entity_id = f"{DOMAIN}.{proximity_zone}"
proximity.schedule_update_ha_state()
track_state_change(hass, proximity_devices, proximity.check_proximity_state_change)
return True
def setup(hass, config):
"""Get the zones and offsets from configuration.yaml."""
for zone, proximity_config in config[DOMAIN].items():
setup_proximity_component(hass, zone, proximity_config)
return True
class Proximity(Entity):
"""Representation of a Proximity."""
def __init__(
self,
hass,
zone_friendly_name,
dist_to,
dir_of_travel,
nearest,
ignored_zones,
proximity_devices,
tolerance,
proximity_zone,
unit_of_measurement,
):
"""Initialize the proximity."""
self.hass = hass
self.friendly_name = zone_friendly_name
self.dist_to = dist_to
self.dir_of_travel = dir_of_travel
self.nearest = nearest
self.ignored_zones = ignored_zones
self.proximity_devices = proximity_devices
self.tolerance = tolerance
self.proximity_zone = proximity_zone
self._unit_of_measurement = unit_of_measurement
@property
def name(self):
"""Return the name of the entity."""
return self.friendly_name
@property
def state(self):
"""Return the state."""
return self.dist_to
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._unit_of_measurement
@property
def state_attributes(self):
"""Return the state attributes."""
return {ATTR_DIR_OF_TRAVEL: self.dir_of_travel, ATTR_NEAREST: self.nearest}
def check_proximity_state_change(self, entity, old_state, new_state):
"""Perform the proximity checking."""
entity_name = new_state.name
devices_to_calculate = False
devices_in_zone = ""
zone_state = self.hass.states.get(self.proximity_zone)
proximity_latitude = zone_state.attributes.get(ATTR_LATITUDE)
proximity_longitude = zone_state.attributes.get(ATTR_LONGITUDE)
# Check for devices in the monitored zone.
for device in self.proximity_devices:
device_state = self.hass.states.get(device)
if device_state is None:
devices_to_calculate = True
continue
if device_state.state not in self.ignored_zones:
devices_to_calculate = True
# Check the location of all devices.
if (device_state.state).lower() == (self.friendly_name).lower():
device_friendly = device_state.name
if devices_in_zone != "":
devices_in_zone = f"{devices_in_zone}, "
devices_in_zone = devices_in_zone + device_friendly
# No-one to track so reset the entity.
if not devices_to_calculate:
self.dist_to = "not set"
self.dir_of_travel = "not set"
self.nearest = "not set"
self.schedule_update_ha_state()
return
# At least one device is in the monitored zone so update the entity.
if devices_in_zone != "":
self.dist_to = 0
self.dir_of_travel = "arrived"
self.nearest = devices_in_zone
self.schedule_update_ha_state()
return
# We can't check proximity because latitude and longitude don't exist.
if "latitude" not in new_state.attributes:
return
# Collect distances to the zone for all devices.
distances_to_zone = {}
for device in self.proximity_devices:
# Ignore devices in an ignored zone.
device_state = self.hass.states.get(device)
if device_state.state in self.ignored_zones:
continue
# Ignore devices if proximity cannot be calculated.
if "latitude" not in device_state.attributes:
continue
# Calculate the distance to the proximity zone.
dist_to_zone = distance(
proximity_latitude,
proximity_longitude,
device_state.attributes[ATTR_LATITUDE],
device_state.attributes[ATTR_LONGITUDE],
)
# Add the device and distance to a dictionary.
distances_to_zone[device] = round(
convert(dist_to_zone, LENGTH_METERS, self.unit_of_measurement), 1
)
# Loop through each of the distances collected and work out the
# closest.
closest_device: str = None
dist_to_zone: float = None
for device in distances_to_zone:
if not dist_to_zone or distances_to_zone[device] < dist_to_zone:
closest_device = device
dist_to_zone = distances_to_zone[device]
# If the closest device is one of the other devices.
if closest_device != entity:
self.dist_to = round(distances_to_zone[closest_device])
self.dir_of_travel = "unknown"
device_state = self.hass.states.get(closest_device)
self.nearest = device_state.name
self.schedule_update_ha_state()
return
# Stop if we cannot calculate the direction of travel (i.e. we don't
# have a previous state and a current LAT and LONG).
if old_state is None or "latitude" not in old_state.attributes:
self.dist_to = round(distances_to_zone[entity])
self.dir_of_travel = "unknown"
self.nearest = entity_name
self.schedule_update_ha_state()
return
# Reset the variables
distance_travelled = 0
# Calculate the distance travelled.
old_distance = distance(
proximity_latitude,
proximity_longitude,
old_state.attributes[ATTR_LATITUDE],
old_state.attributes[ATTR_LONGITUDE],
)
new_distance = distance(
proximity_latitude,
proximity_longitude,
new_state.attributes[ATTR_LATITUDE],
new_state.attributes[ATTR_LONGITUDE],
)
distance_travelled = round(new_distance - old_distance, 1)
# Check for tolerance
if distance_travelled < self.tolerance * -1:
direction_of_travel = "towards"
elif distance_travelled > self.tolerance:
direction_of_travel = "away_from"
else:
direction_of_travel = "stationary"
# Update the proximity entity
self.dist_to = round(dist_to_zone)
self.dir_of_travel = direction_of_travel
self.nearest = entity_name
self.schedule_update_ha_state()
_LOGGER.debug(
"proximity.%s update entity: distance=%s: direction=%s: device=%s",
self.friendly_name,
round(dist_to_zone),
direction_of_travel,
entity_name,
)
_LOGGER.info("%s: proximity calculation complete", entity_name)
|
from plumbum.commands.base import BaseCommand
from plumbum.commands.processes import run_proc, CommandNotFound, ProcessExecutionError
def make_concurrent(self, rhs):
if not isinstance(rhs, BaseCommand):
raise TypeError("rhs must be an instance of BaseCommand")
if isinstance(self, ConcurrentCommand):
if isinstance(rhs, ConcurrentCommand):
self.commands.extend(rhs.commands)
else:
self.commands.append(rhs)
return self
elif isinstance(rhs, ConcurrentCommand):
rhs.commands.insert(0, self)
return rhs
else:
return ConcurrentCommand(self, rhs)
BaseCommand.__and__ = make_concurrent
class ConcurrentPopen(object):
def __init__(self, procs):
self.procs = procs
self.stdin = None
self.stdout = None
self.stderr = None
self.custom_encoding = None
self.returncode = None
@property
def argv(self):
return [getattr(proc, "argv", []) for proc in self.procs]
def poll(self):
if self.returncode is not None:
return self.returncode
rcs = [proc.poll() for proc in self.procs]
if any(rc is None for rc in rcs):
return None
self.returncode = 0
for rc in rcs:
if rc != 0:
self.returncode = rc
break
return self.returncode
def wait(self):
for proc in self.procs:
proc.wait()
return self.poll()
def communicate(self, input=None):
if input:
raise ValueError("Cannot pass input to ConcurrentPopen.communicate")
out_err_tuples = [proc.communicate() for proc in self.procs]
self.wait()
return tuple(zip(*out_err_tuples))
class ConcurrentCommand(BaseCommand):
def __init__(self, *commands):
self.commands = list(commands)
def formulate(self, level=0, args=()):
form = ["("]
for cmd in self.commands:
form.extend(cmd.formulate(level, args))
form.append("&")
return form + [")"]
def popen(self, *args, **kwargs):
return ConcurrentPopen([cmd[args].popen(**kwargs) for cmd in self.commands])
def __getitem__(self, args):
"""Creates a bound-command with the given arguments"""
if not isinstance(args, (tuple, list)):
args = [args, ]
if not args:
return self
else:
return ConcurrentCommand(*(cmd[args] for cmd in self.commands))
class Cluster(object):
def __init__(self, *machines):
self.machines = list(machines)
def __enter__(self):
return self
def __exit__(self, t, v, tb):
self.close()
def close(self):
for mach in self.machines:
mach.close()
del self.machines[:]
def add_machine(self, machine):
self.machines.append(machine)
def __iter__(self):
return iter(self.machines)
def filter(self, pred):
return self.__class__(filter(pred, self))
def which(self, progname):
return [mach.which(progname) for mach in self]
def list_processes(self):
return [mach.list_processes() for mach in self]
def pgrep(self, pattern):
return [mach.pgrep(pattern) for mach in self]
def path(self, *parts):
return [mach.path(*parts) for mach in self]
def __getitem__(self, progname):
if not isinstance(progname, str):
raise TypeError("progname must be a string, not %r" % (type(progname,)))
return ConcurrentCommand(*(mach[progname] for mach in self))
def __contains__(self, cmd):
try:
self[cmd]
except CommandNotFound:
return False
else:
return True
@property
def python(self):
return ConcurrentCommand(*(mach.python for mach in self))
def session(self):
return ClusterSession(*(mach.session() for mach in self))
class ClusterSession(object):
def __init__(self, *sessions):
self.sessions = sessions
def __iter__(self):
return iter(self.sessions)
def __enter__(self):
return self
def __exit__(self, t, v, tb):
self.close()
def __del__(self):
try:
self.close()
except Exception:
pass
def alive(self):
"""Returns ``True`` if the underlying shells are all alive, ``False`` otherwise"""
return all(session.alive for session in self)
def close(self):
"""Closes (terminates) all underlying shell sessions"""
for session in self.sessions:
session.close()
del self.sessions[:]
def popen(self, cmd):
return ConcurrentPopen([session.popen(cmd) for session in self])
def run(self, cmd, retcode=None):
return run_proc(self.popen(cmd), retcode)
if __name__ == "__main__":
from plumbum import local
from plumbum.cmd import ls, date, sleep
c = ls & date & sleep[1]
print(c())
c = ls & date & sleep[1] & sleep["-z"]
try:
c()
except ProcessExecutionError as ex:
print(ex)
else:
assert False
clst = Cluster(local, local, local)
print(clst["ls"]())
# This works fine
print(local.session().run("echo $$"))
# this does not
ret, stdout, stderr = clst.session().run("echo $$")
print(ret)
ret = [int(pid) for pid in stdout]
assert(len(set(ret))==3)
|
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from .core import Context # noqa: F401 pylint: disable=unused-import
class HomeAssistantError(Exception):
"""General Home Assistant exception occurred."""
class InvalidEntityFormatError(HomeAssistantError):
"""When an invalid formatted entity is encountered."""
class NoEntitySpecifiedError(HomeAssistantError):
"""When no entity is specified."""
class TemplateError(HomeAssistantError):
"""Error during template rendering."""
def __init__(self, exception: Exception) -> None:
"""Init the error."""
super().__init__(f"{exception.__class__.__name__}: {exception}")
class PlatformNotReady(HomeAssistantError):
"""Error to indicate that platform is not ready."""
class ConfigEntryNotReady(HomeAssistantError):
"""Error to indicate that config entry is not ready."""
class InvalidStateError(HomeAssistantError):
"""When an invalid state is encountered."""
class Unauthorized(HomeAssistantError):
"""When an action is unauthorized."""
def __init__(
self,
context: Optional["Context"] = None,
user_id: Optional[str] = None,
entity_id: Optional[str] = None,
config_entry_id: Optional[str] = None,
perm_category: Optional[str] = None,
permission: Optional[str] = None,
) -> None:
"""Unauthorized error."""
super().__init__(self.__class__.__name__)
self.context = context
if user_id is None and context is not None:
user_id = context.user_id
self.user_id = user_id
self.entity_id = entity_id
self.config_entry_id = config_entry_id
# Not all actions have an ID (like adding config entry)
# We then use this fallback to know what category was unauth
self.perm_category = perm_category
self.permission = permission
class UnknownUser(Unauthorized):
"""When call is made with user ID that doesn't exist."""
class ServiceNotFound(HomeAssistantError):
"""Raised when a service is not found."""
def __init__(self, domain: str, service: str) -> None:
"""Initialize error."""
super().__init__(self, f"Service {domain}.{service} not found")
self.domain = domain
self.service = service
def __str__(self) -> str:
"""Return string representation."""
return f"Unable to find service {self.domain}/{self.service}"
|
import binascii
import os
import pickle
from radicale import item as radicale_item
from radicale import pathutils
from radicale.log import logger
class CollectionHistoryMixin:
def _update_history_etag(self, href, item):
"""Updates and retrieves the history etag from the history cache.
The history cache contains a file for each current and deleted item
of the collection. These files contain the etag of the item (empty
string for deleted items) and a history etag, which is a hash over
the previous history etag and the etag separated by "/".
"""
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
try:
with open(os.path.join(history_folder, href), "rb") as f:
cache_etag, history_etag = pickle.load(f)
except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
if isinstance(e, (pickle.UnpicklingError, ValueError)):
logger.warning(
"Failed to load history cache entry %r in %r: %s",
href, self.path, e, exc_info=True)
cache_etag = ""
# Initialize with random data to prevent collisions with cleaned
# expired items.
history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
etag = item.etag if item else ""
if etag != cache_etag:
self._storage._makedirs_synced(history_folder)
history_etag = radicale_item.get_etag(
history_etag + "/" + etag).strip("\"")
try:
# Race: Other processes might have created and locked the file.
with self._atomic_write(os.path.join(history_folder, href),
"wb") as f:
pickle.dump([etag, history_etag], f)
except PermissionError:
pass
return history_etag
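# Illustrative chain (hypothetical values): starting from history etag "h0",
# storing an item with etag "e1" yields get_etag("h0" + "/" + "e1"); deleting
# the item later chains once more with the empty etag, get_etag(h1 + "/" + "").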
def _get_deleted_history_hrefs(self):
"""Returns the hrefs of all deleted items that are still in the
history cache."""
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
try:
for entry in os.scandir(history_folder):
href = entry.name
if not pathutils.is_safe_filesystem_path_component(href):
continue
if os.path.isfile(os.path.join(self._filesystem_path, href)):
continue
yield href
except FileNotFoundError:
pass
def _clean_history(self):
# Delete all expired history entries of deleted items.
history_folder = os.path.join(self._filesystem_path,
".Radicale.cache", "history")
self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
max_age=self._storage.configuration.get(
"storage", "max_sync_token_age"))
|
import logging
import os
from pyicloud import PyiCloudService
from pyicloud.exceptions import (
PyiCloudException,
PyiCloudFailedLoginException,
PyiCloudNoDevicesException,
PyiCloudServiceNotActivatedException,
)
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import (
CONF_GPS_ACCURACY_THRESHOLD,
CONF_MAX_INTERVAL,
CONF_WITH_FAMILY,
DEFAULT_GPS_ACCURACY_THRESHOLD,
DEFAULT_MAX_INTERVAL,
DEFAULT_WITH_FAMILY,
STORAGE_KEY,
STORAGE_VERSION,
)
from .const import DOMAIN # pylint: disable=unused-import
CONF_TRUSTED_DEVICE = "trusted_device"
CONF_VERIFICATION_CODE = "verification_code"
_LOGGER = logging.getLogger(__name__)
class IcloudFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a iCloud config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize iCloud config flow."""
self.api = None
self._username = None
self._password = None
self._with_family = None
self._max_interval = None
self._gps_accuracy_threshold = None
self._trusted_device = None
self._verification_code = None
self._existing_entry = None
self._description_placeholders = None
def _show_setup_form(self, user_input=None, errors=None, step_id="user"):
"""Show the setup form to the user."""
if user_input is None:
user_input = {}
if step_id == "user":
schema = {
vol.Required(
CONF_USERNAME, default=user_input.get(CONF_USERNAME, "")
): str,
vol.Required(
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "")
): str,
vol.Optional(
CONF_WITH_FAMILY,
default=user_input.get(CONF_WITH_FAMILY, DEFAULT_WITH_FAMILY),
): bool,
}
else:
schema = {
vol.Required(
CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "")
): str,
}
return self.async_show_form(
step_id=step_id,
data_schema=vol.Schema(schema),
errors=errors or {},
description_placeholders=self._description_placeholders,
)
async def _validate_and_create_entry(self, user_input, step_id):
"""Check if config is valid and create entry if so."""
self._password = user_input[CONF_PASSWORD]
extra_inputs = user_input
# If an existing entry was found, meaning this is a password update attempt,
# use those to get config values that aren't changing
if self._existing_entry:
extra_inputs = self._existing_entry
self._username = extra_inputs[CONF_USERNAME]
self._with_family = extra_inputs.get(CONF_WITH_FAMILY, DEFAULT_WITH_FAMILY)
self._max_interval = extra_inputs.get(CONF_MAX_INTERVAL, DEFAULT_MAX_INTERVAL)
self._gps_accuracy_threshold = extra_inputs.get(
CONF_GPS_ACCURACY_THRESHOLD, DEFAULT_GPS_ACCURACY_THRESHOLD
)
# Check if already configured
if self.unique_id is None:
await self.async_set_unique_id(self._username)
self._abort_if_unique_id_configured()
try:
self.api = await self.hass.async_add_executor_job(
PyiCloudService,
self._username,
self._password,
self.hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY).path,
True,
None,
self._with_family,
)
except PyiCloudFailedLoginException as error:
_LOGGER.error("Error logging into iCloud service: %s", error)
self.api = None
errors = {CONF_PASSWORD: "invalid_auth"}
return self._show_setup_form(user_input, errors, step_id)
if self.api.requires_2sa:
return await self.async_step_trusted_device()
try:
devices = await self.hass.async_add_executor_job(
getattr, self.api, "devices"
)
if not devices:
raise PyiCloudNoDevicesException()
except (PyiCloudServiceNotActivatedException, PyiCloudNoDevicesException):
_LOGGER.error("No device found in the iCloud account: %s", self._username)
self.api = None
return self.async_abort(reason="no_device")
data = {
CONF_USERNAME: self._username,
CONF_PASSWORD: self._password,
CONF_WITH_FAMILY: self._with_family,
CONF_MAX_INTERVAL: self._max_interval,
CONF_GPS_ACCURACY_THRESHOLD: self._gps_accuracy_threshold,
}
# If this is a password update attempt, update the entry instead of creating one
if step_id == "user":
return self.async_create_entry(title=self._username, data=data)
for entry in self.hass.config_entries.async_entries(DOMAIN):
if entry.unique_id == self.unique_id:
self.hass.config_entries.async_update_entry(entry, data=data)
await self.hass.config_entries.async_reload(entry.entry_id)
return self.async_abort(reason="reauth_successful")
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
icloud_dir = self.hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
if not os.path.exists(icloud_dir.path):
await self.hass.async_add_executor_job(os.makedirs, icloud_dir.path)
if user_input is None:
return self._show_setup_form(user_input, errors)
return await self._validate_and_create_entry(user_input, "user")
async def async_step_import(self, user_input):
"""Import a config entry."""
return await self.async_step_user(user_input)
async def async_step_reauth(self, user_input=None):
"""Update password for a config entry that can't authenticate."""
# Store existing entry data so it can be used later and set unique ID
# so existing config entry can be updated
if not self._existing_entry:
await self.async_set_unique_id(user_input.pop("unique_id"))
self._existing_entry = user_input.copy()
self._description_placeholders = {"username": user_input[CONF_USERNAME]}
user_input = None
if user_input is None:
return self._show_setup_form(step_id=config_entries.SOURCE_REAUTH)
return await self._validate_and_create_entry(
user_input, config_entries.SOURCE_REAUTH
)
async def async_step_trusted_device(self, user_input=None, errors=None):
"""We need a trusted device."""
if errors is None:
errors = {}
trusted_devices = await self.hass.async_add_executor_job(
getattr, self.api, "trusted_devices"
)
trusted_devices_for_form = {}
for i, device in enumerate(trusted_devices):
trusted_devices_for_form[i] = device.get(
"deviceName", f"SMS to {device.get('phoneNumber')}"
)
if user_input is None:
return await self._show_trusted_device_form(
trusted_devices_for_form, user_input, errors
)
self._trusted_device = trusted_devices[int(user_input[CONF_TRUSTED_DEVICE])]
if not await self.hass.async_add_executor_job(
self.api.send_verification_code, self._trusted_device
):
_LOGGER.error("Failed to send verification code")
self._trusted_device = None
errors[CONF_TRUSTED_DEVICE] = "send_verification_code"
return await self._show_trusted_device_form(
trusted_devices_for_form, user_input, errors
)
return await self.async_step_verification_code()
async def _show_trusted_device_form(
self, trusted_devices, user_input=None, errors=None
):
"""Show the trusted_device form to the user."""
return self.async_show_form(
step_id=CONF_TRUSTED_DEVICE,
data_schema=vol.Schema(
{
vol.Required(CONF_TRUSTED_DEVICE): vol.All(
vol.Coerce(int), vol.In(trusted_devices)
)
}
),
errors=errors or {},
)
async def async_step_verification_code(self, user_input=None):
"""Ask the verification code to the user."""
errors = {}
if user_input is None:
return await self._show_verification_code_form(user_input)
self._verification_code = user_input[CONF_VERIFICATION_CODE]
try:
if not await self.hass.async_add_executor_job(
self.api.validate_verification_code,
self._trusted_device,
self._verification_code,
):
raise PyiCloudException("The code you entered is not valid.")
except PyiCloudException as error:
# Reset to the initial 2FA state to allow the user to retry
_LOGGER.error("Failed to verify verification code: %s", error)
self._trusted_device = None
self._verification_code = None
errors["base"] = "validate_verification_code"
return await self.async_step_trusted_device(None, errors)
return await self.async_step_user(
{
CONF_USERNAME: self._username,
CONF_PASSWORD: self._password,
CONF_WITH_FAMILY: self._with_family,
CONF_MAX_INTERVAL: self._max_interval,
CONF_GPS_ACCURACY_THRESHOLD: self._gps_accuracy_threshold,
}
)
async def _show_verification_code_form(self, user_input=None):
"""Show the verification_code form to the user."""
return self.async_show_form(
step_id=CONF_VERIFICATION_CODE,
data_schema=vol.Schema({vol.Required(CONF_VERIFICATION_CODE): str}),
errors=None,
)
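# Illustrative sketch (not part of the original flow): how the voluptuous
# schema built in _show_setup_form validates user input. Missing keys fall
# back to their declared defaults; the credentials below are made up.
if __name__ == "__main__":
    _demo_schema = vol.Schema(
        {
            vol.Required(CONF_USERNAME, default=""): str,
            vol.Required(CONF_PASSWORD, default=""): str,
            vol.Optional(CONF_WITH_FAMILY, default=DEFAULT_WITH_FAMILY): bool,
        }
    )
    print(_demo_schema({CONF_USERNAME: "user@example.com", CONF_PASSWORD: "hunter2"}))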
|
import abc
import asyncio
from typing import Any, Dict, List, Optional, cast
import uuid
import voluptuous as vol
from .core import HomeAssistant, callback
from .exceptions import HomeAssistantError
RESULT_TYPE_FORM = "form"
RESULT_TYPE_CREATE_ENTRY = "create_entry"
RESULT_TYPE_ABORT = "abort"
RESULT_TYPE_EXTERNAL_STEP = "external"
RESULT_TYPE_EXTERNAL_STEP_DONE = "external_done"
# Event that is fired when a flow is progressed via external source.
EVENT_DATA_ENTRY_FLOW_PROGRESSED = "data_entry_flow_progressed"
class FlowError(HomeAssistantError):
"""Error while configuring an account."""
class UnknownHandler(FlowError):
"""Unknown handler specified."""
class UnknownFlow(FlowError):
"""Unknown flow specified."""
class UnknownStep(FlowError):
"""Unknown step specified."""
class AbortFlow(FlowError):
"""Exception to indicate a flow needs to be aborted."""
def __init__(self, reason: str, description_placeholders: Optional[Dict] = None):
"""Initialize an abort flow exception."""
super().__init__(f"Flow aborted: {reason}")
self.reason = reason
self.description_placeholders = description_placeholders
class FlowManager(abc.ABC):
"""Manage all the flows that are in progress."""
def __init__(
self,
hass: HomeAssistant,
) -> None:
"""Initialize the flow manager."""
self.hass = hass
self._initializing: Dict[str, List[asyncio.Future]] = {}
self._progress: Dict[str, Any] = {}
async def async_wait_init_flow_finish(self, handler: str) -> None:
"""Wait till all flows in progress are initialized."""
current = self._initializing.get(handler)
if not current:
return
await asyncio.wait(current)
@abc.abstractmethod
async def async_create_flow(
self,
handler_key: Any,
*,
context: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None,
) -> "FlowHandler":
"""Create a flow for specified handler.
Handler key is the domain of the component that we want to set up.
"""
@abc.abstractmethod
async def async_finish_flow(
self, flow: "FlowHandler", result: Dict[str, Any]
) -> Dict[str, Any]:
"""Finish a config flow and add an entry."""
async def async_post_init(
self, flow: "FlowHandler", result: Dict[str, Any]
) -> None:
"""Entry has finished executing its first step asynchronously."""
@callback
def async_progress(self) -> List[Dict]:
"""Return the flows in progress."""
return [
{
"flow_id": flow.flow_id,
"handler": flow.handler,
"context": flow.context,
"step_id": flow.cur_step["step_id"],
}
for flow in self._progress.values()
if flow.cur_step is not None
]
async def async_init(
self, handler: str, *, context: Optional[Dict] = None, data: Any = None
) -> Any:
"""Start a configuration flow."""
if context is None:
context = {}
init_done: asyncio.Future = asyncio.Future()
self._initializing.setdefault(handler, []).append(init_done)
flow = await self.async_create_flow(handler, context=context, data=data)
if not flow:
self._initializing[handler].remove(init_done)
raise UnknownFlow("Flow was not created")
flow.hass = self.hass
flow.handler = handler
flow.flow_id = uuid.uuid4().hex
flow.context = context
self._progress[flow.flow_id] = flow
try:
result = await self._async_handle_step(
flow, flow.init_step, data, init_done
)
finally:
self._initializing[handler].remove(init_done)
if result["type"] != RESULT_TYPE_ABORT:
await self.async_post_init(flow, result)
return result
async def async_configure(
self, flow_id: str, user_input: Optional[Dict] = None
) -> Any:
"""Continue a configuration flow."""
flow = self._progress.get(flow_id)
if flow is None:
raise UnknownFlow
cur_step = flow.cur_step
if cur_step.get("data_schema") is not None and user_input is not None:
user_input = cur_step["data_schema"](user_input)
result = await self._async_handle_step(flow, cur_step["step_id"], user_input)
if cur_step["type"] == RESULT_TYPE_EXTERNAL_STEP:
if result["type"] not in (
RESULT_TYPE_EXTERNAL_STEP,
RESULT_TYPE_EXTERNAL_STEP_DONE,
):
raise ValueError(
"External step can only transition to "
"external step or external step done."
)
# If the result has changed from last result, fire event to update
# the frontend.
if cur_step["step_id"] != result.get("step_id"):
# Tell frontend to reload the flow state.
self.hass.bus.async_fire(
EVENT_DATA_ENTRY_FLOW_PROGRESSED,
{"handler": flow.handler, "flow_id": flow_id, "refresh": True},
)
return result
@callback
def async_abort(self, flow_id: str) -> None:
"""Abort a flow."""
if self._progress.pop(flow_id, None) is None:
raise UnknownFlow
async def _async_handle_step(
self,
flow: Any,
step_id: str,
user_input: Optional[Dict],
step_done: Optional[asyncio.Future] = None,
) -> Dict:
"""Handle a step of a flow."""
method = f"async_step_{step_id}"
if not hasattr(flow, method):
self._progress.pop(flow.flow_id)
if step_done:
step_done.set_result(None)
raise UnknownStep(
f"Handler {flow.__class__.__name__} doesn't support step {step_id}"
)
try:
result: Dict = await getattr(flow, method)(user_input)
except AbortFlow as err:
result = _create_abort_data(
flow.flow_id, flow.handler, err.reason, err.description_placeholders
)
# Mark the step as done.
# We do this before calling async_finish_flow because config entries will hit a
# circular dependency where async_finish_flow sets up new entry, which needs the
# integration to be set up, which is waiting for init to be done.
if step_done:
step_done.set_result(None)
if result["type"] not in (
RESULT_TYPE_FORM,
RESULT_TYPE_EXTERNAL_STEP,
RESULT_TYPE_CREATE_ENTRY,
RESULT_TYPE_ABORT,
RESULT_TYPE_EXTERNAL_STEP_DONE,
):
raise ValueError(f"Handler returned incorrect type: {result['type']}")
if result["type"] in (
RESULT_TYPE_FORM,
RESULT_TYPE_EXTERNAL_STEP,
RESULT_TYPE_EXTERNAL_STEP_DONE,
):
flow.cur_step = result
return result
# We pass a copy of the result because we're mutating our version
result = await self.async_finish_flow(flow, dict(result))
        # async_finish_flow may change the result type, check it again
if result["type"] == RESULT_TYPE_FORM:
flow.cur_step = result
return result
# Abort and Success results both finish the flow
self._progress.pop(flow.flow_id)
return result
class FlowHandler:
"""Handle the configuration flow of a component."""
# Set by flow manager
flow_id: str = None # type: ignore
hass: Optional[HomeAssistant] = None
handler: Optional[str] = None
cur_step: Optional[Dict[str, str]] = None
context: Dict
# Set by _async_create_flow callback
init_step = "init"
# Set by developer
VERSION = 1
@property
def source(self) -> Optional[str]:
"""Source that initialized the flow."""
if not hasattr(self, "context"):
return None
return self.context.get("source", None)
@property
def show_advanced_options(self) -> bool:
"""If we should show advanced options."""
if not hasattr(self, "context"):
return False
return self.context.get("show_advanced_options", False)
@callback
def async_show_form(
self,
*,
step_id: str,
data_schema: vol.Schema = None,
errors: Optional[Dict] = None,
description_placeholders: Optional[Dict] = None,
) -> Dict[str, Any]:
"""Return the definition of a form to gather user input."""
return {
"type": RESULT_TYPE_FORM,
"flow_id": self.flow_id,
"handler": self.handler,
"step_id": step_id,
"data_schema": data_schema,
"errors": errors,
"description_placeholders": description_placeholders,
}
@callback
def async_create_entry(
self,
*,
title: str,
data: Dict,
description: Optional[str] = None,
description_placeholders: Optional[Dict] = None,
) -> Dict[str, Any]:
"""Finish config flow and create a config entry."""
return {
"version": self.VERSION,
"type": RESULT_TYPE_CREATE_ENTRY,
"flow_id": self.flow_id,
"handler": self.handler,
"title": title,
"data": data,
"description": description,
"description_placeholders": description_placeholders,
}
@callback
def async_abort(
self, *, reason: str, description_placeholders: Optional[Dict] = None
) -> Dict[str, Any]:
"""Abort the config flow."""
return _create_abort_data(
self.flow_id, cast(str, self.handler), reason, description_placeholders
)
@callback
def async_external_step(
self, *, step_id: str, url: str, description_placeholders: Optional[Dict] = None
) -> Dict[str, Any]:
"""Return the definition of an external step for the user to take."""
return {
"type": RESULT_TYPE_EXTERNAL_STEP,
"flow_id": self.flow_id,
"handler": self.handler,
"step_id": step_id,
"url": url,
"description_placeholders": description_placeholders,
}
@callback
def async_external_step_done(self, *, next_step_id: str) -> Dict[str, Any]:
"""Return the definition of an external step for the user to take."""
return {
"type": RESULT_TYPE_EXTERNAL_STEP_DONE,
"flow_id": self.flow_id,
"handler": self.handler,
"step_id": next_step_id,
}
@callback
def _create_abort_data(
flow_id: str,
handler: str,
reason: str,
description_placeholders: Optional[Dict] = None,
) -> Dict[str, Any]:
"""Return the definition of an external step for the user to take."""
return {
"type": RESULT_TYPE_ABORT,
"flow_id": flow_id,
"handler": handler,
"reason": reason,
"description_placeholders": description_placeholders,
}
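# Illustrative sketch (not part of the original module): a minimal FlowHandler
# subclass showing how async_show_form and async_create_entry are combined.
# The step name and schema key are made up; a concrete FlowManager subclass
# (such as the config entries manager) would instantiate and drive it.
class _ExampleFlow(FlowHandler):
    """Ask for a name in a form, then create an entry with it."""

    async def async_step_init(self, user_input=None):
        """Handle the (default) init step."""
        if user_input is None:
            return self.async_show_form(
                step_id="init",
                data_schema=vol.Schema({vol.Required("name"): str}),
            )
        return self.async_create_entry(title=user_input["name"], data=user_input)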
|
class MissingRemote(Exception):
"""
Raise when a remote by name is not found.
"""
pass
class MissingMasterBranch(Exception):
"""
Raise when the "master" branch cannot be located.
"""
pass
class BaseOperation(object):
"""
Base class for all Git-related operations.
"""
def __init__(self, repo, remote_name='origin', master_branch='master'):
self.repo = repo
self.remote_name = remote_name
self.master_branch = master_branch
def _filtered_remotes(self, origin, skip=[]):
"""
Returns a list of remote refs, skipping ones you don't need.
If ``skip`` is empty, it will default to ``['HEAD',
self.master_branch]``.
"""
if not skip:
skip = ['HEAD', self.master_branch]
        refs = [i for i in origin.refs if i.remote_head not in skip]
return refs
def _master_ref(self, origin):
"""
Finds the master ref object that matches master branch.
"""
for ref in origin.refs:
if ref.remote_head == self.master_branch:
return ref
raise MissingMasterBranch(
'Could not find ref for {0}'.format(self.master_branch))
@property
def _origin(self):
"""
        Gets the remote that references origin by the name self.remote_name.
"""
origin = None
for remote in self.repo.remotes:
if remote.name == self.remote_name:
origin = remote
if not origin:
raise MissingRemote('Could not find the remote named {0}'.format(
self.remote_name))
return origin
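# Illustrative usage sketch (not part of the original module), assuming the
# ``repo`` argument is a GitPython ``git.Repo`` (it only needs ``.remotes``
# with ``.name`` and ``.refs``). Shows how the helpers are typically combined.
if __name__ == '__main__':
    from git import Repo  # GitPython, assumed to be available

    op = BaseOperation(Repo('.'), remote_name='origin', master_branch='master')
    origin = op._origin
    # All remote branches except HEAD and the master branch:
    for ref in op._filtered_remotes(origin):
        print(ref.remote_head)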
|
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntity
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
)
from .const import CONNECTION, DOMAIN as AGENT_DOMAIN
ICON = "mdi:security"
CONF_HOME_MODE_NAME = "home"
CONF_AWAY_MODE_NAME = "away"
CONF_NIGHT_MODE_NAME = "night"
CONST_ALARM_CONTROL_PANEL_NAME = "Alarm Panel"
async def async_setup_entry(
hass, config_entry, async_add_entities, discovery_info=None
):
"""Set up the Agent DVR Alarm Control Panels."""
async_add_entities(
[AgentBaseStation(hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION])]
)
class AgentBaseStation(AlarmControlPanelEntity):
"""Representation of an Agent DVR Alarm Control Panel."""
def __init__(self, client):
"""Initialize the alarm control panel."""
self._state = None
self._client = client
self._unique_id = f"{client.unique}_CP"
name = CONST_ALARM_CONTROL_PANEL_NAME
        self._name = f"{client.name} {name}"
@property
def icon(self):
"""Return icon."""
return ICON
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT
@property
def device_info(self):
"""Return the device info for adding the entity to the agent object."""
return {
"identifiers": {(AGENT_DOMAIN, self._client.unique)},
"manufacturer": "Agent",
"model": CONST_ALARM_CONTROL_PANEL_NAME,
"sw_version": self._client.version,
}
async def async_update(self):
"""Update the state of the device."""
await self._client.update()
armed = self._client.is_armed
if armed is None:
self._state = None
return
if armed:
prof = (await self._client.get_active_profile()).lower()
self._state = STATE_ALARM_ARMED_AWAY
if prof == CONF_HOME_MODE_NAME:
self._state = STATE_ALARM_ARMED_HOME
elif prof == CONF_NIGHT_MODE_NAME:
self._state = STATE_ALARM_ARMED_NIGHT
else:
self._state = STATE_ALARM_DISARMED
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
await self._client.disarm()
self._state = STATE_ALARM_DISARMED
async def async_alarm_arm_away(self, code=None):
"""Send arm away command. Uses custom mode."""
await self._client.arm()
await self._client.set_active_profile(CONF_AWAY_MODE_NAME)
self._state = STATE_ALARM_ARMED_AWAY
async def async_alarm_arm_home(self, code=None):
"""Send arm home command. Uses custom mode."""
await self._client.arm()
await self._client.set_active_profile(CONF_HOME_MODE_NAME)
self._state = STATE_ALARM_ARMED_HOME
async def async_alarm_arm_night(self, code=None):
"""Send arm night command. Uses custom mode."""
await self._client.arm()
await self._client.set_active_profile(CONF_NIGHT_MODE_NAME)
self._state = STATE_ALARM_ARMED_NIGHT
@property
def name(self):
"""Return the name of the base station."""
return self._name
@property
def available(self) -> bool:
"""Device available."""
return self._client.is_available
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
|
import voluptuous as vol
from homeassistant.components import bbb_gpio
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import CONF_NAME, DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
CONF_PINS = "pins"
CONF_BOUNCETIME = "bouncetime"
CONF_INVERT_LOGIC = "invert_logic"
CONF_PULL_MODE = "pull_mode"
DEFAULT_BOUNCETIME = 50
DEFAULT_INVERT_LOGIC = False
DEFAULT_PULL_MODE = "UP"
PIN_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_BOUNCETIME, default=DEFAULT_BOUNCETIME): cv.positive_int,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): vol.In(["UP", "DOWN"]),
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PINS, default={}): vol.Schema({cv.string: PIN_SCHEMA})}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Beaglebone Black GPIO devices."""
pins = config[CONF_PINS]
binary_sensors = []
for pin, params in pins.items():
binary_sensors.append(BBBGPIOBinarySensor(pin, params))
add_entities(binary_sensors)
class BBBGPIOBinarySensor(BinarySensorEntity):
"""Representation of a binary sensor that uses Beaglebone Black GPIO."""
def __init__(self, pin, params):
"""Initialize the Beaglebone Black binary sensor."""
self._pin = pin
self._name = params[CONF_NAME] or DEVICE_DEFAULT_NAME
self._bouncetime = params[CONF_BOUNCETIME]
self._pull_mode = params[CONF_PULL_MODE]
self._invert_logic = params[CONF_INVERT_LOGIC]
bbb_gpio.setup_input(self._pin, self._pull_mode)
self._state = bbb_gpio.read_input(self._pin)
def read_gpio(pin):
"""Read state from GPIO."""
self._state = bbb_gpio.read_input(self._pin)
self.schedule_update_ha_state()
bbb_gpio.edge_detect(self._pin, read_gpio, self._bouncetime)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state != self._invert_logic
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import re
import time
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import oldisim_dependencies
from six.moves import map
FLAGS = flags.FLAGS
flags.DEFINE_integer('oldisim_num_leaves', 4, 'number of leaf nodes',
lower_bound=1, upper_bound=64)
flags.DEFINE_list('oldisim_fanout', [],
'a list of fanouts to be tested. '
'a root can connect to a subset of leaf nodes (fanout). '
'the value of fanout has to be smaller than num_leaves.')
flags.DEFINE_enum('oldisim_latency_metric', 'avg',
['avg', '50p', '90p', '95p', '99p', '99.9p'],
'Allowable metrics for end-to-end latency')
flags.DEFINE_float('oldisim_latency_target', '30', 'latency target in ms')
NUM_DRIVERS = 1
NUM_ROOTS = 1
BENCHMARK_NAME = 'oldisim'
BENCHMARK_CONFIG = """
oldisim:
description: >
Run oldisim. Specify the number of leaf
nodes with --oldisim_num_leaves
vm_groups:
default:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
"""Decide number of vms needed to run oldisim."""
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
config['vm_groups']['default']['vm_count'] = (FLAGS.oldisim_num_leaves
+ NUM_DRIVERS + NUM_ROOTS)
return config
def InstallAndBuild(vm):
"""Install and build oldisim on the target vm.
Args:
vm: A vm instance that runs oldisim.
"""
logging.info('prepare oldisim on %s', vm)
vm.Install('oldisim_dependencies')
def Prepare(benchmark_spec):
"""Install and build oldisim on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
leaf_vms = [vm for vm_idx, vm in enumerate(vms)
if vm_idx >= (NUM_DRIVERS + NUM_ROOTS)]
if vms:
vm_util.RunThreaded(InstallAndBuild, vms)
# Launch job on the leaf nodes.
leaf_server_bin = oldisim_dependencies.BinaryPath('LeafNode')
for vm in leaf_vms:
leaf_cmd = '%s --threads=%s' % (leaf_server_bin, vm.NumCpusForBenchmark())
vm.RemoteCommand('%s &> /dev/null &' % leaf_cmd)
def SetupRoot(root_vm, leaf_vms):
"""Connect a root node to a list of leaf nodes.
Args:
root_vm: A root vm instance.
leaf_vms: A list of leaf vm instances.
"""
fanout_args = ' '.join(['--leaf=%s' % i.internal_ip
for i in leaf_vms])
root_server_bin = oldisim_dependencies.BinaryPath('ParentNode')
root_cmd = '%s --threads=%s %s' % (root_server_bin,
root_vm.NumCpusForBenchmark(), fanout_args)
logging.info('Root cmdline: %s', root_cmd)
root_vm.RemoteCommand('%s &> /dev/null &' % root_cmd)
def ParseOutput(oldisim_output):
"""Parses the output from oldisim.
Args:
oldisim_output: A string containing the text of oldisim output.
Returns:
A tuple of (peak_qps, peak_lat, target_qps, target_lat).
"""
re_peak = re.compile(r'peak qps = (?P<qps>\S+), latency = (?P<lat>\S+)')
re_target = re.compile(r'measured_qps = (?P<qps>\S+), latency = (?P<lat>\S+)')
for line in oldisim_output.splitlines():
match = re.search(re_peak, line)
if match:
peak_qps = float(match.group('qps'))
peak_lat = float(match.group('lat'))
target_qps = float(peak_qps)
target_lat = float(peak_lat)
continue
match = re.search(re_target, line)
if match:
target_qps = float(match.group('qps'))
target_lat = float(match.group('lat'))
return peak_qps, peak_lat, target_qps, target_lat
def RunLoadTest(benchmark_spec, fanout):
"""Run Loadtest for a given topology.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
fanout: Request is first processed by a root node, which then
fans out to a subset of leaf nodes.
Returns:
A tuple of (peak_qps, peak_lat, target_qps, target_lat).
"""
assert fanout <= FLAGS.oldisim_num_leaves, (
'The number of leaf nodes a root node connected to is defined by the '
'flag fanout. Its current value %s is bigger than the total number of '
'leaves %s.' % (fanout, FLAGS.oldisim_num_leaves))
vms = benchmark_spec.vms
driver_vms = []
root_vms = []
leaf_vms = []
for vm_index, vm in enumerate(vms):
if vm_index < NUM_DRIVERS:
driver_vms.append(vm)
elif vm_index < (NUM_DRIVERS + NUM_ROOTS):
root_vms.append(vm)
else:
leaf_vms.append(vm)
leaf_vms = leaf_vms[:fanout]
for root_vm in root_vms:
SetupRoot(root_vm, leaf_vms)
driver_vm = driver_vms[0]
driver_binary = oldisim_dependencies.BinaryPath('DriverNode')
launch_script = oldisim_dependencies.Path('workloads/search/search_qps.sh')
driver_args = ' '.join(['--server=%s' % i.internal_ip
for i in root_vms])
# Make sure server is up.
time.sleep(5)
driver_cmd = '%s -s %s:%s -t 30 -- %s %s --threads=%s --depth=16' % (
launch_script, FLAGS.oldisim_latency_metric, FLAGS.oldisim_latency_target,
driver_binary, driver_args, driver_vm.NumCpusForBenchmark())
logging.info('Driver cmdline: %s', driver_cmd)
stdout, _ = driver_vm.RemoteCommand(driver_cmd, should_log=True)
return ParseOutput(stdout)
def Run(benchmark_spec):
"""Run oldisim on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
results = []
qps_dict = dict()
fanout_list = set([1, FLAGS.oldisim_num_leaves])
for fanout in map(int, FLAGS.oldisim_fanout):
if fanout > 1 and fanout < FLAGS.oldisim_num_leaves:
fanout_list.add(fanout)
for fanout in sorted(fanout_list):
qps = RunLoadTest(benchmark_spec, fanout)[2]
qps_dict[fanout] = qps
if fanout == 1:
base_qps = qps
name = 'Scaling efficiency of %s leaves' % fanout
scaling_efficiency = round(min(qps_dict[fanout] / base_qps, 1), 2)
metadata = {}
results.append(sample.Sample(name, scaling_efficiency, '', metadata))
return results
def Cleanup(benchmark_spec): # pylint: disable=unused-argument
"""Cleanup oldisim on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
for vm_index, vm in enumerate(vms):
if vm_index >= NUM_DRIVERS and vm_index < (NUM_DRIVERS + NUM_ROOTS):
vm.RemoteCommand('sudo pkill ParentNode')
elif vm_index >= (NUM_DRIVERS + NUM_ROOTS):
vm.RemoteCommand('sudo pkill LeafNode')
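# Illustrative sketch (not part of the original benchmark): how ParseOutput
# reads the driver's summary lines. The sample text below is made up, but it
# matches the regular expressions defined in ParseOutput.
if __name__ == '__main__':
  sample_output = ('peak qps = 1200.5, latency = 3.2\n'
                   'measured_qps = 900.0, latency = 2.8\n')
  print(ParseOutput(sample_output))  # -> (1200.5, 3.2, 900.0, 2.8)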
|
import asyncio
import threading
from . import logger
def this_is_js():
return False
class Loop:
""" The singleton Flexx event loop at ``flexx.event.loop``. This holds
the queue of pending calls, actions, and reactions. These are queued
separately to realize a consistent one-way data-flow. Further, this
object keeps track of (per thread) active components (i.e. the components
whose context manager is currently active).
Users typically do not need to be aware of the loop object, as it is
used internally by Flexx, though it can be useful during debugging.
This event system integrates with Python's builtin asyncio system,
configurable via ``Loop.integrate()``. This system can run in a separate
thread, but there can be only one active flexx event loop per process.
This object can also be used as a context manager; an event loop
iteration takes place when the context exits.
"""
def __init__(self):
self._lock = threading.RLock()
self._thread_id = threading.get_ident()
# self._call_soon_func = lambda x: None
# Keep track of a stack of "active" components for use within Component
# context manager. We have one list for each thread. Note that we should
# limit its use to context managers, and execution should never be
# handed back to the event loop while inside a context.
self._local = threading.local()
self.reset()
self.integrate()
def reset(self):
""" Reset the loop, purging all pending calls, actions and reactions.
This is mainly intended for test-related code.
"""
self._in_iter = False
self._scheduled_call_to_iter = False
self._processing_action = None
self._processing_reaction = None
self._prop_access = {}
self._pending_calls = []
self._pending_actions = []
self._pending_reactions = []
self._pending_reaction_ids = {}
def has_pending(self):
""" Get whether there are any pending actions, reactions, or calls.
"""
return (len(self._pending_reactions) > 0 or
len(self._pending_actions) > 0 or
len(self._pending_calls) > 0)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.iter()
def can_mutate(self, component=None):
""" Whether mutations can be done to the given component,
and whether invoked actions on the component are applied directly.
"""
# When there is an active component, only that one can be mutated
        # (so that the behavior of an init() is the same regardless of whether
        # a component is instantiated from an action); it must be the current one.
# Otherwise we must be in an action.
active = self.get_active_component()
if active is not None:
return active is component
else:
return self._processing_action is not None
## Active components
def get_active_components(self):
""" Get a tuple that represents the stack of "active" components.
Each thread has its own stack. Should only be used directly inside
a Component context manager.
"""
return tuple(self._local._active_components)
def get_active_component(self):
""" Get the currently "active" component (for this thread), or None.
"""
if len(self._local._active_components) > 0:
return self._local._active_components[-1]
def _activate_component(self, component):
""" Friend method of Component. """
self._local._active_components.append(component)
def _deactivate_component(self, component):
""" Friend method of Component. """
top = self._local._active_components.pop(-1)
if top is not component:
raise RuntimeError('loop._deactivate_component: %s is not %s' %
(component.id, top and top.id))
## Adding to queues
def _schedule_iter(self):
# Make sure to call this with the lock
if self._scheduled_call_to_iter is False:
self._scheduled_call_to_iter = True
self._call_soon_func(self._iter_callback)
def call_soon(self, func, *args):
""" Arrange for a callback to be called as soon as possible.
The callback is called after ``call_soon()`` returns, when control
returns to the event loop.
This operates as a FIFO queue, callbacks are called in the order in
which they are registered. Each callback will be called exactly once.
Any positional arguments after the callback will be passed to
the callback when it is called.
This method is thread-safe: the callback will be called in the thread
corresponding with the loop. It is therefore actually more similar to
asyncio's ``call_soon_threadsafe()``.
Also see ``asyncio.get_event_loop().call_soon()`` and
``asyncio.get_event_loop().call_later()``.
"""
# We keep track of pending calls locally to our event system, which
# gives more control, e.g. during testing.
with self._lock:
self._pending_calls.append((func, args))
self._schedule_iter()
# def call_later(self, delay, func, *args)
# This would be nice, but we'd have to implement more sophisticated
# scheduling. Unless we'd just call asyncio's call_later, but I am
# reluctant to do that while this code is still more or less independent of
# asyncio.
def add_action_invokation(self, action, args):
""" Schedule the handling of an action. Automatically called when
an action object is called.
"""
with self._lock:
self._pending_actions.append((action, args))
self._schedule_iter()
def add_reaction_event(self, reaction, ev):
""" Schulde the handling of a reaction. Automatically called by
components.
"""
        # In principle, the mechanics of adding items to the queue are not complex,
# but this code is performance critical, so we apply several tricks
# to make this code run fast.
# _pending_reactions is a list of tuples (reaction, representing event, events)
pending_reactions = self._pending_reactions
mode = reaction.get_mode()
with self._lock:
self._thread_match(True)
if mode == 'normal':
# Normally, we try to consolidate the events by
# appending the event to the existing item in the queue, but
# we don't want to break the order, i.e. we can only skip over
# events that are the same as the current. Each queue item has
# a reference event to make this skipping more efficient.
i = len(pending_reactions)
while i > 0:
i -= 1
ev2 = pending_reactions[i][1] # representing event
if pending_reactions[i][0] is reaction:
# We can simply append the event
pending_reactions[i][2].append(ev)
if not (ev2['source'] is ev['source'] and
ev2['type'] == ev['type']):
# Mark that the events are heterogeneous
pending_reactions[i][1] = {'source': None}
return
# Only continue if all events of the next item match the current
if not (ev2 is None or
(ev2['source'] is ev['source'] and ev2.type == ev.type)):
break
else:
# For greedy and auto reactions, we consolidate by not adding
# to the queue if the corresponding reaction is already
# present. We use _pending_reaction_ids for this.
# We even omit the event objects themselves when we think they
# don't matter (when the number of connection strings is zero).
if reaction._id in self._pending_reaction_ids:
if len(reaction._connections) > 0:
self._pending_reaction_ids[reaction._id][2].append(ev)
return
# Add new item to queue
if len(reaction._connections) > 0:
new_item = [reaction, ev, [ev]]
else:
new_item = [reaction, None, []]
pending_reactions.append(new_item)
self._pending_reaction_ids[reaction._id] = new_item
self._schedule_iter()
def register_prop_access(self, component, prop_name):
""" Register access of a property, to keep track of automatic reactions.
"""
# Notes on auto-reactions. Like any reactions, these are
# connected to events, such that add_reaction_event() will get called
# for the reaction when a property that the reaction uses changes.
        # This will always result in the invocation of the reaction.
        #
        # During the invocation of a reaction, the register_prop_access()
        # method is used to track property access by the reaction. That way,
        # connections can be updated as needed.
        # Note that we use a dict here, but for the event reconnecting to
        # be efficient, the order of connections is important, so auto
        # reactions have really poor performance on Python < 3.6.
# Make sure not to count access from other threads
if self._processing_reaction is not None:
if self._processing_reaction.get_mode() == 'auto':
if self._thread_match(False):
if component._id not in self._prop_access:
d = {}
self._prop_access[component._id] = component, d
else:
d = self._prop_access[component._id][1]
d[prop_name] = True
## Queue processing
def _thread_match(self, fail):
# Check that event loop is not run from multiple threads at once
tid = threading.get_ident()
if self._thread_id != tid: # pragma: no cover
if not fail:
return False
            raise RuntimeError('Flexx is supposed to run a single event loop at once.')
return True
def _iter_callback(self):
if threading.get_ident() != self._thread_id:
return # probably an old pending callback
self._scheduled_call_to_iter = False
self.iter()
# We need a way to run our own little event system, because we cannot do
# async in JavaScript. Therefore this is public, and therefore call_soon()
    # invocations are queued locally instead of being delegated to asyncio.
def iter(self):
""" Do one event loop iteration; process pending calls,
actions and reactions. These tree types of items are each queued
in separate queues, and are handled in the aforementioned order.
"""
with self._lock:
self._thread_match(True)
            # Guard against improper use
if self._in_iter is True:
raise RuntimeError('Cannot call flexx.event.loop.iter() while it '
'is processing.')
self._in_iter = True
try:
self._process_calls()
self._process_actions()
self._process_reactions()
finally:
self._in_iter = False
def _process_calls(self):
""" Process pending function calls.
"""
# Select pending
with self._lock:
self._thread_match(True)
pending_calls = self._pending_calls
self._pending_calls = []
# Process
for i in range(len(pending_calls)):
func, args = pending_calls[i]
try:
func(*args)
except Exception as err:
logger.exception(err)
def _process_actions(self, n=None):
""" Process all (or just one) pending actions.
"""
# Select pending
with self._lock:
self._thread_match(True)
if n is None:
pending_actions = self._pending_actions
self._pending_actions = []
else:
pending_actions = self._pending_actions[:n]
self._pending_actions = self._pending_actions[n:]
# Process
for i in range(len(pending_actions)):
action, args = pending_actions[i]
self._processing_action = action
try:
action(*args)
except Exception as err:
logger.exception(err)
finally:
self._processing_action = None
def _process_reactions(self):
""" Process all pending reactions.
"""
# Select pending
with self._lock:
self._thread_match(True)
pending_reactions = self._pending_reactions
self._pending_reactions = []
self._pending_reaction_ids = {}
# Process
for ir in range(len(pending_reactions)):
reaction, _, events = pending_reactions[ir]
# Call reaction
if len(events) > 0 or reaction.get_mode() == 'auto':
self._prop_access = {}
self._processing_reaction = reaction
try:
reaction(*events)
except Exception as err:
logger.exception(err)
finally:
self._processing_reaction = None
# Reconnect auto reaction. The _update_implicit_connections()
# method is pretty efficient if connections has not changed.
try:
if reaction.get_mode() == 'auto':
connections = []
for component_names in self._prop_access.values():
component = component_names[0]
for name in component_names[1].keys():
connections.append((component, name))
reaction._update_implicit_connections(connections)
except Exception as err: # pragma: no cover
logger.exception(err)
finally:
self._prop_access = {}
## Integration
def integrate(self, loop=None, reset=True):
""" Integrate the Flexx event system with the given asyncio
event loop (or the default one). Also binds the event system
to the current thread.
From this point, any (pending) calls to the iter callback by the
previous thread will be ignored.
        By calling this with ``reset=False``, it should be possible
        to hot-swap the system from one loop (and/or thread) to another
        (though this is currently not tested).
"""
if loop is None:
loop = asyncio.get_event_loop()
with self._lock:
self._thread_id = threading.get_ident()
self._local._active_components = []
self._call_soon_func = loop.call_soon_threadsafe
self._call_soon_func(self._iter_callback)
if reset:
self.reset()
    # Below is deprecated, but I leave it here for a bit; we may want to
# revive some of it.
#
# def integrate(self, call_soon_func=None, raise_on_fail=True):
# """ Integrate with an existing event loop system.
#
# Params:
# call_soon_func (func): a function that can be called to
# schedule the calling of a given function. If not given,
# will try to connect to Tornado or Qt event loop, but only
# if either library is already imported.
# raise_on_fail (bool): whether to raise an error when the
# integration could not be performed.
# """
# if call_soon_func is not None:
# if callable(call_soon_func):
# self._call_soon_func = call_soon_func
# self._call_soon_func(self.iter)
# else:
# raise ValueError('call_soon_func must be a function')
# elif 'tornado' in sys.modules: # pragma: no cover
# self.integrate_tornado()
# elif 'PyQt4.QtGui' in sys.modules: # pragma: no cover
# self.integrate_pyqt4()
# elif 'PySide.QtGui' in sys.modules: # pragma: no cover
# self.integrate_pyside()
# elif raise_on_fail: # pragma: no cover
# raise RuntimeError('Could not integrate flexx.event loop')
#
# def integrate_tornado(self): # pragma: no cover
# """ Integrate with tornado.
# """
# import tornado.ioloop
# loop = tornado.ioloop.IOLoop.current()
# self._call_soon_func = loop.add_callback
# self._call_soon_func(self.iter)
# logger.debug('Flexx event loop integrated with Tornado')
#
# def integrate_pyqt4(self): # pragma: no cover
# """ Integrate with PyQt4.
# """
# from PyQt4 import QtCore, QtGui
# self._integrate_qt(QtCore, QtGui)
# logger.debug('Flexx event loop integrated with PyQt4')
#
# def integrate_pyside(self): # pragma: no cover
# """ Integrate with PySide.
# """
# from PySide import QtCore, QtGui
# self._integrate_qt(QtCore, QtGui)
# logger.debug('Flexx event loop integrated with PySide')
#
# def _integrate_qt(self, QtCore, QtGui): # pragma: no cover
# from queue import Queue, Empty
#
# class _CallbackEventHandler(QtCore.QObject):
#
# def __init__(self):
# QtCore.QObject.__init__(self)
# self.queue = Queue()
#
# def customEvent(self, event):
# while True:
# try:
# callback, args = self.queue.get_nowait()
# except Empty:
# break
# try:
# callback(*args)
# except Exception as why:
# logger.warning('blck failed: {}:\n{}'.format(callback, why))
#
# def postEventWithCallback(self, callback, *args):
# self.queue.put((callback, args))
# QtGui.qApp.postEvent(self, QtCore.QEvent(QtCore.QEvent.User))
#
# _callbackEventHandler = _CallbackEventHandler()
# self._call_soon_func = _callbackEventHandler.postEventWithCallback
# self._call_soon_func(self.iter)
loop = Loop()
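# Illustrative sketch (not part of the original module): using the singleton
# loop as a context manager, as described in the Loop docstring. The callback
# below is made up; the pending call is processed when the context exits,
# because __exit__ triggers one loop iteration.
if __name__ == '__main__':
    def _demo_callback(msg):
        print('called with', msg)

    with loop:
        loop.call_soon(_demo_callback, 'hello')
    # The queued call has now been processed by loop.iter().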
|
import numpy as np
import os
import warnings
import xml.etree.ElementTree as ET
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.datasets.voc import voc_utils
from chainercv.utils import read_image
class VOCBboxDataset(GetterDataset):
"""Bounding box dataset for PASCAL `VOC`_.
.. _`VOC`: http://host.robots.ox.ac.uk/pascal/VOC/voc2012/
Args:
data_dir (string): Path to the root of the training data. If this is
:obj:`auto`, this class will automatically download data for you
under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/voc`.
split ({'train', 'val', 'trainval', 'test'}): Select a split of the
dataset. :obj:`test` split is only available for
2007 dataset.
year ({'2007', '2012'}): Use a dataset prepared for a challenge
held in :obj:`year`.
use_difficult (bool): If :obj:`True`, use images that are labeled as
difficult in the original annotation.
return_difficult (bool): If :obj:`True`, this dataset returns
a boolean array
that indicates whether bounding boxes are labeled as difficult
or not. The default value is :obj:`False`.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`bbox` [#voc_bbox_1]_, ":math:`(R, 4)`", :obj:`float32`, \
":math:`(y_{min}, x_{min}, y_{max}, x_{max})`"
:obj:`label` [#voc_bbox_1]_, ":math:`(R,)`", :obj:`int32`, \
":math:`[0, \#fg\_class - 1]`"
:obj:`difficult` (optional [#voc_bbox_2]_), ":math:`(R,)`", \
:obj:`bool`, --
.. [#voc_bbox_1] If :obj:`use_difficult = True`, \
:obj:`bbox` and :obj:`label` contain difficult instances.
.. [#voc_bbox_2] :obj:`difficult` is available \
if :obj:`return_difficult = True`.
"""
def __init__(self, data_dir='auto', split='train', year='2012',
use_difficult=False, return_difficult=False):
super(VOCBboxDataset, self).__init__()
if data_dir == 'auto' and year in ['2007', '2012']:
data_dir = voc_utils.get_voc(year, split)
if split not in ['train', 'trainval', 'val']:
if not (split == 'test' and year == '2007'):
warnings.warn(
                    'please pick split from \'train\', \'trainval\', \'val\' '
                    'for the 2012 dataset. For the 2007 dataset, you can pick \'test\''
' in addition to the above mentioned splits.'
)
id_list_file = os.path.join(
data_dir, 'ImageSets/Main/{0}.txt'.format(split))
self.ids = [id_.strip() for id_ in open(id_list_file)]
self.data_dir = data_dir
self.use_difficult = use_difficult
self.add_getter('img', self._get_image)
self.add_getter(('bbox', 'label', 'difficult'), self._get_annotations)
if not return_difficult:
self.keys = ('img', 'bbox', 'label')
def __len__(self):
return len(self.ids)
def _get_image(self, i):
id_ = self.ids[i]
img_path = os.path.join(self.data_dir, 'JPEGImages', id_ + '.jpg')
img = read_image(img_path, color=True)
return img
def _get_annotations(self, i):
id_ = self.ids[i]
anno = ET.parse(
os.path.join(self.data_dir, 'Annotations', id_ + '.xml'))
bbox = []
label = []
difficult = []
for obj in anno.findall('object'):
            # when not using the difficult split and the object is
            # difficult, skip it.
if not self.use_difficult and int(obj.find('difficult').text) == 1:
continue
difficult.append(int(obj.find('difficult').text))
bndbox_anno = obj.find('bndbox')
# subtract 1 to make pixel indexes 0-based
bbox.append([
int(bndbox_anno.find(tag).text) - 1
for tag in ('ymin', 'xmin', 'ymax', 'xmax')])
name = obj.find('name').text.lower().strip()
label.append(voc_utils.voc_bbox_label_names.index(name))
bbox = np.stack(bbox).astype(np.float32)
label = np.stack(label).astype(np.int32)
# When `use_difficult==False`, all elements in `difficult` are False.
        difficult = np.array(difficult, dtype=bool)
return bbox, label, difficult
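# Illustrative usage sketch (not part of the original module). With
# data_dir='auto' the VOC data is downloaded on first use, so this only
# demonstrates the shapes and dtypes described in the class docstring.
if __name__ == '__main__':
    dataset = VOCBboxDataset(data_dir='auto', split='train', year='2012')
    img, bbox, label = dataset[0]
    print(img.shape)    # (3, H, W), float32, RGB in [0, 255]
    print(bbox.shape)   # (R, 4), float32, (y_min, x_min, y_max, x_max)
    print(label.shape)  # (R,), int32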
|
import numpy as np
import pytest
from tensornetwork.block_sparse.charge import (BaseCharge, U1Charge,
fuse_charges, Z2Charge, ZNCharge,
charge_equal)
from tensornetwork.block_sparse.utils import (fuse_ndarrays, unique,
fuse_degeneracies)
def test_charge_equal():
q1 = np.array([[-1, 2, 4, -3, 1, 2, -5]]).T
q2 = np.array([[1, 2, 4, -3, 1, 2, -5]]).T
q3 = np.array([[1, 2, 4, -3, -5]]).T
Q1 = BaseCharge(charges=q1, charge_types=[U1Charge])
Q2 = BaseCharge(charges=q2, charge_types=[U1Charge])
Q3 = BaseCharge(charges=q3, charge_types=[U1Charge])
assert charge_equal(Q1, Q1)
assert not charge_equal(Q1, Q2)
assert not charge_equal(Q1, Q3)
_ = Q1.unique_charges
assert charge_equal(Q1, Q1)
assert not charge_equal(Q1, Q2)
assert not charge_equal(Q1, Q3)
_ = Q2.unique_charges
assert charge_equal(Q1, Q1)
assert not charge_equal(Q1, Q2)
assert not charge_equal(Q1, Q3)
_ = Q3.unique_charges
assert charge_equal(Q1, Q1)
assert not charge_equal(Q1, Q2)
assert not charge_equal(Q1, Q3)
def test_BaseCharge_charges():
D = 100
B = 6
np.random.seed(10)
charges = np.random.randint(-B // 2, B // 2 + 1, (D, 2)).astype(np.int16)
q1 = BaseCharge(charges)
np.testing.assert_allclose(q1.charges, charges)
def test_BaseCharge_generic():
D = 300
B = 5
np.random.seed(10)
q = np.random.randint(-B // 2, B // 2 + 1, (D, 2)).astype(np.int16)
unique_charges = np.unique(q, axis=0)
Q = BaseCharge(charges=q)
assert Q.dim == 300
assert Q.num_symmetries == 2
assert Q.num_unique == unique_charges.shape[0]
def test_BaseCharge_len():
D = 300
B = 5
np.random.seed(10)
q = np.random.randint(-B // 2, B // 2 + 1, (D, 2)).astype(np.int16)
Q = BaseCharge(charges=q)
assert len(Q) == 300
def test_BaseCharge_copy():
D = 300
B = 5
np.random.seed(10)
q = np.random.randint(-B // 2, B // 2 + 1, (D, 2)).astype(np.int16)
Q = BaseCharge(charges=q)
Qcopy = Q.copy()
assert Q.charge_labels is not Qcopy.charge_labels
assert Q.unique_charges is not Qcopy.unique_charges
np.testing.assert_allclose(Q.charge_labels, Qcopy.charge_labels)
np.testing.assert_allclose(Q.unique_charges, Qcopy.unique_charges)
def test_BaseCharge_unique():
D = 3000
B = 5
np.random.seed(10)
q = np.random.randint(-B // 2, B // 2 + 1, (D, 2)).astype(np.int16)
Q = BaseCharge(charges=q, charge_types=[U1Charge, U1Charge])
  # this call has to use the custom unique to ensure correct ordering
expected = unique(
q, return_index=True, return_inverse=True, return_counts=True)
actual = Q.unique(return_index=True, return_inverse=True, return_counts=True)
assert np.all(actual[0].charges == expected[0])
assert np.all(actual[1] == expected[1])
assert np.all(actual[2] == expected[2])
assert np.all(actual[3] == expected[3])
_ = Q.unique_charges # switch internally to unique-labels representation
actual = Q.unique(return_index=True, return_inverse=True, return_counts=True)
assert np.all(actual[0].charges == expected[0])
assert np.all(actual[1] == expected[1])
assert np.all(actual[2] == expected[2])
assert np.all(actual[3] == expected[3])
def test_BaseCharge_single_unique():
D = 30
np.random.seed(10)
q = np.ones((D, 2), dtype=np.int16)
Q = BaseCharge(charges=q, charge_types=[U1Charge, U1Charge])
expected = np.unique(
q, return_index=True, return_inverse=True, return_counts=True, axis=0)
actual = Q.unique(return_index=True, return_inverse=True, return_counts=True)
assert np.all(actual[0].charges == expected[0])
assert np.all(actual[1] == expected[1])
assert np.all(actual[2] == expected[2])
assert np.all(actual[3] == expected[3])
expected = np.unique(q, axis=0)
actual = Q.unique()
assert np.all(actual.charges == expected)
def test_BaseCharge_unique_sort():
np.random.seed(10)
unique_charges = np.array([1, 0, -1])
labels = np.random.randint(0, 3, 100)
Q = U1Charge(charges=unique_charges, charge_labels=labels)
actual = Q.unique(return_index=True, return_inverse=True, return_counts=True)
np.testing.assert_allclose(actual[0].unique_charges, [[1], [0], [-1]])
@pytest.mark.parametrize('chargetype, B0, B1', [(U1Charge, -5, 5),
(Z2Charge, 0, 1),
(ZNCharge(3), 0, 2),
(ZNCharge(6), 0, 5)])
def test_Charge_charges(chargetype, B0, B1):
D = 100
np.random.seed(10)
charges = np.random.randint(B0, B1 + 1, D).astype(np.int16)
q1 = chargetype(charges)
assert np.all(np.squeeze(q1.charges) == charges)
@pytest.mark.parametrize('chargetype, B0, B1,sign', [(U1Charge, -5, 5, -1),
(Z2Charge, 0, 1, 1)])
def test_Charge_dual(chargetype, B0, B1, sign):
D = 100
np.random.seed(10)
charges = np.random.randint(B0, B1 + 1, D).astype(np.int16)
q1 = chargetype(charges)
assert np.all(np.squeeze(q1.dual(True).charges) == sign * charges)
@pytest.mark.parametrize('n', list(range(2, 12)))
def test_Charge_dual_zncharges(n):
chargetype = ZNCharge(n)
D = 100
np.random.seed(10)
charges = np.random.randint(0, n, D).astype(np.int16)
q1 = chargetype(charges)
assert np.all(np.squeeze(q1.dual(True).charges) == (n - charges) % n)
def test_Z2Charge_random():
np.random.seed(10)
z2 = Z2Charge.random(10, 0, 1)
assert np.all(np.isin(z2.charges.ravel(), [0, 1]))
def test_Z2Charge_raises():
np.random.seed(10)
charges = np.array([-1, 0, 1, 2])
with pytest.raises(ValueError):
Z2Charge(charges)
with pytest.raises(ValueError, match="Z2 charges can only"):
Z2Charge.random(10, -1, 1)
with pytest.raises(ValueError, match="Z2 charges can only"):
Z2Charge.random(10, 0, 2)
def get_charges(B0, B1, D, num_charges):
return [
np.random.randint(B0, B1 + 1, D).astype(np.int16)
for _ in range(num_charges)
]
def fuse_many_charges(num_charges,
num_charge_types,
seed,
D,
B,
use_flows=False):
np.random.seed(seed)
if use_flows:
flows = np.random.choice([True, False], num_charges, replace=True)
else:
flows = np.asarray([False] * num_charges)
np_flows = np.ones(num_charges, dtype=np.int16)
np_flows[flows] = -1
charges = [
get_charges(-B // 2, B // 2, D, num_charge_types)
for _ in range(num_charges)
]
fused = [
fuse_ndarrays([charges[n][m] * np_flows[n]
for n in range(num_charges)])
for m in range(num_charge_types)
]
final_charges = [U1Charge(charges[n][0]) for n in range(num_charges)]
for n in range(num_charges):
for m in range(1, num_charge_types):
final_charges[n] = final_charges[n] @ U1Charge(charges[n][m])
np_target_charges = np.random.randint(-B, B, num_charge_types, dtype=np.int16)
target_charges = [
U1Charge(np.array([np_target_charges[n]]))
for n in range(num_charge_types)
]
target = target_charges[0]
for m in range(1, num_charge_types):
target = target @ target_charges[m]
final = final_charges[0] * flows[0]
for n in range(1, num_charges):
final = final + final_charges[n] * flows[n]
nz_1 = np.nonzero(final == target)[0]
masks = [fused[m] == target.charges[0, m] for m in range(num_charge_types)]
#pylint: disable=no-member
nz_2 = np.nonzero(np.logical_and.reduce(masks))[0]
return nz_1, nz_2
@pytest.mark.parametrize('use_flows', [True, False])
@pytest.mark.parametrize('num_charges, num_charge_types, D, B',
[(1, 1, 0, 5), (2, 1, 1000, 6), (2, 2, 1000, 6),
(3, 1, 100, 6), (3, 2, 100, 6), (3, 3, 100, 6)])
def test_U1Charge_fusion(num_charges, num_charge_types, D, B, use_flows):
nz_1, nz_2 = fuse_many_charges(
num_charges=num_charges,
num_charge_types=num_charge_types,
seed=20,
D=D,
B=B,
use_flows=use_flows)
if D > 0:
assert len(nz_1) > 0
assert len(nz_2) > 0
assert np.all(nz_1 == nz_2)
def test_BaseCharge_intersect():
q1 = np.array([[0, 1, 2, 0, 6], [2, 3, 4, -1, 4]]).T
q2 = np.array([[0, -2, 6], [2, 3, 4]]).T
Q1 = BaseCharge(charges=q1)
Q2 = BaseCharge(charges=q2)
res = Q1.intersect(Q2)
np.testing.assert_allclose(res.charges, np.asarray([[0, 6], [2, 4]]).T)
def test_BaseCharge_intersect_2():
c1 = U1Charge(np.array([1, 0, -1]), charge_labels=np.array([2, 0, 1]))
c2 = U1Charge(np.array([-1, 0, 1]))
res = c1.intersect(c2)
np.testing.assert_allclose(res.charges, [[-1], [0], [1]])
def test_BaseCharge_intersect_3():
c1 = U1Charge(np.array([1, 0, -1]), charge_labels=np.array([2, 0, 1]))
c2 = np.array([-1, 0, 1], dtype=np.int16)
res = c1.intersect(c2)
np.testing.assert_allclose(res.charges, [[-1], [0], [1]])
def test_BaseCharge_intersect_return_indices():
q1 = np.array([[0, 1, 2, 0, 6], [2, 3, 4, -1, 4]]).T
q2 = np.array([[-2, 0, 6], [3, 2, 4]]).T
Q1 = BaseCharge(charges=q1)
Q2 = BaseCharge(charges=q2)
res, i1, i2 = Q1.intersect(Q2, return_indices=True)
np.testing.assert_allclose(res.charges, np.asarray([[0, 6], [2, 4]]).T)
np.testing.assert_allclose(i1, [0, 4])
np.testing.assert_allclose(i2, [1, 2])
@pytest.mark.parametrize('chargetype, B0, B1', [(U1Charge, -5, 5),
(Z2Charge, 0, 1),
(ZNCharge(3), 0, 2)])
def test_Charge_matmul(chargetype, B0, B1):
D = 1000
np.random.seed(10)
C1 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
C2 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
C3 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
q1 = chargetype(C1)
q2 = chargetype(C2)
q3 = chargetype(C3)
Q = q1 @ q2 @ q3
Q_ = BaseCharge(
np.stack([C1, C2, C3], axis=1),
charge_labels=None,
charge_types=[chargetype] * 3)
assert np.all(Q.charges == Q_.charges)
def test_BaseCharge_matmul_raises():
B = 5
np.random.seed(10)
C1 = np.random.randint(-B // 2, B // 2 + 1, 10).astype(np.int16)
C2 = np.random.randint(-B // 2, B // 2 + 1, 11).astype(np.int16)
q1 = U1Charge(C1)
q2 = U1Charge(C2)
with pytest.raises(ValueError):
q1 @ q2
@pytest.mark.parametrize('chargetype, B0, B1, identity',
[(U1Charge, -5, 5, 0), (Z2Charge, 0, 1, 0),
(ZNCharge(5), 0, 4, 0), (ZNCharge(7), 0, 6, 0)])
def test_Charge_identity(chargetype, B0, B1, identity):
D = 100
np.random.seed(10)
C1 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
C2 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
C3 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
q1 = chargetype(C1)
q2 = chargetype(C2)
q3 = chargetype(C3)
Q = q1 @ q2 @ q3
eye = Q.identity_charges()
np.testing.assert_allclose(eye.unique_charges, identity)
assert eye.num_symmetries == 3
@pytest.mark.parametrize("n", list(range(2, 20)))
def test_zncharge_dual_invariant(n):
D = 100
np.random.seed(10)
charges = np.random.randint(0, n, D).astype(np.int16)
a = ZNCharge(n)(charges)
b = a.dual(True)
np.testing.assert_allclose((b.charges + a.charges) % n, np.zeros((D, 1)))
@pytest.mark.parametrize("n", list(range(2, 20)))
def test_zncharge_fusion(n):
D = 100
np.random.seed(10)
charges1 = np.random.randint(0, n, D).astype(np.int16)
charges2 = np.random.randint(0, n, D).astype(np.int16)
a = ZNCharge(n)(charges1)
b = ZNCharge(n)(charges2)
np.testing.assert_allclose(
np.add.outer(charges1, charges2).ravel() % n, (a + b).charges.ravel())
@pytest.mark.parametrize('chargetype, B0, B1, sign', [(U1Charge, -5, 5, -1),
(Z2Charge, 0, 1, 1)])
def test_Charge_mul(chargetype, B0, B1, sign):
D = 100
np.random.seed(10)
C1 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
C2 = np.random.randint(B0, B1 + 1, D).astype(np.int16)
q1 = chargetype(C1)
q2 = chargetype(C2)
q = q1 @ q2
res = q * True
np.testing.assert_allclose(res.charges, sign * np.stack([C1, C2], axis=1))
def test_Charge_mul_raises():
D = 100
np.random.seed(10)
C = np.random.randint(-5, 6, D).astype(np.int16)
q = U1Charge(C)
with pytest.raises(
ValueError, match="can only multiply by `True` or `False`"):
q * 10 #pytype: disable=unsupported-operands
@pytest.mark.parametrize('n', list(range(2, 12)))
def test_Charge_mul_zncharge(n):
chargetype = ZNCharge(n)
D = 100
np.random.seed(10)
C1 = np.random.randint(0, n, D).astype(np.int16)
C2 = np.random.randint(0, n, D).astype(np.int16)
q1 = chargetype(C1)
q2 = chargetype(C2)
q = q1 @ q2
res = q * True
np.testing.assert_allclose(res.charges, (n - np.stack([C1, C2], axis=1)) % n)
def test_fuse_charges():
num_charges = 5
B = 6
D = 10
np_charges = [
np.random.randint(-B // 2, B // 2 + 1, D).astype(np.int16)
for _ in range(num_charges)
]
charges = [U1Charge(c) for c in np_charges]
flows = [True, False, True, False, True]
np_flows = np.ones(5, dtype=np.int16)
np_flows[flows] = -1
fused = fuse_charges(charges, flows)
np_fused = fuse_ndarrays([c * f for c, f in zip(np_charges, np_flows)])
np.testing.assert_allclose(np.squeeze(fused.charges), np_fused)
def test_fuse_charges_raises():
num_charges = 5
B = 6
D = 10
np_charges = [
np.random.randint(-B // 2, B // 2 + 1, D).astype(np.int16)
for _ in range(num_charges)
]
charges = [U1Charge(c) for c in np_charges]
flows = [True, False, True, False]
with pytest.raises(ValueError):
fuse_charges(charges, flows)
def test_reduce():
q = np.array([[0, 1, 2, 0, 6, 1, -9, 0, -7], [2, 3, 4, -1, 4, 3, 1, 2, 0]]).T
Q = BaseCharge(charges=q)
target_charge = np.array([[0, 1, 6, -12], [2, 3, 4, 16]]).T
expected = np.array([[0, 1, 6, 1, 0], [2, 3, 4, 3, 2]]).T
res, locs = Q.reduce(target_charge, return_locations=True)
np.testing.assert_allclose(res.charges, expected)
np.testing.assert_allclose(locs, [0, 1, 4, 5, 7])
def test_reduce_1d():
q = np.array([0, 1, 2, 0, 6, 1, -9, 0, -7])
Q = BaseCharge(charges=q)
target_charge = np.array([0, 1])
expected = np.array([[0, 1, 0, 1, 0]]).T
res, locs = Q.reduce(target_charge, return_locations=True)
np.testing.assert_allclose(res.charges, expected)
np.testing.assert_allclose(locs, [0, 1, 3, 5, 7])
def test_reduce_integer():
q = np.array([0, 1, 2, 0, 6, 1, -9, 0, -7])
Q = BaseCharge(charges=q)
target_charge = 0
expected = np.zeros((3, 1))
res, locs = Q.reduce(target_charge, return_locations=True)
np.testing.assert_allclose(res.charges, expected)
np.testing.assert_allclose(locs, [0, 3, 7])
def test_getitem():
q1 = np.array([0, 1, 2, 0, 6, 1, -9, 0, -7])
q2 = np.array([2, 3, 4, -1, 4, 3, 1, 2, 0])
Q1 = U1Charge(charges=q1)
Q2 = U1Charge(charges=q2)
Q = Q1 @ Q2
t1 = Q[5]
np.testing.assert_allclose(t1.charges, [[1, 3]])
assert np.all([t1.charge_types[n] == U1Charge for n in range(2)])
t2 = Q[[2, 5, 7]]
assert np.all([t2.charge_types[n] == U1Charge for n in range(2)])
np.testing.assert_allclose(t2.charges, [[2, 4], [1, 3], [0, 2]])
t3 = Q[[5, 2, 7]]
assert np.all([t3.charge_types[n] == U1Charge for n in range(2)])
np.testing.assert_allclose(t3.charges, [[1, 3], [2, 4], [0, 2]])
def test_eq_0():
np.random.seed(10)
arr = np.array([-2, -1, 0, 1, -1, 3, 4, 5], dtype=np.int16)
c1 = U1Charge(arr)
targets = np.array([-1, 0])
m1 = c1 == targets
m2 = arr[:, None] == targets[None, :]
np.testing.assert_allclose(m1, m2)
def test_eq_1():
np.random.seed(10)
c1 = U1Charge(np.array([-2, -1, 0, 1, -1, 3, 4, 5], dtype=np.int16))
c2 = U1Charge(np.array([-1, 0, 1, 2, 0, 4, 5, 6], dtype=np.int16))
c = c1 @ c2
c3 = np.array([[-1, 0]])
inds = np.nonzero(c == c3)[0]
np.testing.assert_allclose(inds, [1, 4])
for i in inds:
np.array_equal(c[i].charges, c3)
def test_eq_2():
np.random.seed(10)
c1 = U1Charge(np.array([-2, -1, 0, 1, -1, 3, 4, 5, 1], dtype=np.int16))
c2 = U1Charge(np.array([-1, 0, 1, 2, 0, 4, 5, 6, 2], dtype=np.int16))
c = c1 @ c2
c3 = np.array([[-1, 0], [1, 2]])
inds = np.nonzero(c == c3)[0]
np.testing.assert_allclose(inds, [1, 3, 4, 8])
def test_eq__raises():
np.random.seed(10)
num_charges = 2
charge = BaseCharge(
np.random.randint(-2, 3, (30, num_charges)),
charge_types=[U1Charge] * num_charges)
with pytest.raises(ValueError):
_ = charge == np.random.randint(-1, 1, (2, num_charges + 1), dtype=np.int16)
def test_iter():
np.random.seed(10)
arr1 = np.array([-2, -1, 0, 1, -1, 3, 4, 5, 1], dtype=np.int16)
arr2 = np.array([-1, 0, 1, 2, 0, 4, 5, 6, 2], dtype=np.int16)
c1 = U1Charge(arr1)
c2 = U1Charge(arr2)
c = c1 @ c2
m = 0
for n in c:
np.testing.assert_allclose(n, np.array([arr1[m], arr2[m]]))
m += 1
def test_empty():
num_charges = 4
charges = BaseCharge(
np.random.randint(-5, 6, (0, num_charges)),
charge_types=[U1Charge] * num_charges)
assert len(charges) == 0
def test_init_raises():
num_charges = 4
with pytest.raises(ValueError):
BaseCharge(
np.random.randint(-5, 6, (10, num_charges)),
charge_types=[U1Charge] * (num_charges - 1))
def test_eq_raises():
num_charges = 4
c1 = BaseCharge(
np.random.randint(-5, 6, (10, num_charges)),
charge_types=[U1Charge] * num_charges)
c2 = BaseCharge(
np.random.randint(-5, 6, (0, num_charges)),
charge_types=[U1Charge] * num_charges)
npc = np.empty((0, num_charges), dtype=np.int16)
with pytest.raises(ValueError):
c1 == c2
with pytest.raises(ValueError):
c1 == npc
def test_zncharge_raises():
with pytest.raises(ValueError, match="n must be >= 2, found 0"):
ZNCharge(0)
with pytest.raises(ValueError, match="Z7 charges must be in"):
ZNCharge(7)([0, 4, 9])
with pytest.raises(ValueError, match="maxval"):
ZNCharge(3).random(10, 0, 3)
with pytest.raises(ValueError, match="minval"):
ZNCharge(3).random(10, -1, 2)
def test_zncharge_does_not_raise():
ZNCharge(2).random(4) #pytype: disable=attribute-error
def test_BaseCharge_raises():
D = 100
B = 6
np.random.seed(10)
charges = np.random.randint(-B // 2, B // 2 + 1, (D, 2)).astype(np.int16)
q = BaseCharge(charges)
with pytest.raises(NotImplementedError):
q.fuse([], [])
with pytest.raises(NotImplementedError):
q.dual_charges([])
with pytest.raises(NotImplementedError):
q.identity_charge()
with pytest.raises(NotImplementedError):
BaseCharge.random(0, 0, 0)
|
import os
from time import mktime
from zipfile import ZipFile
from django.conf import settings
from django.core.paginator import EmptyPage, Paginator
from django.http import FileResponse, Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.http import http_date
from django.utils.translation import activate
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy, pgettext_lazy
from django.views.generic.edit import FormView
from weblate.formats.models import EXPORTERS
from weblate.trans.models import Component, Project, Translation
from weblate.utils import messages
from weblate.vcs.git import LocalRepository
def optional_form(form, perm_user, perm, perm_obj, **kwargs):
if not perm_user.has_perm(perm, perm_obj):
return None
return form(**kwargs)
def get_percent_color(percent):
if percent >= 85:
return "#2eccaa"
if percent >= 50:
return "#38f"
return "#f6664c"
def get_page_limit(request, default):
"""Return page and limit as integers."""
try:
limit = int(request.GET.get("limit", default))
except ValueError:
limit = default
# Cap it to range 10 - 2000
limit = min(max(10, limit), 2000)
try:
page = int(request.GET.get("page", 1))
except ValueError:
page = 1
page = max(1, page)
return page, limit
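# Illustrative examples (hypothetical query strings) of the clamping above, with
# default=100; the function returns (page, limit):
#   ?limit=5&page=0      -> (1, 10)     limit raised to the minimum of 10, page floored at 1
#   ?limit=100000&page=3 -> (3, 2000)   limit capped at 2000
#   ?limit=abc           -> (1, 100)    a non-integer limit falls back to the default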
def get_paginator(request, object_list, default_page_limit=100):
"""Return paginator and current page."""
page, limit = get_page_limit(request, default_page_limit)
paginator = Paginator(object_list, limit)
try:
return paginator.page(page)
except EmptyPage:
return paginator.page(paginator.num_pages)
class ComponentViewMixin:
# This should be done in setup once we drop support for older Django
def get_component(self):
return get_component(
self.request, self.kwargs["project"], self.kwargs["component"]
)
class ProjectViewMixin:
project = None
# This should be done in setup once we drop support for older Django
def dispatch(self, request, *args, **kwargs):
self.project = get_project(self.request, self.kwargs["project"])
return super().dispatch(request, *args, **kwargs)
SORT_CHOICES = {
"-priority,position": gettext_lazy("Position and priority"),
"position": gettext_lazy("Position"),
"priority": gettext_lazy("Priority"),
"labels": gettext_lazy("Labels"),
"timestamp": gettext_lazy("Age of string"),
"num_words": gettext_lazy("Number of words"),
"num_comments": gettext_lazy("Number of comments"),
"num_failing_checks": gettext_lazy("Number of failing checks"),
"context": pgettext_lazy("Translation key", "Key"),
}
SORT_LOOKUP = {key.replace("-", ""): value for key, value in SORT_CHOICES.items()}
def get_sort_name(request):
"""Gets sort name."""
sort_params = request.GET.get("sort_by", "-priority,position").replace("-", "")
sort_name = SORT_LOOKUP.get(sort_params, _("Position and priority"))
result = {
"query": request.GET.get("sort_by", "-priority,position"),
"name": sort_name,
}
return result
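# Example (hypothetical request): ?sort_by=-num_words yields
#   {"query": "-num_words", "name": "Number of words"}
# since the lookup strips the leading "-" before consulting SORT_LOOKUP.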
def get_translation(request, project, component, lang, skip_acl=False):
"""Return translation matching parameters."""
translation = get_object_or_404(
Translation.objects.prefetch(),
language__code=lang,
component__slug=component,
component__project__slug=project,
)
if not skip_acl:
request.user.check_access_component(translation.component)
return translation
def get_component(request, project, component, skip_acl=False):
"""Return component matching parameters."""
component = get_object_or_404(
Component.objects.prefetch(),
project__slug=project,
slug=component,
)
if not skip_acl:
request.user.check_access_component(component)
component.acting_user = request.user
return component
def get_project(request, project, skip_acl=False):
"""Return project matching parameters."""
project = get_object_or_404(Project, slug=project)
if not skip_acl:
request.user.check_access(project)
project.acting_user = request.user
return project
def get_project_translation(request, project=None, component=None, lang=None):
"""Return project, component, translation tuple for given parameters."""
if lang and component:
# Language defined? We can get all
translation = get_translation(request, project, component, lang)
component = translation.component
project = component.project
else:
translation = None
if component:
# Component defined?
component = get_component(request, project, component)
project = component.project
elif project:
# Only project defined?
project = get_project(request, project)
# Return tuple
return project or None, component or None, translation or None
def create_component_from_doc(data):
# Calculate filename
uploaded = data["docfile"]
ext = os.path.splitext(os.path.basename(uploaded.name))[1]
filemask = "{}/{}{}".format(data["slug"], "*", ext)
filename = filemask.replace(
"*",
data["source_language"].code
if "source_language" in data
else settings.DEFAULT_LANGUAGE,
)
# Create fake component (needed to calculate path)
fake = Component(
project=data["project"],
slug=data["slug"],
name=data["name"],
template=filename,
filemask=filemask,
)
# Create repository
LocalRepository.from_files(fake.full_path, {filename: uploaded.read()})
return fake
def create_component_from_zip(data):
# Create fake component (needed to calculate path)
fake = Component(
project=data["project"],
slug=data["slug"],
name=data["name"],
)
# Create repository
LocalRepository.from_zip(fake.full_path, data["zipfile"])
return fake
def try_set_language(lang):
"""Try to activate language."""
try:
activate(lang)
except Exception:
# Ignore failure on activating language
activate("en")
def import_message(request, count, message_none, message_ok):
if count == 0:
messages.warning(request, message_none)
else:
messages.success(request, message_ok % count)
def iter_files(filenames):
for filename in filenames:
if os.path.isdir(filename):
for root, _unused, files in os.walk(filename):
if "/.git/" in root or "/.hg/" in root:
continue
yield from (os.path.join(root, name) for name in files)
else:
yield filename
def zip_download(root, filenames, name="translations"):
response = HttpResponse(content_type="application/zip")
with ZipFile(response, "w") as zipfile:
for filename in iter_files(filenames):
with open(filename, "rb") as handle:
zipfile.writestr(os.path.relpath(filename, root), handle.read())
response["Content-Disposition"] = f'attachment; filename="{name}.zip"'
return response
def download_translation_file(translation, fmt=None, units=None):
if fmt is not None:
try:
exporter_cls = EXPORTERS[fmt]
except KeyError:
raise Http404("File format not supported")
if not exporter_cls.supports(translation):
raise Http404("File format not supported")
exporter = exporter_cls(translation=translation)
if units is None:
units = translation.unit_set.prefetch_full()
exporter.add_units(units)
response = exporter.get_response(
"{{project}}-{0}-{{language}}.{{extension}}".format(
translation.component.slug
)
)
else:
# Force flushing pending units
translation.commit_pending("download", None)
filenames = translation.filenames
if len(filenames) == 1:
extension = translation.component.file_format_cls.extension()
# Create response
response = FileResponse(
open(filenames[0], "rb"),
content_type=translation.component.file_format_cls.mimetype(),
)
else:
extension = "zip"
response = zip_download(
translation.get_filename(),
filenames,
translation.full_slug.replace("/", "-"),
)
# Construct filename (do not use real filename as it is usually not
# that useful)
filename = "{}-{}-{}.{}".format(
translation.component.project.slug,
translation.component.slug,
translation.language.code,
extension,
)
# Fill in response headers
response["Content-Disposition"] = f"attachment; filename={filename}"
if translation.stats.last_changed:
response["Last-Modified"] = http_date(
mktime(translation.stats.last_changed.timetuple())
)
return response
def show_form_errors(request, form):
"""Show all form errors as a message."""
for error in form.non_field_errors():
messages.error(request, error)
for field in form:
for error in field.errors:
messages.error(
request,
_("Error in parameter %(field)s: %(error)s")
% {"field": field.name, "error": error},
)
class ErrorFormView(FormView):
def form_invalid(self, form):
"""If the form is invalid, redirect to the supplied URL."""
show_form_errors(self.request, form)
return HttpResponseRedirect(self.get_success_url())
def get(self, request, *args, **kwargs):
"""There is no GET view here."""
return HttpResponseRedirect(self.get_success_url())
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.windows_packages import iperf3
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'iperf3'
BENCHMARK_CONFIG = """
iperf3:
description: Run iperf3 between two VMs.
vm_groups:
default:
vm_spec: *default_single_core
vm_count: 2
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
for vm in benchmark_spec.vms:
vm.Install('iperf3')
vm.AllowPort(iperf3.IPERF3_UDP_PORT)
def Run(benchmark_spec):
"""Measure UDP bandwidth between two VMs.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects with the benchmark results.
"""
vms = benchmark_spec.vms
results = []
def _RunTest(sending_vm, receiving_vm):
"""Runs the tests depending on what is enabled.
Args:
sending_vm: The vm that will initiate the stream.
receiving_vm: The vm that will act as server.
"""
if vm_util.ShouldRunOnExternalIpAddress():
if FLAGS.run_udp:
results.extend(
iperf3.RunIperf3UDPStream(
sending_vm, receiving_vm, use_internal_ip=False))
if FLAGS.run_tcp:
results.extend(
iperf3.RunIperf3TCPMultiStream(
sending_vm, receiving_vm, use_internal_ip=False))
if vm_util.ShouldRunOnInternalIpAddress(sending_vm, receiving_vm):
if FLAGS.run_udp:
results.extend(
iperf3.RunIperf3UDPStream(
sending_vm, receiving_vm, use_internal_ip=True))
if FLAGS.run_tcp:
results.extend(
iperf3.RunIperf3TCPMultiStream(
sending_vm, receiving_vm, use_internal_ip=True))
_RunTest(vms[0], vms[1])
_RunTest(vms[1], vms[0])
return results
def Cleanup(unused_benchmark_spec):
pass
|
from kalliope.core.Models.settings.SettingsEntry import SettingsEntry
class RestAPI(SettingsEntry):
"""
This Class is representing the rest API with all its configuration.
"""
def __init__(self,
password_protected=None,
login=None, password=None,
active=None,
port=None,
allowed_cors_origin=None):
"""
        :param password_protected: If True, the rest API will require authentication
        :param login: login used if auth is activated
        :param password: password used if auth is activated
        :param active: specify if the rest API is loaded on start with Kalliope
        :param port: port on which the rest API listens
        :param allowed_cors_origin: specify allowed origins
"""
super(RestAPI, self).__init__("RestAPI")
self.password_protected = password_protected
self.login = login
self.password = password
self.active = active
self.port = port
self.allowed_cors_origin = allowed_cors_origin
def __str__(self):
return str(self.serialize())
def serialize(self):
"""
        Serialize this object in a proper way
        :return: A dict describing the RestAPI settings
:rtype: Dict
"""
return {
'password_protected': self.password_protected,
'login': self.login,
'password': self.password,
'active': self.active,
'port': self.port,
'allowed_cors_origin': self.allowed_cors_origin
}
def __eq__(self, other):
"""
This is used to compare 2 objects
:param other:
:return:
"""
return self.__dict__ == other.__dict__
|
import json
from homeassistant.components.metoffice.const import ATTRIBUTION, DOMAIN
from . import NewDateTime
from .const import (
DATETIME_FORMAT,
KINGSLYNN_SENSOR_RESULTS,
METOFFICE_CONFIG_KINGSLYNN,
METOFFICE_CONFIG_WAVERTREE,
TEST_DATETIME_STRING,
TEST_SITE_NAME_KINGSLYNN,
TEST_SITE_NAME_WAVERTREE,
WAVERTREE_SENSOR_RESULTS,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
@patch(
"datapoint.Forecast.datetime.datetime",
NewDateTime,
)
async def test_one_sensor_site_running(hass, requests_mock, legacy_patchable_time):
"""Test the Met Office sensor platform."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly",
text=wavertree_hourly,
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
running_sensor_ids = hass.states.async_entity_ids("sensor")
assert len(running_sensor_ids) > 0
for running_id in running_sensor_ids:
sensor = hass.states.get(running_id)
sensor_id = sensor.attributes.get("sensor_id")
sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
assert sensor.state == sensor_value
assert (
sensor.attributes.get("last_update").strftime(DATETIME_FORMAT)
== TEST_DATETIME_STRING
)
assert sensor.attributes.get("site_id") == "354107"
assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE
assert sensor.attributes.get("attribution") == ATTRIBUTION
@patch(
"datapoint.Forecast.datetime.datetime",
NewDateTime,
)
async def test_two_sensor_sites_running(hass, requests_mock, legacy_patchable_time):
"""Test we handle two sets of sensors running for two different sites."""
# all metoffice test data encapsulated in here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
)
requests_mock.get(
"/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
)
entry = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_WAVERTREE,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
entry2 = MockConfigEntry(
domain=DOMAIN,
data=METOFFICE_CONFIG_KINGSLYNN,
)
entry2.add_to_hass(hass)
await hass.config_entries.async_setup(entry2.entry_id)
await hass.async_block_till_done()
running_sensor_ids = hass.states.async_entity_ids("sensor")
assert len(running_sensor_ids) > 0
for running_id in running_sensor_ids:
sensor = hass.states.get(running_id)
sensor_id = sensor.attributes.get("sensor_id")
if sensor.attributes.get("site_id") == "354107":
sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
assert sensor.state == sensor_value
assert (
sensor.attributes.get("last_update").strftime(DATETIME_FORMAT)
== TEST_DATETIME_STRING
)
assert sensor.attributes.get("sensor_id") == sensor_id
assert sensor.attributes.get("site_id") == "354107"
assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE
assert sensor.attributes.get("attribution") == ATTRIBUTION
else:
sensor_name, sensor_value = KINGSLYNN_SENSOR_RESULTS[sensor_id]
assert sensor.state == sensor_value
assert (
sensor.attributes.get("last_update").strftime(DATETIME_FORMAT)
== TEST_DATETIME_STRING
)
assert sensor.attributes.get("sensor_id") == sensor_id
assert sensor.attributes.get("site_id") == "322380"
assert sensor.attributes.get("site_name") == TEST_SITE_NAME_KINGSLYNN
assert sensor.attributes.get("attribution") == ATTRIBUTION
|
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from .common import setup_platform
from tests.common import load_fixture
async def test_entity_registry(hass, requests_mock):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(hass, SWITCH_DOMAIN)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry = entity_registry.async_get("switch.front_siren")
assert entry.unique_id == "765432-siren"
entry = entity_registry.async_get("switch.internal_siren")
assert entry.unique_id == "345678-siren"
async def test_siren_off_reports_correctly(hass, requests_mock):
"""Tests that the initial state of a device that should be off is correct."""
await setup_platform(hass, SWITCH_DOMAIN)
state = hass.states.get("switch.front_siren")
assert state.state == "off"
assert state.attributes.get("friendly_name") == "Front siren"
async def test_siren_on_reports_correctly(hass, requests_mock):
"""Tests that the initial state of a device that should be on is correct."""
await setup_platform(hass, SWITCH_DOMAIN)
state = hass.states.get("switch.internal_siren")
assert state.state == "on"
assert state.attributes.get("friendly_name") == "Internal siren"
assert state.attributes.get("icon") == "mdi:alarm-bell"
async def test_siren_can_be_turned_on(hass, requests_mock):
"""Tests the siren turns on correctly."""
await setup_platform(hass, SWITCH_DOMAIN)
# Mocks the response for turning a siren on
requests_mock.put(
"https://api.ring.com/clients_api/doorbots/765432/siren_on",
text=load_fixture("ring_doorbot_siren_on_response.json"),
)
state = hass.states.get("switch.front_siren")
assert state.state == "off"
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.front_siren"}, blocking=True
)
await hass.async_block_till_done()
state = hass.states.get("switch.front_siren")
assert state.state == "on"
async def test_updates_work(hass, requests_mock):
"""Tests the update service works correctly."""
await setup_platform(hass, SWITCH_DOMAIN)
state = hass.states.get("switch.front_siren")
assert state.state == "off"
# Changes the return to indicate that the siren is now on.
requests_mock.get(
"https://api.ring.com/clients_api/ring_devices",
text=load_fixture("ring_devices_updated.json"),
)
await hass.services.async_call("ring", "update", {}, blocking=True)
await hass.async_block_till_done()
state = hass.states.get("switch.front_siren")
assert state.state == "on"
|
import unittest
from absl import flags
from absl.testing import flagsaver
from perfkitbenchmarker.providers.aws import aws_dynamodb
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
@flagsaver.flagsaver
def GetTestDynamoDBInstance(table_name='test_table'):
FLAGS.zone = ['us-east-1a']
return aws_dynamodb.AwsDynamoDBInstance(table_name)
class AwsDynamodbTest(pkb_common_test_case.PkbCommonTestCase):
@flagsaver.flagsaver
def testInitTableName(self):
test_instance = GetTestDynamoDBInstance('dynamo_test_table')
self.assertEqual(test_instance.table_name, 'dynamo_test_table')
@flagsaver.flagsaver
def testInitLocation(self):
FLAGS.zone = ['us-east-1a']
test_instance = aws_dynamodb.AwsDynamoDBInstance('test_table')
self.assertEqual(test_instance.zone, 'us-east-1a')
self.assertEqual(test_instance.region, 'us-east-1')
@flagsaver.flagsaver
def testInitKeysAndAttributes(self):
FLAGS.aws_dynamodb_primarykey = 'test_primary_key'
FLAGS.aws_dynamodb_sortkey = 'test_sort_key'
FLAGS.aws_dynamodb_attributetype = 'test_attribute_type'
test_instance = GetTestDynamoDBInstance()
self.assertEqual(test_instance.primary_key,
'{"AttributeName": "test_primary_key","KeyType": "HASH"}')
self.assertEqual(test_instance.sort_key,
'{"AttributeName": "test_sort_key","KeyType": "RANGE"}')
self.assertEqual(
test_instance.part_attributes,
'{"AttributeName": "test_primary_key","AttributeType": "test_attribute_type"}'
)
self.assertEqual(
test_instance.sort_attributes,
'{"AttributeName": "test_sort_key","AttributeType": "test_attribute_type"}'
)
@flagsaver.flagsaver
def testInitThroughput(self):
FLAGS.aws_dynamodb_read_capacity = 1
FLAGS.aws_dynamodb_write_capacity = 2
test_instance = GetTestDynamoDBInstance()
self.assertEqual(test_instance.throughput,
'ReadCapacityUnits=1,WriteCapacityUnits=2')
@flagsaver.flagsaver
def testGetResourceMetadata(self):
FLAGS.zone = ['us-east-1a']
FLAGS.aws_dynamodb_primarykey = 'test_primary_key'
FLAGS.aws_dynamodb_use_sort = 'test_use_sort'
FLAGS.aws_dynamodb_sortkey = 'test_sortkey'
FLAGS.aws_dynamodb_attributetype = 'test_attribute_type'
FLAGS.aws_dynamodb_read_capacity = 1
FLAGS.aws_dynamodb_write_capacity = 2
FLAGS.aws_dynamodb_lsi_count = 3
FLAGS.aws_dynamodb_gsi_count = 4
FLAGS.aws_dynamodb_ycsb_consistentReads = 5
FLAGS.aws_dynamodb_connectMax = 6
test_instance = aws_dynamodb.AwsDynamoDBInstance('test_table')
actual_metadata = test_instance.GetResourceMetadata()
expected_metadata = {
'aws_dynamodb_primarykey': 'test_primary_key',
'aws_dynamodb_use_sort': 'test_use_sort',
'aws_dynamodb_sortkey': 'test_sortkey',
'aws_dynamodb_attributetype': 'test_attribute_type',
'aws_dynamodb_read_capacity': 1,
'aws_dynamodb_write_capacity': 2,
'aws_dynamodb_lsi_count': 3,
'aws_dynamodb_gsi_count': 4,
'aws_dynamodb_consistentReads': 5,
'aws_dynamodb_connectMax': 6,
}
self.assertEqual(actual_metadata, expected_metadata)
if __name__ == '__main__':
unittest.main()
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from tests.components.homekit_controller.common import setup_test_component
POSITION_STATE = ("window-covering", "position.state")
POSITION_CURRENT = ("window-covering", "position.current")
POSITION_TARGET = ("window-covering", "position.target")
POSITION_HOLD = ("window-covering", "position.hold")
H_TILT_CURRENT = ("window-covering", "horizontal-tilt.current")
H_TILT_TARGET = ("window-covering", "horizontal-tilt.target")
V_TILT_CURRENT = ("window-covering", "vertical-tilt.current")
V_TILT_TARGET = ("window-covering", "vertical-tilt.target")
WINDOW_OBSTRUCTION = ("window-covering", "obstruction-detected")
DOOR_CURRENT = ("garage-door-opener", "door-state.current")
DOOR_TARGET = ("garage-door-opener", "door-state.target")
DOOR_OBSTRUCTION = ("garage-door-opener", "obstruction-detected")
def create_window_covering_service(accessory):
"""Define a window-covering characteristics as per page 219 of HAP spec."""
service = accessory.add_service(ServicesTypes.WINDOW_COVERING)
cur_state = service.add_char(CharacteristicsTypes.POSITION_CURRENT)
cur_state.value = 0
targ_state = service.add_char(CharacteristicsTypes.POSITION_TARGET)
targ_state.value = 0
position_state = service.add_char(CharacteristicsTypes.POSITION_STATE)
position_state.value = 0
position_hold = service.add_char(CharacteristicsTypes.POSITION_HOLD)
position_hold.value = 0
obstruction = service.add_char(CharacteristicsTypes.OBSTRUCTION_DETECTED)
obstruction.value = False
name = service.add_char(CharacteristicsTypes.NAME)
name.value = "testdevice"
return service
def create_window_covering_service_with_h_tilt(accessory):
"""Define a window-covering characteristics as per page 219 of HAP spec."""
service = create_window_covering_service(accessory)
tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT)
tilt_current.value = 0
tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET)
tilt_target.value = 0
def create_window_covering_service_with_v_tilt(accessory):
"""Define a window-covering characteristics as per page 219 of HAP spec."""
service = create_window_covering_service(accessory)
tilt_current = service.add_char(CharacteristicsTypes.VERTICAL_TILT_CURRENT)
tilt_current.value = 0
tilt_target = service.add_char(CharacteristicsTypes.VERTICAL_TILT_TARGET)
tilt_target.value = 0
async def test_change_window_cover_state(hass, utcnow):
"""Test that we can turn a HomeKit alarm on and off again."""
helper = await setup_test_component(hass, create_window_covering_service)
await hass.services.async_call(
"cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True
)
assert helper.characteristics[POSITION_TARGET].value == 100
await hass.services.async_call(
"cover", "close_cover", {"entity_id": helper.entity_id}, blocking=True
)
assert helper.characteristics[POSITION_TARGET].value == 0
async def test_read_window_cover_state(hass, utcnow):
"""Test that we can read the state of a HomeKit alarm accessory."""
helper = await setup_test_component(hass, create_window_covering_service)
helper.characteristics[POSITION_STATE].value = 0
state = await helper.poll_and_get_state()
assert state.state == "closing"
helper.characteristics[POSITION_STATE].value = 1
state = await helper.poll_and_get_state()
assert state.state == "opening"
helper.characteristics[POSITION_STATE].value = 2
state = await helper.poll_and_get_state()
assert state.state == "closed"
helper.characteristics[WINDOW_OBSTRUCTION].value = True
state = await helper.poll_and_get_state()
assert state.attributes["obstruction-detected"] is True
async def test_read_window_cover_tilt_horizontal(hass, utcnow):
"""Test that horizontal tilt is handled correctly."""
helper = await setup_test_component(
hass, create_window_covering_service_with_h_tilt
)
helper.characteristics[H_TILT_CURRENT].value = 75
state = await helper.poll_and_get_state()
assert state.attributes["current_tilt_position"] == 75
async def test_read_window_cover_tilt_vertical(hass, utcnow):
"""Test that vertical tilt is handled correctly."""
helper = await setup_test_component(
hass, create_window_covering_service_with_v_tilt
)
helper.characteristics[V_TILT_CURRENT].value = 75
state = await helper.poll_and_get_state()
assert state.attributes["current_tilt_position"] == 75
async def test_write_window_cover_tilt_horizontal(hass, utcnow):
"""Test that horizontal tilt is written correctly."""
helper = await setup_test_component(
hass, create_window_covering_service_with_h_tilt
)
await hass.services.async_call(
"cover",
"set_cover_tilt_position",
{"entity_id": helper.entity_id, "tilt_position": 90},
blocking=True,
)
assert helper.characteristics[H_TILT_TARGET].value == 90
async def test_write_window_cover_tilt_vertical(hass, utcnow):
"""Test that vertical tilt is written correctly."""
helper = await setup_test_component(
hass, create_window_covering_service_with_v_tilt
)
await hass.services.async_call(
"cover",
"set_cover_tilt_position",
{"entity_id": helper.entity_id, "tilt_position": 90},
blocking=True,
)
assert helper.characteristics[V_TILT_TARGET].value == 90
async def test_window_cover_stop(hass, utcnow):
"""Test that vertical tilt is written correctly."""
helper = await setup_test_component(
hass, create_window_covering_service_with_v_tilt
)
await hass.services.async_call(
"cover", "stop_cover", {"entity_id": helper.entity_id}, blocking=True
)
assert helper.characteristics[POSITION_HOLD].value == 1
def create_garage_door_opener_service(accessory):
"""Define a garage-door-opener chars as per page 217 of HAP spec."""
service = accessory.add_service(ServicesTypes.GARAGE_DOOR_OPENER)
cur_state = service.add_char(CharacteristicsTypes.DOOR_STATE_CURRENT)
cur_state.value = 0
cur_state = service.add_char(CharacteristicsTypes.DOOR_STATE_TARGET)
cur_state.value = 0
obstruction = service.add_char(CharacteristicsTypes.OBSTRUCTION_DETECTED)
obstruction.value = False
name = service.add_char(CharacteristicsTypes.NAME)
name.value = "testdevice"
return service
async def test_change_door_state(hass, utcnow):
"""Test that we can turn open and close a HomeKit garage door."""
helper = await setup_test_component(hass, create_garage_door_opener_service)
await hass.services.async_call(
"cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True
)
assert helper.characteristics[DOOR_TARGET].value == 0
await hass.services.async_call(
"cover", "close_cover", {"entity_id": helper.entity_id}, blocking=True
)
assert helper.characteristics[DOOR_TARGET].value == 1
async def test_read_door_state(hass, utcnow):
"""Test that we can read the state of a HomeKit garage door."""
helper = await setup_test_component(hass, create_garage_door_opener_service)
helper.characteristics[DOOR_CURRENT].value = 0
state = await helper.poll_and_get_state()
assert state.state == "open"
helper.characteristics[DOOR_CURRENT].value = 1
state = await helper.poll_and_get_state()
assert state.state == "closed"
helper.characteristics[DOOR_CURRENT].value = 2
state = await helper.poll_and_get_state()
assert state.state == "opening"
helper.characteristics[DOOR_CURRENT].value = 3
state = await helper.poll_and_get_state()
assert state.state == "closing"
helper.characteristics[DOOR_OBSTRUCTION].value = True
state = await helper.poll_and_get_state()
assert state.attributes["obstruction-detected"] is True
|
import speech_recognition as sr
from kalliope.core import Utils
from kalliope.stt.Utils import SpeechRecognition
class Cmusphinx(SpeechRecognition):
def __init__(self, callback=None, **kwargs):
"""
Start recording the microphone and analyse audio with CMU sphinx api
:param callback: The callback function to call to send the text
:param kwargs:
"""
        # give the audio file path to process directly to the parent class if it exists
SpeechRecognition.__init__(self, kwargs.get('audio_file_path', None))
        # callback function to call after the speech-to-text translation
self.main_controller_callback = callback
self.language = kwargs.get('language', "en-US")
self.keyword_entries = kwargs.get('keyword_entries', None)
        # get the grammar file if it exists
self.grammar_file = kwargs.get('grammar_file', None)
# start listening in the background
self.set_callback(self.sphinx_callback)
# start processing, record a sample from the microphone if no audio file path provided, else read the file
self.start_processing()
def sphinx_callback(self, recognizer, audio):
"""
called from the background thread
"""
try:
captured_audio = recognizer.recognize_sphinx(audio,
language=self.language,
keyword_entries=self.keyword_entries,
grammar=self.grammar_file)
Utils.print_success("Sphinx Speech Recognition thinks you said %s" % captured_audio)
self._analyse_audio(captured_audio)
except sr.UnknownValueError:
Utils.print_warning("Sphinx Speech Recognition could not understand audio")
# callback anyway, we need to listen again for a new order
self._analyse_audio(audio_to_text=None)
except sr.RequestError as e:
Utils.print_danger("Could not request results from Sphinx Speech Recognition service; {0}".format(e))
# callback anyway, we need to listen again for a new order
self._analyse_audio(audio_to_text=None)
except AssertionError:
Utils.print_warning("No audio caught from microphone")
self._analyse_audio(audio_to_text=None)
def _analyse_audio(self, audio_to_text):
"""
        Pass the recognized text to the main controller callback, if one is set
        :param audio_to_text: the text recognized from the captured audio, or None if nothing was understood
"""
if self.main_controller_callback is not None:
self.main_controller_callback(audio_to_text)
|
import datetime
import socket
import sys
from collections import defaultdict
from typing import Dict
from typing import List
import choice
from paasta_tools import remote_git
from paasta_tools import utils
from paasta_tools.api.client import get_paasta_oapi_client
from paasta_tools.cli.cmds.status import add_instance_filter_arguments
from paasta_tools.cli.cmds.status import apply_args_filters
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.cli.utils import trigger_deploys
from paasta_tools.flink_tools import FlinkDeploymentConfig
from paasta_tools.generate_deployments_for_service import get_latest_deployment_tag
from paasta_tools.marathon_tools import MarathonServiceConfig
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import PaastaColors
def add_subparser(subparsers):
for command, lower, upper, cmd_func in [
("start", "start or restart", "Start or restart", paasta_start),
("restart", "start or restart", "Start or restart", paasta_start),
("stop", "stop", "Stop", paasta_stop),
]:
status_parser = subparsers.add_parser(
command,
help="%ss a PaaSTA service in a graceful way." % upper,
description=(
"%ss a PaaSTA service in a graceful way. This uses the Git control plane."
% upper
),
epilog=(
"This command uses Git, and assumes access and authorization to the Git repo "
"for the service is available."
),
)
add_instance_filter_arguments(status_parser, verb=lower)
status_parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
status_parser.set_defaults(command=cmd_func)
def format_tag(branch, force_bounce, desired_state):
return f"refs/tags/paasta-{branch}-{force_bounce}-{desired_state}"
def make_mutate_refs_func(service_config, force_bounce, desired_state):
"""Create a function that will inform send_pack that we want to create tags
corresponding to the set of branches passed, with the given force_bounce
and desired_state parameters. These tags will point at the current tip of
the branch they associate with.
dulwich's send_pack wants a function that takes a dictionary of ref name
to sha and returns a modified version of that dictionary. send_pack will
then diff what is returned versus what was passed in, and inform the remote
git repo of our desires."""
def mutate_refs(refs):
deploy_group = service_config.get_deploy_group()
(_, head_sha) = get_latest_deployment_tag(refs, deploy_group)
refs[
format_tag(service_config.get_branch(), force_bounce, desired_state)
] = head_sha
return refs
return mutate_refs
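# Illustrative sketch (hypothetical branch name, timestamp and sha): given remote
# refs such as
#   {"refs/tags/paasta-everything.main-20200101T000000-deploy": "abc123"}
# the returned mutate_refs adds one control tag pointing at the currently deployed
# sha, e.g.
#   refs["refs/tags/paasta-everything.main-20200102T000000-stop"] = "abc123"
# and hands the dict back; dulwich's send_pack then pushes only the new tag.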
def log_event(service_config, desired_state):
user = utils.get_username()
host = socket.getfqdn()
line = "Issued request to change state of {} (an instance of {}) to '{}' by {}@{}".format(
service_config.get_instance(),
service_config.get_service(),
desired_state,
user,
host,
)
utils._log(
service=service_config.get_service(),
level="event",
cluster=service_config.get_cluster(),
instance=service_config.get_instance(),
component="deploy",
line=line,
)
utils._log_audit(
action=desired_state,
service=service_config.get_service(),
cluster=service_config.get_cluster(),
instance=service_config.get_instance(),
)
def issue_state_change_for_service(service_config, force_bounce, desired_state):
ref_mutator = make_mutate_refs_func(
service_config=service_config,
force_bounce=force_bounce,
desired_state=desired_state,
)
git_url = utils.get_git_url(service_config.get_service())
remote_git.create_remote_refs(git_url, ref_mutator)
if "yelpcorp.com" in git_url:
trigger_deploys(service_config.get_service())
log_event(service_config=service_config, desired_state=desired_state)
def print_marathon_message(desired_state):
if desired_state == "start":
print(
"This service will soon be gracefully started/restarted, replacing old instances according "
"to the bounce method chosen in soa-configs. "
)
elif desired_state == "stop":
print(
"This service will be gracefully stopped soon. It will be started back up again on the next deploy.\n"
"To stop this service permanently. Set this in the soa-configs definition:\n"
"\n"
" instances: 0\n"
)
def print_flink_message(desired_state):
if desired_state == "start":
print("'Start' will tell Flink operator to start the cluster.")
elif desired_state == "stop":
print(
"'Stop' will put Flink cluster in stopping mode, it may"
"take some time before shutdown is completed."
)
def confirm_to_continue(cluster_service_instances, desired_state):
print(f"You are about to {desired_state} the following instances:")
print("Either --instances or --clusters not specified. Asking for confirmation.")
i_count = 0
for cluster, services_instances in cluster_service_instances:
for service, instances in services_instances.items():
for instance in instances.keys():
print(f"cluster = {cluster}, instance = {instance}")
i_count += 1
if sys.stdin.isatty():
return choice.Binary(
f"Are you sure you want to {desired_state} these {i_count} instances?",
False,
).ask()
return True
REMOTE_REFS: Dict[str, List[str]] = {}
def get_remote_refs(service, soa_dir):
if service not in REMOTE_REFS:
REMOTE_REFS[service] = remote_git.list_remote_refs(
utils.get_git_url(service, soa_dir)
)
return REMOTE_REFS[service]
def paasta_start_or_stop(args, desired_state):
"""Requests a change of state to start or stop given branches of a service."""
soa_dir = args.soa_dir
pargs = apply_args_filters(args)
if len(pargs) == 0:
return 1
affected_services = {
s for service_list in pargs.values() for s in service_list.keys()
}
if len(affected_services) > 1:
print(
PaastaColors.red("Warning: trying to start/stop/restart multiple services:")
)
for cluster, services_instances in pargs.items():
print("Cluster %s:" % cluster)
for service, instances in services_instances.items():
print(" Service %s:" % service)
print(" Instances %s" % ",".join(instances.keys()))
if sys.stdin.isatty():
confirm = choice.Binary("Are you sure you want to continue?", False).ask()
else:
confirm = False
if not confirm:
print()
print("exiting")
return 1
invalid_deploy_groups = []
marathon_message_printed = False
affected_flinks = []
if args.clusters is None or args.instances is None:
if confirm_to_continue(pargs.items(), desired_state) is False:
print()
print("exiting")
return 1
for cluster, services_instances in pargs.items():
for service, instances in services_instances.items():
for instance in instances.keys():
service_config = get_instance_config(
service=service,
cluster=cluster,
instance=instance,
soa_dir=soa_dir,
load_deployments=False,
)
if isinstance(service_config, FlinkDeploymentConfig):
affected_flinks.append(service_config)
continue
try:
remote_refs = get_remote_refs(service, soa_dir)
except remote_git.LSRemoteException as e:
msg = (
"Error talking to the git server: %s\n"
"This PaaSTA command requires access to the git server to operate.\n"
"The git server may be down or not reachable from here.\n"
"Try again from somewhere where the git server can be reached, "
"like your developer environment."
) % str(e)
print(msg)
return 1
deploy_group = service_config.get_deploy_group()
(deploy_tag, _) = get_latest_deployment_tag(remote_refs, deploy_group)
if deploy_tag not in remote_refs:
invalid_deploy_groups.append(deploy_group)
else:
force_bounce = utils.format_timestamp(datetime.datetime.utcnow())
if (
isinstance(service_config, MarathonServiceConfig)
and not marathon_message_printed
):
print_marathon_message(desired_state)
marathon_message_printed = True
issue_state_change_for_service(
service_config=service_config,
force_bounce=force_bounce,
desired_state=desired_state,
)
return_val = 0
# TODO: Refactor to discover if set_state is available for given
# instance_type in API
if affected_flinks:
print_flink_message(desired_state)
csi = defaultdict(lambda: defaultdict(list))
for service_config in affected_flinks:
csi[service_config.cluster][service_config.service].append(
service_config.instance
)
system_paasta_config = load_system_paasta_config()
for cluster, services_instances in csi.items():
client = get_paasta_oapi_client(cluster, system_paasta_config)
if not client:
print("Cannot get a paasta-api client")
exit(1)
for service, instances in services_instances.items():
for instance in instances:
try:
client.service.instance_set_state(
service=service,
instance=instance,
desired_state=desired_state,
)
except client.api_error as exc:
print(exc.reason)
return exc.status
return_val = 0
if invalid_deploy_groups:
print(f"No deploy tags found for {', '.join(invalid_deploy_groups)}.")
print(f"Has {service} been deployed there yet?")
return_val = 1
return return_val
def paasta_start(args):
return paasta_start_or_stop(args, "start")
def paasta_stop(args):
return paasta_start_or_stop(args, "stop")
|
from unittest.mock import Mock, patch
from homeassistant.components import onboarding
from homeassistant.setup import async_setup_component
from . import mock_storage
from tests.common import MockUser, mock_coro
# Temporarily: if auth not active, always set onboarded=True
async def test_not_setup_views_if_onboarded(hass, hass_storage):
"""Test if onboarding is done, we don't setup views."""
mock_storage(hass_storage, {"done": onboarding.STEPS})
with patch("homeassistant.components.onboarding.views.async_setup") as mock_setup:
assert await async_setup_component(hass, "onboarding", {})
assert len(mock_setup.mock_calls) == 0
assert onboarding.DOMAIN not in hass.data
assert onboarding.async_is_onboarded(hass)
async def test_setup_views_if_not_onboarded(hass):
"""Test if onboarding is not done, we setup views."""
with patch(
"homeassistant.components.onboarding.views.async_setup",
return_value=mock_coro(),
) as mock_setup:
assert await async_setup_component(hass, "onboarding", {})
assert len(mock_setup.mock_calls) == 1
assert onboarding.DOMAIN in hass.data
assert not onboarding.async_is_onboarded(hass)
async def test_is_onboarded():
"""Test the is onboarded function."""
hass = Mock()
hass.data = {}
assert onboarding.async_is_onboarded(hass)
hass.data[onboarding.DOMAIN] = True
assert onboarding.async_is_onboarded(hass)
hass.data[onboarding.DOMAIN] = {"done": []}
assert not onboarding.async_is_onboarded(hass)
async def test_is_user_onboarded():
"""Test the is onboarded function."""
hass = Mock()
hass.data = {}
assert onboarding.async_is_user_onboarded(hass)
hass.data[onboarding.DOMAIN] = True
assert onboarding.async_is_user_onboarded(hass)
hass.data[onboarding.DOMAIN] = {"done": []}
assert not onboarding.async_is_user_onboarded(hass)
async def test_having_owner_finishes_user_step(hass, hass_storage):
"""If owner user already exists, mark user step as complete."""
MockUser(is_owner=True).add_to_hass(hass)
with patch(
"homeassistant.components.onboarding.views.async_setup"
) as mock_setup, patch.object(onboarding, "STEPS", [onboarding.STEP_USER]):
assert await async_setup_component(hass, "onboarding", {})
assert len(mock_setup.mock_calls) == 0
assert onboarding.DOMAIN not in hass.data
assert onboarding.async_is_onboarded(hass)
done = hass_storage[onboarding.STORAGE_KEY]["data"]["done"]
assert onboarding.STEP_USER in done
async def test_migration(hass, hass_storage):
"""Test migrating onboarding to new version."""
hass_storage[onboarding.STORAGE_KEY] = {"version": 1, "data": {"done": ["user"]}}
assert await async_setup_component(hass, "onboarding", {})
assert onboarding.async_is_onboarded(hass)
|
from django.urls import reverse
from weblate.trans.tests.test_views import ViewTestCase
class LabelTest(ViewTestCase):
def setUp(self):
super().setUp()
self.make_manager()
self.labels_url = reverse("labels", kwargs=self.kw_project)
def test_create(self):
response = self.client.post(
self.labels_url, {"name": "Test label", "color": "orange"}, follow=True
)
self.assertRedirects(response, self.labels_url)
self.assertContains(response, "Test label")
self.assertTrue(self.project.label_set.filter(name="Test label").exists())
def test_edit(self):
self.test_create()
label = self.project.label_set.get()
response = self.client.post(
reverse(
"label_edit", kwargs={"project": self.project.slug, "pk": label.pk}
),
{"name": "Renamed label", "color": "orange"},
follow=True,
)
self.assertRedirects(response, self.labels_url)
self.assertContains(response, "Renamed label")
self.assertTrue(self.project.label_set.filter(name="Renamed label").exists())
def test_delete(self):
self.test_create()
label = self.project.label_set.get()
response = self.client.post(
reverse(
"label_delete", kwargs={"project": self.project.slug, "pk": label.pk}
),
follow=True,
)
self.assertRedirects(response, self.labels_url)
self.assertNotContains(response, "Test label")
self.assertFalse(self.project.label_set.filter(name="Test label").exists())
def test_assign(self):
self.test_create()
label = self.project.label_set.get()
unit = self.get_unit().source_unit
self.client.post(
reverse("edit_context", kwargs={"pk": unit.pk}),
{"explanation": "", "extra_flags": "", "labels": label.pk},
)
translation = self.get_translation()
self.assertEqual(getattr(translation.stats, "label:Test label"), 1)
self.client.post(
reverse("edit_context", kwargs={"pk": unit.pk}),
{"explanation": "", "extra_flags": ""},
)
translation = self.get_translation()
self.assertEqual(getattr(translation.stats, "label:Test label"), 0)
|
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
from flask import url_for
from pytest_flask.fixtures import client
model_module = 'tests.automap_models'
database = 'blog.sqlite3'
def test_get_automap_collection(client):
"""Do we see a model's __unicode__ definition being used in the admin?"""
response = client.get(url_for('blog.index_view'))
assert response.status_code == 200
assert 'Jeff Knupp' in response.get_data(as_text=True)
|
from __future__ import division
from builtins import range
import warnings
import numpy as np
from scipy.stats.stats import pearsonr
from scipy.spatial.distance import cdist
import matplotlib.pyplot as plt
import seaborn as sns
from .reduce import reduce as reducer
from .format_data import format_data as formatter
from .._shared.helpers import memoize
def describe(x, reduce='IncrementalPCA', max_dims=None, show=True,
format_data=True):
"""
    Create a plot describing covariance as a function of the number of dimensions
This function correlates the raw data with reduced data to get a sense
for how well the data can be summarized with n dimensions. Useful for
evaluating quality of dimensionality reduced plots.
Parameters
----------
x : Numpy array, DataFrame or list of arrays/dfs
A list of Numpy arrays or Pandas Dataframes
reduce : str or dict
Decomposition/manifold learning model to use. Models supported: PCA,
IncrementalPCA, SparsePCA, MiniBatchSparsePCA, KernelPCA, FastICA,
FactorAnalysis, TruncatedSVD, DictionaryLearning, MiniBatchDictionaryLearning,
TSNE, Isomap, SpectralEmbedding, LocallyLinearEmbedding, and MDS. Can be
passed as a string, but for finer control of the model parameters, pass
as a dictionary, e.g. reduce={'model' : 'PCA', 'params' : {'whiten' : True}}.
See scikit-learn specific model docs for details on parameters supported
for each model.
max_dims : int
Maximum number of dimensions to consider
show : bool
Plot the result (default : true)
format_data : bool
Whether or not to first call the format_data function (default: True).
Returns
----------
result : dict
A dictionary with the analysis results. 'average' is the correlation
by number of components for all data. 'individual' is a list of lists,
where each list is a correlation by number of components vector (for each
input list).
"""
warnings.warn('When input data is large, this computation can take a long time.')
def summary(x, max_dims=None):
# if data is a list, stack it
if type(x) is list:
x = np.vstack(x)
        # if max_dims is not set, use the smaller of the number of rows and columns
if max_dims is None:
if x.shape[1]>x.shape[0]:
max_dims = x.shape[0]
else:
max_dims = x.shape[1]
# correlation matrix for all dimensions
alldims = get_cdist(x)
corrs=[]
for dims in range(2, max_dims):
reduced = get_cdist(reducer(x, ndims=dims, reduce=reduce))
corrs.append(get_corr(alldims, reduced))
del reduced
return corrs
# common format
if format_data:
x = formatter(x, ppca=True)
# a dictionary to store results
result = {}
result['average'] = summary(x, max_dims)
result['individual'] = [summary(x_i, max_dims) for x_i in x]
if max_dims is None:
max_dims = len(result['average'])
# if show, plot it
if show:
fig, ax = plt.subplots()
ax = sns.tsplot(data=result['individual'], time=[i for i in range(2, max_dims+2)], err_style="unit_traces")
ax.set_title('Correlation with raw data by number of components')
ax.set_ylabel('Correlation')
ax.set_xlabel('Number of components')
plt.show()
return result
@memoize
def get_corr(reduced, alldims):
return pearsonr(alldims.ravel(), reduced.ravel())[0]
@memoize
def get_cdist(x):
return cdist(x, x)
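# A minimal sketch (hypothetical helper, not part of the public API) of the core
# idea used by `describe`: the quality of an n-dimensional embedding is measured
# as the Pearson correlation between the pairwise-distance matrix of the raw data
# and that of the reduced data.
def _embedding_quality_sketch(x, ndims=2, model='IncrementalPCA'):
    """Return the correlation between raw and reduced pairwise distances."""
    if isinstance(x, list):
        x = np.vstack(x)
    raw_dists = get_cdist(x)
    reduced_dists = get_cdist(reducer(x, ndims=ndims, reduce=model))
    return get_corr(raw_dists, reduced_dists)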
|
from test import CollectorTestCase
from test import get_collector_config
from iodrivesnmp import IODriveSNMPCollector
class TestIODriveSNMPCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('IODriveSNMPCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = IODriveSNMPCollector(config, None)
def test_import(self):
self.assertTrue(IODriveSNMPCollector)
|
import asyncio
import datetime as dt
from typing import Callable, Dict, List, Optional, Set
from httpx import RemoteProtocolError, TransportError
from onvif import ONVIFCamera, ONVIFService
from zeep.exceptions import Fault
from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback
from homeassistant.helpers.event import async_call_later
from homeassistant.util import dt as dt_util
from .const import LOGGER
from .models import Event
from .parsers import PARSERS
UNHANDLED_TOPICS = set()
SUBSCRIPTION_ERRORS = (
Fault,
asyncio.TimeoutError,
TransportError,
)
class EventManager:
"""ONVIF Event Manager."""
def __init__(self, hass: HomeAssistant, device: ONVIFCamera, unique_id: str):
"""Initialize event manager."""
self.hass: HomeAssistant = hass
self.device: ONVIFCamera = device
self.unique_id: str = unique_id
self.started: bool = False
self._subscription: ONVIFService = None
self._events: Dict[str, Event] = {}
self._listeners: List[CALLBACK_TYPE] = []
self._unsub_refresh: Optional[CALLBACK_TYPE] = None
super().__init__()
@property
def platforms(self) -> Set[str]:
"""Return platforms to setup."""
return {event.platform for event in self._events.values()}
@callback
def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]:
"""Listen for data updates."""
# This is the first listener, set up polling.
if not self._listeners:
self.async_schedule_pull()
self._listeners.append(update_callback)
@callback
def remove_listener() -> None:
"""Remove update listener."""
self.async_remove_listener(update_callback)
return remove_listener
@callback
def async_remove_listener(self, update_callback: CALLBACK_TYPE) -> None:
"""Remove data update."""
if update_callback in self._listeners:
self._listeners.remove(update_callback)
if not self._listeners and self._unsub_refresh:
self._unsub_refresh()
self._unsub_refresh = None
async def async_start(self) -> bool:
"""Start polling events."""
if await self.device.create_pullpoint_subscription():
# Create subscription manager
self._subscription = self.device.create_subscription_service(
"PullPointSubscription"
)
# Renew immediately
await self.async_renew()
# Initialize events
pullpoint = self.device.create_pullpoint_service()
try:
await pullpoint.SetSynchronizationPoint()
except SUBSCRIPTION_ERRORS:
pass
response = await pullpoint.PullMessages(
{"MessageLimit": 100, "Timeout": dt.timedelta(seconds=5)}
)
# Parse event initialization
await self.async_parse_messages(response.NotificationMessage)
self.started = True
return True
return False
async def async_stop(self) -> None:
"""Unsubscribe from events."""
self._listeners = []
self.started = False
if not self._subscription:
return
await self._subscription.Unsubscribe()
self._subscription = None
    async def async_restart(self, _now: Optional[dt.datetime] = None) -> None:
"""Restart the subscription assuming the camera rebooted."""
if not self.started:
return
if self._subscription:
try:
await self._subscription.Unsubscribe()
except SUBSCRIPTION_ERRORS:
pass # Ignored. The subscription may no longer exist.
self._subscription = None
try:
restarted = await self.async_start()
except SUBSCRIPTION_ERRORS:
restarted = False
if not restarted:
LOGGER.warning(
"Failed to restart ONVIF PullPoint subscription for '%s'. Retrying...",
self.unique_id,
)
# Try again in a minute
self._unsub_refresh = async_call_later(self.hass, 60, self.async_restart)
elif self._listeners:
LOGGER.debug(
"Restarted ONVIF PullPoint subscription for '%s'", self.unique_id
)
self.async_schedule_pull()
async def async_renew(self) -> None:
"""Renew subscription."""
if not self._subscription:
return
termination_time = (
(dt_util.utcnow() + dt.timedelta(days=1))
.isoformat(timespec="seconds")
.replace("+00:00", "Z")
)
await self._subscription.Renew(termination_time)
def async_schedule_pull(self) -> None:
"""Schedule async_pull_messages to run."""
self._unsub_refresh = async_call_later(self.hass, 1, self.async_pull_messages)
    async def async_pull_messages(self, _now: Optional[dt.datetime] = None) -> None:
"""Pull messages from device."""
if self.hass.state == CoreState.running:
try:
pullpoint = self.device.create_pullpoint_service()
response = await pullpoint.PullMessages(
{"MessageLimit": 100, "Timeout": dt.timedelta(seconds=60)}
)
# Renew subscription if less than two hours is left
if (
dt_util.as_utc(response.TerminationTime) - dt_util.utcnow()
).total_seconds() < 7200:
await self.async_renew()
except RemoteProtocolError:
                # Likely a shutdown event, nothing to see here
return
except SUBSCRIPTION_ERRORS as err:
LOGGER.warning(
"Failed to fetch ONVIF PullPoint subscription messages for '%s': %s",
self.unique_id,
err,
)
# Treat errors as if the camera restarted. Assume that the pullpoint
# subscription is no longer valid.
self._unsub_refresh = None
await self.async_restart()
return
# Parse response
await self.async_parse_messages(response.NotificationMessage)
# Update entities
for update_callback in self._listeners:
update_callback()
# Reschedule another pull
if self._listeners:
self.async_schedule_pull()
# pylint: disable=protected-access
async def async_parse_messages(self, messages) -> None:
"""Parse notification message."""
for msg in messages:
# Guard against empty message
if not msg.Topic:
continue
topic = msg.Topic._value_1
parser = PARSERS.get(topic)
if not parser:
if topic not in UNHANDLED_TOPICS:
LOGGER.info(
"No registered handler for event from %s: %s",
self.unique_id,
msg,
)
UNHANDLED_TOPICS.add(topic)
continue
event = await parser(self.unique_id, msg)
if not event:
LOGGER.warning("Unable to parse event from %s: %s", self.unique_id, msg)
return
self._events[event.uid] = event
def get_uid(self, uid) -> Event:
"""Retrieve event for given id."""
return self._events[uid]
def get_platform(self, platform) -> List[Event]:
"""Retrieve events for given platform."""
return [event for event in self._events.values() if event.platform == platform]
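# A rough usage sketch of EventManager (hypothetical; `hass`, `camera` and the
# unique id are placeholders, not defined in this module):
#
#   manager = EventManager(hass, camera, "aa:bb:cc:dd:ee:ff")
#   if await manager.async_start():
#       unsub = manager.async_add_listener(lambda: None)   # schedules polling
#       motion = manager.get_platform("binary_sensor")     # parsed events
#       unsub()
#       await manager.async_stop()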
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainercv.experimental.links.model.fcis.utils.mask_voting \
import mask_voting
from chainercv.utils import assert_is_bbox
from chainercv.utils import generate_random_bbox
class TestMaskVoting(unittest.TestCase):
def setUp(self):
n_roi = 5
n_class = 6
self.roi_size = 7
self.size = (18, 24)
self.bg_label = 0
self.roi_mask_prob = np.random.uniform(
size=(n_roi, self.roi_size, self.roi_size)).astype(np.float32)
self.roi_prob = np.random.uniform(
size=(n_roi, n_class)).astype(np.float32)
self.bbox = generate_random_bbox(n_roi, self.size, 0, 18)
def check_mask_voting(
self, seg_prob, bbox, cls_prob,
size, bg_label, roi_size):
xp = chainer.cuda.get_array_module(seg_prob)
seg_prob, bbox, label, cls_prob = mask_voting(
seg_prob, bbox, cls_prob, size,
0.5, 0.3, 0.5, 0.4, bg_label=bg_label)
n_roi = seg_prob.shape[0]
self.assertIsInstance(seg_prob, xp.ndarray)
self.assertEqual(seg_prob.shape[1:], (roi_size, roi_size))
self.assertTrue(
xp.all(xp.logical_and(seg_prob >= 0.0, seg_prob <= 1.0)))
self.assertIsInstance(label, xp.ndarray)
self.assertEqual(label.shape, (n_roi, ))
self.assertIsInstance(cls_prob, xp.ndarray)
self.assertEqual(cls_prob.shape, (n_roi, ))
assert_is_bbox(bbox, size)
def test_mask_voting_cpu(self):
self.check_mask_voting(
self.roi_mask_prob, self.bbox, self.roi_prob,
self.size, self.bg_label, self.roi_size)
testing.run_module(__name__, __file__)
|
import datetime
import json
import os
import re
import shutil
import coverage
from coverage import env
from coverage.backward import iitems, SimpleNamespace, format_local_datetime
from coverage.data import add_data_to_hash
from coverage.files import flat_rootname
from coverage.misc import CoverageException, ensure_dir, file_be_gone, Hasher, isolate_module
from coverage.report import get_analysis_to_report
from coverage.results import Numbers
from coverage.templite import Templite
os = isolate_module(os)
# Static files are looked for in a list of places.
STATIC_PATH = [
# The place Debian puts system Javascript libraries.
"/usr/share/javascript",
# Our htmlfiles directory.
os.path.join(os.path.dirname(__file__), "htmlfiles"),
]
def data_filename(fname, pkgdir=""):
"""Return the path to a data file of ours.
    The file is searched for on `STATIC_PATH`, and the first place it's found
    is returned.
Each directory in `STATIC_PATH` is searched as-is, and also, if `pkgdir`
is provided, at that sub-directory.
"""
tried = []
for static_dir in STATIC_PATH:
static_filename = os.path.join(static_dir, fname)
if os.path.exists(static_filename):
return static_filename
else:
tried.append(static_filename)
if pkgdir:
static_filename = os.path.join(static_dir, pkgdir, fname)
if os.path.exists(static_filename):
return static_filename
else:
tried.append(static_filename)
raise CoverageException(
"Couldn't find static file %r from %r, tried: %r" % (fname, os.getcwd(), tried)
)
def read_data(fname):
"""Return the contents of a data file of ours."""
with open(data_filename(fname)) as data_file:
return data_file.read()
def write_html(fname, html):
"""Write `html` to `fname`, properly encoded."""
html = re.sub(r"(\A\s+)|(\s+$)", "", html, flags=re.MULTILINE) + "\n"
with open(fname, "wb") as fout:
fout.write(html.encode('ascii', 'xmlcharrefreplace'))
class HtmlDataGeneration(object):
"""Generate structured data to be turned into HTML reports."""
EMPTY = "(empty)"
def __init__(self, cov):
self.coverage = cov
self.config = self.coverage.config
data = self.coverage.get_data()
self.has_arcs = data.has_arcs()
if self.config.show_contexts:
if data.measured_contexts() == set([""]):
self.coverage._warn("No contexts were measured")
data.set_query_contexts(self.config.report_contexts)
def data_for_file(self, fr, analysis):
"""Produce the data needed for one file's report."""
if self.has_arcs:
missing_branch_arcs = analysis.missing_branch_arcs()
arcs_executed = analysis.arcs_executed()
if self.config.show_contexts:
contexts_by_lineno = analysis.data.contexts_by_lineno(analysis.filename)
lines = []
for lineno, tokens in enumerate(fr.source_token_lines(), start=1):
# Figure out how to mark this line.
category = None
short_annotations = []
long_annotations = []
if lineno in analysis.excluded:
category = 'exc'
elif lineno in analysis.missing:
category = 'mis'
elif self.has_arcs and lineno in missing_branch_arcs:
category = 'par'
for b in missing_branch_arcs[lineno]:
if b < 0:
short_annotations.append("exit")
else:
short_annotations.append(b)
long_annotations.append(fr.missing_arc_description(lineno, b, arcs_executed))
elif lineno in analysis.statements:
category = 'run'
contexts = contexts_label = None
context_list = None
if category and self.config.show_contexts:
contexts = sorted(c or self.EMPTY for c in contexts_by_lineno[lineno])
if contexts == [self.EMPTY]:
contexts_label = self.EMPTY
else:
contexts_label = "{} ctx".format(len(contexts))
context_list = contexts
lines.append(SimpleNamespace(
tokens=tokens,
number=lineno,
category=category,
statement=(lineno in analysis.statements),
contexts=contexts,
contexts_label=contexts_label,
context_list=context_list,
short_annotations=short_annotations,
long_annotations=long_annotations,
))
file_data = SimpleNamespace(
relative_filename=fr.relative_filename(),
nums=analysis.numbers,
lines=lines,
)
return file_data
class HtmlReporter(object):
"""HTML reporting."""
# These files will be copied from the htmlfiles directory to the output
# directory.
STATIC_FILES = [
("style.css", ""),
("jquery.min.js", "jquery"),
("jquery.ba-throttle-debounce.min.js", "jquery-throttle-debounce"),
("jquery.hotkeys.js", "jquery-hotkeys"),
("jquery.isonscreen.js", "jquery-isonscreen"),
("jquery.tablesorter.min.js", "jquery-tablesorter"),
("coverage_html.js", ""),
("keybd_closed.png", ""),
("keybd_open.png", ""),
("favicon_32.png", ""),
]
def __init__(self, cov):
self.coverage = cov
self.config = self.coverage.config
self.directory = self.config.html_dir
title = self.config.html_title
if env.PY2:
title = title.decode("utf8")
if self.config.extra_css:
self.extra_css = os.path.basename(self.config.extra_css)
else:
self.extra_css = None
self.data = self.coverage.get_data()
self.has_arcs = self.data.has_arcs()
self.file_summaries = []
self.all_files_nums = []
self.incr = IncrementalChecker(self.directory)
self.datagen = HtmlDataGeneration(self.coverage)
self.totals = Numbers()
self.template_globals = {
# Functions available in the templates.
'escape': escape,
'pair': pair,
'len': len,
# Constants for this report.
'__url__': coverage.__url__,
'__version__': coverage.__version__,
'title': title,
'time_stamp': format_local_datetime(datetime.datetime.now()),
'extra_css': self.extra_css,
'has_arcs': self.has_arcs,
'show_contexts': self.config.show_contexts,
# Constants for all reports.
# These css classes determine which lines are highlighted by default.
'category': {
'exc': 'exc show_exc',
'mis': 'mis show_mis',
'par': 'par run show_par',
'run': 'run',
}
}
self.pyfile_html_source = read_data("pyfile.html")
self.source_tmpl = Templite(self.pyfile_html_source, self.template_globals)
def report(self, morfs):
"""Generate an HTML report for `morfs`.
`morfs` is a list of modules or file names.
"""
# Read the status data and check that this run used the same
# global data as the last run.
self.incr.read()
self.incr.check_global_data(self.config, self.pyfile_html_source)
# Process all the files.
for fr, analysis in get_analysis_to_report(self.coverage, morfs):
self.html_file(fr, analysis)
if not self.all_files_nums:
raise CoverageException("No data to report.")
self.totals = sum(self.all_files_nums)
# Write the index file.
self.index_file()
self.make_local_static_report_files()
return self.totals.n_statements and self.totals.pc_covered
def make_local_static_report_files(self):
"""Make local instances of static files for HTML report."""
# The files we provide must always be copied.
for static, pkgdir in self.STATIC_FILES:
shutil.copyfile(
data_filename(static, pkgdir),
os.path.join(self.directory, static)
)
# The user may have extra CSS they want copied.
if self.extra_css:
shutil.copyfile(
self.config.extra_css,
os.path.join(self.directory, self.extra_css)
)
def html_file(self, fr, analysis):
"""Generate an HTML file for one source file."""
rootname = flat_rootname(fr.relative_filename())
html_filename = rootname + ".html"
ensure_dir(self.directory)
html_path = os.path.join(self.directory, html_filename)
# Get the numbers for this file.
nums = analysis.numbers
self.all_files_nums.append(nums)
if self.config.skip_covered:
# Don't report on 100% files.
no_missing_lines = (nums.n_missing == 0)
no_missing_branches = (nums.n_partial_branches == 0)
if no_missing_lines and no_missing_branches:
# If there's an existing file, remove it.
file_be_gone(html_path)
return
if self.config.skip_empty:
# Don't report on empty files.
if nums.n_statements == 0:
file_be_gone(html_path)
return
# Find out if the file on disk is already correct.
if self.incr.can_skip_file(self.data, fr, rootname):
self.file_summaries.append(self.incr.index_info(rootname))
return
# Write the HTML page for this file.
file_data = self.datagen.data_for_file(fr, analysis)
for ldata in file_data.lines:
# Build the HTML for the line.
html = []
for tok_type, tok_text in ldata.tokens:
if tok_type == "ws":
html.append(escape(tok_text))
else:
                    tok_html = escape(tok_text) or '&nbsp;'
html.append(
u'<span class="{}">{}</span>'.format(tok_type, tok_html)
)
ldata.html = ''.join(html)
if ldata.short_annotations:
# 202F is NARROW NO-BREAK SPACE.
# 219B is RIGHTWARDS ARROW WITH STROKE.
                ldata.annotate = u",&nbsp;&nbsp; ".join(
                    u"{}&#x202F;&#x219B;&#x202F;{}".format(ldata.number, d)
                    for d in ldata.short_annotations
)
else:
ldata.annotate = None
if ldata.long_annotations:
longs = ldata.long_annotations
if len(longs) == 1:
ldata.annotate_long = longs[0]
else:
ldata.annotate_long = u"{:d} missed branches: {}".format(
len(longs),
u", ".join(
u"{:d}) {}".format(num, ann_long)
for num, ann_long in enumerate(longs, start=1)
),
)
else:
ldata.annotate_long = None
css_classes = []
if ldata.category:
css_classes.append(self.template_globals['category'][ldata.category])
ldata.css_class = ' '.join(css_classes) or "pln"
html = self.source_tmpl.render(file_data.__dict__)
write_html(html_path, html)
# Save this file's information for the index file.
index_info = {
'nums': nums,
'html_filename': html_filename,
'relative_filename': fr.relative_filename(),
}
self.file_summaries.append(index_info)
self.incr.set_index_info(rootname, index_info)
def index_file(self):
"""Write the index.html file for this report."""
index_tmpl = Templite(read_data("index.html"), self.template_globals)
html = index_tmpl.render({
'files': self.file_summaries,
'totals': self.totals,
})
write_html(os.path.join(self.directory, "index.html"), html)
# Write the latest hashes for next time.
self.incr.write()
class IncrementalChecker(object):
"""Logic and data to support incremental reporting."""
STATUS_FILE = "status.json"
STATUS_FORMAT = 2
# pylint: disable=wrong-spelling-in-comment,useless-suppression
# The data looks like:
#
# {
# "format": 2,
# "globals": "540ee119c15d52a68a53fe6f0897346d",
# "version": "4.0a1",
# "files": {
# "cogapp___init__": {
# "hash": "e45581a5b48f879f301c0f30bf77a50c",
# "index": {
# "html_filename": "cogapp___init__.html",
# "relative_filename": "cogapp/__init__",
# "nums": [ 1, 14, 0, 0, 0, 0, 0 ]
# }
# },
# ...
# "cogapp_whiteutils": {
# "hash": "8504bb427fc488c4176809ded0277d51",
# "index": {
# "html_filename": "cogapp_whiteutils.html",
# "relative_filename": "cogapp/whiteutils",
# "nums": [ 1, 59, 0, 1, 28, 2, 2 ]
# }
# }
# }
# }
def __init__(self, directory):
self.directory = directory
self.reset()
def reset(self):
"""Initialize to empty. Causes all files to be reported."""
self.globals = ''
self.files = {}
def read(self):
"""Read the information we stored last time."""
usable = False
try:
status_file = os.path.join(self.directory, self.STATUS_FILE)
with open(status_file) as fstatus:
status = json.load(fstatus)
except (IOError, ValueError):
usable = False
else:
usable = True
if status['format'] != self.STATUS_FORMAT:
usable = False
elif status['version'] != coverage.__version__:
usable = False
if usable:
self.files = {}
for filename, fileinfo in iitems(status['files']):
fileinfo['index']['nums'] = Numbers(*fileinfo['index']['nums'])
self.files[filename] = fileinfo
self.globals = status['globals']
else:
self.reset()
def write(self):
"""Write the current status."""
status_file = os.path.join(self.directory, self.STATUS_FILE)
files = {}
for filename, fileinfo in iitems(self.files):
fileinfo['index']['nums'] = fileinfo['index']['nums'].init_args()
files[filename] = fileinfo
status = {
'format': self.STATUS_FORMAT,
'version': coverage.__version__,
'globals': self.globals,
'files': files,
}
with open(status_file, "w") as fout:
json.dump(status, fout, separators=(',', ':'))
def check_global_data(self, *data):
"""Check the global data that can affect incremental reporting."""
m = Hasher()
for d in data:
m.update(d)
these_globals = m.hexdigest()
if self.globals != these_globals:
self.reset()
self.globals = these_globals
def can_skip_file(self, data, fr, rootname):
"""Can we skip reporting this file?
`data` is a CoverageData object, `fr` is a `FileReporter`, and
`rootname` is the name being used for the file.
"""
m = Hasher()
m.update(fr.source().encode('utf-8'))
add_data_to_hash(data, fr.filename, m)
this_hash = m.hexdigest()
that_hash = self.file_hash(rootname)
if this_hash == that_hash:
# Nothing has changed to require the file to be reported again.
return True
else:
self.set_file_hash(rootname, this_hash)
return False
def file_hash(self, fname):
"""Get the hash of `fname`'s contents."""
return self.files.get(fname, {}).get('hash', '')
def set_file_hash(self, fname, val):
"""Set the hash of `fname`'s contents."""
self.files.setdefault(fname, {})['hash'] = val
def index_info(self, fname):
"""Get the information for index.html for `fname`."""
return self.files.get(fname, {}).get('index', {})
def set_index_info(self, fname, info):
"""Set the information for index.html for `fname`."""
self.files.setdefault(fname, {})['index'] = info
# Helpers for templates and generating HTML
def escape(t):
"""HTML-escape the text in `t`.
This is only suitable for HTML text, not attributes.
"""
# Convert HTML special chars into HTML entities.
    return t.replace("&", "&amp;").replace("<", "&lt;")
def pair(ratio):
"""Format a pair of numbers so JavaScript can read them in an attribute."""
return "%s %s" % ratio
|
import tensorflow as tf
import os, sys
curr_path = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(curr_path, '../'))
from datasets import pascalvoc_to_tfrecords, kitti_object_to_tfrecords
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string(
'dataset_name', 'pascalvoc',
'The name of the dataset to convert.')
tf.app.flags.DEFINE_string(
'dataset_root', None,
'Directory where the original dataset is stored.')
tf.app.flags.DEFINE_string(
'year', '0712',
'Year of VOC dataset.')
tf.app.flags.DEFINE_string(
'split', 'trainval',
'Split of dataset, trainval/train/val/test.')
tf.app.flags.DEFINE_string(
'output_dir', './',
'Output directory where to store TFRecords files.')
def main(_):
print('Dataset root dir:', FLAGS.dataset_root)
print('Output directory:', FLAGS.output_dir)
if FLAGS.dataset_name == 'pascalvoc':
pascalvoc_to_tfrecords.run(FLAGS.dataset_root,
FLAGS.year,
FLAGS.split,
FLAGS.output_dir,
shuffling=True)
elif FLAGS.dataset_name == 'kitti':
kitti_object_to_tfrecords.run(FLAGS.dataset_root,
FLAGS.split,
FLAGS.output_dir,
shuffling=True)
else:
raise ValueError('Dataset [%s] was not recognized.' % FLAGS.dataset_name)
if __name__ == '__main__':
tf.app.run()
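# Hypothetical invocation of this conversion script (the script name and the
# paths are assumptions):
#   python convert_datasets.py --dataset_name=pascalvoc \
#       --dataset_root=/data/VOCdevkit --year=0712 --split=trainval \
#       --output_dir=./tfrecords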
|
import datetime
import time
import unittest
from absl import flags
import mock
from perfkitbenchmarker.linux_benchmarks import object_storage_service_benchmark
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
class TestBuildCommands(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(TestBuildCommands, self).setUp()
FLAGS.object_storage_multistream_objects_per_stream = 100
FLAGS.object_storage_object_sizes = {'1KB': '100%'}
FLAGS.object_storage_streams_per_vm = 1
FLAGS.num_vms = 1
FLAGS.object_storage_object_naming_scheme = 'sequential_by_stream'
def testBuildCommands(self):
vm = mock.MagicMock()
vm.RobustRemoteCommand = mock.MagicMock(return_value=('', ''))
command_builder = mock.MagicMock()
service = mock.MagicMock()
with mock.patch(time.__name__ + '.time', return_value=1.0):
with mock.patch(object_storage_service_benchmark.__name__ +
'._ProcessMultiStreamResults'):
with mock.patch(object_storage_service_benchmark.__name__ +
'.LoadWorkerOutput', return_value=(None, None, None)):
object_storage_service_benchmark.MultiStreamRWBenchmark(
[], {}, [vm], command_builder, service, 'bucket')
self.assertEqual(
command_builder.BuildCommand.call_args_list[0],
mock.call(['--bucket=bucket',
'--objects_per_stream=100',
'--num_streams=1',
'--start_time=16.1',
'--objects_written_file=/tmp/pkb/pkb-objects-written',
'--object_sizes="{1000: 100.0}"',
'--object_naming_scheme=sequential_by_stream',
'--scenario=MultiStreamWrite',
'--stream_num_start=0',
'--vm_id=0']))
self.assertEqual(
command_builder.BuildCommand.call_args_list[1],
mock.call(['--bucket=bucket',
'--objects_per_stream=100',
'--num_streams=1',
'--start_time=16.1',
'--objects_written_file=/tmp/pkb/pkb-objects-written',
'--scenario=MultiStreamRead',
'--stream_num_start=0',
'--vm_id=0']))
class TestDistributionToBackendFormat(pkb_common_test_case.PkbCommonTestCase):
def testPointDistribution(self):
dist = {'100KB': '100%'}
self.assertEqual(
object_storage_service_benchmark._DistributionToBackendFormat(dist),
{100000: 100.0})
def testMultiplePointsDistribution(self):
dist = {'1B': '10%',
'2B': '20%',
'4B': '70%'}
self.assertEqual(
object_storage_service_benchmark._DistributionToBackendFormat(dist),
{1: 10.0,
2: 20.0,
4: 70.0})
def testAbbreviatedPointDistribution(self):
dist = '100KB'
self.assertEqual(
object_storage_service_benchmark._DistributionToBackendFormat(dist),
{100000: 100.0})
def testBadPercentages(self):
dist = {'1B': '50%'}
with self.assertRaises(ValueError):
object_storage_service_benchmark._DistributionToBackendFormat(dist)
class TestColdObjectsWrittenFiles(pkb_common_test_case.PkbCommonTestCase):
def testFilename(self):
"""Tests the objects written filename can be parsed for an age."""
FLAGS.object_storage_region = 'us-central1-a'
FLAGS.object_storage_objects_written_file_prefix = 'prefix'
write_time = datetime.datetime.now() - datetime.timedelta(hours=72)
with mock.patch.object(
object_storage_service_benchmark,
'_DatetimeNow',
return_value=write_time):
filename = (
object_storage_service_benchmark._ColdObjectsWrittenFilename())
read_time = datetime.datetime.now()
with mock.patch.object(
object_storage_service_benchmark,
'_DatetimeNow',
return_value=read_time):
age = object_storage_service_benchmark._ColdObjectsWrittenFileAgeHours(
filename)
# Verify that the age is between 72 and 73 hours.
self.assertLessEqual(72, age)
self.assertLessEqual(age, 73)
if __name__ == '__main__':
unittest.main()
|
import copy
import re
from os import PathLike
from typing import Iterable, Type, Union
import box
from box.converters import (
BOX_PARAMETERS,
_from_csv,
_from_json,
_from_msgpack,
_from_toml,
_from_yaml,
_to_csv,
_to_json,
_to_msgpack,
_to_toml,
_to_yaml,
msgpack_available,
toml_available,
yaml_available,
)
from box.exceptions import BoxError, BoxTypeError
_list_pos_re = re.compile(r"\[(\d+)\]")
class BoxList(list):
"""
Drop in replacement of list, that converts added objects to Box or BoxList
objects as necessary.
"""
def __new__(cls, *args, **kwargs):
obj = super().__new__(cls, *args, **kwargs)
# This is required for pickling to work correctly
obj.box_options = {"box_class": box.Box}
obj.box_options.update(kwargs)
obj.box_org_ref = 0
return obj
def __init__(self, iterable: Iterable = None, box_class: Type[box.Box] = box.Box, **box_options):
self.box_options = box_options
self.box_options["box_class"] = box_class
self.box_org_ref = id(iterable) if iterable else 0
if iterable:
for x in iterable:
self.append(x)
if box_options.get("frozen_box"):
def frozen(*args, **kwargs):
raise BoxError("BoxList is frozen")
for method in ["append", "extend", "insert", "pop", "remove", "reverse", "sort"]:
self.__setattr__(method, frozen)
def __getitem__(self, item):
if self.box_options.get("box_dots") and isinstance(item, str) and item.startswith("["):
list_pos = _list_pos_re.search(item)
value = super(BoxList, self).__getitem__(int(list_pos.groups()[0]))
if len(list_pos.group()) == len(item):
return value
return value.__getitem__(item[len(list_pos.group()) :].lstrip("."))
return super(BoxList, self).__getitem__(item)
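    # A hypothetical sketch of the `box_dots` indexing handled above:
    #   bl = BoxList([{"name": "ada"}], box_dots=True)
    #   bl["[0].name"]  # -> "ada"
    #   bl["[0]"]       # -> the Box wrapping the first element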
def __delitem__(self, key):
if self.box_options.get("frozen_box"):
raise BoxError("BoxList is frozen")
if self.box_options.get("box_dots") and isinstance(key, str) and key.startswith("["):
list_pos = _list_pos_re.search(key)
pos = int(list_pos.groups()[0])
if len(list_pos.group()) == len(key):
return super(BoxList, self).__delitem__(pos)
if hasattr(self[pos], "__delitem__"):
return self[pos].__delitem__(key[len(list_pos.group()) :].lstrip(".")) # type: ignore
super(BoxList, self).__delitem__(key)
def __setitem__(self, key, value):
if self.box_options.get("frozen_box"):
raise BoxError("BoxList is frozen")
if self.box_options.get("box_dots") and isinstance(key, str) and key.startswith("["):
list_pos = _list_pos_re.search(key)
pos = int(list_pos.groups()[0])
if len(list_pos.group()) == len(key):
return super(BoxList, self).__setitem__(pos, value)
return super(BoxList, self).__getitem__(pos).__setitem__(key[len(list_pos.group()) :].lstrip("."), value)
super(BoxList, self).__setitem__(key, value)
def _is_intact_type(self, obj):
if self.box_options.get("box_intact_types") and isinstance(obj, self.box_options["box_intact_types"]):
return True
return False
def _convert(self, p_object):
if isinstance(p_object, dict) and not self._is_intact_type(p_object):
p_object = self.box_options["box_class"](p_object, **self.box_options)
elif isinstance(p_object, box.Box):
p_object._box_config.update(self.box_options)
if isinstance(p_object, list) and not self._is_intact_type(p_object):
p_object = self if id(p_object) == self.box_org_ref else self.__class__(p_object, **self.box_options)
elif isinstance(p_object, BoxList):
p_object.box_options.update(self.box_options)
return p_object
def append(self, p_object):
super(BoxList, self).append(self._convert(p_object))
def extend(self, iterable):
for item in iterable:
self.append(item)
def insert(self, index, p_object):
super(BoxList, self).insert(index, self._convert(p_object))
def _dotted_helper(self):
keys = []
for idx, item in enumerate(self):
added = False
if isinstance(item, box.Box):
for key in item.keys(dotted=True):
keys.append(f"[{idx}].{key}")
added = True
elif isinstance(item, BoxList):
for key in item._dotted_helper():
keys.append(f"[{idx}]{key}")
added = True
if not added:
keys.append(f"[{idx}]")
return keys
def __repr__(self):
return f"<BoxList: {self.to_list()}>"
def __str__(self):
return str(self.to_list())
def __copy__(self):
return self.__class__((x for x in self), **self.box_options)
def __deepcopy__(self, memo=None):
out = self.__class__()
memo = memo or {}
memo[id(self)] = out
for k in self:
out.append(copy.deepcopy(k, memo=memo))
return out
def __hash__(self):
if self.box_options.get("frozen_box"):
hashing = 98765
hashing ^= hash(tuple(self))
return hashing
raise BoxTypeError("unhashable type: 'BoxList'")
def to_list(self):
new_list = []
for x in self:
if x is self:
new_list.append(new_list)
elif isinstance(x, box.Box):
new_list.append(x.to_dict())
elif isinstance(x, BoxList):
new_list.append(x.to_list())
else:
new_list.append(x)
return new_list
def to_json(
self,
filename: Union[str, PathLike] = None,
encoding: str = "utf-8",
errors: str = "strict",
multiline: bool = False,
**json_kwargs,
):
"""
Transform the BoxList object into a JSON string.
:param filename: If provided will save to file
:param encoding: File encoding
:param errors: How to handle encoding errors
        :param multiline: Put each item in the list on its own line
:param json_kwargs: additional arguments to pass to json.dump(s)
:return: string of JSON or return of `json.dump`
"""
if filename and multiline:
lines = [_to_json(item, filename=None, encoding=encoding, errors=errors, **json_kwargs) for item in self]
with open(filename, "w", encoding=encoding, errors=errors) as f:
f.write("\n".join(lines))
else:
return _to_json(self.to_list(), filename=filename, encoding=encoding, errors=errors, **json_kwargs)
@classmethod
def from_json(
cls,
json_string: str = None,
filename: Union[str, PathLike] = None,
encoding: str = "utf-8",
errors: str = "strict",
multiline: bool = False,
**kwargs,
):
"""
Transform a json object string into a BoxList object. If the incoming
json is a dict, you must use Box.from_json.
:param json_string: string to pass to `json.loads`
:param filename: filename to open and pass to `json.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param multiline: One object per line
:param kwargs: parameters to pass to `Box()` or `json.loads`
:return: BoxList object from json data
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_json(
json_string, filename=filename, encoding=encoding, errors=errors, multiline=multiline, **kwargs
)
if not isinstance(data, list):
raise BoxError(f"json data not returned as a list, but rather a {type(data).__name__}")
return cls(data, **bx_args)
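    # A hypothetical sketch of the `multiline` option described above
    # (newline-delimited JSON, one object per line):
    #   BoxList.from_json('{"a": 1}\n{"a": 2}', multiline=True)
    #   # -> <BoxList: [{'a': 1}, {'a': 2}]>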
if yaml_available:
def to_yaml(
self,
filename: Union[str, PathLike] = None,
default_flow_style: bool = False,
encoding: str = "utf-8",
errors: str = "strict",
**yaml_kwargs,
):
"""
Transform the BoxList object into a YAML string.
:param filename: If provided will save to file
:param default_flow_style: False will recursively dump dicts
:param encoding: File encoding
:param errors: How to handle encoding errors
:param yaml_kwargs: additional arguments to pass to yaml.dump
:return: string of YAML or return of `yaml.dump`
"""
return _to_yaml(
self.to_list(),
filename=filename,
default_flow_style=default_flow_style,
encoding=encoding,
errors=errors,
**yaml_kwargs,
)
@classmethod
def from_yaml(
cls,
yaml_string: str = None,
filename: Union[str, PathLike] = None,
encoding: str = "utf-8",
errors: str = "strict",
**kwargs,
):
"""
Transform a yaml object string into a BoxList object.
:param yaml_string: string to pass to `yaml.load`
:param filename: filename to open and pass to `yaml.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `BoxList()` or `yaml.load`
:return: BoxList object from yaml data
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_yaml(yaml_string=yaml_string, filename=filename, encoding=encoding, errors=errors, **kwargs)
if not isinstance(data, list):
raise BoxError(f"yaml data not returned as a list but rather a {type(data).__name__}")
return cls(data, **bx_args)
else:
def to_yaml(
self,
filename: Union[str, PathLike] = None,
default_flow_style: bool = False,
encoding: str = "utf-8",
errors: str = "strict",
**yaml_kwargs,
):
raise BoxError('yaml is unavailable on this system, please install the "ruamel.yaml" or "PyYAML" package')
@classmethod
def from_yaml(
cls,
yaml_string: str = None,
filename: Union[str, PathLike] = None,
encoding: str = "utf-8",
errors: str = "strict",
**kwargs,
):
raise BoxError('yaml is unavailable on this system, please install the "ruamel.yaml" or "PyYAML" package')
if toml_available:
def to_toml(
self,
filename: Union[str, PathLike] = None,
key_name: str = "toml",
encoding: str = "utf-8",
errors: str = "strict",
):
"""
Transform the BoxList object into a toml string.
            :param filename: File to write the toml object to
:param key_name: Specify the name of the key to store the string under
(cannot directly convert to toml)
:param encoding: File encoding
:param errors: How to handle encoding errors
:return: string of TOML (if no filename provided)
"""
return _to_toml({key_name: self.to_list()}, filename=filename, encoding=encoding, errors=errors)
@classmethod
def from_toml(
cls,
toml_string: str = None,
filename: Union[str, PathLike] = None,
key_name: str = "toml",
encoding: str = "utf-8",
errors: str = "strict",
**kwargs,
):
"""
Transforms a toml string or file into a BoxList object
:param toml_string: string to pass to `toml.load`
:param filename: filename to open and pass to `toml.load`
:param key_name: Specify the name of the key to pull the list from
(cannot directly convert from toml)
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `Box()`
:return:
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_toml(toml_string=toml_string, filename=filename, encoding=encoding, errors=errors)
if key_name not in data:
raise BoxError(f"{key_name} was not found.")
return cls(data[key_name], **bx_args)
else:
def to_toml(
self,
filename: Union[str, PathLike] = None,
key_name: str = "toml",
encoding: str = "utf-8",
errors: str = "strict",
):
raise BoxError('toml is unavailable on this system, please install the "toml" package')
@classmethod
def from_toml(
cls,
toml_string: str = None,
filename: Union[str, PathLike] = None,
key_name: str = "toml",
encoding: str = "utf-8",
errors: str = "strict",
**kwargs,
):
raise BoxError('toml is unavailable on this system, please install the "toml" package')
if msgpack_available:
def to_msgpack(self, filename: Union[str, PathLike] = None, **kwargs):
"""
            Transform the BoxList object into msgpack bytes.
            :param filename: If provided will save to file
            :return: msgpack bytes (if no filename provided)
"""
return _to_msgpack(self.to_list(), filename=filename, **kwargs)
@classmethod
def from_msgpack(cls, msgpack_bytes: bytes = None, filename: Union[str, PathLike] = None, **kwargs):
"""
            Transforms msgpack bytes or a file into a BoxList object
            :param msgpack_bytes: bytes to pass to `msgpack.unpackb`
            :param filename: filename to open and pass to `msgpack.unpack`
:param kwargs: parameters to pass to `Box()`
:return:
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_msgpack(msgpack_bytes=msgpack_bytes, filename=filename, **kwargs)
if not isinstance(data, list):
raise BoxError(f"msgpack data not returned as a list but rather a {type(data).__name__}")
return cls(data, **bx_args)
else:
def to_msgpack(self, filename: Union[str, PathLike] = None, **kwargs):
raise BoxError('msgpack is unavailable on this system, please install the "msgpack" package')
@classmethod
def from_msgpack(
cls,
msgpack_bytes: bytes = None,
filename: Union[str, PathLike] = None,
encoding: str = "utf-8",
errors: str = "strict",
**kwargs,
):
raise BoxError('msgpack is unavailable on this system, please install the "msgpack" package')
def to_csv(self, filename: Union[str, PathLike] = None, encoding: str = "utf-8", errors: str = "strict"):
return _to_csv(self, filename=filename, encoding=encoding, errors=errors)
@classmethod
def from_csv(
cls,
csv_string: str = None,
filename: Union[str, PathLike] = None,
encoding: str = "utf-8",
errors: str = "strict",
):
return cls(_from_csv(csv_string=csv_string, filename=filename, encoding=encoding, errors=errors))
|
revision = "3307381f3b88"
down_revision = "412b22cb656a"
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column(
"authorities", "owner", existing_type=sa.VARCHAR(length=128), nullable=True
)
op.drop_column("authorities", "not_after")
op.drop_column("authorities", "bits")
op.drop_column("authorities", "cn")
op.drop_column("authorities", "not_before")
op.add_column(
"certificates", sa.Column("root_authority_id", sa.Integer(), nullable=True)
)
op.alter_column("certificates", "body", existing_type=sa.TEXT(), nullable=False)
op.alter_column(
"certificates", "owner", existing_type=sa.VARCHAR(length=128), nullable=True
)
op.drop_constraint(
u"certificates_authority_id_fkey", "certificates", type_="foreignkey"
)
op.create_foreign_key(
None,
"certificates",
"authorities",
["authority_id"],
["id"],
ondelete="CASCADE",
)
op.create_foreign_key(
None,
"certificates",
"authorities",
["root_authority_id"],
["id"],
ondelete="CASCADE",
)
### end Alembic commands ###
    # link existing certificates to their authority certificates
conn = op.get_bind()
for id, body, owner in conn.execute(
text("select id, body, owner from authorities")
):
if not owner:
owner = "lemur@nobody"
# look up certificate by body, if duplications are found, pick one
stmt = text("select id from certificates where body=:body")
stmt = stmt.bindparams(body=body)
root_certificate = conn.execute(stmt).fetchone()
if root_certificate:
stmt = text(
"update certificates set root_authority_id=:root_authority_id where id=:id"
)
stmt = stmt.bindparams(root_authority_id=id, id=root_certificate[0])
op.execute(stmt)
# link owner roles to their authorities
stmt = text("select id from roles where name=:name")
stmt = stmt.bindparams(name=owner)
owner_role = conn.execute(stmt).fetchone()
if not owner_role:
stmt = text(
"insert into roles (name, description) values (:name, :description)"
)
stmt = stmt.bindparams(
name=owner, description="Lemur generated role or existing owner."
)
op.execute(stmt)
stmt = text("select id from roles where name=:name")
stmt = stmt.bindparams(name=owner)
owner_role = conn.execute(stmt).fetchone()
stmt = text(
"select * from roles_authorities where role_id=:role_id and authority_id=:authority_id"
)
stmt = stmt.bindparams(role_id=owner_role[0], authority_id=id)
exists = conn.execute(stmt).fetchone()
if not exists:
stmt = text(
"insert into roles_authorities (role_id, authority_id) values (:role_id, :authority_id)"
)
stmt = stmt.bindparams(role_id=owner_role[0], authority_id=id)
op.execute(stmt)
# link owner roles to their certificates
for id, owner in conn.execute(text("select id, owner from certificates")):
if not owner:
owner = "lemur@nobody"
stmt = text("select id from roles where name=:name")
stmt = stmt.bindparams(name=owner)
owner_role = conn.execute(stmt).fetchone()
if not owner_role:
stmt = text(
"insert into roles (name, description) values (:name, :description)"
)
stmt = stmt.bindparams(
name=owner, description="Lemur generated role or existing owner."
)
op.execute(stmt)
# link owner roles to their authorities
stmt = text("select id from roles where name=:name")
stmt = stmt.bindparams(name=owner)
owner_role = conn.execute(stmt).fetchone()
stmt = text(
"select * from roles_certificates where role_id=:role_id and certificate_id=:certificate_id"
)
stmt = stmt.bindparams(role_id=owner_role[0], certificate_id=id)
exists = conn.execute(stmt).fetchone()
if not exists:
stmt = text(
"insert into roles_certificates (role_id, certificate_id) values (:role_id, :certificate_id)"
)
stmt = stmt.bindparams(role_id=owner_role[0], certificate_id=id)
op.execute(stmt)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "certificates", type_="foreignkey")
op.drop_constraint(None, "certificates", type_="foreignkey")
op.create_foreign_key(
u"certificates_authority_id_fkey",
"certificates",
"authorities",
["authority_id"],
["id"],
)
op.alter_column(
"certificates", "owner", existing_type=sa.VARCHAR(length=128), nullable=True
)
op.alter_column("certificates", "body", existing_type=sa.TEXT(), nullable=True)
op.drop_column("certificates", "root_authority_id")
op.add_column(
"authorities",
sa.Column(
"not_before", postgresql.TIMESTAMP(), autoincrement=False, nullable=True
),
)
op.add_column(
"authorities",
sa.Column("cn", sa.VARCHAR(length=128), autoincrement=False, nullable=True),
)
op.add_column(
"authorities",
sa.Column("bits", sa.INTEGER(), autoincrement=False, nullable=True),
)
op.add_column(
"authorities",
sa.Column(
"not_after", postgresql.TIMESTAMP(), autoincrement=False, nullable=True
),
)
op.alter_column(
"authorities", "owner", existing_type=sa.VARCHAR(length=128), nullable=True
)
### end Alembic commands ###
|
from os import environ
import asyncio
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
class Component(ApplicationSession):
async def onJoin(self, details):
# a remote procedure; see frontend.py for a Python front-end
# that calls this. Any language with WAMP bindings can now call
        # this procedure if it's connected to the same router and realm.
def add2(x, y):
return x + y
reg = await self.register(add2, 'com.myapp.add2')
print("registered 'com.myapp.add2' with id {}".format(reg.id))
# publish an event every second. The event payloads can be
        # anything JSON- and msgpack-serializable
while True:
self.publish('com.myapp.hello', 'Hello, world!')
await asyncio.sleep(1)
if __name__ == '__main__':
runner = ApplicationRunner(
environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws"),
"crossbardemo",
)
runner.run(Component)
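# On the calling side (the frontend.py referenced above), the registered
# procedure and the published topic could be used roughly like this
# (sketch only; `session` and `on_hello` are assumptions):
#   result = await session.call('com.myapp.add2', 2, 3)   # -> 5
#   await session.subscribe(on_hello, 'com.myapp.hello')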
|
from celery import current_task
from django.core.exceptions import PermissionDenied
from django.db import transaction
from weblate.machinery import MACHINE_TRANSLATION_SERVICES
from weblate.trans.models import Change, Component, Suggestion, Unit
from weblate.utils.state import STATE_FUZZY, STATE_TRANSLATED
class AutoTranslate:
def __init__(self, user, translation, filter_type, mode):
self.user = user
self.translation = translation
self.filter_type = filter_type
self.mode = mode
self.updated = 0
self.progress_steps = 0
self.target_state = STATE_FUZZY if mode == "fuzzy" else STATE_TRANSLATED
def get_units(self, filter_mode=True):
units = self.translation.unit_set.all()
if self.mode == "suggest" and filter_mode:
units = units.filter(suggestion__isnull=True)
return units.filter_type(self.filter_type)
def set_progress(self, current):
if current_task and current_task.request.id and self.progress_steps:
current_task.update_state(
state="PROGRESS",
meta={
"progress": 100 * current // self.progress_steps,
"translation": self.translation.pk,
},
)
def update(self, unit, state, target):
if self.mode == "suggest" or len(target) > unit.get_max_length():
Suggestion.objects.add(unit, target, None, False)
else:
unit.translate(self.user, target, state, Change.ACTION_AUTO, False)
self.updated += 1
def post_process(self):
if self.updated > 0:
self.translation.invalidate_cache()
if self.user:
self.user.profile.increase_count("translated", self.updated)
@transaction.atomic
def process_others(self, source):
"""Perform automatic translation based on other components."""
kwargs = {
"translation__language": self.translation.language,
"state__gte": STATE_TRANSLATED,
}
source_language = self.translation.component.source_language
exclude = {}
if source:
component = Component.objects.get(id=source)
if (
not component.project.contribute_shared_tm
                and component.project != self.translation.component.project
) or component.source_language != source_language:
raise PermissionDenied()
kwargs["translation__component"] = component
else:
project = self.translation.component.project
kwargs["translation__component__project"] = project
kwargs["translation__component__source_language"] = source_language
exclude["translation"] = self.translation
sources = Unit.objects.filter(**kwargs)
if exclude:
sources = sources.exclude(**exclude)
# Fetch translations
translations = {
source: (state, target)
for source, state, target in sources.filter(
source__in=self.get_units().values("source")
).values_list("source", "state", "target")
}
# We need to skip mode (suggestions) filtering here as SELECT FOR UPDATE
# cannot be used with JOIN
units = (
self.get_units(False)
.filter(source__in=translations.keys())
.select_for_update()
)
self.progress_steps = len(units)
for pos, unit in enumerate(units):
# Get update
state, target = translations[unit.source]
self.set_progress(pos)
# No save if translation is same or unit does not exist
if unit.state == state and unit.target == target:
continue
# Copy translation
self.update(unit, state, target)
self.post_process()
def fetch_mt(self, engines, threshold):
"""Get the translations."""
units = self.get_units()
num_units = len(units)
engines = sorted(
engines,
key=lambda x: MACHINE_TRANSLATION_SERVICES[x].get_rank(),
reverse=True,
)
self.progress_steps = 2 * (len(engines) + num_units)
for pos, engine in enumerate(engines):
translation_service = MACHINE_TRANSLATION_SERVICES[engine]
batch_size = translation_service.batch_size
for batch_start in range(0, num_units, batch_size):
translation_service.batch_translate(
units[batch_start : batch_start + batch_size],
self.user,
threshold=threshold,
)
self.set_progress(pos * num_units + batch_start)
return {
unit.id: unit.machinery["translation"]
for unit in units
if unit.machinery["best"] >= threshold
}
def process_mt(self, engines, threshold):
"""Perform automatic translation based on machine translation."""
translations = self.fetch_mt(engines, int(threshold))
# Adjust total number to show correct progress
offset = self.progress_steps / 2
self.progress_steps = offset + len(translations)
with transaction.atomic():
# Perform the translation
for pos, unit in enumerate(
Unit.objects.filter(id__in=translations.keys())
.prefetch()
.select_for_update()
):
# Copy translation
self.update(unit, self.target_state, translations[unit.pk])
self.set_progress(offset + pos)
self.post_process()
|
import json
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from tests.async_mock import patch
async def test_get_device_config(hass, hass_client):
"""Test getting device config."""
with patch.object(config, "SECTIONS", ["automation"]):
await async_setup_component(hass, "config", {})
client = await hass_client()
def mock_read(path):
"""Mock reading data."""
return [{"id": "sun"}, {"id": "moon"}]
with patch("homeassistant.components.config._read", mock_read):
resp = await client.get("/api/config/automation/config/moon")
assert resp.status == 200
result = await resp.json()
assert result == {"id": "moon"}
async def test_update_device_config(hass, hass_client):
"""Test updating device config."""
with patch.object(config, "SECTIONS", ["automation"]):
await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = [{"id": "sun"}, {"id": "moon"}]
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
resp = await client.post(
"/api/config/automation/config/moon",
data=json.dumps({"trigger": [], "action": [], "condition": []}),
)
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
assert list(orig_data[1]) == ["id", "trigger", "condition", "action"]
assert orig_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []}
assert written[0] == orig_data
async def test_bad_formatted_automations(hass, hass_client):
"""Test that we handle automations without ID."""
with patch.object(config, "SECTIONS", ["automation"]):
await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = [
{
# No ID
"action": {"event": "hello"}
},
{"id": "moon"},
]
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
resp = await client.post(
"/api/config/automation/config/moon",
data=json.dumps({"trigger": [], "action": [], "condition": []}),
)
await hass.async_block_till_done()
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
# Verify ID added to orig_data
assert "id" in orig_data[0]
assert orig_data[1] == {"id": "moon", "trigger": [], "condition": [], "action": []}
async def test_delete_automation(hass, hass_client):
"""Test deleting an automation."""
ent_reg = await hass.helpers.entity_registry.async_get_registry()
assert await async_setup_component(
hass,
"automation",
{
"automation": [
{
"id": "sun",
"trigger": {"platform": "event", "event_type": "test_event"},
"action": {"service": "test.automation"},
},
{
"id": "moon",
"trigger": {"platform": "event", "event_type": "test_event"},
"action": {"service": "test.automation"},
},
]
},
)
assert len(ent_reg.entities) == 2
with patch.object(config, "SECTIONS", ["automation"]):
assert await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = [{"id": "sun"}, {"id": "moon"}]
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
resp = await client.delete("/api/config/automation/config/sun")
await hass.async_block_till_done()
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
assert len(written) == 1
assert written[0][0]["id"] == "moon"
assert len(ent_reg.entities) == 1
|
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from datetime import datetime, tzinfo, timedelta
from locale import getpreferredencoding
import re
import sys
DISABLE_COLOR = False
from gitless import core
SEP = (
'##########################################################################'
'######')
ENCODING = getpreferredencoding() or 'utf-8'
def puts(s='', newline=True, stream=sys.stdout.write):
if newline:
s = s + '\n'
stream(s)
# Colored strings
RED = '\033[31m'
RED_BOLD = '\033[1;31m'
GREEN = '\033[32m'
GREEN_BOLD = '\033[1;32m'
YELLOW = '\033[33m'
BLUE = '\033[34m'
MAGENTA = '\033[35m'
CYAN = '\033[36m'
CLEAR = '\033[0m'
def _color(color_code, text):
return '{0}{1}{2}'.format(color_code, text, CLEAR) if should_color() else text
def should_color():
# We only output colored lines if the coloring is enabled and we are not being
# piped or redirected
return not DISABLE_COLOR and sys.stdout.isatty()
def red(text):
return _color(RED, text)
def green(text):
return _color(GREEN, text)
def yellow(text):
return _color(YELLOW, text)
def blue(text):
return _color(BLUE, text)
def magenta(text):
return _color(MAGENTA, text)
def cyan(text):
return _color(CYAN, text)
# Stdout
def ok(text):
puts(green('✔ {0}'.format(text)))
def warn(text):
puts(yellow('! {0}'.format(text)))
def msg(text, stream=sys.stdout.write):
puts(text, stream=stream)
def exp(text, stream=sys.stdout.write):
puts(' ➜ {0}'.format(text), stream=stream)
def item(i, opt_text='', stream=sys.stdout.write):
puts(' {0}{1}'.format(i, opt_text), stream=stream)
def blank(stream=sys.stdout.write):
puts('', stream=stream)
def sep(stream=sys.stdout.write):
puts(SEP, stream=stream)
# Err
def err(text):
puts(red('✘ {0}'.format(text)), stream=sys.stderr.write)
def err_msg(text):
msg(text, stream=sys.stderr.write)
def err_exp(text):
exp(text, stream=sys.stderr.write)
def err_blank():
blank(stream=sys.stderr.write)
def err_item(i, opt_text=''):
item(i, opt_text, stream=sys.stderr.write)
# Misc
def conf_dialog(text):
"""Gets confirmation from the user.
Prints a confirmation message to stdout with the given text and waits for
user confirmation.
Args:
text: the text to include in the confirmation.
Returns:
    True if the user confirmed she wanted to continue, False otherwise.
"""
msg('{0}. Do you wish to continue? (y/N)'.format(text))
user_input = get_user_input()
return user_input and user_input[0].lower() == 'y'
def get_user_input(text='> '):
"""Python 2/3 compatible way of getting user input."""
global input
try:
# Disable pylint's redefined-builtin warning and undefined-variable
# (raw_input is undefined in python 3) error.
# pylint: disable=W0622
# pylint: disable=E0602
input = raw_input
except NameError:
pass
return input(text)
def commit_str(ci):
ci_str = StringIO()
commit(ci, compact=True, stream=ci_str.write)
return ci_str.getvalue().strip()
def commit(ci, compact=False, stream=sys.stdout.write, line_additions=0, line_deletions=0):
merge_commit = len(ci.parent_ids) > 1
color = magenta if merge_commit else yellow
if compact:
title = ci.message.splitlines()[0]
puts('{0} {1}'.format(color(str(ci.id)[:7]), title), stream=stream)
return
puts(color('Commit Id: {0}'.format(ci.id)), stream=stream)
if merge_commit:
merges_str = ' '.join(str(oid)[:7] for oid in ci.parent_ids)
puts(color('Merges: {0}'.format(merges_str)), stream=stream)
puts(
color('Author: {0} <{1}>'.format(ci.author.name, ci.author.email)),
stream=stream)
ci_author_dt = datetime.fromtimestamp(
ci.author.time, FixedOffset(ci.author.offset))
puts(color('Date: {0:%c %z}'.format(ci_author_dt)), stream=stream)
put_s = lambda num: '' if num == 1 else 's'
puts(color('Stats: {0} line{1} added, {2} line{3} removed'
.format(line_additions, put_s(line_additions),
line_deletions, put_s(line_deletions))), stream=stream)
puts(stream=stream)
puts(' {0}'.format(ci.message), stream=stream)
# Op Callbacks
def apply_ok(ci):
ok('Insertion of {0} succeeded'.format(ci.id))
blank()
commit(ci)
blank()
def apply_err(ci):
err('Insertion of {0} failed'.format(ci.id))
blank()
commit(ci)
blank()
def save():
warn('Temporarily saving uncommitted changes')
def restore_ok():
ok('Uncommitted changes applied successfully to the new head of the branch')
OP_CB = core.OpCb(apply_ok, apply_err, save, restore_ok)
class FixedOffset(tzinfo):
def __init__(self, offset):
super(FixedOffset, self).__init__()
self.__offset = timedelta(minutes=offset)
def utcoffset(self, _):
return self.__offset
def dst(self, _):
return timedelta(0)
def diff(patch, stream=sys.stdout.write):
# Diff header
old_fp = patch.delta.old_file.path
new_fp = patch.delta.new_file.path
puts('Diff of file "{0}"'.format(old_fp), stream=stream)
if old_fp != new_fp:
puts(cyan(' (renamed to {0})'.format(new_fp)), stream=stream)
puts(stream=stream)
if patch.delta.is_binary:
puts('Not showing diffs for binary file', stream=stream)
return
additions = patch.line_stats[1]
deletions = patch.line_stats[2]
if (not additions) and (not deletions):
puts('No diffs to output for file', stream=stream)
return
put_s = lambda num: '' if num == 1 else 's'
puts('{0} line{1} added'.format(additions, put_s(additions)), stream=stream)
puts('{0} line{1} removed'.format(deletions, put_s(deletions)), stream=stream)
puts(stream=stream)
# Diff body
for hunk in patch.hunks:
puts(stream=stream)
_hunk(hunk, stream=stream)
puts(stream=stream)
puts(stream=stream)
def diff_totals(total_additions, total_deletions, stream=sys.stdout.write):
put_s = lambda num: '' if num == 1 else 's'
puts('Diff summary', stream=stream)
puts('Total of {0} line{1} added'
.format(total_additions, put_s(total_additions)), stream=stream)
puts('Total of {0} line{1} removed'
.format(total_deletions, put_s(total_deletions)), stream=stream)
puts(stream=stream)
def _hunk(hunk, stream=sys.stdout.write):
puts(cyan('@@ -{0},{1} +{2},{3} @@'.format(
hunk.old_start, hunk.old_lines, hunk.new_start, hunk.new_lines)),
stream=stream)
padding = _padding(hunk)
del_line, add_line, maybe_bold, saw_add = None, None, False, False
for diff_line in hunk.lines:
st = diff_line.origin
if st == '-' and not maybe_bold:
maybe_bold = True
del_line = diff_line
elif st == '+' and maybe_bold and not saw_add:
saw_add = True
add_line = diff_line
elif st == ' ' and maybe_bold and saw_add:
bold1, bold2 = _highlight(del_line.content, add_line.content)
puts(_format_line(del_line, padding, bold_delim=bold1), stream=stream)
puts(_format_line(add_line, padding, bold_delim=bold2), stream=stream)
del_line, add_line, maybe_bold, saw_add = None, None, False, False
puts(_format_line(diff_line, padding), stream=stream)
else:
if del_line:
puts(_format_line(del_line, padding), stream=stream)
if add_line:
puts(_format_line(add_line, padding), stream=stream)
del_line, add_line, maybe_bold, saw_add = None, None, False, False
puts(_format_line(diff_line, padding), stream=stream)
if maybe_bold and saw_add:
bold1, bold2 = _highlight(del_line.content, add_line.content)
puts(_format_line(del_line, padding, bold_delim=bold1), stream=stream)
puts(_format_line(add_line, padding, bold_delim=bold2), stream=stream)
else:
if del_line:
puts(_format_line(del_line, padding), stream=stream)
if add_line:
puts(_format_line(add_line, padding), stream=stream)
def _padding(hunk):
MIN_LINE_PADDING = 8
max_line_number = max([
hunk.old_start + hunk.old_lines, hunk.new_start + hunk.new_lines])
max_line_digits = len(str(max_line_number))
return max(MIN_LINE_PADDING, max_line_digits + 1)
def _format_line(diff_line, padding, bold_delim=None):
"""Format a standard diff line.
Returns:
a padded and colored version of the diff line with line numbers
"""
if should_color():
green = GREEN
green_bold = GREEN_BOLD
red = RED
red_bold = RED_BOLD
clear = CLEAR
else:
green = ''
green_bold = ''
red = ''
red_bold = ''
clear = ''
formatted = ''
st = diff_line.origin
line = st + diff_line.content.rstrip('\n')
old_lineno = diff_line.old_lineno
new_lineno = diff_line.new_lineno
if st == ' ':
formatted = (
str(old_lineno).ljust(padding) + str(new_lineno).ljust(padding) + line)
elif st == '+':
formatted = ' ' * padding + green + str(new_lineno).ljust(padding)
if not bold_delim:
formatted += line
else:
bold_start, bold_end = bold_delim
formatted += (
line[:bold_start] + green_bold + line[bold_start:bold_end] + clear +
green + line[bold_end:])
elif st == '-':
formatted = red + str(old_lineno).ljust(padding) + ' ' * padding
if not bold_delim:
formatted += line
else:
bold_start, bold_end = bold_delim
formatted += (
line[:bold_start] + red_bold + line[bold_start:bold_end] + clear +
red + line[bold_end:])
return formatted + clear
def _highlight(line1, line2):
"""Returns the sections that should be bolded in the given lines.
Returns:
two tuples. Each tuple indicates the start and end of the section
of the line that should be bolded for line1 and line2 respectively.
"""
start1 = start2 = 0
match = re.search(r'\S', line1) # ignore leading whitespace
if match:
start1 = match.start()
match = re.search(r'\S', line2)
if match:
start2 = match.start()
length = min(len(line1), len(line2)) - 1
bold_start1 = start1
bold_start2 = start2
while (bold_start1 <= length and bold_start2 <= length and
line1[bold_start1] == line2[bold_start2]):
bold_start1 += 1
bold_start2 += 1
match = re.search(r'\s*$', line1) # ignore trailing whitespace
bold_end1 = match.start() - 1
match = re.search(r'\s*$', line2)
bold_end2 = match.start() - 1
while (bold_end1 >= bold_start1 and bold_end2 >= bold_start2 and
line1[bold_end1] == line2[bold_end2]):
bold_end1 -= 1
bold_end2 -= 1
if bold_start1 - start1 > 0 or len(line1) - 1 - bold_end1 > 0:
return (bold_start1 + 1, bold_end1 + 2), (bold_start2 + 1, bold_end2 + 2)
return None, None
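# Rough usage sketch for _highlight (illustrative, not part of the module):
# _highlight('  return foo', '  return bar') evaluates to ((10, 13), (10, 13)),
# i.e. the differing "foo"/"bar" tails; the indices are shifted by one because
# _format_line prepends the '+'/'-' origin character to the content.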
|
import os
import time
import vobject
from radicale import item as radicale_item
from radicale import pathutils
from radicale.log import logger
class CollectionGetMixin:
def __init__(self):
super().__init__()
self._item_cache_cleaned = False
def _list(self):
for entry in os.scandir(self._filesystem_path):
if not entry.is_file():
continue
href = entry.name
if not pathutils.is_safe_filesystem_path_component(href):
if not href.startswith(".Radicale"):
logger.debug("Skipping item %r in %r", href, self.path)
continue
yield href
def _get(self, href, verify_href=True):
if verify_href:
try:
if not pathutils.is_safe_filesystem_path_component(href):
raise pathutils.UnsafePathError(href)
path = pathutils.path_to_filesystem(
self._filesystem_path, href)
except ValueError as e:
logger.debug(
"Can't translate name %r safely to filesystem in %r: %s",
href, self.path, e, exc_info=True)
return None
else:
path = os.path.join(self._filesystem_path, href)
try:
with open(path, "rb") as f:
raw_text = f.read()
except (FileNotFoundError, IsADirectoryError):
return None
except PermissionError:
# Windows raises ``PermissionError`` when ``path`` is a directory
if (os.name == "nt" and
os.path.isdir(path) and os.access(path, os.R_OK)):
return None
raise
        # The hash of the component in the file system. This is used to check
        # whether the entry in the cache is still valid.
input_hash = self._item_cache_hash(raw_text)
cache_hash, uid, etag, text, name, tag, start, end = \
self._load_item_cache(href, input_hash)
if input_hash != cache_hash:
with self._acquire_cache_lock("item"):
                # Lock the item cache to prevent multiple processes from
# generating the same data in parallel.
# This improves the performance for multiple requests.
if self._storage._lock.locked == "r":
# Check if another process created the file in the meantime
cache_hash, uid, etag, text, name, tag, start, end = \
self._load_item_cache(href, input_hash)
if input_hash != cache_hash:
try:
vobject_items = tuple(vobject.readComponents(
raw_text.decode(self._encoding)))
radicale_item.check_and_sanitize_items(
vobject_items, tag=self.get_meta("tag"))
vobject_item, = vobject_items
temp_item = radicale_item.Item(
collection=self, vobject_item=vobject_item)
cache_hash, uid, etag, text, name, tag, start, end = \
self._store_item_cache(
href, temp_item, input_hash)
except Exception as e:
raise RuntimeError("Failed to load item %r in %r: %s" %
(href, self.path, e)) from e
# Clean cache entries once after the data in the file
# system was edited externally.
if not self._item_cache_cleaned:
self._item_cache_cleaned = True
self._clean_item_cache()
last_modified = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT",
time.gmtime(os.path.getmtime(path)))
# Don't keep reference to ``vobject_item``, because it requires a lot
# of memory.
return radicale_item.Item(
collection=self, href=href, last_modified=last_modified, etag=etag,
text=text, uid=uid, name=name, component_name=tag,
time_range=(start, end))
def get_multi(self, hrefs):
        # It's faster to check for file name collisions here, because
        # we only need to call os.listdir once.
files = None
for href in hrefs:
if files is None:
                # List the directory only after hrefs yields its first item;
                # the iterator may be empty, in which case the for-loop never executes.
files = os.listdir(self._filesystem_path)
path = os.path.join(self._filesystem_path, href)
if (not pathutils.is_safe_filesystem_path_component(href) or
href not in files and os.path.lexists(path)):
logger.debug(
"Can't translate name safely to filesystem: %r", href)
yield (href, None)
else:
yield (href, self._get(href, verify_href=False))
def get_all(self):
        # We don't need to check for collisions, because the file names
        # are from os.listdir.
return (self._get(href, verify_href=False) for href in self._list())
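# Sketch of the item-cache idea used in _get above (names are illustrative,
# not the real Radicale API): the raw file bytes are hashed, and the cached
# uid/etag/text/name/tag/time-range tuple is reused only while the stored
# hash still matches; on a mismatch the item is re-parsed and the cache
# entry rewritten.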
|
__all__ = ["fromstring", "parse", "convert_tree"]
import re
from lxml import etree, html
try:
from bs4 import (
BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString,
Declaration, Doctype)
_DECLARATION_OR_DOCTYPE = (Declaration, Doctype)
except ImportError:
from BeautifulSoup import (
BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString,
Declaration)
_DECLARATION_OR_DOCTYPE = Declaration
def fromstring(data, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a string of HTML data into an Element tree using the
BeautifulSoup parser.
Returns the root ``<html>`` Element of the tree.
You can pass a different BeautifulSoup parser through the
    `beautifulsoup` keyword, and a different Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
return _parse(data, beautifulsoup, makeelement, **bsargs)
def parse(file, beautifulsoup=None, makeelement=None, **bsargs):
"""Parse a file into an ElemenTree using the BeautifulSoup parser.
You can pass a different BeautifulSoup parser through the
    `beautifulsoup` keyword, and a different Element factory function
through the `makeelement` keyword. By default, the standard
``BeautifulSoup`` class and the default factory of `lxml.html` are
used.
"""
if not hasattr(file, 'read'):
file = open(file)
root = _parse(file, beautifulsoup, makeelement, **bsargs)
return etree.ElementTree(root)
def convert_tree(beautiful_soup_tree, makeelement=None):
"""Convert a BeautifulSoup tree to a list of Element trees.
Returns a list instead of a single root Element to support
HTML-like soup with more than one root element.
You can pass a different Element factory through the `makeelement`
keyword.
"""
root = _convert_tree(beautiful_soup_tree, makeelement)
children = root.getchildren()
for child in children:
root.remove(child)
return children
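# Rough usage sketch (assumes BeautifulSoup is installed; shapes are
# illustrative):
#
#     root = fromstring('<p>Hello</p>')        # an <html> Element
#     tree = parse(open('page.html'))          # an ElementTree
#     parts = convert_tree(BeautifulSoup('<p>a</p><p>b</p>', 'html.parser'))
#
# fromstring/parse wrap the soup in a single <html> root, while convert_tree
# keeps multiple top-level elements as separate Element trees.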
# helpers
def _parse(source, beautifulsoup, makeelement, **bsargs):
if beautifulsoup is None:
beautifulsoup = BeautifulSoup
if hasattr(beautifulsoup, "HTML_ENTITIES"): # bs3
if 'convertEntities' not in bsargs:
bsargs['convertEntities'] = 'html'
if hasattr(beautifulsoup, "DEFAULT_BUILDER_FEATURES"): # bs4
if 'features' not in bsargs:
bsargs['features'] = 'html.parser' # use Python html parser
tree = beautifulsoup(source, **bsargs)
root = _convert_tree(tree, makeelement)
# from ET: wrap the document in a html root element, if necessary
if len(root) == 1 and root[0].tag == "html":
return root[0]
root.tag = "html"
return root
_parse_doctype_declaration = re.compile(
r'(?:\s|[<!])*DOCTYPE\s*HTML'
r'(?:\s+PUBLIC)?(?:\s+(\'[^\']*\'|"[^"]*"))?'
r'(?:\s+(\'[^\']*\'|"[^"]*"))?',
re.IGNORECASE).match
class _PseudoTag:
# Minimal imitation of BeautifulSoup.Tag
def __init__(self, contents):
self.name = 'html'
self.attrs = []
self.contents = contents
def __iter__(self):
return self.contents.__iter__()
def _convert_tree(beautiful_soup_tree, makeelement):
if makeelement is None:
makeelement = html.html_parser.makeelement
# Split the tree into three parts:
# i) everything before the root element: document type
# declaration, comments, processing instructions, whitespace
# ii) the root(s),
# iii) everything after the root: comments, processing
# instructions, whitespace
first_element_idx = last_element_idx = None
html_root = declaration = None
for i, e in enumerate(beautiful_soup_tree):
if isinstance(e, Tag):
if first_element_idx is None:
first_element_idx = i
last_element_idx = i
if html_root is None and e.name and e.name.lower() == 'html':
html_root = e
elif declaration is None and isinstance(e, _DECLARATION_OR_DOCTYPE):
declaration = e
# For a nice, well-formatted document, the variable roots below is
# a list consisting of a single <html> element. However, the document
# may be a soup like '<meta><head><title>Hello</head><body>Hi
# all<\p>'. In this example roots is a list containing meta, head
# and body elements.
if first_element_idx is None:
pre_root = post_root = []
roots = beautiful_soup_tree.contents
else:
pre_root = beautiful_soup_tree.contents[:first_element_idx]
roots = beautiful_soup_tree.contents[first_element_idx:last_element_idx+1]
post_root = beautiful_soup_tree.contents[last_element_idx+1:]
# Reorganize so that there is one <html> root...
if html_root is not None:
# ... use existing one if possible, ...
i = roots.index(html_root)
html_root.contents = roots[:i] + html_root.contents + roots[i+1:]
else:
# ... otherwise create a new one.
html_root = _PseudoTag(roots)
convert_node = _init_node_converters(makeelement)
# Process pre_root
res_root = convert_node(html_root)
prev = res_root
for e in reversed(pre_root):
converted = convert_node(e)
if converted is not None:
prev.addprevious(converted)
prev = converted
# ditto for post_root
prev = res_root
for e in post_root:
converted = convert_node(e)
if converted is not None:
prev.addnext(converted)
prev = converted
if declaration is not None:
try:
# bs4 provides full Doctype string
doctype_string = declaration.output_ready()
except AttributeError:
doctype_string = declaration.string
match = _parse_doctype_declaration(doctype_string)
if not match:
# Something is wrong if we end up in here. Since soupparser should
# tolerate errors, do not raise Exception, just let it pass.
pass
else:
external_id, sys_uri = match.groups()
docinfo = res_root.getroottree().docinfo
# strip quotes and update DOCTYPE values (any of None, '', '...')
docinfo.public_id = external_id and external_id[1:-1]
docinfo.system_url = sys_uri and sys_uri[1:-1]
return res_root
def _init_node_converters(makeelement):
converters = {}
ordered_node_types = []
def converter(*types):
def add(handler):
for t in types:
converters[t] = handler
ordered_node_types.append(t)
return handler
return add
def find_best_converter(node):
for t in ordered_node_types:
if isinstance(node, t):
return converters[t]
return None
def convert_node(bs_node, parent=None):
# duplicated in convert_tag() below
try:
handler = converters[type(bs_node)]
except KeyError:
handler = converters[type(bs_node)] = find_best_converter(bs_node)
if handler is None:
return None
return handler(bs_node, parent)
def map_attrs(bs_attrs):
if isinstance(bs_attrs, dict): # bs4
attribs = {}
for k, v in bs_attrs.items():
if isinstance(v, list):
v = " ".join(v)
attribs[k] = unescape(v)
else:
attribs = dict((k, unescape(v)) for k, v in bs_attrs)
return attribs
def append_text(parent, text):
if len(parent) == 0:
parent.text = (parent.text or '') + text
else:
parent[-1].tail = (parent[-1].tail or '') + text
# converters are tried in order of their definition
@converter(Tag, _PseudoTag)
def convert_tag(bs_node, parent):
attrs = bs_node.attrs
if parent is not None:
attribs = map_attrs(attrs) if attrs else None
res = etree.SubElement(parent, bs_node.name, attrib=attribs)
else:
attribs = map_attrs(attrs) if attrs else {}
res = makeelement(bs_node.name, attrib=attribs)
for child in bs_node:
# avoid double recursion by inlining convert_node(), see above
try:
handler = converters[type(child)]
except KeyError:
pass
else:
if handler is not None:
handler(child, res)
continue
convert_node(child, res)
return res
@converter(Comment)
def convert_comment(bs_node, parent):
res = html.HtmlComment(bs_node)
if parent is not None:
parent.append(res)
return res
@converter(ProcessingInstruction)
def convert_pi(bs_node, parent):
if bs_node.endswith('?'):
# The PI is of XML style (<?as df?>) but BeautifulSoup
# interpreted it as being SGML style (<?as df>). Fix.
bs_node = bs_node[:-1]
res = etree.ProcessingInstruction(*bs_node.split(' ', 1))
if parent is not None:
parent.append(res)
return res
@converter(NavigableString)
def convert_text(bs_node, parent):
if parent is not None:
append_text(parent, unescape(bs_node))
return None
return convert_node
# copied from ET's ElementSoup
try:
from html.entities import name2codepoint # Python 3
except ImportError:
from htmlentitydefs import name2codepoint
handle_entities = re.compile(r"&(\w+);").sub
try:
unichr
except NameError:
# Python 3
unichr = chr
def unescape(string):
if not string:
return ''
# work around oddities in BeautifulSoup's entity handling
def unescape_entity(m):
try:
return unichr(name2codepoint[m.group(1)])
except KeyError:
return m.group(0) # use as is
return handle_entities(unescape_entity, string)
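# Small sketch of what unescape handles (illustrative): unescape('&amp; &copy;')
# maps named entities through name2codepoint, giving '& ©', while unknown
# entities such as '&bogus;' are left untouched.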
|
import asyncio
import logging
import async_timeout
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_API_KEY, HTTP_OK
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://api.prowlapp.com/publicapi/"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_API_KEY): cv.string})
async def async_get_service(hass, config, discovery_info=None):
"""Get the Prowl notification service."""
return ProwlNotificationService(hass, config[CONF_API_KEY])
class ProwlNotificationService(BaseNotificationService):
"""Implement the notification service for Prowl."""
def __init__(self, hass, api_key):
"""Initialize the service."""
self._hass = hass
self._api_key = api_key
async def async_send_message(self, message, **kwargs):
"""Send the message to the user."""
response = None
session = None
url = f"{_RESOURCE}add"
data = kwargs.get(ATTR_DATA)
payload = {
"apikey": self._api_key,
"application": "Home-Assistant",
"event": kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
"description": message,
"priority": data["priority"] if data and "priority" in data else 0,
}
_LOGGER.debug("Attempting call Prowl service at %s", url)
session = async_get_clientsession(self._hass)
try:
with async_timeout.timeout(10):
response = await session.post(url, data=payload)
result = await response.text()
if response.status != HTTP_OK or "error" in result:
_LOGGER.error(
"Prowl service returned http status %d, response %s",
response.status,
result,
)
except asyncio.TimeoutError:
_LOGGER.error("Timeout accessing Prowl at %s", url)
|
import numpy as np
import unittest
import chainer
from chainercv.utils import assert_is_detection_link
from chainercv.utils import generate_random_bbox
from chainercv.utils import testing
class DetectionLink(chainer.Link):
def predict(self, imgs):
bboxes = []
labels = []
scores = []
for img in imgs:
n_bbox = np.random.randint(1, 10)
bboxes.append(generate_random_bbox(
n_bbox, img.shape[1:], 4, 12))
labels.append(np.random.randint(
0, 20, size=n_bbox).astype(np.int32))
scores.append(np.random.uniform(
0, 1, size=n_bbox).astype(np.float32))
return bboxes, labels, scores
class InvalidPredictionSizeLink(DetectionLink):
def predict(self, imgs):
bboxes, labels, scores = super(
InvalidPredictionSizeLink, self).predict(imgs)
return bboxes[1:], labels[1:], scores[1:]
class InvalidLabelSizeLink(DetectionLink):
def predict(self, imgs):
bboxes, labels, scores = super(
InvalidLabelSizeLink, self).predict(imgs)
return bboxes, [label[1:] for label in labels], scores
class InvalidLabelValueLink(DetectionLink):
def predict(self, imgs):
bboxes, labels, scores = super(
InvalidLabelValueLink, self).predict(imgs)
return bboxes, [label + 1000 for label in labels], scores
class InvalidScoreSizeLink(DetectionLink):
def predict(self, imgs):
bboxes, labels, scores = super(
InvalidScoreSizeLink, self).predict(imgs)
return bboxes, labels, [score[1:] for score in scores]
@testing.parameterize(
{'link': DetectionLink(), 'valid': True},
{'link': InvalidPredictionSizeLink(), 'valid': False},
{'link': InvalidLabelSizeLink(), 'valid': False},
{'link': InvalidLabelValueLink(), 'valid': False},
{'link': InvalidScoreSizeLink(), 'valid': False},
)
class TestAssertIsDetectionLink(unittest.TestCase):
def test_assert_is_detection_link(self):
if self.valid:
assert_is_detection_link(self.link, 20)
else:
with self.assertRaises(AssertionError):
assert_is_detection_link(self.link, 20)
testing.run_module(__name__, __file__)
|
from flexx import flx
from pscript import RawJS
from pscript.stubs import Math, d3, window
flx.assets.associate_asset(__name__, 'https://d3js.org/d3.v3.min.js')
class CollisionWidget(flx.Widget):
""" A widget showing a collision demo based on D3.
"""
CSS = """
.flx-CollisionWidget {
background: #fff;
border: 1px solid black;
border-radius: 6px;
}
"""
def init(self):
self.node.id = self.id
window.setTimeout(self.load_viz, 500)
@flx.reaction
def _resize(self):
w, h = self.size
if len(self.node.children) > 0:
svg = self.node.children[0]
svg.setAttribute('width', w)
svg.setAttribute('height', h)
def load_viz(self):
w, h = self.size
nodes = d3.range(200).map(lambda: {'radius': Math.random() * 12 + 4})
color = d3.scale.category10()
force = d3.layout.force().gravity(0.05).charge(lambda d, i: 0 if i else -2000)\
.nodes(nodes).size([w, h])
root = nodes[0]
root.radius = 0
root.fixed = True
force.start()
x = d3.select('#' + self.id)
print(x, self.id)
svg = RawJS('x.append("svg").attr("width", w).attr("height", h)')
x = RawJS(
'svg.selectAll("circle").data(nodes.slice(1)).enter().append("circle")')
x.attr("r", lambda d: d.radius).style("fill", lambda d, i: color(i % 3))
def on_tick(e):
q = d3.geom.quadtree(nodes)
i = 0
n = nodes.length
while i < n-1:
i += 1
q.visit(collide(nodes[i]))
svg.selectAll("circle").attr("cx", lambda d: d.x).attr("cy", lambda d: d.y)
force.on("tick", on_tick)
def on_mousemove():
p1 = d3.mouse(self.node)
root.px = p1[0]
root.py = p1[1]
force.resume()
svg.on("mousemove", on_mousemove)
def collide(node):
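            # Returns a d3 quadtree visitor: it nudges 'node' and any
            # overlapping circle apart, and prunes quadtree cells whose
            # bounding box cannot intersect the padded box around 'node'.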
r = node.radius + 16
nx1 = node.x - r
nx2 = node.x + r
ny1 = node.y - r
ny2 = node.y + r
def func(quad, x1, y1, x2, y2):
if quad.point and quad.point is not node:
x = node.x - quad.point.x
y = node.y - quad.point.y
s = Math.sqrt(x * x + y * y)
r = node.radius + quad.point.radius
if (s < r):
s = (s - r) / s * .5
x *= s
y *= s
node.x -= x
node.y -= y
quad.point.x += x
quad.point.y += y
return x1 > nx2 or x2 < nx1 or y1 > ny2 or y2 < ny1
return func
class CollisionDemo(flx.Widget):
def init(self):
with flx.VSplit():
with flx.HSplit():
CollisionWidget()
CollisionWidget()
with flx.HSplit():
CollisionWidget()
CollisionWidget()
if __name__ == '__main__':
flx.launch(CollisionDemo, 'app')
flx.run()
|
import json
import logging
import numpy as np
import os
import io
from ...common.interfaces import AbstractPlugin,\
MonitoringDataListener, AggregateResultListener
logger = logging.getLogger(__name__) # pylint: disable=C0103
class NumpyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.generic):
return obj.item()
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
return super(NumpyEncoder, self).default(obj)
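# Illustrative usage (a sketch): json.dumps({"p95": np.float64(0.25)},
# cls=NumpyEncoder) serializes the numpy scalar via .item() as a plain float,
# and numpy arrays are emitted as JSON lists via .tolist().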
class Plugin(AbstractPlugin, AggregateResultListener, MonitoringDataListener):
# pylint:disable=R0902
SECTION = 'json_report'
def __init__(self, core, cfg, name):
super(Plugin, self).__init__(core, cfg, name)
self.monitoring_stream = io.open(os.path.join(self.core.artifacts_dir,
self.get_option('monitoring_log')),
mode='wb')
self.data_and_stats_stream = io.open(os.path.join(self.core.artifacts_dir,
self.get_option('test_data_log')),
mode='wb')
self._is_telegraf = None
def get_available_options(self):
return ['monitoring_log', 'test_data_log']
def configure(self):
self.core.job.subscribe_plugin(self)
def on_aggregated_data(self, data, stats):
"""
@data: aggregated data
@stats: stats about gun
"""
json_string = json.dumps({
'data': data,
'stats': stats
}, cls=NumpyEncoder)
self.data_and_stats_stream.write('{}\n'.format(json_string).encode('utf-8'))
def monitoring_data(self, data_list):
if self.is_telegraf:
monitoring_data = '{}\n'.format(json.dumps(data_list)).encode('utf-8')
self.monitoring_stream.write(monitoring_data)
else:
[
self.monitoring_stream.write('{}\n'.format(data.strip()).encode('utf-8')) for data in data_list
if data
]
def post_process(self, retcode):
self.data_and_stats_stream.close()
self.monitoring_stream.close()
return retcode
@property
def is_telegraf(self):
return True
|
from datetime import datetime
import json
from typing import Any
class JSONEncoder(json.JSONEncoder):
"""JSONEncoder that supports Home Assistant objects."""
def default(self, o: Any) -> Any:
"""Convert Home Assistant objects.
Hand other objects to the original method.
"""
if isinstance(o, datetime):
return o.isoformat()
if isinstance(o, set):
return list(o)
if hasattr(o, "as_dict"):
return o.as_dict()
return json.JSONEncoder.default(self, o)
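# Illustrative usage (a sketch, standard library only):
# json.dumps({"when": datetime(2020, 1, 1), "tags": {"a"}}, cls=JSONEncoder)
# emits the datetime via isoformat() and the set as a JSON list.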
|
from homeassistant.const import CONF_NAME
from homeassistant.helpers.entity import Entity
from homeassistant.util import get_local_ip
from .const import DOMAIN, SENSOR
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the platform from config_entry."""
name = config_entry.data.get(CONF_NAME) or DOMAIN
async_add_entities([IPSensor(name)], True)
class IPSensor(Entity):
"""A simple sensor."""
def __init__(self, name):
"""Initialize the sensor."""
self._state = None
self._name = name
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return SENSOR
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Return the icon of the sensor."""
return "mdi:ip"
def update(self):
"""Fetch new state data for the sensor."""
self._state = get_local_ip()
|
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import DOMAIN as WIRELESSTAG_DOMAIN, WirelessTagBaseSensor
ARM_TEMPERATURE = "temperature"
ARM_HUMIDITY = "humidity"
ARM_MOTION = "motion"
ARM_LIGHT = "light"
ARM_MOISTURE = "moisture"
# Switch types: Name, tag sensor type
SWITCH_TYPES = {
ARM_TEMPERATURE: ["Arm Temperature", "temperature"],
ARM_HUMIDITY: ["Arm Humidity", "humidity"],
ARM_MOTION: ["Arm Motion", "motion"],
ARM_LIGHT: ["Arm Light", "light"],
ARM_MOISTURE: ["Arm Moisture", "moisture"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(SWITCH_TYPES)]
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up switches for a Wireless Sensor Tags."""
platform = hass.data.get(WIRELESSTAG_DOMAIN)
switches = []
tags = platform.load_tags()
for switch_type in config.get(CONF_MONITORED_CONDITIONS):
for tag in tags.values():
if switch_type in tag.allowed_monitoring_types:
switches.append(WirelessTagSwitch(platform, tag, switch_type))
add_entities(switches, True)
class WirelessTagSwitch(WirelessTagBaseSensor, SwitchEntity):
"""A switch implementation for Wireless Sensor Tags."""
def __init__(self, api, tag, switch_type):
"""Initialize a switch for Wireless Sensor Tag."""
super().__init__(api, tag)
self._switch_type = switch_type
self.sensor_type = SWITCH_TYPES[self._switch_type][1]
self._name = f"{self._tag.name} {SWITCH_TYPES[self._switch_type][0]}"
def turn_on(self, **kwargs):
"""Turn on the switch."""
self._api.arm(self)
def turn_off(self, **kwargs):
"""Turn on the switch."""
self._api.disarm(self)
@property
def is_on(self) -> bool:
"""Return True if entity is on."""
return self._state
def updated_state_value(self):
"""Provide formatted value."""
return self.principal_value
@property
def principal_value(self):
"""Provide actual value of switch."""
attr_name = f"is_{self.sensor_type}_sensor_armed"
return getattr(self._tag, attr_name, False)
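# Illustrative note: with sensor_type == "motion" the property above reads the
# tag attribute "is_motion_sensor_armed" (following the f-string pattern) and
# falls back to False when the attribute is missing.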
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN
from .devolo_multi_level_switch import DevoloMultiLevelSwitchDeviceEntity
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Get all light devices and setup them via config entry."""
entities = []
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
for device in gateway.multi_level_switch_devices:
for multi_level_switch in device.multi_level_switch_property.values():
if multi_level_switch.switch_type == "dimmer":
entities.append(
DevoloLightDeviceEntity(
homecontrol=gateway,
device_instance=device,
element_uid=multi_level_switch.element_uid,
)
)
async_add_entities(entities, False)
class DevoloLightDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, LightEntity):
"""Representation of a light within devolo Home Control."""
def __init__(self, homecontrol, device_instance, element_uid):
"""Initialize a devolo multi level switch."""
super().__init__(
homecontrol=homecontrol,
device_instance=device_instance,
element_uid=element_uid,
)
self._binary_switch_property = device_instance.binary_switch_property.get(
element_uid.replace("Dimmer", "BinarySwitch")
)
@property
def brightness(self):
"""Return the brightness value of the light."""
return round(self._value / 100 * 255)
@property
def is_on(self):
"""Return the state of the light."""
return bool(self._value)
@property
def supported_features(self):
"""Return the supported features."""
return SUPPORT_BRIGHTNESS
def turn_on(self, **kwargs) -> None:
"""Turn device on."""
if kwargs.get(ATTR_BRIGHTNESS) is not None:
self._multi_level_switch_property.set(
round(kwargs[ATTR_BRIGHTNESS] / 255 * 100)
)
else:
if self._binary_switch_property is not None:
# Turn on the light device to the latest known value. The value is known by the device itself.
self._binary_switch_property.set(True)
else:
# If there is no binary switch attached to the device, turn it on to 100 %.
self._multi_level_switch_property.set(100)
def turn_off(self, **kwargs) -> None:
"""Turn device off."""
if self._binary_switch_property is not None:
self._binary_switch_property.set(False)
else:
self._multi_level_switch_property.set(0)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import re
import string
from absl import flags
from perfkitbenchmarker import context
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
import six
AWS_PATH = 'aws'
AWS_PREFIX = [AWS_PATH, '--output', 'json']
FLAGS = flags.FLAGS
STOCKOUT_MESSAGE = ('Creation failed due to insufficient capacity indicating a '
'potential stockout scenario.')
def IsRegion(zone_or_region):
"""Returns whether "zone_or_region" is a region."""
if not re.match(r'[a-z]{2}-[a-z]+-[0-9][a-z]?$', zone_or_region):
raise ValueError(
'%s is not a valid AWS zone or region name' % zone_or_region)
return zone_or_region[-1] in string.digits
def GetRegionFromZone(zone_or_region):
"""Returns the region a zone is in (or "zone_or_region" if it's a region)."""
if IsRegion(zone_or_region):
return zone_or_region
return zone_or_region[:-1]
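# Illustrative examples (a sketch): GetRegionFromZone('us-east-1a') returns
# 'us-east-1', while GetRegionFromZone('us-east-1') is returned unchanged
# because IsRegion recognises the trailing digit.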
def GetRegionFromZones(zones):
"""Returns the region a set of zones are in.
Args:
zones: A set of zones.
Raises:
Exception: if the zones are in different regions.
"""
region = None
for zone in zones:
current_region = GetRegionFromZone(zone)
if region is None:
region = current_region
else:
if region != current_region:
        raise Exception('Not all zones are in the same region: %s is not the '
                        'same as %s. zones: %s' %
                        (region, current_region, ','.join(zones)))
return region
def GetZonesInRegion(region):
"""Returns all available zones in a given region."""
get_zones_cmd = AWS_PREFIX + [
'ec2',
'describe-availability-zones',
'--region={0}'.format(region)
]
stdout, _, _ = vm_util.IssueCommand(get_zones_cmd)
response = json.loads(stdout)
zones = [item['ZoneName'] for item in response['AvailabilityZones']
if item['State'] == 'available']
return zones
def GroupZonesIntoRegions(zones):
"""Returns a map of regions to zones."""
regions_to_zones_map = collections.defaultdict(set)
for zone in zones:
region = GetRegionFromZone(zone)
regions_to_zones_map[region].add(zone)
return regions_to_zones_map
def EksZonesValidator(value):
"""Validates that the value is a single region or a list of zones."""
if not value:
return True
if len(value) == 1:
return IsRegion(value[0])
if any(IsRegion(zone) for zone in value):
return False
region = GetRegionFromZone(value[0])
if any(GetRegionFromZone(zone) != region for zone in value):
return False
return True
def FormatTags(tags_dict):
"""Format a dict of tags into arguments for 'tag' parameter.
Args:
tags_dict: Tags to be formatted.
Returns:
A list of tags formatted as arguments for 'tag' parameter.
"""
return [
'Key=%s,Value=%s' % (k, v) for k, v in sorted(six.iteritems(tags_dict))
]
def FormatTagSpecifications(resource_type, tags_dict):
"""Format a dict of tags into arguments for 'tag-specifications' parameter.
Args:
resource_type: resource type to be tagged.
tags_dict: Tags to be formatted.
Returns:
A list of tags formatted as arguments for 'tag-specifications' parameter.
"""
tags = ','.join('{Key=%s,Value=%s}' %
(k, v) for k, v in six.iteritems(tags_dict))
return 'ResourceType=%s,Tags=[%s]' % (resource_type, tags)
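# Illustrative output (a sketch): FormatTags({'owner': 'pkb'}) yields
# ['Key=owner,Value=pkb'], and FormatTagSpecifications('instance',
# {'owner': 'pkb'}) yields 'ResourceType=instance,Tags=[{Key=owner,Value=pkb}]'.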
def AddTags(resource_id, region, **kwargs):
"""Adds tags to an AWS resource created by PerfKitBenchmarker.
Args:
resource_id: An extant AWS resource to operate on.
region: The AWS region 'resource_id' was created in.
**kwargs: dict. Key-value pairs to set on the instance.
"""
if not kwargs:
return
tag_cmd = AWS_PREFIX + [
'ec2',
'create-tags',
'--region=%s' % region,
'--resources', resource_id,
'--tags'] + FormatTags(kwargs)
IssueRetryableCommand(tag_cmd)
def MakeDefaultTags(timeout_minutes=None):
"""Default tags for an AWS resource created by PerfKitBenchmarker.
Args:
timeout_minutes: Timeout used for setting the timeout_utc tag.
Returns:
Dict of default tags, contributed from the benchmark spec.
"""
benchmark_spec = context.GetThreadBenchmarkSpec()
if not benchmark_spec:
return {}
return benchmark_spec.GetResourceTags(timeout_minutes=timeout_minutes)
def MakeFormattedDefaultTags(timeout_minutes=None):
"""Get the default tags formatted correctly for --tags parameter."""
return FormatTags(MakeDefaultTags(timeout_minutes=timeout_minutes))
def AddDefaultTags(resource_id, region):
"""Adds tags to an AWS resource created by PerfKitBenchmarker.
By default, resources are tagged with "owner" and "perfkitbenchmarker-run"
key-value
pairs.
Args:
resource_id: An extant AWS resource to operate on.
region: The AWS region 'resource_id' was created in.
"""
tags = MakeDefaultTags()
AddTags(resource_id, region, **tags)
def GetAccount():
"""Retrieve details about the current IAM identity.
http://docs.aws.amazon.com/cli/latest/reference/sts/get-caller-identity.html
Returns:
A string of the AWS account ID number of the account that owns or contains
the calling entity.
"""
cmd = AWS_PREFIX + ['sts', 'get-caller-identity']
stdout, _, _ = vm_util.IssueCommand(cmd)
return json.loads(stdout)['Account']
@vm_util.Retry()
def IssueRetryableCommand(cmd, env=None, suppress_failure=None):
"""Tries running the provided command until it succeeds or times out.
On Windows, the AWS CLI doesn't correctly set the return code when it
has an error (at least on version 1.7.28). By retrying the command if
we get output on stderr, we can work around this issue.
Args:
cmd: A list of strings such as is given to the subprocess.Popen()
constructor.
env: An alternate environment to pass to the Popen command.
suppress_failure: A function to pass to vm_util.IssueCommand()
Returns:
A tuple of stdout and stderr from running the provided command.
"""
stdout, stderr, retcode = vm_util.IssueCommand(
cmd, env=env, raise_on_failure=False, suppress_failure=suppress_failure)
if retcode:
raise errors.VmUtil.CalledProcessException(
'Command returned a non-zero exit code.\n')
if stderr:
raise errors.VmUtil.CalledProcessException(
'The command had output on stderr:\n%s' % stderr)
return stdout, stderr
def AwsFilter(filter_keys_and_values):
"""Returns a list suitable for an AWS command line filter.
Example:
AwsFilter({'a': 'b', 'c': 'd'}) returns a three element array:
['--filters', 'Name=a,Values=b', 'Name=c,Values=d']
For an example see
https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html#options
Args:
filter_keys_and_values: A dict with the key as the name of the AWS attribute
and the value is the value of that attribute
"""
filters = ['--filters']
for name, value in sorted(filter_keys_and_values.items()):
filters.append('Name={},Values={}'.format(name, value))
return filters
|
from datetime import timedelta
import logging
import os
import voluptuous as vol
from homeassistant.auth.const import GROUP_ID_ADMIN
from homeassistant.components.homeassistant import SERVICE_CHECK_CONFIG
import homeassistant.config as conf_util
from homeassistant.const import (
ATTR_NAME,
EVENT_CORE_CONFIG_UPDATE,
SERVICE_HOMEASSISTANT_RESTART,
SERVICE_HOMEASSISTANT_STOP,
)
from homeassistant.core import DOMAIN as HASS_DOMAIN, callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import bind_hass
from homeassistant.util.dt import utcnow
from .addon_panel import async_setup_addon_panel
from .auth import async_setup_auth_view
from .discovery import async_setup_discovery_view
from .handler import HassIO, HassioAPIError, api_data
from .http import HassIOView
from .ingress import async_setup_ingress_view
_LOGGER = logging.getLogger(__name__)
DOMAIN = "hassio"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CONF_FRONTEND_REPO = "development_repo"
CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.Schema({vol.Optional(CONF_FRONTEND_REPO): cv.isdir})},
extra=vol.ALLOW_EXTRA,
)
DATA_INFO = "hassio_info"
DATA_HOST_INFO = "hassio_host_info"
DATA_CORE_INFO = "hassio_core_info"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=55)
SERVICE_ADDON_START = "addon_start"
SERVICE_ADDON_STOP = "addon_stop"
SERVICE_ADDON_RESTART = "addon_restart"
SERVICE_ADDON_STDIN = "addon_stdin"
SERVICE_HOST_SHUTDOWN = "host_shutdown"
SERVICE_HOST_REBOOT = "host_reboot"
SERVICE_SNAPSHOT_FULL = "snapshot_full"
SERVICE_SNAPSHOT_PARTIAL = "snapshot_partial"
SERVICE_RESTORE_FULL = "restore_full"
SERVICE_RESTORE_PARTIAL = "restore_partial"
ATTR_ADDON = "addon"
ATTR_INPUT = "input"
ATTR_SNAPSHOT = "snapshot"
ATTR_ADDONS = "addons"
ATTR_FOLDERS = "folders"
ATTR_HOMEASSISTANT = "homeassistant"
ATTR_PASSWORD = "password"
SCHEMA_NO_DATA = vol.Schema({})
SCHEMA_ADDON = vol.Schema({vol.Required(ATTR_ADDON): cv.slug})
SCHEMA_ADDON_STDIN = SCHEMA_ADDON.extend(
{vol.Required(ATTR_INPUT): vol.Any(dict, cv.string)}
)
SCHEMA_SNAPSHOT_FULL = vol.Schema(
{vol.Optional(ATTR_NAME): cv.string, vol.Optional(ATTR_PASSWORD): cv.string}
)
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend(
{
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.string]),
}
)
SCHEMA_RESTORE_FULL = vol.Schema(
{vol.Required(ATTR_SNAPSHOT): cv.slug, vol.Optional(ATTR_PASSWORD): cv.string}
)
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): cv.boolean,
vol.Optional(ATTR_FOLDERS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_ADDONS): vol.All(cv.ensure_list, [cv.string]),
}
)
MAP_SERVICE_API = {
SERVICE_ADDON_START: ("/addons/{addon}/start", SCHEMA_ADDON, 60, False),
SERVICE_ADDON_STOP: ("/addons/{addon}/stop", SCHEMA_ADDON, 60, False),
SERVICE_ADDON_RESTART: ("/addons/{addon}/restart", SCHEMA_ADDON, 60, False),
SERVICE_ADDON_STDIN: ("/addons/{addon}/stdin", SCHEMA_ADDON_STDIN, 60, False),
SERVICE_HOST_SHUTDOWN: ("/host/shutdown", SCHEMA_NO_DATA, 60, False),
SERVICE_HOST_REBOOT: ("/host/reboot", SCHEMA_NO_DATA, 60, False),
SERVICE_SNAPSHOT_FULL: ("/snapshots/new/full", SCHEMA_SNAPSHOT_FULL, 300, True),
SERVICE_SNAPSHOT_PARTIAL: (
"/snapshots/new/partial",
SCHEMA_SNAPSHOT_PARTIAL,
300,
True,
),
SERVICE_RESTORE_FULL: (
"/snapshots/{snapshot}/restore/full",
SCHEMA_RESTORE_FULL,
300,
True,
),
SERVICE_RESTORE_PARTIAL: (
"/snapshots/{snapshot}/restore/partial",
SCHEMA_RESTORE_PARTIAL,
300,
True,
),
}
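# Each entry above maps a Home Assistant service to a tuple of
# (API path template, request schema, timeout in seconds,
#  whether to pass the remaining service data as the payload).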
@bind_hass
async def async_get_addon_info(hass: HomeAssistantType, slug: str) -> dict:
"""Return add-on info.
The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DOMAIN]
return await hassio.get_addon_info(slug)
@bind_hass
@api_data
async def async_install_addon(hass: HomeAssistantType, slug: str) -> dict:
"""Install add-on.
The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DOMAIN]
command = f"/addons/{slug}/install"
return await hassio.send_command(command)
@bind_hass
@api_data
async def async_uninstall_addon(hass: HomeAssistantType, slug: str) -> dict:
"""Uninstall add-on.
The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DOMAIN]
command = f"/addons/{slug}/uninstall"
return await hassio.send_command(command)
@bind_hass
@api_data
async def async_start_addon(hass: HomeAssistantType, slug: str) -> dict:
"""Start add-on.
The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DOMAIN]
command = f"/addons/{slug}/start"
return await hassio.send_command(command)
@bind_hass
@api_data
async def async_stop_addon(hass: HomeAssistantType, slug: str) -> dict:
"""Stop add-on.
The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DOMAIN]
command = f"/addons/{slug}/stop"
return await hassio.send_command(command)
@bind_hass
@api_data
async def async_set_addon_options(
hass: HomeAssistantType, slug: str, options: dict
) -> dict:
"""Set add-on options.
The caller of the function should handle HassioAPIError.
"""
hassio = hass.data[DOMAIN]
command = f"/addons/{slug}/options"
return await hassio.send_command(command, payload=options)
@callback
@bind_hass
def get_info(hass):
"""Return generic information from Supervisor.
Async friendly.
"""
return hass.data.get(DATA_INFO)
@callback
@bind_hass
def get_host_info(hass):
"""Return generic host information.
Async friendly.
"""
return hass.data.get(DATA_HOST_INFO)
@callback
@bind_hass
def get_core_info(hass):
"""Return Home Assistant Core information from Supervisor.
Async friendly.
"""
return hass.data.get(DATA_CORE_INFO)
@callback
@bind_hass
def is_hassio(hass):
"""Return true if Hass.io is loaded.
Async friendly.
"""
return DOMAIN in hass.config.components
@callback
def get_supervisor_ip():
"""Return the supervisor ip address."""
if "SUPERVISOR" not in os.environ:
return None
return os.environ["SUPERVISOR"].partition(":")[0]
async def async_setup(hass, config):
"""Set up the Hass.io component."""
# Check local setup
for env in ("HASSIO", "HASSIO_TOKEN"):
if os.environ.get(env):
continue
_LOGGER.error("Missing %s environment variable", env)
return False
host = os.environ["HASSIO"]
websession = hass.helpers.aiohttp_client.async_get_clientsession()
hass.data[DOMAIN] = hassio = HassIO(hass.loop, websession, host)
if not await hassio.is_connected():
_LOGGER.warning("Not connected with Hass.io / system too busy!")
store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
data = await store.async_load()
if data is None:
data = {}
refresh_token = None
if "hassio_user" in data:
user = await hass.auth.async_get_user(data["hassio_user"])
if user and user.refresh_tokens:
refresh_token = list(user.refresh_tokens.values())[0]
# Migrate old Hass.io users to be admin.
if not user.is_admin:
await hass.auth.async_update_user(user, group_ids=[GROUP_ID_ADMIN])
if refresh_token is None:
user = await hass.auth.async_create_system_user("Hass.io", [GROUP_ID_ADMIN])
refresh_token = await hass.auth.async_create_refresh_token(user)
data["hassio_user"] = user.id
await store.async_save(data)
# This overrides the normal API call that would be forwarded
development_repo = config.get(DOMAIN, {}).get(CONF_FRONTEND_REPO)
if development_repo is not None:
hass.http.register_static_path(
"/api/hassio/app", os.path.join(development_repo, "hassio/build"), False
)
hass.http.register_view(HassIOView(host, websession))
await hass.components.panel_custom.async_register_panel(
frontend_url_path="hassio",
webcomponent_name="hassio-main",
sidebar_title="Supervisor",
sidebar_icon="hass:home-assistant",
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=True,
)
await hassio.update_hass_api(config.get("http", {}), refresh_token)
last_timezone = None
async def push_config(_):
"""Push core config to Hass.io."""
nonlocal last_timezone
new_timezone = str(hass.config.time_zone)
if new_timezone == last_timezone:
return
last_timezone = new_timezone
await hassio.update_hass_timezone(new_timezone)
hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, push_config)
await push_config(None)
async def async_service_handler(service):
"""Handle service calls for Hass.io."""
api_command = MAP_SERVICE_API[service.service][0]
data = service.data.copy()
addon = data.pop(ATTR_ADDON, None)
snapshot = data.pop(ATTR_SNAPSHOT, None)
payload = None
# Pass data to Hass.io API
if service.service == SERVICE_ADDON_STDIN:
payload = data[ATTR_INPUT]
elif MAP_SERVICE_API[service.service][3]:
payload = data
# Call API
try:
await hassio.send_command(
api_command.format(addon=addon, snapshot=snapshot),
payload=payload,
timeout=MAP_SERVICE_API[service.service][2],
)
except HassioAPIError as err:
_LOGGER.error("Error on Hass.io API: %s", err)
for service, settings in MAP_SERVICE_API.items():
hass.services.async_register(
DOMAIN, service, async_service_handler, schema=settings[1]
)
async def update_info_data(now):
"""Update last available supervisor information."""
try:
hass.data[DATA_INFO] = await hassio.get_info()
hass.data[DATA_HOST_INFO] = await hassio.get_host_info()
hass.data[DATA_CORE_INFO] = await hassio.get_core_info()
except HassioAPIError as err:
_LOGGER.warning("Can't read last version: %s", err)
hass.helpers.event.async_track_point_in_utc_time(
update_info_data, utcnow() + HASSIO_UPDATE_INTERVAL
)
# Fetch last version
await update_info_data(None)
async def async_handle_core_service(call):
"""Service handler for handling core services."""
if call.service == SERVICE_HOMEASSISTANT_STOP:
await hassio.stop_homeassistant()
return
try:
errors = await conf_util.async_check_ha_config_file(hass)
except HomeAssistantError:
return
if errors:
_LOGGER.error(errors)
hass.components.persistent_notification.async_create(
"Config error. See [the logs](/config/logs) for details.",
"Config validating",
f"{HASS_DOMAIN}.check_config",
)
return
if call.service == SERVICE_HOMEASSISTANT_RESTART:
await hassio.restart_homeassistant()
# Mock core services
for service in (
SERVICE_HOMEASSISTANT_STOP,
SERVICE_HOMEASSISTANT_RESTART,
SERVICE_CHECK_CONFIG,
):
hass.services.async_register(HASS_DOMAIN, service, async_handle_core_service)
# Init discovery Hass.io feature
async_setup_discovery_view(hass, hassio)
# Init auth Hass.io feature
async_setup_auth_view(hass, user)
# Init ingress Hass.io feature
async_setup_ingress_view(hass, host)
# Init add-on ingress panels
await async_setup_addon_panel(hass, hassio)
return True
|
import os
import re
import time
import diamond.collector
import diamond.convertor
try:
import psutil
psutil
except ImportError:
psutil = None
def match_process(pid, name, cmdline, exe, cfg):
"""
Decides whether a process matches with a given process descriptor
:param pid: process pid
:param exe: process executable
:param name: process name
:param cmdline: process cmdline
:param cfg: the dictionary from processes that describes with the
process group we're testing for
:return: True if it matches
:rtype: bool
"""
if cfg['selfmon'] and pid == os.getpid():
return True
for exe_re in cfg['exe']:
if exe_re.search(exe):
return True
for name_re in cfg['name']:
if name_re.search(name):
return True
for cmdline_re in cfg['cmdline']:
if cmdline_re.search(' '.join(cmdline)):
return True
return False
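# Illustrative cfg shape expected by match_process (a sketch mirroring
# process_config below): {'exe': [re.compile('nginx')], 'name': [],
# 'cmdline': [], 'selfmon': False, 'count_workers': False}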
def process_info(process, info_keys):
results = {}
process_info = process.as_dict(info_keys)
metrics = ((key, process_info.get(key, None)) for key in info_keys)
for key, value in metrics:
if type(value) in [float, int]:
results.update({key: value})
elif hasattr(value, '_asdict'):
for subkey, subvalue in value._asdict().iteritems():
results.update({"%s.%s" % (key, subkey): subvalue})
return results
def get_value(process, name):
result = getattr(process, name)
try:
return result()
except TypeError:
return result
class ProcessResourcesCollector(diamond.collector.Collector):
def process_config(self):
super(ProcessResourcesCollector, self).process_config()
"""
prepare self.processes, which is a descriptor dictionary in
pg_name: {
exe: [regex],
name: [regex],
cmdline: [regex],
selfmon: [boolean],
procs: [psutil.Process],
count_workers: [boolean]
}
"""
self.processes = {}
self.processes_info = {}
for pg_name, cfg in self.config['process'].items():
pg_cfg = {}
for key in ('exe', 'name', 'cmdline'):
pg_cfg[key] = cfg.get(key, [])
if not isinstance(pg_cfg[key], list):
pg_cfg[key] = [pg_cfg[key]]
pg_cfg[key] = [re.compile(e) for e in pg_cfg[key]]
pg_cfg['selfmon'] = cfg.get('selfmon', '').lower() == 'true'
pg_cfg['count_workers'] = cfg.get(
'count_workers', '').lower() == 'true'
self.processes[pg_name] = pg_cfg
self.processes_info[pg_name] = {}
def get_default_config_help(self):
config_help = super(ProcessResourcesCollector,
self).get_default_config_help()
config_help.update({
'info_keys': 'List of process metrics to collect. ' +
'Valid list of metrics can be found ' +
'[here](https://pythonhosted.org/psutil/)',
'unit': 'The unit in which memory data is collected.',
'process': ("A subcategory of settings inside of which each "
"collected process has it's configuration"),
})
return config_help
def get_default_config(self):
"""
Default settings are:
info_keys: ['num_ctx_switches', 'cpu_percent', 'cpu_times',
'io_counters', 'num_threads', 'num_fds',
'memory_percent', 'memory_info_ex', ]
path: 'process'
unit: 'B'
"""
config = super(ProcessResourcesCollector, self).get_default_config()
config.update({
'info_keys': ['num_ctx_switches', 'cpu_percent', 'cpu_times',
'io_counters', 'num_threads', 'num_fds',
'memory_percent', 'memory_info_ex', ],
'path': 'process',
'unit': 'B',
'process': {},
})
return config
def save_process_info(self, pg_name, process_info):
for key, value in process_info.iteritems():
if key in self.processes_info[pg_name]:
self.processes_info[pg_name][key] += value
else:
self.processes_info[pg_name][key] = value
def collect_process_info(self, process):
try:
pid = get_value(process, 'pid')
name = get_value(process, 'name')
cmdline = get_value(process, 'cmdline')
try:
exe = get_value(process, 'exe')
except psutil.AccessDenied:
exe = ""
for pg_name, cfg in self.processes.items():
if match_process(pid, name, cmdline, exe, cfg):
pi = process_info(process, self.config['info_keys'])
if cfg['count_workers']:
pi.update({'workers_count': 1})
uptime = time.time() - get_value(process, 'create_time')
pi.update({'uptime': uptime})
self.save_process_info(pg_name, pi)
except psutil.NoSuchProcess as e:
self.log.info("Process exited while trying to get info: %s", e)
def collect(self):
"""
Collects resources usage of each process defined under the
`process` subsection of the config file
"""
if not psutil:
self.log.error('Unable to import psutil')
self.log.error('No process resource metrics retrieved')
return None
for process in psutil.process_iter():
self.collect_process_info(process)
# publish results
for pg_name, counters in self.processes_info.iteritems():
if counters:
metrics = (
("%s.%s" % (pg_name, key), value)
for key, value in counters.iteritems())
else:
if self.processes[pg_name]['count_workers']:
metrics = (('%s.workers_count' % pg_name, 0), )
else:
metrics = ()
[self.publish(*metric) for metric in metrics]
# reinitialize process info
self.processes_info[pg_name] = {}
|
class BankError(Exception):
pass
class BankNotGlobal(BankError):
pass
class BankIsGlobal(BankError):
pass
class AccountAlreadyExists(BankError):
pass
class NoAccount(BankError):
pass
class NoSenderAccount(NoAccount):
pass
class NoReceiverAccount(NoAccount):
pass
class InsufficientBalance(BankError):
pass
class NegativeValue(BankError):
pass
class SameSenderAndReceiver(BankError):
pass
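# Illustrative note: catching NoAccount also covers the more specific
# NoSenderAccount and NoReceiverAccount, and every class above derives from
# BankError, so a single `except BankError` handles the whole hierarchy.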
|
from pscript import window, undefined
from ... import event
from . import Layout
from .. import create_element
class BaseTableLayout(Layout):
""" Abstract base class for layouts that use an HTML table.
Layouts that use this approach don't have good performance when
resizing. This is not so much a problem when it is used as a leaf
layout, but it's not recommended to embed such layouts in each-other.
"""
CSS = """
/* Clear any styling on this table (rendered_html is an IPython thing) */
.flx-BaseTableLayout, .flx-BaseTableLayout td, .flx-BaseTableLayout tr,
.rendered_html .flx-BaseTableLayout {
border: 0px;
padding: initial;
margin: initial;
background: initial;
}
/* Behave well inside hbox/vbox,
we assume no layouts to be nested inside a table layout */
.flx-box.flx-horizontal > .flx-BaseTableLayout {
width: auto;
}
.flx-box.flx-vertical > .flx-BaseTableLayout {
height: auto;
}
td.flx-vflex, td.flx-hflex {
padding: 2px;
}
/* In flexed cells, occupy the full space */
td.flx-vflex > .flx-Widget {
height: 100%;
}
td.flx-hflex > .flx-Widget {
width: 100%;
}
"""
def _apply_table_layout(self):
table = self.node
AUTOFLEX = 729 # magic number unlikely to occur in practice
# Get table dimensions
nrows = len(table.children)
ncols = 0
for i in range(len(table.children)):
row = table.children[i]
ncols = max(ncols, len(row.children))
if ncols == 0 and nrows == 0:
return
# Collect flexes
vflexes = []
hflexes = []
for i in range(nrows):
row = table.children[i]
for j in range(ncols):
col = row.children[j]
if (col is undefined) or (len(col.children) == 0):
continue
vflexes[i] = max(vflexes[i] or 0, col.children[0].vflex or 0)
hflexes[j] = max(hflexes[j] or 0, col.children[0].hflex or 0)
# What is the cumulative "flex-value"?
cum_vflex = vflexes.reduce(lambda pv, cv: pv + cv, 0)
cum_hflex = hflexes.reduce(lambda pv, cv: pv + cv, 0)
# If no flexes are given; assign each equal
if (cum_vflex == 0):
for i in range(len(vflexes)):
vflexes[i] = AUTOFLEX
cum_vflex = len(vflexes) * AUTOFLEX
if (cum_hflex == 0):
for i in range(len(hflexes)):
hflexes[i] = AUTOFLEX
cum_hflex = len(hflexes) * AUTOFLEX
# Assign css class and height/weight to cells
for i in range(nrows):
row = table.children[i]
row.vflex = vflexes[i] or 0 # Store for use during resizing
for j in range(ncols):
col = row.children[j]
if (col is undefined) or (col.children.length is 0):
continue
self._apply_cell_layout(row, col, vflexes[i], hflexes[j],
cum_vflex, cum_hflex)
@event.reaction('size')
def _adapt_to_size_change(self, *events):
""" This function adapts the height (in percent) of the flexible rows
of a layout. This is needed because the percent-height applies to the
total height of the table. This function is called whenever the
table resizes, and adjusts the percent-height, taking the available
        remaining table height into account. This is not necessary for the
        width, since percent-width in columns *does* apply to available width.
"""
table = self.node # or event.target
#print('heigh changed', event.heightChanged, event.owner.__id)
if events[-1].new_value[1] != events[0].old_value[1]:
# Set one flex row to max, so that non-flex rows have their
# minimum size. The table can already have been stretched
# a bit, causing the total row-height in % to not be
# sufficient from keeping the non-flex rows from growing.
for i in range(len(table.children)):
row = table.children[i]
if (row.vflex > 0):
row.style.height = '100%'
break
# Get remaining height: subtract height of each non-flex row
remainingHeight = table.clientHeight
cum_vflex = 0
for i in range(len(table.children)):
row = table.children[i]
cum_vflex += row.vflex
if (row.vflex == 0) and (row.children.length > 0):
remainingHeight -= row.children[0].clientHeight
# Apply height % for each flex row
remainingPercentage = 100 * remainingHeight / table.clientHeight
for i in range(len(table.children)):
row = table.children[i]
if row.vflex > 0:
row.style.height = round(row.vflex /cum_vflex *
remainingPercentage) + 1 + '%'
def _apply_cell_layout(self, row, col, vflex, hflex, cum_vflex, cum_hflex):
raise NotImplementedError()
class FormLayout(BaseTableLayout):
""" A layout widget that vertically alligns its child widgets in a form.
A label is placed to the left of each widget (based on the widget's title).
The ``node`` of this widget is a
`<table> <https://developer.mozilla.org/docs/Web/HTML/Element/table>`_.
(This may be changed to use a CSS layout instead.)
"""
CSS = """
.flx-FormLayout .flx-title {
text-align: right;
padding-right: 5px;
}
"""
def _create_dom(self):
return window.document.createElement('table')
def _render_dom(self):
rows = []
for widget in self.children:
row = create_element('tr', {},
create_element('td', {'class': 'flx-title'}, widget.title),
create_element('td', {}, [widget.outernode]),
)
widget.outernode.hflex = 1
widget.outernode.vflex = widget.flex[1]
rows.append(row)
event.loop.call_soon(self._apply_table_layout)
return rows
def _apply_cell_layout(self, row, col, vflex, hflex, cum_vflex, cum_hflex):
AUTOFLEX = 729
className = ''
if (vflex == AUTOFLEX) or (vflex == 0):
row.style.height = 'auto'
className += ''
else:
row.style.height = vflex * 100 / cum_vflex + '%'
className += 'flx-vflex'
className += ' '
if (hflex == 0):
col.style.width = 'auto'
className += ''
else:
col.style.width = '100%'
className += 'flx-hflex'
col.className = className
def _query_min_max_size(self):
""" Overload to also take child limits into account.
"""
# Collect contributions of child widgets
mima1 = [0, 1e9, 0, 0]
for child in self.children:
mima2 = child._size_limits
mima1[0] = max(mima1[0], mima2[0])
mima1[1] = min(mima1[1], mima2[1])
mima1[2] += mima2[2]
mima1[3] += mima2[3]
        # Don't forget padding and spacing
extra_padding = 2
extra_spacing = 2
for i in range(4):
mima1[i] += extra_padding
mima1[2] += extra_spacing
mima1[3] += extra_spacing
# Own limits
mima3 = super()._query_min_max_size()
# Combine own limits with limits of children
return [max(mima1[0], mima3[0]),
min(mima1[1], mima3[1]),
max(mima1[2], mima3[2]),
min(mima1[3], mima3[3])]
|
import asyncio
import time
from typing import Any
from typing import Awaitable
from typing import Callable
from typing import Dict
from typing import List
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Type
from typing import TypeVar
import aiohttp
from mypy_extensions import TypedDict
from paasta_tools.hacheck import get_spool
from paasta_tools.hacheck import post_spool
from paasta_tools.utils import get_user_agent
_drain_methods: Dict[str, Type["DrainMethod"]] = {}
_RegisterDrainMethod_T = TypeVar("_RegisterDrainMethod_T", bound=Type["DrainMethod"])
T = TypeVar("T")
def register_drain_method(
name: str,
) -> Callable[[_RegisterDrainMethod_T], _RegisterDrainMethod_T]:
"""Returns a decorator that registers a DrainMethod subclass at a given name
so get_drain_method/list_drain_methods can find it."""
def outer(drain_method: _RegisterDrainMethod_T) -> _RegisterDrainMethod_T:
_drain_methods[name] = drain_method
return drain_method
return outer
def get_drain_method(
name: str,
service: str,
instance: str,
registrations: List[str],
drain_method_params: Optional[Dict] = None,
) -> "DrainMethod":
return _drain_methods[name](
service, instance, registrations, **(drain_method_params or {})
)
def list_drain_methods() -> List[str]:
return sorted(_drain_methods.keys())
DrainTask = TypeVar("DrainTask", bound=Any)
class DrainMethod:
"""A drain method is a way of stopping new traffic to tasks without killing them. For example, you might take a task
out of a load balancer by causing its healthchecks to fail.
A drain method must have the following methods:
- drain(task): Begin draining traffic from a task. This should be idempotent.
- stop_draining(task): Stop draining traffic from a task. This should be idempotent.
- is_draining(task): Whether a task has already been marked as downed. Note that this state should be stored out of
process, because a bounce may take multiple runs of setup_marathon_job to complete.
- is_safe_to_kill(task): Return True if this task is safe to kill, False otherwise.
When implementing a drain method, be sure to decorate with @register_drain_method(name).
"""
def __init__(
self, service: str, instance: str, registrations: List[str], **kwargs: Dict
) -> None:
self.service = service
self.instance = instance
self.registrations = registrations
async def drain(self, task: DrainTask) -> None:
"""Make a task stop receiving new traffic."""
raise NotImplementedError()
async def stop_draining(self, task: DrainTask) -> None:
"""Make a task that has previously been downed start receiving traffic again."""
raise NotImplementedError()
async def is_draining(self, task: DrainTask) -> bool:
"""Return whether a task is being drained."""
raise NotImplementedError()
async def is_safe_to_kill(self, task: DrainTask) -> bool:
"""Return True if a task is drained and ready to be killed, or False if we should wait."""
raise NotImplementedError()
@register_drain_method("noop")
class NoopDrainMethod(DrainMethod):
"""This drain policy does nothing and assumes every task is safe to kill."""
async def drain(self, task: DrainTask) -> None:
pass
async def stop_draining(self, task: DrainTask) -> None:
pass
async def is_draining(self, task: DrainTask) -> bool:
return False
async def is_safe_to_kill(self, task: DrainTask) -> bool:
return True
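# Illustrative sketch (not part of the original module): how the registry above
# is typically used. The service/instance/registration names are made up.
def _example_registry_usage() -> None:
    assert "noop" in list_drain_methods()
    method = get_drain_method(
        "noop",
        service="example_service",
        instance="main",
        registrations=["example_service.main"],
    )
    assert isinstance(method, NoopDrainMethod)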
@register_drain_method("test")
class TestDrainMethod(DrainMethod):
"""This drain policy is meant for integration testing. Do not use."""
# These are variables on the class for ease of use in testing.
downed_task_ids: Set[str] = set()
safe_to_kill_task_ids: Set[str] = set()
async def drain(self, task: DrainTask) -> None:
if task.id not in self.safe_to_kill_task_ids:
self.downed_task_ids.add(task.id)
async def stop_draining(self, task: DrainTask) -> None:
self.downed_task_ids -= {task.id}
self.safe_to_kill_task_ids -= {task.id}
async def is_draining(self, task: DrainTask) -> bool:
return task.id in (self.downed_task_ids | self.safe_to_kill_task_ids)
async def is_safe_to_kill(self, task: DrainTask) -> bool:
return task.id in self.safe_to_kill_task_ids
@classmethod
def mark_arbitrary_task_as_safe_to_kill(cls) -> None:
cls.safe_to_kill_task_ids.add(cls.downed_task_ids.pop())
@register_drain_method("crashy_drain")
class CrashyDrainDrainMethod(NoopDrainMethod):
async def drain(self, task: DrainTask) -> None:
raise Exception("Intentionally crashing for testing purposes")
async def is_safe_to_kill(self, task: DrainTask) -> bool:
raise Exception("Intentionally crashing for testing purposes")
async def stop_draining(self, task: DrainTask) -> None:
raise Exception("Intentionally crashing for testing purposes")
async def is_draining(self, task: DrainTask) -> bool:
raise Exception("Intentionally crashing for testing purposes")
@register_drain_method("hacheck")
class HacheckDrainMethod(DrainMethod):
"""This drain policy issues a POST to hacheck's /spool/{service}/{port}/status endpoint to cause healthchecks to
fail. It considers tasks safe to kill if they've been down in hacheck for more than a specified delay."""
def __init__(
self,
service: str,
instance: str,
registrations: List[str],
delay: float = 240,
hacheck_port: int = 6666,
expiration: float = 0,
**kwargs: Dict,
) -> None:
super().__init__(service, instance, registrations)
self.delay = float(delay)
self.hacheck_port = hacheck_port
self.expiration = float(expiration) or float(delay) * 10
def spool_urls(self, task: DrainTask) -> List[str]:
return [
"http://%(task_host)s:%(hacheck_port)d/spool/%(registration)s/%(task_port)d/status"
% {
"task_host": task.host,
"task_port": task.ports[0],
"hacheck_port": self.hacheck_port,
"registration": registration,
}
for registration in self.registrations
]
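    # For example (hypothetical values), a task on host 10.0.0.1 exposing port
    # 31337 with registration "example_service.main" is spooled down via:
    #   http://10.0.0.1:6666/spool/example_service.main/31337/status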
async def for_each_registration(
self, task: DrainTask, func: Callable[..., Awaitable[T]]
) -> Sequence[T]:
if task.ports == []:
return None
futures = [func(url) for url in self.spool_urls(task)]
return await asyncio.gather(*futures)
async def drain(self, task: DrainTask) -> None:
await self.for_each_registration(task, self.down)
async def up(self, url: str) -> None:
await post_spool(url=url, status="up", data={"status": "up"})
async def down(self, url: str) -> None:
await post_spool(
url=url,
status="down",
data={
"status": "down",
"expiration": str(time.time() + self.expiration),
"reason": "Drained by Paasta",
},
)
async def stop_draining(self, task: DrainTask) -> None:
await self.for_each_registration(task, self.up)
async def is_draining(self, task: DrainTask) -> bool:
results = await self.for_each_registration(task, get_spool)
return not all([res is None or res["state"] == "up" for res in results])
async def is_safe_to_kill(self, task: DrainTask) -> bool:
results = await self.for_each_registration(task, lambda url: get_spool(url))
if all([res is None or res["state"] == "up" for res in results]):
return False
else:
return all(
[res.get("since", 0) < (time.time() - self.delay) for res in results]
)
class StatusCodeNotAcceptableError(Exception):
pass
UrlSpec = TypedDict(
"UrlSpec", {"url_format": str, "method": str, "success_codes": str}, total=False
)
@register_drain_method("http")
class HTTPDrainMethod(DrainMethod):
"""This drain policy issues arbitrary HTTP calls to arbitrary URLs specified by the parameters. The URLs are
specified as format strings, and will have variables such as {host}, {port}, etc. filled in."""
def __init__(
self,
service: str,
instance: str,
registrations: List[str],
drain: UrlSpec,
stop_draining: UrlSpec,
is_draining: UrlSpec,
is_safe_to_kill: UrlSpec,
) -> None:
super().__init__(service, instance, registrations)
self.drain_url_spec = drain
self.stop_draining_url_spec = stop_draining
self.is_draining_url_spec = is_draining
self.is_safe_to_kill_url_spec = is_safe_to_kill
def get_format_params(self, task: DrainTask) -> List[Dict[str, Any]]:
return [
{
"host": task.host,
"port": task.ports[0],
"service": self.service,
"instance": self.instance,
"nerve_ns": nerve_ns,
}
for nerve_ns in self.registrations
]
def format_url(self, url_format: str, format_params: Dict[str, Any]) -> str:
return url_format.format(**format_params)
def parse_success_codes(self, success_codes_str: str) -> Set[int]:
"""Expand a string like 200-399,407-409,500 to a set containing all the integers in between."""
acceptable_response_codes: Set[int] = set()
for series_str in str(success_codes_str).split(","):
if "-" in series_str:
start, end = series_str.split("-")
acceptable_response_codes.update(range(int(start), int(end) + 1))
else:
acceptable_response_codes.add(int(series_str))
return acceptable_response_codes
def check_response_code(self, status_code: int, success_codes_str: str) -> bool:
acceptable_response_codes = self.parse_success_codes(success_codes_str)
return status_code in acceptable_response_codes
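    # Illustrative sketch (values made up, methods shown without self): how the
    # helpers above behave for a sample UrlSpec.
    #   spec = {"url_format": "http://{host}:{port}/drain/{nerve_ns}",
    #           "method": "POST", "success_codes": "200-299,404"}
    #   format_url(spec["url_format"], {"host": "10.0.0.1", "port": 31337,
    #                                   "service": "svc", "instance": "main",
    #                                   "nerve_ns": "svc.main"})
    #     -> "http://10.0.0.1:31337/drain/svc.main"
    #   parse_success_codes(spec["success_codes"]) -> {200, 201, ..., 299, 404}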
async def issue_request(self, url_spec: UrlSpec, task: DrainTask) -> None:
"""Issue a request to the URL specified by url_spec regarding the task given."""
format_params = self.get_format_params(task)
urls = [
self.format_url(url_spec["url_format"], param) for param in format_params
]
method = url_spec.get("method", "GET").upper()
async with aiohttp.ClientSession() as session:
reqs = [
session.request(
method=method,
url=url,
headers={"User-Agent": get_user_agent()},
timeout=15,
)
for url in urls
]
res = await asyncio.gather(*reqs)
for response in res:
if not self.check_response_code(
response.status, url_spec["success_codes"]
):
raise StatusCodeNotAcceptableError(
f"Unacceptable status code {response.status} not in {url_spec['success_codes']} when hitting {response.url}"
)
async def drain(self, task: DrainTask) -> None:
return await self.issue_request(self.drain_url_spec, task)
async def stop_draining(self, task: DrainTask) -> None:
return await self.issue_request(self.stop_draining_url_spec, task)
async def is_draining(self, task: DrainTask) -> bool:
try:
await self.issue_request(self.is_draining_url_spec, task)
except StatusCodeNotAcceptableError:
return False
else:
return True
async def is_safe_to_kill(self, task: DrainTask) -> bool:
try:
await self.issue_request(self.is_safe_to_kill_url_spec, task)
except StatusCodeNotAcceptableError:
return False
else:
return True
|
import json
import os
import shutil
from pathlib import Path
from test.common import movie_data, tmp_dir
import msgpack
import pytest
import ruamel.yaml as yaml
from box import BoxError
from box.converters import _from_toml, _to_json, _to_msgpack, _to_toml, _to_yaml
toml_string = """[movies.Spaceballs]
imdb_stars = 7.1
rating = "PG"
length = 96
Director = "Mel Brooks"
[[movies.Spaceballs.Stars]]
name = "Mel Brooks"
imdb = "nm0000316"
role = "President Skroob"
[[movies.Spaceballs.Stars]]
name = "John Candy"
imdb = "nm0001006"
role = "Barf"
"""
class TestConverters:
@pytest.fixture(autouse=True)
def temp_dir_cleanup(self):
shutil.rmtree(str(tmp_dir), ignore_errors=True)
try:
os.mkdir(str(tmp_dir))
except OSError:
pass
yield
shutil.rmtree(str(tmp_dir), ignore_errors=True)
def test_to_toml(self):
formatted = _to_toml(movie_data)
assert formatted.startswith("[movies.Spaceballs]")
def test_to_toml_file(self):
out_file = Path(tmp_dir, "toml_test.tml")
assert not out_file.exists()
_to_toml(movie_data, filename=out_file)
assert out_file.exists()
assert out_file.read_text().startswith("[movies.Spaceballs]")
def test_from_toml(self):
result = _from_toml(toml_string)
assert result["movies"]["Spaceballs"]["length"] == 96
def test_from_toml_file(self):
out_file = Path(tmp_dir, "toml_test.tml")
assert not out_file.exists()
out_file.write_text(toml_string)
result = _from_toml(filename=out_file)
assert result["movies"]["Spaceballs"]["length"] == 96
def test_bad_from_toml(self):
with pytest.raises(BoxError):
_from_toml()
def test_to_json(self):
m_file = os.path.join(tmp_dir, "movie_data")
movie_string = _to_json(movie_data)
assert "Rick Moranis" in movie_string
_to_json(movie_data, filename=m_file)
assert "Rick Moranis" in open(m_file).read()
assert json.load(open(m_file)) == json.loads(movie_string)
def test_to_yaml(self):
m_file = os.path.join(tmp_dir, "movie_data")
movie_string = _to_yaml(movie_data)
assert "Rick Moranis" in movie_string
_to_yaml(movie_data, filename=m_file)
assert "Rick Moranis" in open(m_file).read()
assert yaml.load(open(m_file), Loader=yaml.SafeLoader) == yaml.load(movie_string, Loader=yaml.SafeLoader)
def test_to_msgpack(self):
m_file = os.path.join(tmp_dir, "movie_data")
msg_data = _to_msgpack(movie_data)
assert b"Rick Moranis" in msg_data
_to_msgpack(movie_data, filename=m_file)
assert b"Rick Moranis" in open(m_file, "rb").read()
assert msgpack.unpack(open(m_file, "rb")) == msgpack.unpackb(msg_data)
|
import json
import os
from functools import reduce
from django.conf import settings
from django.db import models
from django.db.models import Q
from django.utils.encoding import force_str
from django.utils.translation import gettext as _
from django.utils.translation import pgettext
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from translate.misc.xml_helpers import getXMLlang, getXMLspace
from translate.storage.tmx import tmxfile
from weblate_schemas import load_schema
from weblate.lang.models import Language
from weblate.memory.utils import (
CATEGORY_FILE,
CATEGORY_PRIVATE_OFFSET,
CATEGORY_SHARED,
CATEGORY_USER_OFFSET,
)
from weblate.utils.errors import report_error
class MemoryImportError(Exception):
pass
def get_node_data(unit, node):
"""Generic implementation of LISAUnit.gettarget."""
# The language should be present as xml:lang, but in some
# cases it's there only as lang
return (
getXMLlang(node) or node.get("lang"),
unit.getNodeText(node, getXMLspace(unit.xmlelement, "preserve")),
)
class MemoryQuerySet(models.QuerySet):
def filter_type(self, user=None, project=None, use_shared=False, from_file=False):
query = []
if from_file:
query.append(Q(from_file=from_file))
if use_shared:
query.append(Q(shared=use_shared))
if project:
query.append(Q(project=project))
if user:
query.append(Q(user=user))
return self.filter(reduce(lambda x, y: x | y, query))
def lookup(self, source_language, target_language, text, user, project, use_shared):
if isinstance(text, str):
search_query = Q(source__search=text)
else:
search_query = reduce(lambda x, y: x | Q(source__search=y), text, Q())
return self.filter_type(
# Type filtering
user=user,
project=project,
use_shared=use_shared,
from_file=True,
).filter(
# Full-text search on source
search_query,
# Language filtering
source_language=source_language,
target_language=target_language,
)[
:50
]
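    # Illustrative call (objects are made up): fetch up to 50 file-based,
    # shared, project or personal entries whose source matches either string.
    #   Memory.objects.lookup(
    #       source_language=english, target_language=czech,
    #       text=["Hello", "Hello world"], user=request.user,
    #       project=project, use_shared=True,
    #   )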
def prefetch_lang(self):
return self.prefetch_related("source_language", "target_language")
class MemoryManager(models.Manager):
def import_file(self, request, fileobj, langmap=None, **kwargs):
origin = os.path.basename(fileobj.name).lower()
name, extension = os.path.splitext(origin)
if len(name) > 25:
origin = "{}...{}".format(name[:25], extension)
if extension == ".tmx":
result = self.import_tmx(request, fileobj, origin, langmap, **kwargs)
elif extension == ".json":
result = self.import_json(request, fileobj, origin, **kwargs)
else:
raise MemoryImportError(_("Unsupported file!"))
if not result:
raise MemoryImportError(_("No valid entries found in the uploaded file!"))
return result
def import_json(self, request, fileobj, origin=None, **kwargs):
content = fileobj.read()
try:
data = json.loads(force_str(content))
except ValueError as error:
report_error(cause="Failed to parse memory")
raise MemoryImportError(_("Failed to parse JSON file: {!s}").format(error))
try:
validate(data, load_schema("weblate-memory.schema.json"))
except ValidationError as error:
report_error(cause="Failed to validate memory")
raise MemoryImportError(_("Failed to parse JSON file: {!s}").format(error))
found = 0
lang_cache = {}
for entry in data:
try:
self.update_entry(
source_language=Language.objects.get_by_code(
entry["source_language"], lang_cache
),
target_language=Language.objects.get_by_code(
entry["target_language"], lang_cache
),
source=entry["source"],
target=entry["target"],
origin=origin,
**kwargs,
)
found += 1
except Language.DoesNotExist:
continue
return found
def import_tmx(self, request, fileobj, origin=None, langmap=None, **kwargs):
if not kwargs:
kwargs = {"from_file": True}
try:
storage = tmxfile.parsefile(fileobj)
except (SyntaxError, AssertionError):
report_error(cause="Failed to parse")
raise MemoryImportError(_("Failed to parse TMX file!"))
header = next(
storage.document.getroot().iterchildren(storage.namespaced("header"))
)
lang_cache = {}
try:
source_language = Language.objects.get_by_code(
header.get("srclang"), lang_cache, langmap
)
except Language.DoesNotExist:
raise MemoryImportError(_("Failed to find source language!"))
found = 0
for unit in storage.units:
# Parse translations (translate-toolkit does not care about
# languages here, it just picks first and second XML elements)
translations = {}
for node in unit.getlanguageNodes():
lang_code, text = get_node_data(unit, node)
if not lang_code or not text:
continue
language = Language.objects.get_by_code(lang_code, lang_cache, langmap)
translations[language.code] = text
try:
source = translations.pop(source_language.code)
except KeyError:
# Skip if source language is not present
continue
for lang, text in translations.items():
self.update_entry(
source_language=source_language,
target_language=Language.objects.get_by_code(
lang, lang_cache, langmap
),
source=source,
target=text,
origin=origin,
**kwargs,
)
found += 1
return found
def update_entry(self, **kwargs):
if not self.filter(**kwargs).exists():
self.create(**kwargs)
class Memory(models.Model):
source_language = models.ForeignKey(
"lang.Language",
on_delete=models.deletion.CASCADE,
related_name="memory_source_set",
)
target_language = models.ForeignKey(
"lang.Language",
on_delete=models.deletion.CASCADE,
related_name="memory_target_set",
)
source = models.TextField()
target = models.TextField()
origin = models.TextField()
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.deletion.CASCADE,
null=True,
blank=True,
default=None,
)
project = models.ForeignKey(
"trans.Project",
on_delete=models.deletion.CASCADE,
null=True,
blank=True,
default=None,
)
from_file = models.BooleanField(db_index=True, default=False)
shared = models.BooleanField(db_index=True, default=False)
objects = MemoryManager.from_queryset(MemoryQuerySet)()
def __str__(self):
return f"Memory: {self.source_language}:{self.target_language}"
def get_origin_display(self):
if self.project:
text = pgettext("Translation memory category", "Project: {}")
elif self.user:
text = pgettext("Translation memory category", "Personal: {}")
elif self.shared:
text = pgettext("Translation memory category", "Shared: {}")
elif self.from_file:
text = pgettext("Translation memory category", "File: {}")
else:
text = "Unknown: {}"
return text.format(self.origin)
def get_category(self):
if self.from_file:
return CATEGORY_FILE
if self.shared:
return CATEGORY_SHARED
if self.project_id:
return CATEGORY_PRIVATE_OFFSET + self.project_id
if self.user_id:
return CATEGORY_USER_OFFSET + self.user_id
return 0
def as_dict(self):
"""Convert to dict suitable for JSON export."""
return {
"source": self.source,
"target": self.target,
"source_language": self.source_language.code,
"target_language": self.target_language.code,
"origin": self.origin,
"category": self.get_category(),
}
|
import io
import random
import unittest
import smart_open.bytebuffer
CHUNK_SIZE = 1024
def int2byte(i):
return bytes((i, ))
def random_byte_string(length=CHUNK_SIZE):
rand_bytes = [int2byte(random.randint(0, 255)) for _ in range(length)]
return b''.join(rand_bytes)
def bytebuffer_and_random_contents():
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
contents = random_byte_string(CHUNK_SIZE)
content_reader = io.BytesIO(contents)
buf.fill(content_reader)
return [buf, contents]
class ByteBufferTest(unittest.TestCase):
def test_len(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
self.assertEqual(len(buf), 0)
contents = b'foo bar baz'
buf._bytes = contents
self.assertEqual(len(buf), len(contents))
pos = 4
buf._pos = pos
self.assertEqual(len(buf), len(contents) - pos)
def test_fill_from_reader(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
contents = random_byte_string(CHUNK_SIZE)
content_reader = io.BytesIO(contents)
bytes_filled = buf.fill(content_reader)
self.assertEqual(bytes_filled, CHUNK_SIZE)
self.assertEqual(len(buf), CHUNK_SIZE)
self.assertEqual(buf._bytes, contents)
def test_fill_from_iterable(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
contents = random_byte_string(CHUNK_SIZE)
contents_iter = (contents[i:i+8] for i in range(0, CHUNK_SIZE, 8))
bytes_filled = buf.fill(contents_iter)
self.assertEqual(bytes_filled, CHUNK_SIZE)
self.assertEqual(len(buf), CHUNK_SIZE)
self.assertEqual(buf._bytes, contents)
def test_fill_from_list(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
contents = random_byte_string(CHUNK_SIZE)
contents_list = [contents[i:i+7] for i in range(0, CHUNK_SIZE, 7)]
bytes_filled = buf.fill(contents_list)
self.assertEqual(bytes_filled, CHUNK_SIZE)
self.assertEqual(len(buf), CHUNK_SIZE)
self.assertEqual(buf._bytes, contents)
def test_fill_multiple(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
long_contents = random_byte_string(CHUNK_SIZE * 4)
long_content_reader = io.BytesIO(long_contents)
first_bytes_filled = buf.fill(long_content_reader)
self.assertEqual(first_bytes_filled, CHUNK_SIZE)
second_bytes_filled = buf.fill(long_content_reader)
self.assertEqual(second_bytes_filled, CHUNK_SIZE)
self.assertEqual(len(buf), 2 * CHUNK_SIZE)
def test_fill_size(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
contents = random_byte_string(CHUNK_SIZE * 2)
content_reader = io.BytesIO(contents)
fill_size = int(CHUNK_SIZE / 2)
bytes_filled = buf.fill(content_reader, size=fill_size)
self.assertEqual(bytes_filled, fill_size)
self.assertEqual(len(buf), fill_size)
second_bytes_filled = buf.fill(content_reader, size=CHUNK_SIZE+1)
self.assertEqual(second_bytes_filled, CHUNK_SIZE)
self.assertEqual(len(buf), fill_size + CHUNK_SIZE)
def test_fill_reader_exhaustion(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
short_content_size = int(CHUNK_SIZE / 4)
short_contents = random_byte_string(short_content_size)
short_content_reader = io.BytesIO(short_contents)
bytes_filled = buf.fill(short_content_reader)
self.assertEqual(bytes_filled, short_content_size)
self.assertEqual(len(buf), short_content_size)
def test_fill_iterable_exhaustion(self):
buf = smart_open.bytebuffer.ByteBuffer(CHUNK_SIZE)
short_content_size = int(CHUNK_SIZE / 4)
short_contents = random_byte_string(short_content_size)
short_contents_iter = (short_contents[i:i+8]
for i in range(0, short_content_size, 8))
bytes_filled = buf.fill(short_contents_iter)
self.assertEqual(bytes_filled, short_content_size)
self.assertEqual(len(buf), short_content_size)
def test_empty(self):
buf, _ = bytebuffer_and_random_contents()
self.assertEqual(len(buf), CHUNK_SIZE)
buf.empty()
self.assertEqual(len(buf), 0)
def test_peek(self):
buf, contents = bytebuffer_and_random_contents()
self.assertEqual(buf.peek(), contents)
self.assertEqual(len(buf), CHUNK_SIZE)
self.assertEqual(buf.peek(64), contents[0:64])
self.assertEqual(buf.peek(CHUNK_SIZE * 10), contents)
def test_read(self):
buf, contents = bytebuffer_and_random_contents()
self.assertEqual(buf.read(), contents)
self.assertEqual(len(buf), 0)
self.assertEqual(buf.read(), b'')
def test_read_size(self):
buf, contents = bytebuffer_and_random_contents()
read_size = 128
self.assertEqual(buf.read(read_size), contents[:read_size])
self.assertEqual(len(buf), CHUNK_SIZE - read_size)
self.assertEqual(buf.read(CHUNK_SIZE*2), contents[read_size:])
self.assertEqual(len(buf), 0)
def test_readline(self):
"""Does the readline function work as expected in the simple case?"""
expected = (b'this is the very first line\n', b'and this the second')
buf = smart_open.bytebuffer.ByteBuffer()
buf.fill(io.BytesIO(b''.join(expected)))
first_line = buf.readline(b'\n')
self.assertEqual(expected[0], first_line)
second_line = buf.readline(b'\n')
self.assertEqual(expected[1], second_line)
def test_readline_middle(self):
"""Does the readline function work when we're in the middle of the buffer?"""
expected = (b'this is the very first line\n', b'and this the second')
buf = smart_open.bytebuffer.ByteBuffer()
buf.fill(io.BytesIO(b''.join(expected)))
buf.read(5)
first_line = buf.readline(b'\n')
self.assertEqual(expected[0][5:], first_line)
buf.read(5)
second_line = buf.readline(b'\n')
self.assertEqual(expected[1][5:], second_line)
def test_readline_terminator(self):
"""Does the readline function respect the terminator parameter?"""
buf = smart_open.bytebuffer.ByteBuffer()
buf.fill(io.BytesIO(b'one!two.three,'))
expected = [b'one!', b'two.', b'three,']
actual = [buf.readline(b'!'), buf.readline(b'.'), buf.readline(b',')]
self.assertEqual(expected, actual)
|
import os
import unittest
from perfkitbenchmarker.traces import sar
class SarTestCase(unittest.TestCase):
def setUp(self):
super(SarTestCase, self).setUp()
path = os.path.join(
os.path.dirname(__file__), '../data', 'sar_output.txt')
with open(path) as fp:
self.contents = fp.read()
def testParseSarResult(self):
metadata = {
'event': 'sar',
'sender': 'run',
'sar_interval': 5,
}
samples = []
sar._AddStealResults(metadata, self.contents, samples)
# Test metadata
metadata = samples[0].metadata
expected_steal_values = [
0.150000, 0.220000, 0.300000, 0.190000, 0.370000, 0.300000, 0.250000,
0.350000, 0.210000, 0.170000, 17.990000
]
for i in range(0, 11):
sample = samples[i]
self.assertEqual(expected_steal_values[i], sample.value)
self.assertEqual('sar', sample.metadata['event'])
self.assertEqual('run', sample.metadata['sender'])
self.assertEqual(len(samples), 11)
last_sample = samples[-1]
self.assertEqual('average_steal', last_sample.metric)
if __name__ == '__main__':
unittest.main()
|
import hashlib
import logging
import sys
import time
from ...common.interfaces import MonitoringDataListener
from ..Telegraf.client import SSHClient, LocalhostClient
from ..Telegraf.config import ConfigManager
logger = logging.getLogger(__name__)
class MonitoringCollector(object):
"""Aggregate data from several collectors
Workflow:
plugin creates collector
collector reads monitoring config and creates agents
collector creates configs for agents (telegraf, shutdown&startup, custom scripts)
collector installs agents on targets, send configs on targets
agent starts startups on target, then starts telegraf
    agent reads the output of telegraf, consolidates it, caches it for 5 seconds and then sends it to the collector
collector polls agents for data, decodes known metrics and counts diffs for diff-like metrics
collector sends data to listeners
"""
def __init__(self, disguise_hostnames, kill_old):
self.kill_old = kill_old
self.disguise_hostnames = disguise_hostnames
self.config = None
self.default_target = None
self.agents = []
self.agent_sessions = []
self.listeners = []
self.first_data_received = False
self.__collected_data = []
self.artifact_files = []
self.load_start_time = None
self.config_manager = ConfigManager()
self.old_style_configs = False
self.clients = {'localhost': LocalhostClient, 'ssh': SSHClient}
def add_listener(self, obj):
self.listeners.append(obj)
def prepare(self):
"""Prepare for monitoring - install agents etc"""
# Parse config
agent_configs = []
if self.config:
agent_configs = self.config_manager.getconfig(
self.config, self.default_target)
# Creating agent for hosts
for config in agent_configs:
if config['host'] in ['localhost', '127.0.0.1', '::1']:
client = self.clients['localhost'](
config, self.old_style_configs, kill_old=self.kill_old)
else:
client = self.clients['ssh'](
config, self.old_style_configs, timeout=5, kill_old=self.kill_old)
logger.debug('Installing monitoring agent. Host: %s', client.host)
agent_config, startup_config, customs_script = client.install()
if agent_config:
self.agents.append(client)
self.artifact_files.append(agent_config)
if startup_config:
self.artifact_files.append(startup_config)
if customs_script:
self.artifact_files.append(customs_script)
def start(self):
""" Start agents
        Execute agent.py on the target via popen and start the output reader thread.
"""
[agent.start() for agent in self.agents]
[agent.reader_thread.start() for agent in self.agents]
def poll(self):
""" Poll agents for data
"""
start_time = time.time()
for agent in self.agents:
for collect in agent.reader:
                # don't crash if garbage or a traceback arrives from the agent's stdout
if not collect:
return 0
for chunk in collect:
ts, prepared_results = chunk
if not self.first_data_received and prepared_results:
self.first_data_received = True
logger.info("Monitoring received first data.")
if self.load_start_time and int(
ts) >= self.load_start_time:
ready_to_send = {
"timestamp": int(ts),
"data": {
self.hash_hostname(agent.host): {
"comment": agent.config.comment,
"metrics": prepared_results
}
}
}
self.__collected_data.append(ready_to_send)
logger.debug(
'Polling/decoding agents data took: %.2fms',
(time.time() - start_time) * 1000)
data = self.__collected_data
self.__collected_data = []
return data
def stop(self):
"""Shutdown agents"""
logger.debug("Uninstalling monitoring agents")
for agent in self.agents:
agent._stop_agent()
for agent in self.agents:
try:
log_filename, data_filename = agent.uninstall()
self.artifact_files.append(log_filename)
self.artifact_files.append(data_filename)
except Exception as exc:
logger.warning("Error while uninstalling agent %s", exc, exc_info=True)
        for agent in self.agents[:]:  # iterate over a copy; agents are removed inside the loop
try:
logger.debug(
'Waiting for agent %s reader thread to finish.', agent)
agent.reader_thread.join(10)
self.agents.remove(agent)
except BaseException:
logger.error('Monitoring reader thread stuck!', exc_info=True)
def get_rest_data(self):
return self.__collected_data
def hash_hostname(self, host):
if self.disguise_hostnames and host:
return hashlib.md5(host.encode()).hexdigest()
else:
return host
class StdOutPrintMon(MonitoringDataListener):
"""Simple listener, writing data to stdout"""
def __init__(self):
MonitoringDataListener.__init__(self)
def monitoring_data(self, data_list):
[sys.stdout.write(data) for data in data_list]
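# Minimal usage sketch (illustrative, not part of the original module); the
# config path and polling cadence are assumptions, and in practice the tank
# plugin drives this object.
def _example_monitoring_run(config_path):
    collector = MonitoringCollector(disguise_hostnames=False, kill_old=False)
    collector.config = config_path
    collector.add_listener(StdOutPrintMon())
    collector.prepare()  # install agents and push generated configs to targets
    collector.start()  # start agents and their reader threads
    try:
        for _ in range(3):
            for chunk in collector.poll():
                logger.debug("collected: %s", chunk)
            time.sleep(1)
    finally:
        collector.stop()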
|
import datetime
from homeassistant.components import geo_location
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.components.usgs_earthquakes_feed.geo_location import (
ATTR_ALERT,
ATTR_EXTERNAL_ID,
ATTR_MAGNITUDE,
ATTR_PLACE,
ATTR_STATUS,
ATTR_TIME,
ATTR_TYPE,
ATTR_UPDATED,
CONF_FEED_TYPE,
SCAN_INTERVAL,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import MagicMock, call, patch
from tests.common import assert_setup_component, async_fire_time_changed
CONFIG = {
geo_location.DOMAIN: [
{
"platform": "usgs_earthquakes_feed",
CONF_FEED_TYPE: "past_hour_m25_earthquakes",
CONF_RADIUS: 200,
}
]
}
CONFIG_WITH_CUSTOM_LOCATION = {
geo_location.DOMAIN: [
{
"platform": "usgs_earthquakes_feed",
CONF_FEED_TYPE: "past_hour_m25_earthquakes",
CONF_RADIUS: 200,
CONF_LATITUDE: 15.1,
CONF_LONGITUDE: 25.2,
}
]
}
def _generate_mock_feed_entry(
external_id,
title,
distance_to_home,
coordinates,
place=None,
attribution=None,
time=None,
updated=None,
magnitude=None,
status=None,
entry_type=None,
alert=None,
):
"""Construct a mock feed entry for testing purposes."""
feed_entry = MagicMock()
feed_entry.external_id = external_id
feed_entry.title = title
feed_entry.distance_to_home = distance_to_home
feed_entry.coordinates = coordinates
feed_entry.place = place
feed_entry.attribution = attribution
feed_entry.time = time
feed_entry.updated = updated
feed_entry.magnitude = magnitude
feed_entry.status = status
feed_entry.type = entry_type
feed_entry.alert = alert
return feed_entry
async def test_setup(hass):
"""Test the general setup of the platform."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(-31.0, 150.0),
place="Location 1",
attribution="Attribution 1",
time=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
updated=datetime.datetime(2018, 9, 22, 9, 0, tzinfo=datetime.timezone.utc),
magnitude=5.7,
status="Status 1",
entry_type="Type 1",
alert="Alert 1",
)
mock_entry_2 = _generate_mock_feed_entry("2345", "Title 2", 20.5, (-31.1, 150.1))
mock_entry_3 = _generate_mock_feed_entry("3456", "Title 3", 25.5, (-31.2, 150.2))
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (-31.3, 150.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"geojson_client.usgs_earthquake_hazards_program_feed."
"UsgsEarthquakeHazardsProgramFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_2, mock_entry_3],
)
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(hass, geo_location.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: -31.0,
ATTR_LONGITUDE: 150.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_PLACE: "Location 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_TIME: datetime.datetime(
2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_UPDATED: datetime.datetime(
2018, 9, 22, 9, 0, tzinfo=datetime.timezone.utc
),
ATTR_STATUS: "Status 1",
ATTR_TYPE: "Type 1",
ATTR_ALERT: "Alert 1",
ATTR_MAGNITUDE: 5.7,
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "usgs_earthquakes_feed",
ATTR_ICON: "mdi:pulse",
}
assert round(abs(float(state.state) - 15.5), 7) == 0
state = hass.states.get("geo_location.title_2")
assert state is not None
assert state.name == "Title 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: -31.1,
ATTR_LONGITUDE: 150.1,
ATTR_FRIENDLY_NAME: "Title 2",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "usgs_earthquakes_feed",
ATTR_ICON: "mdi:pulse",
}
assert round(abs(float(state.state) - 20.5), 7) == 0
state = hass.states.get("geo_location.title_3")
assert state is not None
assert state.name == "Title 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: -31.2,
ATTR_LONGITUDE: 150.2,
ATTR_FRIENDLY_NAME: "Title 3",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "usgs_earthquakes_feed",
ATTR_ICON: "mdi:pulse",
}
assert round(abs(float(state.state) - 25.5), 7) == 0
# Simulate an update - one existing, one new entry,
# one outdated entry
mock_feed.return_value.update.return_value = (
"OK",
[mock_entry_1, mock_entry_4, mock_entry_3],
)
async_fire_time_changed(hass, utcnow + SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed.return_value.update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 3
            # Simulate an error update - removes all entities
mock_feed.return_value.update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 0
async def test_setup_with_custom_location(hass):
"""Test the setup with a custom location."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 20.5, (-31.1, 150.1))
with patch(
"geojson_client.usgs_earthquake_hazards_program_feed."
"UsgsEarthquakeHazardsProgramFeed"
) as mock_feed:
mock_feed.return_value.update.return_value = "OK", [mock_entry_1]
with assert_setup_component(1, geo_location.DOMAIN):
assert await async_setup_component(
hass, geo_location.DOMAIN, CONFIG_WITH_CUSTOM_LOCATION
)
await hass.async_block_till_done()
# Artificially trigger update.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
# Collect events.
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert mock_feed.call_args == call(
(15.1, 25.2),
"past_hour_m25_earthquakes",
filter_minimum_magnitude=0.0,
filter_radius=200.0,
)
|
import importlib
import time
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import (
CONF_API_KEY,
CONF_DEVICES,
CONF_ID,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
SUPPORT_AVION_LED = SUPPORT_BRIGHTNESS
DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_ID): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA},
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up an Avion switch."""
# pylint: disable=no-member
avion = importlib.import_module("avion")
lights = []
if CONF_USERNAME in config and CONF_PASSWORD in config:
devices = avion.get_devices(config[CONF_USERNAME], config[CONF_PASSWORD])
for device in devices:
lights.append(AvionLight(device))
for address, device_config in config[CONF_DEVICES].items():
device = avion.Avion(
mac=address,
passphrase=device_config[CONF_API_KEY],
name=device_config.get(CONF_NAME),
object_id=device_config.get(CONF_ID),
connect=False,
)
lights.append(AvionLight(device))
add_entities(lights)
class AvionLight(LightEntity):
"""Representation of an Avion light."""
def __init__(self, device):
"""Initialize the light."""
self._name = device.name
self._address = device.mac
self._brightness = 255
self._state = False
self._switch = device
@property
def unique_id(self):
"""Return the ID of this light."""
return self._address
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_AVION_LED
@property
def should_poll(self):
"""Don't poll."""
return False
@property
def assumed_state(self):
"""We can't read the actual state, so assume it matches."""
return True
def set_state(self, brightness):
"""Set the state of this lamp to the provided brightness."""
# pylint: disable=no-member
avion = importlib.import_module("avion")
# Bluetooth LE is unreliable, and the connection may drop at any
# time. Make an effort to re-establish the link.
initial = time.monotonic()
while True:
if time.monotonic() - initial >= 10:
return False
try:
self._switch.set_brightness(brightness)
break
except avion.AvionException:
self._switch.connect()
return True
def turn_on(self, **kwargs):
"""Turn the specified or all lights on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
if brightness is not None:
self._brightness = brightness
self.set_state(self.brightness)
self._state = True
def turn_off(self, **kwargs):
"""Turn the specified or all lights off."""
self.set_state(0)
self._state = False
|
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_OPENING,
BinarySensorEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.event import async_call_later
from . import XiaomiDevice
from .const import DOMAIN, GATEWAYS_KEY
_LOGGER = logging.getLogger(__name__)
NO_CLOSE = "no_close"
ATTR_OPEN_SINCE = "Open since"
MOTION = "motion"
NO_MOTION = "no_motion"
ATTR_LAST_ACTION = "last_action"
ATTR_NO_MOTION_SINCE = "No motion since"
DENSITY = "density"
ATTR_DENSITY = "Density"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Perform the setup for Xiaomi devices."""
entities = []
gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id]
for entity in gateway.devices["binary_sensor"]:
model = entity["model"]
if model in ["motion", "sensor_motion", "sensor_motion.aq2"]:
entities.append(XiaomiMotionSensor(entity, hass, gateway, config_entry))
elif model in ["magnet", "sensor_magnet", "sensor_magnet.aq2"]:
entities.append(XiaomiDoorSensor(entity, gateway, config_entry))
elif model == "sensor_wleak.aq1":
entities.append(XiaomiWaterLeakSensor(entity, gateway, config_entry))
elif model in ["smoke", "sensor_smoke"]:
entities.append(XiaomiSmokeSensor(entity, gateway, config_entry))
elif model in ["natgas", "sensor_natgas"]:
entities.append(XiaomiNatgasSensor(entity, gateway, config_entry))
elif model in [
"switch",
"sensor_switch",
"sensor_switch.aq2",
"sensor_switch.aq3",
"remote.b1acn01",
]:
if "proto" not in entity or int(entity["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "button_0"
entities.append(
XiaomiButton(entity, "Switch", data_key, hass, gateway, config_entry)
)
elif model in [
"86sw1",
"sensor_86sw1",
"sensor_86sw1.aq1",
"remote.b186acn01",
"remote.b186acn02",
]:
if "proto" not in entity or int(entity["proto"][0:1]) == 1:
data_key = "channel_0"
else:
data_key = "button_0"
entities.append(
XiaomiButton(
entity, "Wall Switch", data_key, hass, gateway, config_entry
)
)
elif model in [
"86sw2",
"sensor_86sw2",
"sensor_86sw2.aq1",
"remote.b286acn01",
"remote.b286acn02",
]:
if "proto" not in entity or int(entity["proto"][0:1]) == 1:
data_key_left = "channel_0"
data_key_right = "channel_1"
else:
data_key_left = "button_0"
data_key_right = "button_1"
entities.append(
XiaomiButton(
entity,
"Wall Switch (Left)",
data_key_left,
hass,
gateway,
config_entry,
)
)
entities.append(
XiaomiButton(
entity,
"Wall Switch (Right)",
data_key_right,
hass,
gateway,
config_entry,
)
)
entities.append(
XiaomiButton(
entity,
"Wall Switch (Both)",
"dual_channel",
hass,
gateway,
config_entry,
)
)
elif model in ["cube", "sensor_cube", "sensor_cube.aqgl01"]:
entities.append(XiaomiCube(entity, hass, gateway, config_entry))
elif model in ["vibration", "vibration.aq1"]:
entities.append(
XiaomiVibration(entity, "Vibration", "status", gateway, config_entry)
)
else:
_LOGGER.warning("Unmapped Device Model %s", model)
async_add_entities(entities)
class XiaomiBinarySensor(XiaomiDevice, BinarySensorEntity):
"""Representation of a base XiaomiBinarySensor."""
def __init__(self, device, name, xiaomi_hub, data_key, device_class, config_entry):
"""Initialize the XiaomiSmokeSensor."""
self._data_key = data_key
self._device_class = device_class
self._should_poll = False
self._density = 0
super().__init__(device, name, xiaomi_hub, config_entry)
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return self._should_poll
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of binary sensor."""
return self._device_class
def update(self):
"""Update the sensor state."""
_LOGGER.debug("Updating xiaomi sensor (%s) by polling", self._sid)
self._get_from_hub(self._sid)
class XiaomiNatgasSensor(XiaomiBinarySensor):
"""Representation of a XiaomiNatgasSensor."""
def __init__(self, device, xiaomi_hub, config_entry):
"""Initialize the XiaomiSmokeSensor."""
self._density = None
super().__init__(
device, "Natgas Sensor", xiaomi_hub, "alarm", "gas", config_entry
)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_DENSITY: self._density}
attrs.update(super().device_state_attributes)
return attrs
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
if DENSITY in data:
self._density = int(data.get(DENSITY))
value = data.get(self._data_key)
if value is None:
return False
if value in ("1", "2"):
if self._state:
return False
self._state = True
return True
if value == "0":
if self._state:
self._state = False
return True
return False
class XiaomiMotionSensor(XiaomiBinarySensor):
"""Representation of a XiaomiMotionSensor."""
def __init__(self, device, hass, xiaomi_hub, config_entry):
"""Initialize the XiaomiMotionSensor."""
self._hass = hass
self._no_motion_since = 0
self._unsub_set_no_motion = None
if "proto" not in device or int(device["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "motion_status"
super().__init__(
device, "Motion Sensor", xiaomi_hub, data_key, "motion", config_entry
)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_NO_MOTION_SINCE: self._no_motion_since}
attrs.update(super().device_state_attributes)
return attrs
@callback
def _async_set_no_motion(self, now):
"""Set state to False."""
self._unsub_set_no_motion = None
self._state = False
self.async_write_ha_state()
def parse_data(self, data, raw_data):
"""Parse data sent by gateway.
Polling (proto v1, firmware version 1.4.1_159.0143)
>> { "cmd":"read","sid":"158..."}
<< {'model': 'motion', 'sid': '158...', 'short_id': 26331,
'cmd': 'read_ack', 'data': '{"voltage":3005}'}
Multicast messages (proto v1, firmware version 1.4.1_159.0143)
<< {'model': 'motion', 'sid': '158...', 'short_id': 26331,
'cmd': 'report', 'data': '{"status":"motion"}'}
<< {'model': 'motion', 'sid': '158...', 'short_id': 26331,
'cmd': 'report', 'data': '{"no_motion":"120"}'}
<< {'model': 'motion', 'sid': '158...', 'short_id': 26331,
'cmd': 'report', 'data': '{"no_motion":"180"}'}
<< {'model': 'motion', 'sid': '158...', 'short_id': 26331,
'cmd': 'report', 'data': '{"no_motion":"300"}'}
<< {'model': 'motion', 'sid': '158...', 'short_id': 26331,
'cmd': 'heartbeat', 'data': '{"voltage":3005}'}
"""
if raw_data["cmd"] == "heartbeat":
_LOGGER.debug(
"Skipping heartbeat of the motion sensor. "
"It can introduce an incorrect state because of a firmware "
"bug (https://github.com/home-assistant/core/pull/"
"11631#issuecomment-357507744)"
)
return
if NO_MOTION in data:
self._no_motion_since = data[NO_MOTION]
self._state = False
return True
value = data.get(self._data_key)
if value is None:
return False
if value == MOTION:
if self._data_key == "motion_status":
if self._unsub_set_no_motion:
self._unsub_set_no_motion()
self._unsub_set_no_motion = async_call_later(
self._hass, 120, self._async_set_no_motion
)
if self.entity_id is not None:
self._hass.bus.fire(
"xiaomi_aqara.motion", {"entity_id": self.entity_id}
)
self._no_motion_since = 0
if self._state:
return False
self._state = True
return True
class XiaomiDoorSensor(XiaomiBinarySensor):
"""Representation of a XiaomiDoorSensor."""
def __init__(self, device, xiaomi_hub, config_entry):
"""Initialize the XiaomiDoorSensor."""
self._open_since = 0
if "proto" not in device or int(device["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "window_status"
super().__init__(
device,
"Door Window Sensor",
xiaomi_hub,
data_key,
DEVICE_CLASS_OPENING,
config_entry,
)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_OPEN_SINCE: self._open_since}
attrs.update(super().device_state_attributes)
return attrs
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
self._should_poll = False
if NO_CLOSE in data: # handle push from the hub
self._open_since = data[NO_CLOSE]
return True
value = data.get(self._data_key)
if value is None:
return False
if value == "open":
self._should_poll = True
if self._state:
return False
self._state = True
return True
if value == "close":
self._open_since = 0
if self._state:
self._state = False
return True
return False
class XiaomiWaterLeakSensor(XiaomiBinarySensor):
"""Representation of a XiaomiWaterLeakSensor."""
def __init__(self, device, xiaomi_hub, config_entry):
"""Initialize the XiaomiWaterLeakSensor."""
if "proto" not in device or int(device["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "wleak_status"
super().__init__(
device,
"Water Leak Sensor",
xiaomi_hub,
data_key,
DEVICE_CLASS_MOISTURE,
config_entry,
)
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
self._should_poll = False
value = data.get(self._data_key)
if value is None:
return False
if value == "leak":
self._should_poll = True
if self._state:
return False
self._state = True
return True
if value == "no_leak":
if self._state:
self._state = False
return True
return False
class XiaomiSmokeSensor(XiaomiBinarySensor):
"""Representation of a XiaomiSmokeSensor."""
def __init__(self, device, xiaomi_hub, config_entry):
"""Initialize the XiaomiSmokeSensor."""
self._density = 0
super().__init__(
device, "Smoke Sensor", xiaomi_hub, "alarm", "smoke", config_entry
)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_DENSITY: self._density}
attrs.update(super().device_state_attributes)
return attrs
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
if DENSITY in data:
self._density = int(data.get(DENSITY))
value = data.get(self._data_key)
if value is None:
return False
if value in ("1", "2"):
if self._state:
return False
self._state = True
return True
if value == "0":
if self._state:
self._state = False
return True
return False
class XiaomiVibration(XiaomiBinarySensor):
"""Representation of a Xiaomi Vibration Sensor."""
def __init__(self, device, name, data_key, xiaomi_hub, config_entry):
"""Initialize the XiaomiVibration."""
self._last_action = None
super().__init__(device, name, xiaomi_hub, data_key, None, config_entry)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_LAST_ACTION: self._last_action}
attrs.update(super().device_state_attributes)
return attrs
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
value = data.get(self._data_key)
if value is None:
return False
if value not in ("vibrate", "tilt", "free_fall", "actively"):
_LOGGER.warning("Unsupported movement_type detected: %s", value)
return False
self.hass.bus.fire(
"xiaomi_aqara.movement",
{"entity_id": self.entity_id, "movement_type": value},
)
self._last_action = value
return True
class XiaomiButton(XiaomiBinarySensor):
"""Representation of a Xiaomi Button."""
def __init__(self, device, name, data_key, hass, xiaomi_hub, config_entry):
"""Initialize the XiaomiButton."""
self._hass = hass
self._last_action = None
super().__init__(device, name, xiaomi_hub, data_key, None, config_entry)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_LAST_ACTION: self._last_action}
attrs.update(super().device_state_attributes)
return attrs
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
value = data.get(self._data_key)
if value is None:
return False
if value == "long_click_press":
self._state = True
click_type = "long_click_press"
elif value == "long_click_release":
self._state = False
click_type = "hold"
elif value == "click":
click_type = "single"
elif value == "double_click":
click_type = "double"
elif value == "both_click":
click_type = "both"
elif value == "double_both_click":
click_type = "double_both"
elif value == "shake":
click_type = "shake"
elif value == "long_click":
click_type = "long"
elif value == "long_both_click":
click_type = "long_both"
else:
_LOGGER.warning("Unsupported click_type detected: %s", value)
return False
self._hass.bus.fire(
"xiaomi_aqara.click",
{"entity_id": self.entity_id, "click_type": click_type},
)
self._last_action = click_type
return True
class XiaomiCube(XiaomiBinarySensor):
"""Representation of a Xiaomi Cube."""
def __init__(self, device, hass, xiaomi_hub, config_entry):
"""Initialize the Xiaomi Cube."""
self._hass = hass
self._last_action = None
self._state = False
if "proto" not in device or int(device["proto"][0:1]) == 1:
data_key = "status"
else:
data_key = "cube_status"
super().__init__(device, "Cube", xiaomi_hub, data_key, None, config_entry)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {ATTR_LAST_ACTION: self._last_action}
attrs.update(super().device_state_attributes)
return attrs
def parse_data(self, data, raw_data):
"""Parse data sent by gateway."""
if self._data_key in data:
self._hass.bus.fire(
"xiaomi_aqara.cube_action",
{"entity_id": self.entity_id, "action_type": data[self._data_key]},
)
self._last_action = data[self._data_key]
if "rotate" in data:
action_value = float(
data["rotate"]
if isinstance(data["rotate"], int)
else data["rotate"].replace(",", ".")
)
self._hass.bus.fire(
"xiaomi_aqara.cube_action",
{
"entity_id": self.entity_id,
"action_type": "rotate",
"action_value": action_value,
},
)
self._last_action = "rotate"
if "rotate_degree" in data:
action_value = float(
data["rotate_degree"]
if isinstance(data["rotate_degree"], int)
else data["rotate_degree"].replace(",", ".")
)
self._hass.bus.fire(
"xiaomi_aqara.cube_action",
{
"entity_id": self.entity_id,
"action_type": "rotate",
"action_value": action_value,
},
)
self._last_action = "rotate"
return True
|
import pandas as pd
from scattertext import FeatsFromSpacyDoc
class FeatsFromScoredLexicon(FeatsFromSpacyDoc):
def __init__(self,
lexicon_df,
use_lemmas=False,
**kwargs):
'''
Parameters
----------
lexicon_df: pd.DataFrame, Indexed on terms, columns are scores for each category
Other parameters from FeatsFromSpacyDoc.__init__
Example:
>>> print(lexicon_df)
activation imagery pleasantness
word
a 1.3846 1.0 2.0000
abandon 2.3750 2.4 1.0000
abandoned 2.1000 3.0 1.1429
abandonment 2.0000 1.4 1.0000
abated 1.3333 1.2 1.6667
'''
assert type(lexicon_df) == pd.DataFrame
self._lexicon_df = lexicon_df
super(FeatsFromScoredLexicon, self).__init__(use_lemmas, **kwargs)
def get_doc_metadata(self, doc, prefix=''):
'''
:param doc: spacy.Doc
:param prefix: str, default is ''
:return: pd.Series
'''
out_series = pd.merge(
pd.DataFrame(pd.Series([tok.lemma_ if self._use_lemmas else tok.lower_
for tok in doc]).value_counts(), columns=['count']),
self._lexicon_df, left_index=True, right_index=True
).drop(columns=['count']).mean(axis=0)
if prefix == '':
return out_series
return pd.Series(out_series.values, index=[prefix + x for x in out_series.index])
def has_metadata_term_list(self):
return True
def get_top_model_term_lists(self):
return {col: list(self._lexicon_df[col].sort_values(ascending=False).iloc[:10].index)
for col in self._lexicon_df}
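# Minimal usage sketch (illustrative, not part of the original module); the
# lexicon scores and the spacy pipeline passed in are assumptions.
def _example_scored_lexicon_usage(nlp):
    lexicon_df = pd.DataFrame(
        {'activation': [2.375, 1.3333], 'imagery': [2.4, 1.2]},
        index=['abandon', 'abated'],
    )
    feats = FeatsFromScoredLexicon(lexicon_df)
    doc = nlp('They abandon the plan once the storm abated.')
    # Mean lexicon score over the document tokens found in the lexicon index.
    return feats.get_doc_metadata(doc, prefix='lex_')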
|
import logging
from lmnotify import Model, SimpleFrame, Sound
from oauthlib.oauth2 import TokenExpiredError
from requests.exceptions import ConnectionError as RequestsConnectionError
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_ICON
import homeassistant.helpers.config_validation as cv
from . import DOMAIN as LAMETRIC_DOMAIN
_LOGGER = logging.getLogger(__name__)
AVAILABLE_PRIORITIES = ["info", "warning", "critical"]
AVAILABLE_ICON_TYPES = ["none", "info", "alert"]
CONF_CYCLES = "cycles"
CONF_LIFETIME = "lifetime"
CONF_PRIORITY = "priority"
CONF_ICON_TYPE = "icon_type"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ICON, default="a7956"): cv.string,
vol.Optional(CONF_LIFETIME, default=10): cv.positive_int,
vol.Optional(CONF_CYCLES, default=1): cv.positive_int,
vol.Optional(CONF_PRIORITY, default="warning"): vol.In(AVAILABLE_PRIORITIES),
vol.Optional(CONF_ICON_TYPE, default="info"): vol.In(AVAILABLE_ICON_TYPES),
}
)
def get_service(hass, config, discovery_info=None):
"""Get the LaMetric notification service."""
hlmn = hass.data.get(LAMETRIC_DOMAIN)
return LaMetricNotificationService(
hlmn,
config[CONF_ICON],
config[CONF_LIFETIME] * 1000,
config[CONF_CYCLES],
config[CONF_PRIORITY],
config[CONF_ICON_TYPE],
)
class LaMetricNotificationService(BaseNotificationService):
"""Implement the notification service for LaMetric."""
def __init__(
self, hasslametricmanager, icon, lifetime, cycles, priority, icon_type
):
"""Initialize the service."""
self.hasslametricmanager = hasslametricmanager
self._icon = icon
self._lifetime = lifetime
self._cycles = cycles
self._priority = priority
self._icon_type = icon_type
self._devices = []
def send_message(self, message="", **kwargs):
"""Send a message to some LaMetric device."""
targets = kwargs.get(ATTR_TARGET)
data = kwargs.get(ATTR_DATA)
_LOGGER.debug("Targets/Data: %s/%s", targets, data)
icon = self._icon
cycles = self._cycles
sound = None
priority = self._priority
icon_type = self._icon_type
# Additional data?
if data is not None:
if "icon" in data:
icon = data["icon"]
if "sound" in data:
try:
sound = Sound(category="notifications", sound_id=data["sound"])
_LOGGER.debug("Adding notification sound %s", data["sound"])
except AssertionError:
_LOGGER.error("Sound ID %s unknown, ignoring", data["sound"])
if "cycles" in data:
cycles = int(data["cycles"])
if "icon_type" in data:
if data["icon_type"] in AVAILABLE_ICON_TYPES:
icon_type = data["icon_type"]
else:
                    _LOGGER.warning(
                        "Icon type %s invalid, using default %s",
                        data["icon_type"],
                        icon_type,
                    )
if "priority" in data:
if data["priority"] in AVAILABLE_PRIORITIES:
priority = data["priority"]
else:
_LOGGER.warning(
"Priority %s invalid, using default %s",
data["priority"],
priority,
)
text_frame = SimpleFrame(icon, message)
_LOGGER.debug(
"Icon/Message/Cycles/Lifetime: %s, %s, %d, %d",
icon,
message,
            cycles,
self._lifetime,
)
frames = [text_frame]
model = Model(frames=frames, cycles=cycles, sound=sound)
lmn = self.hasslametricmanager.manager
try:
self._devices = lmn.get_devices()
except TokenExpiredError:
_LOGGER.debug("Token expired, fetching new token")
lmn.get_token()
self._devices = lmn.get_devices()
except RequestsConnectionError:
_LOGGER.warning(
"Problem connecting to LaMetric, using cached devices instead"
)
for dev in self._devices:
if targets is None or dev["name"] in targets:
try:
lmn.set_device(dev)
lmn.send_notification(
model,
lifetime=self._lifetime,
priority=priority,
icon_type=icon_type,
)
_LOGGER.debug("Sent notification to LaMetric %s", dev["name"])
except OSError:
_LOGGER.warning("Cannot connect to LaMetric %s", dev["name"])
|
import asyncio
from collections import OrderedDict
from datetime import timedelta
from typing import Any, Dict, List, Optional, Tuple, cast
import jwt
from homeassistant import data_entry_flow
from homeassistant.auth.const import ACCESS_TOKEN_EXPIRATION
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import dt as dt_util
from . import auth_store, models
from .const import GROUP_ID_ADMIN
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
EVENT_USER_ADDED = "user_added"
EVENT_USER_REMOVED = "user_removed"
_MfaModuleDict = Dict[str, MultiFactorAuthModule]
_ProviderKey = Tuple[str, Optional[str]]
_ProviderDict = Dict[_ProviderKey, AuthProvider]
async def auth_manager_from_config(
hass: HomeAssistant,
provider_configs: List[Dict[str, Any]],
module_configs: List[Dict[str, Any]],
) -> "AuthManager":
"""Initialize an auth manager from config.
    CORE_CONFIG_SCHEMA will make sure no duplicated auth providers or
    mfa modules exist in the configs.
"""
store = auth_store.AuthStore(hass)
if provider_configs:
providers = await asyncio.gather(
*(
auth_provider_from_config(hass, store, config)
for config in provider_configs
)
)
else:
providers = []
# So returned auth providers are in same order as config
provider_hash: _ProviderDict = OrderedDict()
for provider in providers:
key = (provider.type, provider.id)
provider_hash[key] = provider
if module_configs:
modules = await asyncio.gather(
*(auth_mfa_module_from_config(hass, config) for config in module_configs)
)
else:
modules = []
# So returned auth modules are in same order as config
module_hash: _MfaModuleDict = OrderedDict()
for module in modules:
module_hash[module.id] = module
manager = AuthManager(hass, store, provider_hash, module_hash)
return manager
class AuthManagerFlowManager(data_entry_flow.FlowManager):
"""Manage authentication flows."""
def __init__(self, hass: HomeAssistant, auth_manager: "AuthManager"):
"""Init auth manager flows."""
super().__init__(hass)
self.auth_manager = auth_manager
async def async_create_flow(
self,
handler_key: Any,
*,
context: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None,
) -> data_entry_flow.FlowHandler:
"""Create a login flow."""
auth_provider = self.auth_manager.get_auth_provider(*handler_key)
if not auth_provider:
raise KeyError(f"Unknown auth provider {handler_key}")
return await auth_provider.async_login_flow(context)
async def async_finish_flow(
self, flow: data_entry_flow.FlowHandler, result: Dict[str, Any]
) -> Dict[str, Any]:
"""Return a user as result of login flow."""
flow = cast(LoginFlow, flow)
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
return result
# we got final result
if isinstance(result["data"], models.User):
result["result"] = result["data"]
return result
auth_provider = self.auth_manager.get_auth_provider(*result["handler"])
if not auth_provider:
raise KeyError(f"Unknown auth provider {result['handler']}")
credentials = await auth_provider.async_get_or_create_credentials(
result["data"]
)
if flow.context.get("credential_only"):
result["result"] = credentials
return result
        # A multi-factor auth module cannot be enabled for a new credential
        # that has not been linked to a user yet
if auth_provider.support_mfa and not credentials.is_new:
user = await self.auth_manager.async_get_user_by_credentials(credentials)
if user is not None:
modules = await self.auth_manager.async_get_enabled_mfa(user)
if modules:
flow.user = user
flow.available_mfa_modules = modules
return await flow.async_step_select_mfa_module()
result["result"] = await self.auth_manager.async_get_or_create_user(credentials)
return result
class AuthManager:
"""Manage the authentication for Home Assistant."""
def __init__(
self,
hass: HomeAssistant,
store: auth_store.AuthStore,
providers: _ProviderDict,
mfa_modules: _MfaModuleDict,
) -> None:
"""Initialize the auth manager."""
self.hass = hass
self._store = store
self._providers = providers
self._mfa_modules = mfa_modules
self.login_flow = AuthManagerFlowManager(hass, self)
@property
def auth_providers(self) -> List[AuthProvider]:
"""Return a list of available auth providers."""
return list(self._providers.values())
@property
def auth_mfa_modules(self) -> List[MultiFactorAuthModule]:
"""Return a list of available auth modules."""
return list(self._mfa_modules.values())
def get_auth_provider(
self, provider_type: str, provider_id: str
) -> Optional[AuthProvider]:
"""Return an auth provider, None if not found."""
return self._providers.get((provider_type, provider_id))
def get_auth_providers(self, provider_type: str) -> List[AuthProvider]:
"""Return a List of auth provider of one type, Empty if not found."""
return [
provider
for (p_type, _), provider in self._providers.items()
if p_type == provider_type
]
def get_auth_mfa_module(self, module_id: str) -> Optional[MultiFactorAuthModule]:
"""Return a multi-factor auth module, None if not found."""
return self._mfa_modules.get(module_id)
async def async_get_users(self) -> List[models.User]:
"""Retrieve all users."""
return await self._store.async_get_users()
async def async_get_user(self, user_id: str) -> Optional[models.User]:
"""Retrieve a user."""
return await self._store.async_get_user(user_id)
async def async_get_owner(self) -> Optional[models.User]:
"""Retrieve the owner."""
users = await self.async_get_users()
return next((user for user in users if user.is_owner), None)
async def async_get_group(self, group_id: str) -> Optional[models.Group]:
"""Retrieve all groups."""
return await self._store.async_get_group(group_id)
async def async_get_user_by_credentials(
self, credentials: models.Credentials
) -> Optional[models.User]:
"""Get a user by credential, return None if not found."""
for user in await self.async_get_users():
for creds in user.credentials:
if creds.id == credentials.id:
return user
return None
async def async_create_system_user(
self, name: str, group_ids: Optional[List[str]] = None
) -> models.User:
"""Create a system user."""
user = await self._store.async_create_user(
name=name, system_generated=True, is_active=True, group_ids=group_ids or []
)
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
return user
async def async_create_user(
self, name: str, group_ids: Optional[List[str]] = None
) -> models.User:
"""Create a user."""
kwargs: Dict[str, Any] = {
"name": name,
"is_active": True,
"group_ids": group_ids or [],
}
if await self._user_should_be_owner():
kwargs["is_owner"] = True
user = await self._store.async_create_user(**kwargs)
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
return user
async def async_get_or_create_user(
self, credentials: models.Credentials
) -> models.User:
"""Get or create a user."""
if not credentials.is_new:
user = await self.async_get_user_by_credentials(credentials)
if user is None:
raise ValueError("Unable to find the user.")
return user
auth_provider = self._async_get_auth_provider(credentials)
if auth_provider is None:
raise RuntimeError("Credential with unknown provider encountered")
info = await auth_provider.async_user_meta_for_credentials(credentials)
user = await self._store.async_create_user(
credentials=credentials,
name=info.name,
is_active=info.is_active,
group_ids=[GROUP_ID_ADMIN],
)
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
return user
async def async_link_user(
self, user: models.User, credentials: models.Credentials
) -> None:
"""Link credentials to an existing user."""
await self._store.async_link_user(user, credentials)
async def async_remove_user(self, user: models.User) -> None:
"""Remove a user."""
tasks = [
self.async_remove_credentials(credentials)
for credentials in user.credentials
]
if tasks:
await asyncio.wait(tasks)
await self._store.async_remove_user(user)
self.hass.bus.async_fire(EVENT_USER_REMOVED, {"user_id": user.id})
async def async_update_user(
self,
user: models.User,
name: Optional[str] = None,
group_ids: Optional[List[str]] = None,
) -> None:
"""Update a user."""
kwargs: Dict[str, Any] = {}
if name is not None:
kwargs["name"] = name
if group_ids is not None:
kwargs["group_ids"] = group_ids
await self._store.async_update_user(user, **kwargs)
async def async_activate_user(self, user: models.User) -> None:
"""Activate a user."""
await self._store.async_activate_user(user)
async def async_deactivate_user(self, user: models.User) -> None:
"""Deactivate a user."""
if user.is_owner:
raise ValueError("Unable to deactivate the owner")
await self._store.async_deactivate_user(user)
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
"""Remove credentials."""
provider = self._async_get_auth_provider(credentials)
if provider is not None and hasattr(provider, "async_will_remove_credentials"):
# https://github.com/python/mypy/issues/1424
await provider.async_will_remove_credentials(credentials) # type: ignore
await self._store.async_remove_credentials(credentials)
async def async_enable_user_mfa(
self, user: models.User, mfa_module_id: str, data: Any
) -> None:
"""Enable a multi-factor auth module for user."""
if user.system_generated:
raise ValueError(
"System generated users cannot enable multi-factor auth module."
)
module = self.get_auth_mfa_module(mfa_module_id)
if module is None:
raise ValueError(f"Unable find multi-factor auth module: {mfa_module_id}")
await module.async_setup_user(user.id, data)
async def async_disable_user_mfa(
self, user: models.User, mfa_module_id: str
) -> None:
"""Disable a multi-factor auth module for user."""
if user.system_generated:
raise ValueError(
"System generated users cannot disable multi-factor auth module."
)
module = self.get_auth_mfa_module(mfa_module_id)
if module is None:
raise ValueError(f"Unable find multi-factor auth module: {mfa_module_id}")
await module.async_depose_user(user.id)
async def async_get_enabled_mfa(self, user: models.User) -> Dict[str, str]:
"""List enabled mfa modules for user."""
modules: Dict[str, str] = OrderedDict()
for module_id, module in self._mfa_modules.items():
if await module.async_is_user_setup(user.id):
modules[module_id] = module.name
return modules
async def async_create_refresh_token(
self,
user: models.User,
client_id: Optional[str] = None,
client_name: Optional[str] = None,
client_icon: Optional[str] = None,
token_type: Optional[str] = None,
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
) -> models.RefreshToken:
"""Create a new refresh token for a user."""
if not user.is_active:
raise ValueError("User is not active")
if user.system_generated and client_id is not None:
raise ValueError(
"System generated users cannot have refresh tokens connected "
"to a client."
)
if token_type is None:
if user.system_generated:
token_type = models.TOKEN_TYPE_SYSTEM
else:
token_type = models.TOKEN_TYPE_NORMAL
if user.system_generated != (token_type == models.TOKEN_TYPE_SYSTEM):
raise ValueError(
"System generated users can only have system type refresh tokens"
)
if token_type == models.TOKEN_TYPE_NORMAL and client_id is None:
raise ValueError("Client is required to generate a refresh token.")
if (
token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
and client_name is None
):
raise ValueError("Client_name is required for long-lived access token")
if token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN:
for token in user.refresh_tokens.values():
if (
token.client_name == client_name
and token.token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
):
# Each client_name can only have one
# long_lived_access_token type of refresh token
raise ValueError(f"{client_name} already exists")
return await self._store.async_create_refresh_token(
user,
client_id,
client_name,
client_icon,
token_type,
access_token_expiration,
)
async def async_get_refresh_token(
self, token_id: str
) -> Optional[models.RefreshToken]:
"""Get refresh token by id."""
return await self._store.async_get_refresh_token(token_id)
async def async_get_refresh_token_by_token(
self, token: str
) -> Optional[models.RefreshToken]:
"""Get refresh token by token."""
return await self._store.async_get_refresh_token_by_token(token)
async def async_remove_refresh_token(
self, refresh_token: models.RefreshToken
) -> None:
"""Delete a refresh token."""
await self._store.async_remove_refresh_token(refresh_token)
@callback
def async_create_access_token(
self, refresh_token: models.RefreshToken, remote_ip: Optional[str] = None
) -> str:
"""Create a new access token."""
self._store.async_log_refresh_token_usage(refresh_token, remote_ip)
now = dt_util.utcnow()
return jwt.encode(
{
"iss": refresh_token.id,
"iat": now,
"exp": now + refresh_token.access_token_expiration,
},
refresh_token.jwt_key,
algorithm="HS256",
).decode()
async def async_validate_access_token(
self, token: str
) -> Optional[models.RefreshToken]:
"""Return refresh token if an access token is valid."""
try:
unverif_claims = jwt.decode(token, verify=False)
except jwt.InvalidTokenError:
return None
refresh_token = await self.async_get_refresh_token(
cast(str, unverif_claims.get("iss"))
)
if refresh_token is None:
jwt_key = ""
issuer = ""
else:
jwt_key = refresh_token.jwt_key
issuer = refresh_token.id
try:
jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
except jwt.InvalidTokenError:
return None
if refresh_token is None or not refresh_token.user.is_active:
return None
return refresh_token
@callback
def _async_get_auth_provider(
self, credentials: models.Credentials
) -> Optional[AuthProvider]:
"""Get auth provider from a set of credentials."""
auth_provider_key = (
credentials.auth_provider_type,
credentials.auth_provider_id,
)
return self._providers.get(auth_provider_key)
async def _user_should_be_owner(self) -> bool:
"""Determine if user should be owner.
A user should be an owner if it is the first non-system user that is
being created.
"""
for user in await self._store.async_get_users():
if not user.system_generated:
return False
return True
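# Illustrative, standalone sketch (not part of Home Assistant): mirrors how
# async_create_access_token mints a JWT signed with a refresh token's jwt_key
# and how async_validate_access_token later verifies it. The key and issuer
# id below are made up.
if __name__ == "__main__":
    _demo_key = "example-secret"
    _now = dt_util.utcnow()
    _encoded = jwt.encode(
        {
            "iss": "example-refresh-token-id",
            "iat": _now,
            "exp": _now + timedelta(minutes=30),
        },
        _demo_key,
        algorithm="HS256",
    )
    _claims = jwt.decode(_encoded, _demo_key, algorithms=["HS256"])
    print(_claims["iss"])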
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from mogilefs import MogilefsCollector
################################################################################
class TestMogilefsCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('MogilefsCollector', {
'interval': 10
})
self.collector = MogilefsCollector(config, None)
def test_import(self):
self.assertTrue(MogilefsCollector)
@patch.object(Collector, 'publish')
def test_stub_data(self, publish_mock):
mockTelnet = Mock(**{'read_until.return_value':
self.getFixture('stats').getvalue()})
patch_Telnet = patch('telnetlib.Telnet', Mock(return_value=mockTelnet))
patch_Telnet.start()
self.collector.collect()
patch_Telnet.stop()
mockTelnet.read_until.assert_any_call('.', 3)
metrics = {
'uptime': 181491,
'pending_queries': 0,
'processing_queries': 1,
'bored_queryworkers': 49,
'queries': 4353158,
'times_out_of_qworkers': 2,
'work_queue_for_delete': 2,
'work_queue_for_replicate': 0,
'work_sent_to_delete': 336154,
'work_sent_to_replicate': 274882
}
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
|
from homeassistant.config_entries import ENTRY_STATE_LOADED
from homeassistant.const import STATE_OFF, STATE_ON
from tests.components.plugwise.common import async_init_integration
async def test_anna_climate_binary_sensor_entities(hass, mock_smile_anna):
"""Test creation of climate related binary_sensor entities."""
entry = await async_init_integration(hass, mock_smile_anna)
assert entry.state == ENTRY_STATE_LOADED
state = hass.states.get("binary_sensor.auxiliary_slave_boiler_state")
assert str(state.state) == STATE_OFF
state = hass.states.get("binary_sensor.auxiliary_dhw_state")
assert str(state.state) == STATE_OFF
async def test_anna_climate_binary_sensor_change(hass, mock_smile_anna):
"""Test change of climate related binary_sensor entities."""
entry = await async_init_integration(hass, mock_smile_anna)
assert entry.state == ENTRY_STATE_LOADED
hass.states.async_set("binary_sensor.auxiliary_dhw_state", STATE_ON, {})
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.auxiliary_dhw_state")
assert str(state.state) == STATE_ON
await hass.helpers.entity_component.async_update_entity(
"binary_sensor.auxiliary_dhw_state"
)
state = hass.states.get("binary_sensor.auxiliary_dhw_state")
assert str(state.state) == STATE_OFF
|
from tests.async_mock import AsyncMock, Mock
def build_device_info_mock(
name="fake-device-1", ipAddress="1.1.1.1", mac="aabbcc112233"
):
"""Build mock device info structure."""
mock = Mock(ip=ipAddress, port=7000, mac=mac)
mock.name = name
return mock
def build_device_mock(name="fake-device-1", ipAddress="1.1.1.1", mac="aabbcc112233"):
"""Build mock device object."""
mock = Mock(
device_info=build_device_info_mock(name, ipAddress, mac),
name=name,
bind=AsyncMock(),
update_state=AsyncMock(),
push_state_update=AsyncMock(),
temperature_units=0,
mode=0,
fan_speed=0,
horizontal_swing=0,
vertical_swing=0,
target_temperature=25,
power=False,
sleep=False,
quiet=False,
turbo=False,
power_save=False,
steady_heat=False,
)
return mock
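# Illustrative sketch (not part of the test helpers): shows the attributes a
# test receives from build_device_mock(); the values mirror the defaults above.
if __name__ == "__main__":
    device = build_device_mock(name="living-room")
    print(device.name, device.device_info.ip, device.target_temperature)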
|
import pytest
from jinja2 import Environment
from jinja2 import escape
from jinja2.exceptions import SecurityError
from jinja2.exceptions import TemplateRuntimeError
from jinja2.exceptions import TemplateSyntaxError
from jinja2.nodes import EvalContext
from jinja2.sandbox import ImmutableSandboxedEnvironment
from jinja2.sandbox import SandboxedEnvironment
from jinja2.sandbox import unsafe
class PrivateStuff:
def bar(self):
return 23
@unsafe
def foo(self):
return 42
def __repr__(self):
return "PrivateStuff"
class PublicStuff:
def bar(self):
return 23
def _foo(self):
return 42
def __repr__(self):
return "PublicStuff"
class TestSandbox:
def test_unsafe(self, env):
env = SandboxedEnvironment()
pytest.raises(
SecurityError, env.from_string("{{ foo.foo() }}").render, foo=PrivateStuff()
)
assert env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()) == "23"
pytest.raises(
SecurityError, env.from_string("{{ foo._foo() }}").render, foo=PublicStuff()
)
assert env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()) == "23"
assert env.from_string("{{ foo.__class__ }}").render(foo=42) == ""
assert env.from_string("{{ foo.func_code }}").render(foo=lambda: None) == ""
# security error comes from __class__ already.
pytest.raises(
SecurityError,
env.from_string("{{ foo.__class__.__subclasses__() }}").render,
foo=42,
)
def test_immutable_environment(self, env):
env = ImmutableSandboxedEnvironment()
pytest.raises(SecurityError, env.from_string("{{ [].append(23) }}").render)
pytest.raises(SecurityError, env.from_string("{{ {1:2}.clear() }}").render)
def test_restricted(self, env):
env = SandboxedEnvironment()
pytest.raises(
TemplateSyntaxError,
env.from_string,
"{% for item.attribute in seq %}...{% endfor %}",
)
pytest.raises(
TemplateSyntaxError,
env.from_string,
"{% for foo, bar.baz in seq %}...{% endfor %}",
)
def test_template_data(self, env):
env = Environment(autoescape=True)
t = env.from_string(
"{% macro say_hello(name) %}"
"<p>Hello {{ name }}!</p>{% endmacro %}"
'{{ say_hello("<blink>foo</blink>") }}'
)
escaped_out = "<p>Hello <blink>foo</blink>!</p>"
assert t.render() == escaped_out
assert str(t.module) == escaped_out
assert escape(t.module) == escaped_out
assert t.module.say_hello("<blink>foo</blink>") == escaped_out
assert (
escape(t.module.say_hello(EvalContext(env), "<blink>foo</blink>"))
== escaped_out
)
assert escape(t.module.say_hello("<blink>foo</blink>")) == escaped_out
def test_attr_filter(self, env):
env = SandboxedEnvironment()
tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
pytest.raises(SecurityError, tmpl.render, cls=int)
def test_binary_operator_intercepting(self, env):
def disable_op(left, right):
raise TemplateRuntimeError("that operator so does not work")
for expr, ctx, rv in ("1 + 2", {}, "3"), ("a + 2", {"a": 2}, "4"):
env = SandboxedEnvironment()
env.binop_table["+"] = disable_op
t = env.from_string(f"{{{{ {expr} }}}}")
assert t.render(ctx) == rv
env.intercepted_binops = frozenset(["+"])
t = env.from_string(f"{{{{ {expr} }}}}")
with pytest.raises(TemplateRuntimeError):
t.render(ctx)
def test_unary_operator_intercepting(self, env):
def disable_op(arg):
raise TemplateRuntimeError("that operator so does not work")
for expr, ctx, rv in ("-1", {}, "-1"), ("-a", {"a": 2}, "-2"):
env = SandboxedEnvironment()
env.unop_table["-"] = disable_op
t = env.from_string(f"{{{{ {expr} }}}}")
assert t.render(ctx) == rv
env.intercepted_unops = frozenset(["-"])
t = env.from_string(f"{{{{ {expr} }}}}")
with pytest.raises(TemplateRuntimeError):
t.render(ctx)
class TestStringFormat:
def test_basic_format_safety(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{0.__class__}b".format(42) }}')
assert t.render() == "ab"
def test_basic_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{0.foo}b".format({"foo": 42}) }}')
assert t.render() == "a42b"
def test_safe_format_safety(self):
env = SandboxedEnvironment()
t = env.from_string('{{ ("a{0.__class__}b{1}"|safe).format(42, "<foo>") }}')
assert t.render() == "ab<foo>"
def test_safe_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string('{{ ("a{0.foo}b{1}"|safe).format({"foo": 42}, "<foo>") }}')
assert t.render() == "a42b<foo>"
class TestStringFormatMap:
def test_basic_format_safety(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{x.__class__}b".format_map({"x":42}) }}')
assert t.render() == "ab"
def test_basic_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string('{{ "a{x.foo}b".format_map({"x":{"foo": 42}}) }}')
assert t.render() == "a42b"
def test_safe_format_all_okay(self):
env = SandboxedEnvironment()
t = env.from_string(
'{{ ("a{x.foo}b{y}"|safe).format_map({"x":{"foo": 42}, "y":"<foo>"}) }}'
)
assert t.render() == "a42b<foo>"
|
from collections import namedtuple
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.rest.data import RestData
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_DATE,
ATTR_TEMPERATURE,
ATTR_TIME,
ATTR_VOLTAGE,
CONF_API_KEY,
CONF_NAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
_ENDPOINT = "http://pvoutput.org/service/r2/getstatus.jsp"
ATTR_ENERGY_GENERATION = "energy_generation"
ATTR_POWER_GENERATION = "power_generation"
ATTR_ENERGY_CONSUMPTION = "energy_consumption"
ATTR_POWER_CONSUMPTION = "power_consumption"
ATTR_EFFICIENCY = "efficiency"
CONF_SYSTEM_ID = "system_id"
DEFAULT_NAME = "PVOutput"
DEFAULT_VERIFY_SSL = True
SCAN_INTERVAL = timedelta(minutes=2)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_SYSTEM_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the PVOutput sensor."""
name = config.get(CONF_NAME)
api_key = config.get(CONF_API_KEY)
system_id = config.get(CONF_SYSTEM_ID)
method = "GET"
payload = auth = None
verify_ssl = DEFAULT_VERIFY_SSL
headers = {"X-Pvoutput-Apikey": api_key, "X-Pvoutput-SystemId": system_id}
rest = RestData(method, _ENDPOINT, auth, headers, payload, verify_ssl)
await rest.async_update()
if rest.data is None:
_LOGGER.error("Unable to fetch data from PVOutput")
return False
async_add_entities([PvoutputSensor(rest, name)], True)
class PvoutputSensor(Entity):
"""Representation of a PVOutput sensor."""
def __init__(self, rest, name):
"""Initialize a PVOutput sensor."""
self.rest = rest
self._name = name
self.pvcoutput = None
self.status = namedtuple(
"status",
[
ATTR_DATE,
ATTR_TIME,
ATTR_ENERGY_GENERATION,
ATTR_POWER_GENERATION,
ATTR_ENERGY_CONSUMPTION,
ATTR_POWER_CONSUMPTION,
ATTR_EFFICIENCY,
ATTR_TEMPERATURE,
ATTR_VOLTAGE,
],
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self.pvcoutput is not None:
return self.pvcoutput.energy_generation
return None
@property
def device_state_attributes(self):
"""Return the state attributes of the monitored installation."""
if self.pvcoutput is not None:
return {
ATTR_ENERGY_GENERATION: self.pvcoutput.energy_generation,
ATTR_POWER_GENERATION: self.pvcoutput.power_generation,
ATTR_ENERGY_CONSUMPTION: self.pvcoutput.energy_consumption,
ATTR_POWER_CONSUMPTION: self.pvcoutput.power_consumption,
ATTR_EFFICIENCY: self.pvcoutput.efficiency,
ATTR_TEMPERATURE: self.pvcoutput.temperature,
ATTR_VOLTAGE: self.pvcoutput.voltage,
}
async def async_update(self):
"""Get the latest data from the PVOutput API and updates the state."""
try:
await self.rest.async_update()
self.pvcoutput = self.status._make(self.rest.data.split(","))
except TypeError:
self.pvcoutput = None
_LOGGER.error("Unable to fetch data from PVOutput. %s", self.rest.data)
async def async_will_remove_from_hass(self):
"""Shutdown the session."""
await self.rest.async_remove()
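# Illustrative sketch (not part of the integration): the getstatus endpoint
# returns a single CSV line, and status._make maps its nine fields onto named
# attributes. The sample values below are invented.
if __name__ == "__main__":
    demo_sensor = PvoutputSensor(None, "demo")
    demo_line = "20210101,14:10,5231,2750,4120,1980,0.53,21.5,230.1"
    demo_sensor.pvcoutput = demo_sensor.status._make(demo_line.split(","))
    print(demo_sensor.pvcoutput.energy_generation, demo_sensor.pvcoutput.voltage)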
|
import aiolifx
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
async def _async_has_devices(hass):
"""Return if there are devices that can be discovered."""
lifx_ip_addresses = await aiolifx.LifxScan(hass.loop).scan()
return len(lifx_ip_addresses) > 0
config_entry_flow.register_discovery_flow(
DOMAIN, "LIFX", _async_has_devices, config_entries.CONN_CLASS_LOCAL_POLL
)
|
import logging
from typing import Any
from typing import Sequence
from kubernetes.client import V1Deployment
from kubernetes.client import V1StatefulSet
from paasta_tools.kubernetes.application.controller_wrappers import Application
from paasta_tools.kubernetes.application.controller_wrappers import DeploymentWrapper
from paasta_tools.kubernetes.application.controller_wrappers import StatefulSetWrapper
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import paasta_prefixed
log = logging.getLogger(__name__)
def is_valid_application(deployment: V1Deployment):
is_valid = True
missing = []
for attr in ["service", "instance", "git_sha", "config_sha"]:
prefixed_attr = paasta_prefixed(attr)
if prefixed_attr not in deployment.metadata.labels:
is_valid = False
missing.append(prefixed_attr)
if missing:
log.warning(
f"deployment/{deployment.metadata.name} in "
f"namespace/{deployment.metadata.namespace} "
f"is missing following labels: {missing}"
)
return is_valid
def list_namespaced_deployments(
kube_client: KubeClient, namespace: str, **kwargs
) -> Sequence[DeploymentWrapper]:
return [
DeploymentWrapper(deployment)
for deployment in kube_client.deployments.list_namespaced_deployment(
namespace, **kwargs
).items
if is_valid_application(deployment)
]
def list_namespaced_stateful_sets(
kube_client: KubeClient, namespace: str, **kwargs
) -> Sequence[StatefulSetWrapper]:
return [
StatefulSetWrapper(deployment)
for deployment in kube_client.deployments.list_namespaced_stateful_set(
namespace, **kwargs
).items
if is_valid_application(deployment)
]
def list_namespaced_applications(
kube_client: KubeClient, namespace: str, application_types: Sequence[Any]
) -> Sequence[Application]:
"""
List all applications in the namespace of the types from application_types.
    Only applications with a complete set of labels are included (see is_valid_application()).
:param kube_client:
:param namespace:
:param application_types: types of applications
:return:
"""
apps = [] # type: ignore
for application_type in application_types:
if application_type == V1Deployment:
apps.extend(list_namespaced_deployments(kube_client, namespace))
elif application_type == V1StatefulSet:
apps.extend(list_namespaced_stateful_sets(kube_client, namespace))
return apps
|
from __future__ import print_function
import os
import sys
from roscreate.core import author_name
from roscreate.core import read_template
import roslib.names
from rospkg import ResourceNotFound
from rospkg import RosPack
from rospkg import on_ros_path
NAME = 'roscreate-pkg'
def get_templates():
templates = {}
templates['CMakeLists.txt'] = read_template('CMakeLists.tmpl')
templates['manifest.xml'] = read_template('manifest.tmpl')
templates['mainpage.dox'] = read_template('mainpage.tmpl')
templates['Makefile'] = read_template('Makefile.tmpl')
return templates
def instantiate_template(template, package, brief, description, author, depends):
return template % locals()
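# Illustrative sketch (not part of roscreate): instantiate_template fills a
# %-style template from its arguments via locals(). The template text and
# values below are invented.
if __name__ == '__main__':
    demo_manifest = instantiate_template(
        '<package>\n'
        '  <description brief="%(brief)s">%(description)s</description>\n'
        '  <author>%(author)s</author>\n'
        '%(depends)s'
        '</package>\n',
        package='demo_pkg', brief='demo', description='A demo package',
        author='Jane Doe', depends='  <depend package="rospy"/>\n')
    print(demo_manifest)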
def create_package(package, author, depends, uses_roscpp=False, uses_rospy=False):
p = os.path.abspath(package)
if os.path.exists(p):
print('%s already exists, aborting' % p, file=sys.stderr)
sys.exit(1)
os.makedirs(p)
print('Created package directory', p)
if uses_roscpp:
# create package/include/package and package/src for roscpp code
cpp_path = os.path.join(p, 'include', package)
try:
os.makedirs(cpp_path)
print('Created include directory', cpp_path)
cpp_path = os.path.join(p, 'src')
os.makedirs(cpp_path)
print('Created cpp source directory', cpp_path)
except Exception:
# file exists
pass
if uses_rospy:
# create package/src/ for python files
py_path = os.path.join(p, 'src')
try:
os.makedirs(py_path)
print('Created python source directory', py_path)
except Exception:
# file exists
pass
templates = get_templates()
for filename, template in templates.items():
contents = instantiate_template(template, package, package, package, author, depends)
p = os.path.abspath(os.path.join(package, filename))
with open(p, 'wb') as f:
f.write(contents.encode('utf-8'))
print('Created package file', p)
print('\nPlease edit %s/manifest.xml and mainpage.dox to finish creating your package' % package)
def roscreatepkg_main():
from optparse import OptionParser
parser = OptionParser(usage='usage: %prog <package-name> [dependencies...]', prog=NAME)
options, args = parser.parse_args()
if not args:
parser.error('you must specify a package name and optionally also list package dependencies')
package = args[0]
if not roslib.names.is_legal_resource_base_name(package):
parser.error('illegal package name: %s\nNames must start with a letter and contain only alphanumeric characters\nand underscores.' % package)
# validate dependencies and turn into XML
depends = args[1:]
uses_roscpp = 'roscpp' in depends
uses_rospy = 'rospy' in depends
rospack = RosPack()
for d in depends:
try:
rospack.get_path(d)
except ResourceNotFound:
print('ERROR: dependency [%s] cannot be found' % d, file=sys.stderr)
sys.exit(1)
depends = u''.join([u' <depend package="%s"/>\n' % d for d in depends])
if not on_ros_path(os.getcwd()):
print('!'*80+'\nWARNING: current working directory is not on ROS_PACKAGE_PATH!\nPlease update your ROS_PACKAGE_PATH environment variable.\n'+'!'*80, file=sys.stderr)
create_package(package, author_name(), depends, uses_roscpp=uses_roscpp, uses_rospy=uses_rospy)
|
import itertools
import json
import os
import sys
# sys.path varies across execution environments. Coverage.py uses setuptools to
# make console scripts, which means pkg_resources is imported. pkg_resources
# removes duplicate entries from sys.path. So we do that too, since the extra
# entries don't affect the running of the program.
def same_file(p1, p2):
"""Determine if `p1` and `p2` refer to the same existing file."""
if not p1:
return not p2
if not os.path.exists(p1):
return False
if not os.path.exists(p2):
return False
if hasattr(os.path, "samefile"):
return os.path.samefile(p1, p2)
else:
norm1 = os.path.normcase(os.path.normpath(p1))
norm2 = os.path.normcase(os.path.normpath(p2))
return norm1 == norm2
def without_same_files(filenames):
"""Return the list `filenames` with duplicates (by same_file) removed."""
reduced = []
for filename in filenames:
if not any(same_file(filename, other) for other in reduced):
reduced.append(filename)
return reduced
cleaned_sys_path = [os.path.normcase(p) for p in without_same_files(sys.path)]
DATA = "xyzzy"
import __main__
def my_function(a):
"""A function to force execution of module-level values."""
return "my_fn(%r)" % a
FN_VAL = my_function("fooey")
loader = globals().get('__loader__')
spec = globals().get('__spec__')
# A more compact ad-hoc grouped-by-first-letter list of builtins.
CLUMPS = "ABC,DEF,GHI,JKLMN,OPQR,ST,U,VWXYZ_,ab,cd,efg,hij,lmno,pqr,stuvwxyz".split(",")
def word_group(w):
"""Figure out which CLUMP the first letter of w is in."""
for i, clump in enumerate(CLUMPS):
if w[0] in clump:
return i
return 99
builtin_dir = [" ".join(s) for _, s in itertools.groupby(dir(__builtins__), key=word_group)]
globals_to_check = {
'os.getcwd': os.getcwd(),
'__name__': __name__,
'__file__': __file__,
'__doc__': __doc__,
'__builtins__.has_open': hasattr(__builtins__, 'open'),
'__builtins__.dir': builtin_dir,
'__loader__ exists': loader is not None,
'__package__': __package__,
'__spec__ exists': spec is not None,
'DATA': DATA,
'FN_VAL': FN_VAL,
'__main__.DATA': getattr(__main__, "DATA", "nothing"),
'argv0': sys.argv[0],
'argv1-n': sys.argv[1:],
'path': cleaned_sys_path,
}
if loader is not None:
globals_to_check.update({
'__loader__.fullname': getattr(loader, 'fullname', None) or getattr(loader, 'name', None)
})
if spec is not None:
globals_to_check.update({
'__spec__.' + aname: getattr(spec, aname)
for aname in ['name', 'origin', 'submodule_search_locations', 'parent', 'has_location']
})
print(json.dumps(globals_to_check, indent=4, sort_keys=True))
|
import copy
import functools
import logging
import re
from absl import flags
import contextlib2
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
import six
import yaml
FLAGS = flags.FLAGS
CONFIG_CONSTANTS = 'default_config_constants.yaml'
FLAGS_KEY = 'flags'
IMPORT_REGEX = re.compile('^#import (.*)')
flags.DEFINE_string('benchmark_config_file', None,
'The file path to the user config file which will '
'override benchmark defaults. This should either be '
'a path relative to the current working directory, '
'an absolute path, or just the name of a file in the '
'configs/ directory.')
flags.DEFINE_multi_string(
'config_override', None,
'This flag can be used to override any config value. It is applied after '
    'the user config (specified via --benchmark_config_file), so it has '
'a higher priority than that config. The value of the flag should be '
'fully.qualified.key=value (e.g. --config_override=cluster_boot.vm_groups.'
'default.vm_count=4).')
class _ConcatenatedFiles(object):
"""Class that presents several files as a single object.
The class exposes a single method (read) which is all that yaml
needs to interact with a stream.
Attributes:
files: A list of opened file objects.
current_file_index: The index of the current file that is being read from.
"""
def __init__(self, files):
self.files = files
self.current_file_index = 0
def read(self, length):
data = self.files[self.current_file_index].read(length)
while (not data) and (self.current_file_index + 1 < len(self.files)):
self.current_file_index += 1
data = self.files[self.current_file_index].read(length)
return data
def _GetImportFiles(config_file, imported_set=None):
"""Get a list of file names that get imported from config_file.
Args:
config_file: The name of a config file to find imports for.
imported_set: A set of files that _GetImportFiles has already
been called on that should be ignored.
Returns:
A list of file names that are imported by config_file
(including config_file itself).
"""
imported_set = imported_set or set()
config_path = data.ResourcePath(config_file)
# Give up on circular imports.
if config_path in imported_set:
return []
imported_set.add(config_path)
with open(config_path) as f:
line = f.readline()
match = IMPORT_REGEX.match(line)
import_files = []
while match:
import_file = match.group(1)
for file_name in _GetImportFiles(import_file, imported_set):
if file_name not in import_files:
import_files.append(file_name)
line = f.readline()
match = IMPORT_REGEX.match(line)
import_files.append(config_path)
return import_files
def _LoadUserConfig(path):
"""Loads a user config from the supplied path."""
config_files = _GetImportFiles(path)
with contextlib2.ExitStack() as stack:
files = [stack.enter_context(open(f)) for f in config_files]
return yaml.safe_load(_ConcatenatedFiles(files))
@functools.lru_cache()
def _LoadConfigConstants():
"""Reads the config constants file."""
with open(data.ResourcePath(CONFIG_CONSTANTS, False)) as fp:
return fp.read()
def _GetConfigFromOverrides(overrides):
"""Converts a list of overrides into a config."""
config = {}
for override in overrides:
if override.count('=') != 1:
raise ValueError('--config_override flag value has incorrect number of '
'"=" characters. The value must take the form '
'fully.qualified.key=value.')
full_key, value = override.split('=')
keys = full_key.split('.')
new_config = {keys.pop(): yaml.safe_load(value)}
while keys:
new_config = {keys.pop(): new_config}
config = MergeConfigs(config, new_config)
return config
@functools.lru_cache()
def GetConfigFlags():
"""Returns the global flags from the user config."""
return GetUserConfig().get(FLAGS_KEY, {})
def GetUserConfig():
"""Returns the user config with any overrides applied.
This loads config from --benchmark_config_file and merges it with
any overrides specified via --config_override and returns the result.
Returns:
dict. The result of merging the loaded config from the
--benchmark_config_file flag with the config generated from the
    --config_override flag.
"""
try:
if FLAGS.benchmark_config_file:
config = _LoadUserConfig(FLAGS.benchmark_config_file)
else:
config = {}
if FLAGS.config_override:
override_config = _GetConfigFromOverrides(FLAGS.config_override)
config = MergeConfigs(config, override_config)
except yaml.parser.ParserError as e:
raise errors.Config.ParseError(
'Encountered a problem loading config. Please ensure that the config '
'is valid YAML. Error received:\n%s' % e)
except yaml.composer.ComposerError as e:
raise errors.Config.ParseError(
'Encountered a problem loading config. Please ensure that all '
'references are defined. Error received:\n%s' % e)
return config
def MergeConfigs(default_config, override_config, warn_new_key=False):
"""Merges the override config into the default config.
This function will recursively merge two nested dicts.
The override_config represents overrides to the default_config dict, so any
leaf key/value pairs which are present in both dicts will take their value
from the override_config.
Args:
default_config: The dict which will have its values overridden.
    override_config: The dict which contains the overrides.
warn_new_key: Determines whether we warn the user if the override config
has a key that the default config did not have.
Returns:
A dict containing the values from the default_config merged with those from
the override_config.
"""
def _Merge(d1, d2):
"""Merge two nested dicts."""
merged_dict = copy.deepcopy(d1)
for k, v in six.iteritems(d2):
if k not in d1:
merged_dict[k] = copy.deepcopy(v)
if warn_new_key:
logging.warning('The key "%s" was not in the default config, '
'but was in user overrides. This may indicate '
'a typo.', k)
elif isinstance(d1[k], dict) and isinstance(v, dict):
merged_dict[k] = _Merge(d1[k], v)
else:
merged_dict[k] = v
return merged_dict
if override_config:
return _Merge(default_config, override_config)
else:
return default_config
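# Illustrative sketch (not part of PerfKit Benchmarker): shows how a
# --config_override value is turned into a nested dict and merged over a
# default config. The config values below are invented.
if __name__ == '__main__':
  demo_default = {
      'cluster_boot': {
          'vm_groups': {'default': {'vm_count': 1, 'os_type': 'debian9'}}}}
  demo_override = _GetConfigFromOverrides(
      ['cluster_boot.vm_groups.default.vm_count=4'])
  # vm_count comes from the override; os_type is kept from the default.
  print(MergeConfigs(demo_default, demo_override))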
def LoadMinimalConfig(benchmark_config, benchmark_name):
"""Loads a benchmark config without using any flags in the process.
This function will prepend configs/default_config_constants.yaml to the
benchmark config prior to loading it. This allows the config to use
references to anchors defined in the constants file.
Args:
benchmark_config: str. The default config in YAML format.
benchmark_name: str. The name of the benchmark.
Returns:
dict. The loaded config.
"""
yaml_config = []
yaml_config.append(_LoadConfigConstants())
yaml_config.append(benchmark_config)
try:
config = yaml.safe_load('\n'.join(yaml_config))
except yaml.parser.ParserError as e:
raise errors.Config.ParseError(
'Encountered a problem loading the default benchmark config. Please '
'ensure that the config is valid YAML. Error received:\n%s' % e)
except yaml.composer.ComposerError as e:
raise errors.Config.ParseError(
'Encountered a problem loading the default benchmark config. Please '
'ensure that all references are defined. Error received:\n%s' % e)
return config[benchmark_name]
def LoadConfig(benchmark_config, user_config, benchmark_name):
"""Loads a benchmark configuration.
This function loads a benchmark's default configuration (in YAML format),
then merges it with any overrides the user provided, and returns the result.
This loaded config is then passed to the benchmark_spec.BenchmarkSpec
constructor in order to create a BenchmarkSpec.
Args:
benchmark_config: str. The default configuration in YAML format.
user_config: dict. The loaded user config for the benchmark.
benchmark_name: str. The name of the benchmark.
Returns:
dict. The loaded config.
"""
config = LoadMinimalConfig(benchmark_config, benchmark_name)
config = MergeConfigs(config, user_config, warn_new_key=True)
return config
|
import asyncio
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)
# TODO List the platforms that you want to support.
# For your initial PR, limit it to 1 platform.
PLATFORMS = ["light"]
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the NEW_NAME component."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up NEW_NAME from a config entry."""
# TODO Store an API object for your platforms to access
# hass.data[DOMAIN][entry.entry_id] = MyApi(...)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import shutil
from paasta_tools.utils import get_docker_client
def after_scenario(context, scenario):
if getattr(context, "tmpdir", None):
shutil.rmtree(context.tmpdir)
if getattr(context, "running_container_id", None):
docker_client = get_docker_client()
docker_client.stop(container=context.running_container_id)
docker_client.remove_container(container=context.running_container_id)
if getattr(context, "fake_http_server", None):
context.fake_http_server.shutdown()
context.fake_http_server = None
|
import json
from flask import current_app
from sqlalchemy.orm import relationship
from sqlalchemy import (
Column,
Integer,
String,
Text,
func,
ForeignKey,
DateTime,
DefaultClause,
Boolean,
)
from sqlalchemy.dialects.postgresql import JSON
from lemur.database import db
from lemur.plugins.base import plugins
from lemur.models import roles_authorities
class Authority(db.Model):
__tablename__ = "authorities"
id = Column(Integer, primary_key=True)
owner = Column(String(128), nullable=False)
name = Column(String(128), unique=True)
body = Column(Text())
chain = Column(Text())
active = Column(Boolean, default=True)
plugin_name = Column(String(64))
description = Column(Text)
options = Column(JSON)
date_created = Column(DateTime, DefaultClause(func.now()), nullable=False)
roles = relationship(
"Role",
secondary=roles_authorities,
passive_deletes=True,
backref=db.backref("authority"),
lazy="dynamic",
)
user_id = Column(Integer, ForeignKey("users.id"))
authority_certificate = relationship(
"Certificate",
backref="root_authority",
uselist=False,
foreign_keys="Certificate.root_authority_id",
)
certificates = relationship(
"Certificate", backref="authority", foreign_keys="Certificate.authority_id"
)
authority_pending_certificate = relationship(
"PendingCertificate",
backref="root_authority",
uselist=False,
foreign_keys="PendingCertificate.root_authority_id",
)
pending_certificates = relationship(
"PendingCertificate",
backref="authority",
foreign_keys="PendingCertificate.authority_id",
)
def __init__(self, **kwargs):
self.owner = kwargs["owner"]
self.roles = kwargs.get("roles", [])
self.name = kwargs.get("name")
self.description = kwargs.get("description")
self.authority_certificate = kwargs["authority_certificate"]
self.plugin_name = kwargs["plugin"]["slug"]
self.options = kwargs.get("options")
@property
def plugin(self):
return plugins.get(self.plugin_name)
@property
def is_cab_compliant(self):
"""
        Parse the options to find whether the authority is CAB Forum compliant,
        i.e., adheres to the CA/Browser Forum Baseline Requirements.
        Returns None if the option is not available.
"""
if not self.options:
return None
options_array = json.loads(self.options)
if isinstance(options_array, list):
for option in options_array:
if "name" in option and option["name"] == 'cab_compliant':
return option["value"]
return None
@property
def max_issuance_days(self):
if self.is_cab_compliant:
return current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)
@property
def default_validity_days(self):
if self.is_cab_compliant:
return current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)
return current_app.config.get("DEFAULT_VALIDITY_DAYS", 365) # 1 year default
def __repr__(self):
return "Authority(name={name})".format(name=self.name)
|
import datetime
from typing import List, Sequence, Tuple
from qutebrowser.config import config, configdata
from qutebrowser.utils import objreg, log, utils
from qutebrowser.completion.models import completionmodel, listcategory, util
from qutebrowser.browser import inspector
def command(*, info):
"""A CompletionModel filled with non-hidden commands and descriptions."""
model = completionmodel.CompletionModel(column_widths=(20, 60, 20))
cmdlist = util.get_cmd_completions(info, include_aliases=True,
include_hidden=False)
model.add_category(listcategory.ListCategory("Commands", cmdlist))
return model
def helptopic(*, info):
"""A CompletionModel filled with help topics."""
model = completionmodel.CompletionModel(column_widths=(20, 70, 10))
cmdlist = util.get_cmd_completions(info, include_aliases=False,
include_hidden=True, prefix=':')
settings = ((opt.name, opt.description, info.config.get_str(opt.name))
for opt in configdata.DATA.values())
model.add_category(listcategory.ListCategory("Commands", cmdlist))
model.add_category(listcategory.ListCategory("Settings", settings))
return model
def quickmark(*, info=None):
"""A CompletionModel filled with all quickmarks."""
def delete(data: Sequence[str]) -> None:
"""Delete a quickmark from the completion menu."""
name = data[0]
quickmark_manager = objreg.get('quickmark-manager')
log.completion.debug('Deleting quickmark {}'.format(name))
quickmark_manager.delete(name)
utils.unused(info)
model = completionmodel.CompletionModel(column_widths=(30, 70, 0))
marks = objreg.get('quickmark-manager').marks.items()
model.add_category(listcategory.ListCategory('Quickmarks', marks,
delete_func=delete,
sort=False))
return model
def bookmark(*, info=None):
"""A CompletionModel filled with all bookmarks."""
def delete(data: Sequence[str]) -> None:
"""Delete a bookmark from the completion menu."""
urlstr = data[0]
log.completion.debug('Deleting bookmark {}'.format(urlstr))
bookmark_manager = objreg.get('bookmark-manager')
bookmark_manager.delete(urlstr)
utils.unused(info)
model = completionmodel.CompletionModel(column_widths=(30, 70, 0))
marks = objreg.get('bookmark-manager').marks.items()
model.add_category(listcategory.ListCategory('Bookmarks', marks,
delete_func=delete,
sort=False))
return model
def session(*, info=None):
"""A CompletionModel filled with session names."""
from qutebrowser.misc import sessions
utils.unused(info)
model = completionmodel.CompletionModel()
try:
sess = ((name,) for name
in sessions.session_manager.list_sessions()
if not name.startswith('_'))
model.add_category(listcategory.ListCategory("Sessions", sess))
except OSError:
log.completion.exception("Failed to list sessions!")
return model
def _buffer(*, win_id_filter=lambda _win_id: True, add_win_id=True):
"""Helper to get the completion model for buffer/other_buffer.
Args:
win_id_filter: A filter function for window IDs to include.
Should return True for all included windows.
add_win_id: Whether to add the window ID to the completion items.
"""
def delete_buffer(data):
"""Close the selected tab."""
win_id, tab_index = data[0].split('/')
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=int(win_id))
tabbed_browser.on_tab_close_requested(int(tab_index) - 1)
model = completionmodel.CompletionModel(column_widths=(6, 40, 54))
tabs_are_windows = config.val.tabs.tabs_are_windows
# list storing all single-tabbed windows when tabs_are_windows
windows: List[Tuple[str, str, str]] = []
for win_id in objreg.window_registry:
if not win_id_filter(win_id):
continue
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
if tabbed_browser.is_shutting_down:
continue
tabs: List[Tuple[str, str, str]] = []
for idx in range(tabbed_browser.widget.count()):
tab = tabbed_browser.widget.widget(idx)
tab_str = ("{}/{}".format(win_id, idx + 1) if add_win_id
else str(idx + 1))
tabs.append((tab_str,
tab.url().toDisplayString(),
tabbed_browser.widget.page_title(idx)))
if tabs_are_windows:
windows += tabs
else:
title = str(win_id) if add_win_id else "Tabs"
cat = listcategory.ListCategory(
title, tabs, delete_func=delete_buffer, sort=False)
model.add_category(cat)
if tabs_are_windows:
win = listcategory.ListCategory(
"Windows", windows, delete_func=delete_buffer, sort=False)
model.add_category(win)
return model
def buffer(*, info=None):
"""A model to complete on open tabs across all windows.
Used for switching the buffer command.
"""
utils.unused(info)
return _buffer()
def other_buffer(*, info):
"""A model to complete on open tabs across all windows except the current.
Used for the tab-take command.
"""
return _buffer(win_id_filter=lambda win_id: win_id != info.win_id)
def tab_focus(*, info):
"""A model to complete on open tabs in the current window."""
model = _buffer(win_id_filter=lambda win_id: win_id == info.win_id,
add_win_id=False)
special = [
("last", "Focus the last-focused tab"),
("stack-next", "Go forward through a stack of focused tabs"),
("stack-prev", "Go backward through a stack of focused tabs"),
]
model.add_category(listcategory.ListCategory("Special", special))
return model
def window(*, info):
"""A model to complete on all open windows."""
model = completionmodel.CompletionModel(column_widths=(6, 30, 64))
windows = []
for win_id in objreg.window_registry:
if win_id == info.win_id:
continue
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
tab_titles = (tab.title() for tab in tabbed_browser.widgets())
windows.append(("{}".format(win_id),
objreg.window_registry[win_id].windowTitle(),
", ".join(tab_titles)))
model.add_category(listcategory.ListCategory("Windows", windows))
return model
def inspector_position(*, info):
"""A model for possible inspector positions."""
utils.unused(info)
model = completionmodel.CompletionModel(column_widths=(100, 0, 0))
positions = [(e.name,) for e in inspector.Position]
category = listcategory.ListCategory("Position (optional)", positions)
model.add_category(category)
return model
def _qdatetime_to_completion_format(qdate):
if not qdate.isValid():
ts = 0
else:
ts = qdate.toMSecsSinceEpoch()
if ts < 0:
ts = 0
pydate = datetime.datetime.fromtimestamp(ts / 1000)
return pydate.strftime(config.val.completion.timestamp_format)
def _back_forward(info, go_forward):
history = info.cur_tab.history
current_idx = history.current_idx()
model = completionmodel.CompletionModel(column_widths=(5, 36, 50, 9))
if go_forward:
start = current_idx + 1
items = history.forward_items()
else:
start = 0
items = history.back_items()
entries = [
(
str(idx),
entry.url().toDisplayString(),
entry.title(),
_qdatetime_to_completion_format(entry.lastVisited())
)
for idx, entry in enumerate(items, start)
]
if not go_forward:
# make sure the most recent is at the top for :back
entries.reverse()
cat = listcategory.ListCategory("History", entries, sort=False)
model.add_category(cat)
return model
def forward(*, info):
"""A model to complete on history of the current tab.
Used for the :forward command.
"""
return _back_forward(info, go_forward=True)
def back(*, info):
"""A model to complete on history of the current tab.
Used for the :back command.
"""
return _back_forward(info, go_forward=False)
def undo(*, info):
"""A model to complete undo entries."""
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=info.win_id)
model = completionmodel.CompletionModel(column_widths=(6, 84, 10))
timestamp_format = config.val.completion.timestamp_format
entries = [
(
str(idx),
', '.join(entry.url.toDisplayString() for entry in group),
group[-1].created_at.strftime(timestamp_format)
)
for idx, group in
enumerate(reversed(tabbed_browser.undo_stack), start=1)
]
cat = listcategory.ListCategory("Closed tabs", entries)
model.add_category(cat)
return model
|
import logging
import os
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_FILE_PATH = "file_path"
DEFAULT_NAME = "File"
ICON = "mdi:file"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FILE_PATH): cv.isfile,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the file sensor."""
file_path = config.get(CONF_FILE_PATH)
name = config.get(CONF_NAME)
unit = config.get(CONF_UNIT_OF_MEASUREMENT)
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
if hass.config.is_allowed_path(file_path):
async_add_entities([FileSensor(name, file_path, unit, value_template)], True)
else:
_LOGGER.error("'%s' is not an allowed directory", file_path)
class FileSensor(Entity):
"""Implementation of a file sensor."""
def __init__(self, name, file_path, unit_of_measurement, value_template):
"""Initialize the file sensor."""
self._name = name
self._file_path = file_path
self._unit_of_measurement = unit_of_measurement
self._val_tpl = value_template
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Get the latest entry from a file and updates the state."""
try:
with open(self._file_path, encoding="utf-8") as file_data:
for line in file_data:
data = line
data = data.strip()
except (IndexError, FileNotFoundError, IsADirectoryError, UnboundLocalError):
_LOGGER.warning(
"File or data not present at the moment: %s",
os.path.basename(self._file_path),
)
return
if self._val_tpl is not None:
self._state = self._val_tpl.async_render_with_possible_json_value(
data, None
)
else:
self._state = data
|