# 0002_auto__add_field_executioncommandserver_celery_task_id.py
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ExecutionCommandServer.celery_task_id'
db.add_column(u'task_executioncommandserver', 'celery_task_id',
self.gf('django.db.models.fields.CharField')(default='', max_length=36, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ExecutionCommandServer.celery_task_id'
db.delete_column(u'task_executioncommandserver', 'celery_task_id')
models = {
u'account.customuser': {
'Meta': {'object_name': 'CustomUser'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'core.application': {
'Meta': {'unique_together': "(('department', 'name'),)", 'object_name': 'Application'},
'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': u"orm['core.Department']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'core.department': {
'Meta': {'object_name': 'Department'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
u'core.environment': {
'Meta': {'unique_together': "(('application', 'name'),)", 'object_name': 'Environment'},
'application': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'environments'", 'to': u"orm['core.Application']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_production': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'core.server': {
'Meta': {'unique_together': "(('environment', 'name'),)", 'object_name': 'Server'},
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'servers'", 'to': u"orm['core.Environment']"}),
'host': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'servers'", 'symmetrical': 'False', 'to': u"orm['core.ServerRole']"}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'core.serverrole': {
'Meta': {'object_name': 'ServerRole'},
'department': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'serverroles'", 'to': u"orm['core.Department']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
u'task.execution': {
'Meta': {'object_name': 'Execution'},
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executions'", 'to': u"orm['core.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executions'", 'to': u"orm['task.Task']"}),
'time': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'time_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'time_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'executions'", 'to': u"orm['account.CustomUser']"})
},
u'task.executioncommand': {
'Meta': {'object_name': 'ExecutionCommand'},
'command': ('django.db.models.fields.TextField', [], {}),
'execution': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commands'", 'to': u"orm['task.Execution']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'roles': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['core.ServerRole']", 'symmetrical': 'False'})
},
u'task.executioncommandserver': {
'Meta': {'object_name': 'ExecutionCommandServer'},
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'execution_command': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'servers'", 'to': u"orm['task.ExecutionCommand']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'return_code': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'server': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Server']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'time': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'time_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'task.executionlivelog': {
'Meta': {'object_name': 'ExecutionLiveLog'},
'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'execution': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'live_logs'", 'to': u"orm['task.Execution']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'task.executionparameter': {
'Meta': {'object_name': 'ExecutionParameter'},
'execution': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parameters'", 'to': u"orm['task.Execution']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'task.task': {
'Meta': {'unique_together': "(('application', 'name'),)", 'object_name': 'Task'},
'application': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tasks'", 'to': u"orm['core.Application']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'task.taskcommand': {
'Meta': {'object_name': 'TaskCommand'},
'command': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'commands'", 'symmetrical': 'False', 'to': u"orm['core.ServerRole']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commands'", 'to': u"orm['task.Task']"})
},
u'task.taskparameter': {
'Meta': {'object_name': 'TaskParameter'},
'default_value': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parameters'", 'to': u"orm['task.Task']"})
}
}
complete_apps = ['task']
# nest.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Functions for working with arbitrarily nested sequences of elements.
NOTE(mrry): This fork of the `tensorflow.python.util.nest` module
makes two changes:
1. It removes support for lists as a level of nesting in nested structures.
2. It adds support for `SparseTensorValue` as an atomic element.

The motivation for this change is twofold:
1. It seems more natural for lists to be treated (e.g. in Dataset constructors)
as tensors, rather than lists of (lists of...) tensors.
2. This is needed because `SparseTensorValue` is implemented as a `namedtuple`
that would normally be flattened and we want to be able to create sparse
tensors from `SparseTensorValue`s similarly to creating tensors from numpy
arrays.
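
Example (illustrative; assumes the `FlattenForData` behavior described
above):

```python
flatten([1, 2, 3])    # -> [[1, 2, 3]]  (the list is treated as one atom)
flatten((1, (2, 3)))  # -> [1, 2, 3]    (tuples still nest)
```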
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six as _six
from tensorflow.python.framework import sparse_tensor as _sparse_tensor
from tensorflow.python.util import _pywrap_utils
from tensorflow.python.util import nest
from tensorflow.python.util.compat import collections_abc as _collections_abc
def _sorted(dict_):
"""Returns a sorted list of the dict keys, with error if keys not sortable."""
try:
return sorted(list(dict_))
except TypeError:
raise TypeError("nest only supports dicts with sortable keys.")
def _yield_value(iterable):
if isinstance(iterable, _collections_abc.Mapping):
# Iterate through dictionaries in a deterministic order by sorting the
# keys. Notice this means that we ignore the original order of `OrderedDict`
# instances. This is intentional, to avoid potential bugs caused by mixing
# ordered and plain dicts (e.g., flattening a dict but using a
# corresponding `OrderedDict` to pack it back).
for key in _sorted(iterable):
yield iterable[key]
elif isinstance(iterable, _sparse_tensor.SparseTensorValue):
yield iterable
else:
for value in iterable:
yield value
# See the swig file (../../util/util.i) for documentation.
is_sequence = _pywrap_utils.IsSequenceForData
# See the swig file (../../util/util.i) for documentation.
flatten = _pywrap_utils.FlattenForData
def assert_same_structure(nest1, nest2, check_types=True):
"""Asserts that two structures are nested in the same way.
Args:
nest1: an arbitrarily nested structure.
nest2: an arbitrarily nested structure.
check_types: if `True` (default) types of sequences should be same as
well. For dictionary, "type" of dictionary is considered to include its
keys. In other words, two dictionaries with different keys are considered
to have a different "type". If set to `False`, two iterables are
considered same as long as they yield the elements that have same
structures.
Raises:
ValueError: If the two structures do not have the same number of elements or
if the two structures are not nested in the same way.
TypeError: If the two structures differ in the type of sequence in any of
their substructures. Only possible if `check_types` is `True`.
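
Example (illustrative; the second call raises because the arities differ):

```python
assert_same_structure((1, (2, 3)), ("a", ("b", "c")))  # passes
assert_same_structure((1, (2, 3)), (1, 2, 3))          # raises ValueError
```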
"""
_pywrap_utils.AssertSameStructureForData(nest1, nest2, check_types)
def _packed_nest_with_indices(structure, flat, index):
"""Helper function for pack_nest_as.
Args:
structure: Substructure (tuple of elements and/or tuples) to mimic
flat: Flattened values to output substructure for.
index: Index at which to start reading from flat.
Returns:
The tuple (new_index, child), where:
* new_index - the updated index into `flat` having processed `structure`.
* packed - the subset of `flat` corresponding to `structure`,
having started at `index`, and packed into the same nested
format.
Raises:
ValueError: if `structure` contains more elements than `flat`
(assuming indexing starts from `index`).
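
Example (illustrative): for `structure=(0, (0, 0))`, `flat=[9, 8, 7]` and
`index=0`, this returns `(3, [9, (8, 7)])`.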
"""
packed = []
for s in _yield_value(structure):
if is_sequence(s):
new_index, child = _packed_nest_with_indices(s, flat, index)
packed.append(nest._sequence_like(s, child)) # pylint: disable=protected-access
index = new_index
else:
packed.append(flat[index])
index += 1
return index, packed
def pack_sequence_as(structure, flat_sequence):
"""Returns a given flattened sequence packed into a nest.
If `structure` is a scalar, `flat_sequence` must be a single-element list;
in this case the return value is `flat_sequence[0]`.
Args:
structure: tuple or list constructed of scalars and/or other tuples/lists,
or a scalar. Note: numpy arrays are considered scalars.
flat_sequence: flat sequence to pack.
Returns:
packed: `flat_sequence` converted to have the same recursive structure as
`structure`.
Raises:
ValueError: If nest and structure have different element counts.
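
Example (a sketch; tuples are treated as sequences by this module):

```python
pack_sequence_as((0, (0, 0)), [1, 2, 3])  # -> (1, (2, 3))
```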
"""
if not (is_sequence(flat_sequence) or isinstance(flat_sequence, list)):
raise TypeError("flat_sequence must be a sequence")
if not is_sequence(structure):
if len(flat_sequence) != 1:
raise ValueError("Structure is a scalar but len(flat_sequence) == %d > 1"
% len(flat_sequence))
return flat_sequence[0]
flat_structure = flatten(structure)
if len(flat_structure) != len(flat_sequence):
raise ValueError(
"Could not pack sequence. Structure had %d elements, but flat_sequence "
"had %d elements. Structure: %s, flat_sequence: %s."
% (len(flat_structure), len(flat_sequence), structure, flat_sequence))
_, packed = _packed_nest_with_indices(structure, flat_sequence, 0)
return nest._sequence_like(structure, packed) # pylint: disable=protected-access
def map_structure(func, *structure, **check_types_dict):
"""Applies `func` to each entry in `structure` and returns a new structure.
Applies `func(x[0], x[1], ...)` where x[i] is an entry in
`structure[i]`. All structures in `structure` must have the same arity,
and the return value will contain the results in the same structure.
Args:
func: A callable that accepts as many arguments as there are structures.
*structure: scalar, or tuple or list of constructed scalars and/or other
tuples/lists, or scalars. Note: numpy arrays are considered scalars.
**check_types_dict: only valid keyword argument is `check_types`. If set to
`True` (default) the types of iterables within the structures have to be
same (e.g. `map_structure(func, [1], (1,))` raises a `TypeError`
exception). To allow this set this argument to `False`.
Returns:
A new structure with the same arity as `structure`, whose values correspond
to `func(x[0], x[1], ...)` where `x[i]` is a value in the corresponding
location in `structure[i]`. If there are different sequence types and
`check_types` is `False` the sequence types of the first structure will be
used.
Raises:
TypeError: If `func` is not callable or if the structures do not match
each other by depth tree.
ValueError: If no structure is provided or if the structures do not match
each other by type.
ValueError: If wrong keyword arguments are provided.
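
Example (illustrative):

```python
map_structure(lambda x: x * 2, (1, (2, 3)))  # -> (2, (4, 6))
```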
"""
if not callable(func):
raise TypeError("func must be callable, got: %s" % func)
if not structure:
raise ValueError("Must provide at least one structure")
if check_types_dict:
if "check_types" not in check_types_dict or len(check_types_dict) > 1:
raise ValueError("Only valid keyword argument is check_types")
check_types = check_types_dict["check_types"]
else:
check_types = True
for other in structure[1:]:
assert_same_structure(structure[0], other, check_types=check_types)
flat_structure = (flatten(s) for s in structure)
entries = zip(*flat_structure)
return pack_sequence_as(
structure[0], [func(*x) for x in entries])
def _yield_flat_up_to(shallow_tree, input_tree):
"""Yields elements `input_tree` partially flattened up to `shallow_tree`."""
if is_sequence(shallow_tree):
for shallow_branch, input_branch in zip(_yield_value(shallow_tree),
_yield_value(input_tree)):
for input_leaf in _yield_flat_up_to(shallow_branch, input_branch):
yield input_leaf
else:
yield input_tree
def assert_shallow_structure(shallow_tree, input_tree, check_types=True):
"""Asserts that `shallow_tree` is a shallow structure of `input_tree`.
That is, this function tests if the `input_tree` structure can be created from
the `shallow_tree` structure by replacing its leaf nodes with deeper
tree structures.
Examples:
The following code will raise an exception:
```python
shallow_tree = ["a", "b"]
input_tree = ["c", ["d", "e"], "f"]
assert_shallow_structure(shallow_tree, input_tree)
```
The following code will not raise an exception:
```python
shallow_tree = ["a", "b"]
input_tree = ["c", ["d", "e"]]
assert_shallow_structure(shallow_tree, input_tree)
```
Args:
shallow_tree: an arbitrarily nested structure.
input_tree: an arbitrarily nested structure.
check_types: if `True` (default) the sequence types of `shallow_tree` and
`input_tree` have to be the same.
Raises:
TypeError: If `shallow_tree` is a sequence but `input_tree` is not.
TypeError: If the sequence types of `shallow_tree` are different from
`input_tree`. Only raised if `check_types` is `True`.
ValueError: If the sequence lengths of `shallow_tree` are different from
`input_tree`.
"""
if is_sequence(shallow_tree):
if not is_sequence(input_tree):
raise TypeError(
"If shallow structure is a sequence, input must also be a sequence. "
"Input has type: %s." % type(input_tree))
if check_types and not isinstance(input_tree, type(shallow_tree)):
raise TypeError(
"The two structures don't have the same sequence type. Input "
"structure has type %s, while shallow structure has type %s."
% (type(input_tree), type(shallow_tree)))
if len(input_tree) != len(shallow_tree):
raise ValueError(
"The two structures don't have the same sequence length. Input "
"structure has length %s, while shallow structure has length %s."
% (len(input_tree), len(shallow_tree)))
if check_types and isinstance(shallow_tree, _collections_abc.Mapping):
if set(input_tree) != set(shallow_tree):
raise ValueError(
"The two structures don't have the same keys. Input "
"structure has keys %s, while shallow structure has keys %s." %
(list(input_tree), list(shallow_tree)))
input_tree = sorted(_six.iteritems(input_tree))
shallow_tree = sorted(_six.iteritems(shallow_tree))
for shallow_branch, input_branch in zip(shallow_tree, input_tree):
assert_shallow_structure(shallow_branch, input_branch,
check_types=check_types)
def flatten_up_to(shallow_tree, input_tree):
"""Flattens `input_tree` up to `shallow_tree`.
Any further depth in structure in `input_tree` is retained as elements in the
partially flattened output.
If `shallow_tree` and `input_tree` are not sequences, this returns a
single-element list: `[input_tree]`.
Use Case:
Sometimes we may wish to partially flatten a nested sequence, retaining some
of the nested structure. We achieve this by specifying a shallow structure,
`shallow_tree`, we wish to flatten up to.
The input, `input_tree`, can be thought of as having the same structure as
`shallow_tree`, but with leaf nodes that are themselves tree structures.
Examples:
```python
input_tree = [[[2, 2], [3, 3]], [[4, 9], [5, 5]]]
shallow_tree = [[True, True], [False, True]]
flattened_input_tree = flatten_up_to(shallow_tree, input_tree)
flattened_shallow_tree = flatten_up_to(shallow_tree, shallow_tree)
# Output is:
# [[2, 2], [3, 3], [4, 9], [5, 5]]
# [True, True, False, True]
```
```python
input_tree = [[('a', 1), [('b', 2), [('c', 3), [('d', 4)]]]]]
shallow_tree = [['level_1', ['level_2', ['level_3', ['level_4']]]]]
input_tree_flattened_as_shallow_tree = flatten_up_to(shallow_tree, input_tree)
input_tree_flattened = flatten(input_tree)
# Output is:
# [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
# ['a', 1, 'b', 2, 'c', 3, 'd', 4]
```
Non-Sequence Edge Cases:
```python
flatten_up_to(0, 0) # Output: [0]
flatten_up_to(0, [0, 1, 2]) # Output: [[0, 1, 2]]
flatten_up_to([0, 1, 2], 0) # Output: TypeError
flatten_up_to([0, 1, 2], [0, 1, 2]) # Output: [0, 1, 2]
```
Args:
shallow_tree: a possibly pruned structure of input_tree.
input_tree: an arbitrarily nested structure or a scalar object.
Note, numpy arrays are considered scalars.
Returns:
A Python list, the partially flattened version of `input_tree` according to
the structure of `shallow_tree`.
Raises:
TypeError: If `shallow_tree` is a sequence but `input_tree` is not.
TypeError: If the sequence types of `shallow_tree` are different from
`input_tree`.
ValueError: If the sequence lengths of `shallow_tree` are different from
`input_tree`.
"""
assert_shallow_structure(shallow_tree, input_tree)
return list(_yield_flat_up_to(shallow_tree, input_tree))
def map_structure_up_to(shallow_tree, func, *inputs):
"""Applies a function or op to a number of partially flattened inputs.
The `inputs` are flattened up to `shallow_tree` before being mapped.
Use Case:
Sometimes we wish to apply a function to a partially flattened
sequence (for example when the function itself takes sequence inputs). We
achieve this by specifying a shallow structure, `shallow_tree` we wish to
flatten up to.
The `inputs` can be thought of as having the same structure as
`shallow_tree`, but with leaf nodes that are themselves tree structures.
This function, therefore, will return something with the same base structure
as `shallow_tree`.
Examples:
```python
ab_tuple = collections.namedtuple("ab_tuple", "a, b")
op_tuple = collections.namedtuple("op_tuple", "add, mul")
inp_val = ab_tuple(a=2, b=3)
inp_ops = ab_tuple(a=op_tuple(add=1, mul=2), b=op_tuple(add=2, mul=3))
out = map_structure_up_to(inp_val, lambda val, ops: (val + ops.add) * ops.mul,
inp_val, inp_ops)
# Output is: ab_tuple(a=6, b=15)
```
```python
data_list = [[2, 4, 6, 8], [[1, 3, 5, 7, 9], [3, 5, 7]]]
name_list = ['evens', ['odds', 'primes']]
out = map_structure_up_to(
name_list,
lambda name, sec: "first_{}_{}".format(len(sec), name),
name_list, data_list)
# Output is: ['first_4_evens', ['first_5_odds', 'first_3_primes']]
```
Args:
shallow_tree: a shallow tree, common to all the inputs.
func: callable which will be applied to each input individually.
*inputs: arbitrarily nested combination of objects that are compatible with
shallow_tree. The function `func` is applied to corresponding
partially flattened elements of each input, so the function must support
arity of `len(inputs)`.
Raises:
TypeError: If `shallow_tree` is a sequence but `input_tree` is not.
TypeError: If the sequence types of `shallow_tree` are different from
`input_tree`.
ValueError: If the sequence lengths of `shallow_tree` are different from
`input_tree`.
Returns:
result of repeatedly applying `func`, with same structure as
`shallow_tree`.
"""
if not inputs:
raise ValueError("Cannot map over no sequences")
for input_tree in inputs:
assert_shallow_structure(shallow_tree, input_tree)
# Flatten each input separately, apply the function to corresponding elements,
# then repack based on the structure of the first input.
all_flattened_up_to = (
flatten_up_to(shallow_tree, input_tree) for input_tree in inputs)
results = [func(*tensors) for tensors in zip(*all_flattened_up_to)]
return pack_sequence_as(structure=shallow_tree, flat_sequence=results)
# dependency_parse_handlers.py
__author__ = 'kjoseph'
import itertools
import Queue
from collections import defaultdict
from dependency_parse_object import DependencyParseObject, is_noun, is_verb
def get_parse(dp_objs):
term_map = {}
map_to_head = defaultdict(list)
for parse_object in dp_objs:
if parse_object.head > 0:
map_to_head[parse_object.head].append(parse_object.id)
term_map[parse_object.id] = parse_object
# first manually combine MWE
#mwe_to_combine = get_mwe_combinations(map_to_head,term_map)
#for mwe in mwe_to_combine:
# combine_terms(mwe,term_map,map_to_head)
#conj_to_combine = get_conj_combinations(map_to_head,term_map)
#for conj in conj_to_combine:
# combine_terms(conj,term_map,map_to_head)
# now manually chunk the nouns together
nouns_to_combine = get_noun_combinations(map_to_head,term_map)
for noun_set in nouns_to_combine:
combine_terms(noun_set,term_map, map_to_head)
verbs_to_combine = get_verb_combinations(map_to_head,term_map)
for verb_set in verbs_to_combine:
combine_terms(verb_set,term_map, map_to_head)
roots =[]
non_terms = []
for parse_object in term_map.values():
if parse_object.head == 0:
roots.append(parse_object)
elif parse_object.head == -1:
non_terms.append(parse_object)
# now build the parse tree
to_parse = Queue.LifoQueue()
for root in reversed(roots):
to_parse.put([root,0])
return to_parse, term_map, map_to_head, non_terms
def get_noun_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0 or not (is_noun(head.postag) or head.postag in ['D','@','A','R']) :
continue
for child_id in children:
child = term_map[child_id]
if is_noun(child.postag) or child.postag in ['D','@','A','R']:
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_verb_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0 or not is_verb(head.postag):
continue
for child_id in children:
child = term_map[child_id]
if is_verb(child.postag) and child.id == (head.id +1):
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_mwe_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0:
continue
for child_id in children:
child = term_map[child_id]
if child.deprel == 'MWE':
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
def get_conj_combinations(map_to_head,term_map):
to_combine = []
for head_id, children in map_to_head.iteritems():
head = term_map[head_id]
if len(children) == 0:
continue
for child_id in children:
child = term_map[child_id]
if child.deprel == 'CONJ':
to_combine.append({child.id, head.id})
return get_combinations(to_combine)
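# Illustrative sketch of get_combinations' behavior (hypothetical input):
# it repeatedly merges any overlapping id sets until the remaining sets are
# pairwise disjoint, e.g. [{1, 2}, {2, 3}, {5, 6}] -> [{5, 6}, {1, 2, 3}]
# (output order is not guaranteed).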
def get_combinations(to_combine):
combination_found = True
while combination_found:
combination_found = False
combos = itertools.combinations(to_combine,2)
removed = []
for d in combos:
# skip pairs that involve a set we already merged and removed
if any(d[0] == r or d[1] == r for r in removed):
continue
if len(d[0].intersection(d[1])) > 0:
combination_found = True
to_combine.append(set.union(d[0],d[1]))
[to_combine.remove(x) for x in to_combine if x == d[0]]
[to_combine.remove(x) for x in to_combine if x == d[1]]
removed.append(d[0])
removed.append(d[1])
return to_combine
def combine_terms(noun_set,term_map, map_to_head):
new_parse_obj = DependencyParseObject(object_ids=noun_set,term_map=term_map)
# okay, we've created a new parse object
# now we need to update the relations to it
for id in noun_set:
if id == new_parse_obj.id:
term_map[id] = new_parse_obj
if id in map_to_head:
for child_id in noun_set:
if child_id in map_to_head[id]:
map_to_head[id].remove(child_id)
else:
# things dependent on this thing need to become dependent on the new parse object
if id in map_to_head:
for child in map_to_head[id]:
if child not in noun_set:
map_to_head[new_parse_obj.id].append(child)
term_map[child].head = new_parse_obj.id
del map_to_head[id]
del term_map[id]
def print_parse(parse_out, term_map, map_to_head):
while not parse_out.empty():
curr_head,level = parse_out.get()
print " "*level + str(level) +" " + curr_head.__unicode__()
for child in reversed(map_to_head.get(curr_head.id,[])):
parse_out.put([term_map[child],level+1])
def get_entities_from_parse(term_map):
all_proper = []
all_entities = []
all_entities_original_ids = []
all_proper_original_ids = []
for k, v in term_map.iteritems():
if is_noun(v.postag) or v.postag == '@' or v.postag == '#':
text = []
split_text = v.text.split()
ent_ids = []
for x in range(len(split_text)):
t = split_text[x]#.strip(string.punctuation)
#if x == 0 and t in stopwords:
# continue
text.append(t)
ent_ids.append(v.all_original_ids[x])
if len(text) > 0 and v.postag != 'O':
if '^' in v.postag and v.text[0].isupper():
all_proper.append(" ".join(text))
all_proper_original_ids.append(sorted(v.all_original_ids))
all_entities.append(" ".join([t.lower() for t in text]))
all_entities_original_ids.append(sorted(ent_ids))
return all_entities, all_proper, all_entities_original_ids, all_proper_original_ids
// identity.js
export default function identity(x) {
  return x;
}
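// Example usage (illustrative):
//   identity(42) === 42
//   [1, 2, 3].map(identity)  // -> [1, 2, 3]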
// mem.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// ignore-lexer-test FIXME #15679
//! Readers and Writers for in-memory buffers
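//!
//! For example (an illustrative sketch using the types defined below):
//!
//! ```rust
//! use std::io::{MemReader, MemWriter};
//!
//! let mut w = MemWriter::new();
//! w.write(&[1, 2, 3]).unwrap();
//! let mut r = MemReader::new(w.unwrap());
//! assert_eq!(r.read_to_end().unwrap(), vec!(1, 2, 3));
//! ```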
#![allow(deprecated)]
use cmp::min;
use option::None;
use result::{Err, Ok};
use io;
use io::{Reader, Writer, Seek, Buffer, IoError, SeekStyle, IoResult};
use slice::{mod, AsSlice, SlicePrelude};
use vec::Vec;
const BUF_CAPACITY: uint = 128;
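// For example (illustrative): `combine(io::SeekCur, 4, 10, -2)` yields
// `Ok(2)`, while any seek that would land before offset 0 yields an
// `InvalidInput` error.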
fn combine(seek: SeekStyle, cur: uint, end: uint, offset: i64) -> IoResult<u64> {
// compute offset as signed and clamp to prevent overflow
let pos = match seek {
io::SeekSet => 0,
io::SeekEnd => end,
io::SeekCur => cur,
} as i64;
if offset + pos < 0 {
Err(IoError {
kind: io::InvalidInput,
desc: "invalid seek to a negative offset",
detail: None
})
} else {
Ok((offset + pos) as u64)
}
}
impl Writer for Vec<u8> {
#[inline]
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
self.push_all(buf);
Ok(())
}
}
/// Writes to an owned, growable byte vector
///
/// # Example
///
/// ```rust
/// # #![allow(unused_must_use)]
/// use std::io::MemWriter;
///
/// let mut w = MemWriter::new();
/// w.write(&[0, 1, 2]);
///
/// assert_eq!(w.unwrap(), vec!(0, 1, 2));
/// ```
#[deprecated = "use the Vec<u8> Writer implementation directly"]
#[deriving(Clone)]
pub struct MemWriter {
buf: Vec<u8>,
}
impl MemWriter {
/// Create a new `MemWriter`.
#[inline]
pub fn new() -> MemWriter {
MemWriter::with_capacity(BUF_CAPACITY)
}
/// Create a new `MemWriter`, allocating at least `n` bytes for
/// the internal buffer.
#[inline]
pub fn with_capacity(n: uint) -> MemWriter {
MemWriter::from_vec(Vec::with_capacity(n))
}
/// Create a new `MemWriter` that will append to an existing `Vec`.
#[inline]
pub fn from_vec(buf: Vec<u8>) -> MemWriter {
MemWriter { buf: buf }
}
/// Acquires an immutable reference to the underlying buffer of this
/// `MemWriter`.
#[inline]
pub fn get_ref<'a>(&'a self) -> &'a [u8] { self.buf.as_slice() }
/// Unwraps this `MemWriter`, returning the underlying buffer
#[inline]
pub fn unwrap(self) -> Vec<u8> { self.buf }
}
impl Writer for MemWriter {
#[inline]
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
self.buf.push_all(buf);
Ok(())
}
}
/// Reads from an owned byte vector
///
/// # Example
///
/// ```rust
/// # #![allow(unused_must_use)]
/// use std::io::MemReader;
///
/// let mut r = MemReader::new(vec!(0, 1, 2));
///
/// assert_eq!(r.read_to_end().unwrap(), vec!(0, 1, 2));
/// ```
pub struct MemReader {
buf: Vec<u8>,
pos: uint
}
impl MemReader {
/// Creates a new `MemReader` which will read the buffer given. The buffer
/// can be re-acquired through `unwrap`
#[inline]
pub fn new(buf: Vec<u8>) -> MemReader {
MemReader {
buf: buf,
pos: 0
}
}
/// Tests whether this reader has read all bytes in its buffer.
///
/// If `true`, then this will no longer return bytes from `read`.
#[inline]
pub fn eof(&self) -> bool { self.pos >= self.buf.len() }
/// Acquires an immutable reference to the underlying buffer of this
/// `MemReader`.
///
/// No method is exposed for acquiring a mutable reference to the buffer
/// because it could corrupt the state of this `MemReader`.
#[inline]
pub fn get_ref<'a>(&'a self) -> &'a [u8] { self.buf.as_slice() }
/// Unwraps this `MemReader`, returning the underlying buffer
#[inline]
pub fn unwrap(self) -> Vec<u8> { self.buf }
}
impl Reader for MemReader {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
if self.eof() { return Err(io::standard_error(io::EndOfFile)) }
let write_len = min(buf.len(), self.buf.len() - self.pos);
{
let input = self.buf[self.pos.. self.pos + write_len];
let output = buf[mut ..write_len];
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
}
self.pos += write_len;
assert!(self.pos <= self.buf.len());
return Ok(write_len);
}
}
impl Seek for MemReader {
#[inline]
fn tell(&self) -> IoResult<u64> { Ok(self.pos as u64) }
#[inline]
fn seek(&mut self, pos: i64, style: SeekStyle) -> IoResult<()> {
let new = try!(combine(style, self.pos, self.buf.len(), pos));
self.pos = new as uint;
Ok(())
}
}
impl Buffer for MemReader {
#[inline]
fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> {
if self.pos < self.buf.len() {
Ok(self.buf[self.pos..])
} else {
Err(io::standard_error(io::EndOfFile))
}
}
#[inline]
fn consume(&mut self, amt: uint) { self.pos += amt; }
}
/// Writes to a fixed-size byte slice
///
/// If a write will not fit in the buffer, it returns an error and does not
/// write any data.
///
/// # Example
///
/// ```rust
/// # #![allow(unused_must_use)]
/// use std::io::BufWriter;
///
/// let mut buf = [0, ..4];
/// {
/// let mut w = BufWriter::new(&mut buf);
/// w.write(&[0, 1, 2]);
/// }
/// assert!(buf == [0, 1, 2, 0]);
/// ```
pub struct BufWriter<'a> {
buf: &'a mut [u8],
pos: uint
}
impl<'a> BufWriter<'a> {
/// Creates a new `BufWriter` which will wrap the specified buffer. The
/// writer initially starts at position 0.
#[inline]
pub fn new<'a>(buf: &'a mut [u8]) -> BufWriter<'a> {
BufWriter {
buf: buf,
pos: 0
}
}
}
impl<'a> Writer for BufWriter<'a> {
#[inline]
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
// return an error if the entire write does not fit in the buffer
let cap = if self.pos >= self.buf.len() { 0 } else { self.buf.len() - self.pos };
if buf.len() > cap {
return Err(IoError {
kind: io::OtherIoError,
desc: "Trying to write past end of buffer",
detail: None
})
}
slice::bytes::copy_memory(self.buf[mut self.pos..], buf);
self.pos += buf.len();
Ok(())
}
}
impl<'a> Seek for BufWriter<'a> {
#[inline]
fn tell(&self) -> IoResult<u64> { Ok(self.pos as u64) }
#[inline]
fn seek(&mut self, pos: i64, style: SeekStyle) -> IoResult<()> {
let new = try!(combine(style, self.pos, self.buf.len(), pos));
self.pos = new as uint;
Ok(())
}
}
/// Reads from a fixed-size byte slice
///
/// # Example
///
/// ```rust
/// # #![allow(unused_must_use)]
/// use std::io::BufReader;
///
/// let mut buf = [0, 1, 2, 3];
/// let mut r = BufReader::new(&mut buf);
///
/// assert_eq!(r.read_to_end().unwrap(), vec!(0, 1, 2, 3));
/// ```
pub struct BufReader<'a> {
buf: &'a [u8],
pos: uint
}
impl<'a> BufReader<'a> {
/// Creates a new buffered reader which will read the specified buffer
#[inline]
pub fn new<'a>(buf: &'a [u8]) -> BufReader<'a> {
BufReader {
buf: buf,
pos: 0
}
}
/// Tests whether this reader has read all bytes in its buffer.
///
/// If `true`, then this will no longer return bytes from `read`.
#[inline]
pub fn eof(&self) -> bool { self.pos >= self.buf.len() }
}
impl<'a> Reader for BufReader<'a> {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
if self.eof() { return Err(io::standard_error(io::EndOfFile)) }
let write_len = min(buf.len(), self.buf.len() - self.pos);
{
let input = self.buf[self.pos.. self.pos + write_len];
let output = buf[mut ..write_len];
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
}
self.pos += write_len;
assert!(self.pos <= self.buf.len());
return Ok(write_len);
}
}
impl<'a> Seek for BufReader<'a> {
#[inline]
fn tell(&self) -> IoResult<u64> { Ok(self.pos as u64) }
#[inline]
fn seek(&mut self, pos: i64, style: SeekStyle) -> IoResult<()> {
let new = try!(combine(style, self.pos, self.buf.len(), pos));
self.pos = new as uint;
Ok(())
}
}
impl<'a> Buffer for BufReader<'a> {
#[inline]
fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> {
if self.pos < self.buf.len() {
Ok(self.buf[self.pos..])
} else {
Err(io::standard_error(io::EndOfFile))
}
}
#[inline]
fn consume(&mut self, amt: uint) { self.pos += amt; }
}
#[cfg(test)]
mod test {
extern crate test;
use prelude::*;
use super::*;
use io::*;
use io;
use self::test::Bencher;
use str::StrPrelude;
#[test]
fn test_mem_writer() {
let mut writer = MemWriter::new();
writer.write(&[0]).unwrap();
writer.write(&[1, 2, 3]).unwrap();
writer.write(&[4, 5, 6, 7]).unwrap();
let b: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7];
assert_eq!(writer.get_ref(), b);
}
#[test]
fn test_buf_writer() {
let mut buf = [0 as u8, ..8];
{
let mut writer = BufWriter::new(&mut buf);
assert_eq!(writer.tell(), Ok(0));
writer.write(&[0]).unwrap();
assert_eq!(writer.tell(), Ok(1));
writer.write(&[1, 2, 3]).unwrap();
writer.write(&[4, 5, 6, 7]).unwrap();
assert_eq!(writer.tell(), Ok(8));
writer.write(&[]).unwrap();
assert_eq!(writer.tell(), Ok(8));
}
let b: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7];
assert_eq!(buf.as_slice(), b);
}
#[test]
fn test_buf_writer_seek() {
let mut buf = [0 as u8, ..8];
{
let mut writer = BufWriter::new(&mut buf);
assert_eq!(writer.tell(), Ok(0));
writer.write(&[1]).unwrap();
assert_eq!(writer.tell(), Ok(1));
writer.seek(2, SeekSet).unwrap();
assert_eq!(writer.tell(), Ok(2));
writer.write(&[2]).unwrap();
assert_eq!(writer.tell(), Ok(3));
writer.seek(-2, SeekCur).unwrap();
assert_eq!(writer.tell(), Ok(1));
writer.write(&[3]).unwrap();
assert_eq!(writer.tell(), Ok(2));
writer.seek(-1, SeekEnd).unwrap();
assert_eq!(writer.tell(), Ok(7));
writer.write(&[4]).unwrap();
assert_eq!(writer.tell(), Ok(8));
}
let b: &[_] = &[1, 3, 2, 0, 0, 0, 0, 4];
assert_eq!(buf.as_slice(), b);
}
#[test]
fn test_buf_writer_error() {
let mut buf = [0 as u8, ..2];
let mut writer = BufWriter::new(&mut buf);
writer.write(&[0]).unwrap();
match writer.write(&[0, 0]) {
Ok(..) => panic!(),
Err(e) => assert_eq!(e.kind, io::OtherIoError),
}
}
#[test]
fn test_mem_reader() {
let mut reader = MemReader::new(vec!(0, 1, 2, 3, 4, 5, 6, 7));
let mut buf = [];
assert_eq!(reader.read(&mut buf), Ok(0));
assert_eq!(reader.tell(), Ok(0));
let mut buf = [0];
assert_eq!(reader.read(&mut buf), Ok(1));
assert_eq!(reader.tell(), Ok(1));
let b: &[_] = &[0];
assert_eq!(buf.as_slice(), b);
let mut buf = [0, ..4];
assert_eq!(reader.read(&mut buf), Ok(4));
assert_eq!(reader.tell(), Ok(5));
let b: &[_] = &[1, 2, 3, 4];
assert_eq!(buf.as_slice(), b);
assert_eq!(reader.read(&mut buf), Ok(3));
let b: &[_] = &[5, 6, 7];
assert_eq!(buf[0..3], b);
assert!(reader.read(&mut buf).is_err());
let mut reader = MemReader::new(vec!(0, 1, 2, 3, 4, 5, 6, 7));
assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3));
assert_eq!(reader.read_until(3).unwrap(), vec!(4, 5, 6, 7));
assert!(reader.read(&mut buf).is_err());
}
#[test]
fn test_buf_reader() {
let in_buf = vec![0, 1, 2, 3, 4, 5, 6, 7];
let mut reader = BufReader::new(in_buf.as_slice());
let mut buf = [];
assert_eq!(reader.read(&mut buf), Ok(0));
assert_eq!(reader.tell(), Ok(0));
let mut buf = [0];
assert_eq!(reader.read(&mut buf), Ok(1));
assert_eq!(reader.tell(), Ok(1));
let b: &[_] = &[0];
assert_eq!(buf.as_slice(), b);
let mut buf = [0, ..4];
assert_eq!(reader.read(&mut buf), Ok(4));
assert_eq!(reader.tell(), Ok(5));
let b: &[_] = &[1, 2, 3, 4];
assert_eq!(buf.as_slice(), b);
assert_eq!(reader.read(&mut buf), Ok(3));
let b: &[_] = &[5, 6, 7];
assert_eq!(buf[0..3], b);
assert!(reader.read(&mut buf).is_err());
let mut reader = BufReader::new(in_buf.as_slice());
assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3));
assert_eq!(reader.read_until(3).unwrap(), vec!(4, 5, 6, 7));
assert!(reader.read(&mut buf).is_err());
}
#[test]
fn test_read_char() {
let b = b"Vi\xE1\xBB\x87t";
let mut r = BufReader::new(b);
assert_eq!(r.read_char(), Ok('V'));
assert_eq!(r.read_char(), Ok('i'));
assert_eq!(r.read_char(), Ok('ệ'));
assert_eq!(r.read_char(), Ok('t'));
assert!(r.read_char().is_err());
}
#[test]
fn test_read_bad_char() {
let b = b"\x80";
let mut r = BufReader::new(b);
assert!(r.read_char().is_err());
}
#[test]
fn test_write_strings() {
let mut writer = MemWriter::new();
writer.write_str("testing").unwrap();
writer.write_line("testing").unwrap();
writer.write_str("testing").unwrap();
let mut r = BufReader::new(writer.get_ref());
assert_eq!(r.read_to_string().unwrap(), "testingtesting\ntesting".to_string());
}
#[test]
fn test_write_char() {
let mut writer = MemWriter::new();
writer.write_char('a').unwrap();
writer.write_char('\n').unwrap();
writer.write_char('ệ').unwrap();
let mut r = BufReader::new(writer.get_ref());
assert_eq!(r.read_to_string().unwrap(), "a\nệ".to_string());
}
#[test]
fn test_read_whole_string_bad() {
let buf = [0xff];
let mut r = BufReader::new(&buf);
match r.read_to_string() {
Ok(..) => panic!(),
Err(..) => {}
}
}
#[test]
fn seek_past_end() {
let buf = [0xff];
let mut r = BufReader::new(&buf);
r.seek(10, SeekSet).unwrap();
assert!(r.read(&mut []).is_err());
let mut r = MemReader::new(vec!(10));
r.seek(10, SeekSet).unwrap();
assert!(r.read(&mut []).is_err());
let mut buf = [0];
let mut r = BufWriter::new(&mut buf);
r.seek(10, SeekSet).unwrap();
assert!(r.write(&[3]).is_err());
}
#[test]
fn seek_before_0() {
let buf = [0xff];
let mut r = BufReader::new(&buf);
assert!(r.seek(-1, SeekSet).is_err());
let mut r = MemReader::new(vec!(10));
assert!(r.seek(-1, SeekSet).is_err());
let mut buf = [0];
let mut r = BufWriter::new(&mut buf);
assert!(r.seek(-1, SeekSet).is_err());
}
#[test]
fn io_read_at_least() {
let mut r = MemReader::new(vec![1, 2, 3, 4, 5, 6, 7, 8]);
let mut buf = [0, ..3];
assert!(r.read_at_least(buf.len(), &mut buf).is_ok());
let b: &[_] = &[1, 2, 3];
assert_eq!(buf.as_slice(), b);
assert!(r.read_at_least(0, buf[mut ..0]).is_ok());
assert_eq!(buf.as_slice(), b);
assert!(r.read_at_least(buf.len(), &mut buf).is_ok());
let b: &[_] = &[4, 5, 6];
assert_eq!(buf.as_slice(), b);
assert!(r.read_at_least(buf.len(), &mut buf).is_err());
let b: &[_] = &[7, 8, 6];
assert_eq!(buf.as_slice(), b);
}
fn do_bench_mem_writer(b: &mut Bencher, times: uint, len: uint) {
let src: Vec<u8> = Vec::from_elem(len, 5);
b.bytes = (times * len) as u64;
b.iter(|| {
let mut wr = MemWriter::new();
for _ in range(0, times) {
wr.write(src.as_slice()).unwrap();
}
let v = wr.unwrap();
assert_eq!(v.len(), times * len);
assert!(v.iter().all(|x| *x == 5));
});
}
#[bench]
fn bench_mem_writer_001_0000(b: &mut Bencher) {
do_bench_mem_writer(b, 1, 0)
}
#[bench]
fn bench_mem_writer_001_0010(b: &mut Bencher) {
do_bench_mem_writer(b, 1, 10)
}
#[bench]
fn bench_mem_writer_001_0100(b: &mut Bencher) {
do_bench_mem_writer(b, 1, 100)
}
#[bench]
fn bench_mem_writer_001_1000(b: &mut Bencher) {
do_bench_mem_writer(b, 1, 1000)
}
#[bench]
fn bench_mem_writer_100_0000(b: &mut Bencher) {
do_bench_mem_writer(b, 100, 0)
}
#[bench]
fn bench_mem_writer_100_0010(b: &mut Bencher) {
do_bench_mem_writer(b, 100, 10)
}
#[bench]
fn bench_mem_writer_100_0100(b: &mut Bencher) {
do_bench_mem_writer(b, 100, 100)
}
#[bench]
fn bench_mem_writer_100_1000(b: &mut Bencher) {
do_bench_mem_writer(b, 100, 1000)
}
#[bench]
fn bench_mem_reader(b: &mut Bencher) {
b.iter(|| {
let buf = [5 as u8, ..100].to_vec();
{
let mut rdr = MemReader::new(buf);
for _i in range(0u, 10) {
let mut buf = [0 as u8, .. 10];
rdr.read(&mut buf).unwrap();
assert_eq!(buf.as_slice(), [5, .. 10].as_slice());
}
}
});
}
#[bench]
fn bench_buf_writer(b: &mut Bencher) {
b.iter(|| {
let mut buf = [0 as u8, ..100];
{
let mut wr = BufWriter::new(&mut buf);
for _i in range(0u, 10) {
wr.write(&[5, .. 10]).unwrap();
}
}
assert_eq!(buf.as_slice(), [5, .. 100].as_slice());
});
}
#[bench]
fn bench_buf_reader(b: &mut Bencher) {
b.iter(|| {
let buf = [5 as u8, ..100];
{
let mut rdr = BufReader::new(&buf);
for _i in range(0u, 10) {
let mut buf = [0 as u8, .. 10];
rdr.read(&mut buf).unwrap();
assert_eq!(buf.as_slice(), [5, .. 10].as_slice());
}
}
});
}
}
// feature_gate.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Feature gating
//!
//! This modules implements the gating necessary for preventing certain compiler
//! features from being used by default. This module will crawl a pre-expanded
//! AST to ensure that there are no features which are used that are not
//! enabled.
//!
//! Features are enabled in programs via the crate-level attributes of
//! `#![feature(...)]` with a comma-separated list of features.
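//!
//! For example (illustrative), a crate opting into two currently-active
//! features:
//!
//! ```rust
//! #![feature(globs, struct_variant)]
//! ```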
use lint;
use syntax::abi::RustIntrinsic;
use syntax::ast::NodeId;
use syntax::ast;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::codemap::Span;
use syntax::visit;
use syntax::visit::Visitor;
use syntax::parse::token;
use driver::session::Session;
use std::cell::Cell;
/// This is a list of all known features since the beginning of time. This list
/// can never shrink, it may only be expanded (in order to prevent old programs
/// from failing to compile). The status of each feature may change, however.
static KNOWN_FEATURES: &'static [(&'static str, Status)] = &[
("globs", Active),
("macro_rules", Active),
("struct_variant", Active),
("once_fns", Active),
("asm", Active),
("managed_boxes", Active),
("non_ascii_idents", Active),
("thread_local", Active),
("link_args", Active),
("phase", Active),
("plugin_registrar", Active),
("log_syntax", Active),
("trace_macros", Active),
("concat_idents", Active),
("unsafe_destructor", Active),
("intrinsics", Active),
("lang_items", Active),
("simd", Active),
("default_type_params", Active),
("quote", Active),
("linkage", Active),
("struct_inherit", Active),
("overloaded_calls", Active),
("unboxed_closure_sugar", Active),
("quad_precision_float", Removed),
("rustc_diagnostic_macros", Active),
("unboxed_closures", Active),
("import_shadowing", Active),
// if you change this list without updating src/doc/rust.md, cmr will be sad
// A temporary feature gate used to enable parser extensions needed
// to bootstrap fix for #5723.
("issue_5723_bootstrap", Accepted),
// These are used to test this portion of the compiler, they don't actually
// mean anything
("test_accepted_feature", Accepted),
("test_removed_feature", Removed),
];
enum Status {
/// Represents an active feature that is currently being implemented or
/// currently being considered for addition/removal.
Active,
/// Represents a feature which has since been removed (it was once Active)
Removed,
/// This language feature has since been Accepted (it was once Active)
Accepted,
}
/// A set of features to be used by later passes.
pub struct Features {
pub default_type_params: Cell<bool>,
pub overloaded_calls: Cell<bool>,
pub rustc_diagnostic_macros: Cell<bool>,
pub import_shadowing: Cell<bool>,
}
impl Features {
pub fn new() -> Features {
Features {
default_type_params: Cell::new(false),
overloaded_calls: Cell::new(false),
rustc_diagnostic_macros: Cell::new(false),
import_shadowing: Cell::new(false),
}
}
}
struct Context<'a> {
features: Vec<&'static str>,
sess: &'a Session,
}
impl<'a> Context<'a> {
fn gate_feature(&self, feature: &str, span: Span, explain: &str) {
if !self.has_feature(feature) {
self.sess.span_err(span, explain);
self.sess.span_note(span, format!("add #![feature({})] to the \
crate attributes to enable",
feature).as_slice());
}
}
fn gate_box(&self, span: Span) {
self.gate_feature("managed_boxes", span,
"The managed box syntax is being replaced by the \
`std::gc::Gc` and `std::rc::Rc` types. Equivalent \
functionality to managed trait objects will be \
implemented but is currently missing.");
}
fn has_feature(&self, feature: &str) -> bool {
self.features.iter().any(|n| n.as_slice() == feature)
}
}
impl<'a> Visitor<()> for Context<'a> {
fn visit_ident(&mut self, sp: Span, id: ast::Ident, _: ()) {
if !token::get_ident(id).get().is_ascii() {
self.gate_feature("non_ascii_idents", sp,
"non-ascii idents are not fully supported.");
}
}
fn visit_view_item(&mut self, i: &ast::ViewItem, _: ()) {
match i.node {
ast::ViewItemUse(ref path) => {
match path.node {
ast::ViewPathGlob(..) => {
self.gate_feature("globs", path.span,
"glob import statements are \
experimental and possibly buggy");
}
_ => {}
}
}
ast::ViewItemExternCrate(..) => {
for attr in i.attrs.iter() {
if attr.name().get() == "phase" {
self.gate_feature("phase", attr.span,
"compile time crate loading is \
experimental and possibly buggy");
}
}
}
}
visit::walk_view_item(self, i, ())
}
fn visit_item(&mut self, i: &ast::Item, _:()) {
for attr in i.attrs.iter() {
if attr.name().equiv(&("thread_local")) {
self.gate_feature("thread_local", i.span,
"`#[thread_local]` is an experimental feature, and does not \
currently handle destructors. There is no corresponding \
`#[task_local]` mapping to the task model");
}
}
match i.node {
ast::ItemEnum(ref def, _) => {
for variant in def.variants.iter() {
match variant.node.kind {
ast::StructVariantKind(..) => {
self.gate_feature("struct_variant", variant.span,
"enum struct variants are \
experimental and possibly buggy");
}
_ => {}
}
}
}
ast::ItemForeignMod(ref foreign_module) => {
if attr::contains_name(i.attrs.as_slice(), "link_args") {
self.gate_feature("link_args", i.span,
"the `link_args` attribute is not portable \
across platforms, it is recommended to \
use `#[link(name = \"foo\")]` instead")
}
if foreign_module.abi == RustIntrinsic {
self.gate_feature("intrinsics",
i.span,
"intrinsics are subject to change")
}
}
ast::ItemFn(..) => {
if attr::contains_name(i.attrs.as_slice(), "plugin_registrar") {
self.gate_feature("plugin_registrar", i.span,
"compiler plugins are experimental and possibly buggy");
}
}
ast::ItemStruct(struct_definition, _) => {
if attr::contains_name(i.attrs.as_slice(), "simd") {
self.gate_feature("simd", i.span,
"SIMD types are experimental and possibly buggy");
}
match struct_definition.super_struct {
Some(ref path) => self.gate_feature("struct_inherit", path.span,
"struct inheritance is experimental \
and possibly buggy"),
None => {}
}
if struct_definition.is_virtual {
self.gate_feature("struct_inherit", i.span,
"struct inheritance (`virtual` keyword) is \
experimental and possibly buggy");
}
}
ast::ItemImpl(..) => {
if attr::contains_name(i.attrs.as_slice(),
"unsafe_destructor") {
self.gate_feature("unsafe_destructor",
i.span,
"`#[unsafe_destructor]` allows too \
many unsafe patterns and may be \
removed in the future");
}
}
_ => {}
}
visit::walk_item(self, i, ());
}
fn visit_mac(&mut self, macro: &ast::Mac, _: ()) {
let ast::MacInvocTT(ref path, _, _) = macro.node;
let id = path.segments.last().unwrap().identifier;
let quotes = ["quote_tokens", "quote_expr", "quote_ty",
"quote_item", "quote_pat", "quote_stmt"];
let msg = " is not stable enough for use and is subject to change";
if id == token::str_to_ident("macro_rules") {
self.gate_feature("macro_rules", path.span, "macro definitions are \
not stable enough for use and are subject to change");
}
else if id == token::str_to_ident("asm") {
self.gate_feature("asm", path.span, "inline assembly is not \
stable enough for use and is subject to change");
}
else if id == token::str_to_ident("log_syntax") {
self.gate_feature("log_syntax", path.span, "`log_syntax!` is not \
stable enough for use and is subject to change");
}
else if id == token::str_to_ident("trace_macros") {
self.gate_feature("trace_macros", path.span, "`trace_macros` is not \
stable enough for use and is subject to change");
}
else if id == token::str_to_ident("concat_idents") {
self.gate_feature("concat_idents", path.span, "`concat_idents` is not \
stable enough for use and is subject to change");
}
else {
for "e in quotes.iter() {
if id == token::str_to_ident(quote) {
self.gate_feature("quote",
path.span,
format!("{}{}", quote, msg).as_slice());
}
}
}
}
fn visit_foreign_item(&mut self, i: &ast::ForeignItem, _: ()) {
if attr::contains_name(i.attrs.as_slice(), "linkage") {
self.gate_feature("linkage", i.span,
"the `linkage` attribute is experimental \
and not portable across platforms")
}
visit::walk_foreign_item(self, i, ())
}
fn visit_ty(&mut self, t: &ast::Ty, _: ()) {
match t.node {
ast::TyClosure(closure) if closure.onceness == ast::Once => {
self.gate_feature("once_fns", t.span,
"once functions are \
experimental and likely to be removed");
},
ast::TyBox(_) => { self.gate_box(t.span); }
ast::TyUnboxedFn(..) => {
self.gate_feature("unboxed_closure_sugar",
t.span,
"unboxed closure trait sugar is experimental");
}
_ => {}
}
visit::walk_ty(self, t, ());
}
fn visit_expr(&mut self, e: &ast::Expr, _: ()) {
match e.node {
ast::ExprUnary(ast::UnBox, _) => {
self.gate_box(e.span);
}
ast::ExprUnboxedFn(..) => {
self.gate_feature("unboxed_closures",
e.span,
"unboxed closures are a work-in-progress \
feature with known bugs");
}
_ => {}
}
visit::walk_expr(self, e, ());
}
fn visit_generics(&mut self, generics: &ast::Generics, _: ()) {
for type_parameter in generics.ty_params.iter() {
match type_parameter.default {
Some(ty) => {
self.gate_feature("default_type_params", ty.span,
"default type parameters are \
experimental and possibly buggy");
}
None => {}
}
}
visit::walk_generics(self, generics, ());
}
fn visit_attribute(&mut self, attr: &ast::Attribute, _: ()) {
if attr::contains_name([*attr], "lang") {
self.gate_feature("lang_items",
attr.span,
"language items are subject to change");
}
}
fn visit_fn(&mut self,
fn_kind: &visit::FnKind,
fn_decl: &ast::FnDecl,
block: &ast::Block,
span: Span,
_: NodeId,
(): ()) {
match *fn_kind {
visit::FkItemFn(_, _, _, ref abi) if *abi == RustIntrinsic => {
self.gate_feature("intrinsics",
span,
"intrinsics are subject to change")
}
_ => {}
}
visit::walk_fn(self, fn_kind, fn_decl, block, span, ());
}
}
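// Illustrative sketch (an assumption, not part of the original source): the
// kind of crate attribute that check_crate below consumes; the feature name
// is made up for the example.
//
//     #![feature(default_type_params)]
//
// Active features are recorded, removed ones are hard errors, accepted ones
// only warn, and unknown names fall through to the UNKNOWN_FEATURES lint.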
pub fn check_crate(sess: &Session, krate: &ast::Crate) {
let mut cx = Context {
features: Vec::new(),
sess: sess,
};
for attr in krate.attrs.iter() {
if !attr.check_name("feature") {
continue
}
match attr.meta_item_list() {
None => {
sess.span_err(attr.span, "malformed feature attribute, \
expected #![feature(...)]");
}
Some(list) => {
for &mi in list.iter() {
let name = match mi.node {
ast::MetaWord(ref word) => (*word).clone(),
_ => {
sess.span_err(mi.span,
"malformed feature, expected just \
one word");
continue
}
};
match KNOWN_FEATURES.iter()
.find(|& &(n, _)| name.equiv(&n)) {
Some(&(name, Active)) => { cx.features.push(name); }
Some(&(_, Removed)) => {
sess.span_err(mi.span, "feature has been removed");
}
Some(&(_, Accepted)) => {
sess.span_warn(mi.span, "feature has been added to Rust, \
directive not necessary");
}
None => {
sess.add_lint(lint::builtin::UNKNOWN_FEATURES,
ast::CRATE_NODE_ID,
mi.span,
"unknown feature".to_string());
}
}
}
}
}
}
visit::walk_crate(&mut cx, krate, ());
sess.abort_if_errors();
sess.features.default_type_params.set(cx.has_feature("default_type_params"));
sess.features.overloaded_calls.set(cx.has_feature("overloaded_calls"));
sess.features.rustc_diagnostic_macros.set(cx.has_feature("rustc_diagnostic_macros"));
sess.features.import_shadowing.set(cx.has_feature("import_shadowing"));
}<|fim▁end|> | self.gate_feature("struct_inherit", i.span,
"struct inheritance (`virtual` keyword) is \
experimental and possibly buggy");
} |
<|file_name|>playback.py<|end_file_name|><|fim▁begin|>import logging, logging.handlers
import sys
logging.handlers.HTTPHandler('','',method='GET')
logger = logging.getLogger('simple_example')
# http_handler = logging.handlers.HTTPHandler('127.0.0.1:9022', '/event', method='GET')
http_handler = logging.handlers.HTTPHandler('127.0.0.1:9999', '/httpevent', method='GET')
logger.addHandler(http_handler)
#logger.setLevel(logging.DEBUG)
f=open(sys.argv[1])
for i in range(10):
line = f.readline()
print line
logger.critical(line)
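# Each logger.critical(line) above makes HTTPHandler urlencode the LogRecord's
# attributes and issue a GET roughly like the following (illustrative sketch;
# the exact query string depends on the record's fields):
#   GET /httpevent?name=simple_example&levelname=CRITICAL&msg=... HTTP/1.1
#   Host: 127.0.0.1:9999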
## For reference, an excerpt of the relevant Python logger
# import errno, logging, socket, os, pickle, struct, time, re
# from codecs import BOM_UTF8
# from stat import ST_DEV, ST_INO, ST_MTIME
# import queue
# try:
# import threading
# except ImportError: #pragma: no cover
# threading = None
# import http.client, urllib.parse
# port = 9022
# method = "GET"
# host = "127.0.0.1"
# url = "/"
# h = http.client.HTTPConnection(host)
# url = url + "?%s" % (sep, data)
# for item in lines:
# data = urllib.parse.urlencode(record)
# h.putrequest(method, url)
# h.putheader("Host", host)
# if method == "POST":
# h.putheader("Content-type",
# "application/x-www-form-urlencoded")
# h.putheader("Content-length", str(len(data)))<|fim▁hole|><|fim▁end|> | # h.send(data.encode('utf-8'))
# h.getresponse() #can't do anything with the result |
<|file_name|>room-bridge-store.ts<|end_file_name|><|fim▁begin|>/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* Room storage format:
* {
* id: "matrix|remote|link_key", // customisable
* matrix_id: "room_id",
* remote_id: "remote_room_id",
* matrix: { serialised matrix room info },
* remote: { serialised remote room info },
* data: { ... any additional info ... }
* }
*
* Each document can either represent a matrix room, a remote room, or
* a mapping. They look like this:
* MATRIX
* {
* id: "!room:id",
* matrix_id: "!room:id",
* matrix: { .. custom data eg name: "A happy place" .. }
* }
*
* REMOTE (e.g. IRC)
* {
* id: "irc.freenode.net_#channame",
* remote_id: "irc.freenode.net_#channame",
* remote: { .. custom data e.g. is_pm_room: true .. }
* }
*
* MAPPING
* {
* id: "!room:id__irc.freenode.net_#channame", // link key; customisable.
* matrix_id: "!room:id",
* remote_id: "irc.freenode.net_#channame",
* matrix: { .. custom data .. },
* remote: { .. custom data .. },
* data: { .. custom data about the mapping ..}
* }
*
* A unique, non-sparse index can be set on the 'id' key, and non-unique,
* sparse indexes can be set on matrix_id and remote_id to make mappings
* quicker to compute.
*
*/
import Datastore from "nedb";
import { BridgeStore } from "./bridge-store";
import { MatrixRoom, MatrixRoomData } from "../models/rooms/matrix";
import { RemoteRoom } from "../models/rooms/remote";
export class RoomBridgeStore extends BridgeStore {
public delimiter = " ";
/**
* Construct a store suitable for room bridging information. Data is stored
* as {@link RoomBridgeStoreEntry}s which have the following
* *serialized* format:
* ```
* {
* id: "unique_id", // customisable
* matrix_id: "room_id",
* remote_id: "remote_room_id",
* matrix: { serialised matrix room info },
* remote: { serialised remote room info },
* data: { ... any additional info ... }
* }
* ```
* If a unique 'id' is not given, the store will generate one by concatenating
* the `matrix_id` and the `remote_id`. The delimiter
* used is a property on this store and can be modified.
*
* The structure of Entry objects means that it is efficient to select based
* off the 'id', 'matrix_id' or 'remote_id'. Additional indexes can be added
* manually.
* @constructor
* @param db The connected NEDB database instance
*/
constructor(db: Datastore) {
super(db);
}
/**
* Insert an entry, clobbering based on the ID of the entry.
* @param entry
*/
public upsertEntry(entry: RoomBridgeStoreEntry) {
return this.upsert({
id: entry.id
}, RoomBridgeStoreEntry.serializeEntry(entry) as Record<string, unknown>);
}
/**
* Get an existing entry based on the provided entry ID.
* @param id The ID of the entry to retrieve.
*/
public getEntryById(id: string) {
return this.selectOne({
id: id
}, this.convertTo((doc: RoomStoreEntryDoc) =>
new RoomBridgeStoreEntry(doc)
));
}
/**
* Get a list of entries based on the matrix_id of each entry.
* @param matrixId
*/
public getEntriesByMatrixId(matrixId: string) {
return this.select({
matrix_id: matrixId
}, this.convertTo((doc: RoomStoreEntryDoc) =>
new RoomBridgeStoreEntry(doc)
));
}
/**
* A batch version of <code>getEntriesByMatrixId</code>.
* @param ids
* @return Resolves to a map of room_id => Entry[]
*/
public async getEntriesByMatrixIds(ids: string[]) {
// eslint-disable-next-line camelcase
const docs = await this.select<{ matrix_id: string }, RoomStoreEntryDoc>({
matrix_id: {
$in: ids
}
});
if (!docs) {
return {};
}
const entries: {[matrixId: string]: RoomBridgeStoreEntry[]} = {};
docs.forEach((doc: RoomStoreEntryDoc) => {
if (!doc.matrix_id) {
return;
}
if (!entries[doc.matrix_id]) {
entries[doc.matrix_id] = [];
}
entries[doc.matrix_id].push(new RoomBridgeStoreEntry(doc));
});
return entries;
}
/**
* Get a list of entries based on the remote_id of each entry.
* @param remoteId
*/
public getEntriesByRemoteId(remoteId: string) {
return this.select({
remote_id: remoteId
}, this.convertTo((doc: RoomStoreEntryDoc) =>
new RoomBridgeStoreEntry(doc)
));
}
/**
* Create a link between a matrix room and remote room. This will create an entry with:
* - The matrix_id set to the matrix room ID.<|fim▁hole|> * @param matrixRoom The matrix room
* @param remoteRoom The remote room
* @param data Information about this mapping.
* @param linkId The id value to set. If not given, a unique ID will be
* created from the matrix_id and remote_id.
*/
public linkRooms(matrixRoom: MatrixRoom, remoteRoom: RemoteRoom,
data: Record<string, unknown>={}, linkId?: string) {
linkId = linkId || RoomBridgeStore.createUniqueId(
matrixRoom.getId(), remoteRoom.getId(), this.delimiter
);
return this.upsert({
id: linkId
}, {
id: linkId,
remote_id: remoteRoom.getId(),
matrix_id: matrixRoom.getId(),
remote: remoteRoom.serialize(),
matrix: matrixRoom.serialize(),
data: data
});
}
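// Illustrative usage sketch (not part of the original store); the room IDs
// and data payload here are assumptions for the example:
//
//   await store.linkRooms(
//       new MatrixRoom("!abc:example.org"),
//       new RemoteRoom("irc.example.net_#chan"),
//       { origin: "config" }
//   );
//   // With no linkId given, the entry id defaults to
//   // "!abc:example.org irc.example.net_#chan" (this.delimiter is " ").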
/**
* Create an entry with only a matrix room. Sets the 'id' of the entry to the
* Matrix room ID. If an entry already exists with this 'id', it will be replaced.
* This function is useful if you just want to store a room with some data and not
* worry about any mappings.
* @param matrixRoom
* @see RoomBridgeStore#getMatrixRoom
*/
public setMatrixRoom(matrixRoom: MatrixRoom) {
const entry = new RoomBridgeStoreEntry({
id: matrixRoom.getId(),
matrix_id: matrixRoom.getId(),
matrix: matrixRoom.serialize(),
});
return this.upsertEntry(entry);
}
/**
* Get an entry's Matrix room based on the provided room_id. The entry MUST have
* an 'id' of the room_id and there MUST be a Matrix room contained within the
* entry for this to return.
* @param roomId
* @see RoomBridgeStore#setMatrixRoom
*/
public getMatrixRoom(roomId: string) {
return this.getEntryById(roomId).then(function(e) {
return e ? e.matrix : null;
});
}
/**
* Get all entries with the given remote_id which have a Matrix room within.
* @param remoteId
*/
public async getLinkedMatrixRooms(remoteId: string) {
const entries = await this.getEntriesByRemoteId(remoteId);
if (!entries) {
return [];
}
return entries.filter(function(e) {
return Boolean(e.matrix);
}).map(function(e) {
return e.matrix;
}) as MatrixRoom[];
}
/**
* Get all entries with the given matrix_id which have a Remote room within.
* @param matrixId
*/
public async getLinkedRemoteRooms(matrixId: string) {
const entries = await this.getEntriesByMatrixId(matrixId);
if (!entries) {
return [];
}
return entries.filter(function(e) {
return Boolean(e.remote);
}).map(function(e) {
return e.remote;
}) as RemoteRoom[];
}
/**
* A batched version of `getLinkedRemoteRooms`.
* @param matrixIds
* @return A mapping of room_id to RemoteRoom.
* @see RoomBridgeStore#getLinkedRemoteRooms
*/
public async batchGetLinkedRemoteRooms(matrixIds: string[]) {
const entryMap = await this.getEntriesByMatrixIds(matrixIds);
const result: {[roomId: string]: RemoteRoom[]} = {};
for (const [key, obj] of Object.entries(entryMap)) {
result[key] = obj.filter((e) => {
return Boolean(e.remote);
}).map((e) => {
return e.remote;
}) as RemoteRoom[];
}
return result;
}
/**
* Get a list of entries based on a RemoteRoom data value.
* @param data The data values to retrieve based from.
* @example
* remoteRoom.set("some_key", "some_val");
* // store remoteRoom and then:
* store.getEntriesByRemoteRoomData({
* some_key: "some_val"
* });
*/
public getEntriesByRemoteRoomData(data: Record<string, unknown>) {
Object.keys(data).forEach(function(k) {
const query = data[k];
delete data[k];
data["remote." + k] = query;
});
return this.select(data, this.convertTo((doc: RoomStoreEntryDoc) =>
new RoomBridgeStoreEntry(doc)
));
}
/**
* Get a list of entries based on a MatrixRoom data value.
* @param data The data values to retrieve based from.
* @example
* matrixRoom.set("some_key", "some_val");
* // store matrixRoom and then:
* store.getEntriesByMatrixRoomData({
* some_key: "some_val"
* });
*/
public getEntriesByMatrixRoomData(data: Record<string, unknown>) {
Object.keys(data).forEach(function(k) {
const query = data[k];
delete data[k];
data["matrix.extras." + k] = query;
});
return this.select(data, this.convertTo((doc: RoomStoreEntryDoc) =>
new RoomBridgeStoreEntry(doc)
));
}
/**
* Get a list of entries based on the link's data value.
* @param data The data values to retrieve based from.
* @example
* store.linkRooms(matrixRoom, remoteRoom, { some_key: "some_val" });
* store.getEntriesByLinkData({
* some_key: "some_val"
* });
*/
public getEntriesByLinkData(data: Record<string, unknown>) {
Object.keys(data).forEach(function(k) {
const query = data[k];
delete data[k];
data["data." + k] = query;
});
return this.select(data, this.convertTo((doc: RoomStoreEntryDoc) =>
new RoomBridgeStoreEntry(doc)
));
}
/**
* Remove entries based on remote room data.
* @param data The data to match.
* @example
* remoteRoom.set("a_key", "a_val");
* // store remoteRoom and then:
* store.removeEntriesByRemoteRoomData({
* a_key: "a_val"
* });
*/
public removeEntriesByRemoteRoomData(data: Record<string, unknown>) {
Object.keys(data).forEach(function(k) {
const query = data[k];
delete data[k];
data["remote." + k] = query;
});
return this.delete(data);
}
/**
* Remove entries with this remote room id.
* @param remoteId The remote id.
* @example
* new RemoteRoom("foobar");
* // store the RemoteRoom and then:
* store.removeEntriesByRemoteRoomId("foobar");
*/
public removeEntriesByRemoteRoomId(remoteId: string) {
return this.delete({
remote_id: remoteId
});
}
/**
* Remove entries based on matrix room data.
* @param data The data to match.
* @example
* matrixRoom.set("a_key", "a_val");
* // store matrixRoom and then:
* store.removeEntriesByMatrixRoomData({
* a_key: "a_val"
* });
*/
public removeEntriesByMatrixRoomData(data: Record<string, unknown>) {
Object.keys(data).forEach(function(k) {
const query = data[k];
delete data[k];
data["matrix.extras." + k] = query;
});
return this.delete(data);
}
/**
* Remove entries with this matrix room id.
* @param matrixId The matrix id.
* @example
* new MatrixRoom("!foobar:matrix.org");
* // store the MatrixRoom and then:
* store.removeEntriesByMatrixRoomId("!foobar:matrix.org");
*/
public removeEntriesByMatrixRoomId(matrixId: string) {
return this.delete({
matrix_id: matrixId
});
}
/**
* Remove entries based on the link's data value.
* @param data The data to match.
* @example
* store.linkRooms(matrixRoom, remoteRoom, { a_key: "a_val" });
* store.removeEntriesByLinkData({
* a_key: "a_val"
* });
*/
public removeEntriesByLinkData(data: Record<string, unknown>) {
Object.keys(data).forEach(function(k) {
const query = data[k];
delete data[k];
data["data." + k] = query;
});
return this.delete(data);
}
/**
* Remove an existing entry based on the provided entry ID.
* @param id The ID of the entry to remove.
* @example
* store.removeEntryById("anid");
*/
public removeEntryById(id: string) {
return this.delete({ id });
}
public static createUniqueId(matrixRoomId: string, remoteRoomId: string, delimiter: string) {
return (matrixRoomId || "") + delimiter + (remoteRoomId || "");
}
}
interface RoomStoreEntryDoc {
id?: string;
// eslint-disable-next-line camelcase
remote_id?: string;
// eslint-disable-next-line camelcase
matrix_id?: string;
remote?: Record<string, unknown>;
matrix?: MatrixRoomData;
data?: Record<string, unknown>;
}
export class RoomBridgeStoreEntry {
public id?: string;
public matrix?: MatrixRoom;
public remote?: RemoteRoom;
public data: Record<string, unknown>;
constructor(doc?: RoomStoreEntryDoc) {
this.id = doc?.id || undefined;
// eslint-disable-next-line camelcase
this.matrix = doc?.matrix_id ? new MatrixRoom(doc.matrix_id, doc.matrix) : undefined;
// eslint-disable-next-line camelcase
this.remote = doc?.remote_id ? new RemoteRoom(doc.remote_id, doc.remote) : undefined;
this.data = doc?.data || {};
}
// not a member function so callers can provide a POJO
public static serializeEntry(entry: RoomBridgeStoreEntry): RoomStoreEntryDoc {
return {
id: entry.id,
remote_id: entry.remote ? entry.remote.getId() : undefined,
matrix_id: entry.matrix ? entry.matrix.getId() : undefined,
remote: entry.remote ? entry.remote.serialize() : undefined,
matrix: entry.matrix ? entry.matrix.serialize() : undefined,
data: entry.data || undefined,
}
}
}<|fim▁end|> | * - The remote_id set to the remote room ID.
* - The id set to the id value given OR a concatenation of the matrix and remote IDs
* if one is not provided. |
<|file_name|>impls.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os::windows::io::{FromRawSocket, AsRawSocket};
use winapi::SOCKET;
use {TcpBuilder, UdpBuilder, FromInner, AsInner};
use socket::Socket;
use sys;
impl FromRawSocket for TcpBuilder {
unsafe fn from_raw_socket(fd: SOCKET) -> TcpBuilder {
let sock = sys::Socket::from_inner(fd);
TcpBuilder::from_inner(Socket::from_inner(sock))
}
}
impl AsRawSocket for TcpBuilder {
fn as_raw_socket(&self) -> SOCKET {
// TODO: this unwrap() is very bad
self.as_inner().borrow().as_ref().unwrap().as_inner().raw()
}<|fim▁hole|> let sock = sys::Socket::from_inner(fd);
UdpBuilder::from_inner(Socket::from_inner(sock))
}
}
impl AsRawSocket for UdpBuilder {
fn as_raw_socket(&self) -> SOCKET {
// TODO: this unwrap() is very bad
self.as_inner().borrow().as_ref().unwrap().as_inner().raw()
}
}<|fim▁end|> | }
impl FromRawSocket for UdpBuilder {
unsafe fn from_raw_socket(fd: SOCKET) -> UdpBuilder { |
<|file_name|>_common.py<|end_file_name|><|fim▁begin|># Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities for tests of the Cython layer of gRPC Python."""
import collections
import threading
from grpc._cython import cygrpc
RPC_COUNT = 4000
EMPTY_FLAGS = 0
INVOCATION_METADATA = (
('client-md-key', 'client-md-key'),
('client-md-key-bin', b'\x00\x01' * 3000),
)
INITIAL_METADATA = (
('server-initial-md-key', 'server-initial-md-value'),
('server-initial-md-key-bin', b'\x00\x02' * 3000),
)
TRAILING_METADATA = (
('server-trailing-md-key', 'server-trailing-md-value'),
('server-trailing-md-key-bin', b'\x00\x03' * 3000),
)
class QueueDriver(object):
def __init__(self, condition, completion_queue):
self._condition = condition
self._completion_queue = completion_queue
self._due = collections.defaultdict(int)
self._events = collections.defaultdict(list)
def add_due(self, tags):
if not self._due:
def in_thread():
while True:
event = self._completion_queue.poll()
with self._condition:
self._events[event.tag].append(event)
self._due[event.tag] -= 1
self._condition.notify_all()
if self._due[event.tag] <= 0:
self._due.pop(event.tag)
if not self._due:
return
thread = threading.Thread(target=in_thread)
thread.start()
for tag in tags:
self._due[tag] += 1
def event_with_tag(self, tag):
with self._condition:
while True:
if self._events[tag]:
return self._events[tag].pop(0)
else:
self._condition.wait()
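# Illustrative sketch (not part of the original helpers) of how a QueueDriver
# is driven; `cq` and the tag value are assumptions for the example:
#
#   condition = threading.Condition()
#   driver = QueueDriver(condition, cq)
#   with condition:
#       driver.add_due({b'my-tag'})   # declare expected tags before polling
#   event = driver.event_with_tag(b'my-tag')  # blocks until the event lands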
def execute_many_times(behavior):
return tuple(behavior() for _ in range(RPC_COUNT))
class OperationResult(
collections.namedtuple('OperationResult', (
'start_batch_result',
'completion_type',
'success',
))):
pass
SUCCESSFUL_OPERATION_RESULT = OperationResult(
cygrpc.CallError.ok, cygrpc.CompletionType.operation_complete, True)
class RpcTest(object):
def setUp(self):
self.server_completion_queue = cygrpc.CompletionQueue()
self.server = cygrpc.Server([(b'grpc.so_reuseport', 0)])
self.server.register_completion_queue(self.server_completion_queue)
port = self.server.add_http2_port(b'[::]:0')
self.server.start()
self.channel = cygrpc.Channel('localhost:{}'.format(port).encode(), [],
None)
self._server_shutdown_tag = 'server_shutdown_tag'
self.server_condition = threading.Condition()
self.server_driver = QueueDriver(self.server_condition,
self.server_completion_queue)
with self.server_condition:
self.server_driver.add_due({
self._server_shutdown_tag,
})
self.client_condition = threading.Condition()
self.client_completion_queue = cygrpc.CompletionQueue()<|fim▁hole|> def tearDown(self):
self.server.shutdown(self.server_completion_queue,
self._server_shutdown_tag)
self.server.cancel_all_calls()<|fim▁end|> | self.client_driver = QueueDriver(self.client_condition,
self.client_completion_queue)
|
<|file_name|>Animate.js<|end_file_name|><|fim▁begin|>/**
* This animation class is a mixin.
*
* Ext.util.Animate provides an API for the creation of animated transitions of properties and styles.
* This class is used as a mixin and currently applied to {@link Ext.dom.Element}, {@link Ext.CompositeElement},
* {@link Ext.draw.sprite.Sprite}, {@link Ext.draw.sprite.Composite}, and {@link Ext.Component}. Note that Components
* have a limited subset of what attributes can be animated such as top, left, x, y, height, width, and
* opacity (color, paddings, and margins cannot be animated).
*
* ## Animation Basics
*
* All animations require three things - `easing`, `duration`, and `to` (the final end value for each property)
* you wish to animate. Easing and duration are defaulted values specified below.
* Easing describes how the intermediate values used during a transition will be calculated.
* {@link Ext.fx.Anim#easing Easing} allows for a transition to change speed over its duration.
* You may use the defaults for easing and duration, but you must always set a
* {@link Ext.fx.Anim#to to} property which is the end value for all animations.
*
* Popular element 'to' configurations are:
*
* - opacity
* - x
* - y
* - color
* - height
* - width
*
* Popular sprite 'to' configurations are:
*
* - translation
* - path
* - scale
* - stroke
* - rotation
*
* The default duration for animations is 250 (which is a 1/4 of a second). Duration is denoted in
* milliseconds. Therefore 1 second is 1000, 1 minute would be 60000, and so on. The default easing curve
* used for all animations is 'ease'. Popular easing functions are included and can be found in {@link Ext.fx.Anim#easing Easing}.
*
* For example, a simple animation to fade out an element with a default easing and duration:
*
* var p1 = Ext.get('myElementId');
*
* p1.animate({
* to: {
* opacity: 0
* }
* });
*
* To make this animation fade out in a tenth of a second:
*
* var p1 = Ext.get('myElementId');
*
* p1.animate({
* duration: 100,
* to: {
* opacity: 0
* }
* });
*
* ## Animation Queues
*
* By default all animations are added to a queue which allows for animation via a chain-style API.
* For example, the following code will queue 4 animations which occur sequentially (one right after the other):
*
* p1.animate({
* to: {
* x: 500
* }
* }).animate({
* to: {
* y: 150
* }
* }).animate({
* to: {
* backgroundColor: '#f00' //red
* }
* }).animate({
* to: {
* opacity: 0
* }
* });
*
* You can change this behavior by calling the {@link Ext.util.Animate#syncFx syncFx} method and all
* subsequent animations for the specified target will be run concurrently (at the same time).
*
* p1.syncFx(); //this will make all animations run at the same time
*
* p1.animate({
* to: {
* x: 500
* }
* }).animate({
* to: {
* y: 150
* }
* }).animate({
* to: {
* backgroundColor: '#f00' //red
* }
* }).animate({
* to: {
* opacity: 0
* }
* });
*
* This works the same as:
*
* p1.animate({
* to: {
* x: 500,
* y: 150,
* backgroundColor: '#f00', //red
* opacity: 0
* }
* });
*
* The {@link Ext.util.Animate#stopAnimation stopAnimation} method can be used to stop any
* currently running animations and clear any queued animations.
*
* ## Animation Keyframes
*
* You can also set up complex animations with {@link Ext.fx.Anim#keyframes keyframes} which follow the
* CSS3 Animation configuration pattern. Note rotation, translation, and scaling can only be done for sprites.
* The previous example can be written with the following syntax:
*
* p1.animate({
* duration: 1000, //one second total
* keyframes: {
* 25: { //from 0 to 250ms (25%)
* x: 0
* },
* 50: { //from 250ms to 500ms (50%)
* y: 0
* },
* 75: { //from 500ms to 750ms (75%)
* backgroundColor: '#f00' //red
* },
* 100: { //from 750ms to 1sec
* opacity: 0
* }
* }
* });
*
* ## Animation Events
*
* Each animation you create has events for {@link Ext.fx.Anim#beforeanimate beforeanimate},
* {@link Ext.fx.Anim#afteranimate afteranimate}, and {@link Ext.fx.Anim#lastframe lastframe}.
* Keyframed animations add an additional {@link Ext.fx.Animator#keyframe keyframe} event which
* fires for each keyframe in your animation.
*
* All animations support the {@link Ext.util.Observable#listeners listeners} configuration to attact functions to these events.
*
* startAnimate: function() {
* var p1 = Ext.get('myElementId');
* p1.animate({
* duration: 100,
* to: {
* opacity: 0
* },
* listeners: {
* beforeanimate: function() {
* // Execute my custom method before the animation
* this.myBeforeAnimateFn();
* },
* afteranimate: function() {
* // Execute my custom method after the animation
* this.myAfterAnimateFn();
* },
* scope: this
* });
* },
* myBeforeAnimateFn: function() {
* // My custom logic
* },
* myAfterAnimateFn: function() {
* // My custom logic
* }
*
* Because animations run asynchronously, you can determine if an animation is currently
* running on any target by using the {@link Ext.util.Animate#getActiveAnimation getActiveAnimation}
* method. This method will return false if there are no active animations or return the currently
* running {@link Ext.fx.Anim} instance.
*
* In this example, we're going to wait for the current animation to finish, then stop any other
* queued animations before we fade our element's opacity to 0:
*
* var curAnim = p1.getActiveAnimation();
* if (curAnim) {
* curAnim.on('afteranimate', function() {
* p1.stopAnimation();
* p1.animate({
* to: {
* opacity: 0
* }
* });
* });
* }
*/
Ext.define('Ext.util.Animate', {
mixinId: 'animate',
requires: [
'Ext.fx.Manager',
'Ext.fx.Anim'
],
isAnimate: true,
/**
* Performs custom animation on this object.
*
* This method is applicable to both the {@link Ext.Component Component} class and the {@link Ext.draw.sprite.Sprite Sprite}
* class. It performs animated transitions of certain properties of this object over a specified timeline.
*
* ### Animating a {@link Ext.Component Component}
*
* When animating a Component, the following properties may be specified in `from`, `to`, and `keyframe` objects:
*
* - `x` - The Component's page X position in pixels.
*
* - `y` - The Component's page Y position in pixels
*
* - `left` - The Component's `left` value in pixels.
*
* - `top` - The Component's `top` value in pixels.
*
* - `width` - The Component's `width` value in pixels.
*
* - `height` - The Component's `height` value in pixels.
*
* The following property may be set on the animation config root:
*<|fim▁hole|> *
* myWindow = Ext.create('Ext.window.Window', {
* title: 'Test Component animation',
* width: 500,
* height: 300,
* layout: {
* type: 'hbox',
* align: 'stretch'
* },
* items: [{
* title: 'Left: 33%',
* margin: '5 0 5 5',
* flex: 1
* }, {
* title: 'Left: 66%',
* margin: '5 5 5 5',
* flex: 2
* }]
* });
* myWindow.show();
* myWindow.header.el.on('click', function() {
* myWindow.animate({
* to: {
* width: (myWindow.getWidth() == 500) ? 700 : 500,
* height: (myWindow.getHeight() == 300) ? 400 : 300
* }
* });
* });
*
* For performance reasons, by default, the internal layout is only updated when the Window reaches its final `"to"`
* size. If dynamic updating of the Window's child Components is required, then configure the animation with
* `dynamic: true` and the two child items will maintain their proportions during the animation.
*
* @param {Object} config Configuration for {@link Ext.fx.Anim}.
* Note that the {@link Ext.fx.Anim#to to} config is required.
* @return {Object} this
*/
animate: function(animObj) {
var me = this;
if (Ext.fx.Manager.hasFxBlock(me.id)) {
return me;
}
Ext.fx.Manager.queueFx(new Ext.fx.Anim(me.anim(animObj)));
return this;
},
/**
* @private
* Process the passed fx configuration.
*/
anim: function(config) {
if (!Ext.isObject(config)) {
return (config) ? {} : false;
}
var me = this;
if (config.stopAnimation) {
me.stopAnimation();
}
Ext.applyIf(config, Ext.fx.Manager.getFxDefaults(me.id));
return Ext.apply({
target: me,
paused: true
}, config);
},
/**
* @private
* Get animation properties
*/
getAnimationProps: function() {
var me = this,
layout = me.layout;
return layout && layout.animate ? layout.animate : {};
},
/**
* Stops any running effects and clears this object's internal effects queue if it contains any additional effects
* that haven't started yet.
* @deprecated 4.0 Replaced by {@link #stopAnimation}
* @return {Ext.dom.Element} The Element
* @method
*/
stopFx: Ext.Function.alias(Ext.util.Animate, 'stopAnimation'),
/**
* Stops any running effects and clears this object's internal effects queue if it contains any additional effects
* that haven't started yet.
* @return {Ext.dom.Element} The Element
*/
stopAnimation: function() {
Ext.fx.Manager.stopAnimation(this.id);
return this;
},
/**
* Ensures that all effects queued after syncFx is called on this object are run concurrently. This is the opposite
* of {@link #sequenceFx}.
* @return {Object} this
*/
syncFx: function() {
Ext.fx.Manager.setFxDefaults(this.id, {
concurrent: true
});
return this;
},
/**
* Ensures that all effects queued after sequenceFx is called on this object are run in sequence. This is the
* opposite of {@link #syncFx}.
* @return {Object} this
*/
sequenceFx: function() {
Ext.fx.Manager.setFxDefaults(this.id, {
concurrent: false
});
return this;
},
/**
* @deprecated 4.0 Replaced by {@link #getActiveAnimation}
* @inheritdoc Ext.util.Animate#getActiveAnimation
* @method
*/
hasActiveFx: Ext.Function.alias(Ext.util.Animate, 'getActiveAnimation'),
/**
* Returns the current animation if this object has any effects actively running or queued, else returns false.
* @return {Ext.fx.Anim/Boolean} Anim if element has active effects, else false
*/
getActiveAnimation: function() {
return Ext.fx.Manager.getActiveAnimation(this.id);
}
});<|fim▁end|> | * - `dynamic` - Specify as true to update the Component's layout (if it is a Container) at every frame of the animation.
* *Use sparingly as laying out on every intermediate size change is an expensive operation.*
*
* For example, to animate a Window to a new size, ensuring that its internal layout and any shadow is correct: |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import *<|fim▁hole|> url(r'^(?P<tag_id>.+?)/$', 'show_tag', name='show-tag')
)<|fim▁end|> |
urlpatterns = patterns('mmda.tags.views', |
<|file_name|>cci_capture_clause.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_capture_clause.rs<|fim▁hole|>extern crate cci_capture_clause;
pub fn main() {
cci_capture_clause::foo(()).recv()
}<|fim▁end|> |
// This test makes sure we can do cross-crate inlining on functions
// that use capture clauses.
|
<|file_name|>package.go<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | package extractor // import "code.cloudfoundry.org/archiver/extractor" |
<|file_name|>EditAssignmentDetails.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012 - present Instructure, Inc.
*
* This file is part of Canvas.
*
* Canvas is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, version 3 of the License.
*
* Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import I18n from 'i18n!calendar'
import $ from 'jquery'
import moment from 'moment'
import natcompare from '../util/natcompare'
import commonEventFactory from './commonEventFactory'
import ValidatedFormView from '../views/ValidatedFormView'
import SisValidationHelper from '../util/SisValidationHelper'
import editAssignmentTemplate from 'jst/calendar/editAssignment'
import editAssignmentOverrideTemplate from 'jst/calendar/editAssignmentOverride'
import wrapper from 'jst/EmptyDialogFormWrapper'
import genericSelectOptionsTemplate from 'jst/calendar/genericSelectOptions'
import datePickerFormat from 'jsx/shared/helpers/datePickerFormat'
import {showFlashAlert} from 'jsx/shared/FlashAlert'
import withinMomentDates from 'jsx/shared/helpers/momentDateHelper'
import 'jquery.instructure_date_and_time'
import 'jquery.instructure_forms'
import 'jquery.instructure_misc_helpers'
import './fcMomentHandlebarsHelpers'
export default class EditAssignmentDetailsRewrite extends ValidatedFormView {
initialize(selector, event, contextChangeCB, closeCB) {
this.event = event
this.contextChangeCB = contextChangeCB
this.closeCB = closeCB
super.initialize({
title: this.event.title,
contexts: this.event.possibleContexts(),
date: this.event.startDate(),
postToSISEnabled: ENV.POST_TO_SIS,
postToSISName: ENV.SIS_NAME,
postToSIS:
this.event.eventType === 'assignment' ? this.event.assignment.post_to_sis : undefined,
datePickerFormat: this.event.allDay ? 'medium_with_weekday' : 'full_with_weekday'
})
this.currentContextInfo = null
if (this.event.override) {
this.template = editAssignmentOverrideTemplate
}
$(selector).append(this.render().el)
this.setupTimeAndDatePickers()
this.$el.find('select.context_id').triggerHandler('change', false)
if (this.model == null) {
this.model = this.generateNewEvent()
}
if (!this.event.isNewEvent()) {
this.$el.find('.context_select').hide()
this.$el.attr('method', 'PUT')
return this.$el.attr(
'action',
$.replaceTags(this.event.contextInfo.assignment_url, 'id', this.event.object.id)
)
}
}
setContext(newContext) {
this.$el
.find('select.context_id')
.val(newContext)
.triggerHandler('change', false)
}
contextInfoForCode(code) {
return this.event.possibleContexts().find(context => context.asset_string === code)
}
activate() {
this.$el.find('select.context_id').change()
if (this.event.assignment && this.event.assignment.assignment_group_id) {
return this.$el
.find('.assignment_group_select .assignment_group')
.val(this.event.assignment.assignment_group_id)
}
}
moreOptions(jsEvent) {
jsEvent.preventDefault()
const pieces = $(jsEvent.target)
.attr('href')
.split('#')
const data = this.$el.getFormData({object_name: 'assignment'})
const params = {}
if (data.name) {
params.title = data.name
}
if (data.due_at && this.$el.find('.datetime_field').data('unfudged-date')) {
params.due_at = this.$el
.find('.datetime_field')
.data('unfudged-date')
.toISOString()
}
if (data.assignment_group_id) {
params.assignment_group_id = data.assignment_group_id
}
params.return_to = window.location.href
pieces[0] += `?${$.param(params)}`
return (window.location.href = pieces.join('#'))
}
contextChange(jsEvent, propagate) {
if (this.ignoreContextChange) return
const context = $(jsEvent.target).val()
this.currentContextInfo = this.contextInfoForCode(context)
this.event.contextInfo = this.currentContextInfo
if (this.currentContextInfo == null) return
if (propagate !== false) this.contextChangeCB(context)
// TODO: support adding a new assignment group from this select box
const assignmentGroupsSelectOptionsInfo = {<|fim▁hole|> collection: this.currentContextInfo.assignment_groups.sort(natcompare.byKey('name'))
}
this.$el
.find('.assignment_group')
.html(genericSelectOptionsTemplate(assignmentGroupsSelectOptionsInfo))
// Update the edit and more options links with the new context
this.$el.attr('action', this.currentContextInfo.create_assignment_url)
const moreOptionsUrl = this.event.assignment
? `${this.event.assignment.html_url}/edit`
: this.currentContextInfo.new_assignment_url
return this.$el.find('.more_options_link').attr('href', moreOptionsUrl)
}
generateNewEvent() {
return commonEventFactory({}, [])
}
submitAssignment(e) {
e.preventDefault()
const data = this.getFormData()
this.disableWhileLoadingOpts = {buttons: ['.save_assignment']}
if (data.assignment != null) {
return this.submitRegularAssignment(e, data.assignment)
} else {
return this.submitOverride(e, data.assignment_override)
}
}
unfudgedDate(date) {
const unfudged = $.unfudgeDateForProfileTimezone(date)
if (unfudged) {
return unfudged.toISOString()
} else {
return ''
}
}
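// Illustrative note (an assumption, not from the original file): datetime
// fields hold "fudged" dates shifted into the user's profile timezone for
// display; a sketch of the round-trip:
//   const iso = this.unfudgedDate(pickerValue) // e.g. "2014-01-01T08:00:00.000Z"
// i.e. the value is shifted back and serialized as a UTC ISO string before
// it is submitted to the API.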
getFormData() {
const data = super.getFormData(...arguments)
if (data.assignment != null) {
data.assignment.due_at = this.unfudgedDate(data.assignment.due_at)
} else {
data.assignment_override.due_at = this.unfudgedDate(data.assignment_override.due_at)
}
return data
}
submitRegularAssignment(event, data) {
data.due_at = this.unfudgedDate(data.due_at)
if (this.event.isNewEvent()) {
data.context_code = $(this.$el)
.find('.context_id')
.val()
this.model = commonEventFactory(data, this.event.possibleContexts())
return this.submit(event)
} else {
this.event.title = data.title
this.event.start = data.due_at // fudged
this.model = this.event
return this.submit(event)
}
}
submitOverride(event, data) {
this.event.start = data.due_at // fudged
data.due_at = this.unfudgedDate(data.due_at)
this.model = this.event
return this.submit(event)
}
onSaveSuccess() {
return this.closeCB()
}
onSaveFail(xhr) {
let resp
if ((resp = JSON.parse(xhr.responseText))) {
showFlashAlert({message: resp.error, err: null, type: 'error'})
}
this.closeCB()
this.disableWhileLoadingOpts = {}
return super.onSaveFail(xhr)
}
validateBeforeSave(data, errors) {
if (data.assignment != null) {
data = data.assignment
errors = this._validateTitle(data, errors)
} else {
data = data.assignment_override
}
errors = this._validateDueDate(data, errors)
return errors
}
_validateTitle(data, errors) {
const post_to_sis = data.post_to_sis === '1'
let max_name_length = 256
const max_name_length_required = ENV.MAX_NAME_LENGTH_REQUIRED_FOR_ACCOUNT
if (post_to_sis && max_name_length_required) {
max_name_length = ENV.MAX_NAME_LENGTH
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
maxNameLength: max_name_length,
name: data.name,
maxNameLengthRequired: max_name_length_required
})
if (!data.name || $.trim(data.name.toString()).length === 0) {
errors['assignment[name]'] = [{message: I18n.t('name_is_required', 'Name is required!')}]
} else if (validationHelper.nameTooLong()) {
errors['assignment[name]'] = [
{
message: I18n.t('Name is too long, must be under %{length} characters', {
length: max_name_length + 1
})
}
]
}
return errors
}
_validateDueDate(data, errors) {
let dueDate
if (
this.event.eventType === 'assignment' &&
this.event.assignment.unlock_at &&
this.event.assignment.lock_at
) {
const startDate = moment(this.event.assignment.unlock_at)
const endDate = moment(this.event.assignment.lock_at)
dueDate = moment(this.event.start)
if (!withinMomentDates(dueDate, startDate, endDate)) {
const rangeErrorMessage = I18n.t(
'Assignment has a locked date. Due date cannot be set outside of locked date range.'
)
errors.lock_range = [{message: rangeErrorMessage}]
showFlashAlert({
message: rangeErrorMessage,
err: null,
type: 'error'
})
}
}
const post_to_sis = data.post_to_sis === '1'
if (!post_to_sis) {
return errors
}
const validationHelper = new SisValidationHelper({
postToSIS: post_to_sis,
dueDateRequired: ENV.DUE_DATE_REQUIRED_FOR_ACCOUNT,
dueDate: data.due_at
})
const error_tag = data.name != null ? 'assignment[due_at]' : 'assignment_override[due_at]'
if (validationHelper.dueDateMissing()) {
errors[error_tag] = [{message: I18n.t('Due Date is required!')}]
}
return errors
}
setupTimeAndDatePickers() {
const $field = this.$el.find('.datetime_field')
return $field.datetime_field({
datepicker: {
dateFormat: datePickerFormat(
this.event.allDay
? I18n.t('#date.formats.medium_with_weekday')
: I18n.t('#date.formats.full_with_weekday')
)
}
})
}
}
EditAssignmentDetailsRewrite.prototype.defaults = {
width: 440,
height: 384
}
EditAssignmentDetailsRewrite.prototype.events = {
...EditAssignmentDetailsRewrite.prototype.events,
'click .save_assignment': 'submitAssignment',
'click .more_options_link': 'moreOptions',
'change .context_id': 'contextChange'
}
EditAssignmentDetailsRewrite.prototype.template = editAssignmentTemplate
EditAssignmentDetailsRewrite.prototype.wrapper = wrapper
EditAssignmentDetailsRewrite.optionProperty('assignmentGroup')<|fim▁end|> | |
<|file_name|>frames.rs<|end_file_name|><|fim▁begin|>use crate::{channel::Reply, types::ChannelId, Error, Promise, PromiseResolver};
use amq_protocol::{
frame::AMQPFrame,
protocol::{basic::AMQPMethod, AMQPClass},
};
use parking_lot::Mutex;
use pinky_swear::Cancellable;
use std::{
collections::{HashMap, VecDeque},
fmt,
sync::Arc,
};
use tracing::{level_enabled, trace, Level};
pub(crate) struct ExpectedReply(
pub(crate) Reply,
pub(crate) Box<dyn Cancellable<Error> + Send>,
);
impl fmt::Debug for ExpectedReply {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("ExpectedReply").field(&self.0).finish()
}
}
#[derive(Clone, Default)]
pub(crate) struct Frames {
inner: Arc<Mutex<Inner>>,
}
impl Frames {
pub(crate) fn push(
&self,
channel_id: ChannelId,
frame: AMQPFrame,
resolver: PromiseResolver<()>,
expected_reply: Option<ExpectedReply>,
) {
self.inner
.lock()
.push(channel_id, frame, resolver, expected_reply);
}
pub(crate) fn push_frames(&self, frames: Vec<AMQPFrame>) -> Promise<()> {
self.inner.lock().push_frames(frames)
}
pub(crate) fn retry(&self, frame: (AMQPFrame, Option<PromiseResolver<()>>)) {
self.inner.lock().retry_frames.push_back(frame);
}
pub(crate) fn pop(&self, flow: bool) -> Option<(AMQPFrame, Option<PromiseResolver<()>>)> {
self.inner.lock().pop(flow)
}
pub(crate) fn find_expected_reply<P: FnMut(&ExpectedReply) -> bool>(
&self,
channel_id: ChannelId,
finder: P,
) -> Option<Reply> {
self.inner
.lock()
.expected_replies
.get_mut(&channel_id)
.and_then(|replies| {
replies
.iter()
.position(finder)
.and_then(|idx| replies.remove(idx))
})
.map(|t| t.0)
}
pub(crate) fn next_expected_close_ok_reply(
&self,
channel_id: u16,
error: Error,
) -> Option<Reply> {
self.inner
.lock()
.next_expected_close_ok_reply(channel_id, error)
}
pub(crate) fn has_pending(&self) -> bool {
self.inner.lock().has_pending()
}
pub(crate) fn drop_pending(&self, error: Error) {
self.inner.lock().drop_pending(error);
}
pub(crate) fn clear_expected_replies(&self, channel_id: ChannelId, error: Error) {
self.inner.lock().clear_expected_replies(channel_id, error);
}
}
#[derive(Default)]
struct Inner {
/* Header frames must follow basic.publish frames directly, otherwise the RabbitMQ server sends us an UNEXPECTED_FRAME */
/* After sending the Header frame, we need to send the associated Body frames before anything else for the same reason */
publish_frames: VecDeque<(AMQPFrame, Option<PromiseResolver<()>>)>,
retry_frames: VecDeque<(AMQPFrame, Option<PromiseResolver<()>>)>,
frames: VecDeque<(AMQPFrame, Option<PromiseResolver<()>>)>,
low_prio_frames: VecDeque<(AMQPFrame, Option<PromiseResolver<()>>)>,
expected_replies: HashMap<ChannelId, VecDeque<ExpectedReply>>,
}
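// Illustrative ordering sketch (an assumption, not from the original crate):
// a basic.publish with one content chunk must reach the wire contiguously as
//
//     Method(Basic.Publish) -> Header -> Body
//
// which is why `pop` below drains the Header and its Body frames from
// `low_prio_frames` into `publish_frames` before yielding anything else.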
impl fmt::Debug for Frames {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut debug = f.debug_struct("Frames");
if let Some(inner) = self.inner.try_lock() {
debug.field("expected_replies", &inner.expected_replies);
}
debug.finish()
}
}
impl Inner {
fn push(
&mut self,
channel_id: ChannelId,
frame: AMQPFrame,
resolver: PromiseResolver<()>,
expected_reply: Option<ExpectedReply>,
) {
self.frames.push_back((frame, Some(resolver)));
if let Some(reply) = expected_reply {
trace!(
channel=%channel_id,
expected_reply=?reply,
"state is now waiting"
);
self.expected_replies
.entry(channel_id)
.or_default()
.push_back(reply);
}
}
fn push_frames(&mut self, mut frames: Vec<AMQPFrame>) -> Promise<()> {
let (promise, resolver) = Promise::new();
let last_frame = frames.pop();
if level_enabled!(Level::TRACE) {
promise.set_marker("Frames".into());
}
for frame in frames {
self.low_prio_frames.push_back((frame, None));
}
if let Some(last_frame) = last_frame {
self.low_prio_frames.push_back((last_frame, Some(resolver)));
} else {
resolver.swear(Ok(()));
}
promise
}
fn pop(&mut self, flow: bool) -> Option<(AMQPFrame, Option<PromiseResolver<()>>)> {
if let Some(frame) = self
.retry_frames
.pop_front()
.or_else(|| self.publish_frames.pop_front())
.or_else(|| self.frames.pop_front())
{
return Some(frame);
}
if flow {
if let Some(frame) = self.low_prio_frames.pop_front() {
// If the next frame is a header, that means we're a basic.publish
// Header frame needs to follow directly the basic.publish frame, and Body frames
// need to be sent just after those or the AMQP server will close the connection.
// Push the header into publish_frames which is there to handle just that.
if self
.low_prio_frames
.front()
.map(|(frame, _)| frame.is_header())
.unwrap_or(false)
{
// Yes, this will always be Some() with a Header frame, but let's keep our unwrap() count low
if let Some(next_frame) = self.low_prio_frames.pop_front() {
self.publish_frames.push_back(next_frame);
}
while let Some(next_frame) = self.low_prio_frames.pop_front() {
match next_frame.0 {
AMQPFrame::Body(..) => {
self.publish_frames.push_back(next_frame);
}
_ => {
// We've exhausted Body frames for this publish, push back the next one and exit
self.low_prio_frames.push_front(next_frame);
break;
}
}
}
}
return Some(frame);
}
}
None
}
fn has_pending(&self) -> bool {
!(self.retry_frames.is_empty()
&& self.publish_frames.is_empty()
&& self.frames.is_empty()
&& self.low_prio_frames.is_empty())
}
fn drop_pending(&mut self, error: Error) {
Self::drop_pending_frames(&mut self.retry_frames, error.clone());
Self::drop_pending_frames(&mut self.publish_frames, error.clone());
Self::drop_pending_frames(&mut self.frames, error.clone());
Self::drop_pending_frames(&mut self.low_prio_frames, error.clone());
for (_, replies) in self.expected_replies.drain() {
Self::cancel_expected_replies(replies, error.clone());
}
}
fn drop_pending_frames(
frames: &mut VecDeque<(AMQPFrame, Option<PromiseResolver<()>>)>,
error: Error,
) {
for (frame, resolver) in std::mem::take(frames) {
if let Some(resolver) = resolver {
match frame {
AMQPFrame::Method(_, AMQPClass::Basic(AMQPMethod::Cancel(_))) => {
resolver.swear(Ok(()))
}
_ => resolver.swear(Err(error.clone())),
}
}
}
}
fn next_expected_close_ok_reply(&mut self, channel_id: u16, error: Error) -> Option<Reply> {
let expected_replies = self.expected_replies.get_mut(&channel_id)?;
while let Some(reply) = expected_replies.pop_front() {
match &reply.0 {
Reply::ChannelCloseOk(_) => return Some(reply.0),
Reply::BasicCancelOk(pinky) => pinky.swear(Ok(())), // Channel close means consumer is canceled automatically
_ => reply.1.cancel(error.clone()),
}
}
None
}
fn clear_expected_replies(&mut self, channel_id: ChannelId, error: Error) {
if let Some(replies) = self.expected_replies.remove(&channel_id) {
Self::cancel_expected_replies(replies, error);
}<|fim▁hole|> fn cancel_expected_replies(replies: VecDeque<ExpectedReply>, error: Error) {
for ExpectedReply(reply, cancel) in replies {
match reply {
Reply::BasicCancelOk(pinky) => pinky.swear(Ok(())),
_ => cancel.cancel(error.clone()),
}
}
}
}<|fim▁end|> | }
|
<|file_name|>rdataset.py<|end_file_name|><|fim▁begin|># Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)"""
import random
from io import StringIO
import struct
import dns.exception
import dns.rdatatype
import dns.rdataclass
import dns.rdata
import dns.set
from ._compat import string_types
# define SimpleSet here for backwards compatibility
SimpleSet = dns.set.Set
class DifferingCovers(dns.exception.DNSException):
"""An attempt was made to add a DNS SIG/RRSIG whose covered type
is not the same as that of the other rdatas in the rdataset."""
class IncompatibleTypes(dns.exception.DNSException):
"""An attempt was made to add DNS RR data of an incompatible type."""
class Rdataset(dns.set.Set):
"""A DNS rdataset."""
__slots__ = ['rdclass', 'rdtype', 'covers', 'ttl']
def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE, ttl=0):
"""Create a new rdataset of the specified class and type.
*rdclass*, an ``int``, the rdataclass.
*rdtype*, an ``int``, the rdatatype.
*covers*, an ``int``, the covered rdatatype.
*ttl*, an ``int``, the TTL.
"""
super(Rdataset, self).__init__()
self.rdclass = rdclass
self.rdtype = rdtype
self.covers = covers
self.ttl = ttl
def _clone(self):
obj = super(Rdataset, self)._clone()
obj.rdclass = self.rdclass
obj.rdtype = self.rdtype
obj.covers = self.covers
obj.ttl = self.ttl
return obj
def update_ttl(self, ttl):
"""Perform TTL minimization.
Set the TTL of the rdataset to be the lesser of the set's current
TTL or the specified TTL. If the set contains no rdatas, set the TTL
to the specified TTL.
*ttl*, an ``int``.
"""
if len(self) == 0:
self.ttl = ttl
elif ttl < self.ttl:
self.ttl = ttl
def add(self, rd, ttl=None):
"""Add the specified rdata to the rdataset.
If the optional *ttl* parameter is supplied, then
``self.update_ttl(ttl)`` will be called prior to adding the rdata.
*rd*, a ``dns.rdata.Rdata``, the rdata
*ttl*, an ``int``, the TTL.
Raises ``dns.rdataset.IncompatibleTypes`` if the type and class
do not match the type and class of the rdataset.
Raises ``dns.rdataset.DifferingCovers`` if the type is a signature
type and the covered type does not match that of the rdataset.
"""
#
# If we're adding a signature, do some special handling to
# check that the signature covers the same type as the
# other rdatas in this rdataset. If this is the first rdata
# in the set, initialize the covers field.
#
if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
raise IncompatibleTypes
if ttl is not None:
self.update_ttl(ttl)
if self.rdtype == dns.rdatatype.RRSIG or \
self.rdtype == dns.rdatatype.SIG:
covers = rd.covers()
if len(self) == 0 and self.covers == dns.rdatatype.NONE:
self.covers = covers
elif self.covers != covers:
raise DifferingCovers<|fim▁hole|>
def union_update(self, other):
self.update_ttl(other.ttl)
super(Rdataset, self).union_update(other)
def intersection_update(self, other):
self.update_ttl(other.ttl)
super(Rdataset, self).intersection_update(other)
def update(self, other):
"""Add all rdatas in other to self.
*other*, a ``dns.rdataset.Rdataset``, the rdataset from which
to update.
"""
self.update_ttl(other.ttl)
super(Rdataset, self).update(other)
def __repr__(self):
if self.covers == 0:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdataset>'
def __str__(self):
return self.to_text()
def __eq__(self, other):
if not isinstance(other, Rdataset):
return False
if self.rdclass != other.rdclass or \
self.rdtype != other.rdtype or \
self.covers != other.covers:
return False
return super(Rdataset, self).__eq__(other)
def __ne__(self, other):
return not self.__eq__(other)
def to_text(self, name=None, origin=None, relativize=True,
override_rdclass=None, **kw):
"""Convert the rdataset into DNS master file format.
See ``dns.name.Name.choose_relativity`` for more information
on how *origin* and *relativize* determine the way names
are emitted.
Any additional keyword arguments are passed on to the rdata
``to_text()`` method.
*name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with
*name* as the owner name.
*origin*, a ``dns.name.Name`` or ``None``, the origin for relative
names.
*relativize*, a ``bool``. If ``True``, names will be relativized
to *origin*.
"""
if name is not None:
name = name.choose_relativity(origin, relativize)
ntext = str(name)
pad = ' '
else:
ntext = ''
pad = ''
s = StringIO()
if override_rdclass is not None:
rdclass = override_rdclass
else:
rdclass = self.rdclass
if len(self) == 0:
#
# Empty rdatasets are used for the question section, and in
# some dynamic updates, so we don't need to print out the TTL
# (which is meaningless anyway).
#
s.write(u'%s%s%s %s\n' % (ntext, pad,
dns.rdataclass.to_text(rdclass),
dns.rdatatype.to_text(self.rdtype)))
else:
for rd in self:
s.write(u'%s%s%d %s %s %s\n' %
(ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass),
dns.rdatatype.to_text(self.rdtype),
rd.to_text(origin=origin, relativize=relativize,
**kw)))
#
# We strip off the final \n for the caller's convenience in printing
#
return s.getvalue()[:-1]
def to_wire(self, name, file, compress=None, origin=None,
override_rdclass=None, want_shuffle=True):
"""Convert the rdataset to wire format.
*name*, a ``dns.name.Name`` is the owner name to use.
*file* is the file where the name is emitted (typically a
BytesIO file).
*compress*, a ``dict``, is the compression table to use. If
``None`` (the default), names will not be compressed.
*origin* is a ``dns.name.Name`` or ``None``. If the name is
relative and origin is not ``None``, then *origin* will be appended
to it.
*override_rdclass*, an ``int``, is used as the class instead of the
class of the rdataset. This is useful when rendering rdatasets
associated with dynamic updates.
*want_shuffle*, a ``bool``. If ``True``, then the order of the
Rdatas within the Rdataset will be shuffled before rendering.
Returns an ``int``, the number of records emitted.
"""
if override_rdclass is not None:
rdclass = override_rdclass
want_shuffle = False
else:
rdclass = self.rdclass
file.seek(0, 2)
if len(self) == 0:
name.to_wire(file, compress, origin)
stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)
file.write(stuff)
return 1
else:
if want_shuffle:
l = list(self)
random.shuffle(l)
else:
l = self
for rd in l:
name.to_wire(file, compress, origin)
stuff = struct.pack("!HHIH", self.rdtype, rdclass,
self.ttl, 0)
file.write(stuff)
start = file.tell()
rd.to_wire(file, compress, origin)
end = file.tell()
assert end - start < 65536
file.seek(start - 2)
stuff = struct.pack("!H", end - start)
file.write(stuff)
file.seek(0, 2)
return len(self)
def match(self, rdclass, rdtype, covers):
"""Returns ``True`` if this rdataset matches the specified class,
type, and covers.
"""
if self.rdclass == rdclass and \
self.rdtype == rdtype and \
self.covers == covers:
return True
return False
def from_text_list(rdclass, rdtype, ttl, text_rdatas):
"""Create an rdataset with the specified class, type, and TTL, and with
the specified list of rdatas in text format.
Returns a ``dns.rdataset.Rdataset`` object.
"""
if isinstance(rdclass, string_types):
rdclass = dns.rdataclass.from_text(rdclass)
if isinstance(rdtype, string_types):
rdtype = dns.rdatatype.from_text(rdtype)
r = Rdataset(rdclass, rdtype)
r.update_ttl(ttl)
for t in text_rdatas:
rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
r.add(rd)
return r
def from_text(rdclass, rdtype, ttl, *text_rdatas):
"""Create an rdataset with the specified class, type, and TTL, and with
the specified rdatas in text format.
Returns a ``dns.rdataset.Rdataset`` object.
"""
return from_text_list(rdclass, rdtype, ttl, text_rdatas)
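# Illustrative usage sketch (not part of the module); the addresses below are
# assumptions for the example:
#
#   rds = from_text('IN', 'A', 300, '10.0.0.1', '10.0.0.2')
#   assert len(rds) == 2 and rds.ttl == 300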
def from_rdata_list(ttl, rdatas):
"""Create an rdataset with the specified TTL, and with
the specified list of rdata objects.
Returns a ``dns.rdataset.Rdataset`` object.
"""
if len(rdatas) == 0:
raise ValueError("rdata list must not be empty")
r = None
for rd in rdatas:
if r is None:
r = Rdataset(rd.rdclass, rd.rdtype)
r.update_ttl(ttl)
r.add(rd)
return r
def from_rdata(ttl, *rdatas):
"""Create an rdataset with the specified TTL, and with
the specified rdata objects.
Returns a ``dns.rdataset.Rdataset`` object.
"""
return from_rdata_list(ttl, rdatas)<|fim▁end|> | if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
self.clear()
super(Rdataset, self).add(rd) |
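# A minimal sketch of the module-level helpers above in use; the class/type
# names and addresses are illustrative, and dns.rdataclass/dns.rdatatype are
# assumed to be importable as in the surrounding dnspython-style package.
#
#     rds = from_text('IN', 'A', 300, '10.0.0.1', '10.0.0.2')
#     assert rds.match(dns.rdataclass.IN, dns.rdatatype.A, dns.rdatatype.NONE)
#     print(rds.to_text())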
<|file_name|>hm.py<|end_file_name|><|fim▁begin|># Copyright 2014, Jeff Buttars, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import acos_client.errors as acos_errors
import acos_client.v30.base as base
class HealthMonitor(base.BaseV30):
# Valid method objects
ICMP = 'icmp'
TCP = 'tcp'
HTTP = 'http'
HTTPS = 'https'
url_prefix = "/health/monitor/"
_method_objects = {
ICMP: {
"icmp": 1
},
HTTP: {
"http": 1,
"http-port": 80,
"http-expect": 1,
"http-response-code": "200",
"http-url": 1,
"url-type": "GET",
"url-path": "/",
},
HTTPS: {
"https": 1,
"web-port": 443,
"https-expect": 1,
"https-response-code": "200",
"https-url": 1,
"url-type": "GET",
"url-path": "/",
"disable-sslv2hello": 0
},
TCP: {
"method-tcp": 1,
"tcp-port": 80
},
}
def get(self, name, **kwargs):
return self._get(self.url_prefix + name, **kwargs)
def _set(self, action, name, mon_method, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, update=False,
**kwargs):
params = {
"monitor": {
"name": name,
"retry": int(max_retries),
"interval": int(interval),
"timeout": int(timeout),
"method": {
mon_method: self._method_objects[mon_method]
}
}
}
if method:
params['monitor']['method'][mon_method]['url-type'] = method
if url:
params['monitor']['method'][mon_method]['url-path'] = url
if expect_code:
k = "%s-response-code" % mon_method
params['monitor']['method'][mon_method][k] = str(expect_code)
if port:
if mon_method == self.HTTPS:
k = 'web-port'
else:
k = '%s-port' % mon_method
params['monitor']['method'][mon_method][k] = int(port)
if update:
action += name
self._post(action, params, **kwargs)
def create(self, name, mon_type, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, **kwargs):
try:
self.get(name)
except acos_errors.NotFound:
pass
else:
raise acos_errors.Exists()
self._set(self.url_prefix, name, mon_type, interval, timeout,
max_retries, method, url, expect_code, port, **kwargs)
def update(self, name, mon_type, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, **kwargs):
self.get(name) # We want a NotFound if it does not exist
self._set(self.url_prefix, name, mon_type, interval, timeout,
max_retries, method, url, expect_code, port, update=True,<|fim▁hole|><|fim▁end|> | **kwargs)
def delete(self, name):
self._delete(self.url_prefix + name) |
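# A sketch of driving this class; how a HealthMonitor instance is constructed
# (the session/client plumbing) is an assumption, not shown in this file.
#
#     hm = HealthMonitor(session)
#     hm.create('hm-web', HealthMonitor.HTTP, 5, 5, 3,
#               url='/healthz', expect_code=200, port=8080)
#     hm.update('hm-web', HealthMonitor.HTTP, 10, 5, 3)  # NotFound if absent
#     hm.delete('hm-web')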
<|file_name|>notokufiarabic_regular.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
class Charset(object):
common_name = 'NotoKufiArabic-Regular'
native_name = ''
def glyphs(self):
glyphs = []
glyphs.append(0x0261) #uni0759.fina
glyphs.append(0x007F) #uni0625
glyphs.append(0x00D4) #uni0624
glyphs.append(0x0005) #uni0627
glyphs.append(0x00E2) #uni0626
glyphs.append(0x0004) #uni0621
glyphs.append(0x007D) #uni0623
glyphs.append(0x0081) #uni0622
glyphs.append(0x009C) #uni0686.medi
glyphs.append(0x0099) #uni0629
glyphs.append(0x0085) #uni0628
glyphs.append(0x0267) #uni075C.fina
glyphs.append(0x0256) #uni0756.init
glyphs.append(0x007E) #uni0623.fina
glyphs.append(0x0173) #uni069A.init
glyphs.append(0x01A9) #uni06AB.init
glyphs.append(0x02B8) #wavyhamza_above
glyphs.append(0x00C8) #veh.fina
glyphs.append(0x0192) #uni06A3.init
glyphs.append(0x02C3) #uni06C8.fina
glyphs.append(0x01BC) #uni06B1.fina
glyphs.append(0x020E) #uni06AD.finamorocco
glyphs.append(0x0008) #uni062D.init
glyphs.append(0x00DE) #uni06CC.medi
glyphs.append(0x00A1) #uni062E
glyphs.append(0x0007) #uni062D
glyphs.append(0x000B) #uni062F
glyphs.append(0x008D) #uni062A
glyphs.append(0x0129) #uni067B.init
glyphs.append(0x009D) #uni062C
glyphs.append(0x0091) #uni062B
glyphs.append(0x00E8) #uni06440625.isol
glyphs.append(0x0279) #uni0760.medi
glyphs.append(0x02A3) #uni076B.fina
glyphs.append(0x01D0) #uni06B6.fina
glyphs.append(0x01E7) #uni06BF
glyphs.append(0x0072) #uni066E.init
glyphs.append(0x00E5) #uni0626.fina
glyphs.append(0x025C) #uni0758
glyphs.append(0x01E0) #uni06BB.fina
glyphs.append(0x0284) #uni0763.init
glyphs.append(0x01C5) #uni06B3.init
glyphs.append(0x00DB) #uni064A.fina
glyphs.append(0x0033) #uni06440627.fina
glyphs.append(0x0189) #uni06A0.init
glyphs.append(0x017A) #uni069C.fina
glyphs.append(0x0134) #uni067F.fina
glyphs.append(0x0101) #dammatan_01
glyphs.append(0x0216) #uni06B50627.fina
glyphs.append(0x0036) #uni066E.fina
glyphs.append(0x02CC) #uni06CE.init
glyphs.append(0x0075) #beh_dotless_alt.medi
glyphs.append(0x02A0) #uni076A.init
glyphs.append(0x0108) #Ghunna_above
glyphs.append(0x0027) #uni0645.init
glyphs.append(0x0031) #uni0649.fina
glyphs.append(0x02C6) #uni06CA
glyphs.append(0x0073) #uni066E.medi
glyphs.append(0x026D) #uni075D.medi
glyphs.append(0x02E1) #uni060D
glyphs.append(0x01BD) #uni06B1.init
glyphs.append(0x02DD) #uni06DD
glyphs.append(0x0257) #uni0756.medi
glyphs.append(0x0281) #uni0762.medi
glyphs.append(0x017F) #uni069D.init
glyphs.append(0x023E) #uni0750.init
glyphs.append(0x015A) #uni068C.fina
glyphs.append(0x005A) #uni06BA.fina
glyphs.append(0x018A) #uni06A0.medi
glyphs.append(0x01AC) #uni06AC.fina
glyphs.append(0x018E) #uni06A2.init
glyphs.append(0x0088) #uni0628.fina
glyphs.append(0x00F0) #uni06C2.fina
glyphs.append(0x0196) #uni06A4.medi
glyphs.append(0x0295) #uni0767.medi
glyphs.append(0x0141) #uni0682.init
glyphs.append(0x0062) #uni064B
glyphs.append(0x0265) #uni075B.fina
glyphs.append(0x02E5) #threedots_alt1.below
glyphs.append(0x02CD) #uni06CE.medi
glyphs.append(0x02D5) #uni06D1.fina
glyphs.append(0x01F5) #uni06DB
glyphs.append(0x0138) #uni0680.fina
glyphs.append(0x0277) #uni0760.fina
glyphs.append(0x0133) #uni067F
glyphs.append(0x0260) #uni0759
glyphs.append(0x012F) #uni067D
glyphs.append(0x0089) #uni067E
glyphs.append(0x0127) #uni067B
glyphs.append(0x012B) #uni067C
glyphs.append(0x0123) #uni067A
glyphs.append(0x00EE) #heh_ae.fina
glyphs.append(0x019A) #uni06A5.medi
glyphs.append(0x00D5) #uni0624.fina
glyphs.append(0x02AD) #twodots.vert.below
glyphs.append(0x01D9) #uni06B8.init
glyphs.append(0x02EF) #threedots_alt2.above
glyphs.append(0x008B) #uni067E.init
glyphs.append(0x01FC) #uni06E5
glyphs.append(0x01FD) #uni06E6
glyphs.append(0x00A4) #uni062E.fina
glyphs.append(0x02DF) #uni06E0
glyphs.append(0x01F8) #uni06E1
glyphs.append(0x0098) #uni0679.fina
glyphs.append(0x01FA) #uni06E3
glyphs.append(0x026B) #uni075D.fina
glyphs.append(0x01FF) #uni06E8
glyphs.append(0x02E0) #uni06E9
glyphs.append(0x0202) #uni06ED
glyphs.append(0x022A) #uni06EE
glyphs.append(0x000D) #uni0631.fina
glyphs.append(0x0125) #uni067A.init
glyphs.append(0x0200) #uni06EA
glyphs.append(0x003C) #uni066F.fina
glyphs.append(0x01A6) #uni06AA.medi
glyphs.append(0x0275) #uni075F.medi
glyphs.append(0x000F) #uni0633.init
glyphs.append(0x02F0) #twodots_alt1.above
glyphs.append(0x01C8) #uni06B4.fina
glyphs.append(0x019E) #uni06A6.medi
glyphs.append(0x0121) #uni0678
glyphs.append(0x0095) #uni0679
glyphs.append(0x011D) #uni0676
glyphs.append(0x011F) #uni0677
glyphs.append(0x011B) #uni0675
glyphs.append(0x0117) #uni0672
glyphs.append(0x0119) #uni0673
glyphs.append(0x006D) #uni0670
glyphs.append(0x0083) #uni0671
glyphs.append(0x02A9) #uni076D.medi
glyphs.append(0x01D1) #uni06B6.init
glyphs.append(0x026E) #uni075E
glyphs.append(0x02AE) #twodots.vert.small.above
glyphs.append(0x00B4) #uni0636.init
glyphs.append(0x0268) #uni075C.init
glyphs.append(0x02C5) #uni06C9.fina
glyphs.append(0x00B8) #uni0638.init
glyphs.append(0x0160) #uni068F.fina
glyphs.append(0x0204) #uni06FB.fina
glyphs.append(0x00FE) #uni06F4.urdu
glyphs.append(0x012D) #uni067C.init
glyphs.append(0x025F) #uni0758.medi
glyphs.append(0x0037) #uni066F.init
glyphs.append(0x020F) #uni06440672.isol
glyphs.append(0x01A2) #uni06A8.fina
glyphs.append(0x00B6) #uni0636.fina
glyphs.append(0x00B1) #uni0634.medi
glyphs.append(0x008F) #uni062A.medi
glyphs.append(0x02F3) #uni069F.init
glyphs.append(0x00BE) #uni063A.fina
glyphs.append(0x0241) #uni0751.fina
glyphs.append(0x0213) #uni06440675.isol
glyphs.append(0x0285) #uni0763.medi
glyphs.append(0x00B5) #uni0636.medi
glyphs.append(0x02E4) #threedots.rev_alt1.below
glyphs.append(0x02A7) #uni076D.fina
glyphs.append(0x0176) #uni069B.fina
glyphs.append(0x027F) #uni0762.fina
glyphs.append(0x0148) #uni0684.fina
glyphs.append(0x02EE) #threedots_alt1.above
glyphs.append(0x00EC) #uni06440671.isol
glyphs.append(0x02A8) #uni076D.init
glyphs.append(0x01F2) #uni06D8
glyphs.append(0x004F) #uni06C1.medi
glyphs.append(0x0128) #uni067B.fina
glyphs.append(0x0126) #uni067A.medi
glyphs.append(0x009B) #uni0686.init
glyphs.append(0x012E) #uni067C.medi
glyphs.append(0x02F4) #uni069F.medi
glyphs.append(0x0198) #uni06A5.fina
glyphs.append(0x0263) #uni075A.fina
glyphs.append(0x028B) #uni0765.fina
glyphs.append(0x02B2) #fourdots.above
glyphs.append(0x0249) #uni0753.fina
glyphs.append(0x009F) #uni062C.medi
glyphs.append(0x025A) #uni0757.init
glyphs.append(0x0291) #uni0766.medi
glyphs.append(0x00A3) #uni062E.medi
glyphs.append(0x00C9) #uni0642.init
glyphs.append(0x00BB) #uni063A
glyphs.append(0x0145) #uni0683.init
glyphs.append(0x017E) #uni069D.fina
glyphs.append(0x0253) #uni0755.medi
glyphs.append(0x0142) #uni0682.medi
glyphs.append(0x01A1) #uni06A8
glyphs.append(0x0053) #uni06A9
glyphs.append(0x00D7) #uni0649.medi
glyphs.append(0x01D6) #uni06B7.medi
glyphs.append(0x01C0) #uni06B2.fina
glyphs.append(0x0187) #uni06A0
glyphs.append(0x018B) #uni06A1
glyphs.append(0x018C) #uni06A2
glyphs.append(0x0190) #uni06A3
glyphs.append(0x02BC) #uni06A4
glyphs.append(0x0197) #uni06A5
glyphs.append(0x019B) #uni06A6<|fim▁hole|> glyphs.append(0x01A7) #uni06AB
glyphs.append(0x01AB) #uni06AC
glyphs.append(0x0002) #nonmarkingreturn
glyphs.append(0x01B3) #uni06AE
glyphs.append(0x0055) #uni06AF
glyphs.append(0x00CE) #uni0646.medi
glyphs.append(0x00A9) #uni0632
glyphs.append(0x000E) #uni0633
glyphs.append(0x00A5) #uni0630
glyphs.append(0x000C) #uni0631
glyphs.append(0x00B3) #uni0636
glyphs.append(0x0016) #uni0637
glyphs.append(0x00AF) #uni0634
glyphs.append(0x0012) #uni0635
glyphs.append(0x0029) #uni0645.fina
glyphs.append(0x00B7) #uni0638
glyphs.append(0x001A) #uni0639
glyphs.append(0x0001) #.null
glyphs.append(0x019C) #uni06A6.fina
glyphs.append(0x00AE) #uni0698.fina
glyphs.append(0x0219) #uni06B70627.isol
glyphs.append(0x0297) #uni0768.fina
glyphs.append(0x024D) #uni0754.fina
glyphs.append(0x02CF) #uni06CE.fina
glyphs.append(0x022D) #uni06EF.fina
glyphs.append(0x0146) #uni0683.medi
glyphs.append(0x0080) #uni0625.fina
glyphs.append(0x0158) #uni068B.fina
glyphs.append(0x0038) #uni066F.medi
glyphs.append(0x0032) #uni06440627.isol
glyphs.append(0x011E) #uni0676.fina
glyphs.append(0x025D) #uni0758.fina
glyphs.append(0x0278) #uni0760.init
glyphs.append(0x01CA) #uni06B4.medi
glyphs.append(0x001D) #uni0639.fina
glyphs.append(0x0013) #uni0635.init
glyphs.append(0x002A) #uni0647
glyphs.append(0x00CF) #uni0646
glyphs.append(0x0026) #uni0645
glyphs.append(0x0022) #uni0644
glyphs.append(0x0020) #uni0643
glyphs.append(0x00CB) #uni0642
glyphs.append(0x00C1) #uni0641
glyphs.append(0x0003) #space
glyphs.append(0x00CC) #uni0642.fina
glyphs.append(0x02D3) #uni06D0.medi
glyphs.append(0x0096) #uni0679.init
glyphs.append(0x0030) #uni0649
glyphs.append(0x002E) #uni0648
glyphs.append(0x0282) #uni0763
glyphs.append(0x02EA) #uniFD3F
glyphs.append(0x00F5) #uni06BE.init
glyphs.append(0x0194) #uni06A4.fina
glyphs.append(0x02C9) #uni06CB.fina
glyphs.append(0x0130) #uni067D.fina
glyphs.append(0x0136) #uni067F.medi
glyphs.append(0x0131) #uni067D.init
glyphs.append(0x0230) #uni06FF.init
glyphs.append(0x02ED) #diagonal
glyphs.append(0x02B0) #threedots.small.above
glyphs.append(0x02B7) #hamza_medial
glyphs.append(0x0065) #uni064F
glyphs.append(0x0064) #uni064E
glyphs.append(0x0063) #uni064D
glyphs.append(0x0066) #uni064C
glyphs.append(0x0039) #feh_dotless.isol
glyphs.append(0x00DA) #uni064A
glyphs.append(0x0182) #uni069E.fina
glyphs.append(0x00C4) #uni0641.fina
glyphs.append(0x0010) #uni0633.medi
glyphs.append(0x0106) #shadda_01
glyphs.append(0x0193) #uni06A3.medi
glyphs.append(0x025E) #uni0758.init
glyphs.append(0x00E3) #uni0626.init
glyphs.append(0x02CB) #uni06CF.fina
glyphs.append(0x02A1) #uni076A.medi
glyphs.append(0x00C6) #veh.init
glyphs.append(0x00EB) #uni06440622.fina
glyphs.append(0x01D5) #uni06B7.init
glyphs.append(0x0000) #.notdef
glyphs.append(0x029F) #uni076A.fina
glyphs.append(0x0105) #kasra_01
glyphs.append(0x02D7) #uni06FA
glyphs.append(0x0207) #uni06FC
glyphs.append(0x0203) #uni06FB
glyphs.append(0x020C) #uni06FE
glyphs.append(0x020B) #uni06FD
glyphs.append(0x018F) #uni06A2.medi
glyphs.append(0x022E) #uni06FF
glyphs.append(0x02E2) #patah.wide
glyphs.append(0x02F1) #threedots.rev_alt2.below
glyphs.append(0x00F3) #uni06C3.fina
glyphs.append(0x02E9) #uniFD3E
glyphs.append(0x00A0) #uni062C.fina
glyphs.append(0x00CD) #uni0646.init
glyphs.append(0x0210) #uni06440672.fina
glyphs.append(0x0115) #uni0655064D
glyphs.append(0x00D9) #uni064A.medi
glyphs.append(0x0269) #uni075C.medi
glyphs.append(0x01FB) #uni06E4
glyphs.append(0x00F6) #uni06BE.medi
glyphs.append(0x0025) #uni0644.fina
glyphs.append(0x004E) #uni06C1.init
glyphs.append(0x0103) #fatha_01
glyphs.append(0x00CA) #uni0642.medi
glyphs.append(0x0054) #uni06A9.fina
glyphs.append(0x0186) #uni069F.fina
glyphs.append(0x0084) #uni0671.fina
glyphs.append(0x0195) #uni06A4.init
glyphs.append(0x003E) #twodots.above
glyphs.append(0x02E6) #twodots.vert_alt1.below
glyphs.append(0x023A) #uni065D
glyphs.append(0x0258) #uni0757
glyphs.append(0x015C) #uni068D.fina
glyphs.append(0x021C) #uni06B80627.fina
glyphs.append(0x0254) #uni0756
glyphs.append(0x0233) #riyal
glyphs.append(0x00F8) #uni06F1
glyphs.append(0x00F7) #uni06F0
glyphs.append(0x00FA) #uni06F3
glyphs.append(0x00F9) #uni06F2
glyphs.append(0x004C) #uni06F5
glyphs.append(0x004B) #uni06F4
glyphs.append(0x00FC) #uni06F7
glyphs.append(0x004D) #uni06F6
glyphs.append(0x00FB) #uni06F9
glyphs.append(0x00FD) #uni06F8
glyphs.append(0x02EC) #hah_alt.fina
glyphs.append(0x0237) #uni065A
glyphs.append(0x0238) #uni065B
glyphs.append(0x01E3) #uni06BC.init
glyphs.append(0x0014) #uni0635.medi
glyphs.append(0x0239) #uni065C
glyphs.append(0x00E4) #uni0626.medi
glyphs.append(0x0205) #uni06FB.init
glyphs.append(0x026C) #uni075D.init
glyphs.append(0x0259) #uni0757.fina
glyphs.append(0x0286) #uni0764
glyphs.append(0x028A) #uni0765
glyphs.append(0x028E) #uni0766
glyphs.append(0x0292) #uni0767
glyphs.append(0x0276) #uni0760
glyphs.append(0x027A) #uni0761
glyphs.append(0x027E) #uni0762
glyphs.append(0x01A8) #uni06AB.fina
glyphs.append(0x0220) #uni0603
glyphs.append(0x021F) #uni0602
glyphs.append(0x021E) #uni0601
glyphs.append(0x021D) #uni0600
glyphs.append(0x01C1) #uni06B2.init
glyphs.append(0x017C) #uni069C.medi
glyphs.append(0x0118) #uni0672.fina
glyphs.append(0x0074) #beh_dotless_alt.init
glyphs.append(0x0290) #uni0766.init
glyphs.append(0x012A) #uni067B.medi
glyphs.append(0x01DD) #uni06B9.init
glyphs.append(0x0172) #uni069A.fina
glyphs.append(0x02BD) #uni06C5.fina
glyphs.append(0x02A6) #uni076D
glyphs.append(0x029E) #uni076A
glyphs.append(0x02A2) #uni076B
glyphs.append(0x02A4) #uni076C
glyphs.append(0x005D) #uni060C
glyphs.append(0x0234) #uni060B
glyphs.append(0x008A) #uni067E.fina
glyphs.append(0x0222) #uni060F
glyphs.append(0x0221) #uni060E
glyphs.append(0x0211) #uni06440673.isol
glyphs.append(0x024E) #uni0754.init
glyphs.append(0x024F) #uni0754.medi
glyphs.append(0x0299) #uni0768.medi
glyphs.append(0x01B8) #uni06B0.fina
glyphs.append(0x02B3) #fourdots.below
glyphs.append(0x01E4) #uni06BC.medi
glyphs.append(0x00B2) #uni0634.fina
glyphs.append(0x012C) #uni067C.fina
glyphs.append(0x010B) #uni0651064B
glyphs.append(0x010D) #uni0651064F
glyphs.append(0x010C) #uni0651064E
glyphs.append(0x0150) #uni0687.fina
glyphs.append(0x0050) #uni06BE
glyphs.append(0x01E5) #uni06BD
glyphs.append(0x01B6) #uni06AE.medi
glyphs.append(0x0059) #uni06BA
glyphs.append(0x01E1) #uni06BC
glyphs.append(0x009E) #uni062C.init
glyphs.append(0x0139) #uni0680.init
glyphs.append(0x02DB) #uni076A0627.isol
glyphs.append(0x0114) #uni06540652
glyphs.append(0x01E2) #uni06BC.fina
glyphs.append(0x027B) #uni0761.fina
glyphs.append(0x022B) #uni06EE.fina
glyphs.append(0x01A0) #uni06A7.fina
glyphs.append(0x002B) #uni0647.init
glyphs.append(0x01B1) #uni06AD.init
glyphs.append(0x0090) #uni062A.fina
glyphs.append(0x01C9) #uni06B4.init
glyphs.append(0x01CB) #uni06B5
glyphs.append(0x01C7) #uni06B4
glyphs.append(0x01D3) #uni06B7
glyphs.append(0x01CF) #uni06B6
glyphs.append(0x01BB) #uni06B1
glyphs.append(0x01B7) #uni06B0
glyphs.append(0x01C3) #uni06B3
glyphs.append(0x01BF) #uni06B2
glyphs.append(0x02D2) #uni06D0.init
glyphs.append(0x01DB) #uni06B9
glyphs.append(0x01D7) #uni06B8
glyphs.append(0x0070) #uni062F.fina
glyphs.append(0x00BD) #uni063A.medi
glyphs.append(0x01E8) #uni06BF.fina
glyphs.append(0x01B2) #uni06AD.medi
glyphs.append(0x0082) #uni0622.fina
glyphs.append(0x02BF) #uni06C6.fina
glyphs.append(0x021A) #uni06B70627.fina
glyphs.append(0x023B) #uni065E
glyphs.append(0x0162) #uni0690.fina
glyphs.append(0x01AE) #uni06AC.medi
glyphs.append(0x016E) #uni0697.fina
glyphs.append(0x01EF) #uni06CD.fina
glyphs.append(0x0052) #uni06D2.fina
glyphs.append(0x010F) #uniFC63
glyphs.append(0x007A) #twodots.below
glyphs.append(0x00B9) #uni0638.medi
glyphs.append(0x01DC) #uni06B9.fina
glyphs.append(0x014D) #uni0685.init
glyphs.append(0x0199) #uni06A5.init
glyphs.append(0x00E6) #uni06440623.isol
glyphs.append(0x01AF) #uni06AD
glyphs.append(0x01AA) #uni06AB.medi
glyphs.append(0x008E) #uni062A.init
glyphs.append(0x0178) #uni069B.medi
glyphs.append(0x0144) #uni0683.fina
glyphs.append(0x00C2) #uni0641.init
glyphs.append(0x0243) #uni0751.medi
glyphs.append(0x024B) #uni0753.medi
glyphs.append(0x002C) #uni0647.medi
glyphs.append(0x006C) #uni0655
glyphs.append(0x006E) #uni0656
glyphs.append(0x0228) #uni0657
glyphs.append(0x0067) #uni0650
glyphs.append(0x0068) #uni0651
glyphs.append(0x0069) #uni0652
glyphs.append(0x006A) #uni0653
glyphs.append(0x0271) #uni075E.medi
glyphs.append(0x0229) #uni0658
glyphs.append(0x0236) #uni0659
glyphs.append(0x001B) #uni0639.init
glyphs.append(0x0245) #uni0752.fina
glyphs.append(0x002D) #uni0647.fina
glyphs.append(0x0283) #uni0763.fina
glyphs.append(0x0093) #uni062B.medi
glyphs.append(0x0107) #sukun_01
glyphs.append(0x013E) #uni0681.medi
glyphs.append(0x00C7) #veh.medi
glyphs.append(0x0177) #uni069B.init
glyphs.append(0x0273) #uni075F.fina
glyphs.append(0x007B) #alef_alt.isol
glyphs.append(0x0251) #uni0755.fina
glyphs.append(0x0120) #uni0677.fina
glyphs.append(0x01B0) #uni06AD.fina
glyphs.append(0x00ED) #uni06440671.fina
glyphs.append(0x0102) #kasratan_01
glyphs.append(0x029B) #uni0769.fina
glyphs.append(0x027C) #uni0761.init
glyphs.append(0x02AB) #twodots.small.below
glyphs.append(0x01E6) #uni06BD.fina
glyphs.append(0x0293) #uni0767.fina
glyphs.append(0x01CC) #uni06B5.fina
glyphs.append(0x020D) #uni06AD.morocco
glyphs.append(0x01B4) #uni06AE.fina
glyphs.append(0x006B) #uni0654
glyphs.append(0x00AA) #uni0632.fina
glyphs.append(0x00B0) #uni0634.init
glyphs.append(0x01DF) #uni06BB
glyphs.append(0x022C) #uni06EF
glyphs.append(0x0252) #uni0755.init
glyphs.append(0x01EA) #uni06BF.medi
glyphs.append(0x0294) #uni0767.init
glyphs.append(0x0057) #uni06AF.medi
glyphs.append(0x02D8) #uni06FA.fina
glyphs.append(0x000A) #uni062D.fina
glyphs.append(0x028F) #uni0766.fina
glyphs.append(0x0274) #uni075F.init
glyphs.append(0x00DF) #uni06CC.fina
glyphs.append(0x026F) #uni075E.fina
glyphs.append(0x0214) #uni06440675.fina
glyphs.append(0x0184) #uni069E.medi
glyphs.append(0x001F) #uni0643.medi
glyphs.append(0x0151) #uni0687.init
glyphs.append(0x01D2) #uni06B6.medi
glyphs.append(0x00E7) #uni06440623.fina
glyphs.append(0x01AD) #uni06AC.init
glyphs.append(0x01BA) #uni06B0.medi
glyphs.append(0x029C) #uni0769.init
glyphs.append(0x020A) #uni06FC.medi
glyphs.append(0x02AF) #twodots.vert.small.below
glyphs.append(0x0143) #uni0683
glyphs.append(0x013F) #uni0682
glyphs.append(0x013B) #uni0681
glyphs.append(0x0137) #uni0680
glyphs.append(0x014F) #uni0687
glyphs.append(0x0077) #uni0686
glyphs.append(0x014B) #uni0685
glyphs.append(0x0147) #uni0684
glyphs.append(0x0156) #uni068A.fina
glyphs.append(0x0153) #uni0689
glyphs.append(0x00A7) #uni0688
glyphs.append(0x02DC) #uni076A0627.fina
glyphs.append(0x0262) #uni075A
glyphs.append(0x0266) #uni075C
glyphs.append(0x0264) #uni075B
glyphs.append(0x0116) #uni06550650
glyphs.append(0x026A) #uni075D
glyphs.append(0x0272) #uni075F
glyphs.append(0x01D4) #uni06B7.fina
glyphs.append(0x005E) #uni061B
glyphs.append(0x02F5) #ZWSP
glyphs.append(0x0235) #uni061E
glyphs.append(0x005C) #uni061F
glyphs.append(0x003D) #onedot.above
glyphs.append(0x0079) #onedot.below
glyphs.append(0x00A8) #uni0688.fina
glyphs.append(0x01BE) #uni06B1.medi
glyphs.append(0x00BC) #uni063A.init
glyphs.append(0x014E) #uni0685.medi
glyphs.append(0x014C) #uni0685.fina
glyphs.append(0x010E) #uni06510650
glyphs.append(0x021B) #uni06B80627.isol
glyphs.append(0x00E1) #uni06D3.fina
glyphs.append(0x0152) #uni0687.medi
glyphs.append(0x02B4) #threedots.horz.below
glyphs.append(0x0017) #uni0637.init
glyphs.append(0x023D) #uni0750.fina
glyphs.append(0x01C2) #uni06B2.medi
glyphs.append(0x00C3) #uni0641.medi
glyphs.append(0x013A) #uni0680.medi
glyphs.append(0x0240) #uni0751
glyphs.append(0x023C) #uni0750
glyphs.append(0x0248) #uni0753
glyphs.append(0x0244) #uni0752
glyphs.append(0x0250) #uni0755
glyphs.append(0x024C) #uni0754
glyphs.append(0x0132) #uni067D.medi
glyphs.append(0x00FF) #uni06F7.urdu
glyphs.append(0x0223) #uni0610
glyphs.append(0x0224) #uni0611
glyphs.append(0x0225) #uni0612
glyphs.append(0x0226) #uni0613
glyphs.append(0x0227) #uni0614
glyphs.append(0x006F) #uni0615
glyphs.append(0x02B1) #threedots.small.below
glyphs.append(0x0159) #uni068C
glyphs.append(0x0157) #uni068B
glyphs.append(0x0155) #uni068A
glyphs.append(0x029D) #uni0769.medi
glyphs.append(0x015F) #uni068F
glyphs.append(0x015D) #uni068E
glyphs.append(0x015B) #uni068D
glyphs.append(0x0246) #uni0752.init
glyphs.append(0x0164) #uni0692.fina
glyphs.append(0x0024) #uni0644.medi
glyphs.append(0x0149) #uni0684.init
glyphs.append(0x022F) #uni06FF.fina
glyphs.append(0x0296) #uni0768
glyphs.append(0x015E) #uni068E.fina
glyphs.append(0x002F) #uni0648.fina
glyphs.append(0x029A) #uni0769
glyphs.append(0x0113) #uni0654064F
glyphs.append(0x0111) #uni0654064E
glyphs.append(0x0110) #uni0654064B
glyphs.append(0x0112) #uni0654064C
glyphs.append(0x0021) #uni0643.fina
glyphs.append(0x01C6) #uni06B3.medi
glyphs.append(0x0180) #uni069D.medi
glyphs.append(0x0122) #uni0678.fina
glyphs.append(0x028C) #uni0765.init
glyphs.append(0x01CE) #uni06B5.medi
glyphs.append(0x01DE) #uni06B9.medi
glyphs.append(0x017B) #uni069C.init
glyphs.append(0x0183) #uni069E.init
glyphs.append(0x01DA) #uni06B8.medi
glyphs.append(0x0094) #uni062B.fina
glyphs.append(0x017D) #uni069D
glyphs.append(0x027D) #uni0761.medi
glyphs.append(0x02AC) #twodots.vert.above
glyphs.append(0x0298) #uni0768.init
glyphs.append(0x02DA) #uni06FA.medi
glyphs.append(0x018D) #uni06A2.fina
glyphs.append(0x0100) #fathatan_01
glyphs.append(0x02B5) #smallv.arabic
glyphs.append(0x016A) #uni0695.fina
glyphs.append(0x00E9) #uni06440625.fina
glyphs.append(0x0215) #uni06B50627.isol
glyphs.append(0x02F6) #uni200C
glyphs.append(0x00C5) #veh.isol
glyphs.append(0x02F9) #uni200F
glyphs.append(0x02F8) #uni200E
glyphs.append(0x02F7) #uni200D
glyphs.append(0x00D3) #uni06C0.fina
glyphs.append(0x0206) #uni06FB.medi
glyphs.append(0x011A) #uni0673.fina
glyphs.append(0x02D1) #uni06D0.fina
glyphs.append(0x02AA) #twodots.small.above
glyphs.append(0x01A5) #uni06AA.init
glyphs.append(0x0019) #uni0637.fina
glyphs.append(0x016C) #uni0696.fina
glyphs.append(0x02E3) #twodots_alt1.below
glyphs.append(0x01F7) #uni06DF
glyphs.append(0x02DE) #uni06DE
glyphs.append(0x00F4) #uni06BE.fina
glyphs.append(0x01F6) #uni06DC
glyphs.append(0x001C) #uni0639.medi
glyphs.append(0x01F4) #uni06DA
glyphs.append(0x00A2) #uni062E.init
glyphs.append(0x011C) #uni0675.fina
glyphs.append(0x02BA) #threedots.rev.above
glyphs.append(0x0009) #uni062D.medi
glyphs.append(0x013D) #uni0681.init
glyphs.append(0x014A) #uni0684.medi
glyphs.append(0x0086) #uni0628.init
glyphs.append(0x00EF) #uni06C2
glyphs.append(0x00F2) #uni06C3
glyphs.append(0x00D2) #uni06C0
glyphs.append(0x00F1) #uni06C1
glyphs.append(0x02BE) #uni06C6
glyphs.append(0x02C0) #uni06C7
glyphs.append(0x01EB) #uni06C4
glyphs.append(0x01ED) #uni06C5
glyphs.append(0x02C1) #uni06C7.fina
glyphs.append(0x02C2) #uni06C8
glyphs.append(0x02C4) #uni06C9
glyphs.append(0x0154) #uni0689.fina
glyphs.append(0x0124) #uni067A.fina
glyphs.append(0x0255) #uni0756.fina
glyphs.append(0x0168) #uni0694.fina
glyphs.append(0x0247) #uni0752.medi
glyphs.append(0x02C7) #uni06CA.fina
glyphs.append(0x008C) #uni067E.medi
glyphs.append(0x0104) #damma_01
glyphs.append(0x00D8) #uni064A.init
glyphs.append(0x00DD) #uni06CC.init
glyphs.append(0x02C8) #uni06CB
glyphs.append(0x00DC) #uni06CC
glyphs.append(0x0166) #uni0693.fina
glyphs.append(0x0218) #uni06B60627.fina
glyphs.append(0x02CA) #uni06CF
glyphs.append(0x007C) #alef_alt.fina
glyphs.append(0x01EE) #uni06CD
glyphs.append(0x02CE) #uni06CE
glyphs.append(0x003F) #threedots.above
glyphs.append(0x01EC) #uni06C4.fina
glyphs.append(0x02EB) #hah_alt.isol
glyphs.append(0x01E9) #uni06BF.init
glyphs.append(0x01F1) #uni06D7
glyphs.append(0x01F0) #uni06D6
glyphs.append(0x00D1) #uni06D5
glyphs.append(0x02D6) #uni06D4
glyphs.append(0x00E0) #uni06D3
glyphs.append(0x0051) #uni06D2
glyphs.append(0x02D4) #uni06D1
glyphs.append(0x02D0) #uni06D0
glyphs.append(0x0191) #uni06A3.fina
glyphs.append(0x02BB) #threedots.rev.below
glyphs.append(0x01F3) #uni06D9
glyphs.append(0x009A) #uni0629.fina
glyphs.append(0x0087) #uni0628.medi
glyphs.append(0x0135) #uni067F.init
glyphs.append(0x0242) #uni0751.init
glyphs.append(0x01B5) #uni06AE.init
glyphs.append(0x0018) #uni0637.medi
glyphs.append(0x02E8) #threedots.horz_alt1.below
glyphs.append(0x023F) #uni0750.medi
glyphs.append(0x0209) #uni06FC.init
glyphs.append(0x00D0) #uni0646.fina
glyphs.append(0x0071) #wasla
glyphs.append(0x0231) #uni06FF.medi
glyphs.append(0x01C4) #uni06B3.fina
glyphs.append(0x0028) #uni0645.medi
glyphs.append(0x0056) #uni06AF.init
glyphs.append(0x0042) #uni0661
glyphs.append(0x0041) #uni0660
glyphs.append(0x0044) #uni0663
glyphs.append(0x0043) #uni0662
glyphs.append(0x0046) #uni0665
glyphs.append(0x0045) #uni0664
glyphs.append(0x0048) #uni0667
glyphs.append(0x0047) #uni0666
glyphs.append(0x004A) #uni0669
glyphs.append(0x0049) #uni0668
glyphs.append(0x0076) #uni06C1.fina
glyphs.append(0x01FE) #uni06E7
glyphs.append(0x01CD) #uni06B5.init
glyphs.append(0x0023) #uni0644.init
glyphs.append(0x013C) #uni0681.fina
glyphs.append(0x0188) #uni06A0.fina
glyphs.append(0x0270) #uni075E.init
glyphs.append(0x00C0) #uni06A9.medi
glyphs.append(0x0232) #allah
glyphs.append(0x01F9) #uni06E2
glyphs.append(0x0288) #uni0764.init
glyphs.append(0x0212) #uni06440673.fina
glyphs.append(0x005B) #uni066A
glyphs.append(0x0060) #uni066C
glyphs.append(0x005F) #uni066B
glyphs.append(0x0035) #uni066E
glyphs.append(0x0061) #uni066D
glyphs.append(0x003B) #uni066F
glyphs.append(0x02F2) #uni25CC
glyphs.append(0x01D8) #uni06B8.fina
glyphs.append(0x01B9) #uni06B0.init
glyphs.append(0x00A6) #uni0630.fina
glyphs.append(0x01A4) #uni06AA.fina
glyphs.append(0x02A5) #uni076C.fina
glyphs.append(0x024A) #uni0753.init
glyphs.append(0x003A) #uni06A1.fina
glyphs.append(0x0078) #uni0686.fina
glyphs.append(0x001E) #uni0643.init
glyphs.append(0x02B6) #circumflex.arabic
glyphs.append(0x02E7) #threedots_alt2.below
glyphs.append(0x025B) #uni0757.medi
glyphs.append(0x0109) #uni0651064C
glyphs.append(0x0201) #uni06EB
glyphs.append(0x0006) #uni0627.fina
glyphs.append(0x0092) #uni062B.init
glyphs.append(0x00BF) #uni06A9.init
glyphs.append(0x0171) #uni069A
glyphs.append(0x0175) #uni069B
glyphs.append(0x0179) #uni069C
glyphs.append(0x00AC) #uni0691.fina
glyphs.append(0x0181) #uni069E
glyphs.append(0x0185) #uni069F
glyphs.append(0x0208) #uni06FC.fina
glyphs.append(0x0174) #uni069A.medi
glyphs.append(0x0161) #uni0690
glyphs.append(0x00AB) #uni0691
glyphs.append(0x0163) #uni0692
glyphs.append(0x0165) #uni0693
glyphs.append(0x0167) #uni0694
glyphs.append(0x0169) #uni0695
glyphs.append(0x016B) #uni0696
glyphs.append(0x016D) #uni0697
glyphs.append(0x00AD) #uni0698
glyphs.append(0x016F) #uni0699
glyphs.append(0x0217) #uni06B60627.isol
glyphs.append(0x010A) #uni0651064D
glyphs.append(0x00EA) #uni06440622.isol
glyphs.append(0x019D) #uni06A6.init
glyphs.append(0x0058) #uni06AF.fina
glyphs.append(0x02B9) #wavyhamza_below
glyphs.append(0x0280) #uni0762.init
glyphs.append(0x028D) #uni0765.medi
glyphs.append(0x0289) #uni0764.medi
glyphs.append(0x00BA) #uni0638.fina
glyphs.append(0x0011) #uni0633.fina
glyphs.append(0x0034) #uni0640
glyphs.append(0x0170) #uni0699.fina
glyphs.append(0x0287) #uni0764.fina
glyphs.append(0x0140) #uni0682.fina
glyphs.append(0x02D9) #uni06FA.init
glyphs.append(0x0097) #uni0679.medi
glyphs.append(0x0040) #threedots.below
glyphs.append(0x00D6) #uni0649.init
return glyphs<|fim▁end|> | glyphs.append(0x019F) #uni06A7
glyphs.append(0x0015) #uni0635.fina
glyphs.append(0x01A3) #uni06AA |
<|file_name|>FileAwareTestLanguageScopeProvider.java<|end_file_name|><|fim▁begin|>/*
* generated by Xtext
*/
package org.eclipse.xtext.testlanguages.fileAware.scoping;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.xtext.scoping.IGlobalScopeProvider;
import org.eclipse.xtext.scoping.IScope;
import org.eclipse.xtext.testlanguages.fileAware.fileAware.FileAwarePackage;
import com.google.inject.Inject;
/**
* This class contains custom scoping description.
*
* See https://www.eclipse.org/Xtext/documentation/303_runtime_concepts.html#scoping
* on how and when to use it.
*/
public class FileAwareTestLanguageScopeProvider extends AbstractFileAwareTestLanguageScopeProvider {
<|fim▁hole|> @Inject IGlobalScopeProvider global;
public IScope getScope(EObject context, EReference reference) {
if (reference == FileAwarePackage.Literals.IMPORT__ELEMENT) {
return global.getScope(context.eResource(), reference, null);
}
return super.getScope(context, reference);
}
}<|fim▁end|> | |
<|file_name|>process_NCBI_Taxonomy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from common_processing import *
import tarfile
import sys
import glob<|fim▁hole|>
def untar(ftp_link, out_folder):
tar = tarfile.open(out_folder + ftp_link.split("/")[-1])
tar.extractall(path=out_folder)
tar.close()
def process_nodes_dmp(out_folder):
"""
    extract data from nodes.dmp and create two map tables:
map_organism2organism
map_organism2rank
"""
map_organism2organism = ""
map_organism2rank = ""
parent_tax_dict = dict()
tax_tree_dict = dict()
with open(out_folder + 'nodes.dmp', 'rb') as f:
for line in f:
tax_id, parent_tax_id, rank, embl_code, division_id, inherited_div_flag, genetic_code_id, inherited_gc_flag, mitochondrial_genetic_code_id, inherited_mgc_flag, genbank_hidden_flag, hidden_subtree_root_flag, comments = line.split("\t|\t")
map_organism2rank += str(tax_id) + "\t" + rank + "\n"
parent_tax_dict.setdefault(tax_id, parent_tax_id)
for tax_id, parent_tax_id in parent_tax_dict.iteritems():
tax_tree_dict.setdefault(tax_id, []).append(parent_tax_id)
while parent_tax_dict[tax_tree_dict[tax_id][-1]] != tax_tree_dict[tax_id][-1]:
tax_tree_dict[tax_id].append(parent_tax_dict[tax_tree_dict[tax_id][-1]])
for tax_id, parent_tax_ids in tax_tree_dict.iteritems():
map_organism2organism += '{}\t{}\t{}\n'.format(tax_id, tax_id, 0)
for level, parent_tax_id in enumerate(parent_tax_ids):
map_organism2organism += '{}\t{}\t{}\n'.format(tax_id, parent_tax_id, level+1)
with open(out_folder + "map_organism2organism.tsv", "wb") as f:
f.write(map_organism2organism)
with open(out_folder + "map_organism2rank.tsv", "wb") as f:
f.write(map_organism2rank)
def process_names_dmp(out_folder):
"""
    extract data from names.dmp and create one map table:
        map_symbol2organism
    name types included: scientific name, synonym, acronym, anamorph,
    misspelling, misnomer, and common name
"""
map_symbol2organism = ''
non_unique_name = set()
with open(out_folder + "names.dmp", "rb") as f:
for line in f:
tax_id, name_txt, unique_name, name_class = line.split("\t|\t")
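            # name_class keeps a trailing tab from the "\t|\t" split; replacing
            # that tab with a newline terminates each output record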
map_symbol2organism += "{}\t{}\t{}".format(tax_id, name_txt, name_class.split("|")[0].replace("\t", "\n"))
with open(out_folder + "map_symbol2organism.tsv", "wb") as f:
f.write(map_symbol2organism)
def argument_parser():
parser = argparse.ArgumentParser(description="download the Taxonomy PubMed from ftp")
parser.add_argument("-f", "--ftp_link", type=str, help="ftp url link to the file")
parser.add_argument("-o", "--out_folder", type=str, help="target folder of downloaded file")
args = parser.parse_args()
return args
if __name__ == "__main__":
args = argument_parser()
print "processing Taxonomy data"
ftp_download(args.ftp_link, args.out_folder)
untar(args.ftp_link, args.out_folder)
process_nodes_dmp(args.out_folder)
process_names_dmp(args.out_folder)<|fim▁end|> | |
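# For reference, a typical invocation of this script; the FTP URL and output
# folder below are illustrative placeholders.
#
#     python process_NCBI_Taxonomy.py \
#         -f ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz \
#         -o ./taxonomy_out/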
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for karma-remap-coverage 0.1
// Project: https://github.com/sshev/karma-remap-coverage#readme
// Definitions by: Piotr Błażejewicz (Peter Blazejewicz) <https://github.com/peterblazejewicz>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.2
/// <reference types="karma-coverage" />
import 'karma';
declare module 'karma' {
interface ConfigOptions {
/**
* Key-value pairs where key is report type and value - path to file/dir where to save it.
* Reporters like `text-summary`, `text-lcov` and `teamcity` can print out to console as well
* - in this case just provide any falsy value instead of path.
*
* @example
* ```ts
* 'text-summary': null, // to show summary in console
* html: './coverage/html',
* ```
*
* {@link https://github.com/sshev/karma-remap-coverage#remapcoveragereporter-format }
*/
remapCoverageReporter?: KarmaRemapCoverageReporter | undefined;
}
// remapped reporter types to key-value pairs
type KarmaRemapCoverageReporter = Partial<Record<ReporterType, string | null | undefined>>;<|fim▁hole|><|fim▁end|> | } |
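// A sketch of the option in use from a karma config checked against these
// typings; the reporter list and output paths are illustrative values.
//
//     config.set({
//         reporters: ['progress', 'coverage', 'remap-coverage'],
//         remapCoverageReporter: {
//             'text-summary': null,      // falsy value: print to console
//             html: './coverage/html',   // path: write the report to disk
//         },
//     });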
<|file_name|>study_os.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 17-4-19 11:02 AM
# @Author : Tom.Lee
# @Description :
# @File : helper_os.py
# @Product : PyCharm
import commands
import os
import sys
def shell():
command_ls = 'ls -al /opt'
command_docker = 'docker ps -a'
    # Using os.system()
ros = os.system(command_ls)
print '\n\nos.system() : ', ros
    # Using os.popen()
output = os.popen(command_docker)
result = output.read()
print '\n\nos.popen() : ', result
    # Using the commands module
(status, output) = commands.getstatusoutput(command_docker)
print '\n\ncommands : ', status, output
def deep_look_dir(dir_path, deep=1, console_full_path=False):
"""
    deep_look_dir(dir_path, console_full_path=False)
    Recursively print every file under the given directory as a tree.
    :param dir_path: directory to walk, e.g. os.path.dirname(__file__)
    :param deep: current recursion depth (starts at 1)
    :param console_full_path: print absolute paths instead of bare names
    :return: None
"""
if deep == 1:
print dir_path
files = os.listdir(dir_path)
split_symbol = '|_' * deep if deep == 1 else '|' + ' ' * (deep - 1) + '|_'
for f in files:
f_path = os.path.join(dir_path, f)
console_name = f_path if console_full_path else f
if not os.path.isfile(f_path):
print "{sp} {dir_path}/: ".format(
sp=split_symbol,
dir_path=console_name)
num = deep + 1
deep_look_dir(f_path, num, console_full_path)
else:
print split_symbol, console_name
def sys_path():
print '\n\n'
    print 'Path of the current file: '
print os.path.abspath(__file__)
    print 'Directory containing the current file: '
print os.path.dirname(__file__)
    print 'Parent of the directory containing the current file: '
print os.path.dirname(os.path.dirname(__file__))
    print 'Directory containing the current file (absolute): '
root_path = os.path.dirname(os.path.abspath(__file__))
print root_path
    print "The bin directory beside the current file's directory: "
print os.path.abspath(os.path.join(root_path, '..', 'bin'))
    print 'Path joining: '
print os.path.join(os.path.dirname(__file__), "templates")
print os.path.join(os.path.dirname(os.path.dirname(__file__)), "templates")
    print 'Appending the file path to sys.path: '
if root_path not in sys.path:
sys.path.append(root_path)
print sys.path
if '__main__' == __name__:
"""TEST MAIN"""
# shell()
# sys_path()
# print os.path.abspath(os.path.join('/opt/tom/', '..', 'bin')) # /opt/bin
# print os.path.dirname('/opt/tom')
print '\033[1;31;m os.listdir() ==> \033[0m', os.listdir(os.getcwd())
    print '\033[1;31;m os.getcwd() ==> \033[0m', os.getcwd()
print '\033[1;31;m os.getcwdu() ==> \033[0m', os.getcwdu()
print '\033[1;31;m os.getegid() ==> \033[0m', os.getegid()<|fim▁hole|> print '\033[1;31;m os.getppid() ==> \033[0m', os.getppid()
print '\033[1;31;m os.getpgrp() ==> \033[0m', os.getpgrp()
print '\033[1;31;m os.getresgid() ==> \033[0m', os.getresgid()
print '\033[1;31;m os.getloadavg() ==> \033[0m', os.getloadavg()
print '\033[1;31;m os.geteuid() ==> \033[0m', os.geteuid()<|fim▁end|> | print '\033[1;31;m os.getenv() ==> \033[0m', os.getenv('TOM_PATH', '/home/tom')
print '\033[1;31;m os.geteuid() ==> \033[0m', os.geteuid()
print '\033[1;31;m os.getgid() ==> \033[0m', os.getgid()
print '\033[1;31;m os.getgroups() ==> \033[0m', os.getgroups() |
<|file_name|>settings.rs<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
// Copyright © 2014-2018 Miguel Peláez <[email protected]>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
// files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy,
// modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#[derive(Serialize, Deserialize, Debug)]
pub struct WindowSettings {
width: u32,
height: u32,
fullscreen: bool,
maximized: bool,
multisampling: u16,
gui_scale: f64,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GameplaySettings {
fov: u8,
vsync: bool,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Settings {
window: WindowSettings,
gameplay: GameplaySettings,
resourcepacks: Vec<String>,
}
impl Settings {
/// Create settings with default values
pub fn new() -> Settings { Settings::new_with_size(800, 600) }
/// Create settings with width and height
pub fn new_with_size(width: u32, height: u32) -> Settings {
Settings {
window: WindowSettings {
width,
height,
fullscreen: false,
maximized: true,
multisampling: 0,
gui_scale: 1.0,
},
gameplay: GameplaySettings { fov: 90, vsync: true },
resourcepacks: Vec::new(),
}
}
/// Get window width<|fim▁hole|>
/// Set window width
pub fn set_width(&mut self, value: u32) { self.window.width = value }
/// Get window height
pub fn height(&self) -> u32 { self.window.height }
/// Set window height
pub fn set_height(&mut self, value: u32) { self.window.height = value }
/// Get if user wants fullscreen
pub fn fullscreen(&self) -> bool { self.window.fullscreen }
/// Get if user wants maximized
pub fn maximized(&self) -> bool { self.window.maximized }
/// Get if user wants MSAA anti-aliasing
pub fn multisampling(&self) -> u16 { self.window.multisampling }
/// Get if user wants vsync
pub fn vsync(&self) -> bool { self.gameplay.vsync }
/// Get user FOV
pub fn fov(&self) -> u8 { self.gameplay.fov }
/// Get user GUI scale
pub fn scale(&self) -> f64 { self.window.gui_scale }
/// Get enabled resourcepacks by filename
pub fn resourcepacks(&self) -> &Vec<String> { &self.resourcepacks }
}<|fim▁end|> | pub fn width(&self) -> u32 { self.window.width } |
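// A sketch of round-tripping Settings with serde; the concrete data format is
// an assumption here, serde_json is used purely for illustration.
//
//     let s = Settings::new_with_size(1280, 720);
//     let text = serde_json::to_string(&s).unwrap();
//     let back: Settings = serde_json::from_str(&text).unwrap();
//     assert_eq!(back.width(), 1280);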
<|file_name|>user.js<|end_file_name|><|fim▁begin|>module.exports = {
_id: '5731eb718dc033bc69d9660f',<|fim▁hole|> password: '09a3fcee233197960eb1bafe82e20021ad2807e8b12c79e9822ba0d5addfdcb7f786a365b2e8de0419f24cf671f2bd8bac9e6aacd5ff5bece1e5e7346fc19c1741e0286659e39577aaa4a6fd4fe0eab30b95d4437f3821838e27f311bda42a70ae47656eea65afd65a856c9079c866d591df466538cfe7abff4578f3d93fb9e6',
passwordSalt: 'a6a465b8c1baf79bb9423c249ef8a523bd7e34600b3b2215562870a35895fdab'
};<|fim▁end|> | email: '[email protected]',
name: 'New Test User', |
<|file_name|>test_cmds_list.py<|end_file_name|><|fim▁begin|># Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from StringIO import StringIO
import mock
from paasta_tools.cli.cmds.list import paasta_list
@mock.patch('sys.stdout', new_callable=StringIO)
@mock.patch('paasta_tools.cli.cmds.list.list_services', autospec=True)
def test_list_paasta_list(mock_list_services, mock_stdout):
""" paasta_list print each service returned by get_services """
<|fim▁hole|> mock_list_services.return_value = mock_services
args = mock.MagicMock()
args.print_instances = False
paasta_list(args)
output = mock_stdout.getvalue()
assert output == 'service_1\nservice_2\n'
@mock.patch('sys.stdout', new_callable=StringIO)
@mock.patch('paasta_tools.cli.cmds.list.list_service_instances', autospec=True)
def test_list_paasta_list_instances(mock_list_service_instances, mock_stdout):
""" paasta_list print each service.instance """
mock_services = ['service_1.main', 'service_2.canary']
mock_list_service_instances.return_value = mock_services
args = mock.MagicMock()
args.print_instances = True
paasta_list(args)
output = mock_stdout.getvalue()
assert output == 'service_1.main\nservice_2.canary\n'<|fim▁end|> | mock_services = ['service_1', 'service_2']
|
<|file_name|>cexceptions.py<|end_file_name|><|fim▁begin|>__author__ = 'sushil'
<|fim▁hole|>
class DateOutOfRange(BaseException):
pass
class InvalidDate(BaseException):
pass<|fim▁end|> | class InvalidDateFormat(BaseException):
pass |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|>import click
import requests
@click.command()
@click.argument('url')
@click.option('--show-headers', '-H', is_flag=True, default=False)
@click.option('--show-status', '-S', is_flag=True, default=False)
@click.option('--quiet', '-Q', is_flag=True, default=False)
@click.option('--allow-redirects/--no-allow-redirects', default=True)
@click.option('--verbose', '-v', is_flag=True, default=False)
def cli(url, show_headers, show_status, quiet, allow_redirects, verbose):
# Make the request
if verbose:
click.secho('Making HTTP request to "{0}"...'.format(url), err=True, fg='white')
try:
response = requests.get(url, allow_redirects=allow_redirects)
response.raise_for_status()
except requests.exceptions.RequestException as e:
        click.secho(str(e), err=True, fg='yellow')
raise click.Abort()
except Exception as e:
        click.secho(str(e), err=True, fg='red')
raise click.Abort()
status_colors = {
2: 'green',
3: 'blue',
4: 'yellow',
5: 'red',
}
<|fim▁hole|> if show_status:
        status_color = status_colors.get(int(response.status_code) // 100)
click.secho('Status: {0}'.format(response.status_code), err=True, fg=status_color)
# Show the response headers
if show_headers:
click.echo(format_headers(response.headers), err=True)
# Show the response body
if not quiet:
click.echo(response.text)
# Defined before the entry point so the name exists when cli() executes.
def format_headers(headers):
    formatted = ['{0}: {1}'.format(k, v) for k, v in headers.items()]
    return '\n'.join(formatted)


if __name__ == '__main__':
    cli()<|fim▁end|> | # Show the response status |
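# Typical invocations of the command above; the URL is a placeholder.
#
#     python cli.py https://example.com -S -H
#     python cli.py https://example.com --no-allow-redirects -Q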
<|file_name|>powerfocus.d.ts<|end_file_name|><|fim▁begin|>///<reference path="../node_modules/DefinitelyTyped/d3/d3.d.ts" />
interface PWInterpolatorFactory {
(oldScale: D3.Scale.LinearScale) : (t: number)=>powerfocusI;
}<|fim▁hole|> * A scale with polynomial zoom
* @class powerfocusI
* @extends D3.Scale.LinearScale
*/
interface powerfocusI extends D3.Scale.LinearScale {
(domain: number[], range: number[], interpolate: D3.Transition.Interpolate, focus: number, exponent: number, scaleInterpolate?: PWInterpolatorFactory) : powerfocusI;
focus: {
(): number;
(x: number): powerfocusI;
};
exponent: {
(): number;
(x: number): powerfocusI;
};
derivative(x:number): number;
invDerivAtFocus(): number;
scaleInterpolate: {
(): PWInterpolatorFactory;
(f: PWInterpolatorFactory): powerfocusI;
};
regionFocus(rstart:number, rend:number, proportion:number);
powerticks(m?): number[][];
copy(): powerfocusI;
}
declare var powerfocus: powerfocusI;<|fim▁end|> |
/** |
<|file_name|>HashMapInsertBenchmark.cpp<|end_file_name|><|fim▁begin|>#include "HashMap.h"
#include <benchmark/benchmark.h>
#include <string>
#include <tr1/unordered_map><|fim▁hole|>
static const std::string gs_str_value = "0123456789";
template<typename T>
struct GetValue;
template<>
struct GetValue<int>
{
static int Get()
{
return 123456;
}
};
template<>
struct GetValue<std::string>
{
static const std::string& Get()
{
return gs_str_value;
}
};
template<typename T>
static void BM_HashMapInsert(benchmark::State& state)
{
HashMap<int, T> hash_map;
T value = GetValue<T>::Get();
while (state.KeepRunning())
{
for (int i = 0; i < state.range_x(); ++i)
{
hash_map.Insert(i, value);
}
}
}
template<typename T>
static void BM_StdMapInsert(benchmark::State& state)
{
std::map<int, T> std_map;
T value = GetValue<T>::Get();
while (state.KeepRunning())
{
for (int i = 0; i < state.range_x(); ++i)
{
std_map.insert(std::make_pair(i, value));
}
}
}
template<typename T>
static void BM_StdUnorderedMapInsert(benchmark::State& state)
{
std::tr1::unordered_map<int, T> std_map;
T value = GetValue<T>::Get();
while (state.KeepRunning())
{
for (int i = 0; i < state.range_x(); ++i)
{
std_map.insert(std::make_pair(i, value));
}
}
}
// Register the function as a benchmark
// BM for <int, int>
BENCHMARK_TEMPLATE(BM_HashMapInsert, int)->Range(8, 8<<10);
BENCHMARK_TEMPLATE(BM_StdMapInsert, int)->Range(8, 8<<10);
BENCHMARK_TEMPLATE(BM_StdUnorderedMapInsert, int)->Range(8, 8<<10);
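// BM for <int, std::string>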
BENCHMARK_TEMPLATE(BM_HashMapInsert, std::string)->Range(8, 8<<10);
BENCHMARK_TEMPLATE(BM_StdMapInsert, std::string)->Range(8, 8<<10);
BENCHMARK_TEMPLATE(BM_StdUnorderedMapInsert, std::string)->Range(8, 8<<10);
BENCHMARK_MAIN();<|fim▁end|> | #include <map>
#include <utility>
using namespace snippet::algo; |
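// For reference, a sketch of running the built binary with Google Benchmark's
// standard flags; the binary name is a placeholder.
//
//     ./hashmap_insert_benchmark --benchmark_filter=BM_HashMapInsert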
<|file_name|>bytecode.rs<|end_file_name|><|fim▁begin|>#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct StrID(pub usize);
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ChunkID(pub usize);
<|fim▁hole|>pub struct Chunk {
/// The bytecode instructions to be executed, starting at the first one.
pub insts: Vec<Inst>,
/// The amount of slots to allocate for local variables.
pub locals: usize,
/// The amount of free variables to capture when creating a function from
/// this chunk.
pub captures: usize,
}
/// A bytecode instruction.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Inst {
/// A no-op, which does nothing.
NoOp,
/// Pop an argument off the stack, pop a callee of the stack, and call the
/// callee with the argument.
Call,
/// Return to the caller.
Return,
/// Pop a value and discard it.
Pop,
/// Push a local variable onto the stack.
GetLocal(usize),
/// Pop elements of the stack, create a new tuple with those elements, and
/// push the new tuple onto the stack.
NewTuple(usize),
/// Push a new integer onto the stack.
NewI32(i32),
/// Push a new string onto the stack.
NewStr(StrID),
/// Pop free variables off the stack, create a new function with those free
/// variables as captures, and push the new function onto the stack.
NewFunc(ChunkID),
/// Push the stdout file handle onto the stack.
Stdout,
/// Pop a buffer off the stack, pop a handle off the stack, write the
/// buffer to the handle, and push the number of bytes written onto the
/// stack.
Write,
}<|fim▁end|> | /// A chunk of bytecode, pairing instructions with metadata.
#[derive(Clone, Debug, Eq, PartialEq)] |
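// A sketch of assembling a chunk from the types above; the meaning of
// StrID(0) (an index into some string table) is an assumption about the
// surrounding VM.
//
//     let hello = Chunk {
//         // push stdout, push string #0, write it, drop the byte count, return
//         insts: vec![Inst::Stdout, Inst::NewStr(StrID(0)), Inst::Write,
//                     Inst::Pop, Inst::Return],
//         locals: 0,
//         captures: 0,
//     };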
<|file_name|>agent.rs<|end_file_name|><|fim▁begin|>use std::sync::mpsc::{channel};
use std::ffi::{CString};
use indy::api::agent::{
indy_agent_add_identity,
indy_agent_close_connection,
indy_agent_close_listener,
indy_agent_connect,
indy_agent_listen,
indy_agent_remove_identity,
indy_agent_send,
};
use indy::api::ErrorCode;
use utils::callback::CallbackUtils;
use utils::timeout::TimeoutUtils;
pub struct AgentUtils {}
impl AgentUtils {
pub fn connect(pool_handle: i32, wallet_handle: i32, sender_did: &str, receiver_did: &str,
on_msg: Option<Box<Fn(i32, String) + Send>>) -> Result<i32, ErrorCode> {
let (sender, receiver) = channel();
let closure = Box::new(move |err, connection_handle| { sender.send((err, connection_handle)).unwrap(); });
let (cmd_connect, cb) = CallbackUtils::closure_to_agent_connect_cb(closure);
let (cb_id, msg_cb) = CallbackUtils::closure_to_agent_message_cb(Box::new(move |conn_handle, err, msg| {
info!("On connection {} received (with error {:?}) agent message (SRV->CLI): {}", conn_handle, err, msg);
if let Some(ref on_msg) = on_msg {
on_msg(conn_handle, msg);
}
})); //TODO make as parameter?
let err = indy_agent_connect(cmd_connect, pool_handle, wallet_handle,
CString::new(sender_did).unwrap().as_ptr(),
CString::new(receiver_did).unwrap().as_ptr(),
cb, msg_cb);
if err != ErrorCode::Success {
return Err(err);
}
let (err, conn_handle) = receiver.recv_timeout(TimeoutUtils::medium_timeout()).unwrap();
if err != ErrorCode::Success {
return Err(err);
}
CallbackUtils::closure_map_ids(cb_id, conn_handle);
Ok(conn_handle)
}
pub fn listen(endpoint: &str,
on_connect: Option<Box<Fn(i32, i32) + Send>>,
on_msg: Option<Box<Fn(i32, String) + Send>>) -> Result<i32, ErrorCode> {
let (sender, receiver) = channel();
let on_msg = Box::new(move |conn_handle, err, msg| {
info!("On connection {} received (with error {:?}) agent message (CLI->SRV): {}", conn_handle, err, msg);
if let Some(ref on_msg) = on_msg {
on_msg(conn_handle, msg);
}
});
let (on_msg_cb_id, on_msg) = CallbackUtils::closure_to_agent_message_cb(on_msg);
let on_connect = Box::new(move |listener_handle, err, conn_handle, sender_did, receiver_did| {
if let Some(ref on_connect) = on_connect {
on_connect(listener_handle, conn_handle);
}
CallbackUtils::closure_map_ids(on_msg_cb_id, conn_handle);
info!("New connection {} on listener {}, err {:?}, sender DID {}, receiver DID {}", conn_handle, listener_handle, err, sender_did, receiver_did);
});
let (on_connect_cb_id, on_connect) = CallbackUtils::closure_to_agent_connected_cb(on_connect);
let cb = Box::new(move |err, listener_handle| sender.send((err, listener_handle)).unwrap());
let (cmd_id, cb) = CallbackUtils::closure_to_agent_listen_cb(cb);
let res = indy_agent_listen(cmd_id, CString::new(endpoint).unwrap().as_ptr(), cb, on_connect, on_msg);
if res != ErrorCode::Success {
return Err(res);
}
let (res, listener_handle) = receiver.recv_timeout(TimeoutUtils::short_timeout()).unwrap();
CallbackUtils::closure_map_ids(on_connect_cb_id, listener_handle);
if res != ErrorCode::Success {
return Err(res);
}
Ok(listener_handle)
}
pub fn add_identity(listener_handle: i32, pool_handle: i32, wallet_handle: i32, did: &str) -> Result<(), ErrorCode> {
let (sender, receiver) = channel();
let (cmd_id, cb) = CallbackUtils::closure_to_agent_add_identity_cb(
Box::new(move |err_code| sender.send(err_code).unwrap())
);
let res = indy_agent_add_identity(cmd_id, listener_handle, pool_handle, wallet_handle, CString::new(did).unwrap().as_ptr(), cb);
if res != ErrorCode::Success {
return Err(res);
}
let res = receiver.recv_timeout(TimeoutUtils::short_timeout()).unwrap();
if res != ErrorCode::Success {
return Err(res)
}
Ok(())
}
pub fn rm_identity(listener_handle: i32, wallet_handle: i32, did: &str) -> Result<(), ErrorCode> {
let (sender, receiver) = channel();
let (cmd_id, cb) = CallbackUtils::closure_to_agent_rm_identity_cb(
Box::new(move |err_code| sender.send(err_code).unwrap())
);
let res = indy_agent_remove_identity(cmd_id, listener_handle, wallet_handle, CString::new(did).unwrap().as_ptr(), cb);
if res != ErrorCode::Success {
return Err(res);
}
let res = receiver.recv_timeout(TimeoutUtils::short_timeout()).unwrap();
if res != ErrorCode::Success {
return Err(res)
}
Ok(())
}
pub fn send(conn_handle: i32, msg: &str) -> Result<(), ErrorCode> {
let (send_sender, send_receiver) = channel();
let (send_cmd_id, send_cb) = CallbackUtils::closure_to_agent_send_cb(
Box::new(move |err_code| send_sender.send(err_code).unwrap())
);
<|fim▁hole|> let res = indy_agent_send(send_cmd_id, conn_handle, CString::new(msg).unwrap().as_ptr(), send_cb);
if res != ErrorCode::Success {
return Err(res);
}
let res = send_receiver.recv_timeout(TimeoutUtils::short_timeout()).unwrap();
if res != ErrorCode::Success {
return Err(res)
}
Ok(())
}
pub fn close_connection(conn_handle: i32) -> Result<(), ErrorCode> {
let (sender, receiver) = channel();
let (cmd_id, cb) = CallbackUtils::closure_to_agent_close_cb(Box::new(move |res| {
sender.send(res).unwrap();
}));
let res = indy_agent_close_connection(cmd_id, conn_handle, cb);
if res != ErrorCode::Success {
return Err(res);
}
let res = receiver.recv_timeout(TimeoutUtils::medium_timeout()).unwrap();
if res != ErrorCode::Success {
return Err(res);
}
Ok(())
}
pub fn close_listener(listener_handle: i32) -> Result<(), ErrorCode> {
let (sender, receiver) = channel();
let (cmd_id, cb) = CallbackUtils::closure_to_agent_close_cb(Box::new(move |res| {
sender.send(res).unwrap();
}));
let res = indy_agent_close_listener(cmd_id, listener_handle, cb);
if res != ErrorCode::Success {
return Err(res);
}
let res = receiver.recv_timeout(TimeoutUtils::medium_timeout()).unwrap();
if res != ErrorCode::Success {
return Err(res);
}
Ok(())
}
}<|fim▁end|> | |
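// A sketch of wiring these helpers together in a test; the endpoint, handles
// and DIDs below are placeholder values.
//
//     let listener = AgentUtils::listen(endpoint, None, None).unwrap();
//     AgentUtils::add_identity(listener, pool, wallet, receiver_did).unwrap();
//     let conn = AgentUtils::connect(pool, wallet, sender_did, receiver_did, None).unwrap();
//     AgentUtils::send(conn, "ping").unwrap();
//     AgentUtils::close_connection(conn).unwrap();
//     AgentUtils::close_listener(listener).unwrap();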
<|file_name|>BufferingEdtManager.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.jetpad.base.edt;
public class BufferingEdtManager extends RunningEdtManager {
public BufferingEdtManager() {
super();
}
public BufferingEdtManager(String name) {
super(name);
}
@Override
protected void doSchedule(Runnable r) {
addTaskToQueue(r);
}
@Override
public String toString() {
return "BufferingEdtManager@" + Integer.toHexString(hashCode()) +
("".equals(getName()) ? "" : " (" + getName()+ ")");
}
}<|fim▁end|> | /*
* Copyright 2012-2016 JetBrains s.r.o
* |
<|file_name|>ClassPathConfigurationLoaderTest.java<|end_file_name|><|fim▁begin|>package net.amygdalum.testrecorder.profile;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.util.Enumeration;
import java.util.stream.Stream;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import net.amygdalum.testrecorder.util.ExtensibleClassLoader;
import net.amygdalum.testrecorder.util.LogLevel;
import net.amygdalum.testrecorder.util.LoggerExtension;
public class ClassPathConfigurationLoaderTest {
@Nested
class testLoad {
@Test
void common() throws Exception {
ExtensibleClassLoader classLoader = new ExtensibleClassLoader(ClassPathConfigurationLoaderTest.class.getClassLoader());
classLoader.defineResource("agentconfig/net.amygdalum.testrecorder.profile.ConfigNoArgumentsNonExclusive", "net.amygdalum.testrecorder.profile.DefaultConfigNoArguments".getBytes());
ClassPathConfigurationLoader loader = new ClassPathConfigurationLoader(classLoader);
assertThat(loader.load(ConfigNoArgumentsNonExclusive.class).findFirst()).containsInstanceOf(DefaultConfigNoArguments.class);
}<|fim▁hole|> @Test
void withClassLoaderError(@LogLevel("error") ByteArrayOutputStream error) throws Exception {
ExtensibleClassLoader classLoader = new ExtensibleClassLoader(ClassPathConfigurationLoaderTest.class.getClassLoader()) {
@Override
public Enumeration<URL> getResources(String name) throws IOException {
throw new IOException();
}
};
classLoader.defineResource("agentconfig/net.amygdalum.testrecorder.profile.ConfigNoArgumentsNonExclusive", "net.amygdalum.testrecorder.profile.DefaultConfigNoArguments".getBytes());
ClassPathConfigurationLoader loader = new ClassPathConfigurationLoader(classLoader);
assertThat(loader.load(ConfigNoArgumentsNonExclusive.class).findFirst()).isNotPresent();
assertThat(error.toString()).contains("cannot load configuration from classpath");
}
@ExtendWith(LoggerExtension.class)
@Test
void withFileNotFound(@LogLevel("debug") ByteArrayOutputStream debug) throws Exception {
ExtensibleClassLoader classLoader = new ExtensibleClassLoader(ClassPathConfigurationLoaderTest.class.getClassLoader());
classLoader.defineResource("agentconfig/net.amygdalum.testrecorder.profile.ConfigNoArgumentsNonExclusive", "net.amygdalum.testrecorder.profile.DefaultConfigNoArguments".getBytes());
ClassPathConfigurationLoader loader = new ClassPathConfigurationLoader(classLoader) {
@Override
protected <T> Stream<T> configsFrom(Path path, Class<T> clazz, Object[] args) throws IOException {
throw new FileNotFoundException();
}
};
assertThat(loader.load(ConfigNoArgumentsNonExclusive.class).findFirst()).isNotPresent();
assertThat(debug.toString()).contains("did not find configuration file");
}
@ExtendWith(LoggerExtension.class)
@Test
void withIOException(@LogLevel("error") ByteArrayOutputStream error) throws Exception {
ExtensibleClassLoader classLoader = new ExtensibleClassLoader(ClassPathConfigurationLoaderTest.class.getClassLoader());
classLoader.defineResource("agentconfig/net.amygdalum.testrecorder.profile.ConfigNoArgumentsNonExclusive", "net.amygdalum.testrecorder.profile.DefaultConfigNoArguments".getBytes());
ClassPathConfigurationLoader loader = new ClassPathConfigurationLoader(classLoader) {
@Override
protected <T> Stream<T> configsFrom(Path path, Class<T> clazz, Object[] args) throws IOException {
throw new IOException();
}
};
assertThat(loader.load(ConfigNoArgumentsNonExclusive.class).findFirst()).isNotPresent();
assertThat(error.toString()).contains("cannot load configuration file");
}
}
}<|fim▁end|> |
@ExtendWith(LoggerExtension.class) |
<|file_name|>message_grpc.rs<|end_file_name|><|fim▁begin|>// This file is generated. Do not edit
// @generated
// https://github.com/Manishearth/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy)]
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unsafe_code)]
#![allow(unused_imports)]
#![allow(unused_results)]
// interface
pub trait Communicator {
fn initiate_conversation(&self, o: ::grpc::RequestOptions, p: super::message::InitiateRequest) -> ::grpc::SingleResponse<super::message::InitiateReply>;
fn terminate_conversation(&self, o: ::grpc::RequestOptions, p: super::message::TerminateRequest) -> ::grpc::SingleResponse<super::message::TerminateReply>;
fn send_message(&self, o: ::grpc::RequestOptions, p: ::grpc::StreamingRequest<super::message::MessageRequest>) -> ::grpc::StreamingResponse<super::message::MessageReply>;
}
// client
pub struct CommunicatorClient {
grpc_client: ::grpc::Client,
method_InitiateConversation: ::std::sync::Arc<::grpc::method::MethodDescriptor<super::message::InitiateRequest, super::message::InitiateReply>>,
method_TerminateConversation: ::std::sync::Arc<::grpc::method::MethodDescriptor<super::message::TerminateRequest, super::message::TerminateReply>>,
method_SendMessage: ::std::sync::Arc<::grpc::method::MethodDescriptor<super::message::MessageRequest, super::message::MessageReply>>,
}
impl CommunicatorClient {
pub fn with_client(grpc_client: ::grpc::Client) -> Self {
CommunicatorClient {
grpc_client: grpc_client,
method_InitiateConversation: ::std::sync::Arc::new(::grpc::method::MethodDescriptor {
name: "/Communicator/InitiateConversation".to_string(),
streaming: ::grpc::method::GrpcStreaming::Unary,
req_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
resp_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
}),
method_TerminateConversation: ::std::sync::Arc::new(::grpc::method::MethodDescriptor {
name: "/Communicator/TerminateConversation".to_string(),
streaming: ::grpc::method::GrpcStreaming::Unary,
req_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
resp_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
}),
method_SendMessage: ::std::sync::Arc::new(::grpc::method::MethodDescriptor {
name: "/Communicator/SendMessage".to_string(),
streaming: ::grpc::method::GrpcStreaming::Bidi,
req_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
resp_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
}),
}
}
pub fn new(host: &str, port: u16, tls: bool, conf: ::grpc::ClientConf) -> ::grpc::Result<Self> {
::grpc::Client::new(host, port, tls, conf).map(|c| {
CommunicatorClient::with_client(c)
})
}
}
impl Communicator for CommunicatorClient {
fn initiate_conversation(&self, o: ::grpc::RequestOptions, p: super::message::InitiateRequest) -> ::grpc::SingleResponse<super::message::InitiateReply> {
self.grpc_client.call_unary(o, p, self.method_InitiateConversation.clone())
}
fn terminate_conversation(&self, o: ::grpc::RequestOptions, p: super::message::TerminateRequest) -> ::grpc::SingleResponse<super::message::TerminateReply> {
self.grpc_client.call_unary(o, p, self.method_TerminateConversation.clone())
}
fn send_message(&self, o: ::grpc::RequestOptions, p: ::grpc::StreamingRequest<super::message::MessageRequest>) -> ::grpc::StreamingResponse<super::message::MessageReply> {
self.grpc_client.call_bidi(o, p, self.method_SendMessage.clone())
}
}
// server
pub struct CommunicatorServer {
pub grpc_server: ::grpc::Server,
}
impl ::std::ops::Deref for CommunicatorServer {
type Target = ::grpc::Server;
fn deref(&self) -> &Self::Target {
&self.grpc_server
}
}
impl CommunicatorServer {
pub fn new<A : ::std::net::ToSocketAddrs, H : Communicator + 'static + Sync + Send + 'static>(addr: A, conf: ::grpc::ServerConf, h: H) -> Self {
let service_definition = CommunicatorServer::new_service_def(h);
CommunicatorServer {
grpc_server: ::grpc::Server::new_plain(addr, conf, service_definition),
}
}
pub fn new_pool<A : ::std::net::ToSocketAddrs, H : Communicator + 'static + Sync + Send + 'static>(addr: A, conf: ::grpc::ServerConf, h: H, cpu_pool: ::futures_cpupool::CpuPool) -> Self {
let service_definition = CommunicatorServer::new_service_def(h);
CommunicatorServer {
grpc_server: ::grpc::Server::new_plain_pool(addr, conf, service_definition, cpu_pool),
}
}
pub fn new_service_def<H : Communicator + 'static + Sync + Send + 'static>(handler: H) -> ::grpc::server::ServerServiceDefinition {
let handler_arc = ::std::sync::Arc::new(handler);
::grpc::server::ServerServiceDefinition::new(
vec![
::grpc::server::ServerMethod::new(
::std::sync::Arc::new(::grpc::method::MethodDescriptor {
name: "/Communicator/InitiateConversation".to_string(),
streaming: ::grpc::method::GrpcStreaming::Unary,
req_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
resp_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
}),
{
let handler_copy = handler_arc.clone();
::grpc::server::MethodHandlerUnary::new(move |o, p| handler_copy.initiate_conversation(o, p))
},<|fim▁hole|> name: "/Communicator/TerminateConversation".to_string(),
streaming: ::grpc::method::GrpcStreaming::Unary,
req_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
resp_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
}),
{
let handler_copy = handler_arc.clone();
::grpc::server::MethodHandlerUnary::new(move |o, p| handler_copy.terminate_conversation(o, p))
},
),
::grpc::server::ServerMethod::new(
::std::sync::Arc::new(::grpc::method::MethodDescriptor {
name: "/Communicator/SendMessage".to_string(),
streaming: ::grpc::method::GrpcStreaming::Bidi,
req_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
resp_marshaller: Box::new(::grpc::protobuf::MarshallerProtobuf),
}),
{
let handler_copy = handler_arc.clone();
::grpc::server::MethodHandlerBidi::new(move |o, p| handler_copy.send_message(o, p))
},
),
],
)
}
}<|fim▁end|> | ),
::grpc::server::ServerMethod::new(
::std::sync::Arc::new(::grpc::method::MethodDescriptor { |
<|file_name|>IosCopy.js<|end_file_name|><|fim▁begin|>import React from 'react';
import IconBase from './../components/IconBase/IconBase';
export default class IosCopy extends React.Component {
render() {
if(this.props.bare) {
return <g>
<g>
<polygon points="144,416 144,400 144,112 112,112 112,448 352,448 352,416 160,416 "></polygon>
<g>
<path d="M325.3,64H160v48v288h192h48V139L325.3,64z M368,176h-80V96h16v64h64V176z"></path>
</g><|fim▁hole|></g>
</g>;
} return <IconBase>
<g>
<polygon points="144,416 144,400 144,112 112,112 112,448 352,448 352,416 160,416 "></polygon>
<g>
<path d="M325.3,64H160v48v288h192h48V139L325.3,64z M368,176h-80V96h16v64h64V176z"></path>
</g>
</g>
</IconBase>;
}
};IosCopy.defaultProps = {bare: false}<|fim▁end|> | |
<|file_name|>AlbumCtrl.js<|end_file_name|><|fim▁begin|><|fim▁hole|>(function() {
function AlbumCtrl(Fixtures, SongPlayer) {
this.albumData = [];
this.albumData.push(Fixtures.getAlbum());
this.songPlayer = SongPlayer;
}
angular
.module('blocJams')
.controller('AlbumCtrl', ['Fixtures','SongPlayer', AlbumCtrl]);
})();<|fim▁end|> | |
<|file_name|>DObject_8h.js<|end_file_name|><|fim▁begin|>var DObject_8h =
[<|fim▁hole|><|fim▁end|> | [ "DObject", "classHelix_1_1Logic_1_1dev_1_1DObject.html", "classHelix_1_1Logic_1_1dev_1_1DObject" ],
[ "DObject_svect", "DObject_8h.html#a56460b28b8ab9b64a1aecf912b2f14ac", null ]
]; |
<|file_name|>tests.js<|end_file_name|><|fim▁begin|>"use strict";
const lua = require("../src/lua.js");
const lauxlib = require("../src/lauxlib.js");
const {to_luastring} = require("../src/fengaricore.js");
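// Compiles a Lua chunk to its precompiled bytecode via lua_dump; e.g. a
// (hypothetical) call toByteCode("return 1") yields a Uint8Array of bytecode.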
const toByteCode = function(luaCode) {
let L = lauxlib.luaL_newstate();
if (!L) throw Error("failed to create lua state");
if (lauxlib.luaL_loadstring(L, to_luastring(luaCode)) !== lua.LUA_OK)
throw Error(lua.lua_tojsstring(L, -1));
let b = [];
if (lua.lua_dump(L, function(L, b, size, B) {
B.push(...b.slice(0, size));<|fim▁hole|> return Uint8Array.from(b);
};
module.exports.toByteCode = toByteCode;<|fim▁end|> | return 0;
}, b, false) !== 0)
throw Error("unable to dump given function"); |
<|file_name|>date.rs<|end_file_name|><|fim▁begin|>use header::HttpDate;
header! {
#[doc="`Date` header, defined in [RFC7231](http://tools.ietf.org/html/rfc7231#section-7.1.1.2)"]
#[doc=""]
#[doc="The `Date` header field represents the date and time at which the"]
#[doc="message was originated."]
#[doc=""]
#[doc="# ABNF"]
#[doc="```plain"]
#[doc="Date = HTTP-date"]
#[doc="```"]
#[doc=""]
#[doc="# Example values"]
#[doc="* `Tue, 15 Nov 1994 08:12:31 GMT`"]<|fim▁hole|> #[doc="# Example"]
#[doc="```"]
#[doc="# extern crate time;"]
#[doc="# extern crate hyper;"]
#[doc="# fn main() {"]
#[doc="// extern crate time;"]
#[doc=""]
#[doc="use hyper::header::{Headers, Date, HttpDate};"]
#[doc="use time;"]
#[doc=""]
#[doc="let mut headers = Headers::new();"]
#[doc="headers.set(Date(HttpDate(time::now())));"]
#[doc="# }"]
#[doc="```"]
(Date, "Date") => [HttpDate]
test_date {
test_header!(test1, vec![b"Tue, 15 Nov 1994 08:12:31 GMT"]);
}
}
bench_header!(imf_fixdate, Date, { vec![b"Sun, 07 Nov 1994 08:48:37 GMT".to_vec()] });
bench_header!(rfc_850, Date, { vec![b"Sunday, 06-Nov-94 08:49:37 GMT".to_vec()] });
bench_header!(asctime, Date, { vec![b"Sun Nov 6 08:49:37 1994".to_vec()] });<|fim▁end|> | #[doc=""] |
<|file_name|>unit_timeside.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import unittest
import doctest
import sys
import time
import timeside.core
class _TextTestResult(unittest.TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
unittest.TestResult.__init__(self)
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
self.currentTestCase = None
def getDescription(self, test):
if self.descriptions:
return test.shortDescription() or str(test)
else:
return str(test)
def startTest(self, test):
unittest.TestResult.startTest(self, test)
if self.showAll:
if self.currentTestCase != test.__class__:
self.currentTestCase = test.__class__
self.stream.writeln()
self.stream.writeln("[%s]" % self.currentTestCase.__name__)
self.stream.write(" " + self.getDescription(test))
self.stream.write(" ... ")
def addSuccess(self, test):
unittest.TestResult.addSuccess(self, test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
def addError(self, test, err):
unittest.TestResult.addError(self, test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
def addFailure(self, test, err):
unittest.TestResult.addFailure(self, test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
def addSkip(self, test, reason):
unittest.TestResult.addSkip(self, test, reason)
if self.showAll:
self.stream.writeln("SKIP : " + reason)
elif self.dots:
self.stream.write('S')
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: [%s] --> %s "
% (flavour,
test.__class__.__name__,
self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
class _WritelnDecorator:
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self, stream):
self.stream = stream
def __getattr__(self, attr):
return getattr(self.stream, attr)
def writeln(self, arg=None):
if arg:
self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
class TestRunner:
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they<|fim▁hole|> occur, and a summary of the results at the end of the test run.
"""
def __init__(self, stream=sys.stderr, descriptions=1, verbosity=2):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
def _makeResult(self):
return _TextTestResult(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
startTime = time.time()
test(result)
stopTime = time.time()
timeTaken = stopTime - startTime
result.printErrors()
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
if not result.wasSuccessful():
self.stream.write("FAILED (")
failed, errored = map(len, (result.failures, result.errors))
if failed:
self.stream.write("failures=%d" % failed)
if errored:
if failed:
self.stream.write(", ")
self.stream.write("errors=%d" % errored)
self.stream.writeln(")")
else:
self.stream.writeln("OK")
return result
def run_test_module(test_modules_list=None, test_prefix=None):
suite = unittest.TestSuite()
finder = doctest.DocTestFinder(exclude_empty=False) # finder for doctest
if test_prefix:
unittest.TestLoader.testMethodPrefix = test_prefix
if not test_modules_list:
test_modules_list = []
elif not isinstance(test_modules_list, list):
test_modules_list = [test_modules_list]
test_modules_list.append('__main__')
for test in test_modules_list:
# Doctest
suite.addTest(doctest.DocTestSuite(test, test_finder=finder))
# unittest
suite.addTest(unittest.loader.TestLoader().loadTestsFromModule(test))
TestRunner().run(suite)<|fim▁end|> | |
<|file_name|>os_keystone_domain_facts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2016 Hewlett-Packard Enterprise Corporation
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_keystone_domain_facts
short_description: Retrieve facts about one or more OpenStack domains<|fim▁hole|>version_added: "2.1"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
description:
- Retrieve facts about a one or more OpenStack domains
requirements:
- "python >= 2.7"
- "sdk"
options:
name:
description:
- Name or ID of the domain
filters:
description:
- A dictionary of meta data to use for further filtering. Elements of
this dictionary may be additional dictionaries.
availability_zone:
description:
- Ignored. Present for backwards compatibility
'''
EXAMPLES = '''
# Gather facts about previously created domain
- os_keystone_domain_facts:
cloud: awesomecloud
- debug:
var: openstack_domains
# Gather facts about a previously created domain by name
- os_keystone_domain_facts:
cloud: awesomecloud
name: demodomain
- debug:
var: openstack_domains
# Gather facts about a previously created domain with filter
- os_keystone_domain_facts:
cloud: awesomecloud
name: demodomain
filters:
enabled: False
- debug:
var: openstack_domains
'''
RETURN = '''
openstack_domains:
description: has all the OpenStack facts about domains
returned: always, but can be null
type: complex
contains:
id:
description: Unique UUID.
returned: success
type: str
name:
description: Name given to the domain.
returned: success
type: str
description:
description: Description of the domain.
returned: success
type: str
enabled:
description: Flag to indicate if the domain is enabled.
returned: success
type: bool
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs, openstack_cloud_from_module
def main():
argument_spec = openstack_full_argument_spec(
name=dict(required=False, default=None),
filters=dict(required=False, type='dict', default=None),
)
module_kwargs = openstack_module_kwargs(
mutually_exclusive=[
['name', 'filters'],
]
)
module = AnsibleModule(argument_spec, **module_kwargs)
sdk, opcloud = openstack_cloud_from_module(module)
try:
name = module.params['name']
filters = module.params['filters']
if name:
# Let's suppose user is passing domain ID
try:
domains = opcloud.get_domain(name)
except Exception:
domains = opcloud.search_domains(filters={'name': name})
else:
domains = opcloud.search_domains(filters)
module.exit_json(changed=False, ansible_facts=dict(
openstack_domains=domains))
except sdk.exceptions.OpenStackCloudException as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()<|fim▁end|> | extends_documentation_fragment: openstack |
<|file_name|>execution.go<|end_file_name|><|fim▁begin|>package station
<|fim▁hole|> "net/http"
)
func (r *request) execute() {
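// Bracket the HTTP request with the shared rate limiter so concurrent
// workers stay within the upstream API's request budget.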
ratelimit.Add()
res, err := http.Get(r.url)
ratelimit.Sub()
if err != nil {
log.Println("station.execute: " + err.Error())
return
}
rsp, err := parseResBody(res)
if err != nil {
log.Println("station.execute parse:" + err.Error())
return
}
station.Update(r.stationId, rsp.SystemID, rsp.Name)
log.Printf("Updated station %d\n", r.stationId)
}<|fim▁end|> | import (
"github.com/moryg/eve_analyst/apiqueue/ratelimit"
"github.com/moryg/eve_analyst/database/station"
"log" |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Module for parsing main configuration file (nagios.cfg)."""
from pynag.Utils import paths
class MainConfig(object):
""" Generic parser for files in the format of key=value.
This is the format used by nagios.cfg and many other unix configuration files.
"""
def __init__(self, filename=None):
if not filename:
filename = paths.find_main_configuration_file()
self.filename = filename
self.data = self.parse()
def get(self, attribute, default=None):
"""Get the first instance of key."""
for key, value in self.data:
if key == attribute:
return value
def get_list(self, attribute):
"""Get a list of all values that have attribute_name 'key'."""
return [value for key, value in self.data if key == attribute]
@staticmethod
def _parse_string(string):
result = []
for line in string.splitlines():
# Strip out new line characters
line = line.strip()
# Skip blank lines
if not line:
continue
# Skip comments
if line.startswith("#") or line.startswith(';'):<|fim▁hole|>
key, value = line.split('=', 1)
key = key.strip()
value = value.strip()
result.append((key, value))
return result
def parse(self):
with open(self.filename) as file_handle:
data = file_handle.read()
return self._parse_string(data)<|fim▁end|> | continue |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=W0613,E1101
from __future__ import division
import os
import sys
import time
import csv
import shutil
import threading
import errno
import tempfile
import collections
import re
from distutils.version import LooseVersion
try:
import pandas as pd
except ImportError:
pd = None
from wlauto import Instrument, Parameter, IterationResult
from wlauto.instrumentation import instrument_is_installed
from wlauto.exceptions import (InstrumentError, WorkerThreadError, ConfigError,
DeviceNotRespondingError, TimeoutError)
from wlauto.utils.types import boolean, numeric
from wlauto.utils.fps import (FpsProcessor, SurfaceFlingerFrame, GfxInfoFrame, GFXINFO_EXEMPT,
VSYNC_INTERVAL)
PAUSE_LATENCY = 20
EPSYLON = 0.0001
class FpsInstrument(Instrument):
name = 'fps'
description = """
Measures Frames Per Second (FPS) and associated metrics for a workload.
.. note:: This instrument depends on the pandas Python library (which is not part of standard
WA dependencies), so you will need to install that first, before you can use it.
Android L and below use SurfaceFlinger to calculate the FPS data.
Android M and above use gfxinfo to calculate the FPS data.
SurfaceFlinger:
The view is specified by the workload as ``view`` attribute. This defaults
to ``'SurfaceView'`` for game workloads, and ``None`` for non-game
workloads (as for them FPS measurement usually doesn't make sense).
Individual workloads may override this.
gfxinfo:
The view is specified by the workload as ``package`` attribute.
This is because gfxinfo already processes for all views in a package.
This instrument adds four metrics to the results:
:FPS: Frames Per Second. This is the frame rate of the workload.
:frame_count: The total number of frames rendered during the execution of
the workload.
:janks: The number of "janks" that occured during execution of the
workload. Janks are sudden shifts in frame rate. They result
in a "stuttery" UI. See http://jankfree.org/jank-busters-io
:not_at_vsync: The number of frames that did not render in a single
vsync cycle.
"""
supported_platforms = ['android']
parameters = [
Parameter('drop_threshold', kind=numeric, default=5,
description='Data points below this FPS will be dropped as they '
'do not constitute "real" gameplay. The assumption '
'being that while actually running, the FPS in the '
'game will not drop below X frames per second, '
'except on loading screens, menus, etc, which '
'should not contribute to FPS calculation. '),
Parameter('keep_raw', kind=boolean, default=False,
description='If set to ``True``, this will keep the raw dumpsys output '
'in the results directory (this is maily used for debugging) '
'Note: frames.csv with collected frames data will always be '
'generated regardless of this setting.'),
Parameter('generate_csv', kind=boolean, default=True,
description='If set to ``True``, this will produce temporal fps data '
'in the results directory, in a file named fps.csv '
'Note: fps data will appear as discrete step-like values '
'in order to produce a more meaningful representation, '
'a rolling mean can be applied.'),
Parameter('crash_check', kind=boolean, default=True,
description="""
Specifies whether the instrument should check for crashed content by examining
frame data. If this is set, the ``execution_time`` instrument must also be installed.
The check is performed by using the measured FPS and execution time to estimate the expected
frame count and comparing that against the measured frame count. If the ratio of
measured/expected frames is too low, then it is assumed that the content has crashed partway
through the run. What is "too low" is determined by ``crash_threshold``.
.. note:: This is not 100\% fool-proof. If the crash occurs sufficiently close to
the workload's termination, it may not be detected. If this is expected, the
threshold may be adjusted up to compensate.
"""),
Parameter('crash_threshold', kind=float, default=0.7,
description="""
Specifies the threshold used to decide whether a measured/expected frames ratio indicates
a content crash. E.g. a value of ``0.75`` means that if the measured frame count falls more than
a quarter below the expected count, it will be treated as a content crash.
"""),
Parameter('dumpsys_period', kind=float, default=2, constraint=lambda x: x > 0,
description="""
Specifies the time period between calls to ``dumpsys SurfaceFlinger --latency`` in
seconds when collecting frame data. Using a lower value improves the granularity
of timings when recording actions that take a short time to complete. Note, this
will produce duplicate frame data in the raw dumpsys output, however, this is
filtered out in frames.csv. It may also affect the overall load on the system.
The default value of 2 seconds corresponds with the NUM_FRAME_RECORDS in
android/services/surfaceflinger/FrameTracker.h (as of the time of writing
currently 128) and a frame rate of 60 fps that is applicable to most devices.
"""),
Parameter('force_surfaceflinger', kind=boolean, default=False,
description="""
By default, the method to capture fps data is based on Android version.
If this is set to true, force the instrument to use the SurfaceFlinger method
regardless of its Android version.
"""),
]
def __init__(self, device, **kwargs):
super(FpsInstrument, self).__init__(device, **kwargs)
self.collector = None
self.outfile = None
self.fps_outfile = None
self.is_enabled = True
self.fps_method = ''
def validate(self):
if not pd or LooseVersion(pd.__version__) < LooseVersion('0.13.1'):
message = ('fps instrument requires pandas Python package (version 0.13.1 or higher) to be installed.\n'
'You can install it with pip, e.g. "sudo pip install pandas"')
raise InstrumentError(message)
if self.crash_check and not instrument_is_installed('execution_time'):
raise ConfigError('execution_time instrument must be installed in order to check for content crash.')
def setup(self, context):
workload = context.workload
if hasattr(workload, 'view'):
self.fps_outfile = os.path.join(context.output_directory, 'fps.csv')
self.outfile = os.path.join(context.output_directory, 'frames.csv')
# Android M brings a new method of collecting FPS data
if not self.force_surfaceflinger and (self.device.get_sdk_version() >= 23):
# gfxinfo takes in the package name rather than a single view/activity
# so there is no 'list_command' to run and compare against a list of
# views/activities. Additionally, clearing the stats requires the package
# so we need to clear for every package in the workload.
# Usually there is only one package, but some workloads may run multiple
# packages so each one must be reset before continuing
self.fps_method = 'gfxinfo'
runcmd = 'dumpsys gfxinfo {} framestats'
lstcmd = None
params = workload.package
params = [params] if isinstance(params, basestring) else params
for pkg in params:
self.device.execute('dumpsys gfxinfo {} reset'.format(pkg))
else:
self.fps_method = 'surfaceflinger'
runcmd = 'dumpsys SurfaceFlinger --latency {}'
lstcmd = 'dumpsys SurfaceFlinger --list'
params = workload.view
self.device.execute('dumpsys SurfaceFlinger --latency-clear ')
self.collector = LatencyCollector(self.outfile, self.device, params or '',
self.keep_raw, self.logger, self.dumpsys_period,
runcmd, lstcmd, self.fps_method)
else:
self.logger.debug('Workload does not contain a view; disabling...')
self.is_enabled = False
def start(self, context):
if self.is_enabled:
self.logger.debug('Starting Frame Statistics collection...')
self.collector.start()
def stop(self, context):
if self.is_enabled and self.collector.is_alive():
self.logger.debug('Stopping Frame Statistics collection...')
self.collector.stop()
def update_result(self, context):
if self.is_enabled:
fps, frame_count, janks, not_at_vsync = float('nan'), 0, 0, 0
p90, p95, p99 = [float('nan')] * 3
data = pd.read_csv(self.outfile)
if not data.empty: # pylint: disable=maybe-no-member
# gfxinfo method has an additional file generated that contains statistics
stats_file = None
if self.fps_method == 'gfxinfo':
stats_file = os.path.join(os.path.dirname(self.outfile), 'gfxinfo.csv')
fp = FpsProcessor(data, extra_data=stats_file)
per_frame_fps, metrics = fp.process(self.collector.refresh_period, self.drop_threshold)
fps, frame_count, janks, not_at_vsync = metrics
if self.generate_csv:
per_frame_fps.to_csv(self.fps_outfile, index=False, header=True)
context.add_artifact('fps', path='fps.csv', kind='data')
p90, p95, p99 = fp.percentiles()
context.result.add_metric('FPS', fps)
context.result.add_metric('frame_count', frame_count)
context.result.add_metric('janks', janks, lower_is_better=True)
context.result.add_metric('not_at_vsync', not_at_vsync, lower_is_better=True)
context.result.add_metric('frame_time_90percentile', p90, 'ms', lower_is_better=True)
context.result.add_metric('frame_time_95percentile', p95, 'ms', lower_is_better=True)
context.result.add_metric('frame_time_99percentile', p99, 'ms', lower_is_better=True)
def slow_update_result(self, context):
result = context.result
if self.crash_check and result.has_metric('execution_time'):
self.logger.debug('Checking for crashed content.')
exec_time = result['execution_time'].value
fps = result['FPS'].value
frames = result['frame_count'].value
if all([exec_time, fps, frames]):
expected_frames = fps * exec_time
ratio = frames / expected_frames
self.logger.debug('actual/expected frames: {:.2}'.format(ratio))
if ratio < self.crash_threshold:
self.logger.error('Content for {} appears to have crashed.'.format(context.spec.label))
result.status = IterationResult.FAILED
result.add_event('Content crash detected (actual/expected frames: {:.2}).'.format(ratio))
class LatencyCollector(threading.Thread):
# Note: the size of the frames buffer for a particular surface is defined
# by NUM_FRAME_RECORDS inside android/services/surfaceflinger/FrameTracker.h.
# At the time of writing, this was hard-coded to 128. So at 60 fps
# (and there is no reason to go above that, as it matches vsync rate
# on pretty much all phones), there is just over 2 seconds' worth of
# frames in there. Hence the default sleep time of 2 seconds between dumps.
def __init__(self, outfile, device, activities, keep_raw, logger, dumpsys_period,
run_command, list_command, fps_method):
super(LatencyCollector, self).__init__()
self.outfile = outfile
self.device = device
self.keep_raw = keep_raw
self.logger = logger
self.dumpsys_period = dumpsys_period
self.stop_signal = threading.Event()
self.frames = []
self.last_ready_time = 0
self.refresh_period = VSYNC_INTERVAL
self.drop_threshold = self.refresh_period * 1000
self.exc = None
self.unresponsive_count = 0
if isinstance(activities, basestring):
activities = [activities]
self.activities = activities
self.command_template = run_command
self.list_command = list_command
self.fps_method = fps_method
# Based on the fps_method, setup the header for the csv,
# and set the process_trace_line function accordingly
if fps_method == 'surfaceflinger':
self.header = SurfaceFlingerFrame._fields
self.process_trace_line = self._process_surfaceflinger_line
else:
self.header = GfxInfoFrame._fields
self.process_trace_line = self._process_gfxinfo_line
self.re_frame = re.compile('([0-9]+,)+')
self.re_stats = re.compile('.*(percentile|frames|Number).*')
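# re_frame matches gfxinfo framestats rows (comma-separated nanosecond
# timestamps, e.g. "0,27965466202353,27965466202353,..."), while re_stats
# matches summary lines such as "Janky frames: 4" or "90th percentile: 9ms".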
# Create a template summary text block that matches what gfxinfo gives after a reset
# - 133 is the default ms value for percentiles after reset
self.summary = collections.OrderedDict((('Total frames rendered', 0),
('Janky frames', 0),
('90th percentile', 133),
('95th percentile', 133),
('99th percentile', 133),
('Number Missed Vsync', 0),
('Number High input latency', 0),
('Number Slow UI thread', 0),
('Number Slow bitmap uploads', 0),
('Number Slow issue draw commands', 0)))
def run(self):
try:
self.logger.debug('Frame Statistics collection started. Method: ' + self.fps_method)
self.stop_signal.clear()
fd, temp_file = tempfile.mkstemp()
self.logger.debug('temp file: {}'.format(temp_file))
wfh = os.fdopen(fd, 'wb')
try:
view_list = self.activities
while not self.stop_signal.is_set():
# If a list_command is provided, set the view_list to be its output
# Then check for each activity in this list and if there is a match,
# process the output. If no command is provided, then always process.
if self.list_command:
view_list = self.device.execute(self.list_command).split()
for activity in self.activities:
if activity in view_list:
wfh.write(self.device.execute(self.command_template.format(activity)))
time.sleep(self.dumpsys_period)
finally:
wfh.close()
# TODO: this can happen after the run during results processing
with open(temp_file) as fh:
text = fh.read().replace('\r\n', '\n').replace('\r', '\n')
for line in text.split('\n'):
line = line.strip()
if line:
self.process_trace_line(line)
if self.keep_raw:
raw_file = os.path.join(os.path.dirname(self.outfile), self.fps_method + '.raw')
shutil.copy(temp_file, raw_file)
os.unlink(temp_file)
except (DeviceNotRespondingError, TimeoutError): # pylint: disable=W0703
raise
except Exception, e: # pylint: disable=W0703
self.logger.warning('Exception on collector thread: {}({})'.format(e.__class__.__name__, e))
self.exc = WorkerThreadError(self.name, sys.exc_info())
self.logger.debug('Frame Statistics collection stopped.')
with open(self.outfile, 'w') as wfh:
writer = csv.writer(wfh)
writer.writerow(self.header)
writer.writerows(self.frames)
self.logger.debug('Frames data written.')
# gfxinfo outputs its own summary statistics for the run.
# No point calculating those from the raw data, so store in its own file for later use.
if self.fps_method == 'gfxinfo':
stats_file = os.path.join(os.path.dirname(self.outfile), 'gfxinfo.csv')
with open(stats_file, 'w') as wfh:
writer = csv.writer(wfh)
writer.writerows(zip(self.summary.keys(), self.summary.values()))
self.logger.debug('Gfxinfo summary data written.')
def stop(self):
self.stop_signal.set()
self.join()
if self.unresponsive_count:
message = 'LatencyCollector was unresponsive {} times.'.format(self.unresponsive_count)
if self.unresponsive_count > 10:
self.logger.warning(message)
else:
self.logger.debug(message)
if self.exc:
raise self.exc # pylint: disable=E0702
self.logger.debug('Frame Statistics complete.')
def _process_surfaceflinger_line(self, line):
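# Each data line of `dumpsys SurfaceFlinger --latency` carries three
# nanosecond timestamps (desired present, actual present, frame ready);
# a single-value line reports the display refresh period instead.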
parts = line.split()<|fim▁hole|> if frame.frame_ready_time <= self.last_ready_time:
return # duplicate frame
if (frame.frame_ready_time - frame.desired_present_time) > self.drop_threshold:
self.logger.debug('Dropping bogus frame {}.'.format(line))
return # bogus data
self.last_ready_time = frame.frame_ready_time
self.frames.append(frame)
elif len(parts) == 1:
self.refresh_period = int(parts[0])
self.drop_threshold = self.refresh_period * 1000
elif 'SurfaceFlinger appears to be unresponsive, dumping anyways' in line:
self.unresponsive_count += 1
else:
self.logger.warning('Unexpected SurfaceFlinger dump output: {}'.format(line))
def _process_gfxinfo_line(self, line):
if 'No process found for' in line:
self.unresponsive_count += 1
return
# Process lines related to the frame data
match = self.re_frame.match(line)
if match:
data = match.group(0)[:-1]
data = map(int, data.split(','))
frame = GfxInfoFrame(*data)
if frame not in self.frames:
if frame.Flags & GFXINFO_EXEMPT:
self.logger.debug('Dropping exempt frame {}.'.format(line))
else:
self.frames.append(frame)
return
# Process lines related to the summary statistics
match = self.re_stats.match(line)
if match:
data = match.group(0)
title, value = data.split(':', 1)
title = title.strip()
value = value.strip()
if title in self.summary:
if 'ms' in value:
value = value.strip('ms')
if '%' in value:
value = value.split()[0]
self.summary[title] = int(value)<|fim▁end|> | if len(parts) == 3:
frame = SurfaceFlingerFrame(*map(int, parts)) |
<|file_name|>cmdlineexporter.py<|end_file_name|><|fim▁begin|># -- coding: utf-8 --
# ===========================================================================
# eXe
# Copyright 2012, Pedro Peña Pérez, Open Phoenix IT
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
'''
@author: Pedro Peña Pérez
'''
import sys
import logging
from exe.engine.persistxml import encodeObjectToXML
from exe.engine.path import Path
from exe.engine.package import Package
from exe.export.scormexport import ScormExport
from exe.export.imsexport import IMSExport
from exe.export.websiteexport import WebsiteExport
from exe.export.singlepageexport import SinglePageExport
from exe.export.xliffexport import XliffExport
from exe.export.epub3export import Epub3Export
from exe.export.textexport import TextExport
from exe.export.epub3subexport import Epub3SubExport
LOG = logging.getLogger(__name__)
ENCODING = sys.stdout.encoding or "UTF-8"
class CmdlineExporter(object):
extensions = {'xml': '.xml',
'scorm12': '.zip',
'scorm2004': '.zip',
'agrega': '.zip',
'ims': '.zip',
'website': '',
'webzip': '.zip',
'singlepage': '',
'xliff': '.xlf',
'epub3': '.epub',
'report': '.csv',
'text': '.txt'
}
def __init__(self, config, options):
self.config = config
self.options = options
self.web_dir = Path(self.config.webDir)
self.styles_dir = None
def do_export(self, inputf, outputf):
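# Dispatch dynamically: the value of the "export" option selects the
# matching export_<format> method defined on this class.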
if hasattr(self, 'export_' + self.options["export"]):
LOG.debug("Exporting to type %s, in: %s, out: %s, overwrite: %s" \
% (self.options["export"], inputf, outputf, str(self.options["overwrite"])))
if not outputf:
if self.options["export"] in ('website', 'singlepage'):
outputf = inputf.rsplit(".elp")[0]
else:
outputf = inputf + self.extensions[self.options["export"]]
outputfp = Path(outputf)
if outputfp.exists() and not self.options["overwrite"]:
error = _(u'"%s" already exists.\nPlease try again '
          u'with a different filename') % outputf
raise Exception(error.encode(ENCODING))
else:
if outputfp.exists() and self.options["overwrite"]:
if outputfp.isdir():
for filen in outputfp.walkfiles():
filen.remove()
outputfp.rmdir()
else:
outputfp.remove()
pkg = Package.load(inputf)
LOG.debug("Package %s loaded" % (inputf))
if not pkg:
error = _(u"Invalid input package")
raise Exception(error.encode(ENCODING))
self.styles_dir = self.config.stylesDir / pkg.style
LOG.debug("Styles dir: %s" % (self.styles_dir))
pkg.exportSource = self.options['editable']
getattr(self, 'export_' + self.options["export"])(pkg, outputf)
return outputf
else:
raise Exception(_(u"Export format not implemented")\
.encode(ENCODING))
def export_xml(self, pkg, outputf):
open(outputf, "w").write(encodeObjectToXML(pkg))
def export_scorm12(self, pkg, outputf):
scormExport = ScormExport(self.config, self.styles_dir, outputf,
'scorm1.2')
pkg.scowsinglepage = self.options['single-page']
pkg.scowwebsite = self.options['website']
scormExport.export(pkg)
def export_scorm2004(self, pkg, outputf):
scormExport = ScormExport(self.config, self.styles_dir, outputf,
'scorm2004')
pkg.scowsinglepage = self.options['single-page']
pkg.scowwebsite = self.options['website']<|fim▁hole|> imsExport.export(pkg)
def export_website(self, pkg, outputf):
outputfp = Path(outputf)
outputfp.makedirs()
websiteExport = WebsiteExport(self.config, self.styles_dir, outputf)
websiteExport.export(pkg)
def export_webzip(self, pkg, outputf):
websiteExport = WebsiteExport(self.config, self.styles_dir, outputf)
websiteExport.exportZip(pkg)
def export_singlepage(self, pkg, outputf, print_flag=0):
images_dir = self.web_dir.joinpath('images')
scripts_dir = self.web_dir.joinpath('scripts')
css_dir = self.web_dir.joinpath('css')
templates_dir = self.web_dir.joinpath('templates')
singlePageExport = SinglePageExport(self.styles_dir, outputf, \
images_dir, scripts_dir, css_dir, templates_dir)
singlePageExport.export(pkg, print_flag)
def export_xliff(self, pkg, outputf):
xliff = XliffExport(self.config, outputf, \
source_copied_in_target=self.options["copy-source"], \
wrap_cdata=self.options["wrap-cdata"])
xliff.export(pkg)
def export_epub3(self, pkg, outputf):
epub3Export = Epub3Export(self.config, self.styles_dir, outputf)
epub3Export.export(pkg)
def export_subepub3(self, pkg, outputf):
epub3SubExport = Epub3SubExport(self.config, self.styles_dir, outputf)
epub3SubExport.export(pkg)
def export_report(self, pkg, outputf):
websiteExport = WebsiteExport(self.config, self.styles_dir, outputf, report=True)
websiteExport.export(pkg)
def export_text(self, pkg, outputf):
textExport =TextExport(outputf)
textExport.export(pkg)
textExport.save(outputf)<|fim▁end|> | scormExport.export(pkg)
def export_ims(self, pkg, outputf):
imsExport = IMSExport(self.config, self.styles_dir, outputf) |
<|file_name|>routes.js<|end_file_name|><|fim▁begin|>/**
* Route Mappings
* (sails.config.routes)
*
* Your routes map URLs to views and controllers.
*
* If Sails receives a URL that doesn't match any of the routes below,
* it will check for matching files (images, scripts, stylesheets, etc.)
* in your assets directory. e.g. `http://localhost:1337/images/foo.jpg`
* might match an image file: `/assets/images/foo.jpg`
*
* Finally, if those don't match either, the default 404 handler is triggered.
* See `api/responses/notFound.js` to adjust your app's 404 logic.
*
* Note: Sails doesn't ACTUALLY serve stuff from `assets`-- the default Gruntfile in Sails copies
* flat files from `assets` to `.tmp/public`. This allows you to do things like compile LESS or
* CoffeeScript for the front-end.
*
* For more information on configuring custom routes, check out:
* http://sailsjs.org/#/documentation/concepts/Routes/RouteTargetSyntax.html
*/
module.exports.routes = {
/***************************************************************************
* *
* Make the view located at `views/homepage.ejs` (or `views/homepage.jade`, *
* etc. depending on your default view engine) your home page. *
* *
* (Alternatively, remove this and add an `index.html` file in your *
* `assets` directory) *
* *
***************************************************************************/
'/': {
view: 'homepage',
locals: {
layout: 'layout'
}
},
'/admin': {
view: 'admin',
locals: {
layout: 'cms'
}
},
'/admin/login': {
view: 'login',
locals: {
layout: 'layout'
}
},
'/admin/week': {
controller: 'week',
action: 'index',
locals: {
layout: 'cms'
}
},
'/admin/week/save': {
controller: 'week',
action: 'save',
locals: {
layout: 'cms'
}
},
'/admin/week/add': {
view: 'weeks-form',
locals: {
layout: 'cms'
}
},
/***************************************************************************
* *
* Custom routes here... *
* *
* If a request to a URL doesn't match any of the custom routes above, it *
* is matched against Sails route blueprints. See `config/blueprints.js` *
* for configuration options and examples. *
* *<|fim▁hole|>};<|fim▁end|> | ***************************************************************************/
|
<|file_name|>cgi_runtests.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | ../common/cgi_runtests.py |
<|file_name|>cell_id_02.cc<|end_file_name|><|fim▁begin|>// ---------------------------------------------------------------------
//
// Copyright (C) 2010 - 2013 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// check CellId
#include "../tests.h"
#include <deal.II/base/geometry_info.h>
#include <deal.II/base/logstream.h>
#include <deal.II/base/quadrature_lib.h>
#include <deal.II/dofs/dof_handler.h>
#include <deal.II/dofs/dof_accessor.h>
#include <deal.II/grid/tria.h>
#include <deal.II/distributed/tria.h>
#include <deal.II/grid/tria_accessor.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/grid/grid_refinement.h>
#include <fstream>
#include <sstream>
template <class TRIA>
void check (TRIA &tr)
{
typename TRIA::cell_iterator cell = tr.begin(),
endc = tr.end();
for (; cell!=endc; ++cell)
{
std::ostringstream outb;
outb << cell->id();
CellId tmp;
std::istringstream in(outb.str());
in >> tmp;
deallog << cell->level() << " " << cell->index() << " " << cell->id() << " " << tmp << std::endl;
}
CellId empty;
Assert(tr.begin()->id() != tr.begin_active()->id(), ExcInternalError());
Assert(tr.begin()->id() != empty, ExcInternalError());
Assert(tr.begin()->id() == tr.begin()->id(), ExcInternalError());
deallog << "OK" << std::endl;
}
int main (int argc, char *argv[])
{
// Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv);
initlog();
deal_II_exceptions::disable_abort_on_exception();
Triangulation<2> tria;
GridGenerator::hyper_cube (tria);
tria.refine_global (2);
tria.begin_active()->set_refine_flag();
tria.execute_coarsening_and_refinement();
check(tria);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>codedeploy.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "codedeploy")]
extern crate rusoto_codedeploy;
extern crate rusoto_core;
use rusoto_codedeploy::{CodeDeploy, CodeDeployClient, ListApplicationsInput};
use rusoto_core::Region;
#[tokio::test]
async fn should_list_applications() {
let client = CodeDeployClient::new(Region::UsEast1);
let request = ListApplicationsInput::default();<|fim▁hole|><|fim▁end|> |
client.list_applications(request).await.unwrap();
} |
<|file_name|>serial.rs<|end_file_name|><|fim▁begin|>// Copyright Dan Schatzberg, 2015. This file is part of Genesis.
// Genesis is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Genesis is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with Genesis. If not, see <http://www.gnu.org/licenses/>.
#![allow(dead_code)]
use super::ioport;
const PORT_BASE: u16 = 0x3F8;
// when DLAB = 0
const DATA_REG: u16 = 0;
const INT_ENABLE: u16 = 1;
// when DLAB = 1
const BAUD_DIV_LSB: u16 = 0;
const BAUD_DIV_MSB: u16 = 1;
const LINE_CTRL_REG: u16 = 3;
const LINE_CTRL_REG_CHARLEN8: u8 = 1 | 1 << 1;
const LINE_CTRL_REG_DLAB: u8 = 1 << 7;
const LINE_STATUS_REG: u16 = 5;
const LINE_STATUS_REG_THR_EMPTY: u8 = 1 << 5;
/// Initialize the Serial Port
pub fn init() {
assert_has_not_been_called!("serial::init() function \
must only be called once");
unsafe {
ioport::out(PORT_BASE + INT_ENABLE, 0u8); // disable interrupts
// enable dlab
ioport::out(PORT_BASE + LINE_CTRL_REG, LINE_CTRL_REG_DLAB);
// XXX: hard coded 115200 baud
ioport::out(PORT_BASE + BAUD_DIV_LSB, 1u8);
ioport::out(PORT_BASE + BAUD_DIV_MSB, 0u8);
// XXX: hard coded as 8N1 (8 bits, no parity, one stop bit)
ioport::out(PORT_BASE + LINE_CTRL_REG, LINE_CTRL_REG_CHARLEN8);
}
}
unsafe fn is_transmit_empty() -> bool {
ioport::inb(PORT_BASE + LINE_STATUS_REG) & LINE_STATUS_REG_THR_EMPTY != 0
}
unsafe fn putc(c: u8) {<|fim▁hole|> while !is_transmit_empty() {}
ioport::out(PORT_BASE + DATA_REG, c);
}
/// Write `str` to the Serial Port
pub unsafe fn write_str(s: &str) {
for c in s.bytes() {
putc(c);
}
}<|fim▁end|> | |
<|file_name|>problem001.py<|end_file_name|><|fim▁begin|>def solve():<|fim▁hole|><|fim▁end|> | return sum([n for n in range(1,1000) if (n % 3 == 0) or (n % 5 == 0)]) |
<|file_name|>test_group_notes.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from sentry.models import Activity
from sentry.testutils import APITestCase
class GroupNoteTest(APITestCase):
def test_simple(self):
group = self.group
activity = Activity.objects.create(
group=group,
project=group.project,
type=Activity.NOTE,
user=self.user,
data={'text': 'hello world'},
)
self.login_as(user=self.user)
url = '/api/0/issues/{}/comments/'.format(group.id)
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert response.data[0]['id'] == str(activity.id)
class GroupNoteCreateTest(APITestCase):
def test_simple(self):
group = self.group
self.login_as(user=self.user)
<|fim▁hole|> url = '/api/0/issues/{}/comments/'.format(group.id)
response = self.client.post(url, format='json')
assert response.status_code == 400
response = self.client.post(url, format='json', data={
'text': 'hello world',
})
assert response.status_code == 201, response.content
activity = Activity.objects.get(id=response.data['id'])
assert activity.user == self.user
assert activity.group == group
assert activity.data == {'text': 'hello world'}
response = self.client.post(url, format='json', data={
'text': 'hello world',
})
assert response.status_code == 400, response.content<|fim▁end|> | |
<|file_name|>test_list_of_representation.py<|end_file_name|><|fim▁begin|>from baby_steps import given, then, when
from district42 import represent, schema
def test_list_of_representation():
with given:
sch = schema.list(schema.bool)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.bool)"
def test_list_of_values_representation():
with given:
sch = schema.list(schema.int(1))
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int(1))"
def test_list_of_repr_values_representation():
with given:
sch = schema.list(schema.str("banana"))
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.str('banana'))"
def test_list_of_len_representation():
with given:
sch = schema.list(schema.int).len(10)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(10)"
def test_list_of_min_len_representation():
with given:
sch = schema.list(schema.int).len(1, ...)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(1, ...)"<|fim▁hole|> sch = schema.list(schema.int).len(..., 10)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(..., 10)"
def test_list_of_min_max_len_representation():
with given:
sch = schema.list(schema.int).len(1, 10)
with when:
res = represent(sch)
with then:
assert res == "schema.list(schema.int).len(1, 10)"<|fim▁end|> |
def test_list_of_max_len_representation():
with given: |
<|file_name|>callbacks.rs<|end_file_name|><|fim▁begin|>use std::panic;
use std::ffi::CStr;
use printf::printf;
use alpm_sys::*;
use libc::{c_int, c_char, c_void, off_t};
use {LOG_CB, DOWNLOAD_CB, DLTOTAL_CB, FETCH_CB, EVENT_CB, DownloadResult};
use event::Event;
/// Function with C calling convention and required type signature to wrap our callback
pub unsafe extern "C" fn alpm_cb_log(level: alpm_loglevel_t,
fmt: *const c_char,
args: *const Struct_va_list) {
let out = printf(fmt, args as *mut c_void);
panic::catch_unwind(|| {
let mut cb = LOG_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(level.into(), out);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of download progress callbacks.
* @param filename the name of the file being downloaded
* @param xfered the number of transferred bytes
* @param total the total number of bytes to transfer
*/
pub unsafe extern "C" fn alpm_cb_download(filename: *const c_char, xfered: off_t, total: off_t) {
let filename = CStr::from_ptr(filename).to_string_lossy();
let xfered = xfered as u64;
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DOWNLOAD_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(filename.as_ref(), xfered, total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
/** Type of total download callback.
 * @param total the total number of bytes to transfer
 */
pub unsafe extern "C" fn alpm_cb_totaldl(total: off_t) {
let total = total as u64;
panic::catch_unwind(|| {
let mut cb = DLTOTAL_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(total);
}
}).unwrap_or(()) // ignore all errors since we are about to cross ffi boundary
}
<|fim▁hole|> * @param url the URL of the file to be downloaded
* @param localpath the directory to which the file should be downloaded
* @param force whether to force an update, even if the file is the same
* @return 0 on success, 1 if the file exists and is identical, -1 on
* error.
*/
pub unsafe extern "C" fn alpm_cb_fetch(url: *const c_char,
localpath: *const c_char,
force: c_int) -> c_int
{
let url = CStr::from_ptr(url).to_string_lossy();
let localpath = CStr::from_ptr(localpath).to_string_lossy();
let force = force != 0;
panic::catch_unwind(|| {
let mut cb = FETCH_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
match cb(url.as_ref(), localpath.as_ref(), force) {
DownloadResult::Ok => 0,
DownloadResult::NotNeeded => 1,
DownloadResult::Err => -1,
}
} else {
-1
}
}).unwrap_or(-1) // set error code if we have panicked
}
/** Event callback */
pub unsafe extern "C" fn alpm_cb_event(evt: *const alpm_event_t) {
let evt = Event::new(evt);
panic::catch_unwind(|| {
let mut cb = EVENT_CB.lock().unwrap();
if let Some(ref mut cb) = *cb {
cb(evt);
}
}).unwrap_or(())
}<|fim▁end|> | /** A callback for downloading files |
<|file_name|>test_placeables.py<|end_file_name|><|fim▁begin|>from django_nose.tools import assert_equal
from pontoon.base.tests import TestCase
from pontoon.base.utils import NewlineEscapePlaceable, mark_placeables
<|fim▁hole|>
assert_equal(placeable.parse(u'A string\\n')[1], placeable([u'\\n']))
assert_equal(placeable.parse(u'\\nA string')[0], placeable([u'\\n']))
assert_equal(placeable.parse(u'A\\nstring')[1], placeable([u'\\n']))
assert_equal(placeable.parse(u'A string'), None)
assert_equal(placeable.parse(u'A\nstring'), None)
def test_mark_newline_escape_placeables(self):
"""Test detecting newline escape sequences"""
assert_equal(
mark_placeables(u'A string\\n'),
u'A string<mark class="placeable" title="Escaped newline">\\n</mark>'
)
assert_equal(
mark_placeables(u'\\nA string'),
u'<mark class="placeable" title="Escaped newline">\\n</mark>A string'
)
assert_equal(
mark_placeables(u'A\\nstring'),
u'A<mark class="placeable" title="Escaped newline">\\n</mark>string'
)
assert_equal(
mark_placeables(u'A string'),
u'A string'
)
assert_equal(
mark_placeables(u'A\nstring'),
u'A\nstring'
)
def test_python_new_format_placeables(self):
"""Test detection of the new format string in python strings."""
assert_equal(
mark_placeables(u'Hello {name}'),
u'Hello <mark class="placeable" title="Python format string">{name}</mark>'
)
assert_equal(
mark_placeables(u'Hello {name!s}'),
u'Hello <mark class="placeable" title="Python format string">{name!s}</mark>'
)
assert_equal(
mark_placeables(u'Hello {someone.name}'),
u'Hello <mark class="placeable" title="Python format string">{someone.name}</mark>'
)
assert_equal(
mark_placeables(u'Hello {name[0]}'),
u'Hello <mark class="placeable" title="Python format string">{name[0]}</mark>'
)
assert_equal(
mark_placeables(u'Hello {someone.name[0]}'),
u'Hello <mark class="placeable" title="Python format string">{someone.name[0]}</mark>'
)
def test_python_format_named_placeables(self):
"""Test detection of format string with named placeables."""
assert_equal(
mark_placeables(u'Hello %(name)s'),
u'Hello <mark class="placeable" title="Python format string">%(name)s</mark>'
)
assert_equal(
mark_placeables(u'Rolling %(number)d dices'),
u'Rolling <mark class="placeable" title="Python format string">%(number)d</mark> dices'
)
assert_equal(
mark_placeables(u'Hello %(name)S'),
u'Hello <mark class="placeable" title="Python format string">%(name)S</mark>'
)
assert_equal(
mark_placeables(u'Rolling %(number)D dices'),
u'Rolling <mark class="placeable" title="Python format string">%(number)D</mark> dices'
)<|fim▁end|> | class PlaceablesTests(TestCase):
def test_newline_escape_placeable(self):
"""Test detecting newline escape sequences"""
placeable = NewlineEscapePlaceable |
<|file_name|>filter_test.go<|end_file_name|><|fim▁begin|>package filters
import (
"reflect"
"strings"
"testing"
)
func TestFilters(t *testing.T) {
type cEntry struct {
Name string
Other string
Labels map[string]string
}
corpusS := []cEntry{
{
Name: "foo",
Labels: map[string]string{
"foo": "true",
},
},
{<|fim▁hole|> Name: "foo",
Labels: map[string]string{
"foo": "present",
"more complex label": "present",
},
},
{
Name: "bar",
Labels: map[string]string{
"bar": "true",
},
},
{
Name: "fooer",
Labels: map[string]string{
"more complex label with \\ and \"": "present",
},
},
{
Name: "fooer",
Labels: map[string]string{
"more complex label with \\ and \".post": "present",
},
},
{
Name: "baz",
Other: "too complex, yo",
},
{
Name: "bazo",
Other: "abc",
},
{
Name: "compound",
Labels: map[string]string{
"foo": "omg_asdf.asdf-qwer",
},
},
}
var corpus []interface{}
for _, entry := range corpusS {
corpus = append(corpus, entry)
}
// adapt shows an example of how to build an adaptor function for a type.
adapt := func(o interface{}) Adaptor {
obj := o.(cEntry)
return AdapterFunc(func(fieldpath []string) (string, bool) {
switch fieldpath[0] {
case "name":
return obj.Name, len(obj.Name) > 0
case "other":
return obj.Other, len(obj.Other) > 0
case "labels":
value, ok := obj.Labels[strings.Join(fieldpath[1:], ".")]
return value, ok
}
return "", false
})
}
for _, testcase := range []struct {
name string
input string
expected []interface{}
errString string
}{
{
name: "Empty",
input: "",
expected: corpus,
},
{
name: "Present",
input: "name",
expected: corpus,
},
{
name: "LabelPresent",
input: "labels.foo",
expected: []interface{}{
corpus[0],
corpus[2],
corpus[8],
},
},
{
name: "NameAndLabelPresent",
input: "labels.foo,name",
expected: []interface{}{
corpus[0],
corpus[2],
corpus[8],
},
},
{
name: "LabelValue",
input: "labels.foo==true",
expected: []interface{}{
corpus[0],
},
},
{
name: "LabelValuePunctuated",
input: "labels.foo==omg_asdf.asdf-qwer",
expected: []interface{}{
corpus[8],
},
},
{
name: "LabelValueNoAltQuoting",
input: "labels.|foo|==omg_asdf.asdf-qwer",
errString: "filters: parse error: [labels. >|||< foo|==omg_asdf.asdf-qwer]: invalid quote encountered",
},
{
name: "Name",
input: "name==bar",
expected: []interface{}{
corpus[1],
corpus[3],
},
},
{
name: "NameNotEqual",
input: "name!=bar",
expected: []interface{}{
corpus[0],
corpus[2],
corpus[4],
corpus[5],
corpus[6],
corpus[7],
corpus[8],
},
},
{
name: "NameAndLabelPresent",
input: "name==bar,labels.bar",
expected: []interface{}{
corpus[3],
},
},
{
name: "QuotedValue",
input: "other==\"too complex, yo\"",
expected: []interface{}{
corpus[6],
},
},
{
name: "RegexpValue",
input: "other~=[abc]+,name!=foo",
expected: []interface{}{
corpus[6],
corpus[7],
},
},
{
name: "RegexpQuotedValue",
input: "other~=/[abc]+/,name!=foo",
expected: []interface{}{
corpus[6],
corpus[7],
},
},
{
name: "RegexpQuotedValue",
input: "other~=/[abc]{1,2}/,name!=foo",
expected: []interface{}{
corpus[6],
corpus[7],
},
},
{
name: "RegexpQuotedValueGarbage",
input: "other~=/[abc]{0,1}\"\\//,name!=foo",
// valid syntax, but doesn't match anything
},
{
name: "NameAndLabelValue",
input: "name==bar,labels.bar==true",
expected: []interface{}{
corpus[3],
},
},
{
name: "NameAndLabelValueNoMatch",
input: "name==bar,labels.bar==wrong",
},
{
name: "LabelQuotedFieldPathPresent",
input: `name==foo,labels."more complex label"`,
expected: []interface{}{
corpus[2],
},
},
{
name: "LabelQuotedFieldPathPresentWithQuoted",
input: `labels."more complex label with \\ and \""==present`,
expected: []interface{}{
corpus[4],
},
},
{
name: "LabelQuotedFieldPathPresentWithQuotedEmbed",
input: `labels."more complex label with \\ and \"".post==present`,
expected: []interface{}{
corpus[5],
},
},
{
name: "LabelQuotedFieldPathPresentWithQuotedEmbedInvalid",
input: `labels.?"more complex label with \\ and \"".post==present`,
errString: `filters: parse error: [labels. >|?|< "more complex label with \\ and \"".post==present]: expected field or quoted`,
},
{
name: "TrailingComma",
input: "name==foo,",
errString: `filters: parse error: [name==foo,]: expected field or quoted`,
},
{
name: "TrailingFieldSeparator",
input: "labels.",
errString: `filters: parse error: [labels.]: expected field or quoted`,
},
{
name: "MissingValue",
input: "image~=,id?=?fbaq",
errString: `filters: parse error: [image~= >|,|< id?=?fbaq]: expected value or quoted`,
},
} {
t.Run(testcase.name, func(t *testing.T) {
t.Logf("testcase: %q", testcase.input)
filter, err := Parse(testcase.input)
if testcase.errString != "" {
if err == nil {
t.Fatalf("expected an error, but received nil")
}
if err.Error() != testcase.errString {
t.Fatalf("error %v != %v", err, testcase.errString)
}
return
}
if err != nil {
t.Fatal(err)
}
if filter == nil {
t.Fatal("filter should not be nil")
}
t.Log("filter", filter)
var results []interface{}
for _, item := range corpus {
adaptor := adapt(item)
if filter.Match(adaptor) {
results = append(results, item)
}
}
if !reflect.DeepEqual(results, testcase.expected) {
t.Fatalf("%q: %#v != %#v", testcase.input, results, testcase.expected)
}
})
}
}<|fim▁end|> | Name: "bar",
},
{ |
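A client-side sketch of the filter syntax these cases exercise, restricted to the Parse and Match entry points the test itself relies on:

// Sketch (from a client package importing this filters package).
filter, err := filters.Parse(`name==foo,labels."more complex label"`)
if err != nil {
	// malformed filter expression
}
if filter.Match(adapt(entry)) {
	// entry satisfies every comma-separated clause
}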
<|file_name|>currency.js<|end_file_name|><|fim▁begin|>import { getGlobal } from '../src/prebidGlobal.js';
import { createBid } from '../src/bidfactory.js';
import { STATUS } from '../src/constants.json';
import { ajax } from '../src/ajax.js';
import * as utils from '../src/utils.js';
import { config } from '../src/config.js';
import { getHook } from '../src/hook.js';
const DEFAULT_CURRENCY_RATE_URL = 'https://cdn.jsdelivr.net/gh/prebid/currency-file@1/latest.json?date=$$TODAY$$';
const CURRENCY_RATE_PRECISION = 4;
var bidResponseQueue = [];
var conversionCache = {};
var currencyRatesLoaded = false;
var needToCallForCurrencyFile = true;
var adServerCurrency = 'USD';
export var currencySupportEnabled = false;
export var currencyRates = {};
var bidderCurrencyDefault = {};
var defaultRates;
/**
* Configuration function for currency
* @param {string} [config.adServerCurrency = 'USD']
* ISO 4217 3-letter currency code that represents the target currency. (e.g. 'EUR'). If this value is present,
* the currency conversion feature is activated.
* @param {number} [config.granularityMultiplier = 1]
 * A decimal value representing how much to scale the price granularity calculations.
* @param {object} config.bidderCurrencyDefault
* An optional argument to specify bid currencies for bid adapters. This option is provided for the transitional phase
* before every bid adapter will specify its own bid currency. If the adapter specifies a bid currency, this value is
* ignored for that bidder.
*
* example:
* {
* rubicon: 'USD'
* }
* @param {string} [config.conversionRateFile = 'URL pointing to conversion file']
* Optional path to a file containing currency conversion data. Prebid.org hosts a file that is used as the default,
* if not specified.
* @param {object} [config.rates]
 * This optional argument allows you to specify the rates with a JSON object, removing the need for an external
* config.conversionRateFile parameter. If this argument is specified, the conversion rate file will not be loaded.
*
* example:
* {
* 'GBP': { 'CNY': 8.8282, 'JPY': 141.7, 'USD': 1.2824 },
* 'USD': { 'CNY': 6.8842, 'GBP': 0.7798, 'JPY': 110.49 }
* }
* @param {object} [config.defaultRates]
 * This optional currency rates definition follows the same format as config.rates; however, it is only used if
* there is an error loading the config.conversionRateFile.
*/
export function setConfig(config) {
let url = DEFAULT_CURRENCY_RATE_URL;
if (typeof config.rates === 'object') {
currencyRates.conversions = config.rates;
currencyRatesLoaded = true;
needToCallForCurrencyFile = false; // don't call if rates are already specified
}
if (typeof config.defaultRates === 'object') {
defaultRates = config.defaultRates;
// set up the default rates to be used if the rate file doesn't get loaded in time
currencyRates.conversions = defaultRates;
currencyRatesLoaded = true;
}
if (typeof config.adServerCurrency === 'string') {
utils.logInfo('enabling currency support', arguments);
adServerCurrency = config.adServerCurrency;
if (config.conversionRateFile) {
utils.logInfo('currency using override conversionRateFile:', config.conversionRateFile);
url = config.conversionRateFile;
}
// see if the url contains a date macro
// this is a workaround to the fact that jsdelivr doesn't currently support setting a 24-hour HTTP cache header
// So this is an approach to let the browser cache a copy of the file each day
    // We should remove the macro once the CDN supports a day-level HTTP cache setting
const macroLocation = url.indexOf('$$TODAY$$');
if (macroLocation !== -1) {<|fim▁hole|> // get the date to resolve the macro
const d = new Date();
let month = `${d.getMonth() + 1}`;
let day = `${d.getDate()}`;
if (month.length < 2) month = `0${month}`;
if (day.length < 2) day = `0${day}`;
const todaysDate = `${d.getFullYear()}${month}${day}`;
// replace $$TODAY$$ with todaysDate
url = `${url.substring(0, macroLocation)}${todaysDate}${url.substring(macroLocation + 9, url.length)}`;
}
initCurrency(url);
} else {
// currency support is disabled, setting defaults
utils.logInfo('disabling currency support');
resetCurrency();
}
if (typeof config.bidderCurrencyDefault === 'object') {
bidderCurrencyDefault = config.bidderCurrencyDefault;
}
}
config.getConfig('currency', config => setConfig(config.currency));
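// Usage sketch (publisher page) matching the options documented above;
// the rate shown is illustrative, not live data:
//
//   pbjs.setConfig({
//     currency: {
//       adServerCurrency: 'EUR',
//       defaultRates: { 'USD': { 'EUR': 0.92 } }
//     }
//   });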
function errorSettingsRates(msg) {
if (defaultRates) {
utils.logWarn(msg);
utils.logWarn('Currency failed loading rates, falling back to currency.defaultRates');
} else {
utils.logError(msg);
}
}
function initCurrency(url) {
conversionCache = {};
currencySupportEnabled = true;
utils.logInfo('Installing addBidResponse decorator for currency module', arguments);
// Adding conversion function to prebid global for external module and on page use
getGlobal().convertCurrency = (cpm, fromCurrency, toCurrency) => parseFloat(cpm) * getCurrencyConversion(fromCurrency, toCurrency);
getHook('addBidResponse').before(addBidResponseHook, 100);
// call for the file if we haven't already
if (needToCallForCurrencyFile) {
needToCallForCurrencyFile = false;
ajax(url,
{
success: function (response) {
try {
currencyRates = JSON.parse(response);
utils.logInfo('currencyRates set to ' + JSON.stringify(currencyRates));
currencyRatesLoaded = true;
processBidResponseQueue();
} catch (e) {
errorSettingsRates('Failed to parse currencyRates response: ' + response);
}
},
error: errorSettingsRates
}
);
}
}
function resetCurrency() {
utils.logInfo('Uninstalling addBidResponse decorator for currency module', arguments);
getHook('addBidResponse').getHooks({hook: addBidResponseHook}).remove();
delete getGlobal().convertCurrency;
adServerCurrency = 'USD';
conversionCache = {};
currencySupportEnabled = false;
currencyRatesLoaded = false;
needToCallForCurrencyFile = true;
currencyRates = {};
bidderCurrencyDefault = {};
}
export function addBidResponseHook(fn, adUnitCode, bid) {
if (!bid) {
return fn.call(this, adUnitCode); // if no bid, call original and let it display warnings
}
let bidder = bid.bidderCode || bid.bidder;
if (bidderCurrencyDefault[bidder]) {
let currencyDefault = bidderCurrencyDefault[bidder];
if (bid.currency && currencyDefault !== bid.currency) {
utils.logWarn(`Currency default '${bidder}: ${currencyDefault}' ignored. adapter specified '${bid.currency}'`);
} else {
bid.currency = currencyDefault;
}
}
// default to USD if currency not set
if (!bid.currency) {
utils.logWarn('Currency not specified on bid. Defaulted to "USD"');
bid.currency = 'USD';
}
// used for analytics
bid.getCpmInNewCurrency = function(toCurrency) {
return (parseFloat(this.cpm) * getCurrencyConversion(this.currency, toCurrency)).toFixed(3);
};
// execute immediately if the bid is already in the desired currency
if (bid.currency === adServerCurrency) {
return fn.call(this, adUnitCode, bid);
}
bidResponseQueue.push(wrapFunction(fn, this, [adUnitCode, bid]));
if (!currencySupportEnabled || currencyRatesLoaded) {
processBidResponseQueue();
}
}
function processBidResponseQueue() {
while (bidResponseQueue.length > 0) {
(bidResponseQueue.shift())();
}
}
function wrapFunction(fn, context, params) {
return function() {
let bid = params[1];
if (bid !== undefined && 'currency' in bid && 'cpm' in bid) {
let fromCurrency = bid.currency;
try {
let conversion = getCurrencyConversion(fromCurrency);
if (conversion !== 1) {
bid.cpm = (parseFloat(bid.cpm) * conversion).toFixed(4);
bid.currency = adServerCurrency;
}
} catch (e) {
utils.logWarn('Returning NO_BID, getCurrencyConversion threw error: ', e);
params[1] = createBid(STATUS.NO_BID, {
bidder: bid.bidderCode || bid.bidder,
bidId: bid.requestId
});
}
}
return fn.apply(context, params);
};
}
function getCurrencyConversion(fromCurrency, toCurrency = adServerCurrency) {
var conversionRate = null;
var rates;
let cacheKey = `${fromCurrency}->${toCurrency}`;
if (cacheKey in conversionCache) {
conversionRate = conversionCache[cacheKey];
utils.logMessage('Using conversionCache value ' + conversionRate + ' for ' + cacheKey);
} else if (currencySupportEnabled === false) {
if (fromCurrency === 'USD') {
conversionRate = 1;
} else {
throw new Error('Prebid currency support has not been enabled and fromCurrency is not USD');
}
} else if (fromCurrency === toCurrency) {
conversionRate = 1;
} else {
if (fromCurrency in currencyRates.conversions) {
// using direct conversion rate from fromCurrency to toCurrency
rates = currencyRates.conversions[fromCurrency];
if (!(toCurrency in rates)) {
// bid should fail, currency is not supported
throw new Error('Specified adServerCurrency in config \'' + toCurrency + '\' not found in the currency rates file');
}
conversionRate = rates[toCurrency];
utils.logInfo('getCurrencyConversion using direct ' + fromCurrency + ' to ' + toCurrency + ' conversionRate ' + conversionRate);
} else if (toCurrency in currencyRates.conversions) {
// using reciprocal of conversion rate from toCurrency to fromCurrency
rates = currencyRates.conversions[toCurrency];
if (!(fromCurrency in rates)) {
// bid should fail, currency is not supported
throw new Error('Specified fromCurrency \'' + fromCurrency + '\' not found in the currency rates file');
}
conversionRate = roundFloat(1 / rates[fromCurrency], CURRENCY_RATE_PRECISION);
utils.logInfo('getCurrencyConversion using reciprocal ' + fromCurrency + ' to ' + toCurrency + ' conversionRate ' + conversionRate);
} else {
// first defined currency base used as intermediary
var anyBaseCurrency = Object.keys(currencyRates.conversions)[0];
if (!(fromCurrency in currencyRates.conversions[anyBaseCurrency])) {
// bid should fail, currency is not supported
throw new Error('Specified fromCurrency \'' + fromCurrency + '\' not found in the currency rates file');
}
var toIntermediateConversionRate = 1 / currencyRates.conversions[anyBaseCurrency][fromCurrency];
if (!(toCurrency in currencyRates.conversions[anyBaseCurrency])) {
// bid should fail, currency is not supported
throw new Error('Specified adServerCurrency in config \'' + toCurrency + '\' not found in the currency rates file');
}
var fromIntermediateConversionRate = currencyRates.conversions[anyBaseCurrency][toCurrency];
conversionRate = roundFloat(toIntermediateConversionRate * fromIntermediateConversionRate, CURRENCY_RATE_PRECISION);
utils.logInfo('getCurrencyConversion using intermediate ' + fromCurrency + ' thru ' + anyBaseCurrency + ' to ' + toCurrency + ' conversionRate ' + conversionRate);
}
}
if (!(cacheKey in conversionCache)) {
utils.logMessage('Adding conversionCache value ' + conversionRate + ' for ' + cacheKey);
conversionCache[cacheKey] = conversionRate;
}
return conversionRate;
}
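// Worked example (illustrative rates, not live data): with
// currencyRates.conversions = { 'USD': { 'GBP': 0.78, 'JPY': 110 } },
// GBP->JPY has no direct entry and JPY is not a base currency, so USD is
// used as the intermediary: (1 / 0.78) * 110 ~= 141.0256 (rounded to
// CURRENCY_RATE_PRECISION decimal places).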
function roundFloat(num, dec) {
  var d = Math.pow(10, dec);
  return Math.round(num * d) / d;
}
<|file_name|>importimage.go<|end_file_name|><|fim▁begin|>package cmd
import (
"fmt"
"io"
"strings"
"time"
"github.com/spf13/cobra"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/kubernetes/pkg/api/legacyscheme"
kapi "k8s.io/kubernetes/pkg/apis/core"
"k8s.io/kubernetes/pkg/kubectl/cmd/templates"
kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
kprinters "k8s.io/kubernetes/pkg/printers"
imageapiv1 "github.com/openshift/api/image/v1"
imageapi "github.com/openshift/origin/pkg/image/apis/image"
imageclientinternal "github.com/openshift/origin/pkg/image/generated/internalclientset"
imageclient "github.com/openshift/origin/pkg/image/generated/internalclientset/typed/image/internalversion"
"github.com/openshift/origin/pkg/oc/cli/describe"
"github.com/openshift/origin/pkg/oc/cli/util/clientcmd"
quotautil "github.com/openshift/origin/pkg/quota/util"
)
var (
importImageLong = templates.LongDesc(`
Import the latest image information from a tag in a Docker registry
Image streams allow you to control which images are rolled out to your builds
and applications. This command fetches the latest version of an image from a
remote repository and updates the image stream tag if it does not match the
previous value. Running the command multiple times will not create duplicate
entries. When importing an image, only the image metadata is copied, not the
image contents.
If you wish to change the image stream tag or provide more advanced options,
see the 'tag' command.`)
importImageExample = templates.Examples(`
%[1]s import-image mystream
`)
)
// NewCmdImportImage implements the OpenShift cli import-image command.
func NewCmdImportImage(fullName string, f *clientcmd.Factory, out, errout io.Writer) *cobra.Command {
opts := &ImportImageOptions{}
cmd := &cobra.Command{
Use: "import-image IMAGESTREAM[:TAG]",
Short: "Imports images from a Docker registry",
Long: importImageLong,
Example: fmt.Sprintf(importImageExample, fullName),
Run: func(cmd *cobra.Command, args []string) {
kcmdutil.CheckErr(opts.Complete(f, cmd, args, fullName, out, errout))
kcmdutil.CheckErr(opts.Validate(cmd))
kcmdutil.CheckErr(opts.Run())
},
}
cmd.Flags().StringVar(&opts.From, "from", "", "A Docker image repository to import images from")
cmd.Flags().BoolVar(&opts.Confirm, "confirm", false, "If true, allow the image stream import location to be set or changed")
cmd.Flags().BoolVar(&opts.All, "all", false, "If true, import all tags from the provided source on creation or if --from is specified")
	cmd.Flags().StringVar(&opts.ReferencePolicy, "reference-policy", sourceReferencePolicy, "Allow pullthrough to be requested for an external image when set to 'local'. Defaults to 'source'.")
cmd.Flags().BoolVar(&opts.DryRun, "dry-run", false, "Fetch information about images without creating or updating an image stream.")
cmd.Flags().BoolVar(&opts.Scheduled, "scheduled", false, "Set each imported Docker image to be periodically imported from a remote repository. Defaults to false.")
opts.Insecure = cmd.Flags().Bool("insecure", false, "If true, allow importing from registries that have invalid HTTPS certificates or are hosted via HTTP. This flag will take precedence over the insecure annotation.")
return cmd
}
// ImageImportOptions contains all the necessary information to perform an import.
type ImportImageOptions struct {
// user set values
From string
Confirm bool
All bool
Scheduled bool
Insecure *bool
DryRun bool
// internal values
Namespace string
Name string
Tag string
Target string
ReferencePolicy string
CommandName string
// helpers
out io.Writer
errout io.Writer
imageClient imageclient.ImageInterface
isClient imageclient.ImageStreamInterface
}
// Complete turns a partially defined ImportImageOptions into a solvent structure
// which can be validated and used for aa import.
func (o *ImportImageOptions) Complete(f *clientcmd.Factory, cmd *cobra.Command, args []string, commandName string, out, errout io.Writer) error {
o.CommandName = commandName
if len(args) > 0 {
o.Target = args[0]
}
if !cmd.Flags().Lookup("insecure").Changed {
o.Insecure = nil
}
if !cmd.Flags().Lookup("reference-policy").Changed {
o.ReferencePolicy = ""
}
namespace, _, err := f.DefaultNamespace()
if err != nil {
return err
}
o.Namespace = namespace
clientConfig, err := f.ClientConfig()
if err != nil {
return err
}
client, err := imageclientinternal.NewForConfig(clientConfig)
if err != nil {
return err
}
o.imageClient = client.Image()
o.isClient = client.Image().ImageStreams(namespace)
o.out = out
o.errout = errout
return nil
}
// Validate ensures that a ImportImageOptions is valid and can be used to execute
// an import.
func (o *ImportImageOptions) Validate(cmd *cobra.Command) error {
if len(o.Target) == 0 {
return kcmdutil.UsageErrorf(cmd, "you must specify the name of an image stream")
}
targetRef, err := imageapi.ParseDockerImageReference(o.Target)
switch {
case err != nil:
return fmt.Errorf("the image name must be a valid Docker image pull spec or reference to an image stream (e.g. myregistry/myteam/image:tag)")
case len(targetRef.ID) > 0:
return fmt.Errorf("to import images by ID, use the 'tag' command")
case len(targetRef.Tag) != 0 && o.All:
// error out
return fmt.Errorf("cannot specify a tag %q as well as --all", o.Target)
case len(targetRef.Tag) == 0 && !o.All:
// apply the default tag
targetRef.Tag = imageapi.DefaultImageTag
}
o.Name = targetRef.Name
o.Tag = targetRef.Tag
return nil
}
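// Illustration of how Validate resolves the positional argument:
//	"mystream"                -> name "mystream", tag "latest" (default tag applied)
//	"mystream:v2"             -> name "mystream", tag "v2"
//	"mystream:v2" with --all  -> rejected, a tag conflicts with --all
//	"mystream@sha256:..."     -> rejected, importing by ID needs the 'tag' command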
// Run contains all the necessary functionality for the OpenShift cli import-image command.
func (o *ImportImageOptions) Run() error {
stream, isi, err := o.createImageImport()
if err != nil {
return err
}
// Attempt the new, direct import path
result, err := o.imageClient.ImageStreamImports(isi.Namespace).Create(isi)
err = TransformUnsupportedError(err)
switch {
case err == imageapi.ErrImageStreamImportUnsupported:
case err != nil:
return err
default:
if o.DryRun {
if wasError(result) {
fmt.Fprintf(o.errout, "The dry-run import completed with errors.\n\n")
} else {
fmt.Fprint(o.out, "The dry-run import completed successfully.\n\n")
}
} else {
if wasError(result) {
fmt.Fprintf(o.errout, "The import completed with errors.\n\n")
} else {
fmt.Fprint(o.out, "The import completed successfully.\n\n")
}
}
if result.Status.Import != nil {
// TODO: dry-run doesn't return an image stream, so we have to display partial results
info, err := describe.DescribeImageStream(result.Status.Import)
if err != nil {
return err
}
fmt.Fprintln(o.out, info)
}
if repo := result.Status.Repository; repo != nil {
for _, image := range repo.Images {
if image.Image != nil {
info, err := describe.DescribeImage(image.Image, imageapi.JoinImageStreamTag(stream.Name, image.Tag))
if err != nil {
fmt.Fprintf(o.errout, "error: tag %s failed: %v\n", image.Tag, err)
} else {
fmt.Fprintln(o.out, info)
}
} else {
fmt.Fprintf(o.errout, "error: repository tag %s failed: %v\n", image.Tag, image.Status.Message)
}
}
}
for _, image := range result.Status.Images {
if image.Image != nil {
info, err := describe.DescribeImage(image.Image, imageapi.JoinImageStreamTag(stream.Name, image.Tag))
if err != nil {
fmt.Fprintf(o.errout, "error: tag %s failed: %v\n", image.Tag, err)
} else {
fmt.Fprintln(o.out, info)
}
} else {
fmt.Fprintf(o.errout, "error: tag %s failed: %v\n", image.Tag, image.Status.Message)
}
}
if r := result.Status.Repository; r != nil && len(r.AdditionalTags) > 0 {
fmt.Fprintf(o.out, "\ninfo: The remote repository contained %d additional tags which were not imported: %s\n", len(r.AdditionalTags), strings.Join(r.AdditionalTags, ", "))
}
return nil
}
// Legacy path, remove when support for older importers is removed
delete(stream.Annotations, imageapi.DockerImageRepositoryCheckAnnotation)
if o.Insecure != nil && *o.Insecure {
if stream.Annotations == nil {
stream.Annotations = make(map[string]string)
}
stream.Annotations[imageapi.InsecureRepositoryAnnotation] = "true"
}
if stream.CreationTimestamp.IsZero() {
stream, err = o.isClient.Create(stream)
} else {
stream, err = o.isClient.Update(stream)
}
if err != nil {
return err
}
fmt.Fprintln(o.out, "Importing (ctrl+c to stop waiting) ...")
resourceVersion := stream.ResourceVersion
updatedStream, err := o.waitForImport(resourceVersion)
if err != nil {
if _, ok := err.(importError); ok {
return err
}
return fmt.Errorf("unable to determine if the import completed successfully - please run '%s describe -n %s imagestream/%s' to see if the tags were updated as expected: %v", o.CommandName, stream.Namespace, stream.Name, err)
}
fmt.Fprint(o.out, "The import completed successfully.\n\n")
d := describe.ImageStreamDescriber{ImageClient: o.imageClient}
info, err := d.Describe(updatedStream.Namespace, updatedStream.Name, kprinters.DescriberSettings{})
if err != nil {
return err
}
fmt.Fprintln(o.out, info)
return nil
}
func wasError(isi *imageapi.ImageStreamImport) bool {
for _, image := range isi.Status.Images {
if image.Status.Status == metav1.StatusFailure {
return true
}
}
if isi.Status.Repository != nil && isi.Status.Repository.Status.Status == metav1.StatusFailure {
return true
}
return false
}
// TODO: move to image/api as a helper
type importError struct {
annotation string
}
func (e importError) Error() string {
return fmt.Sprintf("unable to import image: %s", e.annotation)
}
func (o *ImportImageOptions) waitForImport(resourceVersion string) (*imageapi.ImageStream, error) {
streamWatch, err := o.isClient.Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", o.Name).String(), ResourceVersion: resourceVersion})
if err != nil {
return nil, err
}
defer streamWatch.Stop()
for {
select {
case event, ok := <-streamWatch.ResultChan():
if !ok {
return nil, fmt.Errorf("image stream watch ended prematurely")
}
switch event.Type {
case watch.Modified:
s, ok := event.Object.(*imageapi.ImageStream)
if !ok {
continue
}
annotation, ok := s.Annotations[imageapi.DockerImageRepositoryCheckAnnotation]
if !ok {
continue
}
if _, err := time.Parse(time.RFC3339, annotation); err == nil {
return s, nil
}
return nil, importError{annotation}
case watch.Deleted:
return nil, fmt.Errorf("the image stream was deleted")
case watch.Error:
return nil, fmt.Errorf("error watching image stream")
}
}
}
}
func (o *ImportImageOptions) createImageImport() (*imageapi.ImageStream, *imageapi.ImageStreamImport, error) {
var isi *imageapi.ImageStreamImport
stream, err := o.isClient.Get(o.Name, metav1.GetOptions{})
// no stream, try creating one
if err != nil {
if !errors.IsNotFound(err) {
return nil, nil, err
}
if !o.Confirm {
return nil, nil, fmt.Errorf("no image stream named %q exists, pass --confirm to create and import", o.Name)
}
stream, isi = o.newImageStream()
// ensure defaulting is applied by round trip converting
// TODO: convert to using versioned types.
external, err := legacyscheme.Scheme.ConvertToVersion(stream, imageapiv1.SchemeGroupVersion)
if err != nil {
return nil, nil, err
}
legacyscheme.Scheme.Default(external)
internal, err := legacyscheme.Scheme.ConvertToVersion(external, imageapi.SchemeGroupVersion)
if err != nil {
return nil, nil, err
}
stream = internal.(*imageapi.ImageStream)
return stream, isi, nil
}
if o.All {
// importing the entire repository
isi, err = o.importAll(stream)<|fim▁hole|> return nil, nil, err
}
} else {
// importing a single tag
isi, err = o.importTag(stream)
if err != nil {
return nil, nil, err
}
}
return stream, isi, nil
}
func (o *ImportImageOptions) importAll(stream *imageapi.ImageStream) (*imageapi.ImageStreamImport, error) {
from := o.From
// update ImageStream appropriately
if len(from) == 0 {
if len(stream.Spec.DockerImageRepository) != 0 {
from = stream.Spec.DockerImageRepository
} else {
tags := make(map[string]string)
for name, tag := range stream.Spec.Tags {
if tag.From != nil && tag.From.Kind == "DockerImage" {
tags[name] = tag.From.Name
}
}
if len(tags) == 0 {
return nil, fmt.Errorf("image stream does not have tags pointing to external docker images")
}
return o.newImageStreamImportTags(stream, tags), nil
}
}
if from != stream.Spec.DockerImageRepository {
if !o.Confirm {
if len(stream.Spec.DockerImageRepository) == 0 {
return nil, fmt.Errorf("the image stream does not currently import an entire Docker repository, pass --confirm to update")
}
return nil, fmt.Errorf("the image stream has a different import spec %q, pass --confirm to update", stream.Spec.DockerImageRepository)
}
stream.Spec.DockerImageRepository = from
}
// and create accompanying ImageStreamImport
return o.newImageStreamImportAll(stream, from), nil
}
func (o *ImportImageOptions) importTag(stream *imageapi.ImageStream) (*imageapi.ImageStreamImport, error) {
from := o.From
tag := o.Tag
// follow any referential tags to the destination
finalTag, existing, multiple, err := imageapi.FollowTagReference(stream, tag)
switch err {
case imageapi.ErrInvalidReference:
return nil, fmt.Errorf("tag %q points to an invalid imagestreamtag", tag)
case imageapi.ErrCrossImageStreamReference:
return nil, fmt.Errorf("tag %q points to an imagestreamtag from another ImageStream", tag)
case imageapi.ErrCircularReference:
return nil, fmt.Errorf("tag %q on the image stream is a reference to same tag", tag)
case imageapi.ErrNotFoundReference:
// create a new tag
if len(from) == 0 && tag == imageapi.DefaultImageTag {
from = stream.Spec.DockerImageRepository
}
// if the from is still empty this means there's no such tag defined
		// nor can we create one from .spec.dockerImageRepository
if len(from) == 0 {
return nil, fmt.Errorf("the tag %q does not exist on the image stream - choose an existing tag to import or use the 'tag' command to create a new tag", tag)
}
existing = &imageapi.TagReference{
From: &kapi.ObjectReference{
Kind: "DockerImage",
Name: from,
},
}
case nil:
// disallow re-importing anything other than DockerImage
if existing.From != nil && existing.From.Kind != "DockerImage" {
return nil, fmt.Errorf("tag %q points to existing %s %q, it cannot be re-imported", tag, existing.From.Kind, existing.From.Name)
}
// disallow changing an existing tag
if existing.From == nil {
return nil, fmt.Errorf("tag %q already exists - you must use the 'tag' command if you want to change the source to %q", tag, from)
}
if len(from) != 0 && from != existing.From.Name {
if multiple {
return nil, fmt.Errorf("the tag %q points to the tag %q which points to %q - use the 'tag' command if you want to change the source to %q",
tag, finalTag, existing.From.Name, from)
}
return nil, fmt.Errorf("the tag %q points to %q - use the 'tag' command if you want to change the source to %q", tag, existing.From.Name, from)
}
// set the target item to import
from = existing.From.Name
if multiple {
tag = finalTag
}
// clear the legacy annotation
delete(existing.Annotations, imageapi.DockerImageRepositoryCheckAnnotation)
// reset the generation
zero := int64(0)
existing.Generation = &zero
}
stream.Spec.Tags[tag] = *existing
// and create accompanying ImageStreamImport
return o.newImageStreamImportTags(stream, map[string]string{tag: from}), nil
}
func (o *ImportImageOptions) newImageStream() (*imageapi.ImageStream, *imageapi.ImageStreamImport) {
from := o.From
tag := o.Tag
if len(from) == 0 {
from = o.Target
}
var (
stream *imageapi.ImageStream
isi *imageapi.ImageStreamImport
)
// create new ImageStream and accompanying ImageStreamImport
// TODO: this should be removed along with the legacy path, we don't need to
// create the IS in the new path, the import mechanism will do that for us,
// this is only for the legacy path that we need to create the IS.
if o.All {
stream = &imageapi.ImageStream{
ObjectMeta: metav1.ObjectMeta{Name: o.Name},
Spec: imageapi.ImageStreamSpec{DockerImageRepository: from},
}
isi = o.newImageStreamImportAll(stream, from)
} else {
stream = &imageapi.ImageStream{
ObjectMeta: metav1.ObjectMeta{Name: o.Name},
Spec: imageapi.ImageStreamSpec{
Tags: map[string]imageapi.TagReference{
tag: {
From: &kapi.ObjectReference{
Kind: "DockerImage",
Name: from,
},
ReferencePolicy: o.getReferencePolicy(),
},
},
},
}
isi = o.newImageStreamImportTags(stream, map[string]string{tag: from})
}
return stream, isi
}
func (o *ImportImageOptions) getReferencePolicy() imageapi.TagReferencePolicy {
ref := imageapi.TagReferencePolicy{}
if len(o.ReferencePolicy) == 0 {
return ref
}
switch o.ReferencePolicy {
case sourceReferencePolicy:
ref.Type = imageapi.SourceTagReferencePolicy
case localReferencePolicy:
ref.Type = imageapi.LocalTagReferencePolicy
}
return ref
}
func (o *ImportImageOptions) newImageStreamImport(stream *imageapi.ImageStream) (*imageapi.ImageStreamImport, bool) {
isi := &imageapi.ImageStreamImport{
ObjectMeta: metav1.ObjectMeta{
Name: stream.Name,
Namespace: o.Namespace,
ResourceVersion: stream.ResourceVersion,
},
Spec: imageapi.ImageStreamImportSpec{Import: !o.DryRun},
}
insecureAnnotation := stream.Annotations[imageapi.InsecureRepositoryAnnotation]
insecure := insecureAnnotation == "true"
// --insecure flag (if provided) takes precedence over insecure annotation
if o.Insecure != nil {
insecure = *o.Insecure
}
return isi, insecure
}
func (o *ImportImageOptions) newImageStreamImportAll(stream *imageapi.ImageStream, from string) *imageapi.ImageStreamImport {
isi, insecure := o.newImageStreamImport(stream)
isi.Spec.Repository = &imageapi.RepositoryImportSpec{
From: kapi.ObjectReference{
Kind: "DockerImage",
Name: from,
},
ImportPolicy: imageapi.TagImportPolicy{
Insecure: insecure,
Scheduled: o.Scheduled,
},
ReferencePolicy: o.getReferencePolicy(),
}
return isi
}
func (o *ImportImageOptions) newImageStreamImportTags(stream *imageapi.ImageStream, tags map[string]string) *imageapi.ImageStreamImport {
isi, streamInsecure := o.newImageStreamImport(stream)
for tag, from := range tags {
insecure := streamInsecure
scheduled := o.Scheduled
oldTag, ok := stream.Spec.Tags[tag]
if ok {
insecure = insecure || oldTag.ImportPolicy.Insecure
scheduled = scheduled || oldTag.ImportPolicy.Scheduled
}
isi.Spec.Images = append(isi.Spec.Images, imageapi.ImageImportSpec{
From: kapi.ObjectReference{
Kind: "DockerImage",
Name: from,
},
To: &kapi.LocalObjectReference{Name: tag},
ImportPolicy: imageapi.TagImportPolicy{
Insecure: insecure,
Scheduled: scheduled,
},
ReferencePolicy: o.getReferencePolicy(),
})
}
return isi
}
// TransformUnsupportedError converts specific error conditions to unsupported
func TransformUnsupportedError(err error) error {
if err == nil {
return nil
}
if errors.IsNotFound(err) {
status, ok := err.(errors.APIStatus)
if !ok {
return imageapi.ErrImageStreamImportUnsupported
}
if status.Status().Details == nil || status.Status().Details.Kind == "" {
return imageapi.ErrImageStreamImportUnsupported
}
}
// The ImageStreamImport resource exists in v1.1.1 of origin but is not yet
// enabled by policy. A create request will return a Forbidden(403) error.
// We want to return ErrImageStreamImportUnsupported to allow fallback behavior
// in clients.
if errors.IsForbidden(err) && !quotautil.IsErrorQuotaExceeded(err) {
return imageapi.ErrImageStreamImportUnsupported
}
return err
}<|fim▁end|> | if err != nil { |
<|file_name|>Ui_configdialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'urssus/ui/configdialog.ui'
#
# Created: Fri Feb 27 23:57:10 2009
# by: PyQt4 UI code generator 4.4.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(600, 319)
icon = QtGui.QIcon()<|fim▁hole|> Dialog.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName("verticalLayout")
self.tabs = QtGui.QTabWidget(Dialog)
self.tabs.setObjectName("tabs")
self.tab1 = QtGui.QWidget()
self.tab1.setObjectName("tab1")
self.tabs.addTab(self.tab1, "")
self.verticalLayout.addWidget(self.tabs)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
self.tabs.setTabText(self.tabs.indexOf(self.tab1), QtGui.QApplication.translate("Dialog", "tab1", None, QtGui.QApplication.UnicodeUTF8))
import icons_rc<|fim▁end|> | icon.addPixmap(QtGui.QPixmap(":/urssus.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off) |
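A typical way to drive this generated form; this is the standard pyuic4 usage pattern, not part of the generated file:

# Sketch: instantiating the generated Ui_Dialog class.
import sys
from PyQt4 import QtGui

app = QtGui.QApplication(sys.argv)
dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(dialog)
dialog.show()
sys.exit(app.exec_())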
<|file_name|>1.cc<|end_file_name|><|fim▁begin|>// { dg-do run { target c++11 } }
// 2007-10-12 Paolo Carlini <[email protected]>
//
// Copyright (C) 2007-2017 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free<|fim▁hole|>// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// 25.3.6 Heap operations [lib.alg.heap.operations]
#include <algorithm>
#include <functional>
#include <testsuite_hooks.h>
int A[] = {9, 8, 6, 7, 7, 5, 5, 3, 6, 4, 1, 2, 3, 4};
int B[] = {1, 3, 2, 4, 4, 6, 3, 5, 5, 7, 7, 6, 8, 9};
const int N = sizeof(A) / sizeof(int);
void
test01()
{
for (int i = 0; i <= N; ++i)
{
VERIFY( std::is_heap(A, A + i) );
VERIFY( std::is_heap(A, A + i, std::less<int>()) );
VERIFY( std::is_heap(B, B + i, std::greater<int>()) );
VERIFY( (i < 2) || !std::is_heap(B, B + i) );
}
}
int
main()
{
test01();
return 0;
}<|fim▁end|> | // software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option) |
<|file_name|>test_redirects.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from builds.models import Version
from projects.models import Project
class RedirectTests(TestCase):
fixtures = ["eric", "test_data"]
def setUp(self):
self.client.login(username='eric', password='test')
r = self.client.post(
'/dashboard/import/',
{'repo_type': 'git', 'name': 'Pip',
'tags': 'big, fucking, monkey', 'default_branch': '',
'project_url': 'http://pip.rtfd.org',
'repo': 'https://github.com/fail/sauce',
'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be',
'default_version': 'latest',
'privacy_level': 'public',
'version_privacy_level': 'public',
'description': 'wat',
'documentation_type': 'sphinx'})
pip = Project.objects.get(slug='pip')
pip_latest = Version.objects.create(project=pip, identifier='latest', verbose_name='latest', slug='latest', active=True)
def test_proper_url_no_slash(self):
r = self.client.get('/docs/pip')
        # This is triggered by Django, so it's a 301, basically just APPEND_SLASH
self.assertEqual(r.status_code, 301)
self.assertEqual(r._headers['location'], ('Location', 'http://testserver/docs/pip/'))
r = self.client.get(r._headers['location'][1])
self.assertEqual(r.status_code, 302)
r = self.client.get(r._headers['location'][1])
self.assertEqual(r.status_code, 200)
def test_proper_url(self):
r = self.client.get('/docs/pip/')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://testserver/docs/pip/en/latest/'))
r = self.client.get(r._headers['location'][1])
self.assertEqual(r.status_code, 200)
def test_inproper_url(self):
r = self.client.get('/docs/pip/en/')
self.assertEqual(r.status_code, 404)
def test_proper_url_full(self):
r = self.client.get('/docs/pip/en/latest/')
self.assertEqual(r.status_code, 200)
# Subdomains
def test_proper_subdomain(self):
r = self.client.get('/', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://pip.readthedocs.org/en/latest/'))
# Keep this around for now, until we come up with a nicer interface
"""
def test_inproper_subdomain(self):
r = self.client.get('/en/', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 404)
"""
def test_proper_subdomain_and_url(self):
r = self.client.get('/en/latest/', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 200)
# Specific Page Redirects<|fim▁hole|>
# Specific Page Redirects
def test_proper_page_on_main_site(self):
r = self.client.get('/docs/pip/page/test.html')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://testserver/docs/pip/en/latest/test.html'))<|fim▁end|> | def test_proper_page_on_subdomain(self):
r = self.client.get('/page/test.html', HTTP_HOST = 'pip.readthedocs.org')
self.assertEqual(r.status_code, 302)
self.assertEqual(r._headers['location'], ('Location', 'http://pip.readthedocs.org/en/latest/test.html')) |
<|file_name|>MidiTickUtil.java<|end_file_name|><|fim▁begin|>package org.herac.tuxguitar.gui.util;
import org.herac.tuxguitar.gui.TuxGuitar;
import org.herac.tuxguitar.player.base.MidiRepeatController;
import org.herac.tuxguitar.song.managers.TGSongManager;
import org.herac.tuxguitar.song.models.TGMeasureHeader;
public class MidiTickUtil {
public static long getStart(long tick){
long startPoint = getStartPoint();
long start = startPoint;
long length = 0;
TGSongManager manager = TuxGuitar.instance().getSongManager();
MidiRepeatController controller = new MidiRepeatController(manager.getSong(), getSHeader() , getEHeader() );
while(!controller.finished()){
TGMeasureHeader header = manager.getSong().getMeasureHeader(controller.getIndex());
controller.process();
if(controller.shouldPlay()){
start += length;
length = header.getLength();
//verifico si es el compas correcto
if(tick >= start && tick < (start + length )){
return header.getStart() + (tick - start);
}
}
}
return ( tick < startPoint ? startPoint : start );
}
<|fim▁hole|> long length = 0;
TGSongManager manager = TuxGuitar.instance().getSongManager();
MidiRepeatController controller = new MidiRepeatController(manager.getSong(), getSHeader() , getEHeader() );
while(!controller.finished()){
TGMeasureHeader header = manager.getSong().getMeasureHeader(controller.getIndex());
controller.process();
if(controller.shouldPlay()){
tick += length;
length = header.getLength();
//verifico si es el compas correcto
if(start >= header.getStart() && start < (header.getStart() + length )){
return tick;
}
}
}
return ( start < startPoint ? startPoint : tick );
}
private static long getStartPoint(){
TuxGuitar.instance().getPlayer().updateLoop( false );
return TuxGuitar.instance().getPlayer().getLoopSPosition();
}
public static int getSHeader() {
return TuxGuitar.instance().getPlayer().getLoopSHeader();
}
public static int getEHeader() {
return TuxGuitar.instance().getPlayer().getLoopEHeader();
}
}<|fim▁end|> | public static long getTick(long start){
long startPoint = getStartPoint();
long tick = startPoint; |
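A brief illustration of the mapping these two methods implement, written against the repeat-controller traversal shown above (the song layout is hypothetical):

// Illustration (hypothetical song): with measures A and B where B repeats once,
// the playback timeline is A, B, B. getStart() maps a playback tick that falls
// in the second pass of B back to B's start tick in the song, while getTick()
// maps B's song start to its first occurrence on the playback timeline.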
<|file_name|>gsubr.go<|end_file_name|><|fim▁begin|>// Derived from Inferno utils/6c/txt.c
// http://code.google.com/p/inferno-os/source/browse/utils/6c/txt.c
//
// Copyright © 1994-1999 Lucent Technologies Inc. All rights reserved.
// Portions Copyright © 1995-1997 C H Forsyth ([email protected])
// Portions Copyright © 1997-1999 Vita Nuova Limited
// Portions Copyright © 2000-2007 Vita Nuova Holdings Limited (www.vitanuova.com)
// Portions Copyright © 2004,2006 Bruce Ellis
// Portions Copyright © 2005-2007 C H Forsyth ([email protected])
// Revisions Copyright © 2000-2007 Lucent Technologies Inc. and others
// Portions Copyright © 2009 The Go Authors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package s390x
import (
"cmd/compile/internal/gc"
"cmd/internal/obj"
"cmd/internal/obj/s390x"
"fmt"
)
var resvd = []int{
s390x.REGZERO, // R0
s390x.REGTMP, // R10
s390x.REGTMP2, // R11
s390x.REGCTXT, // R12
s390x.REGG, // R13
s390x.REG_LR, // R14
s390x.REGSP, // R15
}
// generate
// as $c, n
func ginscon(as obj.As, c int64, n2 *gc.Node) {
var n1 gc.Node
gc.Nodconst(&n1, gc.Types[gc.TINT64], c)
if as != s390x.AMOVD && (c < -s390x.BIG || c > s390x.BIG) || n2.Op != gc.OREGISTER {
		// cannot have more than 16 bits of immediate in ADD, etc.
// instead, MOV into register first.
var ntmp gc.Node
gc.Regalloc(&ntmp, gc.Types[gc.TINT64], nil)
rawgins(s390x.AMOVD, &n1, &ntmp)
rawgins(as, &ntmp, n2)
gc.Regfree(&ntmp)
return
}
rawgins(as, &n1, n2)
}
// generate
// as n, $c (CMP/CMPU)
func ginscon2(as obj.As, n2 *gc.Node, c int64) {
var n1 gc.Node
gc.Nodconst(&n1, gc.Types[gc.TINT64], c)
switch as {
default:
gc.Fatalf("ginscon2")
case s390x.ACMP:
if -s390x.BIG <= c && c <= s390x.BIG {
rawgins(as, n2, &n1)
return
}
case s390x.ACMPU:
if 0 <= c && c <= 2*s390x.BIG {
rawgins(as, n2, &n1)
return
}
}
// MOV n1 into register first
var ntmp gc.Node
gc.Regalloc(&ntmp, gc.Types[gc.TINT64], nil)
rawgins(s390x.AMOVD, &n1, &ntmp)
rawgins(as, n2, &ntmp)
gc.Regfree(&ntmp)
}
func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
if t.IsInteger() && n1.Op == gc.OLITERAL && n2.Op != gc.OLITERAL {
// Reverse comparison to place constant last.
op = gc.Brrev(op)
n1, n2 = n2, n1
}
var r1, r2, g1, g2 gc.Node
gc.Regalloc(&r1, t, n1)
gc.Regalloc(&g1, n1.Type, &r1)
gc.Cgen(n1, &g1)
gmove(&g1, &r1)
if t.IsInteger() && gc.Isconst(n2, gc.CTINT) {
ginscon2(optoas(gc.OCMP, t), &r1, n2.Int64())
} else {
gc.Regalloc(&r2, t, n2)
gc.Regalloc(&g2, n1.Type, &r2)
gc.Cgen(n2, &g2)
gmove(&g2, &r2)
rawgins(optoas(gc.OCMP, t), &r1, &r2)
gc.Regfree(&g2)
gc.Regfree(&r2)
}
gc.Regfree(&g1)
gc.Regfree(&r1)
return gc.Gbranch(optoas(op, t), nil, likely)
}
// gmvc tries to move f to t using a mvc instruction.
// If successful it returns true, otherwise it returns false.
func gmvc(f, t *gc.Node) bool {
ft := int(gc.Simsimtype(f.Type))
tt := int(gc.Simsimtype(t.Type))
if ft != tt {
return false
}
if f.Op != gc.OINDREG || t.Op != gc.OINDREG {
return false
}
if f.Xoffset < 0 || f.Xoffset >= 4096-8 {
return false
}
if t.Xoffset < 0 || t.Xoffset >= 4096-8 {
return false
}
var len int64
switch ft {
case gc.TUINT8, gc.TINT8, gc.TBOOL:
len = 1
case gc.TUINT16, gc.TINT16:
len = 2
case gc.TUINT32, gc.TINT32, gc.TFLOAT32:
len = 4
case gc.TUINT64, gc.TINT64, gc.TFLOAT64, gc.TPTR64:
len = 8
case gc.TUNSAFEPTR:
len = int64(gc.Widthptr)
default:
return false
}
p := gc.Prog(s390x.AMVC)
gc.Naddr(&p.From, f)
gc.Naddr(&p.To, t)
p.From3 = new(obj.Addr)
p.From3.Offset = len
p.From3.Type = obj.TYPE_CONST
return true
}
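// For example, an 8-byte stack-to-stack copy whose offsets both fit in the
// 12-bit displacement window is emitted as a single storage-to-storage move,
// roughly: MVC $8, from(R15), to(R15)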
// generate move:
// t = f
// hard part is conversions.
func gmove(f *gc.Node, t *gc.Node) {
if gc.Debug['M'] != 0 {
fmt.Printf("gmove %v -> %v\n", gc.Nconv(f, gc.FmtLong), gc.Nconv(t, gc.FmtLong))
}
ft := int(gc.Simsimtype(f.Type))
tt := int(gc.Simsimtype(t.Type))
cvt := t.Type
if gc.Iscomplex[ft] || gc.Iscomplex[tt] {
gc.Complexmove(f, t)
return
}
var a obj.As
// cannot have two memory operands
if gc.Ismem(f) && gc.Ismem(t) {
if gmvc(f, t) {
return
}
goto hard
}
// convert constant to desired type
if f.Op == gc.OLITERAL {
var con gc.Node
f.Convconst(&con, t.Type)
f = &con
ft = tt // so big switch will choose a simple mov
// some constants can't move directly to memory.
if gc.Ismem(t) {
// float constants come from memory.
if t.Type.IsFloat() {
goto hard
}
// all immediates are 16-bit sign-extended
// unless moving into a register.
if t.Type.IsInteger() {
if i := con.Int64(); int64(int16(i)) != i {
goto hard
}
}
// immediate moves to memory have a 12-bit unsigned displacement
if t.Xoffset < 0 || t.Xoffset >= 4096-8 {
goto hard
}
}
}
// a float-to-int or int-to-float conversion requires the source operand in a register
if gc.Ismem(f) && ((f.Type.IsFloat() && t.Type.IsInteger()) || (f.Type.IsInteger() && t.Type.IsFloat())) {
cvt = f.Type
goto hard
}
// a float32-to-float64 or float64-to-float32 conversion requires the source operand in a register
if gc.Ismem(f) && f.Type.IsFloat() && t.Type.IsFloat() && (ft != tt) {
cvt = f.Type
goto hard
}
// value -> value copy, only one memory operand.
// figure out the instruction to use.
// break out of switch for one-instruction gins.
// goto rdst for "destination must be register".
// goto hard for "convert to cvt type first".
// otherwise handle and return.
switch uint32(ft)<<16 | uint32(tt) {
default:
gc.Fatalf("gmove %v -> %v", gc.Tconv(f.Type, gc.FmtLong), gc.Tconv(t.Type, gc.FmtLong))
// integer copy and truncate
case gc.TINT8<<16 | gc.TINT8,
gc.TUINT8<<16 | gc.TINT8,
gc.TINT16<<16 | gc.TINT8,
gc.TUINT16<<16 | gc.TINT8,
gc.TINT32<<16 | gc.TINT8,
gc.TUINT32<<16 | gc.TINT8,
gc.TINT64<<16 | gc.TINT8,
gc.TUINT64<<16 | gc.TINT8:
a = s390x.AMOVB
case gc.TINT8<<16 | gc.TUINT8,
gc.TUINT8<<16 | gc.TUINT8,
gc.TINT16<<16 | gc.TUINT8,
gc.TUINT16<<16 | gc.TUINT8,
gc.TINT32<<16 | gc.TUINT8,
gc.TUINT32<<16 | gc.TUINT8,
gc.TINT64<<16 | gc.TUINT8,
gc.TUINT64<<16 | gc.TUINT8:
a = s390x.AMOVBZ
case gc.TINT16<<16 | gc.TINT16,
gc.TUINT16<<16 | gc.TINT16,
gc.TINT32<<16 | gc.TINT16,
gc.TUINT32<<16 | gc.TINT16,
gc.TINT64<<16 | gc.TINT16,
gc.TUINT64<<16 | gc.TINT16:
a = s390x.AMOVH
case gc.TINT16<<16 | gc.TUINT16,<|fim▁hole|> gc.TINT64<<16 | gc.TUINT16,
gc.TUINT64<<16 | gc.TUINT16:
a = s390x.AMOVHZ
case gc.TINT32<<16 | gc.TINT32,
gc.TUINT32<<16 | gc.TINT32,
gc.TINT64<<16 | gc.TINT32,
gc.TUINT64<<16 | gc.TINT32:
a = s390x.AMOVW
case gc.TINT32<<16 | gc.TUINT32,
gc.TUINT32<<16 | gc.TUINT32,
gc.TINT64<<16 | gc.TUINT32,
gc.TUINT64<<16 | gc.TUINT32:
a = s390x.AMOVWZ
case gc.TINT64<<16 | gc.TINT64,
gc.TINT64<<16 | gc.TUINT64,
gc.TUINT64<<16 | gc.TINT64,
gc.TUINT64<<16 | gc.TUINT64:
a = s390x.AMOVD
// sign extend int8
case gc.TINT8<<16 | gc.TINT16,
gc.TINT8<<16 | gc.TUINT16,
gc.TINT8<<16 | gc.TINT32,
gc.TINT8<<16 | gc.TUINT32,
gc.TINT8<<16 | gc.TINT64,
gc.TINT8<<16 | gc.TUINT64:
a = s390x.AMOVB
goto rdst
	// zero extend uint8
case gc.TUINT8<<16 | gc.TINT16,
gc.TUINT8<<16 | gc.TUINT16,
gc.TUINT8<<16 | gc.TINT32,
gc.TUINT8<<16 | gc.TUINT32,
gc.TUINT8<<16 | gc.TINT64,
gc.TUINT8<<16 | gc.TUINT64:
a = s390x.AMOVBZ
goto rdst
// sign extend int16
case gc.TINT16<<16 | gc.TINT32,
gc.TINT16<<16 | gc.TUINT32,
gc.TINT16<<16 | gc.TINT64,
gc.TINT16<<16 | gc.TUINT64:
a = s390x.AMOVH
goto rdst
// zero extend uint16
case gc.TUINT16<<16 | gc.TINT32,
gc.TUINT16<<16 | gc.TUINT32,
gc.TUINT16<<16 | gc.TINT64,
gc.TUINT16<<16 | gc.TUINT64:
a = s390x.AMOVHZ
goto rdst
// sign extend int32
case gc.TINT32<<16 | gc.TINT64,
gc.TINT32<<16 | gc.TUINT64:
a = s390x.AMOVW
goto rdst
// zero extend uint32
case gc.TUINT32<<16 | gc.TINT64,
gc.TUINT32<<16 | gc.TUINT64:
a = s390x.AMOVWZ
goto rdst
// float to integer
case gc.TFLOAT32<<16 | gc.TUINT8,
gc.TFLOAT32<<16 | gc.TUINT16:
cvt = gc.Types[gc.TUINT32]
goto hard
case gc.TFLOAT32<<16 | gc.TUINT32:
a = s390x.ACLFEBR
goto rdst
case gc.TFLOAT32<<16 | gc.TUINT64:
a = s390x.ACLGEBR
goto rdst
case gc.TFLOAT64<<16 | gc.TUINT8,
gc.TFLOAT64<<16 | gc.TUINT16:
cvt = gc.Types[gc.TUINT32]
goto hard
case gc.TFLOAT64<<16 | gc.TUINT32:
a = s390x.ACLFDBR
goto rdst
case gc.TFLOAT64<<16 | gc.TUINT64:
a = s390x.ACLGDBR
goto rdst
case gc.TFLOAT32<<16 | gc.TINT8,
gc.TFLOAT32<<16 | gc.TINT16:
cvt = gc.Types[gc.TINT32]
goto hard
case gc.TFLOAT32<<16 | gc.TINT32:
a = s390x.ACFEBRA
goto rdst
case gc.TFLOAT32<<16 | gc.TINT64:
a = s390x.ACGEBRA
goto rdst
case gc.TFLOAT64<<16 | gc.TINT8,
gc.TFLOAT64<<16 | gc.TINT16:
cvt = gc.Types[gc.TINT32]
goto hard
case gc.TFLOAT64<<16 | gc.TINT32:
a = s390x.ACFDBRA
goto rdst
case gc.TFLOAT64<<16 | gc.TINT64:
a = s390x.ACGDBRA
goto rdst
// integer to float
case gc.TUINT8<<16 | gc.TFLOAT32,
gc.TUINT16<<16 | gc.TFLOAT32:
cvt = gc.Types[gc.TUINT32]
goto hard
case gc.TUINT32<<16 | gc.TFLOAT32:
a = s390x.ACELFBR
goto rdst
case gc.TUINT64<<16 | gc.TFLOAT32:
a = s390x.ACELGBR
goto rdst
case gc.TUINT8<<16 | gc.TFLOAT64,
gc.TUINT16<<16 | gc.TFLOAT64:
cvt = gc.Types[gc.TUINT32]
goto hard
case gc.TUINT32<<16 | gc.TFLOAT64:
a = s390x.ACDLFBR
goto rdst
case gc.TUINT64<<16 | gc.TFLOAT64:
a = s390x.ACDLGBR
goto rdst
case gc.TINT8<<16 | gc.TFLOAT32,
gc.TINT16<<16 | gc.TFLOAT32:
cvt = gc.Types[gc.TINT32]
goto hard
case gc.TINT32<<16 | gc.TFLOAT32:
a = s390x.ACEFBRA
goto rdst
case gc.TINT64<<16 | gc.TFLOAT32:
a = s390x.ACEGBRA
goto rdst
case gc.TINT8<<16 | gc.TFLOAT64,
gc.TINT16<<16 | gc.TFLOAT64:
cvt = gc.Types[gc.TINT32]
goto hard
case gc.TINT32<<16 | gc.TFLOAT64:
a = s390x.ACDFBRA
goto rdst
case gc.TINT64<<16 | gc.TFLOAT64:
a = s390x.ACDGBRA
goto rdst
// float to float
case gc.TFLOAT32<<16 | gc.TFLOAT32:
a = s390x.AFMOVS
case gc.TFLOAT64<<16 | gc.TFLOAT64:
a = s390x.AFMOVD
case gc.TFLOAT32<<16 | gc.TFLOAT64:
a = s390x.ALDEBR
goto rdst
case gc.TFLOAT64<<16 | gc.TFLOAT32:
a = s390x.ALEDBR
goto rdst
}
gins(a, f, t)
return
// requires register destination
rdst:
if t != nil && t.Op == gc.OREGISTER {
gins(a, f, t)
return
} else {
var r1 gc.Node
gc.Regalloc(&r1, t.Type, t)
gins(a, f, &r1)
gmove(&r1, t)
gc.Regfree(&r1)
return
}
// requires register intermediate
hard:
var r1 gc.Node
gc.Regalloc(&r1, cvt, t)
gmove(f, &r1)
gmove(&r1, t)
gc.Regfree(&r1)
return
}
func intLiteral(n *gc.Node) (x int64, ok bool) {
switch {
case n == nil:
return
case gc.Isconst(n, gc.CTINT):
return n.Int64(), true
case gc.Isconst(n, gc.CTBOOL):
return int64(obj.Bool2int(n.Bool())), true
}
return
}
// gins is called by the front end.
// It synthesizes some multiple-instruction sequences
// so the front end can stay simpler.
func gins(as obj.As, f, t *gc.Node) *obj.Prog {
if t != nil {
if as >= obj.A_ARCHSPECIFIC {
if x, ok := intLiteral(f); ok {
ginscon(as, x, t)
return nil // caller must not use
}
}
if as == s390x.ACMP || as == s390x.ACMPU {
if x, ok := intLiteral(t); ok {
ginscon2(as, f, x)
return nil // caller must not use
}
}
}
return rawgins(as, f, t)
}
// generate one instruction:
// as f, t
func rawgins(as obj.As, f *gc.Node, t *gc.Node) *obj.Prog {
// self move check
// TODO(mundaym): use sized math and extend to MOVB, MOVWZ etc.
switch as {
case s390x.AMOVD, s390x.AFMOVS, s390x.AFMOVD:
if f != nil && t != nil &&
f.Op == gc.OREGISTER && t.Op == gc.OREGISTER &&
f.Reg == t.Reg {
return nil
}
}
p := gc.Prog(as)
gc.Naddr(&p.From, f)
gc.Naddr(&p.To, t)
switch as {
// Bad things the front end has done to us. Crash to find call stack.
case s390x.ACMP, s390x.ACMPU:
if p.From.Type == obj.TYPE_MEM || p.To.Type == obj.TYPE_MEM {
gc.Debug['h'] = 1
gc.Fatalf("bad inst: %v", p)
}
}
if gc.Debug['g'] != 0 {
fmt.Printf("%v\n", p)
}
w := int32(0)
switch as {
case s390x.AMOVB, s390x.AMOVBZ:
w = 1
case s390x.AMOVH, s390x.AMOVHZ:
w = 2
case s390x.AMOVW, s390x.AMOVWZ:
w = 4
case s390x.AMOVD:
if p.From.Type == obj.TYPE_CONST || p.From.Type == obj.TYPE_ADDR {
break
}
w = 8
}
if w != 0 && ((f != nil && p.From.Width < int64(w)) || (t != nil && p.To.Type != obj.TYPE_REG && p.To.Width > int64(w))) {
gc.Dump("f", f)
gc.Dump("t", t)
gc.Fatalf("bad width: %v (%d, %d)\n", p, p.From.Width, p.To.Width)
}
return p
}
// optoas returns the Axxx equivalent of Oxxx for type t
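// For example, optoas(gc.OADD, gc.Types[gc.TINT64]) returns s390x.AADD
// and optoas(gc.OEQ, gc.Types[gc.TINT64]) returns s390x.ABEQ.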
func optoas(op gc.Op, t *gc.Type) obj.As {
if t == nil {
gc.Fatalf("optoas: t is nil")
}
// avoid constant conversions in switches below
const (
OMINUS_ = uint32(gc.OMINUS) << 16
OLSH_ = uint32(gc.OLSH) << 16
ORSH_ = uint32(gc.ORSH) << 16
OADD_ = uint32(gc.OADD) << 16
OSUB_ = uint32(gc.OSUB) << 16
OMUL_ = uint32(gc.OMUL) << 16
ODIV_ = uint32(gc.ODIV) << 16
OOR_ = uint32(gc.OOR) << 16
OAND_ = uint32(gc.OAND) << 16
OXOR_ = uint32(gc.OXOR) << 16
OEQ_ = uint32(gc.OEQ) << 16
ONE_ = uint32(gc.ONE) << 16
OLT_ = uint32(gc.OLT) << 16
OLE_ = uint32(gc.OLE) << 16
OGE_ = uint32(gc.OGE) << 16
OGT_ = uint32(gc.OGT) << 16
OCMP_ = uint32(gc.OCMP) << 16
OAS_ = uint32(gc.OAS) << 16
OHMUL_ = uint32(gc.OHMUL) << 16
OSQRT_ = uint32(gc.OSQRT) << 16
OLROT_ = uint32(gc.OLROT) << 16
)
a := obj.AXXX
switch uint32(op)<<16 | uint32(gc.Simtype[t.Etype]) {
default:
gc.Fatalf("optoas: no entry for op=%v type=%v", op, t)
case OEQ_ | gc.TBOOL,
OEQ_ | gc.TINT8,
OEQ_ | gc.TUINT8,
OEQ_ | gc.TINT16,
OEQ_ | gc.TUINT16,
OEQ_ | gc.TINT32,
OEQ_ | gc.TUINT32,
OEQ_ | gc.TINT64,
OEQ_ | gc.TUINT64,
OEQ_ | gc.TPTR32,
OEQ_ | gc.TPTR64,
OEQ_ | gc.TFLOAT32,
OEQ_ | gc.TFLOAT64:
a = s390x.ABEQ
case ONE_ | gc.TBOOL,
ONE_ | gc.TINT8,
ONE_ | gc.TUINT8,
ONE_ | gc.TINT16,
ONE_ | gc.TUINT16,
ONE_ | gc.TINT32,
ONE_ | gc.TUINT32,
ONE_ | gc.TINT64,
ONE_ | gc.TUINT64,
ONE_ | gc.TPTR32,
ONE_ | gc.TPTR64,
ONE_ | gc.TFLOAT32,
ONE_ | gc.TFLOAT64:
a = s390x.ABNE
case OLT_ | gc.TINT8, // ACMP
OLT_ | gc.TINT16,
OLT_ | gc.TINT32,
OLT_ | gc.TINT64,
OLT_ | gc.TUINT8,
// ACMPU
OLT_ | gc.TUINT16,
OLT_ | gc.TUINT32,
OLT_ | gc.TUINT64,
OLT_ | gc.TFLOAT32,
// AFCMPU
OLT_ | gc.TFLOAT64:
a = s390x.ABLT
case OLE_ | gc.TINT8, // ACMP
OLE_ | gc.TINT16,
OLE_ | gc.TINT32,
OLE_ | gc.TINT64,
OLE_ | gc.TUINT8,
// ACMPU
OLE_ | gc.TUINT16,
OLE_ | gc.TUINT32,
OLE_ | gc.TUINT64,
OLE_ | gc.TFLOAT32,
OLE_ | gc.TFLOAT64:
a = s390x.ABLE
case OGT_ | gc.TINT8,
OGT_ | gc.TINT16,
OGT_ | gc.TINT32,
OGT_ | gc.TINT64,
OGT_ | gc.TUINT8,
OGT_ | gc.TUINT16,
OGT_ | gc.TUINT32,
OGT_ | gc.TUINT64,
OGT_ | gc.TFLOAT32,
OGT_ | gc.TFLOAT64:
a = s390x.ABGT
case OGE_ | gc.TINT8,
OGE_ | gc.TINT16,
OGE_ | gc.TINT32,
OGE_ | gc.TINT64,
OGE_ | gc.TUINT8,
OGE_ | gc.TUINT16,
OGE_ | gc.TUINT32,
OGE_ | gc.TUINT64,
OGE_ | gc.TFLOAT32,
OGE_ | gc.TFLOAT64:
a = s390x.ABGE
case OCMP_ | gc.TBOOL,
OCMP_ | gc.TINT8,
OCMP_ | gc.TINT16,
OCMP_ | gc.TINT32,
OCMP_ | gc.TPTR32,
OCMP_ | gc.TINT64:
a = s390x.ACMP
case OCMP_ | gc.TUINT8,
OCMP_ | gc.TUINT16,
OCMP_ | gc.TUINT32,
OCMP_ | gc.TUINT64,
OCMP_ | gc.TPTR64:
a = s390x.ACMPU
case OCMP_ | gc.TFLOAT32:
a = s390x.ACEBR
case OCMP_ | gc.TFLOAT64:
a = s390x.AFCMPU
case OAS_ | gc.TBOOL,
OAS_ | gc.TINT8:
a = s390x.AMOVB
case OAS_ | gc.TUINT8:
a = s390x.AMOVBZ
case OAS_ | gc.TINT16:
a = s390x.AMOVH
case OAS_ | gc.TUINT16:
a = s390x.AMOVHZ
case OAS_ | gc.TINT32:
a = s390x.AMOVW
case OAS_ | gc.TUINT32,
OAS_ | gc.TPTR32:
a = s390x.AMOVWZ
case OAS_ | gc.TINT64,
OAS_ | gc.TUINT64,
OAS_ | gc.TPTR64:
a = s390x.AMOVD
case OAS_ | gc.TFLOAT32:
a = s390x.AFMOVS
case OAS_ | gc.TFLOAT64:
a = s390x.AFMOVD
case OADD_ | gc.TINT8,
OADD_ | gc.TUINT8,
OADD_ | gc.TINT16,
OADD_ | gc.TUINT16,
OADD_ | gc.TINT32,
OADD_ | gc.TUINT32,
OADD_ | gc.TPTR32,
OADD_ | gc.TINT64,
OADD_ | gc.TUINT64,
OADD_ | gc.TPTR64:
a = s390x.AADD
case OADD_ | gc.TFLOAT32:
a = s390x.AFADDS
case OADD_ | gc.TFLOAT64:
a = s390x.AFADD
case OSUB_ | gc.TINT8,
OSUB_ | gc.TUINT8,
OSUB_ | gc.TINT16,
OSUB_ | gc.TUINT16,
OSUB_ | gc.TINT32,
OSUB_ | gc.TUINT32,
OSUB_ | gc.TPTR32,
OSUB_ | gc.TINT64,
OSUB_ | gc.TUINT64,
OSUB_ | gc.TPTR64:
a = s390x.ASUB
case OSUB_ | gc.TFLOAT32:
a = s390x.AFSUBS
case OSUB_ | gc.TFLOAT64:
a = s390x.AFSUB
case OMINUS_ | gc.TINT8,
OMINUS_ | gc.TUINT8,
OMINUS_ | gc.TINT16,
OMINUS_ | gc.TUINT16,
OMINUS_ | gc.TINT32,
OMINUS_ | gc.TUINT32,
OMINUS_ | gc.TPTR32,
OMINUS_ | gc.TINT64,
OMINUS_ | gc.TUINT64,
OMINUS_ | gc.TPTR64:
a = s390x.ANEG
case OAND_ | gc.TINT8,
OAND_ | gc.TUINT8,
OAND_ | gc.TINT16,
OAND_ | gc.TUINT16,
OAND_ | gc.TINT32,
OAND_ | gc.TUINT32,
OAND_ | gc.TPTR32,
OAND_ | gc.TINT64,
OAND_ | gc.TUINT64,
OAND_ | gc.TPTR64:
a = s390x.AAND
case OOR_ | gc.TINT8,
OOR_ | gc.TUINT8,
OOR_ | gc.TINT16,
OOR_ | gc.TUINT16,
OOR_ | gc.TINT32,
OOR_ | gc.TUINT32,
OOR_ | gc.TPTR32,
OOR_ | gc.TINT64,
OOR_ | gc.TUINT64,
OOR_ | gc.TPTR64:
a = s390x.AOR
case OXOR_ | gc.TINT8,
OXOR_ | gc.TUINT8,
OXOR_ | gc.TINT16,
OXOR_ | gc.TUINT16,
OXOR_ | gc.TINT32,
OXOR_ | gc.TUINT32,
OXOR_ | gc.TPTR32,
OXOR_ | gc.TINT64,
OXOR_ | gc.TUINT64,
OXOR_ | gc.TPTR64:
a = s390x.AXOR
case OLSH_ | gc.TINT8,
OLSH_ | gc.TUINT8,
OLSH_ | gc.TINT16,
OLSH_ | gc.TUINT16,
OLSH_ | gc.TINT32,
OLSH_ | gc.TUINT32,
OLSH_ | gc.TPTR32,
OLSH_ | gc.TINT64,
OLSH_ | gc.TUINT64,
OLSH_ | gc.TPTR64:
a = s390x.ASLD
case ORSH_ | gc.TUINT8,
ORSH_ | gc.TUINT16,
ORSH_ | gc.TUINT32,
ORSH_ | gc.TPTR32,
ORSH_ | gc.TUINT64,
ORSH_ | gc.TPTR64:
a = s390x.ASRD
case ORSH_ | gc.TINT8,
ORSH_ | gc.TINT16,
ORSH_ | gc.TINT32,
ORSH_ | gc.TINT64:
a = s390x.ASRAD
case OHMUL_ | gc.TINT64:
a = s390x.AMULHD
case OHMUL_ | gc.TUINT64,
OHMUL_ | gc.TPTR64:
a = s390x.AMULHDU
case OMUL_ | gc.TINT8,
OMUL_ | gc.TINT16,
OMUL_ | gc.TINT32,
OMUL_ | gc.TINT64:
a = s390x.AMULLD
case OMUL_ | gc.TUINT8,
OMUL_ | gc.TUINT16,
OMUL_ | gc.TUINT32,
OMUL_ | gc.TPTR32,
// don't use word multiply, the high 32 bits are undefined.
OMUL_ | gc.TUINT64,
OMUL_ | gc.TPTR64:
// for 64-bit multiplies, signedness doesn't matter.
a = s390x.AMULLD
case OMUL_ | gc.TFLOAT32:
a = s390x.AFMULS
case OMUL_ | gc.TFLOAT64:
a = s390x.AFMUL
case ODIV_ | gc.TINT8,
ODIV_ | gc.TINT16,
ODIV_ | gc.TINT32,
ODIV_ | gc.TINT64:
a = s390x.ADIVD
case ODIV_ | gc.TUINT8,
ODIV_ | gc.TUINT16,
ODIV_ | gc.TUINT32,
ODIV_ | gc.TPTR32,
ODIV_ | gc.TUINT64,
ODIV_ | gc.TPTR64:
a = s390x.ADIVDU
case ODIV_ | gc.TFLOAT32:
a = s390x.AFDIVS
case ODIV_ | gc.TFLOAT64:
a = s390x.AFDIV
case OSQRT_ | gc.TFLOAT64:
a = s390x.AFSQRT
case OLROT_ | gc.TUINT32,
OLROT_ | gc.TPTR32,
OLROT_ | gc.TINT32:
a = s390x.ARLL
case OLROT_ | gc.TUINT64,
OLROT_ | gc.TPTR64,
OLROT_ | gc.TINT64:
a = s390x.ARLLG
}
return a
}
const (
ODynam = 1 << 0
OAddable = 1 << 1
)
var clean [20]gc.Node
var cleani int = 0
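// sudoclean releases the registers reserved by the most recent
// successful call to sudoaddable.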
func sudoclean() {
if clean[cleani-1].Op != gc.OEMPTY {
gc.Regfree(&clean[cleani-1])
}
if clean[cleani-2].Op != gc.OEMPTY {
gc.Regfree(&clean[cleani-2])
}
cleani -= 2
}
/*
* generate code to compute address of n,
* a reference to a (perhaps nested) field inside
* an array or struct.
* return false on failure, true on success.
* on success, leaves usable address in a.
*
* caller is responsible for calling sudoclean
* after successful sudoaddable,
* to release the register used for a.
*/
func sudoaddable(as obj.As, n *gc.Node, a *obj.Addr) bool {
if n.Type == nil {
return false
}
*a = obj.Addr{}
switch n.Op {
case gc.OLITERAL:
if !gc.Isconst(n, gc.CTINT) {
return false
}
v := n.Int64()
switch as {
default:
return false
// operations that can cope with a 32-bit immediate
// TODO(mundaym): logical operations can work on high bits
case s390x.AADD,
s390x.AADDC,
s390x.ASUB,
s390x.AMULLW,
s390x.AAND,
s390x.AOR,
s390x.AXOR,
s390x.ASLD,
s390x.ASLW,
s390x.ASRAW,
s390x.ASRAD,
s390x.ASRW,
s390x.ASRD,
s390x.AMOVB,
s390x.AMOVBZ,
s390x.AMOVH,
s390x.AMOVHZ,
s390x.AMOVW,
s390x.AMOVWZ,
s390x.AMOVD:
if int64(int32(v)) != v {
return false
}
// for comparisons avoid immediates unless they can
// fit into an int8/uint8
// this favours combined compare and branch instructions
case s390x.ACMP:
if int64(int8(v)) != v {
return false
}
case s390x.ACMPU:
if int64(uint8(v)) != v {
return false
}
}
cleani += 2
reg := &clean[cleani-1]
reg1 := &clean[cleani-2]
reg.Op = gc.OEMPTY
reg1.Op = gc.OEMPTY
gc.Naddr(a, n)
return true
case gc.ODOT,
gc.ODOTPTR:
cleani += 2
reg := &clean[cleani-1]
reg1 := &clean[cleani-2]
reg.Op = gc.OEMPTY
reg1.Op = gc.OEMPTY
var nn *gc.Node
var oary [10]int64
o := gc.Dotoffset(n, oary[:], &nn)
if nn == nil {
sudoclean()
return false
}
if nn.Addable && o == 1 && oary[0] >= 0 {
// directly addressable set of DOTs
n1 := *nn
n1.Type = n.Type
n1.Xoffset += oary[0]
// check that the offset fits into a 12-bit displacement
if n1.Xoffset < 0 || n1.Xoffset >= (1<<12)-8 {
sudoclean()
return false
}
gc.Naddr(a, &n1)
return true
}
gc.Regalloc(reg, gc.Types[gc.Tptr], nil)
n1 := *reg
n1.Op = gc.OINDREG
if oary[0] >= 0 {
gc.Agen(nn, reg)
n1.Xoffset = oary[0]
} else {
gc.Cgen(nn, reg)
gc.Cgen_checknil(reg)
n1.Xoffset = -(oary[0] + 1)
}
for i := 1; i < o; i++ {
if oary[i] >= 0 {
gc.Fatalf("can't happen")
}
gins(s390x.AMOVD, &n1, reg)
gc.Cgen_checknil(reg)
n1.Xoffset = -(oary[i] + 1)
}
a.Type = obj.TYPE_NONE
a.Index = 0
// check that the offset fits into a 12-bit displacement
if n1.Xoffset < 0 || n1.Xoffset >= (1<<12)-8 {
tmp := n1
tmp.Op = gc.OREGISTER
tmp.Type = gc.Types[gc.Tptr]
tmp.Xoffset = 0
gc.Cgen_checknil(&tmp)
ginscon(s390x.AADD, n1.Xoffset, &tmp)
n1.Xoffset = 0
}
gc.Naddr(a, &n1)
return true
}
return false
}<|fim▁end|> | gc.TUINT16<<16 | gc.TUINT16,
gc.TINT32<<16 | gc.TUINT16,
gc.TUINT32<<16 | gc.TUINT16, |
<|file_name|>mapObject.js<|end_file_name|><|fim▁begin|>function Block() {
this.isAttacked = false;
this.hasShip = false;
this.shipType = "NONE";
this.attackable = true;
this.shipSize = 0;
this.direction = "no"
}
function Ship(x,y,direction,size){
this.x = x;
this.y = y;
this.direction = direction;
this.size = size;
this.win = false;
}
var bKimage = document.getElementById("OL");
function GameMap(x, y, scale,ctx) {
this.x = x;
this.y = y;
this.ctx =ctx;
this.scale = scale;
this.length = scale / 11;
this.mapGrid = new Array(10);
this.mapX = this.x + this.length;
this.mapY = this.y + this.length;
this.ships = [];
this.adjustScale = function(num){
this.scale = num;
this.length = this.scale / 11;
this.mapX = this.x + this.length;
this.mapY = this.y + this.length;
}
//ship info
this.sinkList = new Array(5);
for(var i = 0 ; i < 5 ; i++)
this.sinkList[i]= false;
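//returns true once every ship block on this board has been hit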
this.win = function(){
for(var i = 0 ; i < 10 ; i++){
for(var j = 0 ;j < 10 ; j++){
if(this.mapGrid[j][i].hasShip && !this.mapGrid[j][i].isAttacked){
return false;
}
}
}
return true;
}
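//marks each ship type as sunk once all of its blocks have been hit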
this.updateSink = function (){
var count = [0,0,0,0,0];
for(var i = 0 ;i < 10 ; i++){
for(var j = 0 ;j < 10 ; j++){
if(this.mapGrid[j][i].hasShip && this.mapGrid[j][i].isAttacked){
if(this.mapGrid[j][i].shipType == AIRCRAFT_CARRIER)
count[AIRCRAFT_CARRIER]++;
if(this.mapGrid[j][i].shipType == BATTLESHIP)
count[BATTLESHIP]++;
if(this.mapGrid[j][i].shipType == CRUISER)
count[CRUISER]++;
if(this.mapGrid[j][i].shipType == SUBMARINE)
count[SUBMARINE]++;
if(this.mapGrid[j][i].shipType == DESTROYER)
count[DESTROYER]++;
}
}
}
if(count[AIRCRAFT_CARRIER]==5){
this.sinkList[AIRCRAFT_CARRIER]=true;
this.updateAttackable(AIRCRAFT_CARRIER);
}
if(count[BATTLESHIP]==4){
this.sinkList[BATTLESHIP]=true;
this.updateAttackable(BATTLESHIP);
}
if(count[CRUISER]==3){
this.sinkList[CRUISER]=true;
this.updateAttackable(CRUISER);
}
if(count[SUBMARINE]==3){
this.sinkList[SUBMARINE]=true;
this.updateAttackable(SUBMARINE);
}
if(count[DESTROYER]==2){
this.sinkList[DESTROYER]=true;
this.updateAttackable(DESTROYER);
}
//console.log(count);
}
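//ships can never touch (see checkSurrounding), so once a ship is sunk
//every untouched neighbouring cell is marked as not worth attacking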
this.updateAttackable = function(type){
for(var b = 0 ;b < 10 ; b++){
for(var a = 0 ;a < 10 ; a++){
if(this.mapGrid[a][b].shipType == type){
if(this.inIndex(a-1,b) && !this.hasShip(a-1,b) && !this.mapGrid[a-1][b].isAttacked)
this.mapGrid[a-1][b].attackable = false;
if(this.inIndex(a+1,b) && !this.hasShip(a+1,b) && !this.mapGrid[a+1][b].isAttacked)
this.mapGrid[a+1][b].attackable = false;
if(this.inIndex(a-1,b+1) && !this.hasShip(a-1,b+1) && !this.mapGrid[a-1][b+1].isAttacked)
this.mapGrid[a-1][b+1].attackable = false;
if(this.inIndex(a+1,b+1) && !this.hasShip(a+1,b+1) && !this.mapGrid[a+1][b+1].isAttacked)
this.mapGrid[a+1][b+1].attackable = false;
if(this.inIndex(a-1,b-1) && !this.hasShip(a-1,b-1) && !this.mapGrid[a-1][b-1].isAttacked)
this.mapGrid[a-1][b-1].attackable = false;
if(this.inIndex(a+1,b-1) && !this.hasShip(a+1,b-1) && !this.mapGrid[a+1][b-1].isAttacked)
this.mapGrid[a+1][b-1].attackable = false;
if(this.inIndex(a,b+1) && !this.hasShip(a,b+1) && !this.mapGrid[a][b+1].isAttacked)
this.mapGrid[a][b+1].attackable = false;
if(this.inIndex(a,b-1) && !this.hasShip(a,b-1) && !this.mapGrid[a][b-1].isAttacked)
this.mapGrid[a][b-1].attackable = false;
}
}
}
}
this.inIndex = function(a,b){
if(a < 0 || a > 9 || b < 0 || b >9)
return false;
return true;
}
this.resetMap = function() {
for (var i = 0; i < 10; i++) {<|fim▁hole|> this.mapGrid[i] = new Array(10);
}
for (var i = 0; i < 10; i++) {
for (var j = 0; j < 10; j++) {
this.mapGrid[i][j] = new Block();
}
}
}
this.imgBG = document.getElementById("LB");
this.drawMap = function() {
this.ctx.font = "" + this.length + "px airborne";
this.ctx.fillStyle = "rgba(221,221,255,0.6)";
this.ctx.drawImage(this.imgBG,10,10,450,450,this.x + this.length, this.y + this.length, this.scale - this.length, this.scale - this.length);
this.ctx.fillRect(this.x + this.length, this.y + this.length, this.scale - this.length, this.scale - this.length);
this.ctx.strokeRect(this.x, this.y, this.scale, this.scale);
this.ctx.fillStyle = "#ddddff";
for (var i = 1; i <= 10; i++) {
this.ctx.moveTo(this.x + i * this.length, this.y);
this.ctx.lineTo(this.x + i * this.length, this.y + this.scale);
this.ctx.stroke();
this.ctx.fillText(i, this.x + i * this.length + this.length / 20, this.y + this.length - this.length / 10);
this.ctx.strokeText(i, this.x + i * this.length + this.length / 20, this.y + this.length - this.length / 10);
}
for (var i = 1; i <= 10; i++) {
this.ctx.moveTo(this.x, this.y + i * this.length);
this.ctx.lineTo(this.x + this.scale, this.y + i * this.length);
this.ctx.stroke();
this.ctx.fillText(String.fromCharCode(64 + i), this.x + this.length / 10, this.y + (i + 1) * this.length - this.length / 10);
this.ctx.strokeText(String.fromCharCode(64 + i), this.x + this.length / 10, this.y + (i + 1) * this.length - this.length / 10);
}
}
this.drawMark = function(shipOption){
for(var i = 0 ;i < 10 ; i++){
for(var j = 0 ;j < 10 ; j++){
if(shipOption && this.mapGrid[j][i].hasShip && !this.mapGrid[j][i].isAttacked ){
var a = this.mapX + j*this.length;
var b = this.mapY + i*this.length;
this.drawShip(a,b);
}
if(this.mapGrid[j][i].hasShip && this.mapGrid[j][i].isAttacked){
var a = this.mapX + j*this.length;
var b = this.mapY + i*this.length;
this.drawHit(a,b);
}
if(!this.mapGrid[j][i].hasShip && this.mapGrid[j][i].isAttacked){
var a = this.mapX + j*this.length;
var b = this.mapY + i*this.length;
this.drawWave(a,b);
}
if(!this.mapGrid[j][i].attackable && hintSys){
var a = this.mapX + j*this.length;
var b = this.mapY + i*this.length;
this.drawHint(a,b);
}
}
}
}
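//resolves a shot fired at canvas coordinates (a,b); returns false only on a miss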
this.shipAttacked=function(a,b){
if(a>this.mapX && a<this.mapX+10*this.length && b>this.mapY && b<this.mapY+this.length*10){
a=a-this.mapX;
b=b-this.mapY;
a=Math.floor(a/this.length);
b=Math.floor(b/this.length);
if(!this.mapGrid[a][b].attackable || this.mapGrid[a][b].isAttacked){
return true;
}
this.mapGrid[a][b].isAttacked = true;
console.log(a + ", " + b);
this.drawMark();
this.updateSink();
if(this.mapGrid[a][b].hasShip == true){
shipHit();
if(this.inIndex(a+1,b+1) && !this.mapGrid[a+1][b+1].isAttacked){
this.mapGrid[a+1][b+1].attackable = false;
}
if(this.inIndex(a+1,b-1) && !this.mapGrid[a+1][b-1].isAttacked){
this.mapGrid[a+1][b-1].attackable = false;
}
if(this.inIndex(a-1,b+1) && !this.mapGrid[a-1][b+1].isAttacked){
this.mapGrid[a-1][b+1].attackable = false;
}
if(this.inIndex(a-1,b-1) && !this.mapGrid[a-1][b-1].isAttacked){
this.mapGrid[a-1][b-1].attackable = false;
}
this.drawMark();
return true;
}
else{
missedHit();
return false;
}
}
return true;
}
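//resolves an AI shot at grid cell (a,b); follows the same rules as
//shipAttacked but without the shipHit/missedHit callbacks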
this.aiAttack = function(a,b){
console.log(a + ", " + b);
if(!this.mapGrid[a][b].attackable || this.mapGrid[a][b].isAttacked){
return true;
}
this.mapGrid[a][b].isAttacked = true;
this.drawMark();
this.updateSink();
if(this.mapGrid[a][b].hasShip == true){
if(this.inIndex(a+1,b+1) && !this.mapGrid[a+1][b+1].isAttacked){
this.mapGrid[a+1][b+1].attackable = false;
}
if(this.inIndex(a+1,b-1) && !this.mapGrid[a+1][b-1].isAttacked){
this.mapGrid[a+1][b-1].attackable = false;
}
if(this.inIndex(a-1,b+1) && !this.mapGrid[a-1][b+1].isAttacked){
this.mapGrid[a-1][b+1].attackable = false;
}
if(this.inIndex(a-1,b-1) && !this.mapGrid[a-1][b-1].isAttacked){
this.mapGrid[a-1][b-1].attackable = false;
}
this.drawMark();
return true;
}
else{
return false;
}
}
this.drawShip = function(a,b){
var temp = this.ctx.fillStyle;
this.ctx.fillStyle = "blue";
this.ctx.fillRect(a,b,this.length,this.length);
this.ctx.fillStyle = temp;
}
this.drawHit = function(a,b){
var temp = this.ctx.fillStyle;
this.ctx.fillStyle = "red";
this.ctx.fillRect(a,b,this.length,this.length);
this.ctx.fillStyle = temp;
}
this.drawWave = function(a,b){
var temp = this.ctx.fillStyle;
this.ctx.fillStyle = "Grey";
this.ctx.fillRect(a,b,this.length,this.length);
this.ctx.fillStyle = temp;
}
this.drawHint = function(a,b){
var temp = this.ctx.fillStyle;
this.ctx.fillStyle = "#DDDDDD";
this.ctx.fillRect(a,b,this.length,this.length);
this.ctx.fillStyle = temp;
}
this.hasShip = function(a,b) {
//cells outside the map never contain a ship
if(a < 0 || a > 9 || b < 0 || b >9)
return false;
return this.mapGrid[a][b].hasShip;
}
//check surrounding
this.checkSurrounding = function(a,b){
if(this.hasShip(a-1,b))
return false;
if(this.hasShip(a+1,b))
return false;
if(this.hasShip(a-1,b+1))
return false;
if(this.hasShip(a+1,b+1))
return false;
if(this.hasShip(a-1,b-1))
return false;
if(this.hasShip(a+1,b-1))
return false;
if(this.hasShip(a,b+1))
return false;
if(this.hasShip(a,b-1))
return false;
return true;
}
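//a ship may only be placed on free cells whose neighbours are also free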
this.isPlaceable = function(a,b,direction,length){
//check this position
if(this.hasShip(a,b))
return false;
if(!this.checkSurrounding(a,b)){
return false;
}
if(direction == HORIZONTAL){
//the bounds check does not depend on i, so hoist it out of the loop
if(a + length - 1 > 9){
return false;
}
for(var i = 1 ; i < length ; i++){
if(this.hasShip(a+i,b) || !this.checkSurrounding(a+i,b))
return false;
}
}
else{
if(b + length - 1 > 9){
return false;
}
for(var i = 1 ; i < length ; i++){
if(this.hasShip(a,b+i) || !this.checkSurrounding(a,b+i))
return false;
}
}
return true;
}
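//randomly place the five standard ships, re-rolling until each position is legal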
this.randomPlacment = function(){
var direction;
var x;
var y;
do{
direction = Math.floor(Math.random()*2);
x = Math.floor(Math.random()*10);
y = Math.floor(Math.random()*10);
}
while(!this.isPlaceable(x,y,direction,5));
this.placeShip(x,y,AIRCRAFT_CARRIER,direction,5);
do{
direction = Math.floor(Math.random()*2);
x = Math.floor(Math.random()*10);
y = Math.floor(Math.random()*10);
}while(!this.isPlaceable(x,y,direction,4));
this.placeShip(x,y,BATTLESHIP,direction,4);
do{
direction = Math.floor(Math.random()*2);
x = Math.floor(Math.random()*10);
y = Math.floor(Math.random()*10);
}while(!this.isPlaceable(x,y,direction,3));
this.placeShip(x,y,CRUISER,direction,3);
do{
direction = Math.floor(Math.random()*2);
x = Math.floor(Math.random()*10);
y = Math.floor(Math.random()*10);
}while(!this.isPlaceable(x,y,direction,3));
this.placeShip(x,y,SUBMARINE,direction,3);
do{
direction = Math.floor(Math.random()*2);
x = Math.floor(Math.random()*10);
y = Math.floor(Math.random()*10);
}while(!this.isPlaceable(x,y,direction,2));
this.placeShip(x,y,DESTROYER,direction,2);
}
this.placeShip = function(x,y,name,direction,size){
if(direction == HORIZONTAL){
for(var i = 0 ; i< size ; i++){
this.mapGrid[x+i][y].hasShip = true;
this.mapGrid[x+i][y].shipType = name;
this.mapGrid[x+i][y].shipSize = size;
this.mapGrid[x+i][y].direction = direction;
}
}
else{
for(var i = 0 ; i< size ; i++){
this.mapGrid[x][y+i].hasShip = true;
this.mapGrid[x][y+i].shipType = name;
this.mapGrid[x][y+i].shipSize = size;
this.mapGrid[x][y+i].direction = direction;
}
}
}
}<|fim▁end|> | |
<|file_name|>privacy_tuple_struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub struct A(());
pub struct B(isize);
pub struct C(pub isize, isize);
pub struct D(pub isize);<|fim▁end|> | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')p9u&kcu@_(8u&-%4(m9!&4*82sx97zyl-!i#m9kic2lycj%0)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
<|fim▁hole|># Application definition
INSTALLED_APPS = [
'demografia.apps.DemografiaConfig',
'dal',
'dal_select2',
'suit',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
#'input_mask',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'comunidad.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'comunidad.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'HOST': '127.0.0.1',
'NAME': 'comunidad',
'PASSWORD': '123456',
'PORT': '5432',
'USER': 'postgres',
'SCHEMAS': 'public,demografia'
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
SUIT_CONFIG = {
# header
'ADMIN_NAME': 'comunidad',
'HEADER_DATE_FORMAT': 'l, j. F Y',
'HEADER_TIME_FORMAT': 'H:i',
# forms
'SHOW_REQUIRED_ASTERISK': True, # Default True
'CONFIRM_UNSAVED_CHANGES': True, # Default True
# menu
'SEARCH_URL': '/admin/auth/user/',
'MENU_ICONS': {
'sites': 'icon-leaf',
'auth': 'icon-lock',
},
# 'MENU_OPEN_FIRST_CHILD': True, # Default True
'MENU_EXCLUDE': ('demografia.miembrohogar',),
# 'MENU': (
# 'sites',
# {'app': 'auth', 'icon':'icon-lock', 'models': ('user', 'group')},
# {'label': 'Settings', 'icon':'icon-cog', 'models': ('auth.user', 'auth.group')},
# {'label': 'Support', 'icon':'icon-question-sign', 'url': '/support/'},
# ),
# misc
'LIST_PER_PAGE': 20
}
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'
LOGIN_REDIRECT_URL = 'index'
CACHE_BACKEND = 'simple:///'
AUTH_PROFILE_MODULE = "demografia.persona"
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]<|fim▁end|> | |
<|file_name|>borrowck-closures-mut-and-imm.rs<|end_file_name|><|fim▁begin|>// Tests that two closures cannot simultaneously have mutable
// and immutable access to the variable. Issue #6801.
fn get(x: &isize) -> isize {
*x
}
fn set(x: &mut isize) {
*x = 4;
}
fn a() {
let mut x = 3;
let c1 = || x = 4;
let c2 = || x * 5;
//~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
drop(c1);
}
fn b() {
let mut x = 3;
let c1 = || set(&mut x);
let c2 = || get(&x);
//~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
drop(c1);
}
fn c() {
let mut x = 3;
let c1 = || set(&mut x);
let c2 = || x * 5;
//~^ ERROR cannot borrow `x` as immutable because it is also borrowed as mutable
drop(c1);
}
fn d() {
let mut x = 3;
let c2 = || x * 5;
x = 5;
//~^ ERROR cannot assign to `x` because it is borrowed
drop(c2);
}
fn e() {
let mut x = 3;
let c1 = || get(&x);
x = 5;
//~^ ERROR cannot assign to `x` because it is borrowed
drop(c1);
}
fn f() {
let mut x: Box<_> = Box::new(3);
let c1 = || get(&*x);<|fim▁hole|> drop(c1);
}
fn g() {
struct Foo {
f: Box<isize>
}
let mut x: Box<_> = Box::new(Foo { f: Box::new(3) });
let c1 = || get(&*x.f);
*x.f = 5;
//~^ ERROR cannot assign to `*x.f` because it is borrowed
drop(c1);
}
fn h() {
struct Foo {
f: Box<isize>
}
let mut x: Box<_> = Box::new(Foo { f: Box::new(3) });
let c1 = || get(&*x.f);
let c2 = || *x.f = 5;
//~^ ERROR cannot borrow `x` as mutable because it is also borrowed as immutable
drop(c1);
}
fn main() {
}<|fim▁end|> | *x = 5;
//~^ ERROR cannot assign to `*x` because it is borrowed |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# GitComponentVersion documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 11 10:51:23 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#<|fim▁hole|># Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GitComponentVersion'
copyright = u'2017, Kevin Johnson'
author = u'Kevin Johnson'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GitComponentVersiondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GitComponentVersion.tex', u'GitComponentVersion Documentation',
u'Kevin Johnson', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'gitcomponentversion', u'GitComponentVersion Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GitComponentVersion', u'GitComponentVersion Documentation',
author, 'GitComponentVersion', 'One line description of project.',
'Miscellaneous'),
]<|fim▁end|> | # needs_sphinx = '1.0'
|
<|file_name|>common.d.ts<|end_file_name|><|fim▁begin|>import _ = require("../index");
// tslint:disable-next-line:strict-export-declare-modifiers
type GlobalPartial<T> = Partial<T>;
declare module "../index" {
type PartialObject<T> = GlobalPartial<T>;
type Many<T> = T | ReadonlyArray<T>;
interface LoDashStatic {
/**
* Creates a lodash object which wraps value to enable implicit method chain sequences.
* Methods that operate on and return arrays, collections, and functions can be chained together.
* Methods that retrieve a single value or may return a primitive value will automatically end the
* chain sequence and return the unwrapped value. Otherwise, the value must be unwrapped with value().
*
* Explicit chain sequences, which must be unwrapped with value(), may be enabled using _.chain.
*
* The execution of chained methods is lazy, that is, it's deferred until value() is
* implicitly or explicitly called.
*
* Lazy evaluation allows several methods to support shortcut fusion. Shortcut fusion
* is an optimization to merge iteratee calls; this avoids the creation of intermediate
* arrays and can greatly reduce the number of iteratee executions. Sections of a chain
* sequence qualify for shortcut fusion if the section is applied to an array and iteratees
* accept only one argument. The heuristic for whether a section qualifies for shortcut
* fusion is subject to change.
*
* Chaining is supported in custom builds as long as the value() method is directly or
* indirectly included in the build.
*
* In addition to lodash methods, wrappers have Array and String methods.
* The wrapper Array methods are:
* concat, join, pop, push, shift, sort, splice, and unshift.
* The wrapper String methods are:
* replace and split.
*
* The wrapper methods that support shortcut fusion are:
* at, compact, drop, dropRight, dropWhile, filter, find, findLast, head, initial, last,
* map, reject, reverse, slice, tail, take, takeRight, takeRightWhile, takeWhile, and toArray
*
* The chainable wrapper methods are:
* after, ary, assign, assignIn, assignInWith, assignWith, at, before, bind, bindAll, bindKey,
* castArray, chain, chunk, commit, compact, concat, conforms, constant, countBy, create,
* curry, debounce, defaults, defaultsDeep, defer, delay, difference, differenceBy, differenceWith,
* drop, dropRight, dropRightWhile, dropWhile, extend, extendWith, fill, filter, flatMap,
* flatMapDeep, flatMapDepth, flatten, flattenDeep, flattenDepth, flip, flow, flowRight,
* fromPairs, functions, functionsIn, groupBy, initial, intersection, intersectionBy, intersectionWith,
* invert, invertBy, invokeMap, iteratee, keyBy, keys, keysIn, map, mapKeys, mapValues,
* matches, matchesProperty, memoize, merge, mergeWith, method, methodOf, mixin, negate,
* nthArg, omit, omitBy, once, orderBy, over, overArgs, overEvery, overSome, partial, partialRight,
* partition, pick, pickBy, plant, property, propertyOf, pull, pullAll, pullAllBy, pullAllWith, pullAt,
* push, range, rangeRight, rearg, reject, remove, rest, reverse, sampleSize, set, setWith,
* shuffle, slice, sort, sortBy, sortedUniq, sortedUniqBy, splice, spread, tail, take,
* takeRight, takeRightWhile, takeWhile, tap, throttle, thru, toArray, toPairs, toPairsIn,
* toPath, toPlainObject, transform, unary, union, unionBy, unionWith, uniq, uniqBy, uniqWith,
* unset, unshift, unzip, unzipWith, update, updateWith, values, valuesIn, without, wrap,
* xor, xorBy, xorWith, zip, zipObject, zipObjectDeep, and zipWith.
*
* The wrapper methods that are not chainable by default are:
* add, attempt, camelCase, capitalize, ceil, clamp, clone, cloneDeep, cloneDeepWith, cloneWith,
* conformsTo, deburr, defaultTo, divide, each, eachRight, endsWith, eq, escape, escapeRegExp,
* every, find, findIndex, findKey, findLast, findLastIndex, findLastKey, first, floor, forEach,
* forEachRight, forIn, forInRight, forOwn, forOwnRight, get, gt, gte, has, hasIn, head,
* identity, includes, indexOf, inRange, invoke, isArguments, isArray, isArrayBuffer,
* isArrayLike, isArrayLikeObject, isBoolean, isBuffer, isDate, isElement, isEmpty, isEqual, isEqualWith,
* isError, isFinite, isFunction, isInteger, isLength, isMap, isMatch, isMatchWith, isNaN,
* isNative, isNil, isNull, isNumber, isObject, isObjectLike, isPlainObject, isRegExp,
* isSafeInteger, isSet, isString, isUndefined, isTypedArray, isWeakMap, isWeakSet, join,
* kebabCase, last, lastIndexOf, lowerCase, lowerFirst, lt, lte, max, maxBy, mean, meanBy,
* min, minBy, multiply, noConflict, noop, now, nth, pad, padEnd, padStart, parseInt, pop,
* random, reduce, reduceRight, repeat, result, round, runInContext, sample, shift, size,
* snakeCase, some, sortedIndex, sortedIndexBy, sortedLastIndex, sortedLastIndexBy, startCase,
* startsWith, stubArray, stubFalse, stubObject, stubString, stubTrue, subtract, sum, sumBy,
* template, times, toFinite, toInteger, toJSON, toLength, toLower, toNumber, toSafeInteger,
* toString, toUpper, trim, trimEnd, trimStart, truncate, unescape, uniqueId, upperCase,
* upperFirst, value, and words.
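*
* For example (illustrative values):
*
*   _([1, 2, 3]).map(n => n * 2).filter(n => n > 2).value(); // => [4, 6]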
**/
<T>(value: T): LoDashImplicitWrapper<T>;
/**
* The semantic version number.
**/
VERSION: string;
/**
* By default, the template delimiters used by Lo-Dash are similar to those in embedded Ruby
* (ERB). Change the following template settings to use alternative delimiters.
**/
templateSettings: TemplateSettings;
}
/**
* By default, the template delimiters used by Lo-Dash are similar to those in embedded Ruby
* (ERB). Change the following template settings to use alternative delimiters.
**/
interface TemplateSettings {
/**
* The "escape" delimiter.
**/
escape?: RegExp;
/**
* The "evaluate" delimiter.
**/
evaluate?: RegExp;
/**
* An object to import into the template as local variables.
**/
imports?: Dictionary<any>;
/**
* The "interpolate" delimiter.
**/
interpolate?: RegExp;
/**
* Used to reference the data object in the template text.
**/
variable?: string;
}
/**
* Creates a cache object to store key/value pairs.
*/
interface MapCache {
/**
* Removes `key` and its value from the cache.
* @param key The key of the value to remove.
* @return Returns `true` if the entry was removed successfully, else `false`.
*/
delete(key: any): boolean;
/**
* Gets the cached value for `key`.
* @param key The key of the value to get.
* @return Returns the cached value.
*/
get(key: any): any;
/**
* Checks if a cached value for `key` exists.
* @param key The key of the entry to check.
* @return Returns `true` if an entry for `key` exists, else `false`.
*/
has(key: any): boolean;
/**
* Sets `value` to `key` of the cache.
* @param key The key of the value to cache.
* @param value The value to cache.
* @return Returns the cache object.
*/
set(key: any, value: any): this;
/**
* Removes all key-value entries from the map.
*/
clear?: () => void;
}
interface MapCacheConstructor {
new (): MapCache;
}
interface LoDashImplicitWrapper<TValue> extends LoDashWrapper<TValue> {
pop<T>(this: LoDashImplicitWrapper<List<T> | null | undefined>): T | undefined;
push<T>(this: LoDashImplicitWrapper<List<T> | null | undefined>, ...items: T[]): this;
shift<T>(this: LoDashImplicitWrapper<List<T> | null | undefined>): T | undefined;
sort<T>(this: LoDashImplicitWrapper<List<T> | null | undefined>, compareFn?: (a: T, b: T) => number): this;
splice<T>(this: LoDashImplicitWrapper<List<T> | null | undefined>, start: number, deleteCount?: number, ...items: T[]): this;
unshift<T>(this: LoDashImplicitWrapper<List<T> | null | undefined>, ...items: T[]): this;
}
interface LoDashExplicitWrapper<TValue> extends LoDashWrapper<TValue> {
pop<T>(this: LoDashExplicitWrapper<List<T> | null | undefined>): LoDashExplicitWrapper<T | undefined>;
push<T>(this: LoDashExplicitWrapper<List<T> | null | undefined>, ...items: T[]): this;
shift<T>(this: LoDashExplicitWrapper<List<T> | null | undefined>): LoDashExplicitWrapper<T | undefined>;
sort<T>(this: LoDashExplicitWrapper<List<T> | null | undefined>, compareFn?: (a: T, b: T) => number): this;
splice<T>(this: LoDashExplicitWrapper<List<T> | null | undefined>, start: number, deleteCount?: number, ...items: T[]): this;
unshift<T>(this: LoDashExplicitWrapper<List<T> | null | undefined>, ...items: T[]): this;
}
type NotVoid = {} | null | undefined;
type IterateeShorthand<T> = PropertyName | [PropertyName, any] | PartialDeep<T>;
type ArrayIterator<T, TResult> = (value: T, index: number, collection: T[]) => TResult;
type ListIterator<T, TResult> = (value: T, index: number, collection: List<T>) => TResult;
type ListIteratee<T> = ListIterator<T, NotVoid> | IterateeShorthand<T>;
type ListIterateeCustom<T, TResult> = ListIterator<T, TResult> | IterateeShorthand<T>;
type ListIteratorTypeGuard<T, S extends T> = (value: T, index: number, collection: List<T>) => value is S;
// Note: key should be string, not keyof T, because the actual object may contain extra properties that were not specified in the type.
type ObjectIterator<TObject, TResult> = (value: TObject[keyof TObject], key: string, collection: TObject) => TResult;
type ObjectIteratee<TObject> = ObjectIterator<TObject, NotVoid> | IterateeShorthand<TObject[keyof TObject]>;
type ObjectIterateeCustom<TObject, TResult> = ObjectIterator<TObject, TResult> | IterateeShorthand<TObject[keyof TObject]>;
type ObjectIteratorTypeGuard<TObject, S extends TObject[keyof TObject]> = (value: TObject[keyof TObject], key: string, collection: TObject) => value is S;
type StringIterator<TResult> = (char: string, index: number, string: string) => TResult;
/** @deprecated Use MemoVoidArrayIterator or MemoVoidDictionaryIterator instead. */<|fim▁hole|> type MemoVoidIterator<T, TResult> = (prev: TResult, curr: T, indexOrKey: any, list: T[]) => void;
/** @deprecated Use MemoListIterator or MemoObjectIterator instead. */
type MemoIterator<T, TResult> = (prev: TResult, curr: T, indexOrKey: any, list: T[]) => TResult;
type MemoListIterator<T, TResult, TList> = (prev: TResult, curr: T, index: number, list: TList) => TResult;
type MemoObjectIterator<T, TResult, TList> = (prev: TResult, curr: T, key: string, list: TList) => TResult;
type MemoIteratorCapped<T, TResult> = (prev: TResult, curr: T) => TResult;
type MemoIteratorCappedRight<T, TResult> = (curr: T, prev: TResult) => TResult;
type MemoVoidArrayIterator<T, TResult> = (acc: TResult, curr: T, index: number, arr: T[]) => void;
type MemoVoidDictionaryIterator<T, TResult> = (acc: TResult, curr: T, key: string, dict: Dictionary<T>) => void;
type MemoVoidIteratorCapped<T, TResult> = (acc: TResult, curr: T) => void;
type ValueIteratee<T> = ((value: T) => NotVoid) | IterateeShorthand<T>;
type ValueIterateeCustom<T, TResult> = ((value: T) => TResult) | IterateeShorthand<T>;
type ValueIteratorTypeGuard<T, S extends T> = (value: T) => value is S;
type ValueKeyIteratee<T> = ((value: T, key: string) => NotVoid) | IterateeShorthand<T>;
type ValueKeyIterateeTypeGuard<T, S extends T> = (value: T, key: string) => value is S;
type Comparator<T> = (a: T, b: T) => boolean;
type Comparator2<T1, T2> = (a: T1, b: T2) => boolean;
type PropertyName = string | number | symbol;
type PropertyPath = Many<PropertyName>;
type Omit<T, K extends keyof T> = Pick<T, ({ [P in keyof T]: P } & { [P in K]: never } & { [x: string]: never })[keyof T]>;
/** Common interface between Arrays and jQuery objects */
type List<T> = ArrayLike<T>;
interface Dictionary<T> {
[index: string]: T;
}
interface NumericDictionary<T> {
[index: number]: T;
}
// Crazy typedef needed to get _.omit to work properly with Dictionary and NumericDictionary
type AnyKindOfDictionary =
| Dictionary<{} | null | undefined>
| NumericDictionary<{} | null | undefined>;
interface Cancelable {
cancel(): void;
flush(): void;
}
type PartialDeep<T> = {
[P in keyof T]?: PartialDeep<T[P]>;
};
// For backwards compatibility
type LoDashImplicitArrayWrapper<T> = LoDashImplicitWrapper<T[]>;
type LoDashImplicitNillableArrayWrapper<T> = LoDashImplicitWrapper<T[] | null | undefined>;
type LoDashImplicitObjectWrapper<T> = LoDashImplicitWrapper<T>;
type LoDashImplicitNillableObjectWrapper<T> = LoDashImplicitWrapper<T | null | undefined>;
type LoDashImplicitNumberArrayWrapper = LoDashImplicitWrapper<number[]>;
type LoDashImplicitStringWrapper = LoDashImplicitWrapper<string>;
type LoDashExplicitArrayWrapper<T> = LoDashExplicitWrapper<T[]>;
type LoDashExplicitNillableArrayWrapper<T> = LoDashExplicitWrapper<T[] | null | undefined>;
type LoDashExplicitObjectWrapper<T> = LoDashExplicitWrapper<T>;
type LoDashExplicitNillableObjectWrapper<T> = LoDashExplicitWrapper<T | null | undefined>;
type LoDashExplicitNumberArrayWrapper = LoDashExplicitWrapper<number[]>;
type LoDashExplicitStringWrapper = LoDashExplicitWrapper<string>;
type DictionaryIterator<T, TResult> = ObjectIterator<Dictionary<T>, TResult>;
type DictionaryIteratee<T> = ObjectIteratee<Dictionary<T>>;
type DictionaryIteratorTypeGuard<T, S extends T> = ObjectIteratorTypeGuard<Dictionary<T>, S>;
// NOTE: keys of objects at run time are always strings, even when a NumericDictionary is being iterated.
type NumericDictionaryIterator<T, TResult> = (value: T, key: string, collection: NumericDictionary<T>) => TResult;
type NumericDictionaryIteratee<T> = NumericDictionaryIterator<T, NotVoid> | IterateeShorthand<T>;
type NumericDictionaryIterateeCustom<T, TResult> = NumericDictionaryIterator<T, TResult> | IterateeShorthand<T>;
}<|fim▁end|> | |
<|file_name|>MultiDeviceVerifiedUpdateJob.java<|end_file_name|><|fim▁begin|>package org.thoughtcrime.securesms.jobs;
import androidx.annotation.NonNull;
import org.thoughtcrime.securesms.dependencies.ApplicationDependencies;
import org.thoughtcrime.securesms.jobmanager.Data;
import org.thoughtcrime.securesms.jobmanager.Job;
import org.thoughtcrime.securesms.jobmanager.impl.NetworkConstraint;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.crypto.UnidentifiedAccessUtil;
import org.thoughtcrime.securesms.database.IdentityDatabase.VerifiedStatus;
import org.thoughtcrime.securesms.recipients.RecipientId;
import org.thoughtcrime.securesms.recipients.RecipientUtil;
import org.thoughtcrime.securesms.util.Base64;
import org.thoughtcrime.securesms.recipients.Recipient;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
import org.whispersystems.libsignal.IdentityKey;
import org.whispersystems.libsignal.InvalidKeyException;
import org.whispersystems.signalservice.api.SignalServiceMessageSender;
import org.whispersystems.signalservice.api.crypto.UntrustedIdentityException;
import org.whispersystems.signalservice.api.messages.multidevice.SignalServiceSyncMessage;
import org.whispersystems.signalservice.api.messages.multidevice.VerifiedMessage;
import org.whispersystems.signalservice.api.push.SignalServiceAddress;
import org.whispersystems.signalservice.api.push.exceptions.PushNetworkException;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
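/**
 * Syncs a change in a contact's identity verification status to the
 * user's other linked devices by sending a {@link VerifiedMessage}
 * wrapped in a sync message.
 */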
public class MultiDeviceVerifiedUpdateJob extends BaseJob {
public static final String KEY = "MultiDeviceVerifiedUpdateJob";
private static final String TAG = MultiDeviceVerifiedUpdateJob.class.getSimpleName();
private static final String KEY_DESTINATION = "destination";
private static final String KEY_IDENTITY_KEY = "identity_key";
private static final String KEY_VERIFIED_STATUS = "verified_status";
private static final String KEY_TIMESTAMP = "timestamp";
private RecipientId destination;
private byte[] identityKey;
private VerifiedStatus verifiedStatus;
private long timestamp;
public MultiDeviceVerifiedUpdateJob(@NonNull RecipientId destination, IdentityKey identityKey, VerifiedStatus verifiedStatus) {
this(new Job.Parameters.Builder()
.addConstraint(NetworkConstraint.KEY)
.setQueue("__MULTI_DEVICE_VERIFIED_UPDATE__")
.setLifespan(TimeUnit.DAYS.toMillis(1))
.setMaxAttempts(Parameters.UNLIMITED)<|fim▁hole|> System.currentTimeMillis());
}
private MultiDeviceVerifiedUpdateJob(@NonNull Job.Parameters parameters,
@NonNull RecipientId destination,
@NonNull byte[] identityKey,
@NonNull VerifiedStatus verifiedStatus,
long timestamp)
{
super(parameters);
this.destination = destination;
this.identityKey = identityKey;
this.verifiedStatus = verifiedStatus;
this.timestamp = timestamp;
}
@Override
public @NonNull Data serialize() {
return new Data.Builder().putString(KEY_DESTINATION, destination.serialize())
.putString(KEY_IDENTITY_KEY, Base64.encodeBytes(identityKey))
.putInt(KEY_VERIFIED_STATUS, verifiedStatus.toInt())
.putLong(KEY_TIMESTAMP, timestamp)
.build();
}
@Override
public @NonNull String getFactoryKey() {
return KEY;
}
@Override
public void onRun() throws IOException, UntrustedIdentityException {
try {
if (!TextSecurePreferences.isMultiDevice(context)) {
Log.i(TAG, "Not multi device...");
return;
}
if (destination == null) {
Log.w(TAG, "No destination...");
return;
}
SignalServiceMessageSender messageSender = ApplicationDependencies.getSignalServiceMessageSender();
Recipient recipient = Recipient.resolved(destination);
VerifiedMessage.VerifiedState verifiedState = getVerifiedState(verifiedStatus);
SignalServiceAddress verifiedAddress = RecipientUtil.toSignalServiceAddress(context, recipient);
VerifiedMessage verifiedMessage = new VerifiedMessage(verifiedAddress, new IdentityKey(identityKey, 0), verifiedState, timestamp);
messageSender.sendMessage(SignalServiceSyncMessage.forVerified(verifiedMessage),
UnidentifiedAccessUtil.getAccessFor(context, recipient));
} catch (InvalidKeyException e) {
throw new IOException(e);
}
}
private VerifiedMessage.VerifiedState getVerifiedState(VerifiedStatus status) {
VerifiedMessage.VerifiedState verifiedState;
switch (status) {
case DEFAULT: verifiedState = VerifiedMessage.VerifiedState.DEFAULT; break;
case VERIFIED: verifiedState = VerifiedMessage.VerifiedState.VERIFIED; break;
case UNVERIFIED: verifiedState = VerifiedMessage.VerifiedState.UNVERIFIED; break;
default: throw new AssertionError("Unknown status: " + verifiedStatus);
}
return verifiedState;
}
@Override
public boolean onShouldRetry(@NonNull Exception exception) {
return exception instanceof PushNetworkException;
}
@Override
public void onFailure() {
}
public static final class Factory implements Job.Factory<MultiDeviceVerifiedUpdateJob> {
@Override
public @NonNull MultiDeviceVerifiedUpdateJob create(@NonNull Parameters parameters, @NonNull Data data) {
try {
RecipientId destination = RecipientId.from(data.getString(KEY_DESTINATION));
VerifiedStatus verifiedStatus = VerifiedStatus.forState(data.getInt(KEY_VERIFIED_STATUS));
long timestamp = data.getLong(KEY_TIMESTAMP);
byte[] identityKey = Base64.decode(data.getString(KEY_IDENTITY_KEY));
return new MultiDeviceVerifiedUpdateJob(parameters, destination, identityKey, verifiedStatus, timestamp);
} catch (IOException e) {
throw new AssertionError(e);
}
}
}
}<|fim▁end|> | .build(),
destination,
identityKey.serialize(),
verifiedStatus, |
<|file_name|>urlutils_unit_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the urlutils library."""
__revision__ = "$Id$"
from invenio.testutils import InvenioTestCase
from cgi import parse_qs
from invenio.config import CFG_SITE_URL
from invenio.testutils import make_test_suite, run_test_suite
from invenio.urlutils import (create_AWS_request_url,
string_to_numeric_char_reference,
make_canonical_urlargd,
create_html_link,
create_html_mailto,
same_urls_p,
HASHLIB_IMPORTED,
wash_url_argument,
create_url,
create_Indico_request_url,
get_relative_url)
class TestWashUrlArgument(InvenioTestCase):
def test_wash_url_argument(self):
"""urlutils - washing of URL arguments"""
self.assertEqual(1,
wash_url_argument(['1'], 'int'))
self.assertEqual("1",
wash_url_argument(['1'], 'str'))
self.assertEqual(['1'],
wash_url_argument(['1'], 'list'))
self.assertEqual(0,
wash_url_argument('ellis', 'int'))
self.assertEqual("ellis",
wash_url_argument('ellis', 'str'))
self.assertEqual(["ellis"],
wash_url_argument('ellis', 'list'))
self.assertEqual(0,
wash_url_argument(['ellis'], 'int'))
self.assertEqual("ellis",
wash_url_argument(['ellis'], 'str'))
self.assertEqual(["ellis"],
wash_url_argument(['ellis'], 'list'))
class TestUrls(InvenioTestCase):
"""Tests on URLs"""
def test_url_creation(self):
"""urlutils - test url creation"""
self.assertEqual(create_url('http://www.a.com/search',
{'recid':3, 'of':'hb&'},
escape_urlargd=True),
'http://www.a.com/search?of=hb%26&recid=3')
self.assertEqual(create_url('http://www.a.com/search',
{'recid':3, 'of':'hb&'},
escape_urlargd=False),
'http://www.a.com/search?of=hb&&recid=3')
def test_canonical_urlargd_creation(self):
"""urlutils - test creation of canonical URLs"""
self.assertEqual(make_canonical_urlargd({'a' : 1,
'b' : '2',
'b&': '2=',
':' : '?&'},
{'a': ('int', 1),
'b': ('str', 2)}),
"?b%26=2%3D&%3A=%3F%26&b=2")
if HASHLIB_IMPORTED:
def test_signed_aws_request_creation(self):
"""urlutils - test creation of signed AWS requests"""
signed_aws_request_url = create_AWS_request_url("http://webservices.amazon.com/onca/xml",
{'AWSAccessKeyId': '00000000000000000000',
'Service': 'AWSECommerceService',
'Operation': 'ItemLookup',
'ItemId': '0679722769',
'ResponseGroup': 'ItemAttributes,Offers,Images,Reviews',
'Version': '2009-01-06'},
"1234567890",
_timestamp="2009-01-01T12:00:00Z")
# Are we at least accessing the correct base URL?
self.assert_(signed_aws_request_url.startswith("http://webservices.amazon.com/onca/xml"))
# Check that parameters with special characters (, :) get correctly
# encoded/decoded
## Note: using parse_qs() url-decodes the string
self.assertEqual(parse_qs(signed_aws_request_url)["ResponseGroup"],
['ItemAttributes,Offers,Images,Reviews'])
self.assert_('ItemAttributes%2COffers%2CImages%2CReviews' \
in signed_aws_request_url)
self.assertEqual(parse_qs(signed_aws_request_url)["Timestamp"],
['2009-01-01T12:00:00Z'])
# Check signature exists and is correct
self.assertEqual(parse_qs(signed_aws_request_url)["Signature"],
['Nace+U3Az4OhN7tISqgs1vdLBHBEijWcBeCqL5xN9xg='])
self.assert_('Nace%2BU3Az4OhN7tISqgs1vdLBHBEijWcBeCqL5xN9xg%3D&Operation' \
in signed_aws_request_url)
# Continue with an additional request
signed_aws_request_url_2 = \
create_AWS_request_url("http://ecs.amazonaws.co.uk/onca/xml",
{'AWSAccessKeyId': '00000000000000000000',
'Actor': 'Johnny Depp',
'AssociateTag': 'mytag-20',
'Operation': 'ItemSearch',
'ResponseGroup': 'ItemAttributes,Offers,Images,Reviews,Variations',
'SearchIndex': 'DVD',
'Service': 'AWSECommerceService',
'Sort': 'salesrank',
'Version': '2009-01-01'},
"1234567890",
_timestamp="2009-01-01T12:00:00Z")
# Check signature exists and is correct
self.assertEqual(parse_qs(signed_aws_request_url_2)["Signature"],
['TuM6E5L9u/uNqOX09ET03BXVmHLVFfJIna5cxXuHxiU='])
def test_signed_Indico_request_creation(self):
"""urlutils - test creation of signed Indico requests"""
signed_Indico_request_url = create_Indico_request_url("https://indico.cern.ch",
"categ",
"",
[1, 7],
"xml",
{'onlypublic': 'yes',
'order': 'title',
'from': 'today',
'to': 'tomorrow'},
'00000000-0000-0000-0000-000000000000',
'00000000-0000-0000-0000-000000000000',
_timestamp=1234)
# Are we at least acccessing correct base url?
self.assert_(signed_Indico_request_url.startswith("https://indico.cern.ch/export/categ/1-7.xml?"))
# Check parameters
self.assertEqual(parse_qs(signed_Indico_request_url)["order"],
['title'])
self.assertEqual(parse_qs(signed_Indico_request_url)["timestamp"],
['1234'])
# Check signature exists and is correct
self.assertEqual(parse_qs(signed_Indico_request_url)["signature"],
['e984e0c683e36ce3544372f23a397fd2400f4954'])
def test_same_urls_p(self):
"""urlutils - test checking URLs equality"""
self.assertEqual(same_urls_p(CFG_SITE_URL + '?a=b&c=d&e=f',
CFG_SITE_URL + '?e=f&c=d&a=b'),
True)
self.assertEqual(same_urls_p(CFG_SITE_URL + '?a=b&c=d&e=f&ln=fr',
CFG_SITE_URL + '?e=f&c=d&a=b&ln=en'),
False)
class TestHtmlLinks(InvenioTestCase):
"""Tests on HTML links"""
def test_html_link_creation(self):
"""urlutils - test creation of HTML links"""
# Check with various encoding and escaping traps
self.assertEqual(create_html_link('http://www.a.com',
{'a' : 1,
'b' : '2',
'b&': '2=',
':' : '?'},
'my label > & better than yours',
{'style': 'color:#f00',
'target': "_blank"}),
'<a href="http://www.a.com?a=1&%3A=%3F&b%26=2%3D&b=2" style="color:#f00" target="_blank">my label > & better than yours</a>')
def test_html_link_creation_no_argument_escaping(self):
"""urlutils - test creation of HTML links, without arguments escaping"""
self.assertEqual(create_html_link('http://www.a.com',
{'a' : 1,
'b' : '2',
'b&': '2=',
':' : '?'},
'my label > & better than yours',
{'style': 'color:#f00',
'target': "_blank"},
escape_urlargd=False),
'<a href="http://www.a.com?a=1&:=?&b&=2=&b=2" style="color:#f00" target="_blank">my label > & better than yours</a>')
def test_html_link_creation_no_attribute_escaping(self):
"""urlutils - test creation of HTML links, without attributes escaping"""
self.assertEqual(create_html_link('http://www.a.com',
{'a' : 1,
'b' : '2',
'b&': '2=',
':' : '?'},
'my label > & better than yours',
{'style': 'color:#f00',
'target': "_blank"},
escape_linkattrd=False),
'<a href="http://www.a.com?a=1&%3A=%3F&b%26=2%3D&b=2" style="color:#f00" target="_blank">my label > & better than yours</a>')
def test_string_to_numeric_char_reference(self):
"""urlutils - test numeric character conversion from string"""
self.assertEqual(string_to_numeric_char_reference('abc123'),
"abc123")
self.assertEqual(string_to_numeric_char_reference('\/&;,#$%~é'),
"\/&;,#$%~é")
class TestEmailObfuscationMode(InvenioTestCase):
"""Tests on HTML mailto links creation and obfuscation modes"""
def test_html_mailto_obfuscation_mode_minus1(self):
"""urlutils - test creation of HTML "mailto" links, obfuscation mode -1"""
self.assertEqual(create_html_mailto('[email protected]',
subject='Hey there',
body='Lunch at 8pm?\ncu!',
bcc='[email protected]',
link_label="Date creator",
linkattrd={'style': 'text-decoration: blink'},
email_obfuscation_mode=-1),
'')
def test_html_mailto_obfuscation_mode_0(self):
"""urlutils - test creation of HTML "mailto" links, obfuscation mode 0"""
self.assertEqual(create_html_mailto('[email protected]',
subject='Hey there',
body='Lunch at 8pm?\ncu!',
bcc='[email protected]',<|fim▁hole|>
def test_html_mailto_obfuscation_mode_1(self):
"""urlutils - test creation of HTML "mailto" links, obfuscation mode 1"""
self.assertEqual(create_html_mailto('[email protected]',
subject='Hey there',
body='Lunch at 8pm?\ncu!',
bcc='[email protected]',
link_label="Date creator",
linkattrd={'style': 'text-decoration: blink'},
email_obfuscation_mode=1),
'<a href="mailto:juliet [at] cds [dot] cern [dot] ch?body=Lunch%20at%208pm%3F%0D%0Acu%21&bcc=romeo%40cds.cern.ch&subject=Hey%20there" style="text-decoration: blink">Date creator</a>')
def test_html_mailto_obfuscation_mode_2(self):
"""urlutils - test creation of HTML "mailto" links, obfuscation mode 2"""
self.assertEqual(create_html_mailto('[email protected]',
subject='Hey there',
body='Lunch at 8pm?\ncu!',
bcc='[email protected]',
link_label="Date creator",
linkattrd={'style': 'text-decoration: blink'},
email_obfuscation_mode=2),
'<a href="mailto:juliet@cds.cern.ch?body=Lunch%20at%208pm%3F%0D%0Acu%21&bcc=romeo%40cds.cern.ch&subject=Hey%20there" style="text-decoration: blink">Date creator</a>')
def test_html_mailto_obfuscation_mode_3(self):
"""urlutils - test creation of HTML "mailto" links, obfuscation mode 3"""
self.assertEqual(create_html_mailto('[email protected]',
subject='Hey there',
body='Lunch at 8pm?\ncu!',
bcc='[email protected]',
link_label="Date creator",
linkattrd={'style': 'text-decoration: blink'},
email_obfuscation_mode=3),
'<script language="JavaScript" type="text/javascript">document.write(\'>a/<rotaerc etaD>"knilb :noitaroced-txet"=elyts "ereht02%yeH=tcejbus;pma&hc.nrec.sdc04%oemor=ccb;pma&12%ucA0%D0%F3%mp802%ta02%hcnuL=ydob?hc.nrec.sdc@teiluj:otliam"=ferh a<\'.split("").reverse().join(""))</script>')
def test_html_mailto_obfuscation_mode_4(self):
"""urlutils - test creation of HTML "mailto" links, obfuscation mode 4"""
self.assertEqual(create_html_mailto('[email protected]',
subject='Hey there',
body='Lunch at 8pm?\ncu!',
bcc='[email protected]',
link_label="Date creator",
linkattrd={'style': 'text-decoration: blink'},
email_obfuscation_mode=4),
'juliet<img src="%(CFG_SITE_URL)s/img/at.gif" alt=" [at] " style="vertical-align:baseline" />cds<img src="%(CFG_SITE_URL)s/img/dot.gif" alt=" [dot] " style="vertical-align:bottom" />cern<img src="%(CFG_SITE_URL)s/img/dot.gif" alt=" [dot] " style="vertical-align:bottom" />ch' % \
{'CFG_SITE_URL': CFG_SITE_URL})
class TestRelativeURL(InvenioTestCase):
"""Tests the get_relative_url function with different input strings"""
def test_relative_url(self):
"""urlutils - test get_relative_url"""
url_normal = "http://web.net"
self.assertEqual("", get_relative_url(url_normal))
url_normal_trailing = "http://web.net/"
self.assertEqual("", get_relative_url(url_normal_trailing))
url_more = "http://web.net/asd"
self.assertEqual("/asd", get_relative_url(url_more))
url_more_trailing = "http://web.net/asd/"
self.assertEqual("/asd", get_relative_url(url_more_trailing))
url_adv = "http://web.net/asd/qwe"
self.assertEqual("/asd/qwe", get_relative_url(url_adv))
url_adv_trailing = "http://web.net/asd/qwe/"
self.assertEqual("/asd/qwe", get_relative_url(url_adv_trailing))
TEST_SUITE = make_test_suite(TestWashUrlArgument,
TestUrls,
TestHtmlLinks,
TestEmailObfuscationMode,
TestRelativeURL)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)<|fim▁end|> | link_label="Date creator",
linkattrd={'style': 'text-decoration: blink'},
email_obfuscation_mode=0),
'<a href="mailto:[email protected]?body=Lunch%20at%208pm%3F%0D%0Acu%21&bcc=romeo%40cds.cern.ch&subject=Hey%20there" style="text-decoration: blink">Date creator</a>') |
<|file_name|>forkbomb.cpp<|end_file_name|><|fim▁begin|>#include <unistd.h>
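// Classic fork bomb: the loop below forks a new process on every iteration until system resources are exhausted.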
int main(void)
{
while(1) <|fim▁hole|> return 0;
}<|fim▁end|> | fork(); |
<|file_name|>04-single_cycle_linked_list.py<|end_file_name|><|fim▁begin|># coding:utf-8
# Operations on a singly linked circular list:
# is_empty()        check whether the list is empty
# length()          return the length of the list
# travel()          traverse the list
# add(item)         add a node at the head
# append(item)      add a node at the tail
# insert(pos, item) insert a node at position pos
# remove(item)      remove a node
# search(item)      check whether a node exists
class Node(object):
"""节点"""
def __init__(self, item):
self.elem = item
self.next = None
class SingleCycleLinkedList(object):
"""单向循环链表"""
def __init__(self, node=None):
self.__head = node
        # If node is not None, point it at itself to form a circular list
if node:
node.next = node
def is_empty(self):
"""判断链表是否为空"""
return self.__head is None
def length(self):
"""返回链表的长度"""
if self.is_empty():
return 0
else:
cur = self.__head
count = 1
while cur.next is not self.__head:
count += 1
cur = cur.next
return count
def travel(self):
"""遍历"""
if self.is_empty():
return
else:
cur = self.__head
while cur.next is not self.__head:
print(cur.elem, end=" ")
cur = cur.next
            # The loop ends with cur at the tail node, whose element has not been printed yet, so print it separately
print(cur.elem)
def add(self, item):
"""在头部添加一个节点,头插法"""
node = Node(item)
if self.is_empty():
self.__head = node
node.next = node
else:
            # Need to find the tail node
cur = self.__head
while cur.next is not self.__head:
cur = cur.next
node.next = self.__head
self.__head = node
cur.next = node
def append(self, item):
"""在尾部添加一个节点,尾插法"""
node = Node(item)
if self.is_empty():
self.__head = node
node.next = node
else:
            # Likewise need to find the tail node
cur = self.__head<|fim▁hole|> node.next = self.__head
def insert(self, pos, item):
"""在指定位置pos添加节点"""
if pos <= 0:
self.add(item)
elif pos > (self.length() - 1):
self.append(item)
else:
node = Node(item)
prev = self.__head
count = 0
while count < pos - 1:
count += 1
prev = prev.next
            # The loop ends with prev at the element just before the insertion position
node.next = prev.next
prev.next = node
def remove(self, item):
"""删除一个节点,需要考虑链表是否为空,删除的节点是头节点,尾节点,还是中间节点"""
if self.is_empty():
return
else:
cur = self.__head
pre = None
while cur.next is not self.__head:
if cur.elem == item:
                    # Determine whether this is the head node or a middle node
if cur is self.__head:
                        # Head node: find the tail node first
rear = self.__head
while rear.next is not self.__head:
rear = rear.next
self.__head = cur.next
rear.next = self.__head
else:
                        # Middle node
pre.next = cur.next
return
else:
pre = cur
cur = cur.next
            # After the loop, cur points to the tail node
if cur.elem == item:
                # Note: check whether the list contains only one node
if cur is self.__head:
self.__head = None
else:
pre.next = self.__head
def search(self, item):
"""查找节点是否存在"""
if self.is_empty():
return False
else:
cur = self.__head
while cur.next is not self.__head:
if cur.elem == item:
return True
else:
cur = cur.next
            # The loop ends with cur at the tail node, which was not compared, so check it separately
if cur.elem == item:
return True
else:
return False
if __name__ == "__main__":
scll = SingleCycleLinkedList()
print("befor initialized:", scll.is_empty())
print("befor initialized:", scll.length())
scll.add(1)
scll.add(2)
scll.add(3)
scll.add(4)
scll.add(5)
scll.add(6)
scll.travel()
scll.append(7)
scll.travel()
scll.insert(3, 99)
scll.travel()
print("scll.search(99):", scll.search(99))
scll.remove(99)
scll.travel()<|fim▁end|> | while cur.next is not self.__head:
cur = cur.next
cur.next = node |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>class NotSupportedDayError(Exception):
def __init__(self, value):
self.value = value
    def __str__(self):<|fim▁hole|><|fim▁end|> | return repr(" ".join([" Day ", self.value, " is not supported "])) |
<|file_name|>player_unit.rs<|end_file_name|><|fim▁begin|>// OpenAOE: An open source reimplementation of Age of Empires (1997)
// Copyright (c) 2016 Kevin Fuller
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is<|fim▁hole|>// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use error::*;
use identifier::*;
use io_tools::*;
use std::io::Read;
#[derive(Default, Debug)]
pub struct PlayerUnit {
pub position_x: f32,
pub position_y: f32,
pub position_z: f32,
pub spawn_id: Option<SpawnId>,
pub unit_id: UnitId,
pub state: u8,
pub rotation: f32,
}
impl PlayerUnit {
// TODO: Implement writing
pub fn read_from_stream<S: Read>(stream: &mut S) -> Result<PlayerUnit> {
let mut data: PlayerUnit = Default::default();
data.position_x = try!(stream.read_f32());
data.position_y = try!(stream.read_f32());
data.position_z = try!(stream.read_f32());
data.spawn_id = optional_id!(try!(stream.read_i32()));
data.unit_id = required_id!(try!(stream.read_i16()));
data.state = try!(stream.read_u8());
data.rotation = try!(stream.read_f32());
Ok(data)
}
}<|fim▁end|> | // furnished to do so, subject to the following conditions:
// |
<|file_name|>server.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ɵAnimationEngine} from '@angular/animations/browser';
import {DOCUMENT, PlatformLocation, ViewportScroller, ɵNullViewportScroller as NullViewportScroller, ɵPLATFORM_SERVER_ID as PLATFORM_SERVER_ID} from '@angular/common';
import {HttpClientModule} from '@angular/common/http';
import {Injectable, InjectionToken, Injector, NgModule, NgZone, Optional, PLATFORM_ID, PLATFORM_INITIALIZER, PlatformRef, Provider, RendererFactory2, RootRenderer, StaticProvider, Testability, createPlatformFactory, platformCore, ɵALLOW_MULTIPLE_PLATFORMS as ALLOW_MULTIPLE_PLATFORMS} from '@angular/core';
import {BrowserModule, EVENT_MANAGER_PLUGINS, ɵSharedStylesHost as SharedStylesHost, ɵgetDOM as getDOM} from '@angular/platform-browser';
import {ɵplatformCoreDynamic as platformCoreDynamic} from '@angular/platform-browser-dynamic';
import {NoopAnimationsModule, ɵAnimationRendererFactory} from '@angular/platform-browser/animations';
import {DominoAdapter, parseDocument} from './domino_adapter';
import {SERVER_HTTP_PROVIDERS} from './http';
import {ServerPlatformLocation} from './location';
import {PlatformState} from './platform_state';
import {ServerEventManagerPlugin} from './server_events';
import {ServerRendererFactory2} from './server_renderer';
import {ServerStylesHost} from './styles_host';
import {INITIAL_CONFIG, PlatformConfig} from './tokens';
function notSupported(feature: string): Error {
throw new Error(`platform-server does not support '${feature}'.`);
}
export const INTERNAL_SERVER_PLATFORM_PROVIDERS: StaticProvider[] = [
{provide: DOCUMENT, useFactory: _document, deps: [Injector]},
{provide: PLATFORM_ID, useValue: PLATFORM_SERVER_ID},
{provide: PLATFORM_INITIALIZER, useFactory: initDominoAdapter, multi: true, deps: [Injector]}, {
provide: PlatformLocation,
useClass: ServerPlatformLocation,
deps: [DOCUMENT, [Optional, INITIAL_CONFIG]]
},
{provide: PlatformState, deps: [DOCUMENT]},
// Add special provider that allows multiple instances of platformServer* to be created.
{provide: ALLOW_MULTIPLE_PLATFORMS, useValue: true}
];
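// Platform initializer that installs Domino as the active DOM adapter.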
function initDominoAdapter(injector: Injector) {
return () => { DominoAdapter.makeCurrent(); };
}
export function instantiateServerRendererFactory(
renderer: RendererFactory2, engine: ɵAnimationEngine, zone: NgZone) {
return new ɵAnimationRendererFactory(renderer, engine, zone);
}
export const SERVER_RENDER_PROVIDERS: Provider[] = [
ServerRendererFactory2,
{
provide: RendererFactory2,
useFactory: instantiateServerRendererFactory,
deps: [ServerRendererFactory2, ɵAnimationEngine, NgZone]
},
ServerStylesHost,
{provide: SharedStylesHost, useExisting: ServerStylesHost},
{provide: EVENT_MANAGER_PLUGINS, multi: true, useClass: ServerEventManagerPlugin},
];
/**
* The ng module for the server.
*
* @publicApi
*/
@NgModule({
exports: [BrowserModule],
imports: [HttpClientModule, NoopAnimationsModule],
providers: [
SERVER_RENDER_PROVIDERS,
SERVER_HTTP_PROVIDERS,
{provide: Testability, useValue: null},
{provide: ViewportScroller, useClass: NullViewportScroller},
],
})
export class ServerModule {
}
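// Resolves the platform document: parses the HTML supplied via INITIAL_CONFIG when present, otherwise creates an empty document.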
function _document(injector: Injector) {
let config: PlatformConfig|null = injector.get(INITIAL_CONFIG, null);
if (config && config.document) {
return parseDocument(config.document, config.url);
} else {
return getDOM().createHtmlDocument();
}
}
/**<|fim▁hole|> createPlatformFactory(platformCore, 'server', INTERNAL_SERVER_PLATFORM_PROVIDERS);
/**
* The server platform that supports the runtime compiler.
*
* @publicApi
*/
export const platformDynamicServer =
createPlatformFactory(platformCoreDynamic, 'serverDynamic', INTERNAL_SERVER_PLATFORM_PROVIDERS);<|fim▁end|> | * @publicApi
*/
export const platformServer = |
<|file_name|>site-tree-right-side.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import {QueryNavWrapper} from "../../widgets/query-tabs/qurery-nav-wrapper";
import {EntityNewPage} from "../entity/entity-new-page";
import {EntityHome} from "../entity/entity-home";
const TAB_CONTENT: { [value: string]: () => JSX.Element } = {
"": () => <EntityHome/>,
"delete": () => <div>delete</div>,
"new-page": () => <EntityNewPage/>,
"new-site": () => <div>new site</div>,
};<|fim▁hole|> return <QueryNavWrapper param="ev" content={TAB_CONTENT}/>;
}
}<|fim▁end|> |
export class SiteTreeRightSide extends React.Component<{}> {
public render() { |
<|file_name|>PowerTimerEdit.py<|end_file_name|><|fim▁begin|>from functools import cmp_to_key
from time import time
from timer import TimerEntry as RealTimerEntry
from PowerTimer import PowerTimerEntry, AFTEREVENT
from Components.ActionMap import ActionMap
from Components.Button import Button
from Components.config import config
from Components.Label import Label
from Components.PowerTimerList import PowerTimerList, gettimerType, getafterEvent
from Components.Sources.StaticText import StaticText
from Components.Sources.ServiceEvent import ServiceEvent
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
from Screens.PowerTimerEntry import TimerEntry
from Screens.Screen import Screen
from Screens.TimerEntry import TimerLog
from Tools.BoundFunction import boundFunction
from Tools.FuzzyDate import FuzzyTime
class PowerTimerEditList(Screen):
EMPTY = 0
ENABLE = 1
DISABLE = 2
CLEANUP = 3
DELETE = 4
def __init__(self, session):
Screen.__init__(self, session)
self.skinName = "TimerEditList"
Screen.setTitle(self, _("PowerTimer list"))
self.onChangedEntry = []
list = []
self.list = list
self.fillTimerList()
self["timerlist"] = PowerTimerList(list)
self.key_red_choice = self.EMPTY
self.key_yellow_choice = self.EMPTY
self.key_blue_choice = self.EMPTY
self["key_red"] = Button(" ")
self["key_green"] = Button(_("Add"))
self["key_yellow"] = Button(" ")
self["key_blue"] = Button(" ")
self["description"] = Label()
self["ServiceEvent"] = ServiceEvent()
self["actions"] = ActionMap(["OkCancelActions", "DirectionActions", "ShortcutActions", "TimerEditActions"],
{
"ok": self.openEdit,
"cancel": self.leave,
"green": self.addCurrentTimer,
"log": self.showLog,
"left": self.left,
"right": self.right,
"up": self.up,
"down": self.down
}, -1)
self.setTitle(_("PowerTimer Overview"))
self.session.nav.PowerTimer.on_state_change.append(self.onStateChange)
self.onShown.append(self.updateState)
def createSummary(self):
return PowerTimerEditListSummary
def up(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.moveUp)
self.updateState()
def down(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.moveDown)
self.updateState()
def left(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.pageUp)
self.updateState()
def right(self):
self["timerlist"].instance.moveSelection(self["timerlist"].instance.pageDown)
self.updateState()
def toggleDisabledState(self):
cur = self["timerlist"].getCurrent()
if cur:
t = cur
if t.disabled:
print("[PowerTimerEdit] try to enable timer")
t.enable()
else:
if t.isRunning():
if t.repeated:
list = (
(_("Stop current event but not coming events"), "stoponlycurrent"),
(_("Stop current event and disable coming events"), "stopall"),
(_("Don't stop current event but disable coming events"), "stoponlycoming")
)
self.session.openWithCallback(boundFunction(self.runningEventCallback, t), ChoiceBox, title=_("Repeating event currently recording... What do you want to do?"), list=list)
else:
t.disable()
self.session.nav.PowerTimer.timeChanged(t)
self.refill()
self.updateState()
def runningEventCallback(self, t, result):
if result is not None:
if result[1] == "stoponlycurrent" or result[1] == "stopall":
t.enable()
t.processRepeated(findRunningEvent=False)
self.session.nav.PowerTimer.doActivate(t)
if result[1] == "stoponlycoming" or result[1] == "stopall":
t.disable()
self.session.nav.PowerTimer.timeChanged(t)
self.refill()
self.updateState()
def removeAction(self, descr):
actions = self["actions"].actions
if descr in actions:
del actions[descr]
def updateState(self):
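        # Rebind the colour-key actions and labels to match the state of the selected timer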
cur = self["timerlist"].getCurrent()
if cur:
if self.key_red_choice != self.DELETE:
self["actions"].actions.update({"red": self.removeTimerQuestion})
self["key_red"].setText(_("Delete"))
self.key_red_choice = self.DELETE
if cur.disabled and (self.key_yellow_choice != self.ENABLE):
self["actions"].actions.update({"yellow": self.toggleDisabledState})
self["key_yellow"].setText(_("Enable"))
self.key_yellow_choice = self.ENABLE
elif cur.isRunning() and not cur.repeated and (self.key_yellow_choice != self.EMPTY):
self.removeAction("yellow")
self["key_yellow"].setText("")
self.key_yellow_choice = self.EMPTY
elif ((not cur.isRunning()) or cur.repeated) and (not cur.disabled) and (self.key_yellow_choice != self.DISABLE):
self["actions"].actions.update({"yellow": self.toggleDisabledState})
self["key_yellow"].setText(_("Disable"))
self.key_yellow_choice = self.DISABLE
else:
if self.key_red_choice != self.EMPTY:
self.removeAction("red")
self["key_red"].setText("")
self.key_red_choice = self.EMPTY
if self.key_yellow_choice != self.EMPTY:
self.removeAction("yellow")
self["key_yellow"].setText("")
self.key_yellow_choice = self.EMPTY
showCleanup = True
for x in self.list:
if (not x[0].disabled) and (x[1] == True):
break
else:
showCleanup = False
if showCleanup and (self.key_blue_choice != self.CLEANUP):
self["actions"].actions.update({"blue": self.cleanupQuestion})
self["key_blue"].setText(_("Cleanup"))
self.key_blue_choice = self.CLEANUP
elif (not showCleanup) and (self.key_blue_choice != self.EMPTY):
self.removeAction("blue")
self["key_blue"].setText("")
self.key_blue_choice = self.EMPTY
if len(self.list) == 0:
return
timer = self['timerlist'].getCurrent()
if timer:<|fim▁hole|> else:
after = getafterEvent(timer)
time = "%s %s ... %s" % (FuzzyTime(timer.begin)[0], FuzzyTime(timer.begin)[1], FuzzyTime(timer.end)[1])
duration = ("(%d " + _("mins") + ")") % ((timer.end - timer.begin) / 60)
if timer.state == RealTimerEntry.StateWaiting:
state = _("waiting")
elif timer.state == RealTimerEntry.StatePrepared:
state = _("about to start")
elif timer.state == RealTimerEntry.StateRunning:
state = _("running...")
elif timer.state == RealTimerEntry.StateEnded:
state = _("done!")
else:
state = _("<unknown>")
else:
name = ""
after = ""
time = ""
duration = ""
state = ""
for cb in self.onChangedEntry:
cb(name, after, time, duration, state)
def fillTimerList(self):
#helper function to move finished timers to end of list
def _cmp(a, b):
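            # Replacement for the cmp() builtin removed in Python 3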
return (a > b) - (a < b)
def eol_compare(x, y):
if x[0].state != y[0].state and x[0].state == RealTimerEntry.StateEnded or y[0].state == RealTimerEntry.StateEnded:
return _cmp(x[0].state, y[0].state)
return _cmp(x[0].begin, y[0].begin)
_list = self.list
del _list[:]
_list.extend([(timer, False) for timer in self.session.nav.PowerTimer.timer_list])
_list.extend([(timer, True) for timer in self.session.nav.PowerTimer.processed_timers])
if config.usage.timerlist_finished_timer_position.index: #end of list
_list.sort(key=cmp_to_key(eol_compare))
else:
_list.sort(key=lambda x: x[0].begin)
def showLog(self):
cur = self["timerlist"].getCurrent()
if cur:
self.session.openWithCallback(self.finishedEdit, PowerTimerLog, cur)
def openEdit(self):
cur = self["timerlist"].getCurrent()
if cur:
self.session.openWithCallback(self.finishedEdit, TimerEntry, cur)
def cleanupQuestion(self):
self.session.openWithCallback(self.cleanupTimer, MessageBox, _("Really delete completed timers?"))
def cleanupTimer(self, delete):
if delete:
self.session.nav.PowerTimer.cleanup()
self.refill()
self.updateState()
def removeTimerQuestion(self):
cur = self["timerlist"].getCurrent()
if not cur:
return
self.session.openWithCallback(self.removeTimer, MessageBox, _("Do you really want to delete this timer ?"), default=False)
def removeTimer(self, result):
if not result:
return
list = self["timerlist"]
cur = list.getCurrent()
if cur:
timer = cur
timer.afterEvent = AFTEREVENT.NONE
self.session.nav.PowerTimer.removeEntry(timer)
self.refill()
self.updateState()
def refill(self):
oldsize = len(self.list)
self.fillTimerList()
lst = self["timerlist"]
newsize = len(self.list)
if oldsize and oldsize != newsize:
idx = lst.getCurrentIndex()
lst.entryRemoved(idx)
else:
lst.invalidate()
def addCurrentTimer(self):
data = (int(time() + 60), int(time() + 120))
self.addTimer(PowerTimerEntry(checkOldTimers=True, *data))
def addTimer(self, timer):
self.session.openWithCallback(self.finishedAdd, TimerEntry, timer)
def finishedEdit(self, answer):
if answer[0]:
entry = answer[1]
self.session.nav.PowerTimer.timeChanged(entry)
self.fillTimerList()
self.updateState()
else:
print("[PowerTimerEdit] PowerTimerEdit aborted")
def finishedAdd(self, answer):
if answer[0]:
entry = answer[1]
simulTimerList = self.session.nav.PowerTimer.record(entry)
self.fillTimerList()
self.updateState()
else:
print("[PowerTimerEdit] TimerEdit aborted")
def finishSanityCorrection(self, answer):
self.finishedAdd(answer)
def leave(self):
self.session.nav.PowerTimer.on_state_change.remove(self.onStateChange)
self.close()
def onStateChange(self, entry):
self.refill()
self.updateState()
class PowerTimerEditListSummary(Screen):
def __init__(self, session, parent):
Screen.__init__(self, session, parent=parent)
self["name"] = StaticText("")
self["after"] = StaticText("")
self["time"] = StaticText("")
self["duration"] = StaticText("")
self["state"] = StaticText("")
self.onShow.append(self.addWatcher)
self.onHide.append(self.removeWatcher)
def addWatcher(self):
self.parent.onChangedEntry.append(self.selectionChanged)
self.parent.updateState()
def removeWatcher(self):
self.parent.onChangedEntry.remove(self.selectionChanged)
def selectionChanged(self, name, after, time, duration, state):
self["name"].text = name
self["after"].text = after
self["time"].text = time
self["duration"].text = duration
self["state"].text = state
class PowerTimerLog(TimerLog):
def __init__(self, session, timer):
TimerLog.__init__(self, session, timer)
self.skinName = "TimerLog"
self.setTitle(_("PowerTimer Log"))<|fim▁end|> | name = gettimerType(timer)
if getafterEvent(timer) == "Nothing":
after = "" |
<|file_name|>UpdateWebService.java<|end_file_name|><|fim▁begin|>package com.lamost.update;
import java.io.IOException;
import org.ksoap2.SoapEnvelope;
import org.ksoap2.serialization.SoapObject;
import org.ksoap2.serialization.SoapSerializationEnvelope;
import org.ksoap2.transport.HttpTransportSE;
import org.xmlpull.v1.XmlPullParserException;
import android.util.Log;
/**
* Created by Jia on 2016/4/6.
*/
public class UpdateWebService {
private static final String TAG = "WebService";
    // Namespace
private final static String SERVICE_NS = "http://ws.smarthome.zfznjj.com/";
    // Aliyun (Alibaba Cloud) server
private final static String SERVICE_URL = "http://101.201.211.87:8080/zfzn02/services/smarthome?wsdl=SmarthomeWs.wsdl";
// SOAP Action
private static String soapAction = "";
    // Name of the method to call
private static String methodName = "";
private HttpTransportSE ht;
private SoapSerializationEnvelope envelope;
private SoapObject soapObject;
private SoapObject result;
public UpdateWebService() {
ht = new HttpTransportSE(SERVICE_URL); // ①
ht.debug = true;
}
public String getAppVersionVoice(String appName) {
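        // Calls the getAppVersionVoice SOAP operation and returns the first property of the response, or "-1" on failure.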
ht = new HttpTransportSE(SERVICE_URL);
ht.debug = true;
methodName = "getAppVersionVoice";
        soapAction = SERVICE_NS + methodName; // usually namespace + method name
        // Create the Envelope object using the SOAP 1.1 protocol
envelope = new SoapSerializationEnvelope(SoapEnvelope.VER11); // ②
        // Instantiate the SoapObject
soapObject = new SoapObject(SERVICE_NS, methodName); // ③
        // Set soapObject as the outgoing SOAP message of the SoapSerializationEnvelope
envelope.bodyOut = soapObject; // ⑤
envelope.dotNet = true;
envelope.setOutputSoapObject(soapObject);
soapObject.addProperty("appName", appName);
try {
// System.out.println("测试1");
ht.call(soapAction, envelope);
// System.out.println("测试2");
            // Testing showed this call can occasionally throw a NullPointerException, so an extra null check was added
if (envelope != null && envelope.getResponse() != null) {
                // Get the SOAP message returned in the server response
// System.out.println("测试3");
result = (SoapObject) envelope.bodyIn; // ⑦
                // Next, parse the response data out of the SoapObject
// System.out.println("测试4");
String flag = result.getProperty(0).toString();<|fim▁hole|> return flag;
}
} catch (IOException e) {
e.printStackTrace();
} catch (XmlPullParserException e) {
e.printStackTrace();
} finally {
resetParam();
}
return -1 + "";
}
private void resetParam() {
envelope = null;
soapObject = null;
result = null;
}
}<|fim▁end|> | Log.e(TAG, "*********Webservice masterReadElecticOrder server return value:"
+ flag); |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
var request = require('supertest'),
chai = require('chai'),
expect = chai.expect,
routeValidator = require('../lib/index'),
validator = require('validator'),
express = require('express'),
bodyParser = require('body-parser'),
async = require('async');
describe('INTEGRATION index', function () {
describe('#validates(config)', function () {
describe('basic route validation', function () {
var app;
before( function () {
app = express();
app.get('/items/:item', routeValidator.validate({
params: {
item: { isMongoId: true, isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should send a 400 when route fails validation', function (done) {
request(app)
.get('/items/aasdklfjklsadlfjik')
.expect(400, done);
});
it('should send a 200 when route passes validation', function (done) {
request(app)
.get('/items/507f1f77bcf86cd799439011')
.expect(200, done);
});
it('should not care if you pass in properties it does not know about', function (done) {
request(app)
.get('/items/507f1f77bcf86cd799439011')
.query({
foo: 'bar',
notes: ''
})
.expect(200, done);
});
      it('should ignore configured properties that it has no validation method for', function (done) {
// i.e. 'description' is set on the validation config object for documentation purposes,
// but routeValidator doesn't care and just ignores it
request(app)
.get('/items/507f1f77bcf86cd799439011')
.expect(200, done);
});
});
describe('validates req.params', function () {
var app;
before( function () {
app = express();
app.get('/items/:item', routeValidator.validate({
params: {
item: { isMongoId: true, isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
app.get('/items/:item/messages/:message', routeValidator.validate({
params: {
item: { isMongoId: true, isRequired: true },
message: { isMongoId: true, isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should validate params passed into route, on success', function (done) {
request(app)
.get('/items/507f1f77bcf86cd799439011')
.expect(200, done);
});
it('should validate params passed into route, on failure', function (done) {
request(app)
.get('/items/banana')
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('params.item');
return done();
});
});
it('should handle multiple params passed into route, on success', function (done) {
request(app)
.get('/items/507f1f77bcf86cd799439011/messages/407f1f77bcf86cd799439011')
.expect(200, done);
});
it('should handle multiple params passed into route, on failure', function (done) {
request(app)
.get('/items/507f1f77bcf86cd799439011/messages/banana')
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('params.message');
return done();
});
});
});
describe('validates req.body', function () {
var app;
before( function () {
app = express();
app.use(bodyParser.json());
app.post('/items', routeValidator.validate({
body: {
name: { isRequired: true },
date: { isRequired: true, isDate: true },
type: { isRequired: true, isIn: ['lawn', 'garden', 'tools'] },
user: { isRequired: true, isEmail: true },
uuid: { isRequired: false, isUUID: true },
url: { isURL: true },
rate: { isInt: true, toInt: true }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should validate params passed into body, on success', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw',
date: new Date(),
type: 'tools',
user: '[email protected]',
uuid: 'A987FBC9-4BED-3078-CF07-9141BA07C9F3',
url: 'http://tool.com/chainsaw/real-big'
})
.expect(200, done);
});
it('should validate params passed into body, on failure', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw',
date: new Date(),
type: 'tool', // invalid
user: '[email protected]',
uuid: 'A987FBC9-4BED-3078-CF07-9141BA07C9F3',
url: 'http://tool.com/chainsaw/real-big'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('body.type');
return done();
});
});
it('should enforce isRequired', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw',
date: new Date(),
type: 'tools',
// user: '[email protected]',
uuid: 'A987FBC9-4BED-3078-CF07-9141BA07C9F3',
url: 'http://tool.com/chainsaw/real-big'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('body.user');
return done();
});
});
it('should not fail validation when isRequired is set to false and param is not set', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw',
date: new Date(),
type: 'tools',
user: '[email protected]',
// uuid: 'A987FBC9-4BED-3078-CF07-9141BA07C9F3',
url: 'http://tool.com/chainsaw/real-big'
})
.set('Authorization', 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
.expect(200, done);
});
it('should not fail validation when isRequired is not set and param is not set', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw',
date: new Date(),
type: 'tools',
user: '[email protected]',
uuid: 'A987FBC9-4BED-3078-CF07-9141BA07C9F3',
// url: 'http://tool.com/chainsaw/real-big'
})
.expect(200, done);
});
it('should validate params if they exist, even if isRequired is set to false', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw',
date: new Date(),
type: 'tools',
user: '[email protected]',
uuid: 'banana', // invalid and not required
url: 'http://tool.com/chainsaw/real-big'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('body.uuid');
return done();
});
});
});
describe('validates req.query', function () {
var app;
before( function () {
app = express();
app.use(bodyParser.json());
app.get('/items', routeValidator.validate({
query: {
since: { isDate: true },
limit: { isInt: true, isRequired: true },
page: { isInt: true, isRequired: false },
sort: { isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should validate query params, on success', function (done) {
request(app)
.get('/items')
.query({
since: new Date(),
limit: 20,
page: 1,
sort: 'date'
})
.expect(200, done);
});
it('should validate query params, on failure', function (done) {
request(app)
.get('/items')
.query({
since: new Date(),
limit: 'get me all of it', // invalid
page: 1,
sort: 'date'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('query.limit');
return done();
});
});
it('should enforce isRequired', function (done) {
request(app)
.get('/items')
.query({
since: new Date(),
// limit: 20,
page: 1,
sort: 'date'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('query.limit');
return done();
});
});
it('should not fail validation when isRequired is set to false and param is not set', function (done) {
request(app)
.get('/items')
.query({
since: new Date(),
limit: 20,
// page: 1,
sort: 'date'
})
.expect(200, done);
});
it('should not fail validation when isRequired is not set and param is not set', function (done) {
request(app)
.get('/items')
.query({
// since: new Date(),
limit: 20,
page: 1,
sort: 'date'
})
.expect(200, done);
});
it('should validate params if they exist, even if isRequired is set to false', function (done) {
request(app)
.get('/items')
.query({
since: 'yesterday', // invalid
limit: 20,
page: 1,
sort: 'date'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('query.since');
return done();
});<|fim▁hole|> var app;
before( function () {
app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.post('/items', routeValidator.validate({
body: {
name: { isRequired: true }
},
headers: {
'content-type': { isRequired: true, equals: 'application/json' },
'authorization': { isRequired: true },
'accept-version': { isRequired: false, isIn: ['1.0', '2.0'] }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should validate headers, on success', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw'
})
.set('Authorization', 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
.set('Accept-Version', '1.0')
.expect(200, done);
});
it('should validate headers, on failure', function (done) {
request(app)
.post('/items')
.type('form')
.send({
name: 'Chainsaw'
})
.set('Authorization', 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('headers.content-type');
return done();
});
});
it('should enforce isRequired', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('headers.authorization');
return done();
});
});
it('should validate headers if they exist, even if isRequired is set to false', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw'
})
.set('Authorization', 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
.set('Accept-Version', '0.0')
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('headers.accept-version');
return done();
});
});
});
describe('with default coercers', function () {
var app;
before( function () {
app = express();
app.use(bodyParser.json());
app.put('/items/:item', routeValidator.validate({
body: {
user: { isRequired: false, isEmail: true, normalizeEmail: true },
rate: { isRequired: true, isInt: true, toInt: true }
},
params: {
item: { isMongoId: true, isRequired: true }
}
}), function (req, res) {
// Make sure values are coerced
if (typeof req.body.rate !== 'number' || !validator.isLowercase(req.body.user)) {
console.log(JSON.stringify(req.body, null, 2));
return res.status(500).end();
}
return res.status(200).end();
});
});
it('should coerce values when configured with coercers', function (done) {
request(app)
.put('/items/507f1f77bcf86cd799439011')
.send({
user: '[email protected]',
rate: '100'
})
.expect(200, done);
});
});
describe('set callNext in route', function () {
var app;
before( function () {
app = express();
app.use(bodyParser.json());
app.get('/users', routeValidator.validate({
query: {
since: { isDate: true },
limit: { isInt: true, isRequired: true },
page: { isInt: true, isRequired: false },
sort: { isRequired: true }
},
callNext: true
}), function (req, res) {
return res.status(200).end();
});
app.use( function (err, req, res, next) { // jshint ignore:line
return res.status(400).send({
error: err.message,
message: 'calledNext'
});
});
});
it('should do nothing different if callNext is set to true and validation passes', function (done) {
request(app)
.get('/users')
.query({
since: new Date(),
limit: 20,
page: 1,
sort: 'email'
})
.expect(200, done);
});
it('should call next(err) if validation fails and callNext is set to true', function (done) {
request(app)
.get('/users')
.query({
since: new Date(),
limit: 'twenty', // invalid
page: 1,
sort: 'email'
})
.expect(400, function (err, res) {
console.log(JSON.stringify(res.body, null, 2));
if (err) return done(err);
expect(res.body).to.have.property('message').that.equals('calledNext');
expect(res.body).to.have.property('error').that.contains('query.limit');
return done();
});
});
});
describe('set errorHandler in route', function () {
var app;
before( function () {
app = express();
app.get('/users/:user', routeValidator.validate({
params: {
user: { isRequired: true, isEmail: true }
},
errorHandler: function (err, req, res) {
return res.status(400).send({
message: 'routeErrorHandler',
error: err.message
});
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should do nothing different if errorHandler is set to true and validation passes', function (done) {
request(app)
.get('/users/[email protected]')
.expect(200, done);
});
it('should call errorHandler(err, req, res, next) if validation fails and errorHandler is set to true', function (done) {
request(app)
.get('/users/banana')
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('message').that.equals('routeErrorHandler');
expect(res.body).to.have.property('error').that.contains('params.user');
return done();
});
});
});
describe('when configured scope is undefined', function () {
var app;
before( function () {
app = express();
// req.body will always be undefined because there is no body parser
app.post('/items', routeValidator.validate({
body: {
name: { isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
app.post('/users', routeValidator.validate({
body: {
email: { isRequired: false }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should invalidate the request if config has required field', function (done) {
request(app)
.post('/items')
.send({
name: 'Chainsaw'
})
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.contains('body.name');
return done();
});
});
it('should not invalidate the request if config has no required fields', function (done) {
request(app)
.post('/users')
.expect(200, done);
});
});
describe('prop.message is set', function () {
var app, message = 'Custom Message';
before( function () {
app = express();
app.get('/items/:item', routeValidator.validate({
params: {
item: { isRequired: true, isMongoId: false, message: message }
}
}), function (req, res) {
return res.status(200).end();
});
});
it('should send prop.message as a custom error message', function (done) {
request(app)
.get('/items/abc')
.expect(400, function (err, res) {
if (err) return done(err);
expect(res.body).to.have.property('error').that.equals(message);
return done();
});
});
});
});
/***********
    NEEDS TO TEST PRECEDENCE OF ERROR HANDLING DECISIONS
1. config.errorHandler
2. config.callNext
3. routeValidator._callNext
4. routeValidator._errorHandler
Test Cases
1. callNext set to false in config, true in app
***********/
describe('#set(key, value)', function () {
var errorHandler, callNext;
before( function () {
errorHandler = routeValidator._errorHandler;
callNext = routeValidator._callNext;
});
after( function () {
routeValidator._errorHandler = errorHandler;
routeValidator._callNext = callNext;
});
it('should allow setting callNext to pass err into next rather than default behavior', function () {
routeValidator.set('callNext', true);
expect(routeValidator).to.have.property('_callNext').that.is.true;
});
it('should allow setting the errorHandler to override default behavior', function () {
var newErrorHandler = function (err, req, res) {
return res.status(404).send({
message: 'errorHandled'
});
};
routeValidator.set('errorHandler', newErrorHandler);
expect(routeValidator).to.have.property('_errorHandler').that.equals(newErrorHandler);
});
it('should do nothing if key is not recognized', function () {
routeValidator.set('invalid', 'banana');
expect(routeValidator).to.not.have.property('invalid');
});
});
describe('#addValidator(name, fn)', function () {
var isNumeric, app;
before( function () {
isNumeric = routeValidator._validators.isNumeric;
app = express();
app.use(bodyParser.json());
app.post('/users', routeValidator.validate({
body: {
name: { isRequired: true },
age: { isRequired: true, isValidAge: true },
email: { isRequired: true, isEmail: true }
}
}), function (req, res) {
return res.status(200).end();
});
app.put('/users/:user', routeValidator.validate({
body: {
age: { isNumeric: true, isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
});
after( function () {
routeValidator._validators.isNumeric = isNumeric;
});
it('should not add validator or break if not passed a function', function () {
routeValidator.addValidator('isNotFunction', 'banana');
expect(routeValidator._validators).to.not.have.property('isNotFunction');
});
it('should allow adding a custom validator', function (done) {
routeValidator.addValidator('isValidAge', function (str) {
var age = +str;
return age ? (age > 0 && age < 120) : false;
});
expect(routeValidator._validators).to.have.property('isValidAge');
async.parallel([
function (callback) {
request(app)
.post('/users')
.send({
name: 'Billy',
age: 23,
email: '[email protected]'
})
            .expect(200, function (err) {
              // Finish this branch through the async.parallel callback rather than done
              return callback(err);
            });
},
function (callback) {
request(app)
.post('/users')
.send({
name: 'Invalid',
age: 2000,
email: '[email protected]'
})
.expect(400, function (err, res) {
if (err) return callback(err);
expect(res.body).to.have.property('error').that.contains('body.age');
return callback();
});
}
], done);
});
it('should override existing validators of the same name', function (done) {
// Overrides existing validator
routeValidator.addValidator('isNumeric', function (str) {
var validNumbers = ['one', 'two', 'three', 'four', 'five', 'six', 'seven'];
return validNumbers.indexOf(str) !== -1;
});
expect(routeValidator._validators).to.have.property('isNumeric');
async.parallel([
function (callback) {
request(app)
.put('/users/billy')
.send({
age: 'seven'
})
.expect(200, callback);
},
function (callback) {
request(app)
.put('/users/invalid')
.send({
age: 20
})
.expect(400, function (err, res) {
if (err) return callback(err);
expect(res.body).to.have.property('error').that.contains('body.age');
return callback();
});
}
], done);
});
});
describe('#addValidators(obj)', function () {
var isNumeric, app;
before( function () {
isNumeric = routeValidator._validators.isNumeric;
app = express();
app.use(bodyParser.json());
app.post('/turtles', routeValidator.validate({
body: {
size: { isRequired: true, isNumeric: true },
weight: { isRequired: true, isTurtleWeight: true },
name: { isRequired: true }
}
}), function (req, res) {
return res.status(200).end();
});
});
after( function () {
routeValidator._validators.isNumeric = isNumeric;
});
it('should not break if passing in an empty object', function () {
routeValidator.addValidators({});
});
it('should not add validator or break if key is not function', function () {
routeValidator.addValidators({
// Adds invalid
isNotValidator: 'oops'
});
expect(routeValidator._validators).to.not.have.property('isNotValidator');
});
it('should allow passing in an object of validators and set them internally', function (done) {
routeValidator.addValidators({
// Overrides existing
isNumeric: function (str) {
var validNumbers = ['eight', 'nine', 'ten'];
return validNumbers.indexOf(str) !== -1;
},
// Adds new
isTurtleWeight: function (str) {
var weight = +str;
return weight ? (weight > 10 && weight < 800) : false;
}
});
expect(routeValidator._validators).to.have.property('isNumeric');
expect(routeValidator._validators).to.have.property('isTurtleWeight');
async.parallel([
function (callback) {
request(app)
.post('/turtles')
.send({
size: 'nine',
weight: 500,
name: 'Stanley'
})
.expect(200, callback);
},
function (callback) {
request(app)
.post('/turtles')
.send({
size: 9,
weight: 600,
name: 'Loopie'
})
.expect(400, function (err, res) {
if (err) return callback(err);
expect(res.body).to.have.property('error').that.contains('body.size');
return callback();
});
},
function (callback) {
request(app)
.post('/turtles')
.send({
size: 'ten',
weight: 60000,
name: 'Loopie'
})
.expect(400, function (err, res) {
if (err) return callback(err);
expect(res.body).to.have.property('error').that.contains('body.weight');
return callback();
});
}
], done);
});
});
describe('addCoercer()', function () {
var app;
before( function () {
app = express();
app.get('/turtles', routeValidator.validate({
query: {
sizeStr: { isRequired: false, toLowerCaseSize: true, isIn: ['eight', 'nine', 'ten'] },
weightRange: { isRequired: false, isWeightRange: true, toRangeArray: true }
}
}), function (req, res) {
if (req.query.weightRange) {
// Make sure that it was converted properly
// '100-500' -> [100, 500]
var range = req.query.weightRange;
if (!(range instanceof Array) || range.length !== 2 ||
typeof range[0] !== 'number' || typeof range[1] !== 'number') {
return res.status(500).end();
}
}
return res.status(200).end();
});
});
describe('config.stage === "before"', function () {
it('should be able to add a custom coercer run before validation', function (done) {
routeValidator.addCoercer('toLowerCaseSize', {
stage: 'before',
coerce: function (str) {
return str.toLowerCase();
}
});
expect(routeValidator._before).to.have.property('toLowerCaseSize');
async.parallel([
function (callback) {
request(app)
.get('/turtles')
.query({
sizeStr: 'EIGHT'
})
.expect(200, callback);
},
function (callback) {
request(app)
.get('/turtles')
.query({
sizeStr: 'nine'
})
.expect(200, callback);
}
], done);
});
it('should not add coercer if it is not a function', function () {
routeValidator.addCoercer('apple', {
stage: 'before',
coerce: 'apple'
});
expect(routeValidator._before).to.not.have.property('apple');
});
});
describe('config.stage === "after"', function () {
it('should be able to add a custom coercer run before validation', function (done) {
routeValidator.addValidator('isWeightRange', function (str) {
var arr = str.split('-');
return +arr[0] && +arr[1];
});
routeValidator.addCoercer('toRangeArray', {
stage: 'after',
coerce: function (str) {
var arr = str.split('-');
arr[0] = +arr[0];
arr[1] = +arr[1];
return arr;
}
});
expect(routeValidator._after).to.have.property('toRangeArray');
async.parallel([
function (callback) {
request(app)
.get('/turtles')
.query({
weightRange: '500'
})
.expect(400, callback);
},
function (callback) {
request(app)
.get('/turtles')
.query({
weightRange: '100-500'
})
.expect(200, callback);
}
], done);
});
it('should not add coercer if it is not a function', function () {
routeValidator.addCoercer('peach', {
stage: 'after',
coerce: 'peach'
});
expect(routeValidator._before).to.not.have.property('peach');
});
});
describe('invalid config.stage', function () {
it('should do nothing if config.stage is invalid', function () {
routeValidator.addCoercer('banana', {
stage: 'banana',
coerce: function () {
return 'banana';
}
});
expect(routeValidator._before).to.not.have.property('banana');
});
it('should do nothing if config.stage is not set', function () {
routeValidator.addCoercer('pear', {
coerce: function () {
return 'pear';
}
});
expect(routeValidator._before).to.not.have.property('pear');
});
});
});
describe('#addCoercers(obj)', function () {
var toDate, app;
before( function () {
toDate = routeValidator._before.toDate;
app = express();
app.get('/turtles', routeValidator.validate({
query: {
slug: { isRequired: false, toLowerCase: true, replaceSpaces: true },
minDate: { toDate: true }
}
}), function (req, res) {
if (req.query.slug && req.query.slug.indexOf(' ') !== -1) {
return res.status(500).end();
}
return res.status(200).end();
});
});
after( function () {
routeValidator._before.toDate = toDate;
});
it('should not break if passing in an empty object', function () {
routeValidator.addCoercers({});
});
it('should not add coercer or break if key is not a config object', function () {
routeValidator.addCoercers({
// Adds invalid
isNotCoercer: 'oops',
alsoNotCoercer: {
stage: 'notAstage',
coerce: function () {
return true;
}
},
andNotOne: {
stage: 'before',
coerce: 'funky'
}
});
expect(routeValidator._before).to.not.have.property('isNotCoercer');
expect(routeValidator._after).to.not.have.property('isNotCoercer');
expect(routeValidator._before).to.not.have.property('alsoNotCoercer');
expect(routeValidator._after).to.not.have.property('alsoNotCoercer');
      expect(routeValidator._before).to.not.have.property('andNotOne');
      expect(routeValidator._after).to.not.have.property('andNotOne');
});
it('should allow passing in an object of validators and set them internally', function (done) {
routeValidator.addCoercers({
// Overrides existing
toDate: {
stage: 'after',
coerce: function () {
return 'date';
}
},
// Adds new
toLowerCase: {
stage: 'before',
coerce: function (str) {
return str.toLowerCase();
}
},
replaceSpaces: {
stage: 'after',
coerce: function (str) {
return str.replace(/\s/g, '-');
}
}
});
expect(routeValidator._after).to.have.property('toDate');
expect(routeValidator._before).to.have.property('toLowerCase');
expect(routeValidator._after).to.have.property('replaceSpaces');
async.parallel([
function (callback) {
request(app)
.get('/turtles')
.query({
minDate: new Date()
})
.expect(200, callback);
},
function (callback) {
request(app)
.get('/turtles')
.query({
name: 'Mr Turtles'
})
.expect(200, callback);
},
function (callback) {
request(app)
.get('/turtles')
.query({
slug: 'My Sweet Turtle'
})
.expect(200, callback);
}
], done);
});
});
});<|fim▁end|> | });
});
describe('validates req.headers', function () { |
<|file_name|>device.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2013, First Party Software
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import gsxws
from django.db.models import Q
from django.contrib import messages
from django.core.cache import cache
from django.shortcuts import render, redirect, get_object_or_404
from django.utils.translation import ugettext as _
from django.template.defaultfilters import slugify
from django.views.decorators.cache import cache_page
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from servo.models import Device, Order, Product, GsxAccount, ServiceOrderItem
from servo.forms.devices import DeviceForm, DeviceUploadForm, DeviceSearchForm
class RepairDiagnosticResults:
pass
class DiagnosticResults(object):
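    """Flattens GSX diagnostic test, profile, and report data into a single dict."""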
def __init__(self, diags):
if not diags.diagnosticTestData:
raise gsxws.GsxError('Missing diagnostic data')
self.diags = dict(result={}, profile={}, report={})
for r in diags.diagnosticTestData.testResult.result:
self.diags['result'][r.name] = r.value
for r in diags.diagnosticProfileData.profile.unit.key:
self.diags['profile'][r.name] = r.value
for r in diags.diagnosticProfileData.report.reportData.key:
self.diags['report'][r.name] = r.value
def __iter__(self):
return iter(self.diags)
def model_from_slug(product_line, model=None):
"""
Returns product description for model slug or models dict for
the specified product line
"""
if not cache.get("slugmap"):
slugmap = {} # Map model slug to corresponding product description
product_lines = gsxws.products.models()
for k, v in product_lines.items():
d = {}
for p in v['models']:
slug = slugify(p)
d[slug] = p
slugmap[k] = d
cache.set("slugmap", slugmap)
models = cache.get("slugmap").get(product_line)
if model is not None:
return models.get(model)
return models
def prep_list_view(request, product_line=None, model=None):
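    """Builds the shared context (title, product lines, paginated devices) for device list views."""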
title = _('Devices')
all_devices = Device.objects.all()
product_lines = gsxws.products.models()
if product_line is None:
product_line = product_lines.keys()[0]
models = model_from_slug(product_line)
if model is None:
model = models.keys()[0]
title = product_lines[product_line]['name']
else:
title = models.get(model)
if product_line == "OTHER":
all_devices = all_devices.filter(product_line=product_line)
else:
all_devices = all_devices.filter(slug=model)
page = request.GET.get('page')
paginator = Paginator(all_devices, 50)
try:
devices = paginator.page(page)
except PageNotAnInteger:
devices = paginator.page(1)
except EmptyPage:
devices = paginator.page(paginator.num_pages)
return locals()
def prep_detail_view(request, pk, product_line=None, model=None):
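    """Extends the list-view context with the selected device and its description."""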
if pk is None:
device = Device()
else:
device = Device.objects.get(pk=pk)
data = prep_list_view(request, product_line, model)
data['device'] = device
data['title'] = device.description
return data
def index(request, product_line=None, model=None):
if request.session.get('return_to'):
del(request.session['return_to'])
data = prep_list_view(request, product_line, model)
if data['all_devices'].count() > 0:
return redirect(data['all_devices'].latest())
return render(request, "devices/index.html", data)
def delete_device(request, product_line, model, pk):
dev = Device.objects.get(pk=pk)
if request.method == 'POST':
from django.db.models import ProtectedError
try:
dev.delete()
messages.success(request, _("Device deleted"))
except ProtectedError:
messages.error(request, _("Cannot delete device with GSX repairs"))
return redirect(dev)
return redirect(index)
data = {'action': request.path}
data['device'] = dev
return render(request, "devices/remove.html", data)
def edit_device(request, pk=None, product_line=None, model=None):
"""
Edits an existing device or adds a new one
"""
device = Device()
device.sn = request.GET.get('sn', '')
if product_line is not None:
device.product_line = product_line
if model is not None:
device.product_line = product_line
device.description = model_from_slug(product_line, model)
if pk is not None:
device = Device.objects.get(pk=pk)
form = DeviceForm(instance=device)
if request.method == "POST":
form = DeviceForm(request.POST, request.FILES, instance=device)
if form.is_valid():
device = form.save()
messages.success(request, _(u"%s saved") % device.description)
device.add_tags(request.POST.getlist('tag'))
return redirect(view_device,
pk=device.pk,
product_line=device.product_line,
model=device.slug)
data = prep_detail_view(request, pk, product_line, model)
data['form'] = form
return render(request, 'devices/form.html', data)
def view_device(request, pk, product_line=None, model=None):
data = prep_detail_view(request, pk, product_line, model)
return render(request, "devices/view.html", data)
def diagnostics(request, pk):
"""
Fetches MRI diagnostics or initiates iOS diags from GSX
"""
device = get_object_or_404(Device, pk=pk)
if request.GET.get('a') == 'init':
if request.method == 'POST':
from gsxws import diagnostics
order = request.POST.get('order')
order = device.order_set.get(pk=order)
email = request.POST.get('email')
diag = diagnostics.Diagnostics(serialNumber=device.sn)
diag.emailAddress = email
diag.shipTo = order.location.gsx_shipto
try:
GsxAccount.default(request.user)
res = diag.initiate()
msg = _('Diagnostics initiated - diags://%s') % res
order.notify("init_diags", msg, request.user)
messages.success(request, msg)
        except gsxws.GsxError as e:
messages.error(request, e)
return redirect(order)
order = request.GET.get('order')
order = device.order_set.get(pk=order)
customer = order.customer
url = request.path
return render(request, "devices/diagnostic_init.html", locals())
if request.GET.get('a') == 'get':
try:
diagnostics = device.get_diagnostics(request.user)
if device.is_ios():
diagnostics = DiagnosticResults(diagnostics)
return render(request, "devices/diagnostic_ios.html", locals())
return render(request, "devices/diagnostic_results.html", locals())
        except gsxws.GsxError as e:
return render(request, "devices/diagnostic_error.html", {'error': e})
return render(request, "devices/diagnostics.html", locals())
def get_gsx_search_results(request, what, param, query):
"""
The second phase of a GSX search.
There should be an active GSX session open at this stage.
"""
data = {}
results = []
query = query.upper()
device = Device(sn=query)
error_template = "search/results/gsx_error.html"
# @TODO: this isn't a GSX search. Move it somewhere else.
if what == "orders":
try:
if param == 'serialNumber':
device = Device.objects.get(sn__exact=query)
if param == 'alternateDeviceId':
device = Device.objects.get(imei__exact=query)
except (Device.DoesNotExist, ValueError,):
return render(request, "search/results/gsx_notfound.html")
orders = device.order_set.all()
return render(request, "orders/list.html", locals())
if what == "warranty":
        # Update warranty info if we've been here before
try:
device = Device.objects.get(sn__exact=query)
device.update_gsx_details()
except Exception:
try:
device = Device.from_gsx(query)
except Exception as e:
return render(request, error_template, {'message': e})
results.append(device)
# maybe it's a device we've already replaced...
try:
soi = ServiceOrderItem.objects.get(sn__iexact=query)
results[0].repeat_service = soi.order
except ServiceOrderItem.DoesNotExist:
pass
if what == "parts":
# looking for parts
if param == "partNumber":
# ... with a part number
part = gsxws.Part(partNumber=query)
try:
partinfo = part.lookup()
            except gsxws.GsxError as e:
return render(request, error_template, {'message': e})
product = Product.from_gsx(partinfo)
cache.set(query, product)
results.append(product)
if param == "serialNumber":
# ... with a serial number
try:
results = device.get_parts()
data['device'] = device
            except Exception as e:
return render(request, error_template, {'message': e})
if param == "productName":
product = gsxws.Product(productName=query)
parts = product.parts()
for p in parts:
results.append(Product.from_gsx(p))
if what == "repairs":
# Looking for GSX repairs
if param == "serialNumber":
# ... with a serial number
try:
device = gsxws.Product(query)
#results = device.repairs()
# @TODO: move the encoding hack to py-gsxws
for i, p in enumerate(device.repairs()):
d = {'purchaseOrderNumber': p.purchaseOrderNumber}
d['repairConfirmationNumber'] = p.repairConfirmationNumber
d['createdOn'] = p.createdOn
d['customerName'] = p.customerName.encode('utf-8')
d['repairStatus'] = p.repairStatus
results.append(d)
            except gsxws.GsxError as e:
return render(request, "search/results/gsx_notfound.html")
elif param == "dispatchId":
# ... with a repair confirmation number
repair = gsxws.Repair(number=query)
try:
results = repair.lookup()
except gsxws.GsxError as message:
return render(request, error_template, locals())
return render(request, "devices/search_gsx_%s.html" % what, locals())
def search_gsx(request, what, param, query):
"""
The first phase of a GSX search
"""
title = _(u'Search results for "%s"') % query
try:
act = request.session.get("gsx_account")
act = None
if act is None:
GsxAccount.default(user=request.user)
else:
act.connect(request.user)
except gsxws.GsxError as message:
return render(request, "devices/search_gsx_error.html", locals())
if request.is_ajax():
if what == "parts":
try:
dev = Device.from_gsx(query)
products = dev.get_parts()
return render(request, "devices/parts.html", locals())
except gsxws.GsxError as message:
return render(request, "search/results/gsx_error.html", locals())
return get_gsx_search_results(request, what, param, query)
return render(request, "devices/search_gsx.html", locals())
def search(request):
"""
Searching for devices from the main navbar
"""
query = request.GET.get("q", '').strip()
request.session['search_query'] = query
query = query.upper()
valid_arg = gsxws.validate(query)
if valid_arg in ('serialNumber', 'alternateDeviceId',):
return redirect(search_gsx, "warranty", valid_arg, query)
devices = Device.objects.filter(
Q(sn__icontains=query) | Q(description__icontains=query)
)
title = _(u'Devices matching "%s"') % query
return render(request, "devices/search.html", locals())
def find(request):
"""
Searching for device from devices/find
"""
title = _("Device search")
form = DeviceSearchForm()
results = Device.objects.none()
if request.method == 'POST':
form = DeviceSearchForm(request.POST)
if form.is_valid():
fdata = form.cleaned_data
results = Device.objects.all()
if fdata.get("product_line"):
results = results.filter(product_line__in=fdata['product_line'])
if fdata.get("warranty_status"):
results = results.filter(warranty_status__in=fdata['warranty_status'])
if fdata.get("description"):
results = results.filter(description__icontains=fdata['description'])
if fdata.get("sn"):
results = results.filter(sn__icontains=fdata['sn'])
if fdata.get("date_start"):
results = results.filter(created_at__range=[fdata['date_start'],
fdata['date_end']])
paginator = Paginator(results, 100)
page = request.GET.get("page")
try:
devices = paginator.page(page)
except PageNotAnInteger:
devices = paginator.page(1)
except EmptyPage:
devices = paginator.page(paginator.num_pages)
return render(request, "devices/find.html", locals())
#@cache_page(60*5)
def parts(request, pk, order_id, queue_id):
"""
Lists available parts for this device/order
    taking into account the order's queue's GSX Sold-To
and the Location's corresponding GSX account
"""
from decimal import InvalidOperation<|fim▁hole|>
try:
# remember the right GSX account
act = GsxAccount.default(request.user, order.queue)
request.session['gsx_account'] = act.pk
products = device.get_parts()
except gsxws.GsxError as message:
return render(request, "search/results/gsx_error.html", locals())
except AttributeError:
message = _('Invalid serial number for parts lookup')
return render(request, "search/results/gsx_error.html", locals())
except InvalidOperation:
message = _('Error calculating prices. Please check your system settings.')
return render(request, "search/results/gsx_error.html", locals())
return render(request, "devices/parts.html", locals())
def model_parts(request, product_line=None, model=None):
"""
Shows parts for this device model
"""
data = prep_list_view(request, product_line, model)
if cache.get("slugmap") and model:
models = cache.get("slugmap")[product_line]
data['what'] = "parts"
data['param'] = "productName"
data['query'] = models[model]
data['products'] = Product.objects.filter(tags__tag=data['query'])
return render(request, "devices/index.html", data)
def choose(request, order_id):
"""
Choosing a device from within an SRO
Does GSX lookup in case device is not found locally
"""
context = {'order': order_id}
if request.method == "POST":
query = request.POST.get('q').upper()
results = Device.objects.filter(Q(sn__iexact=query) | Q(imei=query))
if len(results) < 1:
try:
current_order = request.session.get("current_order_id")
current_order = Order.objects.get(pk=current_order)
if current_order and current_order.queue:
GsxAccount.default(request.user, current_order.queue)
else:
GsxAccount.default(request.user)
results = [Device.from_gsx(query)]
except Exception as e:
context['error'] = e
return render(request, "devices/choose-error.html", context)
context['results'] = results
return render(request, "devices/choose-list.html", context)
return render(request, "devices/choose.html", context)
def upload_devices(request):
"""
User uploads device DB as tab-delimited CSV file
SN USERNAME PASSWORD NOTES
"""
gsx_account = None
form = DeviceUploadForm()
if request.method == "POST":
form = DeviceUploadForm(request.POST, request.FILES)
if form.is_valid():
i = 0
df = form.cleaned_data['datafile'].read()
if form.cleaned_data.get('do_warranty_check'):
gsx_account = GsxAccount.default(request.user)
for l in df.split("\r"):
l = l.decode("latin-1").encode("utf-8")
row = l.strip().split("\t")
if gsx_account:
try:
device = Device.from_gsx(row[0])
                    except Exception as e:
messages.error(request, e)
break
else:
device = Device.objects.get_or_create(sn=row[0])[0]
try:
device.username = row[1]
device.password = row[2]
device.notes = row[3]
except IndexError:
pass
device.save()
i += 1
if form.cleaned_data.get("customer"):
customer = form.cleaned_data['customer']
customer.devices.add(device)
messages.success(request, _("%d devices imported") % i)
return redirect(index)
data = {'form': form, 'action': request.path}
return render(request, "devices/upload_devices.html", data)
def update_gsx_details(request, pk):
"""
Updates devices GSX warranty details
"""
device = get_object_or_404(Device, pk=pk)
try:
GsxAccount.default(request.user)
device.update_gsx_details()
messages.success(request, _("Warranty status updated successfully"))
    except Exception as e:
messages.error(request, e)
if request.session.get('return_to'):
return redirect(request.session['return_to'])
return redirect(device)
def get_info(request, pk):
device = get_object_or_404(Device, pk=pk)
return render(request, "devices/get_info.html", locals())<|fim▁end|> |
device = Device.objects.get(pk=pk)
order = device.order_set.get(pk=order_id) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
import numpy as np
import gc
import matplotlib.pyplot as plt
from random import seed, sample, randint
from ransac import LineModel, ransac
from time import time
random_seed = 0
num_iterations = 100
num_samples = 1000
noise_ratio = 0.8
num_noise = int(noise_ratio * num_samples)
def setup():
global data, model
seed(random_seed)
X = np.asarray(range(num_samples))
Y = 2 * X
noise = [randint(0, 2 * (num_samples - 1)) for i in xrange(num_noise)]
Y[sample(xrange(len(Y)), num_noise)] = noise
data = np.asarray([X, Y]).T
model = LineModel()
plt.plot(X, Y, 'bx')
def run():
global params, residual, mean_time
gc.disable()
start_time = time()
for i in xrange(num_iterations):
try:
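            # The positional arguments appear to be the minimal sample size
            # (2 points define a line) and the inlier-count goal,
            # (1 - noise_ratio) * num_samples = 200 here.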
(params, inliers, residual) = ransac(data, model, 2, (1 - noise_ratio) * num_samples)
except ValueError:
pass
end_time = time()
mean_time = (end_time - start_time) / num_iterations
gc.enable()
def summary():
if params:
print ' Parameters '.center(40, '=')
print params
print ' Residual '.center(40, '=')
print residual
print ' Time '.center(40, '=')
print '%.1f msecs mean time spent per call' % (1000 * mean_time)
X = np.asarray([0, num_samples - 1])
Y = params[0] * X + params[1]
plt.plot(X, Y, 'k-')
else:
print 'RANSAC failed to find a sufficiently good fit for the data.'
plt.show()
<|fim▁hole|><|fim▁end|> | if __name__ == '__main__':
setup()
run()
summary() |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from .fields import BitField, Field
from nettest.exceptions import NettestError
import struct
class PacketMeta(type):
def __new__(cls, name, bases, attrs):
fields = attrs.get('fields')
if fields is None:
raise NettestError(_("packet class must have 'fields' field"))
_fields = []
for fieldname in attrs['fields']:
field = attrs.get(fieldname)
if field is None:
for baseclass in bases:
field = getattr(baseclass, fieldname)
if field is not None:
break
else:
raise NettestError(_("field '%s' doesn't exsists in class %s")%(fieldname, name))
if not cls.__check_field_type(cls, field):
raise NettestError(_("field '%s' in class %s should be in type (Field, Packet, list)")%(fieldname, name))
_fields.append((fieldname, field))
if isinstance(field, Field):
attrs[fieldname] = field.default_value
if '_fields' in attrs:
raise NettestError(_("the name '_fields' is reserved in class %s")%(name))
attrs['_fields']= _fields
return super(PacketMeta, cls).__new__(cls, name, bases, attrs)
@staticmethod
def __check_field_type(cls, field):
if not isinstance(field, (Field, Packet, list)):
return False
if isinstance(field, (list)):
for subfield in field:
if not cls.__check_field_type(cls, subfield):
return False
return True
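# Declaration sketch (constructor arguments are illustrative -- see fields.py
# for the real Field/BitField signatures). PacketMeta walks `fields` in order
# and collects the matching class attributes into `_fields`:
#
#   class Header(Packet):
#       fields = ['version', 'flags', 'length']
#       version = BitField(...)   # e.g. a 4-bit field
#       flags = BitField(...)     # e.g. a 4-bit field
#       length = Field(...)       # e.g. a 2-byte field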
class BitDumper(object):
def __init__(self):
self.data= []
self.data_len = []
self.data_len_sum = 0
def clear(self):
self.data = []
self.data_len = []
self.data_len_sum = 0
def push(self, data, length):
data = int(data)
        if data < 0 or data >= 2**length:  # value must fit in `length` bits
raise NettestError(_("bit value out of range"))
self.data.append(data)
self.data_len.append(length)
self.data_len_sum += length
def dump(self):
if self.data_len_sum % 8 != 0:
raise NettestError(_("incorrect bit field length"))
data = 0
left_len = self.data_len_sum
index = 0
for field_data in self.data:
data += field_data<<(left_len - self.data_len[index])
left_len -= self.data_len[index]
index += 1
        length = self.data_len_sum // 8
if length == 1:
return struct.pack('!B', int(data))
elif length == 2:
return struct.pack('!H', int(data))
elif length == 4:
return struct.pack('!I', int(data))
elif length == 8:
return struct.pack('!Q', int(data))
else:
raise NettestError(_("too long bit field"))
class BitLoader(object):
def __init__(self, packet):
self.fields = []
self.bit_len_sum = 0
self.packet = packet
def clear(self):
self.fields = []
self.bit_len_sum = 0
def push(self, fieldname, field):
self.fields.append((fieldname,field))
self.bit_len_sum += field.length
def load(self, data):
if self.bit_len_sum % 8 != 0:
raise NettestError(_("incorrect bit field length"))
        byte_len = self.bit_len_sum // 8
data = data[:byte_len]
loaded_len = 0
for field_name, field in self.fields:
field_data = field.from_netbytes(data, loaded_len)
loaded_len += field.length
setattr(self.packet, field_name, field_data)
return byte_len
class Packet(object, metaclass=PacketMeta):
'''define field order
'''
fields=[]
def __init__(self):
for field_name, field in self._fields:
if isinstance(field, Packet):
setattr(self, field_name, field.__class__())
def dump(self):
'''Serialize self to bytes
'''
data = b''
bit_dumper = BitDumper()
for field_name, field in self._fields:
field_value = getattr(self, field_name)
if field_value is None:
raise NettestError(_("%s is None and haven't default value")%(field_name))
if isinstance(field, BitField):
bit_dumper.push(field_value, field.length)
continue
else:<|fim▁hole|>
if isinstance(field, Packet):
data += field_value.dump()
continue
data += field.to_netbytes(field_value)
if bit_dumper.data_len_sum > 0:
data += bit_dumper.dump()
return data
# def __dump_list_data(self, fields):
# data = b''
# for field in fields:
# if isinstance(field, Packet):
# data += field.dump()
# continue
# if isinstance(field, list):
# data += self.__dump_list_data()
# continue
# if isinstance(field, Field):
# data += field.to_netbytes(field_value)
# continue
def load(self, data):
'''Deserialize bytes to a self.
if success, return the total data length used
else return None
'''
loaded_len = 0
bit_loader = BitLoader(self)
for field_name, field in self._fields:
if isinstance(field, BitField):
bit_loader.push(field_name, field)
continue
else:
if bit_loader.bit_len_sum > 0:
loaded_len += bit_loader.load(data[loaded_len:])
bit_loader.clear()
if isinstance(field, Packet):
field_value = getattr(self, field_name)
length = field_value.load(data[loaded_len:])
if length is None:
return None
loaded_len += length
continue
field_data = field.from_netbytes(data[loaded_len:])
if field_data is None:
return None
loaded_len += field.length
setattr(self, field_name, field_data)
if bit_loader.bit_len_sum > 0:
loaded_len += bit_loader.load(data[loaded_len:])
return loaded_len
def to_printable(self):
string = ''
string += '-'*20+str(self.__class__.__name__)+'-'*20+'\n'
for field_name, field in self._fields:
field_value = getattr(self, field_name)
if field_value is None:
string += '%s\tNone\n'%(field_name)
elif isinstance(field, Packet):
string += '%s\t%s\n'%(field_name, field_value.to_printable())
else:
string += '%s\t%s\n'%(field_name, field.to_printable(field_value))
string += '-'*(40+len(self.__class__.__name__))+'\n'
return string
def __eq__(self, other):
for field_name in self.fields:
field_value1 = getattr(self, field_name)
field_value2 = getattr(other, field_name)
if field_value1 != field_value2:
return False
return True
@property
def length(self):
total_len = 0
bit_len = 0
for field_name, field in self._fields:
if isinstance(field, BitField):
bit_len += field.length
elif field.length > 0:
total_len += field.length
else:
field_value = getattr(self, field_name)
total_len += len(field_value)
        total_len += bit_len // 8
return total_len<|fim▁end|> | if bit_dumper.data_len_sum > 0:
data += bit_dumper.dump()
bit_dumper.clear() |
<|file_name|>jasmine-demo.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>
describe("A suite", function() {
it("contains spec with an expectation", function() {
expect(true).toBe(true);
});
});<|fim▁end|> | |
<|file_name|>score.js<|end_file_name|><|fim▁begin|>define(["config"], function(Config) {
"use strict";
var width = 100,
height = 27,
padding = 6;
function drawScore(ctx, score, screenWidth) {
ctx.beginPath();
ctx.strokeStyle = "#fff";
ctx.lineWidth = 1;
ctx.moveTo(screenWidth - 100, 0);
ctx.lineTo(screenWidth - 100, height);
ctx.lineTo(screenWidth, height);
ctx.stroke();<|fim▁hole|> ctx.beginPath();
ctx.fillStyle = Config.text.score.color;
ctx.textBaseline = "top";
ctx.font = Config.text.score.font;
ctx.fillText("Score: " + score, screenWidth - width + padding, padding);
ctx.closePath();
}
return drawScore;
});<|fim▁end|> | ctx.closePath();
|
<|file_name|>descriptor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Implement the declarative descriptor.
"""
from __future__ import absolute_import
import warnings
import weakref
from .events import _DescriptorEvent, _KeywordEvent
from .utils import descriptor__get__, hybridmethod
from ...utils import ReferenceError
from ...exc import CauldronException, CauldronWarning
__all__ = ['KeywordDescriptor', 'DescriptorBase', 'ServiceNotBound', 'ServiceAlreadyBound', 'IntegrityError']
class ServiceNotBound(CauldronException):
"""Error raised when a service is not bound to a descriptor."""
pass
class ServiceAlreadyBound(CauldronException):
"""Error raised when a service is already bound to a descriptor."""
pass
class ServiceAlreadyBoundWarning(CauldronWarning):
"""Warning when a service is already bound to a descriptor."""
pass
class IntegrityError(CauldronException):
"""Raised to indicate an instance has a differing initial value from the one in the keyword store."""
pass
class DescriptorBase(object):
"""A keyword descriptor base class which assists in binding descriptors to keywords.
This class should be used as a base class for any class that will use :class:`KeywordDescriptor` to
describe :mod:`Cauldron` keywords as attributes.
This class provides a :meth:`bind` method to associate a :mod:`Cauldron` Service with the descriptors
on this class. There are two stages to binding:
1. Set the DFW Service for these keywords via :meth:`bind`. This can be done at the class level.
2. Bind an instance to the the service. This can be done at __init__ time.
"""
def __init__(self, *args, **kwargs):
"""This initalizer tries to bind the instance, if it can."""
super(DescriptorBase, self).__init__(*args, **kwargs)
try:
self.bind()
except ServiceNotBound as e:
# We swallow this exception, because the instance may not be
# bound to a service.
pass
@classmethod
def keyword_descriptors(cls):
"""Iterate over the keyword descriptors which are members of this class."""
for var in dir(cls):
try:
member = getattr(cls, var)
if isinstance(member, KeywordDescriptor):
yield member
except Exception:
# We don't know what happened here, but there are lots of ways
# to override class-level attribute access and screw this up.
pass
@hybridmethod
def bind(self, service=None):
"""Bind a service to the descriptors in this class.
This method can be called either on the class or the instance. On the class,
        it associates a particular Cauldron KTL Service with the keywords which
are attached to this class. For an instance, it associates the Cauldron KTL
Service if provided, and links the callback methods appropriately.
:param service: The KTL Cauldron Service, or None, to bind to the keywords
attached to this object.
:raises: :exc:`ServiceNotBound` if there is no KTL Cauldron Service associated
with this instance.
"""
try:
for desc in self.keyword_descriptors():
desc.bind(self, service)
except ServiceNotBound as e:
raise ServiceNotBound("In order to bind this object's keyword descriptors, "
"you must set the appropriate service via the bind(service=...) method.")
@bind.classmethod
def bind(cls, service=None):
"""Classmethod implementation of bind. See :meth:`bind` above."""
if service is None:
raise ServiceNotBound("In order to bind this object's keyword descriptors, "
"you must set the appropriate service via the bind(service=...) method.")
for desc in cls.keyword_descriptors():
desc.service = service
class KeywordDescriptor(object):
"""A descriptor which maintains a relationship with a keyword.
The descriptor should be used as a class level variable. It can be accessed as
a regular instance variable, where it will return the result of :meth:`Keyword.update`
operations. Setting the instance variable will result in a :meth:`Keyword.modify` operation.
Parameters
----------
name : str
Keyword name. Case-insensitive, will be translated to upper case.
initial : str
Keyword initial value, should be a string. If not set, no initial value is used
and the descriptor will return ``None`` before the keyword is bound.
type : function
        A function which converts an inbound value to the appropriate python type. The python type
returned by this function should be suitable for use as a string to modify the keyword.
doc : str
The docstring for this keyword descriptor.
readonly : bool
Set this keyword descriptor to be read-only.
writeonly : bool
Set this keyword descriptor to be write-only.
"""
_EVENTS = ['preread', 'read', 'postread', 'prewrite', 'write', 'postwrite', 'check']
_service = None
_bound = False
def __init__(self, name, initial=None, type=lambda v : v, doc=None, readonly=False, writeonly=False):
super(KeywordDescriptor, self).__init__()
self.name = name.upper()
self.type = type
self.__doc__ = doc
if readonly and writeonly:
raise ValueError("Keyword {0} cannot be 'readonly' and 'writeonly'.".format(self.name))
self.readonly = readonly
self.writeonly = writeonly
# Prepare the events interface.
self._events = []
for event in self._EVENTS:
evt = _DescriptorEvent(event, replace_method=True)
setattr(self, event, evt)
self._events.append(evt)
# We handle 'callback' separately, as it triggers on the keyword's _propogate method.
        #TODO: We should check that this works with DFW and ktl builtins; it's kind of a hack here.
# Note the distinction is important, replace_method=False in this case.
self.callback = _DescriptorEvent("_propogate", replace_method=False)
self._events.append(self.callback)
self._initial = initial
self._orig_initial = initial
self._initial_keyword_values = {}
self._bound = False
@property
def name(self):
"""Keyword name"""
return self._name
@name.setter
def name(self, value):
"""Set the keyword name."""
if self._bound:
raise ServiceAlreadyBound("Can't change the name of the keyword after the service has bound to it.")
self._name = str(value).upper()
self._name_attr = "_{0}_name_{1}".format(self.__class__.__name__, self._name)
def _bind_name(self, name, obj=None):
"""Set the name."""
if obj is not None:
setattr(obj, self._name_attr, name)
initial = self._get_initial_value(obj, name)
if initial is not None:
setattr(obj, self._attr, initial)
def get_bound_attr(self, obj, default=None):
"""Get the bound attribute value."""
if default is None:
default = self._orig_initial
return getattr(obj, self._bound_attr(obj), default)
def set_bound_attr(self, obj, value):
"""Set the bound attribute value."""
setattr(obj, self._bound_attr(obj), value)
def _bound_attr(self, obj):
"""Get the bound attribute name for initial values."""
return "_{0}_{1}".format(self.__class__.__name__, self.get_bound_name(obj))
def get_bound_name(self, obj):
"""Get the bound name."""
return getattr(obj, self._name_attr, self._name)
def set_bound_name(self, obj, value):
"""Set a bound name."""
if self._bound:
            warnings.warn(ServiceAlreadyBoundWarning("Name change won't take effect until the next time this keyword is bound."))
# Set the new name value.
name = str(value).upper()
initial = self.get_bound_attr(obj)
setattr(obj, self._name_attr, name)
if initial is not None:
self.set_bound_attr(obj, initial)
if self._bound:
# Re-bind events to the right keyword.
#TODO: Need a way to unbind events from previous keyword.
for event in self._events:
_KeywordEvent(self.keyword(obj), obj, event)
def __repr__(self):
"""Represent"""
try:
repr_bind = " bound to {0}".format(self.service) if self.service is not None else ""
except ReferenceError:
repr_bind = ""
return "<{0} name={1}{2}>".format(self.__class__.__name__, self.name, repr_bind)
@descriptor__get__
def __get__(self, obj, objtype=None):
"""Getter"""
if self.writeonly:
raise ValueError("Keyword {0} is write-only.".format(self.name))
try:
keyword = self.keyword(obj)
#TODO: Hmm, I'm not sure about this.
keyword.update()
return self.type(keyword.value)
except ServiceNotBound:
return self.type(self.get_bound_attr(obj))
def __set__(self, obj, value):
"""Set the value."""
if self.readonly:
raise ValueError("Keyword {0} is read-only.".format(self.name))
try:
keyword = self.keyword(obj)
keyword.modify(str(self.type(value)))
return keyword.value
except ServiceNotBound:
return self.set_bound_attr(obj, self.type(value))
def _get_initial_value(self, obj, name):
"""Get initial value for a keyword."""
name = name.upper()
attr = "_{0}_{1}".format(self.__class__.__name__, name)
try:
try:<|fim▁hole|> except:
            # We catch this error in case it was caused by no initial value having been set.
# If an initial value was set, then we want to raise this back to the user.
if not (self._initial is None and not hasattr(obj, attr)):
raise
else:
if getattr(obj, attr, self._initial) is None:
# Do nothing if it was really None everywhere:
return None
return initial
return None
def _bind_initial_value(self, obj):
"""Bind the initial value for this service."""
# We do this here to retain a reference to the same keyword object
        # throughout the course of this function.
keyword = self.keyword(obj)
if keyword is None:
# This can happen if keyword is an orphan, but bind is triggered
# before the dispatcher has totally set itself up.
return
initial = self._get_initial_value(obj, keyword.name.upper())
if initial is not None:
if keyword['value'] is None:
# Only modify the keyword value if it wasn't already set to anything.
keyword.modify(initial)
elif keyword['value'] == initial:
# But ignore the case where the current keyword value already matches the initial value
pass
else:
raise IntegrityError("Keyword {0!r} has a value {1!r}, and"
" descriptor has initial value {2!r} which"
" do not match.".format(keyword, keyword['value'], initial))
# Clean up the instance initial values.
# try:
# delattr(obj, attr)
# except AttributeError:
# pass
# self._initial = None
def bind(self, obj, service=None):
"""Bind a service to this descriptor, and the descriptor to an instance.
Binding an instance of :class:`DescriptorBase` to this descriptor activates
the listening of events attached to the underlying keyword object.
Binding an instance of :class:`DescriptorBase` to this descriptor will cause
the descriptor to resolve the initial value of the keyword. This initial value
will be taken from the instance itself, if the descriptor was modified before
it was bound to this instance, or the initial value as set by this descriptor
will be used. When the initial value conflicts with a value already written
to the underlying keyword, :exc:`IntegrityError` will be raised.
If this descriptor has already been bound to any one instance, the descriptor
level initial value will not be used, and instead only an instance-level initial
value may be used.
Parameters
----------
obj : object
The python instance which owns this descriptor. This is used to bind
instance method callbacks to changes in this descriptor's value.
service : :class:`DFW.Service.Service`
The DFW Service to be used for this descriptor. May also be set via the
:attr:`service` attribute.
"""
if service is not None and not self._bound:
self.service = service
elif service is not None and service.name != self.service.name and self._bound:
raise ServiceAlreadyBound("Service {0!r} is already bound to {1}".format(self.service, self))
self._bind_initial_value(obj)
for event in self._events:
_KeywordEvent(self.keyword(obj), obj, event)
self._bound = True
@property
def service(self):
"""The DFW Service associated with this descriptor."""
return self._service
@service.setter
def service(self, value):
"""Set the service via a weakreference proxy."""
def _proxy_callback(proxy, weakself=weakref.ref(self)):
self = weakself()
if self is not None:
self._bound = False
self._service = weakref.proxy(value, _proxy_callback)
@service.deleter
def service(self):
"""Delete service."""
self._service = None
def keyword(self, obj):
"""The keyword instance for this descriptor."""
name = self.get_bound_name(obj)
try:
return self._service[name]
except (AttributeError, TypeError, ReferenceError):
raise ServiceNotBound("No service is bound to {0}".format(self))<|fim▁end|> | initial = self._initial_keyword_values[name]
except KeyError:
initial = str(self.type(self.get_bound_attr(obj, self._initial)))
self._initial_keyword_values[name] = initial |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.contrib import admin
# Register your models here.
from .models import Photos
admin.site.register(Photos)<|fim▁end|> | |
<|file_name|>jaccard.py<|end_file_name|><|fim▁begin|>import argparse
from collections import defaultdict
def calculateJaccardIndex(x,z,neighbours):
shared = neighbours[x].intersection(neighbours[z])
combined = neighbours[x].union(neighbours[z])<|fim▁hole|> return len(shared)/float(len(combined))
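# Worked example: with neighbours = {1: {2, 3}, 4: {3, 5}} (a defaultdict of
# sets), shared = {3} and combined = {2, 3, 5}, so J(1, 4) = 1/3.
# Note: a pair with no recorded cooccurrences at all would make `combined`
# empty and raise ZeroDivisionError.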
if __name__ == '__main__':
	parser = argparse.ArgumentParser(description='Calculate Jaccard scores for a set of relations')
parser.add_argument('--cooccurrenceFile',type=str,required=True,help='File containing cooccurrences')
parser.add_argument('--occurrenceFile',type=str,required=True,help='File containing occurrences')
parser.add_argument('--sentenceCount',type=str,required=True,help='File containing sentence count')
parser.add_argument('--relationsToScore',type=str,required=True,help='File containing relations to score')
parser.add_argument('--anniVectors',type=str,help='File containing the raw ANNI vector data')
parser.add_argument('--anniVectorsIndex',type=str,help='File containing the index for the ANNI vector data')
parser.add_argument('--outFile',type=str,required=True,help='File to output scores to')
args = parser.parse_args()
print "Loading relationsToScore"
relationsToScore = []
entitiesToScore = set()
with open(args.relationsToScore) as f:
for line in f:
split = map(int,line.strip().split())
x,y = split[:2]
relationsToScore.append((x,y))
entitiesToScore.add(x)
entitiesToScore.add(y)
entitiesToScore = sorted(list(entitiesToScore))
print "Loaded relationsToScore"
print "Loading cooccurrences..."
neighbours = defaultdict(set)
with open(args.cooccurrenceFile) as f:
for line in f:
x,y,count = map(int,line.strip().split())
neighbours[x].add(y)
neighbours[y].add(x)
print "Loaded cooccurrences"
print "Scoring..."
with open(args.outFile,'w') as outF:
for i,(x,z) in enumerate(relationsToScore):
if (i%10000) == 0:
print i
jaccardScore = calculateJaccardIndex(x,z,neighbours)
outData = [x,z,jaccardScore]
outLine = "\t".join(map(str,outData))
outF.write(outLine+"\n")
print "Completed scoring"
print "Output to %s" % args.outFile<|fim▁end|> | |
<|file_name|>client_test.go<|end_file_name|><|fim▁begin|>// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package agent
import (
"bytes"
"crypto/rand"
"errors"
"io"
"net"
"os"
"os/exec"
"path/filepath"
"runtime"
"strconv"
"strings"
"sync"
"testing"
"time"
"github.com/Psiphon-Labs/psiphon-tunnel-core/psiphon/common/crypto/ssh"
)
// startOpenSSHAgent executes ssh-agent, and returns an Agent interface to it.
func startOpenSSHAgent(t *testing.T) (client ExtendedAgent, socket string, cleanup func()) {
if testing.Short() {
// ssh-agent is not always available, and the key
// types supported vary by platform.
t.Skip("skipping test due to -short")
}
bin, err := exec.LookPath("ssh-agent")
if err != nil {
t.Skip("could not find ssh-agent")
}
cmd := exec.Command(bin, "-s")
cmd.Env = []string{} // Do not let the user's environment influence ssh-agent behavior.
cmd.Stderr = new(bytes.Buffer)
out, err := cmd.Output()
if err != nil {
t.Fatalf("%s failed: %v\n%s", strings.Join(cmd.Args, " "), err, cmd.Stderr)
}
// Output looks like:
//
// SSH_AUTH_SOCK=/tmp/ssh-P65gpcqArqvH/agent.15541; export SSH_AUTH_SOCK;
// SSH_AGENT_PID=15542; export SSH_AGENT_PID;
// echo Agent pid 15542;
fields := bytes.Split(out, []byte(";"))
line := bytes.SplitN(fields[0], []byte("="), 2)
line[0] = bytes.TrimLeft(line[0], "\n")
if string(line[0]) != "SSH_AUTH_SOCK" {
t.Fatalf("could not find key SSH_AUTH_SOCK in %q", fields[0])
}
socket = string(line[1])
line = bytes.SplitN(fields[2], []byte("="), 2)
line[0] = bytes.TrimLeft(line[0], "\n")
if string(line[0]) != "SSH_AGENT_PID" {
t.Fatalf("could not find key SSH_AGENT_PID in %q", fields[2])
}
pidStr := line[1]
pid, err := strconv.Atoi(string(pidStr))
if err != nil {
t.Fatalf("Atoi(%q): %v", pidStr, err)
}
conn, err := net.Dial("unix", string(socket))
if err != nil {
t.Fatalf("net.Dial: %v", err)
}
ac := NewClient(conn)
return ac, socket, func() {
proc, _ := os.FindProcess(pid)
if proc != nil {
proc.Kill()
}
conn.Close()
os.RemoveAll(filepath.Dir(socket))
}
}
func startAgent(t *testing.T, agent Agent) (client ExtendedAgent, cleanup func()) {
c1, c2, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
go ServeAgent(agent, c2)
return NewClient(c1), func() {
c1.Close()
c2.Close()
}
}
// startKeyringAgent uses Keyring to simulate a ssh-agent Server and returns a client.
func startKeyringAgent(t *testing.T) (client ExtendedAgent, cleanup func()) {
return startAgent(t, NewKeyring())
}
func testOpenSSHAgent(t *testing.T, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) {
agent, _, cleanup := startOpenSSHAgent(t)
defer cleanup()
testAgentInterface(t, agent, key, cert, lifetimeSecs)
}
func testKeyringAgent(t *testing.T, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) {
agent, cleanup := startKeyringAgent(t)
defer cleanup()
testAgentInterface(t, agent, key, cert, lifetimeSecs)
}
func testAgentInterface(t *testing.T, agent ExtendedAgent, key interface{}, cert *ssh.Certificate, lifetimeSecs uint32) {
signer, err := ssh.NewSignerFromKey(key)
if err != nil {
t.Fatalf("NewSignerFromKey(%T): %v", key, err)
}
// The agent should start up empty.
if keys, err := agent.List(); err != nil {
t.Fatalf("RequestIdentities: %v", err)
} else if len(keys) > 0 {
t.Fatalf("got %d keys, want 0: %v", len(keys), keys)
}
// Attempt to insert the key, with certificate if specified.
var pubKey ssh.PublicKey
if cert != nil {
err = agent.Add(AddedKey{
PrivateKey: key,
Certificate: cert,
Comment: "comment",
LifetimeSecs: lifetimeSecs,
})
pubKey = cert
} else {
err = agent.Add(AddedKey{PrivateKey: key, Comment: "comment", LifetimeSecs: lifetimeSecs})
pubKey = signer.PublicKey()
}
if err != nil {
t.Fatalf("insert(%T): %v", key, err)
}
// Did the key get inserted successfully?
if keys, err := agent.List(); err != nil {
t.Fatalf("List: %v", err)
} else if len(keys) != 1 {
t.Fatalf("got %v, want 1 key", keys)
} else if keys[0].Comment != "comment" {
t.Fatalf("key comment: got %v, want %v", keys[0].Comment, "comment")
} else if !bytes.Equal(keys[0].Blob, pubKey.Marshal()) {
t.Fatalf("key mismatch")
}
// Can the agent make a valid signature?
data := []byte("hello")
sig, err := agent.Sign(pubKey, data)
if err != nil {
t.Fatalf("Sign(%s): %v", pubKey.Type(), err)
}
if err := pubKey.Verify(data, sig); err != nil {
t.Fatalf("Verify(%s): %v", pubKey.Type(), err)
}
// For tests on RSA keys, try signing with SHA-256 and SHA-512 flags
if pubKey.Type() == "ssh-rsa" {
sshFlagTest := func(flag SignatureFlags, expectedSigFormat string) {
sig, err = agent.SignWithFlags(pubKey, data, flag)
if err != nil {
t.Fatalf("SignWithFlags(%s): %v", pubKey.Type(), err)
}
if sig.Format != expectedSigFormat {
t.Fatalf("Signature format didn't match expected value: %s != %s", sig.Format, expectedSigFormat)
}
if err := pubKey.Verify(data, sig); err != nil {
t.Fatalf("Verify(%s): %v", pubKey.Type(), err)
}
}
sshFlagTest(0, ssh.SigAlgoRSA)
sshFlagTest(SignatureFlagRsaSha256, ssh.SigAlgoRSASHA2256)
sshFlagTest(SignatureFlagRsaSha512, ssh.SigAlgoRSASHA2512)
}
// If the key has a lifetime, is it removed when it should be?
if lifetimeSecs > 0 {
time.Sleep(time.Second*time.Duration(lifetimeSecs) + 100*time.Millisecond)
keys, err := agent.List()
if err != nil {
t.Fatalf("List: %v", err)
}
if len(keys) > 0 {
t.Fatalf("key not expired")
}
}
}
func TestMalformedRequests(t *testing.T) {
keyringAgent := NewKeyring()
listener, err := netListener()
if err != nil {
t.Fatalf("netListener: %v", err)
}
defer listener.Close()
testCase := func(t *testing.T, requestBytes []byte, wantServerErr bool) {
var wg sync.WaitGroup
wg.Add(1)
go func() {
defer wg.Done()
c, err := listener.Accept()
if err != nil {
t.Errorf("listener.Accept: %v", err)
return
}
defer c.Close()
err = ServeAgent(keyringAgent, c)
if err == nil {
t.Error("ServeAgent should have returned an error to malformed input")
} else {
if (err != io.EOF) != wantServerErr {
t.Errorf("ServeAgent returned expected error: %v", err)
}
}
}()
c, err := net.Dial("tcp", listener.Addr().String())
if err != nil {
t.Fatalf("net.Dial: %v", err)
}
_, err = c.Write(requestBytes)
if err != nil {
t.Errorf("Unexpected error writing raw bytes on connection: %v", err)
}
c.Close()
wg.Wait()
}
var testCases = []struct {
name string
requestBytes []byte
wantServerErr bool
}{
{"Empty request", []byte{}, false},
{"Short header", []byte{0x00}, true},
{"Empty body", []byte{0x00, 0x00, 0x00, 0x00}, true},
{"Short body", []byte{0x00, 0x00, 0x00, 0x01}, false},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) { testCase(t, tc.requestBytes, tc.wantServerErr) })
}
}
func TestAgent(t *testing.T) {
for _, keyType := range []string{"rsa", "dsa", "ecdsa", "ed25519"} {
testOpenSSHAgent(t, testPrivateKeys[keyType], nil, 0)
testKeyringAgent(t, testPrivateKeys[keyType], nil, 0)
}
}
func TestCert(t *testing.T) {
cert := &ssh.Certificate{
Key: testPublicKeys["rsa"],
ValidBefore: ssh.CertTimeInfinity,
CertType: ssh.UserCert,
}
cert.SignCert(rand.Reader, testSigners["ecdsa"])
testOpenSSHAgent(t, testPrivateKeys["rsa"], cert, 0)
testKeyringAgent(t, testPrivateKeys["rsa"], cert, 0)
}
// netListener creates a localhost network listener.
func netListener() (net.Listener, error) {
listener, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
listener, err = net.Listen("tcp", "[::1]:0")
if err != nil {
return nil, err
}
}
return listener, nil
}
// netPipe is analogous to net.Pipe, but it uses a real net.Conn, and
// therefore is buffered (net.Pipe deadlocks if both sides start with
// a write.)
func netPipe() (net.Conn, net.Conn, error) {
listener, err := netListener()
if err != nil {
return nil, nil, err
}
defer listener.Close()
c1, err := net.Dial("tcp", listener.Addr().String())
if err != nil {
return nil, nil, err
}
c2, err := listener.Accept()
if err != nil {
c1.Close()
return nil, nil, err
}
return c1, c2, nil
}
func TestServerResponseTooLarge(t *testing.T) {
a, b, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
done := make(chan struct{})
defer func() { <-done }()
defer a.Close()
defer b.Close()
var response identitiesAnswerAgentMsg
response.NumKeys = 1
response.Keys = make([]byte, maxAgentResponseBytes+1)
agent := NewClient(a)
go func() {
defer close(done)
n, err := b.Write(ssh.Marshal(response))
if n < 4 {
if runtime.GOOS == "plan9" {
if e1, ok := err.(*net.OpError); ok {
if e2, ok := e1.Err.(*os.PathError); ok {
switch e2.Err.Error() {
case "Hangup", "i/o on hungup channel":
// syscall.Pwrite returns -1 in this case even when some data did get written.
return
}
}
}
}
t.Errorf("At least 4 bytes (the response size) should have been successfully written: %d < 4: %v", n, err)
}
}()
_, err = agent.List()
if err == nil {
t.Fatal("Did not get error result")
}
if err.Error() != "agent: client error: response too large" {
t.Fatal("Did not get expected error result")
}
}
func TestAuth(t *testing.T) {
agent, _, cleanup := startOpenSSHAgent(t)
defer cleanup()
a, b, err := netPipe()
if err != nil {
t.Fatalf("netPipe: %v", err)
}
defer a.Close()
defer b.Close()
if err := agent.Add(AddedKey{PrivateKey: testPrivateKeys["rsa"], Comment: "comment"}); err != nil {
t.Errorf("Add: %v", err)
}
serverConf := ssh.ServerConfig{}
serverConf.AddHostKey(testSigners["rsa"])
serverConf.PublicKeyCallback = func(c ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {
if bytes.Equal(key.Marshal(), testPublicKeys["rsa"].Marshal()) {
return nil, nil
}
return nil, errors.New("pubkey rejected")
}
go func() {
conn, _, _, err := ssh.NewServerConn(a, &serverConf)
if err != nil {
t.Fatalf("Server: %v", err)
}
conn.Close()
}()
conf := ssh.ClientConfig{
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
}
conf.Auth = append(conf.Auth, ssh.PublicKeysCallback(agent.Signers))
conn, _, _, err := ssh.NewClientConn(b, "", &conf)
if err != nil {
t.Fatalf("NewClientConn: %v", err)
}
conn.Close()
}
func TestLockOpenSSHAgent(t *testing.T) {
agent, _, cleanup := startOpenSSHAgent(t)
defer cleanup()
testLockAgent(agent, t)
}
func TestLockKeyringAgent(t *testing.T) {
agent, cleanup := startKeyringAgent(t)
defer cleanup()
testLockAgent(agent, t)
}
func testLockAgent(agent Agent, t *testing.T) {
if err := agent.Add(AddedKey{PrivateKey: testPrivateKeys["rsa"], Comment: "comment 1"}); err != nil {
t.Errorf("Add: %v", err)
}
if err := agent.Add(AddedKey{PrivateKey: testPrivateKeys["dsa"], Comment: "comment dsa"}); err != nil {
t.Errorf("Add: %v", err)
}
if keys, err := agent.List(); err != nil {
t.Errorf("List: %v", err)
} else if len(keys) != 2 {
t.Errorf("Want 2 keys, got %v", keys)
}
passphrase := []byte("secret")
if err := agent.Lock(passphrase); err != nil {
t.Errorf("Lock: %v", err)
}
if keys, err := agent.List(); err != nil {
t.Errorf("List: %v", err)
} else if len(keys) != 0 {
t.Errorf("Want 0 keys, got %v", keys)
}
signer, _ := ssh.NewSignerFromKey(testPrivateKeys["rsa"])
if _, err := agent.Sign(signer.PublicKey(), []byte("hello")); err == nil {
t.Fatalf("Sign did not fail")
}
if err := agent.Remove(signer.PublicKey()); err == nil {
t.Fatalf("Remove did not fail")
}
if err := agent.RemoveAll(); err == nil {
t.Fatalf("RemoveAll did not fail")
}
if err := agent.Unlock(nil); err == nil {
t.Errorf("Unlock with wrong passphrase succeeded")
}
if err := agent.Unlock(passphrase); err != nil {
t.Errorf("Unlock: %v", err)
}
if err := agent.Remove(signer.PublicKey()); err != nil {
t.Fatalf("Remove: %v", err)
}
if keys, err := agent.List(); err != nil {
t.Errorf("List: %v", err)
} else if len(keys) != 1 {
t.Errorf("Want 1 keys, got %v", keys)
}
}
func testOpenSSHAgentLifetime(t *testing.T) {
agent, _, cleanup := startOpenSSHAgent(t)
defer cleanup()
testAgentLifetime(t, agent)
}
func testKeyringAgentLifetime(t *testing.T) {
agent, cleanup := startKeyringAgent(t)
defer cleanup()
testAgentLifetime(t, agent)
}
func testAgentLifetime(t *testing.T, agent Agent) {
for _, keyType := range []string{"rsa", "dsa", "ecdsa"} {
// Add private keys to the agent.
err := agent.Add(AddedKey{
PrivateKey: testPrivateKeys[keyType],
Comment: "comment",
LifetimeSecs: 1,
})
if err != nil {
t.Fatalf("add: %v", err)
}
// Add certs to the agent.
cert := &ssh.Certificate{
Key: testPublicKeys[keyType],
ValidBefore: ssh.CertTimeInfinity,
CertType: ssh.UserCert,
}
cert.SignCert(rand.Reader, testSigners[keyType])
err = agent.Add(AddedKey{
PrivateKey: testPrivateKeys[keyType],
Certificate: cert,
Comment: "comment",<|fim▁hole|> })
if err != nil {
t.Fatalf("add: %v", err)
}
}
time.Sleep(1100 * time.Millisecond)
if keys, err := agent.List(); err != nil {
t.Errorf("List: %v", err)
} else if len(keys) != 0 {
t.Errorf("Want 0 keys, got %v", len(keys))
}
}
type keyringExtended struct {
*keyring
}
func (r *keyringExtended) Extension(extensionType string, contents []byte) ([]byte, error) {
if extensionType != "[email protected]" {
return []byte{agentExtensionFailure}, nil
}
return append([]byte{agentSuccess}, contents...), nil
}
func TestAgentExtensions(t *testing.T) {
agent, _, cleanup := startOpenSSHAgent(t)
defer cleanup()
_, err := agent.Extension("[email protected]", []byte{0x00, 0x01, 0x02})
if err == nil {
t.Fatal("should have gotten agent extension failure")
}
agent, cleanup = startAgent(t, &keyringExtended{})
defer cleanup()
result, err := agent.Extension("[email protected]", []byte{0x00, 0x01, 0x02})
if err != nil {
t.Fatalf("agent extension failure: %v", err)
}
if len(result) != 4 || !bytes.Equal(result, []byte{agentSuccess, 0x00, 0x01, 0x02}) {
t.Fatalf("agent extension result invalid: %v", result)
}
_, err = agent.Extension("[email protected]", []byte{0x00, 0x01, 0x02})
if err == nil {
t.Fatal("should have gotten agent extension failure")
}
}<|fim▁end|> | LifetimeSecs: 1, |
<|file_name|>plivo.js<|end_file_name|><|fim▁begin|>var extend = require('extend');
var plivo = require('plivo');
var crypto = require('crypto')
var phone = require('phone');
var TelcomPlivoClient = module.exports = function(opts){
if (!(this instanceof TelcomPlivoClient))
		return new TelcomPlivoClient(opts);
this.options = {};
extend(this.options,opts);
this._client = plivo.RestAPI({
authId: this.options.sid,
authToken: this.options.token<|fim▁hole|>
TelcomPlivoClient.prototype.validateRequest = function(req,callback){
if(req.header('X-Plivo-Signature') === undefined)
return callback('missing requrired header.')
var params = req.body;
if(req.method === 'GET'){
params = req.query;
}
var toSign = req._telcomRequestUrlNoQuery;
var expectedSignature = create_signature(toSign, params,this.options.token);
if(req.header('X-Plivo-Signature') === expectedSignature)
callback();
else
callback('signature does not match');
}
TelcomPlivoClient.prototype.sms = function(obj,callback){
var plivoMesg = {
src : phone(obj.from),
dst : phone(obj.to),
text : obj.body
};
/*
{
api_id: 'xxxxxxxxx-1f9d-11e3-b44b-22000ac53995',
message: 'message(s) queued',
message_uuid: [ 'xxxxxxxx-1f9d-11e3-b1d3-123141013a24' ]
}
*/
this._client.send_message(plivoMesg, function(err, ret) {
if(err === 202){ err = undefined; }
if(!ret) ret = {};
		// guard: Plivo may omit message_uuid on failure responses
		var messageId = ret.message_uuid ? ret.message_uuid[0] : undefined;
		callback(err, messageId, ret.message, ret);
});
};
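// Usage sketch (credentials and numbers are placeholders, not real values):
//
//   var client = TelcomPlivoClient({ sid: 'PLIVO_AUTH_ID', token: 'PLIVO_AUTH_TOKEN' });
//   client.sms({ from: '15557894561', to: '15559633214', body: 'hello' },
//     function(err, messageId, status, raw) { /* handle result */ });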
/*
{ To: '15559633214',
Type: 'sms',
MessageUUID: 'xxxxxxx-2465-11e3-985d-0025907b94de',
From: '15557894561',
Text: 'Vg\n' }
{
to : '',
from : '',
body : '',
messageId : '',
}
*/
TelcomPlivoClient.prototype._convertSmsRequest = function(params){
return {
to : phone(params['To']),
from : phone(params['From']),
body : params['Text'],
messageId : params['MessageUUID'],
_clientRequest : params
};
}
// For verifying the plivo server signature
// By Jon Keating - https://github.com/mathrawka/plivo-node
function create_signature(url, params,token) {
var toSign = url;
Object.keys(params).sort().forEach(function(key) {
toSign += key + params[key];
});
var signature = crypto
.createHmac('sha1',token)
.update(toSign)
.digest('base64');
return signature;
};<|fim▁end|> | });
};
|
<|file_name|>mkrst.py<|end_file_name|><|fim▁begin|>"""
Simple script to help create files needed to make a sphinx documentation
website of the flopy project. The script will read through all of the
flopy modules and create the sphinx autodoc rst (restructured text) files.
"""
import os
# print current working directory
print(os.getcwd())
# look through the following subdirectories, and grab all of the
# modules that should be added to the sphinx documentation.
<|fim▁hole|>pthlist = ['export', 'modflow', 'modflowlgr', 'modpath', 'mt3d', 'pest',
'plot', 'seawat', 'utils', 'mf6', 'mf6/modflow', 'discretization']
namelist = []
for pth in pthlist:
dirpth = os.path.join(flopypth, pth)
filelist = os.listdir(dirpth)
for filename in filelist:
if '.pyc' in filename:
continue
if '__init__' in filename:
continue
if '.py' in filename:
prefix = filename.strip().split('.')[0]
nm = 'flopy.' + pth + '.' + prefix
print (nm)
namelist.append(nm)
fnamelist = open('fnamelist.txt', 'w')
for name in namelist:
fnamelist.write(' ' + name + '\n')
prefix = name.strip().split('.')[2]
fname = prefix + '.rst'
if not os.path.exists(fname):
print('Creating new rst file: {}'.format(fname))
f = open(fname, 'w')
s = name.replace('/', '.') + ' Module'
f.write(s + '\n')
s = len(s) * '='
f.write(s + '\n\n')
s = '.. automodule:: ' + name.replace('/', '.')
f.write(s + '\n')
s = ' :members:'
f.write(s + '\n')
s = ' :inherited-members:'
f.write(s + '\n')
f.close()
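# A module named 'flopy.utils.binaryfile', for example, would yield a file
# 'binaryfile.rst' shaped roughly like (indentation approximate):
#
#   flopy.utils.binaryfile Module
#   =============================
#
#   .. automodule:: flopy.utils.binaryfile
#       :members:
#       :inherited-members: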
fnamelist.close()<|fim▁end|> | flopypth = os.path.join('..', '..', '..', 'flopy3fork.git', 'flopy')
|
<|file_name|>format.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package image
import (
"fmt"
"github.com/google/gapid/core/data/protoutil"
"github.com/google/gapid/core/stream"
)
// ErrResizeUnsupported is returned by Format.Resize() when the format does
// not support resizing.
var ErrResizeUnsupported = fmt.Errorf("Format does not support resizing")
// format is the interface for an image and/or pixel format.
type format interface {
// check returns an error if the combination of data, image width and image
// height is invalid for the given format, otherwise check returns nil.
check(data []byte, width, height int) error
// size returns the number of bytes required to hold an image of the specified
// dimensions in this format. If the size varies based on the image data, then
// size returns -1.
size(width, height int) int
// key returns an object that can be used for equality-testing of the format
// and can be used as a key in a map. Formats of the same type and parameters
// will always return equal keys.
// Formats can be deserialized into new objects so testing equality on the
// Format object directly is not safe.
key() interface{}
// Channels returns the list of channels described by this format.
// If the channels vary based on the image data, then channels returns nil.
channels() []stream.Channel
}
// Interface compliance check.
var _ = []format{
&FmtUncompressed{},
&FmtPNG{},
&FmtATC_RGB_AMD{},
&FmtATC_RGBA_EXPLICIT_ALPHA_AMD{},
&FmtATC_RGBA_INTERPOLATED_ALPHA_AMD{},
&FmtETC1_RGB_U8_NORM{},
&FmtETC2_RGB_U8_NORM{},
&FmtETC2_RGBA_U8_NORM{},
&FmtETC2_RGBA_U8U8U8U1_NORM{},
&FmtETC2_R_U11_NORM{},
&FmtETC2_RG_U11_NORM{},
&FmtETC2_R_S11_NORM{},
&FmtETC2_RG_S11_NORM{},
&FmtS3_DXT1_RGB{},
&FmtS3_DXT1_RGBA{},
&FmtS3_DXT3_RGBA{},
&FmtS3_DXT5_RGBA{},
&FmtASTC{},
}
<|fim▁hole|> return f.format().check(data, width, height)
}
// Size returns the number of bytes required to hold an image of the specified
// dimensions in this format. If the size varies based on the image data, then
// Size returns -1.
func (f *Format) Size(width, height int) int {
return f.format().size(width, height)
}
// Key returns an object that can be used for equality-testing of the format
// and can be used as a key in a map. Formats of the same type and parameters
// will always return equal keys.
// Formats can be deserialized into new objects so testing equality on the
// Format object directly is not safe.
func (f *Format) Key() interface{} {
return f.format().key()
}
// Channels returns the list of channels described by this format.
// If the channels vary based on the image data, then Channels returns nil.
func (f *Format) Channels() []stream.Channel {
return f.format().channels()
}
func (f *Format) format() format {
return protoutil.OneOf(f.Format).(format)
}
// resizer is the interface implemented by formats that support resizing.
type resizer interface {
// resize returns an image resized from srcW x srcH to dstW x dstH.
// If the format does not support image resizing then the error
// ErrResizeUnsupported is returned.
resize(data []byte, srcW, srcH, dstW, dstH int) ([]byte, error)
}
// Resize returns an image resized from srcW x srcH to dstW x dstH.
// If the format does not support image resizing then the error
// ErrResizeUnsupported is returned.
func (f *Format) Resize(data []byte, srcW, srcH, dstW, dstH int) ([]byte, error) {
if r, ok := protoutil.OneOf(f.Format).(resizer); ok {
return r.resize(data, srcW, srcH, dstW, dstH)
}
return nil, ErrResizeUnsupported
}
func checkSize(data []byte, f format, width, height int) error {
if expected, actual := f.size(width, height), len(data); expected != actual {
return fmt.Errorf("Image data size (0x%x) did not match expected (0x%x) for dimensions %dx%d",
actual, expected, width, height)
}
return nil
}<|fim▁end|> | // Check returns an error if the combination of data, image width and image
// height is invalid for the given format, otherwise Check returns nil.
func (f *Format) Check(data []byte, width, height int) error { |
<|file_name|>055-rede2014_rede_gephi_com_ipca_csv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding: utf-8
# # rede_gephi_com_ipca_csv
# In[6]:
ano_eleicao = '2014'
rede =f'rede{ano_eleicao}'
csv_dir = f'/home/neilor/{rede}'
<|fim▁hole|>
dbschema = f'rede{ano_eleicao}'
table_edges = f"{dbschema}.gephi_edges_com_ipca_2018"
table_nodes = f"{dbschema}.gephi_nodes_com_ipca_2018"
table_receitas = f"{dbschema}.receitas_com_ipca_2018"
table_candidaturas = f"{dbschema}.candidaturas_com_ipca_2018"
table_municipios = f"{dbschema}.municipios_{ano_eleicao}"
# In[8]:
import sys
sys.path.append('../')
import mod_tse as mtse
# In[9]:
import os
home = os.environ["HOME"]
local_dir = f'{home}/temp'
# In[10]:
mtse.execute_query(f"update {table_municipios} set rede= 'N';")
# ## REDE BRASIL
# In[11]:
def salva_rede_brasil(csv_dir,rede):
rede_dir_BR = f'{csv_dir}/{rede}_Brasil'
os.makedirs(rede_dir_BR)
edges_csv_query=f"""copy
(
select * from {table_edges}
)
TO '{rede_dir_BR}/{rede}_Brasil_edges.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(edges_csv_query)
nodes_csv_query=f"""copy
(
select * from {table_nodes}
)
TO '{rede_dir_BR}/{rede}_Brasil_nodes.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(nodes_csv_query)
candidaturas_csv_query=f"""copy
(
select * from {table_candidaturas}
)
TO '{rede_dir_BR}/{rede}_Brasil_candidaturas.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(candidaturas_csv_query)
receitas_csv_query=f"""copy
(
select * from {table_receitas}
)
TO '{rede_dir_BR}/{rede}_Brasil_receitas.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(receitas_csv_query)
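# Resulting layout (sketch, derived from the f-strings above; for rede='rede2014'):
#   /home/neilor/rede2014/rede2014_Brasil/rede2014_Brasil_edges.csv
#   /home/neilor/rede2014/rede2014_Brasil/rede2014_Brasil_nodes.csv
#   /home/neilor/rede2014/rede2014_Brasil/rede2014_Brasil_candidaturas.csv
#   /home/neilor/rede2014/rede2014_Brasil/rede2014_Brasil_receitas.csv
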
# ## REDES POR ESTADO
# In[12]:
def salva_rede_csv_uf(csv_dir,rede,sg_uf):
rede_dir_uf = f'{csv_dir}/{rede}_{sg_uf}'
os.makedirs(rede_dir_uf)
edges_query=f"""copy
(
select * from {table_edges} where ue ='{sg_uf}'
)
TO '{rede_dir_uf}/{rede}_{sg_uf}_edges.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(edges_query)
nodes_query=f"""copy
(
select * from {table_nodes} where ue ='{sg_uf}'
)
TO '{rede_dir_uf}/{rede}_{sg_uf}_nodes.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(nodes_query)
candidaturas_csv_query=f"""copy
(
select * from {table_candidaturas} where sg_uf ='{sg_uf}'
)
TO '{rede_dir_uf}/{rede}_{sg_uf}_candidaturas.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(candidaturas_csv_query)
receitas_csv_query=f"""copy
(
select * from {table_receitas} where receptor_uf ='{sg_uf}'
)
TO '{rede_dir_uf}/{rede}_{sg_uf}_receitas.csv' DELIMITER ';' CSV HEADER;
"""
mtse.execute_query(receitas_csv_query)
# In[13]:
import pandas as pd
import shutil
if os.path.exists(csv_dir):
shutil.rmtree(csv_dir)
os.makedirs(csv_dir)
salva_rede_brasil(csv_dir,rede)
df_uf = mtse.pandas_query(f'select sg_uf from {table_candidaturas} group by sg_uf order by sg_uf')
for index, row in df_uf.iterrows():
sg_uf = row['sg_uf']
salva_rede_csv_uf(csv_dir,rede,sg_uf)
# In[14]:
import datetime
print(datetime.datetime.now())
# In[ ]:<|fim▁end|> |
# In[7]:
|
<|file_name|>home_endpoints.py<|end_file_name|><|fim▁begin|># Endpoints for user to control the home.
from datetime import datetime
from flask import Blueprint, jsonify, request
from services import elements_services, home_services
home_api = Blueprint('/home_api', __name__)
elements_services = elements_services.ElementsServices()
home_services = home_services.HomeServices()
@home_api.route('/profiles')
def profiles():
"""Gets all profiles for all elements for user application to display and manipulate elements"""
return jsonify(home_services.get_profiles())
@home_api.route('/element', methods=['POST'])
def update_element():
"""Updates single element with all new values received from the user application"""
received_element = request.get_json()
home_services.update_element(received_element)
return 'OK'
@home_api.route('/elements', methods=['POST'])
def update_elements():<|fim▁hole|> received_elements = request.get_json()
home_services.update_elements(received_elements)
return 'OK'
@home_api.route('/elementdelete', methods=['POST'])
def delete_element():
"""Deletes a single element with given hid"""
element = request.get_json()
home_services.delete_element(element['hid'])
return 'OK'
@home_api.route('/timerules', methods=['POST'])
def timerules():
"""Adds, Updates or deletes time rule for the given element"""
rules = request.get_json()
if len(rules) == 0:
raise Exception("No elements in the list")
for rule in rules:
if 'id' not in rule:
rule['id'] = None
home_services.save_time_rules(rules)
return 'OK'
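# Example request body for /timerules (sketch; only the 'id' handling is visible
# in this file, the remaining field names are assumptions about what
# HomeServices expects):
# [
#     {"id": 4, "hid": "element-hid", "time": "07:30", "action": "on"}
# ]
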
@home_api.route('/timerules/<string:hid>')
def get_timerules(hid):
"""Gets list of timerules for given hid"""
    timerules = home_services.read_time_rules(hid)
return jsonify(timerules)<|fim▁end|> | """Updates all elements with all new values received from the user application""" |
<|file_name|>SNIIS_Mac.cpp<|end_file_name|><|fim▁begin|>/// @file SNIIS_Mac.cpp
/// Mac OSX implementation of input system
#include "SNIIS_Mac.h"
#include "SNIIS_Intern.h"
#if SNIIS_SYSTEM_MAC
using namespace SNIIS;
// --------------------------------------------------------------------------------------------------------------------
// Constructor
MacInput::MacInput(id pWindowId)
{
mWindow = pWindowId;
// create the manager
mHidManager = IOHIDManagerCreate( kCFAllocatorDefault, 0);
if( !mHidManager )
throw std::runtime_error( "Failed to create HIDManager");
// tell 'em we want it all
IOHIDManagerSetDeviceMatching( mHidManager, nullptr);
// register our enumeration callback
IOHIDManagerRegisterDeviceMatchingCallback( mHidManager, &MacInput::HandleNewDeviceCallback, (void*) this);
// register us for running the event loop
IOHIDManagerScheduleWithRunLoop( mHidManager, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode);
// and open the manager, enumerating all devices along with it
IOReturn res = IOHIDManagerOpen( mHidManager, 0);
Log( "IOHIDManagerOpen() returned %d", res);
if( res != kIOReturnSuccess )
throw std::runtime_error( "Failed to open HIDManager / enumerate devices");
// run the update loop to get the callbacks for new devices
while( CFRunLoopRunInMode( kCFRunLoopDefaultMode, 0, TRUE) == kCFRunLoopRunHandledSource )
/**/;
// remove the manager from the callbacks and runloop
IOHIDManagerRegisterDeviceMatchingCallback( mHidManager, nullptr, nullptr);
  // Since some OSX update, IOHIDManagerUnscheduleFromRunLoop() would also unschedule all devices,
  // so we would never get any event notifications. Simply leaving the manager scheduled is fine,
  // as we unregistered the callback above.
// IOHIDManagerUnscheduleFromRunLoop( mHidManager, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode);
}
// --------------------------------------------------------------------------------------------------------------------
// Destructor
MacInput::~MacInput()
{
for( auto d : mDevices )
delete d;
if( mHidManager )
IOHIDManagerClose( mHidManager, 0);
CFRelease( mHidManager);
}
// --------------------------------------------------------------------------------------------------------------------
// Updates the inputs, to be called before handling system messages
void MacInput::Update()
{
// Basis work
InputSystem::Update();
// device work
for( size_t a = 0; a < mMacDevices.size(); ++a )
{
MacDevice* d = mMacDevices[a];
d->StartUpdate();
}
// then run the update loop
while( CFRunLoopRunInMode( kCFRunLoopDefaultMode, 0, TRUE) == kCFRunLoopRunHandledSource )
/**/;
// Mice need postprocessing
for( auto d : mDevices )
{
if( auto m = dynamic_cast<MacMouse*> (d) )
m->EndUpdate();
// from now on everything generates signals
d->ResetFirstUpdateFlag();
}
}
// --------------------------------------------------------------------------------------------------------------------
// Notifies the input system that the application has lost/gained focus.
void MacInput::InternSetFocus( bool pHasFocus)
{
for( auto d : mMacDevices )
d->SetFocus( pHasFocus);
}
// --------------------------------------------------------------------------------------------------------------------
void MacInput::InternSetMouseGrab( bool enabled)
{
auto wr = MacHelper_GetWindowRect( mWindow);
Pos p;
if( enabled )
{
// if enabled, move system mouse to window center and start taking offsets from there
p.x = wr.w / 2; p.y = wr.h / 2;
} else
{
// if disabled, move mouse to last reported mouse position to achieve a smooth non-jumpy transition between the modes
p.x = std::max( 0.0f, std::min( wr.w, float( GetMouseX())));
p.y = std::max( 0.0f, std::min( wr.h, float( GetMouseY())));
}
Pos gp = MacHelper_WinToDisplay( mWindow, p);
MacHelper_SetMousePos( gp);
}
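// Worked example (sketch): for an 800x600 window, enabling the grab warps the
// system cursor to the window center (400,300) and further movement is taken as
// offsets from there; disabling it restores the cursor to the last mouse
// position reported by GetMouseX()/GetMouseY(), clamped to the window rect.
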
// --------------------------------------------------------------------------------------------------------------------
// Callback for each enumerated device
void MacInput::HandleNewDeviceCallback( void* context, IOReturn result, void* sender, IOHIDDeviceRef device)
{
SNIIS_UNUSED( sender);
if( result == kIOReturnSuccess )
{
auto inp = reinterpret_cast<MacInput*> (context);
inp->HandleNewDevice( device);
}
}
// --------------------------------------------------------------------------------------------------------------------
// Handle a newly detected device
void MacInput::HandleNewDevice( IOHIDDeviceRef device)
{
// get usage page and usage
int32_t usepage = 0, usage = 0;
auto ref = IOHIDDeviceGetProperty( device, CFSTR( kIOHIDPrimaryUsagePageKey));
if( ref )
CFNumberGetValue( (CFNumberRef) ref, kCFNumberSInt32Type, &usepage);
ref = IOHIDDeviceGetProperty( device, CFSTR( kIOHIDPrimaryUsageKey));
if( ref )
CFNumberGetValue( (CFNumberRef) ref, kCFNumberSInt32Type, &usage);
// get the names
auto cfstr = (CFStringRef) IOHIDDeviceGetProperty( device, CFSTR(kIOHIDProductKey));
auto cfstr2 = (CFStringRef) IOHIDDeviceGetProperty( device, CFSTR(kIOHIDManufacturerKey));
std::vector<char> tmp( 500, 0);
if( cfstr )
CFStringGetCString( cfstr, &tmp[0], tmp.size(), kCFStringEncodingUTF8);
else
sprintf( &tmp[0], "(null)");
size_t l = strlen( tmp.data());
tmp[l++] = '|';
if( cfstr2 )
CFStringGetCString( cfstr2, &tmp[l], tmp.size() - l, kCFStringEncodingUTF8);
else
sprintf( &tmp[l], "(null)");
Log( "HandleNewDevice \"%s\", page %d, usage %d", tmp.data(), usepage, usage);
if( usepage != kHIDPage_GenericDesktop )
return;
Log( "New device \"%s\" at page %d, usage %d", tmp.data(), usepage, usage);
  // Extra ugly: remember the last mouse, because the internal trackpad's second HID device
  // might have to be added to it
static MacMouse* lastMouse = nullptr;
switch( usage )
{
case kHIDUsage_GD_Mouse:
case kHIDUsage_GD_Pointer:
{
try {
bool isTrackpad = strncmp( tmp.data(), "Apple Internal Keyboard / Trackpad", 34) == 0;
if( isTrackpad && lastMouse && lastMouse->IsTrackpad() )
{
Log( "-> second HID of internal trackpad, adding to Mouse %d (id %d)", lastMouse->GetCount(), lastMouse->GetId());
lastMouse->AddDevice( device);
}
else
{
Log( "-> Mouse %d (id %d)", mNumMice, mDevices.size());
auto m = new MacMouse( this, mDevices.size(), device, isTrackpad);
InputSystemHelper::AddDevice( m);
mMacDevices.push_back( m);
if( isTrackpad )
lastMouse = m;
}
} catch( std::exception& e)
{
Log( "Exception: %s", e.what());
}
break;
}
case kHIDUsage_GD_Keyboard:
case kHIDUsage_GD_Keypad:
{
try {
Log( "-> Keyboard %d (id %d)", mNumKeyboards, mDevices.size());
auto k = new MacKeyboard( this, mDevices.size(), device);
InputSystemHelper::AddDevice( k);
mMacDevices.push_back( k);
} catch( std::exception& e)
{
Log( "Exception: %s", e.what());
}
break;
}
case kHIDUsage_GD_Joystick:
case kHIDUsage_GD_GamePad:
case kHIDUsage_GD_MultiAxisController:
{
try {
Log( "-> Controller %d (id %d)", mNumJoysticks, mDevices.size());
auto j = new MacJoystick( this, mDevices.size(), device);
InputSystemHelper::AddDevice( j);
mMacDevices.push_back( j);
} catch( std::exception& e)
{
Log( "Exception: %s", e.what());
}<|fim▁hole|>}
// --------------------------------------------------------------------------------------------------------------------
// --------------------------------------------------------------------------------------------------------------------
std::vector<MacControl> EnumerateDeviceControls( IOHIDDeviceRef devref)
{
// enumerate all controls of that device
std::vector<MacControl> controls;
  CFArrayRef elements = IOHIDDeviceCopyMatchingElements( devref, nullptr, kIOHIDOptionsTypeNone);
  // Copy* functions may return null; bail out with an empty control list in that case
  if( !elements )
    return controls;
  for( size_t a = 0, count = CFArrayGetCount( elements); a < count; ++a )
{
auto elmref = (IOHIDElementRef) CFArrayGetValueAtIndex( elements, CFIndex( a));
auto type = IOHIDElementGetType( elmref);
auto usepage = IOHIDElementGetUsagePage( elmref), usage = IOHIDElementGetUsage( elmref);
size_t prevSize = controls.size();
if( type == kIOHIDElementTypeInput_Axis || type == kIOHIDElementTypeInput_Misc )
{
auto min = IOHIDElementGetLogicalMin( elmref), max = IOHIDElementGetLogicalMax( elmref);
if( usage == kHIDUsage_GD_Hatswitch )
{
controls.push_back( MacControl{ devref, MacControl::Type_Hat, "", 0, usepage, usage, min, max });
controls.push_back( MacControl{ devref, MacControl::Type_Hat_Second, "", 0, usepage, usage, min, max });
}
else
{
controls.push_back( MacControl{ devref, MacControl::Type_Axis, "", 0, usepage, usage, min, max });
}
}
else if( type == kIOHIDElementTypeInput_Button )
{
controls.push_back( MacControl{ devref, MacControl::Type_Button, "", 0, usepage, usage, 0, 1 });
}
// add a few things afterwards if we got new controls
    for( size_t b = prevSize; b < controls.size(); ++b )
    {
      controls[b].mCookie = IOHIDElementGetCookie( elmref);
      auto name = IOHIDElementGetName( elmref);
      if( name )
      {
        std::vector<char> tmp( 500, 0);
        CFStringGetCString( name, &tmp[0], tmp.size(), kCFStringEncodingUTF8);
        controls[b].mName = &tmp[0];
}
}
}
  CFRelease( elements); // balances IOHIDDeviceCopyMatchingElements (CF Copy rule)
  return controls;
}
void InputElementValueChangeCallback( void* ctx, IOReturn res, void* sender, IOHIDValueRef val)
{
SNIIS_UNUSED( sender);
if( res != kIOReturnSuccess )
return;
  // Also ignore all events if we don't have focus.
if( !gInstance->HasFocus() )
return;
auto dev = static_cast<MacDevice*> (ctx);
auto elm = IOHIDValueGetElement( val);
  auto cookie = IOHIDElementGetCookie( elm);
  auto value = IOHIDValueGetIntegerValue( val);
  auto usepage = IOHIDElementGetUsagePage( elm), usage = IOHIDElementGetUsage( elm);
  auto hiddev = IOHIDElementGetDevice( elm);
  dev->HandleEvent( hiddev, cookie, usepage, usage, value);
}
std::pair<float, float> ConvertHatToAxes( long min, long max, long value)
{
std::pair<float, float> axes;
// Hats deliver a value that starts at North (x=0, y=-1) and goes a full circle clockwise within the value range
// We need to decompose this into two axes from roughly 8 standard cases with a little safety each
float v = float( value - min) / float( max - min);
if( v > 0.1f && v < 0.4f )
axes.first = 1.0f;
else if( v > 0.6f && v < 0.9f )
axes.first = -1.0f;
else
axes.first = 0.0f;
if( v < 0.15f || v > 0.85f )
axes.second = -1.0f;
else if( v > 0.35f && v < 0.65f )
axes.second = 1.0f;
else
axes.second = 0.0f;
return axes;
}
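// Worked example (assumption: an 8-way hat whose logical range spans the full
// circle, e.g. min=0, max=8, so each step is v=0.125):
//   value=0 -> v=0.000 -> ( 0,-1) North      value=2 -> v=0.250 -> ( 1, 0) East
//   value=3 -> v=0.375 -> ( 1, 1) SouthEast  value=4 -> v=0.500 -> ( 0, 1) South
//   value=6 -> v=0.750 -> (-1, 0) West       value=7 -> v=0.875 -> (-1,-1) NorthWest
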
// --------------------------------------------------------------------------------------------------------------------
// --------------------------------------------------------------------------------------------------------------------
// Initializes the input system with the given InitArgs. When successful, gInstance is not Null.
bool InputSystem::Initialize(void* pInitArg)
{
if( gInstance )
throw std::runtime_error( "Input already initialized");
try
{
gInstance = new MacInput( pInitArg);
} catch( std::exception& e)
{
// nope
if( gLogCallback )
gLogCallback( (std::string( "Exception while creating SNIIS instance: ") + e.what()).c_str());
gInstance = nullptr;
return false;
}
return true;
}
// --------------------------------------------------------------------------------------------------------------------
// Destroys the input system. After returning gInstance is Null again
void InputSystem::Shutdown()
{
delete gInstance;
gInstance = nullptr;
}
#endif // SNIIS_SYSTEM_MAC<|fim▁end|> | break;
}
} |
<|file_name|>InsightsResource.java<|end_file_name|><|fim▁begin|>package org.kalnee.trivor.insights.web.rest;
import com.codahale.metrics.annotation.Timed;
import org.kalnee.trivor.insights.domain.insights.Insights;
import org.kalnee.trivor.insights.service.InsightService;
import org.kalnee.trivor.nlp.domain.ChunkFrequency;
import org.kalnee.trivor.nlp.domain.PhrasalVerbUsage;
import org.kalnee.trivor.nlp.domain.SentenceFrequency;
import org.kalnee.trivor.nlp.domain.WordUsage;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping(value = "/api/insights")
public class InsightsResource {
private final InsightService insightService;
public InsightsResource(InsightService insightService) {
this.insightService = insightService;
}
@GetMapping
    public ResponseEntity<Page<Insights>> findByImdbId(@RequestParam("imdbId") String imdbId,
                                                       Pageable pageable) {
return ResponseEntity.ok().body(insightService.findByImdbId(imdbId, pageable));
}
@GetMapping("/summary")
@Timed
public ResponseEntity<Map<String, Object>> getInsightsSummary(@RequestParam("imdbId") String imdbId) {
return ResponseEntity.ok().body(insightService.getInsightsSummary(imdbId));
}
@GetMapping("/sentences/frequency")
@Timed
public ResponseEntity<List<SentenceFrequency>> findSentencesFrequency(@RequestParam("imdbId") String imdbId,
@RequestParam(value = "limit", required = false) Integer limit) {
return ResponseEntity.ok().body(
insightService.findSentencesFrequencyByImdbId(imdbId, limit)
);
}
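    // Example request (sketch; the imdbId value is a placeholder):
    // GET /api/insights/sentences/frequency?imdbId=tt0903747&limit=10
    // returns the `limit` most frequent sentences for that title.
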
@GetMapping("/chunks/frequency")
@Timed
public ResponseEntity<List<ChunkFrequency>> findChunksFrequency(@RequestParam("imdbId") String imdbId,
@RequestParam(value = "limit", required = false) Integer limit) {
return ResponseEntity.ok().body(
insightService.findChunksFrequencyByImdbId(imdbId, limit)
);
}
@GetMapping("/phrasal-verbs/usage")
@Timed
public ResponseEntity<List<PhrasalVerbUsage>> findPhrasalVerbsUsageByImdbId(@RequestParam("imdbId") String imdbId) {
return ResponseEntity.ok().body(insightService.findPhrasalVerbsUsageByImdbId(imdbId));
}
@GetMapping("/vocabulary/{vocabulary}/frequency")
@Timed
public ResponseEntity<Map<String, Integer>> findVocabularyFrequencyByImdbId(
@PathVariable("vocabulary") String vocabulary,
@RequestParam("imdbId") String imdbId,
@RequestParam(value = "limit", required = false) Integer limit) {
return ResponseEntity.ok().body(insightService.findVocabularyFrequencyByImdbId(vocabulary, imdbId, limit));
}
@GetMapping("/vocabulary/{vocabulary}/usage")
@Timed
public ResponseEntity<List<WordUsage>> findVocabularyUsageByImdbAndSeasonAndEpisode(
@PathVariable("vocabulary") String vocabulary,
@RequestParam("imdbId") String imdbId,
@RequestParam(value = "season", required = false) Integer season,
@RequestParam(value = "episode", required = false) Integer episode) {
return ResponseEntity.ok().body(
insightService.findVocabularyUsageByImdbAndSeasonAndEpisode(vocabulary, imdbId, season, episode)
);
}
@GetMapping("/{insight}/genres/{genre}")
@Timed
public ResponseEntity<List<Insights>> findInsightsByGenre(
@PathVariable("genre") String genre) {
return ResponseEntity.ok().body(insightService.findInsightsByInsightAndGenre(genre));
}
@GetMapping("/{insight}/keywords/{keyword}")
@Timed<|fim▁hole|> @PathVariable("keyword") String keyword) {
return ResponseEntity.ok().body(insightService.findInsightsByInsightAndKeyword(keyword));
}
}<|fim▁end|> | public ResponseEntity<List<Insights>> findInsightsByKeyword( |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>//<<<<<extract,9,3,11,3,FOO,fail
package main
import "fmt"
func main() {<|fim▁hole|> }
fmt.Println(i)
i++
}
fmt.Println("after loop")
}<|fim▁end|> | i := 0
for i <= 5 {
if i == 3 {
break |
<|file_name|>PersistentTable.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************
Developed and Copyright (c) by
Erik Unger
Contact: [email protected]
******************************************************************************/
#include "BaseLib/DataBases/PersistentTable.h"
#include "BaseLib/ErrorHandling.h"
namespace BaseLib {
namespace DataBases {
using namespace BaseLib::ErrorHandling;
using namespace BaseLib::Math;
BL_DEFINE_REFLECTION_CLASS(PersistentTable, AbstractTable)
{
}
PersistentTable::PersistentTable(AbstractTable* sourceTable)
: AbstractTable()
, rowCount(0)
, currentRow(-1)
, columns()
, firstRowFound(false)
{
if (checkArgumentNotNull(NAME_VALUE(sourceTable)) == true)
{
firstRowFound = sourceTable->moveToNextRow(); // We need this step for SQLite, because else we won't get column information
initColumns(sourceTable);
initRows(sourceTable);
}
}
void PersistentTable::initColumns(AbstractTable* sourceTable)
{
BL_ASSERT(sourceTable != NULL);
const int columnCount = sourceTable->getColumnCount();
columns.setCount(columnCount);
for (int i = 0; i < columnCount; ++i)
{
String name = sourceTable->getColumnName(i);
String baseType = sourceTable->getColumnBaseType(i);
String exactType = sourceTable->getColumnExactType(i);
String type = baseType.lowerCase();
if (type == "int" || type == "integer")
{
switch (sourceTable->getSizeInBytes(i))
{
case 1:
columns[i] = new IntColumn<int8>(name, baseType, exactType);
break;
case 2:
columns[i] = new IntColumn<int16>(name, baseType, exactType);
break;
case 3:
case 4:
columns[i] = new IntColumn<int32>(name, baseType, exactType);
break;
case 8:
columns[i] = new IntColumn<int64>(name, baseType, exactType);
break;
default:
BL_ASSERT_NEVER_REACHED
columns[i] = new NullColumn(name, baseType, exactType);
break;
}
}
else if (type == "bool" || type == "boolean")
{
columns[i] = new BoolColumn(name, baseType, exactType);
}
else if (type == "float" || type == "double" || type == "real")
{
switch (sourceTable->getSizeInBytes(i))
{
case 4:
columns[i] = new FloatColumn<float32>(name, baseType, exactType);
break;
case 8:
columns[i] = new FloatColumn<float64>(name, baseType, exactType);
break;
default:
BL_ASSERT_NEVER_REACHED
columns[i] = new NullColumn(name, baseType, exactType);
break;
}
}
else if (type == "text" || type == "string" || type == "char" || type == "character")
{
columns[i] = new TextColumn(name, baseType, exactType);
}
else if (type == "blob" || type == "binary")
{
columns[i] = new BlobColumn(name, baseType, exactType);
}
else if (type == "null" || type == "void")
{
columns[i] = new NullColumn(name, baseType, exactType);
}
else
{
BL_ASSERT_NEVER_REACHED
columns[i] = new NullColumn(name, baseType, exactType);
}
}
}
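// Dispatch examples (sketch, assuming the source reports SQLite-style types):
// an "INTEGER" column of 8 bytes becomes IntColumn<int64>, a "REAL" column of
// 4 bytes becomes FloatColumn<float32>, and "TEXT" becomes TextColumn.
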
void PersistentTable::initRows(AbstractTable* sourceTable)
{
BL_ASSERT(sourceTable != NULL);
const int columnCount = columns.getCount();
if (columnCount > 0 && firstRowFound == true)
{
do
{
++rowCount;
for (int i = 0; i < columnCount; ++i)
{
columns[i]->copyValueFrom(sourceTable, i);
}
}
while (sourceTable->moveToNextRow() == true);
}
}
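// Storage sketch: after initRows(), each column object holds one value per
// source row (rowCount values in total), and the getters below index into
// them via currentRow.
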
PersistentTable::~PersistentTable()
{
int columnCount = columns.getCount();
for (int i = 0; i < columnCount; ++i)
{
delete columns[i];
}
}
bool PersistentTable::isPersistent() const
{
return true;
}
bool PersistentTable::moveToNextRow()
{
if (currentRow < rowCount-1)
{
++currentRow;
return true;
}
else
{
return false;
}
}
bool PersistentTable::moveToPreviousRow()
{
if (currentRow > 0)
{
--currentRow;
return true;
}
else
{
return false;
}
}
int PersistentTable::getCurrentRow() const
{
return currentRow;
}
<|fim▁hole|> if (checkArgumentRange(NAME_VALUE(newCurrentRow), 0, getRowCount()-1) == true)
{
currentRow = newCurrentRow;
}
}
int PersistentTable::getRowCount() const
{
return rowCount;
}
int PersistentTable::getColumnCount() const
{
return columns.getCount();
}
String PersistentTable::getColumnName(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->name;
}
else
{
return Empty();
}
}
String PersistentTable::getColumnBaseType(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->baseType;
}
else
{
return Empty();
}
}
String PersistentTable::getColumnExactType(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->exactType;
}
else
{
return Empty();
}
}
int PersistentTable::getSizeInBytes(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getSizeInBytes(currentRow);
}
else
{
return 0;
}
}
const void* PersistentTable::getBinary(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getBinary(currentRow);
}
else
{
return NULL;
}
}
String PersistentTable::getString(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getString(currentRow);
}
else
{
return Empty();
}
}
bool PersistentTable::getBool(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getBool(currentRow);
}
else
{
return false;
}
}
int32 PersistentTable::getInt(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getInt(currentRow);
}
else
{
return -1;
}
}
int64 PersistentTable::getInt64(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getInt64(currentRow);
}
else
{
return -1;
}
}
float PersistentTable::getFloat(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getFloat(currentRow);
}
else
{
return NAN;
}
}
double PersistentTable::getDouble(int columnIndex) const
{
if (checkArgumentRange(NAME_VALUE(columnIndex), 0, getColumnCount()-1) == true)
{
return columns[columnIndex]->getDouble(currentRow);
}
else
{
return NAN;
}
}
} // namespace DataBases
} // namespace BaseLib<|fim▁end|> |
void PersistentTable::setCurrentRow(int newCurrentRow)
{
|
<|file_name|>files_in_patch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
<|fim▁hole|>import re
import sys
file_names = set()
def parse_input(input_file):
global file_names
while True:
line_buffer = input_file.readline()
if not line_buffer:
break
line_match = re.search(r"^\s*---\s+([^\s@]+)[\s@]+", line_buffer)
if not line_match:
line_match = re.search(r"^\s*\+\+\+\s+([^\s@]+)[\s@]+",
line_buffer)
if line_match:
curr_file_name = line_match.group(1)
# trim off 'a/' and 'b/' that you will normally see in git output
#
if len(curr_file_name) > 2 and curr_file_name[1] == '/' and (
curr_file_name[0] == 'a' or curr_file_name[0] == 'b'):
curr_file_name = curr_file_name[2:]
file_names.add(curr_file_name)
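# Example (sketch): for a unified-diff header pair
#   --- a/src/foo.c
#   +++ b/src/foo.c
# both regexes match, the "a/"/"b/" prefixes are stripped, and the set keeps
# a single "src/foo.c" entry.
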
def prune_unwanted_names():
global file_names
unwanted_names = set(['/dev/null'])
for curr_file_name in file_names:
# ignore files that end in '.orig' as long as non-.orig exists
line_match = re.search(r"^(.+)\.[oO][Rr][iI][gG]$", curr_file_name)
if line_match and line_match.group(1) in file_names:
unwanted_names.add(curr_file_name)
continue
file_names -= unwanted_names
def print_file_names():
for name in sorted(file_names):
print(name)
if __name__ == '__main__':
if len(sys.argv) == 1:
parse_input(sys.stdin)
else:
for curr_input_name in sys.argv[1:]:
try:
with open(curr_input_name, 'r') as curr_input_file:
parse_input(curr_input_file)
except IOError as e_str:
sys.stderr.write(
"Cannot open {}: {}\n".format(curr_input_name, e_str))
sys.exit(255)
prune_unwanted_names()
print_file_names()<|fim▁end|> |