repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses 15 values)
---|---|---|---|---|---
rmboggs/django | django/contrib/contenttypes/fields.py | 21 | 24531 |

from __future__ import unicode_literals

from collections import defaultdict

from django.contrib.contenttypes.models import ContentType
from django.core import checks
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, models, router, transaction
from django.db.models import DO_NOTHING, signals
from django.db.models.base import ModelBase, make_foreign_order_accessors
from django.db.models.fields.related import (
    ForeignObject, ForeignObjectRel, ReverseManyToOneDescriptor,
    lazy_related_operation,
)
from django.db.models.query_utils import PathInfo
from django.utils.encoding import python_2_unicode_compatible, smart_text
from django.utils.functional import cached_property

@python_2_unicode_compatible
class GenericForeignKey(object):
    """
    Provide a generic many-to-one relation through the ``content_type`` and
    ``object_id`` fields.

    This class also doubles as an accessor to the related object (similar to
    ForwardManyToOneDescriptor) by adding itself as a model attribute.
    """

    # Field flags
    auto_created = False
    concrete = False
    editable = False
    hidden = False

    is_relation = True
    many_to_many = False
    many_to_one = True
    one_to_many = False
    one_to_one = False
    related_model = None
    remote_field = None

    def __init__(self, ct_field='content_type', fk_field='object_id', for_concrete_model=True):
        self.ct_field = ct_field
        self.fk_field = fk_field
        self.for_concrete_model = for_concrete_model
        self.editable = False
        self.rel = None
        self.column = None

    def contribute_to_class(self, cls, name, **kwargs):
        self.name = name
        self.model = cls
        self.cache_attr = "_%s_cache" % name
        cls._meta.add_field(self, virtual=True)

        # Only run pre-initialization field assignment on non-abstract models
        if not cls._meta.abstract:
            signals.pre_init.connect(self.instance_pre_init, sender=cls)

        setattr(cls, name, self)

    def get_filter_kwargs_for_object(self, obj):
        """See corresponding method on Field"""
        return {
            self.fk_field: getattr(obj, self.fk_field),
            self.ct_field: getattr(obj, self.ct_field),
        }

    def get_forward_related_filter(self, obj):
        """See corresponding method on RelatedField"""
        return {
            self.fk_field: obj.pk,
            self.ct_field: ContentType.objects.get_for_model(obj).pk,
        }

    def __str__(self):
        model = self.model
        app = model._meta.app_label
        return '%s.%s.%s' % (app, model._meta.object_name, self.name)

    def check(self, **kwargs):
        errors = []
        errors.extend(self._check_field_name())
        errors.extend(self._check_object_id_field())
        errors.extend(self._check_content_type_field())
        return errors

    def _check_field_name(self):
        if self.name.endswith("_"):
            return [
                checks.Error(
                    'Field names must not end with an underscore.',
                    hint=None,
                    obj=self,
                    id='fields.E001',
                )
            ]
        else:
            return []

    def _check_object_id_field(self):
        try:
            self.model._meta.get_field(self.fk_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey object ID references the non-existent field '%s'." % self.fk_field,
                    hint=None,
                    obj=self,
                    id='contenttypes.E001',
                )
            ]
        else:
            return []

    def _check_content_type_field(self):
        """
        Check if field named `field_name` in model `model` exists and is a
        valid content_type field (is a ForeignKey to ContentType).
        """
        try:
            field = self.model._meta.get_field(self.ct_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey content type references the non-existent field '%s.%s'." % (
                        self.model._meta.object_name, self.ct_field
                    ),
                    hint=None,
                    obj=self,
                    id='contenttypes.E002',
                )
            ]
        else:
            if not isinstance(field, models.ForeignKey):
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey." % (
                            self.model._meta.object_name, self.ct_field
                        ),
                        hint=(
                            "GenericForeignKeys must use a ForeignKey to "
                            "'contenttypes.ContentType' as the 'content_type' field."
                        ),
                        obj=self,
                        id='contenttypes.E003',
                    )
                ]
            elif field.remote_field.model != ContentType:
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % (
                            self.model._meta.object_name, self.ct_field
                        ),
                        hint=(
                            "GenericForeignKeys must use a ForeignKey to "
                            "'contenttypes.ContentType' as the 'content_type' field."
                        ),
                        obj=self,
                        id='contenttypes.E004',
                    )
                ]
            else:
                return []

    def instance_pre_init(self, signal, sender, args, kwargs, **_kwargs):
        """
        Handle initializing an object with the generic FK instead of
        content_type and object_id fields.
        """
        if self.name in kwargs:
            value = kwargs.pop(self.name)
            if value is not None:
                kwargs[self.ct_field] = self.get_content_type(obj=value)
                kwargs[self.fk_field] = value._get_pk_val()
            else:
                kwargs[self.ct_field] = None
                kwargs[self.fk_field] = None

    def get_content_type(self, obj=None, id=None, using=None):
        if obj is not None:
            return ContentType.objects.db_manager(obj._state.db).get_for_model(
                obj, for_concrete_model=self.for_concrete_model)
        elif id is not None:
            return ContentType.objects.db_manager(using).get_for_id(id)
        else:
            # This should never happen. I love comments like this, don't you?
            raise Exception("Impossible arguments to GFK.get_content_type!")

    def get_prefetch_queryset(self, instances, queryset=None):
        if queryset is not None:
            raise ValueError("Custom queryset can't be used for this lookup.")

        # For efficiency, group the instances by content type and then do one
        # query per model
        fk_dict = defaultdict(set)
        # We need one instance for each group in order to get the right db:
        instance_dict = {}
        ct_attname = self.model._meta.get_field(self.ct_field).get_attname()
        for instance in instances:
            # We avoid looking for values if either ct_id or fkey value is None
            ct_id = getattr(instance, ct_attname)
            if ct_id is not None:
                fk_val = getattr(instance, self.fk_field)
                if fk_val is not None:
                    fk_dict[ct_id].add(fk_val)
                    instance_dict[ct_id] = instance

        ret_val = []
        for ct_id, fkeys in fk_dict.items():
            instance = instance_dict[ct_id]
            ct = self.get_content_type(id=ct_id, using=instance._state.db)
            ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))

        # For doing the join in Python, we have to match both the FK val and the
        # content type, so we use a callable that returns a (fk, class) pair.
        def gfk_key(obj):
            ct_id = getattr(obj, ct_attname)
            if ct_id is None:
                return None
            else:
                model = self.get_content_type(id=ct_id,
                                              using=obj._state.db).model_class()
                return (model._meta.pk.get_prep_value(getattr(obj, self.fk_field)),
                        model)

        return (ret_val,
                lambda obj: (obj._get_pk_val(), obj.__class__),
                gfk_key,
                True,
                self.cache_attr)
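
    # Illustrative note (not part of the original module): the 5-tuple
    # returned by get_prefetch_queryset() above is what feeds
    # ``prefetch_related``. For the hypothetical TaggedItem model sketched
    # after this class, e.g.:
    #   TaggedItem.objects.prefetch_related('content_object')
    # groups the rows by content type and issues one query per target model.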
    def is_cached(self, instance):
        return hasattr(instance, self.cache_attr)

    def __get__(self, instance, cls=None):
        if instance is None:
            return self

        try:
            return getattr(instance, self.cache_attr)
        except AttributeError:
            rel_obj = None

            # Make sure to use ContentType.objects.get_for_id() to ensure that
            # lookups are cached (see ticket #5570). This takes more code than
            # the naive ``getattr(instance, self.ct_field)``, but has better
            # performance when dealing with GFKs in loops and such.
            f = self.model._meta.get_field(self.ct_field)
            ct_id = getattr(instance, f.get_attname(), None)
            if ct_id is not None:
                ct = self.get_content_type(id=ct_id, using=instance._state.db)
                try:
                    rel_obj = ct.get_object_for_this_type(pk=getattr(instance, self.fk_field))
                except ObjectDoesNotExist:
                    pass
            setattr(instance, self.cache_attr, rel_obj)
            return rel_obj

    def __set__(self, instance, value):
        ct = None
        fk = None
        if value is not None:
            ct = self.get_content_type(obj=value)
            fk = value._get_pk_val()

        setattr(instance, self.ct_field, ct)
        setattr(instance, self.fk_field, fk)
        setattr(instance, self.cache_attr, value)
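
# --- Illustrative sketch (not part of the original module) ---
# Minimal wiring for the descriptor defined above, assuming a configured
# Django app. ``TaggedItem`` is a hypothetical model; the column names match
# the defaults of GenericForeignKey ('content_type', 'object_id').
class TaggedItem(models.Model):
    tag = models.SlugField()
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    # Assigning here (``__set__`` above) fills in both underlying columns;
    # reading it back (``__get__``) resolves them into a model instance.
    content_object = GenericForeignKey('content_type', 'object_id')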

class GenericRel(ForeignObjectRel):
    """
    Used by GenericRelation to store information about the relation.
    """

    def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None):
        super(GenericRel, self).__init__(
            field, to,
            related_name=related_query_name or '+',
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            on_delete=DO_NOTHING,
        )


class GenericRelation(ForeignObject):
    """
    Provide a reverse to a relation created by a GenericForeignKey.
    """

    # Field flags
    auto_created = False

    many_to_many = False
    many_to_one = False
    one_to_many = True
    one_to_one = False

    rel_class = GenericRel

    def __init__(self, to, object_id_field='object_id', content_type_field='content_type',
                 for_concrete_model=True, related_query_name=None, limit_choices_to=None, **kwargs):
        kwargs['rel'] = self.rel_class(
            self, to,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
        )

        kwargs['blank'] = True
        kwargs['on_delete'] = models.CASCADE
        kwargs['editable'] = False
        kwargs['serialize'] = False

        # This construct is somewhat of an abuse of ForeignObject. This field
        # represents a relation from pk to object_id field. But, this relation
        # isn't direct, the join is generated reversed along the foreign key.
        # So, the from_field is the object_id field, to_field is pk because of
        # the reverse join.
        super(GenericRelation, self).__init__(
            to, from_fields=[object_id_field], to_fields=[], **kwargs)

        self.object_id_field_name = object_id_field
        self.content_type_field_name = content_type_field
        self.for_concrete_model = for_concrete_model

    def check(self, **kwargs):
        errors = super(GenericRelation, self).check(**kwargs)
        errors.extend(self._check_generic_foreign_key_existence())
        return errors

    def _check_generic_foreign_key_existence(self):
        target = self.remote_field.model
        if isinstance(target, ModelBase):
            fields = target._meta.virtual_fields
            if any(isinstance(field, GenericForeignKey) and
                    field.ct_field == self.content_type_field_name and
                    field.fk_field == self.object_id_field_name
                    for field in fields):
                return []
            else:
                return [
                    checks.Error(
                        ("The GenericRelation defines a relation with the model "
                         "'%s.%s', but that model does not have a GenericForeignKey.") % (
                            target._meta.app_label, target._meta.object_name
                        ),
                        hint=None,
                        obj=self,
                        id='contenttypes.E004',
                    )
                ]
        else:
            return []

    def resolve_related_fields(self):
        self.to_fields = [self.model._meta.pk.name]
        return [(self.remote_field.model._meta.get_field(self.object_id_field_name), self.model._meta.pk)]

    def get_path_info(self):
        opts = self.remote_field.model._meta
        target = opts.pk
        return [PathInfo(self.model._meta, opts, (target,), self.remote_field, True, False)]

    def get_reverse_path_info(self):
        opts = self.model._meta
        from_opts = self.remote_field.model._meta
        return [PathInfo(from_opts, opts, (opts.pk,), self, not self.unique, False)]

    def get_choices_default(self):
        return super(GenericRelation, self).get_choices(include_blank=False)

    def value_to_string(self, obj):
        qs = getattr(obj, self.name).all()
        return smart_text([instance._get_pk_val() for instance in qs])

    def contribute_to_class(self, cls, name, **kwargs):
        kwargs['virtual_only'] = True
        super(GenericRelation, self).contribute_to_class(cls, name, **kwargs)
        self.model = cls
        setattr(cls, self.name, ReverseGenericManyToOneDescriptor(self.remote_field))

        # Add get_RELATED_order() and set_RELATED_order() methods if the model
        # on the other end of this relation is ordered with respect to this.
        def matching_gfk(field):
            return (
                isinstance(field, GenericForeignKey) and
                self.content_type_field_name == field.ct_field and
                self.object_id_field_name == field.fk_field
            )

        def make_generic_foreign_order_accessors(related_model, model):
            if matching_gfk(model._meta.order_with_respect_to):
                make_foreign_order_accessors(model, related_model)

        lazy_related_operation(make_generic_foreign_order_accessors, self.model, self.remote_field.model)

    def set_attributes_from_rel(self):
        pass

    def get_internal_type(self):
        return "ManyToManyField"

    def get_content_type(self):
        """
        Return the content type associated with this field's model.
        """
        return ContentType.objects.get_for_model(self.model,
                                                 for_concrete_model=self.for_concrete_model)

    def get_extra_restriction(self, where_class, alias, remote_alias):
        field = self.remote_field.model._meta.get_field(self.content_type_field_name)
        contenttype_pk = self.get_content_type().pk
        cond = where_class()
        lookup = field.get_lookup('exact')(field.get_col(remote_alias), contenttype_pk)
        cond.add(lookup, 'AND')
        return cond

    def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS):
        """
        Return all objects related to ``objs`` via this ``GenericRelation``.
        """
        return self.remote_field.model._base_manager.db_manager(using).filter(**{
            "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
                self.model, for_concrete_model=self.for_concrete_model).pk,
            "%s__in" % self.object_id_field_name: [obj.pk for obj in objs]
        })
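
# --- Illustrative sketch (not part of the original module) ---
# The reverse side of the hypothetical TaggedItem sketch above: declaring a
# GenericRelation on a target model exposes a manager over every TaggedItem
# whose content_type/object_id pair points back at it.
class Bookmark(models.Model):
    url = models.URLField()
    tags = GenericRelation(TaggedItem, related_query_name='bookmark')

# The relation also participates in reverse query filters, e.g.:
#   TaggedItem.objects.filter(bookmark__url__contains='example.com')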

class ReverseGenericManyToOneDescriptor(ReverseManyToOneDescriptor):
    """
    Accessor to the related objects manager on the one-to-many relation created
    by GenericRelation.

    In the example::

        class Post(Model):
            comments = GenericRelation(Comment)

    ``post.comments`` is a ReverseGenericManyToOneDescriptor instance.
    """

    @cached_property
    def related_manager_cls(self):
        return create_generic_related_manager(
            self.rel.model._default_manager.__class__,
            self.rel,
        )


def create_generic_related_manager(superclass, rel):
    """
    Factory function to create a manager that subclasses another manager
    (generally the default manager of a given model) and adds behaviors
    specific to generic relations.
    """

    class GenericRelatedObjectManager(superclass):
        def __init__(self, instance=None):
            super(GenericRelatedObjectManager, self).__init__()

            self.instance = instance
            self.model = rel.model

            content_type = ContentType.objects.db_manager(instance._state.db).get_for_model(
                instance, for_concrete_model=rel.field.for_concrete_model)
            self.content_type = content_type
            self.content_type_field_name = rel.field.content_type_field_name
            self.object_id_field_name = rel.field.object_id_field_name
            self.prefetch_cache_name = rel.field.attname
            self.pk_val = instance._get_pk_val()

            self.core_filters = {
                '%s__pk' % self.content_type_field_name: content_type.id,
                self.object_id_field_name: self.pk_val,
            }

        def __call__(self, **kwargs):
            # We use **kwargs rather than a kwarg argument to enforce the
            # `manager='manager_name'` syntax.
            manager = getattr(self.model, kwargs.pop('manager'))
            manager_class = create_generic_related_manager(manager.__class__, rel)
            return manager_class(instance=self.instance)
        do_not_call_in_templates = True

        def __str__(self):
            return repr(self)

        def get_queryset(self):
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
                db = self._db or router.db_for_read(self.model, instance=self.instance)
                return super(GenericRelatedObjectManager, self).get_queryset().using(db).filter(**self.core_filters)

        def get_prefetch_queryset(self, instances, queryset=None):
            if queryset is None:
                queryset = super(GenericRelatedObjectManager, self).get_queryset()

            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)

            query = {
                '%s__pk' % self.content_type_field_name: self.content_type.id,
                '%s__in' % self.object_id_field_name: set(obj._get_pk_val() for obj in instances)
            }

            # We (possibly) need to convert object IDs to the type of the
            # instances' PK in order to match up instances:
            object_id_converter = instances[0]._meta.pk.to_python
            return (queryset.filter(**query),
                    lambda relobj: object_id_converter(getattr(relobj, self.object_id_field_name)),
                    lambda obj: obj._get_pk_val(),
                    False,
                    self.prefetch_cache_name)

        def add(self, *objs, **kwargs):
            bulk = kwargs.pop('bulk', True)
            db = router.db_for_write(self.model, instance=self.instance)

            def check_and_update_obj(obj):
                if not isinstance(obj, self.model):
                    raise TypeError("'%s' instance expected, got %r" % (
                        self.model._meta.object_name, obj
                    ))
                setattr(obj, self.content_type_field_name, self.content_type)
                setattr(obj, self.object_id_field_name, self.pk_val)

            if bulk:
                pks = []
                for obj in objs:
                    if obj._state.adding or obj._state.db != db:
                        raise ValueError(
                            "%r instance isn't saved. Use bulk=False or save "
                            "the object first." % obj
                        )
                    check_and_update_obj(obj)
                    pks.append(obj.pk)

                self.model._base_manager.using(db).filter(pk__in=pks).update(**{
                    self.content_type_field_name: self.content_type,
                    self.object_id_field_name: self.pk_val,
                })
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in objs:
                        check_and_update_obj(obj)
                        obj.save()
        add.alters_data = True

        def remove(self, *objs, **kwargs):
            if not objs:
                return
            bulk = kwargs.pop('bulk', True)
            self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk)
        remove.alters_data = True

        def clear(self, **kwargs):
            bulk = kwargs.pop('bulk', True)
            self._clear(self, bulk)
        clear.alters_data = True

        def _clear(self, queryset, bulk):
            db = router.db_for_write(self.model, instance=self.instance)
            queryset = queryset.using(db)
            if bulk:
                # `QuerySet.delete()` creates its own atomic block which
                # contains the `pre_delete` and `post_delete` signal handlers.
                queryset.delete()
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in queryset:
                        obj.delete()
        _clear.alters_data = True

        def set(self, objs, **kwargs):
            # Force evaluation of `objs` in case it's a queryset whose value
            # could be affected by `manager.clear()`. Refs #19816.
            objs = tuple(objs)

            bulk = kwargs.pop('bulk', True)
            clear = kwargs.pop('clear', False)
            db = router.db_for_write(self.model, instance=self.instance)

            with transaction.atomic(using=db, savepoint=False):
                if clear:
                    self.clear()
                    self.add(*objs, bulk=bulk)
                else:
                    old_objs = set(self.using(db).all())
                    new_objs = []
                    for obj in objs:
                        if obj in old_objs:
                            old_objs.remove(obj)
                        else:
                            new_objs.append(obj)

                    self.remove(*old_objs)
                    self.add(*new_objs, bulk=bulk)
        set.alters_data = True

        def create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).create(**kwargs)
        create.alters_data = True

        def get_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).get_or_create(**kwargs)
        get_or_create.alters_data = True

        def update_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).update_or_create(**kwargs)
        update_or_create.alters_data = True

    return GenericRelatedObjectManager
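
# --- Illustrative usage note (not part of the original module) ---
# With the hypothetical Bookmark/TaggedItem sketches above, ``bookmark.tags``
# is an instance of the GenericRelatedObjectManager this factory builds:
#
#   bookmark = Bookmark.objects.create(url='https://example.com/')
#   bookmark.tags.create(tag='python')      # content_type/object_id filled in
#   item = TaggedItem(tag='django')
#   bookmark.tags.add(item, bulk=False)     # bulk=False saves the new row
#   bookmark.tags.set([item])               # removes (deletes) the others
#   bookmark.tags(manager='objects').all()  # the `manager='name'` syntax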
| bsd-3-clause |
gameduell/duell | bin/mac/python2.7.9/lib/python2.7/plat-mac/macerrors.py | 40 | 116661 |

# -coding=latin1-
from warnings import warnpy3k
warnpy3k("In 3.x, the macerrors module is removed.", stacklevel=2)
svTempDisable = -32768 #svTempDisable
svDisabled = -32640 #Reserve range -32640 to -32768 for Apple temp disables.
fontNotOutlineErr = -32615 #bitmap font passed to routine that does outlines only
kURL68kNotSupportedError = -30788 #kURL68kNotSupportedError
kURLAccessNotAvailableError = -30787 #kURLAccessNotAvailableError
kURLInvalidConfigurationError = -30786 #kURLInvalidConfigurationError
kURLExtensionFailureError = -30785 #kURLExtensionFailureError
kURLFileEmptyError = -30783 #kURLFileEmptyError
kURLInvalidCallError = -30781 #kURLInvalidCallError
kURLUnsettablePropertyError = -30780 #kURLUnsettablePropertyError
kURLPropertyBufferTooSmallError = -30779 #kURLPropertyBufferTooSmallError
kURLUnknownPropertyError = -30778 #kURLUnknownPropertyError
kURLPropertyNotYetKnownError = -30777 #kURLPropertyNotYetKnownError
kURLAuthenticationError = -30776 #kURLAuthenticationError
kURLServerBusyError = -30775 #kURLServerBusyError
kURLUnsupportedSchemeError = -30774 #kURLUnsupportedSchemeError
kURLInvalidURLError = -30773 #kURLInvalidURLError
kURLDestinationExistsError = -30772 #kURLDestinationExistsError
kURLProgressAlreadyDisplayedError = -30771 #kURLProgressAlreadyDisplayedError
kURLInvalidURLReferenceError = -30770 #kURLInvalidURLReferenceError
controlHandleInvalidErr = -30599 #controlHandleInvalidErr
controlInvalidDataVersionErr = -30597 #controlInvalidDataVersionErr
errItemNotControl = -30596 #errItemNotControl
errCantEmbedRoot = -30595 #errCantEmbedRoot
errCantEmbedIntoSelf = -30594 #errCantEmbedIntoSelf
errWindowRegionCodeInvalid = -30593 #errWindowRegionCodeInvalid
errControlHiddenOrDisabled = -30592 #errControlHiddenOrDisabled
errDataSizeMismatch = -30591 #errDataSizeMismatch
errControlIsNotEmbedder = -30590 #errControlIsNotEmbedder
errControlsAlreadyExist = -30589 #errControlsAlreadyExist
errInvalidPartCode = -30588 #errInvalidPartCode
errRootAlreadyExists = -30587 #errRootAlreadyExists
errNoRootControl = -30586 #errNoRootControl
errCouldntSetFocus = -30585 #errCouldntSetFocus
errUnknownControl = -30584 #errUnknownControl
errWindowDoesntSupportFocus = -30583 #errWindowDoesntSupportFocus
errControlDoesntSupportFocus = -30582 #errControlDoesntSupportFocus
errDataNotSupported = -30581 #errDataNotSupported
errMessageNotSupported = -30580 #errMessageNotSupported
themeMonitorDepthNotSupportedErr = -30567 #theme not supported at monitor depth
themeScriptFontNotFoundErr = -30566 #theme font requested for uninstalled script system
themeBadCursorIndexErr = -30565 #themeBadCursorIndexErr
themeHasNoAccentsErr = -30564 #themeHasNoAccentsErr
themeBadTextColorErr = -30563 #themeBadTextColorErr
themeProcessNotRegisteredErr = -30562 #themeProcessNotRegisteredErr
themeProcessRegisteredErr = -30561 #themeProcessRegisteredErr
themeInvalidBrushErr = -30560 #pattern index invalid
qtvrUninitialized = -30555 #qtvrUninitialized
qtvrLibraryLoadErr = -30554 #qtvrLibraryLoadErr
streamingNodeNotReadyErr = -30553 #streamingNodeNotReadyErr
noMemoryNodeFailedInitialize = -30552 #noMemoryNodeFailedInitialize
invalidHotSpotIDErr = -30551 #invalidHotSpotIDErr
invalidNodeFormatErr = -30550 #invalidNodeFormatErr
limitReachedErr = -30549 #limitReachedErr
settingNotSupportedByNodeErr = -30548 #settingNotSupportedByNodeErr
propertyNotSupportedByNodeErr = -30547 #propertyNotSupportedByNodeErr
timeNotInViewErr = -30546 #timeNotInViewErr
invalidViewStateErr = -30545 #invalidViewStateErr
invalidNodeIDErr = -30544 #invalidNodeIDErr
selectorNotSupportedByNodeErr = -30543 #selectorNotSupportedByNodeErr
callNotSupportedByNodeErr = -30542 #callNotSupportedByNodeErr
constraintReachedErr = -30541 #constraintReachedErr
notAQTVRMovieErr = -30540 #notAQTVRMovieErr
kFBCnoSuchHit = -30532 #kFBCnoSuchHit
kFBCbadSearchSession = -30531 #kFBCbadSearchSession
kFBCindexDiskIOFailed = -30530 #kFBCindexDiskIOFailed
kFBCsummarizationCanceled = -30529 #kFBCsummarizationCanceled
kFBCbadIndexFileVersion = -30528 #kFBCbadIndexFileVersion
kFBCanalysisNotAvailable = -30527 #kFBCanalysisNotAvailable
kFBCillegalSessionChange = -30526 #tried to add/remove vols to a session
kFBCsomeFilesNotIndexed = -30525 #kFBCsomeFilesNotIndexed
kFBCsearchFailed = -30524 #kFBCsearchFailed
kFBCindexNotAvailable = -30523 #kFBCindexNotAvailable
kFBCindexFileDestroyed = -30522 #kFBCindexFileDestroyed
kFBCaccessCanceled = -30521 #kFBCaccessCanceled
kFBCindexingCanceled = -30520 #kFBCindexingCanceled
kFBCnoSearchSession = -30519 #kFBCnoSearchSession
kFBCindexNotFound = -30518 #kFBCindexNotFound
kFBCflushFailed = -30517 #kFBCflushFailed
kFBCaddDocFailed = -30516 #kFBCaddDocFailed
kFBCaccessorStoreFailed = -30515 #kFBCaccessorStoreFailed
kFBCindexCreationFailed = -30514 #couldn't create index
kFBCmergingFailed = -30513 #couldn't merge index files
kFBCtokenizationFailed = -30512 #couldn't read from document or query
kFBCmoveFailed = -30511 #V-Twin exception caught
kFBCdeletionFailed = -30510 #V-Twin exception caught
kFBCcommitFailed = -30509 #V-Twin exception caught
kFBCindexingFailed = -30508 #V-Twin exception caught
kFBCvalidationFailed = -30507 #V-Twin exception caught
kFBCcompactionFailed = -30506 #V-Twin exception caught
kFBCbadIndexFile = -30505 #bad FSSpec, or bad data in file
kFBCfileNotIndexed = -30504 #kFBCfileNotIndexed
kFBCbadParam = -30503 #kFBCbadParam
kFBCallocFailed = -30502 #probably low memory
kFBCnoIndexesFound = -30501 #kFBCnoIndexesFound
kFBCvTwinExceptionErr = -30500 #no telling what it was
kDSpStereoContextErr = -30450 #kDSpStereoContextErr
kDSpInternalErr = -30449 #kDSpInternalErr
kDSpConfirmSwitchWarning = -30448 #kDSpConfirmSwitchWarning
kDSpFrameRateNotReadyErr = -30447 #kDSpFrameRateNotReadyErr
kDSpContextNotFoundErr = -30446 #kDSpContextNotFoundErr
kDSpContextNotReservedErr = -30445 #kDSpContextNotReservedErr
kDSpContextAlreadyReservedErr = -30444 #kDSpContextAlreadyReservedErr
kDSpInvalidAttributesErr = -30443 #kDSpInvalidAttributesErr
kDSpInvalidContextErr = -30442 #kDSpInvalidContextErr
kDSpSystemSWTooOldErr = -30441 #kDSpSystemSWTooOldErr
kDSpNotInitializedErr = -30440 #kDSpNotInitializedErr
kISpListBusyErr = -30429 #kISpListBusyErr
kISpDeviceActiveErr = -30428 #kISpDeviceActiveErr
kISpSystemActiveErr = -30427 #kISpSystemActiveErr
kISpDeviceInactiveErr = -30426 #kISpDeviceInactiveErr
kISpSystemInactiveErr = -30425 #kISpSystemInactiveErr
kISpElementNotInListErr = -30424 #kISpElementNotInListErr
kISpElementInListErr = -30423 #kISpElementInListErr
kISpBufferToSmallErr = -30422 #kISpBufferToSmallErr
kISpSystemListErr = -30421 #kISpSystemListErr
kISpInternalErr = -30420 #kISpInternalErr
kNSpJoinFailedErr = -30399 #kNSpJoinFailedErr
kNSpCantBlockErr = -30398 #kNSpCantBlockErr
kNSpMessageTooBigErr = -30397 #kNSpMessageTooBigErr
kNSpSendFailedErr = -30396 #kNSpSendFailedErr
kNSpConnectFailedErr = -30395 #kNSpConnectFailedErr
kNSpGameTerminatedErr = -30394 #kNSpGameTerminatedErr
kNSpTimeoutErr = -30393 #kNSpTimeoutErr
kNSpInvalidProtocolListErr = -30392 #kNSpInvalidProtocolListErr
kNSpInvalidProtocolRefErr = -30391 #kNSpInvalidProtocolRefErr
kNSpInvalidDefinitionErr = -30390 #kNSpInvalidDefinitionErr
kNSpAddPlayerFailedErr = -30389 #kNSpAddPlayerFailedErr
kNSpCreateGroupFailedErr = -30388 #kNSpCreateGroupFailedErr
kNSpNoHostVolunteersErr = -30387 #kNSpNoHostVolunteersErr
kNSpNoGroupsErr = -30386 #kNSpNoGroupsErr
kNSpNoPlayersErr = -30385 #kNSpNoPlayersErr
kNSpInvalidGroupIDErr = -30384 #kNSpInvalidGroupIDErr
kNSpInvalidPlayerIDErr = -30383 #kNSpInvalidPlayerIDErr
kNSpNameRequiredErr = -30382 #kNSpNameRequiredErr
kNSpFeatureNotImplementedErr = -30381 #kNSpFeatureNotImplementedErr
kNSpAddressInUseErr = -30380 #kNSpAddressInUseErr
kNSpRemovePlayerFailedErr = -30379 #kNSpRemovePlayerFailedErr
kNSpFreeQExhaustedErr = -30378 #kNSpFreeQExhaustedErr
kNSpInvalidAddressErr = -30377 #kNSpInvalidAddressErr
kNSpNotAdvertisingErr = -30376 #kNSpNotAdvertisingErr
kNSpAlreadyAdvertisingErr = -30374 #kNSpAlreadyAdvertisingErr
kNSpMemAllocationErr = -30373 #kNSpMemAllocationErr
kNSpOTVersionTooOldErr = -30371 #kNSpOTVersionTooOldErr
kNSpOTNotPresentErr = -30370 #kNSpOTNotPresentErr
kNSpInvalidParameterErr = -30369 #kNSpInvalidParameterErr
kNSpInvalidGameRefErr = -30367 #kNSpInvalidGameRefErr
kNSpProtocolNotAvailableErr = -30366 #kNSpProtocolNotAvailableErr
kNSpHostFailedErr = -30365 #kNSpHostFailedErr
kNSpPipeFullErr = -30364 #kNSpPipeFullErr
kNSpTopologyNotSupportedErr = -30362 #kNSpTopologyNotSupportedErr
kNSpAlreadyInitializedErr = -30361 #kNSpAlreadyInitializedErr
kNSpInitializationFailedErr = -30360 #kNSpInitializationFailedErr
kSSpScaleToZeroErr = -30344 #kSSpScaleToZeroErr
kSSpParallelUpVectorErr = -30343 #kSSpParallelUpVectorErr
kSSpCantInstallErr = -30342 #kSSpCantInstallErr
kSSpVersionErr = -30341 #kSSpVersionErr
kSSpInternalErr = -30340 #kSSpInternalErr
kALMInternalErr = -30049 #kALMInternalErr
kALMGroupNotFoundErr = -30048 #kALMGroupNotFoundErr
kALMNoSuchModuleErr = -30047 #kALMNoSuchModuleErr
kALMModuleCommunicationErr = -30046 #kALMModuleCommunicationErr
kALMDuplicateModuleErr = -30045 #kALMDuplicateModuleErr
kALMInstallationErr = -30044 #kALMInstallationErr
kALMDeferSwitchErr = -30043 #kALMDeferSwitchErr
kALMRebootFlagsLevelErr = -30042 #kALMRebootFlagsLevelErr
kLocalesDefaultDisplayStatus = -30029 #Requested display locale unavailable, used default
kLocalesTableFormatErr = -30002 #kLocalesTableFormatErr
kLocalesBufferTooSmallErr = -30001 #kLocalesBufferTooSmallErr
kFNSNameNotFoundErr = -29589 #The name with the requested parameters was not found
kFNSBadFlattenedSizeErr = -29587 #flattened size didn't match input or was too small
kFNSInsufficientDataErr = -29586 #insufficient data for the operation
kFNSMismatchErr = -29585 #reference didn't match or wasn't found in profile
kFNSDuplicateReferenceErr = -29584 #the ref. being added is already in the profile
kFNSBadProfileVersionErr = -29583 #profile version is out of known range
kFNSInvalidProfileErr = -29582 #profile is NULL or otherwise bad
kFNSBadReferenceVersionErr = -29581 #ref. version is out of known range
kFNSInvalidReferenceErr = -29580 #ref. was NULL or otherwise bad
kCollateInvalidCollationRef = -29507 #kCollateInvalidCollationRef
kCollateBufferTooSmall = -29506 #kCollateBufferTooSmall
kCollateInvalidChar = -29505 #kCollateInvalidChar
kCollatePatternNotFoundErr = -29504 #kCollatePatternNotFoundErr
kCollateUnicodeConvertFailedErr = -29503 #kCollateUnicodeConvertFailedErr
kCollateMissingUnicodeTableErr = -29502 #kCollateMissingUnicodeTableErr
kCollateInvalidOptions = -29501 #kCollateInvalidOptions
kCollateAttributesNotFoundErr = -29500 #kCollateAttributesNotFoundErr
kMPInvalidIDErr = -29299 #kMPInvalidIDErr
kMPInsufficientResourcesErr = -29298 #kMPInsufficientResourcesErr
kMPTaskAbortedErr = -29297 #kMPTaskAbortedErr
kMPTimeoutErr = -29296 #kMPTimeoutErr
kMPDeletedErr = -29295 #kMPDeletedErr
kMPBlueBlockingErr = -29293 #kMPBlueBlockingErr
kMPTaskStoppedErr = -29292 #A convention used with MPThrowException.
kMPTaskBlockedErr = -29291 #kMPTaskBlockedErr
kMPTaskCreatedErr = -29290 #kMPTaskCreatedErr
kMPProcessTerminatedErr = -29289 #kMPProcessTerminatedErr
kMPProcessCreatedErr = -29288 #kMPProcessCreatedErr
kMPPrivilegedErr = -29276 #kMPPrivilegedErr
kMPIterationEndErr = -29275 #kMPIterationEndErr
kUCTextBreakLocatorMissingType = -25341 #Unicode text break error
kUCOutputBufferTooSmall = -25340 #Output buffer too small for Unicode string result
errKCCreateChainFailed = -25318 #errKCCreateChainFailed
errKCDataNotModifiable = -25317 #errKCDataNotModifiable
errKCDataNotAvailable = -25316 #errKCDataNotAvailable
errKCInteractionRequired = -25315 #errKCInteractionRequired
errKCNoPolicyModule = -25314 #errKCNoPolicyModule
errKCNoCertificateModule = -25313 #errKCNoCertificateModule
errKCNoStorageModule = -25312 #errKCNoStorageModule
errKCKeySizeNotAllowed = -25311 #errKCKeySizeNotAllowed
errKCWrongKCVersion = -25310 #errKCWrongKCVersion
errKCReadOnlyAttr = -25309 #errKCReadOnlyAttr
errKCInteractionNotAllowed = -25308 #errKCInteractionNotAllowed
errKCNoDefaultKeychain = -25307 #errKCNoDefaultKeychain
errKCNoSuchClass = -25306 #errKCNoSuchClass
errKCInvalidSearchRef = -25305 #errKCInvalidSearchRef
errKCInvalidItemRef = -25304 #errKCInvalidItemRef
errKCNoSuchAttr = -25303 #errKCNoSuchAttr
errKCDataTooLarge = -25302 #errKCDataTooLarge
errKCBufferTooSmall = -25301 #errKCBufferTooSmall
errKCItemNotFound = -25300 #errKCItemNotFound
errKCDuplicateItem = -25299 #errKCDuplicateItem
errKCInvalidCallback = -25298 #errKCInvalidCallback
errKCDuplicateCallback = -25297 #errKCDuplicateCallback
errKCDuplicateKeychain = -25296 #errKCDuplicateKeychain
errKCInvalidKeychain = -25295 #errKCInvalidKeychain
errKCNoSuchKeychain = -25294 #errKCNoSuchKeychain
errKCAuthFailed = -25293 #errKCAuthFailed
errKCReadOnly = -25292 #errKCReadOnly
errKCNotAvailable = -25291 #errKCNotAvailable
printerStatusOpCodeNotSupportedErr = -25280 #printerStatusOpCodeNotSupportedErr
kTXNOutsideOfFrameErr = -22018 #kTXNOutsideOfFrameErr
kTXNOutsideOfLineErr = -22017 #kTXNOutsideOfLineErr
kTXNATSUIIsNotInstalledErr = -22016 #kTXNATSUIIsNotInstalledErr
kTXNDataTypeNotAllowedErr = -22015 #kTXNDataTypeNotAllowedErr
kTXNCopyNotAllowedInEchoModeErr = -22014 #kTXNCopyNotAllowedInEchoModeErr
kTXNCannotTurnTSMOffWhenUsingUnicodeErr = -22013 #kTXNCannotTurnTSMOffWhenUsingUnicodeErr
kTXNAlreadyInitializedErr = -22012 #kTXNAlreadyInitializedErr
kTXNInvalidRunIndex = -22011 #kTXNInvalidRunIndex
kTXNSomeOrAllTagsInvalidForRunErr = -22010 #kTXNSomeOrAllTagsInvalidForRunErr
kTXNAttributeTagInvalidForRunErr = -22009 #dataValue is set to this per invalid tag
kTXNNoMatchErr = -22008 #kTXNNoMatchErr
kTXNRunIndexOutofBoundsErr = -22007 #kTXNRunIndexOutofBoundsErr
kTXNCannotSetAutoIndentErr = -22006 #kTXNCannotSetAutoIndentErr
kTXNBadDefaultFileTypeWarning = -22005 #kTXNBadDefaultFileTypeWarning
kTXNUserCanceledOperationErr = -22004 #kTXNUserCanceledOperationErr
kTXNIllegalToCrossDataBoundariesErr = -22003 #kTXNIllegalToCrossDataBoundariesErr
kTXNInvalidFrameIDErr = -22002 #kTXNInvalidFrameIDErr
kTXNCannotAddFrameErr = -22001 #kTXNCannotAddFrameErr
kTXNEndIterationErr = -22000 #kTXNEndIterationErr
invalidIndexErr = -20002 #The recordIndex parameter is not valid.
recordDataTooBigErr = -20001 #The record data is bigger than buffer size (1024 bytes).
unknownInsertModeErr = -20000 #There is no such insert mode.
kModemScriptMissing = -14002 #kModemScriptMissing
kModemPreferencesMissing = -14001 #kModemPreferencesMissing
kModemOutOfMemory = -14000 #kModemOutOfMemory
kHIDBaseError = -13950 #kHIDBaseError
kHIDNullStateErr = -13949 #kHIDNullStateErr
kHIDBufferTooSmallErr = -13948 #kHIDBufferTooSmallErr
kHIDValueOutOfRangeErr = -13947 #kHIDValueOutOfRangeErr
kHIDUsageNotFoundErr = -13946 #kHIDUsageNotFoundErr
kHIDNotValueArrayErr = -13945 #kHIDNotValueArrayErr
kHIDInvalidPreparsedDataErr = -13944 #kHIDInvalidPreparsedDataErr
kHIDIncompatibleReportErr = -13943 #kHIDIncompatibleReportErr
kHIDBadLogPhysValuesErr = -13942 #kHIDBadLogPhysValuesErr
kHIDInvalidReportTypeErr = -13941 #kHIDInvalidReportTypeErr
kHIDInvalidReportLengthErr = -13940 #kHIDInvalidReportLengthErr
kHIDNullPointerErr = -13939 #kHIDNullPointerErr
kHIDBadParameterErr = -13938 #kHIDBadParameterErr
kHIDNotEnoughMemoryErr = -13937 #kHIDNotEnoughMemoryErr
kHIDEndOfDescriptorErr = -13936 #kHIDEndOfDescriptorErr
kHIDUsagePageZeroErr = -13935 #kHIDUsagePageZeroErr
kHIDBadLogicalMinimumErr = -13934 #kHIDBadLogicalMinimumErr
kHIDBadLogicalMaximumErr = -13933 #kHIDBadLogicalMaximumErr
kHIDInvertedLogicalRangeErr = -13932 #kHIDInvertedLogicalRangeErr
kHIDInvertedPhysicalRangeErr = -13931 #kHIDInvertedPhysicalRangeErr
kHIDUnmatchedUsageRangeErr = -13930 #kHIDUnmatchedUsageRangeErr
kHIDInvertedUsageRangeErr = -13929 #kHIDInvertedUsageRangeErr
kHIDUnmatchedStringRangeErr = -13928 #kHIDUnmatchedStringRangeErr
kHIDUnmatchedDesignatorRangeErr = -13927 #kHIDUnmatchedDesignatorRangeErr
kHIDReportSizeZeroErr = -13926 #kHIDReportSizeZeroErr
kHIDReportCountZeroErr = -13925 #kHIDReportCountZeroErr
kHIDReportIDZeroErr = -13924 #kHIDReportIDZeroErr
kHIDInvalidRangePageErr = -13923 #kHIDInvalidRangePageErr
kHIDDeviceNotReady = -13910 #The device is still initializing, try again later
kHIDVersionIncompatibleErr = -13909 #kHIDVersionIncompatibleErr
debuggingNoMatchErr = -13887 #debugging component or option not found at this index
debuggingNoCallbackErr = -13886 #debugging component has no callback
debuggingInvalidNameErr = -13885 #componentName or optionName is invalid (NULL)
debuggingInvalidOptionErr = -13884 #optionSelectorNum is not registered
debuggingInvalidSignatureErr = -13883 #componentSignature not registered
debuggingDuplicateOptionErr = -13882 #optionSelectorNum already registered
debuggingDuplicateSignatureErr = -13881 #componentSignature already registered
debuggingExecutionContextErr = -13880 #routine cannot be called at this time
kBridgeSoftwareRunningCantSleep = -13038 #kBridgeSoftwareRunningCantSleep
kNoSuchPowerSource = -13020 #kNoSuchPowerSource
kProcessorTempRoutineRequiresMPLib2 = -13014 #kProcessorTempRoutineRequiresMPLib2
kCantReportProcessorTemperatureErr = -13013 #kCantReportProcessorTemperatureErr
kPowerMgtRequestDenied = -13010 #kPowerMgtRequestDenied
kPowerMgtMessageNotHandled = -13009 #kPowerMgtMessageNotHandled
kPowerHandlerNotFoundForProcErr = -13008 #kPowerHandlerNotFoundForProcErr
kPowerHandlerNotFoundForDeviceErr = -13007 #kPowerHandlerNotFoundForDeviceErr
kPowerHandlerExistsForDeviceErr = -13006 #kPowerHandlerExistsForDeviceErr
pmRecvEndErr = -13005 #during receive, pmgr did not finish hs configured for this connection
pmRecvStartErr = -13004 #during receive, pmgr did not start hs
pmSendEndErr = -13003 #during send, pmgr did not finish hs
pmSendStartErr = -13002 #during send, pmgr did not start hs
pmReplyTOErr = -13001 #Timed out waiting for reply
pmBusyErr = -13000 #Power Mgr never ready to start handshake
pictureDataErr = -11005 #the picture data was invalid
colorsRequestedErr = -11004 #the number of colors requested was illegal
cantLoadPickMethodErr = -11003 #unable to load the custom pick proc
pictInfoVerbErr = -11002 #the passed verb was invalid
pictInfoIDErr = -11001 #the internal consistency check for the PictInfoID is wrong
pictInfoVersionErr = -11000 #wrong version of the PictInfo structure
errTaskNotFound = -10780 #no task with that task id exists
telNotEnoughdspBW = -10116 #not enough real-time for allocation
telBadSampleRate = -10115 #incompatible sample rate
telBadSWErr = -10114 #Software not installed properly
telDetAlreadyOn = -10113 #detection is already turned on
telAutoAnsNotOn = -10112 #autoAnswer in not turned on
telValidateFailed = -10111 #telValidate failed
telBadProcID = -10110 #invalid procID
telDeviceNotFound = -10109 #device not found
telBadCodeResource = -10108 #code resource not found
telInitFailed = -10107 #initialization failed
telNoCommFolder = -10106 #Communications/Extensions not found
telUnknownErr = -10103 #unable to set config
telNoSuchTool = -10102 #unable to find tool with name specified
telBadFunction = -10091 #bad msgCode specified
telPBErr = -10090 #parameter block error, bad format
telCANotDeflectable = -10082 #CA not "deflectable"
telCANotRejectable = -10081 #CA not "rejectable"
telCANotAcceptable = -10080 #CA not "acceptable"
telTermNotOpen = -10072 #terminal not opened via TELOpenTerm
telStillNeeded = -10071 #terminal driver still needed by someone else
telAlreadyOpen = -10070 #terminal already open
telNoCallbackRef = -10064 #no call back reference was specified, but is required
telDisplayModeNotSupp = -10063 #display mode not supported by tool
telBadDisplayMode = -10062 #bad display mode specified
telFwdTypeNotSupp = -10061 #forward type not supported by tool
telDNTypeNotSupp = -10060 #DN type not supported by tool
telBadRate = -10059 #bad rate specified
telBadBearerType = -10058 #bad bearerType specified
telBadSelect = -10057 #unable to select or deselect DN
telBadParkID = -10056 #bad park id specified
telBadPickupGroupID = -10055 #bad pickup group ID specified
telBadFwdType = -10054 #bad fwdType specified
telBadFeatureID = -10053 #bad feature ID specified
telBadIntercomID = -10052 #bad intercom ID specified
telBadPageID = -10051 #bad page ID specified
telBadDNType = -10050 #DN type invalid
telConfLimitExceeded = -10047 #attempt to exceed switch conference limits
telCBErr = -10046 #call back feature not set previously
telTransferRej = -10045 #transfer request rejected
telTransferErr = -10044 #transfer not prepared
telConfRej = -10043 #conference request was rejected
telConfErr = -10042 #conference was not prepared
telConfNoLimit = -10041 #no limit was specified but required
telConfLimitErr = -10040 #limit specified is too high for this configuration
telFeatNotSupp = -10033 #feature program call not supported by this tool
telFeatActive = -10032 #feature already active
telFeatNotAvail = -10031 #feature subscribed but not available
telFeatNotSub = -10030 #feature not subscribed
errAEPropertiesClash = -10025 #illegal combination of properties settings for Set Data, make new, or duplicate
errAECantPutThatThere = -10024 #in make new, duplicate, etc. class can't be an element of container
errAENotAnEnumMember = -10023 #enumerated value in SetData is not allowed for this property
telIntExtNotSupp = -10022 #internal external type not supported by this tool
telBadIntExt = -10021 #bad internal external error
telStateNotSupp = -10020 #device state not supported by tool
telBadStateErr = -10019 #bad device state specified
telIndexNotSupp = -10018 #index not supported by this tool
telBadIndex = -10017 #bad index specified
telAPattNotSupp = -10016 #alerting pattern not supported by tool
telBadAPattErr = -10015 #bad alerting pattern specified
telVTypeNotSupp = -10014 #volume type not supported by this tool
telBadVTypeErr = -10013 #bad volume type error
telBadLevelErr = -10012 #bad volume level setting
telHTypeNotSupp = -10011 #hook type not supported by this tool
telBadHTypeErr = -10010 #bad hook type specified
errAECantSupplyType = -10009 #errAECantSupplyType
telNoOpenErr = -10008 #unable to open terminal
telNoMemErr = -10007 #no memory to allocate handle
errOSACantAssign = -10006 #Signaled when an object cannot be set in a container.
telBadProcErr = -10005 #bad msgProc specified
telBadHandErr = -10004 #bad handle specified
OSAIllegalAssign = -10003 #Signaled when an object can never be set in a container
telBadDNErr = -10002 #TELDNHandle not found or invalid
telBadTermErr = -10001 #invalid TELHandle or handle not found
errAEEventFailed = -10000 #errAEEventFailed
cannotMoveAttachedController = -9999 #cannotMoveAttachedController
controllerHasFixedHeight = -9998 #controllerHasFixedHeight
cannotSetWidthOfAttachedController = -9997 #cannotSetWidthOfAttachedController
controllerBoundsNotExact = -9996 #controllerBoundsNotExact
editingNotAllowed = -9995 #editingNotAllowed
badControllerHeight = -9994 #badControllerHeight
deviceCantMeetRequest = -9408 #deviceCantMeetRequest
seqGrabInfoNotAvailable = -9407 #seqGrabInfoNotAvailable
badSGChannel = -9406 #badSGChannel
couldntGetRequiredComponent = -9405 #couldntGetRequiredComponent
notEnoughDiskSpaceToGrab = -9404 #notEnoughDiskSpaceToGrab
notEnoughMemoryToGrab = -9403 #notEnoughMemoryToGrab
cantDoThatInCurrentMode = -9402 #cantDoThatInCurrentMode
grabTimeComplete = -9401 #grabTimeComplete
noDeviceForChannel = -9400 #noDeviceForChannel
kNoCardBusCISErr = -9109 #No valid CIS exists for this CardBus card
kNotZVCapableErr = -9108 #This socket does not support Zoomed Video
kCardPowerOffErr = -9107 #Power to the card has been turned off
kAttemptDupCardEntryErr = -9106 #The Enabler was asked to create a duplicate card entry
kAlreadySavedStateErr = -9105 #The state has been saved on previous call
kTooManyIOWindowsErr = -9104 #device requested more than one I/O window
kNotReadyErr = -9103 #PC Card failed to go ready
kClientRequestDenied = -9102 #CS Clients should return this code in order to
kNoCompatibleNameErr = -9101 #There is no compatible driver name for this device
kNoEnablerForCardErr = -9100 #No Enablers were found that can support the card
kNoCardEnablersFoundErr = -9099 #No Enablers were found
kUnsupportedCardErr = -9098 #Card not supported by generic enabler
kNoClientTableErr = -9097 #The client table has not been initialized yet
kNoMoreInterruptSlotsErr = -9096 #All internal Interrupt slots are in use
kNoMoreTimerClientsErr = -9095 #All timer callbacks are in use
kNoIOWindowRequestedErr = -9094 #Request I/O window before calling configuration
kBadCustomIFIDErr = -9093 #Custom interface ID is invalid
kBadTupleDataErr = -9092 #Data in tuple is invalid
kInvalidCSClientErr = -9091 #Card Services ClientID is not registered
kUnsupportedVsErr = -9090 #Unsupported Voltage Sense
kInvalidDeviceNumber = -9089 #kInvalidDeviceNumber
kPostCardEventErr = -9088 #_PCCSLPostCardEvent failed and dropped an event
kCantConfigureCardErr = -9087 #kCantConfigureCardErr
kPassCallToChainErr = -9086 #kPassCallToChainErr
kCardBusCardErr = -9085 #kCardBusCardErr
k16BitCardErr = -9084 #k16BitCardErr
kBadDeviceErr = -9083 #kBadDeviceErr
kBadLinkErr = -9082 #kBadLinkErr
kInvalidRegEntryErr = -9081 #kInvalidRegEntryErr
kNoCardSevicesSocketsErr = -9080 #kNoCardSevicesSocketsErr
kOutOfResourceErr = -9079 #Card Services has exhausted the resource
kNoMoreItemsErr = -9078 #there are no more of the requested item
kInUseErr = -9077 #requested resource is being used by a client
kConfigurationLockedErr = -9076 #a configuration has already been locked
kWriteProtectedErr = -9075 #media is write-protected
kBusyErr = -9074 #unable to process request at this time - try later
kUnsupportedModeErr = -9073 #mode is not supported
kUnsupportedFunctionErr = -9072 #function is not supported by this implementation
kNoCardErr = -9071 #no PC card in the socket
kGeneralFailureErr = -9070 #an undefined error has occurred
kWriteFailureErr = -9069 #unable to complete write request
kReadFailureErr = -9068 #unable to complete read request
kBadSpeedErr = -9067 #specified speed is unavailable
kBadCISErr = -9066 #CIS on card is invalid
kBadHandleErr = -9065 #clientHandle is invalid
kBadArgsErr = -9064 #values in argument packet are invalid
kBadArgLengthErr = -9063 #ArgLength argument is invalid
kBadWindowErr = -9062 #specified window is invalid
kBadVppErr = -9061 #specified Vpp1 or Vpp2 power level index is invalid
kBadVccErr = -9060 #specified Vcc power level index is invalid
kBadTypeErr = -9059 #specified window or interface type is invalid
kBadSocketErr = -9058 #specified logical or physical socket number is invalid
kBadSizeErr = -9057 #specified size is invalid
kBadPageErr = -9056 #specified page is invalid
kBadOffsetErr = -9055 #specified PC card memory array offset is invalid
kBadIRQErr = -9054 #specified IRQ level is invalid
kBadEDCErr = -9053 #specified EDC generator specified is invalid
kBadBaseErr = -9052 #specified base system memory address is invalid
kBadAttributeErr = -9051 #specified attributes field value is invalid
kBadAdapterErr = -9050 #invalid adapter number
codecOffscreenFailedPleaseRetryErr = -8992 #codecOffscreenFailedPleaseRetryErr
lockPortBitsWrongGDeviceErr = -8991 #lockPortBitsWrongGDeviceErr
directXObjectAlreadyExists = -8990 #directXObjectAlreadyExists
codecDroppedFrameErr = -8989 #returned from ImageCodecDrawBand
codecOffscreenFailedErr = -8988 #codecOffscreenFailedErr
codecNeedAccessKeyErr = -8987 #codec needs password in order to decompress
codecParameterDialogConfirm = -8986 #codecParameterDialogConfirm
lockPortBitsSurfaceLostErr = -8985 #lockPortBitsSurfaceLostErr
lockPortBitsBadPortErr = -8984 #lockPortBitsBadPortErr
lockPortBitsWindowClippedErr = -8983 #lockPortBitsWindowClippedErr
lockPortBitsWindowResizedErr = -8982 #lockPortBitsWindowResizedErr
lockPortBitsWindowMovedErr = -8981 #lockPortBitsWindowMovedErr
lockPortBitsBadSurfaceErr = -8980 #lockPortBitsBadSurfaceErr
codecNeedToFlushChainErr = -8979 #codecNeedToFlushChainErr
codecDisabledErr = -8978 #codec disabled itself -- pass codecFlagReenable to reset
codecNoMemoryPleaseWaitErr = -8977 #codecNoMemoryPleaseWaitErr
codecNothingToBlitErr = -8976 #codecNothingToBlitErr
codecCantQueueErr = -8975 #codecCantQueueErr
codecCantWhenErr = -8974 #codecCantWhenErr
codecOpenErr = -8973 #codecOpenErr
codecConditionErr = -8972 #codecConditionErr
codecExtensionNotFoundErr = -8971 #codecExtensionNotFoundErr
codecDataVersErr = -8970 #codecDataVersErr
codecBadDataErr = -8969 #codecBadDataErr
codecWouldOffscreenErr = -8968 #codecWouldOffscreenErr
codecAbortErr = -8967 #codecAbortErr
codecSpoolErr = -8966 #codecSpoolErr
codecImageBufErr = -8965 #codecImageBufErr
codecScreenBufErr = -8964 #codecScreenBufErr
codecSizeErr = -8963 #codecSizeErr
codecUnimpErr = -8962 #codecUnimpErr
noCodecErr = -8961 #noCodecErr
codecErr = -8960 #codecErr
kIllegalClockValueErr = -8852 #kIllegalClockValueErr
kUTCOverflowErr = -8851 #kUTCOverflowErr
kUTCUnderflowErr = -8850 #kUTCUnderflowErr
kATSULastErr = -8809 #The last ATSUI error code.
kATSULineBreakInWord = -8808 #This is not an error code but is returned by ATSUBreakLine to
kATSUCoordinateOverflowErr = -8807 #Used to indicate the coordinates provided to an ATSUI routine caused
kATSUNoFontScalerAvailableErr = -8806 #Used when no font scaler is available for the font passed
kATSUNoFontCmapAvailableErr = -8805 #Used when no CMAP table can be accessed or synthesized for the
kATSULowLevelErr = -8804 #Used when an error was encountered within the low level ATS
kATSUQuickDrawTextErr = -8803 #Used when QuickDraw Text encounters an error rendering or measuring
kATSUNoStyleRunsAssignedErr = -8802 #Used when an attempt was made to measure, highlight or draw
kATSUNotSetErr = -8801 #Used when the client attempts to retrieve an attribute,
kATSUInvalidCacheErr = -8800 #Used when an attempt was made to read in style data
kATSUInvalidAttributeTagErr = -8799 #Used when an attempt was made to use a tag value that
kATSUInvalidAttributeSizeErr = -8798 #Used when an attempt was made to use an attribute with a
kATSUInvalidAttributeValueErr = -8797 #Used when an attempt was made to use an attribute with
kATSUInvalidFontErr = -8796 #Used when an attempt was made to use an invalid font ID.
kATSUNoCorrespondingFontErr = -8795 #This value is returned by font ID conversion
kATSUFontsNotMatched = -8794 #This value is returned by ATSUMatchFontsToText()
kATSUFontsMatched = -8793 #This is not an error code but is returned by
kATSUInvalidTextRangeErr = -8792 #An attempt was made to extract information
kATSUInvalidStyleErr = -8791 #An attempt was made to use a ATSUStyle which
kATSUInvalidTextLayoutErr = -8790 #An attempt was made to use a ATSUTextLayout
kTECOutputBufferFullStatus = -8785 #output buffer has no room for conversion of next input text element (partial conversion)
kTECNeedFlushStatus = -8784 #kTECNeedFlushStatus
kTECUsedFallbacksStatus = -8783 #kTECUsedFallbacksStatus
kTECItemUnavailableErr = -8771 #item (e.g. name) not available for specified region (& encoding if relevant)
kTECGlobalsUnavailableErr = -8770 #globals have already been deallocated (premature TERM)
unicodeChecksumErr = -8769 #unicodeChecksumErr
unicodeNoTableErr = -8768 #unicodeNoTableErr
unicodeVariantErr = -8767 #unicodeVariantErr
unicodeFallbacksErr = -8766 #unicodeFallbacksErr
unicodePartConvertErr = -8765 #unicodePartConvertErr
unicodeBufErr = -8764 #unicodeBufErr
unicodeCharErr = -8763 #unicodeCharErr
unicodeElementErr = -8762 #unicodeElementErr
unicodeNotFoundErr = -8761 #unicodeNotFoundErr
unicodeTableFormatErr = -8760 #unicodeTableFormatErr
unicodeDirectionErr = -8759 #unicodeDirectionErr
unicodeContextualErr = -8758 #unicodeContextualErr
unicodeTextEncodingDataErr = -8757 #unicodeTextEncodingDataErr
kTECDirectionErr = -8756 #direction stack overflow, etc.
kTECIncompleteElementErr = -8755 #text element may be incomplete or is too long for internal buffers
kTECUnmappableElementErr = -8754 #kTECUnmappableElementErr
kTECPartialCharErr = -8753 #input buffer ends in the middle of a multibyte character, conversion stopped
kTECBadTextRunErr = -8752 #kTECBadTextRunErr
kTECArrayFullErr = -8751 #supplied name buffer or TextRun, TextEncoding, or UnicodeMapping array is too small
kTECBufferBelowMinimumSizeErr = -8750 #output buffer too small to allow processing of first input text element
kTECNoConversionPathErr = -8749 #kTECNoConversionPathErr
kTECCorruptConverterErr = -8748 #invalid converter object reference
kTECTableFormatErr = -8747 #kTECTableFormatErr
kTECTableChecksumErr = -8746 #kTECTableChecksumErr
kTECMissingTableErr = -8745 #kTECMissingTableErr
kTextUndefinedElementErr = -8740 #text conversion errors
kTextMalformedInputErr = -8739 #in DBCS, for example, high byte followed by invalid low byte
kTextUnsupportedEncodingErr = -8738 #specified encoding not supported for this operation
kRANotEnabled = -7139 #kRANotEnabled
kRACallBackFailed = -7138 #kRACallBackFailed
kRADuplicateIPAddr = -7137 #kRADuplicateIPAddr
kRANCPRejectedbyPeer = -7136 #kRANCPRejectedbyPeer
kRAExtAuthenticationFailed = -7135 #kRAExtAuthenticationFailed
kRAATalkInactive = -7134 #kRAATalkInactive
kRAPeerNotResponding = -7133 #kRAPeerNotResponding
kRAPPPPeerDisconnected = -7132 #kRAPPPPeerDisconnected
kRAPPPUserDisconnected = -7131 #kRAPPPUserDisconnected
kRAPPPNegotiationFailed = -7130 #kRAPPPNegotiationFailed
kRAPPPAuthenticationFailed = -7129 #kRAPPPAuthenticationFailed
kRAPPPProtocolRejected = -7128 #kRAPPPProtocolRejected
dcmBufferOverflowErr = -7127 #data is larger than buffer size
kRANotPrimaryInterface = -7126 #when IPCP is not primary TCP/IP intf.
kRATCPIPNotConfigured = -7125 #TCP/IP not configured, could be loaded
kRATCPIPInactive = -7124 #TCP/IP inactive, cannot be loaded
kRARemoteAccessNotReady = -7123 #kRARemoteAccessNotReady
kRAInitOpenTransportFailed = -7122 #kRAInitOpenTransportFailed
dcmProtectedErr = -7121 #need keyword to use dictionary
kRAUserPwdEntryRequired = -7120 #kRAUserPwdEntryRequired
kRAUserPwdChangeRequired = -7119 #kRAUserPwdChangeRequired
dcmBadFindMethodErr = -7118 #no such find method supported
kRAInvalidSerialProtocol = -7117 #kRAInvalidSerialProtocol
kRAInvalidPortState = -7116 #kRAInvalidPortState
dcmBadKeyErr = -7115 #bad key information
kRAPortBusy = -7114 #kRAPortBusy
kRAInstallationDamaged = -7113 #kRAInstallationDamaged
dcmBadFieldTypeErr = -7112 #no such field type supported
dcmBadFieldInfoErr = -7111 #incomplete information
dcmNecessaryFieldErr = -7110 #lack required/identify field
dcmDupRecordErr = -7109 #same record already exist
kRANotConnected = -7108 #kRANotConnected
dcmBlockFullErr = -7107 #dictionary block full
kRAMissingResources = -7106 #kRAMissingResources
dcmDictionaryBusyErr = -7105 #dictionary is busy
dcmDictionaryNotOpenErr = -7104 #dictionary not opened
dcmPermissionErr = -7103 #invalid permission
dcmBadDictionaryErr = -7102 #invalid dictionary
dcmNotDictionaryErr = -7101 #not dictionary
kRAInvalidParameter = -7100 #kRAInvalidParameter
laEngineNotFoundErr = -7000 #can't find the engine
laPropertyErr = -6999 #Error in properties
kUSBUnknownDeviceErr = -6998 #device ref not recognised
laPropertyIsReadOnlyErr = -6997 #the property is read only
laPropertyUnknownErr = -6996 #the property is unknown to this environment
laPropertyValueErr = -6995 #Invalid property value
laDictionaryTooManyErr = -6994 #too many dictionaries
laDictionaryUnknownErr = -6993 #can't use this dictionary with this environment
laDictionaryNotOpenedErr = -6992 #the dictionary is not opened
laTextOverFlowErr = -6991 #text is too long
laFailAnalysisErr = -6990 #analysis failed
laNoMoreMorphemeErr = -6989 #nothing to read
laInvalidPathErr = -6988 #path is not correct
kUSBNotHandled = -6987 #Notification was not handled (same as NotFound)
laEnvironmentNotFoundErr = -6986 #can't fint the specified environment
laEnvironmentBusyErr = -6985 #specified environment is used
laTooSmallBufferErr = -6984 #output buffer is too small to store any result
kUSBFlagsError = -6983 #Unused flags not zeroed
kUSBAbortedError = -6982 #Pipe aborted
kUSBNoBandwidthError = -6981 #Not enough bandwidth available
kUSBPipeIdleError = -6980 #Pipe is Idle, it will not accept transactions
kUSBPipeStalledError = -6979 #Pipe has stalled, error needs to be cleared
kUSBUnknownInterfaceErr = -6978 #Interface ref not recognised
kUSBDeviceBusy = -6977 #Device is already being configured
kUSBDevicePowerProblem = -6976 #Device has a power problem
kUSBInvalidBuffer = -6975 #bad buffer, usually nil
kUSBDeviceSuspended = -6974 #Device is suspended
kUSBDeviceNotSuspended = -6973 #device is not suspended for resume
kUSBDeviceDisconnected = -6972 #Disconnected during suspend or reset
kUSBTimedOut = -6971 #Transaction timed out.
kUSBQueueAborted = -6970 #Pipe zero stall cleared.
kUSBPortDisabled = -6969 #The port you are attached to is disabled, use USBDeviceReset.
kUSBBadDispatchTable = -6950 #Improper driver dispatch table
kUSBUnknownNotification = -6949 #Notification type not defined
kUSBQueueFull = -6948 #Internal queue maxxed
kUSBLinkErr = -6916 #kUSBLinkErr
kUSBCRCErr = -6915 #Pipe stall, bad CRC
kUSBBitstufErr = -6914 #Pipe stall, bitstuffing
kUSBDataToggleErr = -6913 #Pipe stall, Bad data toggle
kUSBEndpointStallErr = -6912 #Device didn't understand
kUSBNotRespondingErr = -6911 #Pipe stall, No device, device hung
kUSBPIDCheckErr = -6910 #Pipe stall, PID CRC error
kUSBWrongPIDErr = -6909 #Pipe stall, Bad or wrong PID
kUSBOverRunErr = -6908 #Packet too large or more data than buffer
kUSBUnderRunErr = -6907 #Less data than buffer
kUSBRes1Err = -6906 #kUSBRes1Err
kUSBRes2Err = -6905 #kUSBRes2Err
kUSBBufOvrRunErr = -6904 #Host hardware failure on data in, PCI busy?
kUSBBufUnderRunErr = -6903 #Host hardware failure on data out, PCI busy?
kUSBNotSent1Err = -6902 #Transaction not sent
kUSBNotSent2Err = -6901 #Transaction not sent
kDMFoundErr = -6232 #Did not proceed because we found an item
kDMMainDisplayCannotMoveErr = -6231 #Trying to move main display (or a display mirrored to it)
kDMDisplayAlreadyInstalledErr = -6230 #Attempt to add an already installed display.
kDMDisplayNotFoundErr = -6229 #Could not find item (will someday remove).
kDMDriverNotDisplayMgrAwareErr = -6228 #Video Driver does not support display manager.
kDMSWNotInitializedErr = -6227 #Required software not initialized (eg windowmanager or display mgr).
kSysSWTooOld = -6226 #Missing critical pieces of System Software.
kDMMirroringNotOn = -6225 #Returned by all calls that need mirroring to be on to do their thing.
kDMCantBlock = -6224 #Mirroring is already on, can't Block now (call DMUnMirror() first).
kDMMirroringBlocked = -6223 #DMBlockMirroring() has been called.
kDMWrongNumberOfDisplays = -6222 #Can only handle 2 displays for now.
kDMMirroringOnAlready = -6221 #Returned by all calls that need mirroring to be off to do their thing.
kDMGenErr = -6220 #Unexpected Error
kQTSSUnknownErr = -6150 #kQTSSUnknownErr
collectionVersionErr = -5753 #collectionVersionErr
collectionIndexRangeErr = -5752 #collectionIndexRangeErr
collectionItemNotFoundErr = -5751 #collectionItemNotFoundErr
collectionItemLockedErr = -5750 #collectionItemLockedErr
kNavMissingKindStringErr = -5699 #kNavMissingKindStringErr
kNavInvalidCustomControlMessageErr = -5698 #kNavInvalidCustomControlMessageErr
kNavCustomControlMessageFailedErr = -5697 #kNavCustomControlMessageFailedErr
kNavInvalidSystemConfigErr = -5696 #kNavInvalidSystemConfigErr
kNavWrongDialogClassErr = -5695 #kNavWrongDialogClassErr
kNavWrongDialogStateErr = -5694 #kNavWrongDialogStateErr
dialogNoTimeoutErr = -5640 #dialogNoTimeoutErr
menuInvalidErr = -5623 #menu is invalid
menuItemNotFoundErr = -5622 #specified menu item wasn't found
menuUsesSystemDefErr = -5621 #GetMenuDefinition failed because the menu uses the system MDEF
menuNotFoundErr = -5620 #specified menu or menu ID wasn't found
windowWrongStateErr = -5615 #window is not in a state that is valid for the current action
windowManagerInternalErr = -5614 #something really weird happened inside the window manager
windowAttributesConflictErr = -5613 #passed some attributes that are mutually exclusive
windowAttributeImmutableErr = -5612 #tried to change attributes which can't be changed
errWindowDoesNotFitOnscreen = -5611 #ConstrainWindowToScreen could not make the window fit onscreen
errWindowNotFound = -5610 #returned from FindWindowOfClass
errFloatingWindowsNotInitialized = -5609 #called HideFloatingWindows or ShowFloatingWindows without calling InitFloatingWindows
errWindowsAlreadyInitialized = -5608 #tried to call InitFloatingWindows twice, or called InitWindows and then floating windows
errUserWantsToDragWindow = -5607 #if returned from TrackWindowProxyDrag, you should call DragWindow on the window
errCorruptWindowDescription = -5606 #tried to load a corrupt window description (size or version fields incorrect)
errUnrecognizedWindowClass = -5605 #tried to create a window with a bad WindowClass
errWindowPropertyNotFound = -5604 #tried to get a nonexistent property
errInvalidWindowProperty = -5603 #tried to access a property tag with private creator
errWindowDoesNotHaveProxy = -5602 #tried to do something requiring a proxy to a window which doesn't have a proxy
errUnsupportedWindowAttributesForClass = -5601 #tried to create a window with WindowAttributes not supported by the WindowClass
errInvalidWindowPtr = -5600 #tried to pass a bad WindowRef argument
gestaltLocationErr = -5553 #gestalt function ptr wasn't in sysheap
gestaltDupSelectorErr = -5552 #tried to add an entry that already existed
gestaltUndefSelectorErr = -5551 #undefined selector was passed to Gestalt
gestaltUnknownErr = -5550 #value returned if Gestalt doesn't know the answer
envVersTooBig = -5502 #Version bigger than call can handle
envBadVers = -5501 #Version non-positive
envNotPresent = -5500 #returned by glue.
qtsAddressBusyErr = -5421 #qtsAddressBusyErr
qtsConnectionFailedErr = -5420 #qtsConnectionFailedErr
qtsTimeoutErr = -5408 #qtsTimeoutErr
qtsUnknownValueErr = -5407 #qtsUnknownValueErr
qtsTooMuchDataErr = -5406 #qtsTooMuchDataErr
qtsUnsupportedFeatureErr = -5405 #qtsUnsupportedFeatureErr
qtsUnsupportedRateErr = -5404 #qtsUnsupportedRateErr
qtsUnsupportedDataTypeErr = -5403 #qtsUnsupportedDataTypeErr
qtsBadDataErr = -5402 #something is wrong with the data
qtsBadStateErr = -5401 #qtsBadStateErr
qtsBadSelectorErr = -5400 #qtsBadSelectorErr
errIAEndOfTextRun = -5388 #errIAEndOfTextRun
errIATextExtractionErr = -5387 #errIATextExtractionErr
errIAInvalidDocument = -5386 #errIAInvalidDocument
errIACanceled = -5385 #errIACanceled
errIABufferTooSmall = -5384 #errIABufferTooSmall
errIANoMoreItems = -5383 #errIANoMoreItems
errIAParamErr = -5382 #errIAParamErr
errIAAllocationErr = -5381 #errIAAllocationErr
errIAUnknownErr = -5380 #errIAUnknownErr
hrURLNotHandledErr = -5363 #hrURLNotHandledErr
hrUnableToResizeHandleErr = -5362 #hrUnableToResizeHandleErr
hrMiscellaneousExceptionErr = -5361 #hrMiscellaneousExceptionErr
hrHTMLRenderingLibNotInstalledErr = -5360 #hrHTMLRenderingLibNotInstalledErr
errCannotUndo = -5253 #errCannotUndo
errNonContiuousAttribute = -5252 #errNonContiuousAttribute
errUnknownElement = -5251 #errUnknownElement
errReadOnlyText = -5250 #errReadOnlyText
errEmptyScrap = -5249 #errEmptyScrap
errNoHiliteText = -5248 #errNoHiliteText
errOffsetNotOnElementBounday = -5247 #errOffsetNotOnElementBounday
errInvalidRange = -5246 #errInvalidRange
errIteratorReachedEnd = -5245 #errIteratorReachedEnd
errEngineNotFound = -5244 #errEngineNotFound
errAlreadyInImagingMode = -5243 #errAlreadyInImagingMode
errNotInImagingMode = -5242 #errNotInImagingMode
errMarginWilllNotFit = -5241 #errMarginWilllNotFit
errUnknownAttributeTag = -5240 #errUnknownAttributeTag
afpSameNodeErr = -5063 #An Attempt was made to connect to a file server running on the same machine
afpAlreadyMounted = -5062 #The volume is already mounted
afpCantMountMoreSrvre = -5061 #The Maximum number of server connections has been reached
afpBadDirIDType = -5060 #afpBadDirIDType
afpCallNotAllowed = -5048 #The server knows what you wanted to do, but won't let you do it just now
afpAlreadyLoggedInErr = -5047 #User has been authenticated but is already logged in from another machine (and that's not allowed on this server)
afpPwdPolicyErr = -5046 #Password does not conform to the server's password policy
afpPwdNeedsChangeErr = -5045 #The password needs to be changed
afpInsideTrashErr = -5044 #The folder being shared is inside the trash folder OR the shared folder is being moved into the trash folder
afpInsideSharedErr = -5043 #The folder being shared is inside a shared folder OR the folder contains a shared folder and is being moved into a shared folder
afpPwdExpiredErr = -5042 #The password being used is too old: this requires the user to change the password before log-in can continue
afpPwdTooShortErr = -5041 #The password being set is too short: there is a minimum length that must be met or exceeded
afpPwdSameErr = -5040 #Someone tried to change their password to the same password on a mandatory password change
afpBadIDErr = -5039 #afpBadIDErr
afpSameObjectErr = -5038 #afpSameObjectErr
afpCatalogChanged = -5037 #afpCatalogChanged
afpDiffVolErr = -5036 #afpDiffVolErr
afpIDExists = -5035 #afpIDExists
afpIDNotFound = -5034 #afpIDNotFound
afpContainsSharedErr = -5033 #the folder being shared contains a shared folder
afpObjectLocked = -5032 #Object is M/R/D/W inhibited
afpVolLocked = -5031 #Volume is Read-Only
afpIconTypeError = -5030 #Icon size specified different from existing icon size
afpDirNotFound = -5029 #Unknown directory specified
afpCantRename = -5028 #AFPRename cannot rename volume
afpServerGoingDown = -5027 #Server is shutting down
afpTooManyFilesOpen = -5026 #Maximum open file count reached
afpObjectTypeErr = -5025 #File/Directory specified where Directory/File expected
afpCallNotSupported = -5024 #Unsupported AFP call was made
afpUserNotAuth = -5023 #No AFPLogin call has successfully been made for this session
afpSessClosed = -5022 #Session closed
afpRangeOverlap = -5021 #Some or all of range already locked by same user
afpRangeNotLocked = -5020 #Tried to unlock range that was not locked by user
afpParmErr = -5019 #A specified parameter was out of allowable range
afpObjectNotFound = -5018 #Specified file or directory does not exist
afpObjectExists = -5017 #Specified destination file or directory already exists
afpNoServer = -5016 #Server not responding
afpNoMoreLocks = -5015 #Maximum lock limit reached
afpMiscErr = -5014 #Unexpected error encountered during execution
afpLockErr = -5013 #Some or all of requested range is locked by another user
afpItemNotFound = -5012 #Unknown UserName/UserID or missing comment/APPL entry
afpFlatVol = -5011 #Cannot create directory on specified volume
afpFileBusy = -5010 #Cannot delete an open file
afpEofError = -5009 #Read beyond logical end-of-file
afpDiskFull = -5008 #Insufficient free space on volume for operation
afpDirNotEmpty = -5007 #Cannot delete non-empty directory
afpDenyConflict = -5006 #Specified open/deny modes conflict with current open modes
afpCantMove = -5005 #Move destination is offspring of source, or root was specified
afpBitmapErr = -5004 #Bitmap contained bits undefined for call
afpBadVersNum = -5003 #Unknown AFP protocol version number specified
afpBadUAM = -5002 #Unknown user authentication method specified
afpAuthContinue = -5001 #Further information required to complete AFPLogin call
afpAccessDenied = -5000 #Insufficient access privileges for operation
illegalScrapFlavorSizeErr = -4999 #illegalScrapFlavorSizeErr
illegalScrapFlavorTypeErr = -4998 #illegalScrapFlavorTypeErr
illegalScrapFlavorFlagsErr = -4997 #illegalScrapFlavorFlagsErr
scrapFlavorSizeMismatchErr = -4996 #scrapFlavorSizeMismatchErr
scrapFlavorFlagsMismatchErr = -4995 #scrapFlavorFlagsMismatchErr
nilScrapFlavorDataErr = -4994 #nilScrapFlavorDataErr
noScrapPromiseKeeperErr = -4993 #noScrapPromiseKeeperErr
scrapPromiseNotKeptErr = -4992 #scrapPromiseNotKeptErr
processStateIncorrectErr = -4991 #processStateIncorrectErr
badScrapRefErr = -4990 #badScrapRefErr
duplicateScrapFlavorErr = -4989 #duplicateScrapFlavorErr
internalScrapErr = -4988 #internalScrapErr
coreFoundationUnknownErr = -4960 #coreFoundationUnknownErr
badRoutingSizeErr = -4276 #badRoutingSizeErr
routingNotFoundErr = -4275 #routingNotFoundErr
duplicateRoutingErr = -4274 #duplicateRoutingErr
invalidFolderTypeErr = -4273 #invalidFolderTypeErr
noMoreFolderDescErr = -4272 #noMoreFolderDescErr
duplicateFolderDescErr = -4271 #duplicateFolderDescErr
badFolderDescErr = -4270 #badFolderDescErr
cmCantGamutCheckError = -4217 #Gamut checking not supported by this ColorWorld
cmNamedColorNotFound = -4216 #NamedColor not found
cmCantCopyModifiedV1Profile = -4215 #Illegal to copy version 1 profiles that have been modified
cmRangeOverFlow = -4214 #Color conversion warning that some output color values over/underflowed and were clipped
cmInvalidProfileComment = -4213 #Bad Profile comment during drawpicture
cmNoGDevicesError = -4212 #Begin/End Matching -- no gdevices available
cmInvalidDstMap = -4211 #Destination pix/bit map was invalid
cmInvalidSrcMap = -4210 #Source pix/bit map was invalid
cmInvalidColorSpace = -4209 #Profile colorspace does not match bitmap type
cmErrIncompatibleProfile = -4208 #Other ColorSync Errors
cmSearchError = -4207 #cmSearchError
cmInvalidSearch = -4206 #Bad Search Handle
cmInvalidProfileLocation = -4205 #Operation not supported for this profile location
cmInvalidProfile = -4204 #A Profile must contain a 'cs1 ' tag to be valid
cmFatalProfileErr = -4203 #cmFatalProfileErr
cmCantDeleteElement = -4202 #cmCantDeleteElement
cmIndexRangeErr = -4201 #Tag index out of range
kNSLInitializationFailed = -4200 #UNABLE TO INITIALIZE THE MANAGER!!!!! DO NOT CONTINUE!!!!
kNSLNotInitialized = -4199 #kNSLNotInitialized
kNSLInsufficientSysVer = -4198 #kNSLInsufficientSysVer
kNSLInsufficientOTVer = -4197 #kNSLInsufficientOTVer
kNSLNoElementsInList = -4196 #kNSLNoElementsInList
kNSLBadReferenceErr = -4195 #kNSLBadReferenceErr
kNSLBadServiceTypeErr = -4194 #kNSLBadServiceTypeErr
kNSLBadDataTypeErr = -4193 #kNSLBadDataTypeErr
kNSLBadNetConnection = -4192 #kNSLBadNetConnection
kNSLNoSupportForService = -4191 #kNSLNoSupportForService
kNSLInvalidPluginSpec = -4190 #kNSLInvalidPluginSpec
kNSLRequestBufferAlreadyInList = -4189 #kNSLRequestBufferAlreadyInList
kNSLNoContextAvailable = -4188 #(ContinueLookup function ptr invalid)
kNSLBufferTooSmallForData = -4187 #(Client buffer too small for data from plugin)
kNSLCannotContinueLookup = -4186 #(Can't continue lookup; error or bad state)
kNSLBadClientInfoPtr = -4185 #(nil ClientAsyncInfoPtr; no reference available)
kNSLNullListPtr = -4184 #(client is trying to add items to a nil list)
kNSLBadProtocolTypeErr = -4183 #(client is trying to add a null protocol type)
kNSLPluginLoadFailed = -4182 #(manager unable to load one of the plugins)
kNSLNoPluginsFound = -4181 #(manager didn't find any valid plugins to load)
kNSLSearchAlreadyInProgress = -4180 #(you can only have one ongoing search per clientRef)
kNSLNoPluginsForSearch = -4179 #(no plugins will respond to search request; bad protocol(s)?)
kNSLNullNeighborhoodPtr = -4178 #(client passed a null neighborhood ptr)
kNSLSomePluginsFailedToLoad = -4177 #(one or more plugins failed to load, but at least one did load; this error isn't fatal)
kNSLErrNullPtrError = -4176 #kNSLErrNullPtrError
kNSLNotImplementedYet = -4175 #kNSLNotImplementedYet
kNSLUILibraryNotAvailable = -4174 #The NSL UI Library needs to be in the Extensions Folder
kNSLNoCarbonLib = -4173 #kNSLNoCarbonLib
kNSLBadURLSyntax = -4172 #URL contains illegal characters
kNSLSchedulerError = -4171 #A custom thread routine encountered an error
kNSL68kContextNotSupported = -4170 #no 68k allowed
noHelpForItem = -4009 #noHelpForItem
badProfileError = -4008 #badProfileError
colorSyncNotInstalled = -4007 #colorSyncNotInstalled
pickerCantLive = -4006 #pickerCantLive
cantLoadPackage = -4005 #cantLoadPackage
cantCreatePickerWindow = -4004 #cantCreatePickerWindow
cantLoadPicker = -4003 #cantLoadPicker
pickerResourceError = -4002 #pickerResourceError
requiredFlagsDontMatch = -4001 #requiredFlagsDontMatch
firstPickerError = -4000 #firstPickerError
kOTPortLostConnection = -3285 #
kOTUserRequestedErr = -3284 #
kOTConfigurationChangedErr = -3283 #
kOTBadConfigurationErr = -3282 #
kOTPortWasEjectedErr = -3281 #
kOTPortHasDiedErr = -3280 #
kOTClientNotInittedErr = -3279 #
kENOMSGErr = -3278 #
kESRCHErr = -3277 #
kEINPROGRESSErr = -3276 #
kENODATAErr = -3275 #
kENOSTRErr = -3274 #
kECANCELErr = -3273 #
kEBADMSGErr = -3272 #
kENOSRErr = -3271 #
kETIMEErr = -3270 #
kEPROTOErr = -3269 # fill out missing codes
kEHOSTUNREACHErr = -3264 #No route to host
kEHOSTDOWNErr = -3263 #Host is down
kECONNREFUSEDErr = -3260 #Connection refused
kETIMEDOUTErr = -3259 #Connection timed out
kETOOMANYREFSErr = -3258 #Too many references: can't splice
kESHUTDOWNErr = -3257 #Can't send after socket shutdown
kENOTCONNErr = -3256 #Socket is not connected
kEISCONNErr = -3255 #Socket is already connected
kENOBUFSErr = -3254 #No buffer space available
kECONNRESETErr = -3253 #Connection reset by peer
kECONNABORTEDErr = -3252 #Software caused connection abort
kENETRESETErr = -3251 #Network dropped connection on reset
kENETUNREACHErr = -3250 #Network is unreachable
kENETDOWNErr = -3249 #Network is down
kEADDRNOTAVAILErr = -3248 #Can't assign requested address
kEADDRINUSEErr = -3247 #Address already in use
kEOPNOTSUPPErr = -3244 #Operation not supported on socket
kESOCKTNOSUPPORTErr = -3243 #Socket type not supported
kEPROTONOSUPPORTErr = -3242 #Protocol not supported
kENOPROTOOPTErr = -3241 #Protocol not available
kEPROTOTYPEErr = -3240 #Protocol wrong type for socket
kEMSGSIZEErr = -3239 #Message too long
kEDESTADDRREQErr = -3238 #Destination address required
kENOTSOCKErr = -3237 #Socket operation on non-socket
kEALREADYErr = -3236 #
kEWOULDBLOCKErr = -3234 #Call would block, so was aborted
kERANGEErr = -3233 #Message size too large for STREAM
kEPIPEErr = -3231 #Broken pipe
kENOTTYErr = -3224 #Not a character device
kEINVALErr = -3221 #Invalid argument
kENODEVErr = -3218 #No such device
kOTDuplicateFoundErr = -3216 #OT generic duplicate found error
kEBUSYErr = -3215 #Device or resource busy
kEFAULTErr = -3213 #Bad address
kEACCESErr = -3212 #Permission denied
kOTOutOfMemoryErr = -3211 #OT ran out of memory, may be a temporary
kEAGAINErr = -3210 #Try operation again later
kEBADFErr = -3208 #Bad file number
kENXIOErr = -3205 #No such device or address
kEIOErr = -3204 #I/O error
kEINTRErr = -3203 #Interrupted system service
kENORSRCErr = -3202 #No such resource
kOTNotFoundErr = -3201 #OT generic not found error
kEPERMErr = -3200 #Permission denied
kOTCanceledErr = -3180 #XTI2OSStatus(TCANCELED) The command was cancelled
kOTBadSyncErr = -3179 #XTI2OSStatus(TBADSYNC) A synchronous call at interrupt time
kOTProtocolErr = -3178 #XTI2OSStatus(TPROTO) An unspecified provider error occurred
kOTQFullErr = -3177 #XTI2OSStatus(TQFULL)
kOTResAddressErr = -3176 #XTI2OSStatus(TRESADDR)
kOTResQLenErr = -3175 #XTI2OSStatus(TRESQLEN)
kOTProviderMismatchErr = -3174 #XTI2OSStatus(TPROVMISMATCH) Tried to accept on incompatible endpoint
kOTIndOutErr = -3173 #XTI2OSStatus(TINDOUT) Accept failed because of pending listen
kOTAddressBusyErr = -3172 #XTI2OSStatus(TADDRBUSY) Address requested is already in use
kOTBadQLenErr = -3171 #XTI2OSStatus(TBADQLEN) A Bind to an in-use addr with qlen > 0
kOTBadNameErr = -3170 #XTI2OSStatus(TBADNAME) A bad endpoint name was supplied
kOTNoStructureTypeErr = -3169 #XTI2OSStatus(TNOSTRUCTYPE) Bad structure type requested for OTAlloc
kOTStateChangeErr = -3168 #XTI2OSStatus(TSTATECHNG) State is changing - try again later
kOTNotSupportedErr = -3167 #XTI2OSStatus(TNOTSUPPORT) Command is not supported
kOTNoReleaseErr = -3166 #XTI2OSStatus(TNOREL) No orderly release indication available
kOTBadFlagErr = -3165 #XTI2OSStatus(TBADFLAG) A Bad flag value was supplied
kOTNoUDErrErr = -3164 #XTI2OSStatus(TNOUDERR) No Unit Data Error indication available
kOTNoDisconnectErr = -3163 #XTI2OSStatus(TNODIS) No disconnect indication available
kOTNoDataErr = -3162 #XTI2OSStatus(TNODATA) No data available for reading
kOTFlowErr = -3161 #XTI2OSStatus(TFLOW) Provider is flow-controlled
kOTBufferOverflowErr = -3160 #XTI2OSStatus(TBUFOVFLW) Passed buffer not big enough
kOTBadDataErr = -3159 #XTI2OSStatus(TBADDATA) An illegal amount of data was specified
kOTLookErr = -3158 #XTI2OSStatus(TLOOK) An event occurred - call Look()
kOTSysErrorErr = -3157 #XTI2OSStatus(TSYSERR) A system error occurred
kOTBadSequenceErr = -3156 #XTI2OSStatus(TBADSEQ) Sequence specified does not exist
kOTOutStateErr = -3155 #XTI2OSStatus(TOUTSTATE) Call issued in wrong state
kOTNoAddressErr = -3154 #XTI2OSStatus(TNOADDR) No address was specified
kOTBadReferenceErr = -3153 #XTI2OSStatus(TBADF) Bad provider reference
kOTAccessErr = -3152 #XTI2OSStatus(TACCES) Missing access permission
kOTBadOptionErr = -3151 #XTI2OSStatus(TBADOPT) A Bad option was specified
kOTBadAddressErr = -3150 #XTI2OSStatus(TBADADDR) A Bad address was specified
sktClosedErr = -3109 #sktClosedErr
recNotFnd = -3108 #recNotFnd
atpBadRsp = -3107 #atpBadRsp
atpLenErr = -3106 #atpLenErr
readQErr = -3105 #readQErr
extractErr = -3104 #extractErr
ckSumErr = -3103 #ckSumErr
noMPPErr = -3102 #noMPPErr
buf2SmallErr = -3101 #buf2SmallErr
noPrefAppErr = -3032 #noPrefAppErr
badTranslationSpecErr = -3031 #badTranslationSpecErr
noTranslationPathErr = -3030 #noTranslationPathErr
couldNotParseSourceFileErr = -3026 #Source document does not contain source type
invalidTranslationPathErr = -3025 #Source type to destination type not a valid path
retryComponentRegistrationErr = -3005 #retryComponentRegistrationErr
unresolvedComponentDLLErr = -3004 #unresolvedComponentDLLErr
componentDontRegister = -3003 #componentDontRegister
componentNotCaptured = -3002 #componentNotCaptured
validInstancesExist = -3001 #validInstancesExist
invalidComponentID = -3000 #invalidComponentID
cfragLastErrCode = -2899 #The last value in the range of CFM errors.
cfragOutputLengthErr = -2831 #An output parameter is too small to hold the value.
cfragAbortClosureErr = -2830 #Used by notification handlers to abort a closure.
cfragClosureIDErr = -2829 #The closure ID was not valid.
cfragContainerIDErr = -2828 #The fragment container ID was not valid.
cfragNoRegistrationErr = -2827 #The registration name was not found.
cfragNotClosureErr = -2826 #The closure ID was actually a connection ID.
cfragFileSizeErr = -2825 #A file was too large to be mapped.
cfragFragmentUsageErr = -2824 #A semantic error in usage of the fragment.
cfragArchitectureErr = -2823 #A fragment has an unacceptable architecture.
cfragNoApplicationErr = -2822 #No application member found in the cfrg resource.
cfragInitFunctionErr = -2821 #A fragment's initialization routine returned an error.
cfragFragmentCorruptErr = -2820 #A fragment's container was corrupt (known format).
cfragCFMInternalErr = -2819 #An internal inconsistency has been detected.
cfragCFMStartupErr = -2818 #Internal error during CFM initialization.
cfragLibConnErr = -2817 #
cfragInitAtBootErr = -2816 #A boot library has an initialization function. (System 7 only)
cfragInitLoopErr = -2815 #Circularity in required initialization order.
cfragImportTooNewErr = -2814 #An import library was too new for a client.
cfragImportTooOldErr = -2813 #An import library was too old for a client.
cfragInitOrderErr = -2812 #
cfragNoIDsErr = -2811 #No more CFM IDs for contexts, connections, etc.
cfragNoClientMemErr = -2810 #Out of memory for fragment mapping or section instances.
cfragNoPrivateMemErr = -2809 #Out of memory for internal bookkeeping.
cfragNoPositionErr = -2808 #The registration insertion point was not found.
cfragUnresolvedErr = -2807 #A fragment had "hard" unresolved imports.
cfragFragmentFormatErr = -2806 #A fragment's container format is unknown.
cfragDupRegistrationErr = -2805 #The registration name was already in use.
cfragNoLibraryErr = -2804 #The named library was not found.
cfragNoSectionErr = -2803 #The specified section was not found.
cfragNoSymbolErr = -2802 #The specified symbol was not found.
cfragConnectionIDErr = -2801 #The connection ID was not valid.
cfragFirstErrCode = -2800 #The first value in the range of CFM errors.
errASInconsistentNames = -2780 #English errors:
errASNoResultReturned = -2763 #The range -2780 thru -2799 is reserved for dialect specific error codes. (Error codes from different dialects may overlap.)
errASParameterNotForEvent = -2762 #errASParameterNotForEvent
errASIllegalFormalParameter = -2761 #errASIllegalFormalParameter
errASTerminologyNestingTooDeep = -2760 #errASTerminologyNestingTooDeep
OSAControlFlowError = -2755 #Signaled when illegal control flow occurs in an application (no catcher for throw, non-lexical loop exit, etc.)
OSAInconsistentDeclarations = -2754 #Signaled when a variable is declared inconsistently in the same scope, such as both local and global
OSAUndefinedVariable = -2753 #Signaled when a variable is accessed that has no value
OSADuplicateHandler = -2752 #Signaled when more than one handler is defined with the same name in a scope where the language doesn't allow it
OSADuplicateProperty = -2751 #Signaled when a formal parameter, local variable, or instance variable is specified more than once.
OSADuplicateParameter = -2750 #Signaled when a formal parameter, local variable, or instance variable is specified more than once
OSATokenTooLong = -2742 #Signaled when a name or number is too long to be parsed
OSASyntaxTypeError = -2741 #Signaled when another form of syntax was expected. (e.g. "expected a <type> but found <this>")
OSASyntaxError = -2740 #Signaled when a syntax error occurs. (e.g. "Syntax error" or "<this> can't go after <that>")
errASCantCompareMoreThan32k = -2721 #Parser/Compiler errors:
errASCantConsiderAndIgnore = -2720 #errASCantConsiderAndIgnore
errOSACantCreate = -2710 #errOSACantCreate
errOSACantGetTerminology = -2709 #errOSACantGetTerminology
errOSADataBlockTooLarge = -2708 #Signaled when an intrinsic limitation is exceeded for the size of a value or data structure.
errOSAInternalTableOverflow = -2707 #Signaled when a runtime internal data structure overflows
errOSAStackOverflow = -2706 #Signaled when the runtime stack overflows
errOSACorruptTerminology = -2705 #Signaled when an application's terminology resource is not readable
errOSAAppNotHighLevelEventAware = -2704 #Signaled when an application can't respond to AppleEvents
errOSACantLaunch = -2703 #Signaled when application can't be launched or when it is remote and program linking is not enabled
errOSANumericOverflow = -2702 #Signaled when integer or real value is too large to be represented
errOSADivideByZero = -2701 #Signaled when there is an attempt to divide by zero
errOSAGeneralError = -2700 #Signaled by user scripts or applications when no actual error code is to be returned.
noIconDataAvailableErr = -2582 #The necessary icon data is not available
noSuchIconErr = -2581 #The requested icon could not be found
invalidIconRefErr = -2580 #The icon ref is not valid
nrCallNotSupported = -2557 #This call is not available or supported on this machine
nrTransactionAborted = -2556 #transaction was aborted
nrExitedIteratorScope = -2555 #outer scope of iterator was exited
nrIterationDone = -2554 #iteration operation is done
nrPropertyAlreadyExists = -2553 #property already exists
nrInvalidEntryIterationOp = -2552 #invalid entry iteration operation
nrPathBufferTooSmall = -2551 #buffer for path is too small
nrPathNotFound = -2550 #a path component lookup failed
nrResultCodeBase = -2549 #nrResultCodeBase
nrOverrunErr = -2548 #nrOverrunErr
nrNotModifiedErr = -2547 #nrNotModifiedErr
nrTypeMismatchErr = -2546 #nrTypeMismatchErr
nrPowerSwitchAbortErr = -2545 #nrPowerSwitchAbortErr
nrPowerErr = -2544 #nrPowerErr
nrDataTruncatedErr = -2543 #nrDataTruncatedErr
nrNotSlotDeviceErr = -2542 #nrNotSlotDeviceErr
nrNameErr = -2541 #nrNameErr
nrNotCreatedErr = -2540 #nrNotCreatedErr
nrNotFoundErr = -2539 #nrNotFoundErr
nrInvalidNodeErr = -2538 #nrInvalidNodeErr
nrNotEnoughMemoryErr = -2537 #nrNotEnoughMemoryErr
nrLockedErr = -2536 #nrLockedErr
mmInternalError = -2526 #mmInternalError
tsmDefaultIsNotInputMethodErr = -2524 #Current Input source is KCHR or uchr, not Input Method (GetDefaultInputMethod)
tsmNoStem = -2523 #No stem exists for the token
tsmNoMoreTokens = -2522 #No more tokens are available for the source text
tsmNoHandler = -2521 #No Callback Handler exists for callback
tsmInvalidContext = -2520 #Invalid TSMContext specified in call
tsmUnknownErr = -2519 #any other errors
tsmUnsupportedTypeErr = -2518 #unSupported interface type error
tsmScriptHasNoIMErr = -2517 #script has no input method or is using old IM
tsmInputMethodIsOldErr = -2516 #returned by GetDefaultInputMethod
tsmComponentAlreadyOpenErr = -2515 #text service already opened for the document
tsmTSNotOpenErr = -2514 #text service is not open
tsmTSHasNoMenuErr = -2513 #the text service has no menu
tsmUseInputWindowErr = -2512 #not TSM aware because we are using input window
tsmDocumentOpenErr = -2511 #there are open documents
tsmTextServiceNotFoundErr = -2510 #no text service found
tsmCantOpenComponentErr = -2509 #can't open the component
tsmNoOpenTSErr = -2508 #no open text service
tsmDocNotActiveErr = -2507 #document is NOT active
tsmTSMDocBusyErr = -2506 #document is still active
tsmInvalidDocIDErr = -2505 #invalid TSM document id
tsmNeverRegisteredErr = -2504 #app never registered error (not TSM aware)
tsmAlreadyRegisteredErr = -2503 #want to register again error
tsmNotAnAppErr = -2502 #not an application error
tsmInputMethodNotFoundErr = -2501 #tsmInputMethodNotFoundErr
tsmUnsupScriptLanguageErr = -2500 #tsmUnsupScriptLanguageErr
kernelUnrecoverableErr = -2499 #kernelUnrecoverableErr
kernelReturnValueErr = -2422 #kernelReturnValueErr
kernelAlreadyFreeErr = -2421 #kernelAlreadyFreeErr
kernelIDErr = -2419 #kernelIDErr
kernelExceptionErr = -2418 #kernelExceptionErr
kernelTerminatedErr = -2417 #kernelTerminatedErr
kernelInUseErr = -2416 #kernelInUseErr
kernelTimeoutErr = -2415 #kernelTimeoutErr
kernelAsyncReceiveLimitErr = -2414 #kernelAsyncReceiveLimitErr
kernelAsyncSendLimitErr = -2413 #kernelAsyncSendLimitErr
kernelAttributeErr = -2412 #kernelAttributeErr
kernelExecutionLevelErr = -2411 #kernelExecutionLevelErr
kernelDeletePermissionErr = -2410 #kernelDeletePermissionErr
kernelExecutePermissionErr = -2409 #kernelExecutePermissionErr
kernelReadPermissionErr = -2408 #kernelReadPermissionErr
kernelWritePermissionErr = -2407 #kernelWritePermissionErr
kernelObjectExistsErr = -2406 #kernelObjectExistsErr
kernelUnsupportedErr = -2405 #kernelUnsupportedErr
kernelPrivilegeErr = -2404 #kernelPrivilegeErr
kernelOptionsErr = -2403 #kernelOptionsErr
kernelCanceledErr = -2402 #kernelCanceledErr
kernelIncompleteErr = -2401 #kernelIncompleteErr
badCallOrderErr = -2209 #Usually due to a status call being made before setup
noDMAErr = -2208 #Can't do DMA digitizing (i.e. can't go to requested dest)
badDepthErr = -2207 #Can't digitize into this depth
notExactSizeErr = -2206 #Can't do exact size requested
noMoreKeyColorsErr = -2205 #all key indexes in use
notExactMatrixErr = -2204 #warning of bad matrix, digitizer did its best
matrixErr = -2203 #bad matrix, digitizer did nothing
qtParamErr = -2202 #bad input parameter (out of range, etc)
digiUnimpErr = -2201 #feature unimplemented
qtXMLApplicationErr = -2159 #qtXMLApplicationErr
qtXMLParseErr = -2158 #qtXMLParseErr
qtActionNotHandledErr = -2157 #qtActionNotHandledErr
notEnoughDataErr = -2149 #notEnoughDataErr
urlDataHFTPURLErr = -2148 #urlDataHFTPURLErr
urlDataHFTPServerDisconnectedErr = -2147 #urlDataHFTPServerDisconnectedErr
urlDataHFTPNoPasswordErr = -2146 #urlDataHFTPNoPasswordErr
urlDataHFTPNeedPasswordErr = -2145 #urlDataHFTPNeedPasswordErr
urlDataHFTPBadNameListErr = -2144 #urlDataHFTPBadNameListErr
urlDataHFTPNoNetDriverErr = -2143 #urlDataHFTPNoNetDriverErr
urlDataHFTPFilenameErr = -2142 #urlDataHFTPFilenameErr
urlDataHFTPPermissionsErr = -2141 #urlDataHFTPPermissionsErr
urlDataHFTPQuotaErr = -2140 #urlDataHFTPQuotaErr
urlDataHFTPNoDirectoryErr = -2139 #urlDataHFTPNoDirectoryErr
urlDataHFTPDataConnectionErr = -2138 #urlDataHFTPDataConnectionErr
urlDataHFTPServerErr = -2137 #urlDataHFTPServerErr
urlDataHFTPBadPasswordErr = -2136 #urlDataHFTPBadPasswordErr
urlDataHFTPBadUserErr = -2135 #urlDataHFTPBadUserErr
urlDataHFTPShutdownErr = -2134 #urlDataHFTPShutdownErr
urlDataHFTPProtocolErr = -2133 #urlDataHFTPProtocolErr
urlDataHHTTPRedirectErr = -2132 #urlDataHHTTPRedirectErr
urlDataHHTTPURLErr = -2131 #urlDataHHTTPURLErr
urlDataHHTTPNoNetDriverErr = -2130 #urlDataHHTTPNoNetDriverErr
urlDataHHTTPProtocolErr = -2129 #urlDataHHTTPProtocolErr
qtNetworkAlreadyAllocatedErr = -2127 #qtNetworkAlreadyAllocatedErr
notAllowedToSaveMovieErr = -2126 #notAllowedToSaveMovieErr
fileOffsetTooBigErr = -2125 #fileOffsetTooBigErr
ASDEntryNotFoundErr = -2124 #ASDEntryNotFoundErr
ASDBadForkErr = -2123 #ASDBadForkErr
ASDBadHeaderErr = -2122 #ASDBadHeaderErr
AAPNotFoundErr = -2121 #AAPNotFoundErr
AAPNotCreatedErr = -2120 #AAPNotCreatedErr
qfcbNotCreatedErr = -2119 #qfcbNotCreatedErr
qfcbNotFoundErr = -2118 #qfcbNotFoundErr
wackBadMetaDataErr = -2117 #wackBadMetaDataErr
wackForkNotFoundErr = -2116 #wackForkNotFoundErr
wackBadFileErr = -2115 #wackBadFileErr
unknownFormatErr = -2114 #unknownFormatErr
pathNotVerifiedErr = -2113 #pathNotVerifiedErr
noPathMappingErr = -2112 #noPathMappingErr
emptyPathErr = -2111 #emptyPathErr
pathTooLongErr = -2110 #pathTooLongErr
cannotBeLeafAtomErr = -2109 #cannotBeLeafAtomErr
invalidAtomTypeErr = -2108 #invalidAtomTypeErr
invalidAtomContainerErr = -2107 #invalidAtomContainerErr
invalidAtomErr = -2106 #invalidAtomErr
duplicateAtomTypeAndIDErr = -2105 #duplicateAtomTypeAndIDErr
atomIndexInvalidErr = -2104 #atomIndexInvalidErr
atomsNotOfSameTypeErr = -2103 #atomsNotOfSameTypeErr
notLeafAtomErr = -2102 #notLeafAtomErr
cannotFindAtomErr = -2101 #cannotFindAtomErr
unsupportedProcessorErr = -2097 #unsupportedProcessorErr
unsupportedOSErr = -2096 #unsupportedOSErr
qtmlUninitialized = -2095 #qtmlUninitialized
qtmlDllEntryNotFoundErr = -2094 #Windows specific errors (when qtml is loading)
qtmlDllLoadErr = -2093 #Windows specific errors (when qtml is loading)
componentDllEntryNotFoundErr = -2092 #Windows specific errors (when component is loading)
componentDllLoadErr = -2091 #Windows specific errors (when component is loading)
videoOutputInUseErr = -2090 #videoOutputInUseErr
noExportProcAvailableErr = -2089 #noExportProcAvailableErr
tuneParseOSErr = -2087 #tuneParseOSErr
tunePlayerFullOSErr = -2086 #tunePlayerFullOSErr
noteChannelNotAllocatedOSErr = -2085 #noteChannelNotAllocatedOSErr
illegalNoteChannelOSErr = -2084 #illegalNoteChannelOSErr
synthesizerOSErr = -2083 #synthesizerOSErr
synthesizerNotRespondingOSErr = -2082 #synthesizerNotRespondingOSErr
midiManagerAbsentOSErr = -2081 #midiManagerAbsentOSErr
illegalControllerOSErr = -2080 #illegalControllerOSErr
illegalInstrumentOSErr = -2079 #illegalInstrumentOSErr
illegalKnobValueOSErr = -2078 #illegalKnobValueOSErr
illegalKnobOSErr = -2077 #illegalKnobOSErr
illegalChannelOSErr = -2076 #illegalChannelOSErr
illegalPartOSErr = -2075 #illegalPartOSErr
illegalVoiceAllocationOSErr = -2074 #illegalVoiceAllocationOSErr
cantReceiveFromSynthesizerOSErr = -2073 #cantReceiveFromSynthesizerOSErr
cantSendToSynthesizerOSErr = -2072 #cantSendToSynthesizerOSErr
notImplementedMusicOSErr = -2071 #notImplementedMusicOSErr
internalComponentErr = -2070 #internalComponentErr
invalidSpriteIDErr = -2069 #invalidSpriteIDErr
invalidImageIndexErr = -2068 #invalidImageIndexErr
invalidSpriteIndexErr = -2067 #invalidSpriteIndexErr
gWorldsNotSameDepthAndSizeErr = -2066 #gWorldsNotSameDepthAndSizeErr
invalidSpritePropertyErr = -2065 #invalidSpritePropertyErr
invalidSpriteWorldPropertyErr = -2064 #invalidSpriteWorldPropertyErr
missingRequiredParameterErr = -2063 #missingRequiredParameterErr
movieTextNotFoundErr = -2062 #movieTextNotFoundErr
sourceNotFoundErr = -2061 #sourceNotFoundErr
noSourceTreeFoundErr = -2060 #noSourceTreeFoundErr
samplesAlreadyInMediaErr = -2059 #samplesAlreadyInMediaErr
auxiliaryExportDataUnavailable = -2058 #auxiliaryExportDataUnavailable
unsupportedAuxiliaryImportData = -2057 #unsupportedAuxiliaryImportData
soundSupportNotAvailableErr = -2056 #QT for Windows error
noSoundTrackInMovieErr = -2055 #QT for Windows error
noVideoTrackInMovieErr = -2054 #QT for Windows error
featureUnsupported = -2053 #featureUnsupported
couldNotUseAnExistingSample = -2052 #couldNotUseAnExistingSample
noDefaultDataRef = -2051 #noDefaultDataRef
badDataRefIndex = -2050 #badDataRefIndex
invalidDataRefContainer = -2049 #invalidDataRefContainer
noMovieFound = -2048 #noMovieFound
dataNoDataRef = -2047 #dataNoDataRef
endOfDataReached = -2046 #endOfDataReached
dataAlreadyClosed = -2045 #dataAlreadyClosed
dataAlreadyOpenForWrite = -2044 #dataAlreadyOpenForWrite
dataNotOpenForWrite = -2043 #dataNotOpenForWrite
dataNotOpenForRead = -2042 #dataNotOpenForRead
invalidSampleDescription = -2041 #invalidSampleDescription
invalidChunkCache = -2040 #invalidChunkCache
invalidSampleDescIndex = -2039 #invalidSampleDescIndex
invalidChunkNum = -2038 #invalidChunkNum
invalidSampleNum = -2037 #invalidSampleNum
invalidRect = -2036 #invalidRect
cantEnableTrack = -2035 #cantEnableTrack
internalQuickTimeError = -2034 #internalQuickTimeError
badEditIndex = -2033 #badEditIndex
timeNotInMedia = -2032 #timeNotInMedia
timeNotInTrack = -2031 #timeNotInTrack
trackNotInMovie = -2030 #trackNotInMovie
trackIDNotFound = -2029 #trackIDNotFound
badTrackIndex = -2028 #badTrackIndex
maxSizeToGrowTooSmall = -2027 #maxSizeToGrowTooSmall
userDataItemNotFound = -2026 #userDataItemNotFound
staleEditState = -2025 #staleEditState
nonMatchingEditState = -2024 #nonMatchingEditState
invalidEditState = -2023 #invalidEditState
cantCreateSingleForkFile = -2022 #happens when file already exists
wfFileNotFound = -2021 #wfFileNotFound
movieToolboxUninitialized = -2020 #movieToolboxUninitialized
progressProcAborted = -2019 #progressProcAborted
mediaTypesDontMatch = -2018 #mediaTypesDontMatch
badEditList = -2017 #badEditList
cantPutPublicMovieAtom = -2016 #cantPutPublicMovieAtom
invalidTime = -2015 #invalidTime
invalidDuration = -2014 #invalidDuration
invalidHandler = -2013 #invalidHandler
invalidDataRef = -2012 #invalidDataRef
invalidSampleTable = -2011 #invalidSampleTable
invalidMovie = -2010 #invalidMovie
invalidTrack = -2009 #invalidTrack
invalidMedia = -2008 #invalidMedia
noDataHandler = -2007 #noDataHandler
noMediaHandler = -2006 #noMediaHandler
badComponentType = -2005 #badComponentType
cantOpenHandler = -2004 #cantOpenHandler
cantFindHandler = -2003 #cantFindHandler
badPublicMovieAtom = -2002 #badPublicMovieAtom
badImageDescription = -2001 #badImageDescription
couldNotResolveDataRef = -2000 #couldNotResolveDataRef
nonDragOriginatorErr = -1862 #illegal attempt at originator only data
badImageErr = -1861 #bad translucent image PixMap
badImageRgnErr = -1860 #bad translucent image region
noSuitableDisplaysErr = -1859 #no displays support translucency
unsupportedForPlatformErr = -1858 #call is for PowerPC only
dragNotAcceptedErr = -1857 #drag was not accepted by receiver
handlerNotFoundErr = -1856 #handler not found
duplicateHandlerErr = -1855 #handler already exists
cantGetFlavorErr = -1854 #error while trying to get flavor data
duplicateFlavorErr = -1853 #flavor type already exists
badDragFlavorErr = -1852 #unknown flavor type
badDragItemErr = -1851 #unknown drag item reference
badDragRefErr = -1850 #unknown drag reference
errEndOfBody = -1813 #errEndOfBody
errEndOfDocument = -1812 #errEndOfDocument
errTopOfBody = -1811 #errTopOfBody
errTopOfDocument = -1810 #errTopOfDocument
errOffsetIsOutsideOfView = -1801 #errOffsetIsOutsideOfView
errOffsetInvalid = -1800 #errOffsetInvalid
errOSACantOpenComponent = -1762 #Can't connect to scripting system with that ID
errOSAComponentMismatch = -1761 #Parameters are from 2 different components
errOSADataFormatTooNew = -1759 #errOSADataFormatTooNew
errOSADataFormatObsolete = -1758 #errOSADataFormatObsolete
errOSANoSuchDialect = -1757 #errOSANoSuchDialect
errOSASourceNotAvailable = -1756 #errOSASourceNotAvailable
errOSABadSelector = -1754 #errOSABadSelector
errOSAScriptError = -1753 #errOSAScriptError
errOSABadStorageType = -1752 #errOSABadStorageType
errOSAInvalidID = -1751 #errOSAInvalidID
errOSASystemError = -1750 #errOSASystemError
errAEBufferTooSmall = -1741 #buffer for AEFlattenDesc too small
errAEBuildSyntaxError = -1740 #AEBuildDesc and friends detected a syntax error
errAEDescIsNull = -1739 #attempting to perform an invalid operation on a null descriptor
errAEStreamAlreadyConverted = -1738 #attempt to convert a stream that has already been converted
errAEStreamBadNesting = -1737 #nesting violation while streaming
errAEDuplicateHandler = -1736 #attempt to install handler in table for identical class and id (1.1 or greater)
errAEEventFiltered = -1735 #event has been filtered, and should not be propagated (1.1 or greater)
errAEReceiveEscapeCurrent = -1734 #break out of only lowest level of AEReceive (1.1 or greater)
errAEReceiveTerminate = -1733 #break out of all levels of AEReceive to the topmost (1.1 or greater)
errAERecordingIsAlreadyOn = -1732 #available only in version 1.0.1 or greater
errAEUnknownObjectType = -1731 #available only in version 1.0.1 or greater
errAEEmptyListContainer = -1730 #Attempt to pass empty list as container to accessor
errAENegativeCount = -1729 #CountProc returned negative value
errAENoSuchObject = -1728 #e.g., specifier asked for the 3rd, but there are only 2. Basically, this indicates a run-time resolution error.
errAENotAnObjSpec = -1727 #Param to AEResolve not of type 'obj '
errAEBadTestKey = -1726 #Test is neither typeLogicalDescriptor nor typeCompDescriptor
errAENoSuchLogical = -1725 #Something other than AND, OR, or NOT
errAEAccessorNotFound = -1723 #Accessor proc matching wantClass and containerType or wildcards not found
errAEWrongNumberArgs = -1721 #Logical op kAENOT used with other than 1 term
errAEImpossibleRange = -1720 #A range like 3rd to 2nd, or 1st to all.
errAEIllegalIndex = -1719 #index is out of range in a put operation
errAEReplyNotArrived = -1718 #the contents of the reply you are accessing have not arrived yet
errAEHandlerNotFound = -1717 #no handler in the dispatch tables fits the parameters to AEGetEventHandler or AEGetCoercionHandler
errAEUnknownAddressType = -1716 #the target address type is not known
errAEParamMissed = -1715 #a required parameter was not accessed
errAENotASpecialFunction = -1714 #there is no special function for/with this keyword
errAENoUserInteraction = -1713 #no user interaction is allowed
errAETimeout = -1712 #the AppleEvent timed out
errAEWaitCanceled = -1711 #in AESend, the user cancelled out of wait loop for reply or receipt
errAEUnknownSendMode = -1710 #mode wasn't NoReply, WaitReply, or QueueReply or Interaction level is unknown
errAEReplyNotValid = -1709 #AEResetTimer was passed an invalid reply parameter
errAEEventNotHandled = -1708 #the AppleEvent was not handled by any handler
errAENotAppleEvent = -1707 #the event is not in AppleEvent format
errAENewerVersion = -1706 #need newer version of the AppleEvent manager
errAEBadListItem = -1705 #the specified list item does not exist
errAENotAEDesc = -1704 #errAENotAEDesc
errAEWrongDataType = -1703 #errAEWrongDataType
errAECorruptData = -1702 #errAECorruptData
errAEDescNotFound = -1701 #errAEDescNotFound
errAECoercionFail = -1700 #bad parameter data or unable to coerce the data supplied
errFSIteratorNotSupported = -1424 #The iterator's flags or container are not supported by this call
errFSIteratorNotFound = -1423 #Passed FSIterator is not an open iterator
errFSBadIteratorFlags = -1422 #Flags passed to FSOpenIterator are bad
errFSForkExists = -1421 #Named fork already exists.
errFSRefsDifferent = -1420 #FSCompareFSRefs; refs are for different objects
errFSBadSearchParams = -1419 #Something wrong with CatalogSearch searchParams
errFSBadItemCount = -1418 #maximumItems was zero
errFSNoMoreItems = -1417 #Iteration ran out of items to return
errFSBadAllocFlags = -1413 #Invalid bits set in allocationFlags
errFSBadPosMode = -1412 #Newline bits set in positionMode
errFSMissingName = -1411 #A Unicode name parameter was NULL or nameLength parameter was zero
errFSNameTooLong = -1410 #File/fork name is too long to create/rename
errFSForkNotFound = -1409 #Named fork does not exist
errFSNotAFolder = -1407 #Expected a folder, got a file
errFSMissingCatInfo = -1406 #A CatalogInfo parameter was NULL
errFSBadInfoBitmap = -1405 #A CatalogInfoBitmap or VolumeInfoBitmap has reserved or invalid bits set
errFSBadForkRef = -1404 #A ForkRefNum parameter was bad
errFSBadBuffer = -1403 #A buffer parameter was bad
errFSBadForkName = -1402 #Fork name parameter is bad
errFSBadFSRef = -1401 #FSRef parameter is bad
errFSUnknownCall = -1400 #selector is not recognized by this filesystem
badFCBErr = -1327 #FCBRecPtr is not valid
volVMBusyErr = -1311 #can't eject because volume is in use by VM
fsDataTooBigErr = -1310 #file or volume is too big for system
fileBoundsErr = -1309 #file's EOF, offset, mark or size is too big
notARemountErr = -1308 #when _Mount allows only remounts and doesn't get one
badFidErr = -1307 #file id is dangling or doesn't match with the file number
sameFileErr = -1306 #can't exchange a file with itself
desktopDamagedErr = -1305 #desktop database files are corrupted
catChangedErr = -1304 #the catalog has been modified
diffVolErr = -1303 #files on different volumes
notAFileErr = -1302 #directory specified
fidExists = -1301 #file id already exists
fidNotFound = -1300 #no file thread exists.
errRefNum = -1280 #bad connection refNum
errAborted = -1279 #control call was aborted
errState = -1278 #bad connection state for this operation
errOpening = -1277 #open connection request failed
errAttention = -1276 #attention message too long
errFwdReset = -1275 #read terminated by forward reset
errDSPQueueSize = -1274 #DSP Read/Write Queue Too small
errOpenDenied = -1273 #open connection request was denied
reqAborted = -1105 #reqAborted
noDataArea = -1104 #noDataArea
noSendResp = -1103 #noSendResp
cbNotFound = -1102 #cbNotFound
noRelErr = -1101 #noRelErr
badBuffNum = -1100 #badBuffNum
badATPSkt = -1099 #badATPSkt
tooManySkts = -1098 #tooManySkts
tooManyReqs = -1097 #tooManyReqs
reqFailed = -1096 #reqFailed
aspNoAck = -1075 #No ack on attention request (server err)
aspTooMany = -1074 #Too many clients (server error)
aspSizeErr = -1073 #Command block too big
aspSessClosed = -1072 #Session closed
aspServerBusy = -1071 #Server cannot open another session
aspParamErr = -1070 #Parameter error
aspNoServers = -1069 #No servers at that address
aspNoMoreSess = -1068 #No more sessions on server
aspBufTooSmall = -1067 #Buffer too small
aspBadVersNum = -1066 #Server cannot support this ASP version
nbpNISErr = -1029 #Error trying to open the NIS
nbpNotFound = -1028 #Name not found on remove
nbpDuplicate = -1027 #Duplicate name exists already
nbpConfDiff = -1026 #Name confirmed at different socket
nbpNoConfirm = -1025 #nbpNoConfirm
nbpBuffOvr = -1024 #Buffer overflow in LookupName
noMaskFoundErr = -1000 #Icon Utilities Error
kFMFontContainerAccessErr = -985 #kFMFontContainerAccessErr
kFMFontTableAccessErr = -984 #kFMFontTableAccessErr
kFMIterationScopeModifiedErr = -983 #kFMIterationScopeModifiedErr
kFMInvalidFontErr = -982 #kFMInvalidFontErr
kFMInvalidFontFamilyErr = -981 #kFMInvalidFontFamilyErr
kFMIterationCompleted = -980 #kFMIterationCompleted
guestNotAllowedErr = -932 #destination port requires authentication
badLocNameErr = -931 #location name malformed
badServiceMethodErr = -930 #illegal service type, or not supported
noUserRecErr = -928 #Invalid user reference number
authFailErr = -927 #unable to authenticate user at destination
noInformErr = -926 #PPCStart failed because destination did not have inform pending
networkErr = -925 #An error has occurred in the network, not too likely
noUserRefErr = -924 #unable to create a new userRefNum
notLoggedInErr = -923 #The default userRefNum does not yet exist
noDefaultUserErr = -922 #user hasn't typed in owner's name in Network Setup Control Panel
badPortNameErr = -919 #PPCPortRec malformed
sessClosedErr = -917 #session was closed
portClosedErr = -916 #port was closed
noResponseErr = -915 #unable to contact destination
noToolboxNameErr = -914 #A system resource is missing, not too likely
noMachineNameErr = -913 #user hasn't named his Macintosh in the Network Setup Control Panel
userRejectErr = -912 #Destination rejected the session request
noUserNameErr = -911 #user name unknown on destination machine
portNameExistsErr = -910 #port is already open (perhaps in another app)
badReqErr = -909 #bad parameter or invalid state for operation
noSessionErr = -908 #Invalid session reference number
sessTableErr = -907 #Out of session tables, try again later
destPortErr = -906 #Port does not exist at destination
localOnlyErr = -905 #Network activity is currently disabled
noGlobalsErr = -904 #The system is hosed, better re-boot
noPortErr = -903 #Unable to open port or bad portRefNum. If you're calling
nameTypeErr = -902 #Invalid or inappropriate locationKindSelector in locationName
notInitErr = -900 #PPCToolBox not initialized
notAppropriateForClassic = -877 #This application won't or shouldn't run on Classic (Problem 2481058).
appVersionTooOld = -876 #The application's creator and version are incompatible with the current version of Mac OS.
wrongApplicationPlatform = -875 #The application could not launch because the required platform is not available
hmCloseViewActive = -863 #Returned from HMRemoveBalloon if CloseView was active
hmNoBalloonUp = -862 #Returned from HMRemoveBalloon if no balloon was visible when call was made
hmOperationUnsupported = -861 #Returned from HMShowBalloon call if bad method passed to routine
hmUnknownHelpType = -859 #Returned if help msg record contained a bad type
hmWrongVersion = -858 #Returned if help mgr resource was the wrong version
hmSkippedBalloon = -857 #Returned from calls if helpmsg specified a skip balloon
hmHelpManagerNotInited = -855 #Returned from HMGetHelpMenuHandle if help menu not setup
hmSameAsLastBalloon = -854 #Returned from HMShowMenuBalloon if menu & item is same as last time
hmBalloonAborted = -853 #Returned if mouse was moving or mouse wasn't in window port rect
hmHelpDisabled = -850 #Show Balloons mode was off, call to routine ignored
rcDBPackNotInited = -813 #attempt to call other routine before InitDBPack
rcDBWrongVersion = -812 #incompatible versions
rcDBNoHandler = -811 #no app handler for specified data type
rcDBBadAsyncPB = -810 #tried to kill a bad pb
rcDBAsyncNotSupp = -809 #ddev does not support async calls
rcDBBadDDEV = -808 #bad ddev specified on DBInit
rcDBBadSessNum = -807 #bad session number for DBGetConnInfo
rcDBBadSessID = -806 #rcDBBadSessID
rcDBExec = -805 #rcDBExec
rcDBBreak = -804 #rcDBBreak
rcDBBadType = -803 #rcDBBadType
rcDBError = -802 #rcDBError
rcDBValue = -801 #rcDBValue
rcDBNull = -800 #rcDBNull
icTooManyProfilesErr = -677 #too many profiles in database
icProfileNotFoundErr = -676 #profile not found
icConfigInappropriateErr = -675 #incorrect manufacturer code
icConfigNotFoundErr = -674 #no internet configuration was found
icNoURLErr = -673 #no URL found
icNothingToOverrideErr = -672 #no component for the override component to capture
icNoMoreWritersErr = -671 #you cannot begin a write session because someone else is already doing it
icTruncatedErr = -670 #more data was present than was returned
icInternalErr = -669 #Internet Config internal error
icPrefDataErr = -668 #problem with preference data
icPermErr = -667 #cannot set preference
icPrefNotFoundErr = -666 #Internet preference not found
vmInvalidOwningProcessErr = -648 #current process does not own the BackingFileID or FileViewID
vmAddressNotInFileViewErr = -647 #address is not in a FileView
vmNoMoreFileViewsErr = -646 #no more FileViews were found
vmFileViewAccessErr = -645 #requested FileViewAccess cannot be obtained
vmInvalidFileViewIDErr = -644 #invalid FileViewID
vmNoMoreBackingFilesErr = -643 #no more BackingFiles were found
vmBusyBackingFileErr = -642 #open views found on BackingFile
vmMappingPrivilegesErr = -641 #requested MappingPrivileges cannot be obtained
vmInvalidBackingFileIDErr = -640 #invalid BackingFileID
noMMUErr = -626 #no MMU present
cannotDeferErr = -625 #unable to defer additional functions
interruptsMaskedErr = -624 #don't call with interrupts masked
notLockedErr = -623 #specified range of memory is not locked
cannotMakeContiguousErr = -622 #cannot make specified range contiguous
notHeldErr = -621 #specified range of memory is not held
notEnoughMemoryErr = -620 #insufficient physical memory
threadProtocolErr = -619 #threadProtocolErr
threadNotFoundErr = -618 #threadNotFoundErr
threadTooManyReqsErr = -617 #threadTooManyReqsErr
noUserInteractionAllowed = -610 #no user interaction allowed
connectionInvalid = -609 #connectionInvalid
noOutstandingHLE = -608 #noOutstandingHLE
bufferIsSmall = -607 #error returns from Post and Accept
appIsDaemon = -606 #app is BG-only, and launch flags disallow this
appMemFullErr = -605 #application SIZE not big enough for launch
hardwareConfigErr = -604 #hardware configuration not correct for call
protocolErr = -603 #app made module calls in improper order
appModeErr = -602 #memory mode is 32-bit, but app not 32-bit clean
memFragErr = -601 #not enough room to launch app w/special requirements
procNotFound = -600 #no eligible process with specified descriptor
driverHardwareGoneErr = -503 #disk driver's hardware was disconnected
hwParamErr = -502 #bad selector for _HWPriv
teScrapSizeErr = -501 #scrap item too big for text edit record
rgnTooBigErr = -500 #rgnTooBigErr
exUserBreak = -492 #user debugger break; execute debugger commands on stack
strUserBreak = -491 #user debugger break; display string on stack
userBreak = -490 #user debugger break
notThePublisherWrn = -463 #not the first registered publisher for that container
containerAlreadyOpenWrn = -462 #container already opened by this section
containerNotFoundWrn = -461 #could not find editionContainer at this time
multiplePublisherWrn = -460 #A Publisher is already registered for that container
badSubPartErr = -454 #can not use sub parts in this release
badEditionFileErr = -453 #edition file is corrupt
notRegisteredSectionErr = -452 #not a registered SectionRecord
badSectionErr = -451 #not a valid SectionRecord
editionMgrInitErr = -450 #edition manager not inited by this app
fsmUnknownFSMMessageErr = -438 #unknown message passed to FSM
fsmNoAlternateStackErr = -437 #no alternate stack for HFS CI
fsmBadFSDVersionErr = -436 #FSM version incompatible with FSD
fsmDuplicateFSIDErr = -435 #FSID already exists on InstallFS
fsmBadFSDLenErr = -434 #FSD size incompatible with current FSM vers
fsmBadFFSNameErr = -433 #Name length not 1 <= length <= 31
fsmBusyFFSErr = -432 #File system is busy, cannot be removed
fsmFFSNotFoundErr = -431 #Foreign File system does not exist - new Pack2 could return this error too
btKeyAttrErr = -417 #There is no such key attribute.
btKeyLenErr = -416 #Maximum key length is too long or equal to zero.
btRecNotFnd = -415 #Record cannot be found.
btDupRecErr = -414 #Record already exists.
btNoSpace = -413 #Can't allocate disk space.
notBTree = -410 #The file is not a dictionary.
gcrOnMFMErr = -400 #gcr format on high density media error
slotNumErr = -360 #invalid slot # error
smRecNotFnd = -351 #Record not found in the SRT.
smSRTOvrFlErr = -350 #SRT overflow.
smNoGoodOpens = -349 #No opens were successful in the loop.
smOffsetErr = -348 #Offset was too big (temporary error)
smByteLanesErr = -347 #NumByteLanes was determined to be zero.
smBadsPtrErr = -346 #Bad pointer was passed to sCalcsPointer
smsGetDrvrErr = -345 #Error occurred during _sGetDriver.
smNoMoresRsrcs = -344 #No more sResources
smDisDrvrNamErr = -343 #Error occurred during _sDisDrvrName.
smGetDrvrNamErr = -342 #Error occurred during _sGetDrvrName.
smCkStatusErr = -341 #Status of slot = fail.
smBlkMoveErr = -340 #_BlockMove error
smNewPErr = -339 #_NewPtr error
smSelOOBErr = -338 #Selector out of bounds error
smSlotOOBErr = -337 #Slot out of bounds error
smNilsBlockErr = -336 #Nil sBlock error (Don't allocate and try to use a nil sBlock)
smsPointerNil = -335 #LPointer is nil from sOffsetData. If this error occurs, check the sInfo rec for more information.
smCPUErr = -334 #Code revision is wrong
smCodeRevErr = -333 #Code revision is wrong
smReservedErr = -332 #Reserved field not zero
smBadsList = -331 #Bad sList: Id1 < Id2 < Id3 ... format is not followed.
smBadRefId = -330 #Reference Id not found in List
smBusErrTO = -320 #BusError time out.
smBadBoardId = -319 #BoardId was wrong; re-init the PRAM record.
smReservedSlot = -318 #slot is reserved, VM should not use this address space.
smInitTblVErr = -317 #An error occurred while trying to initialize the Slot Resource Table.
smInitStatVErr = -316 #The InitStatusV field was negative after primary or secondary init.
smNoBoardId = -315 #No Board Id.
smGetPRErr = -314 #Error occurred during _sGetPRAMRec (See SIMStatus).
smNoBoardSRsrc = -313 #No Board sResource.
smDisposePErr = -312 #_DisposePointer error
smFHBlkDispErr = -311 #Error occurred during _sDisposePtr (Dispose of FHeader block).
smFHBlockRdErr = -310 #Error occurred during _sGetFHeader.
smBLFieldBad = -309 #ByteLanes field was bad.
smUnExBusErr = -308 #Unexpected BusError
smResrvErr = -307 #Fatal reserved error. Reserved field != 0.
smNosInfoArray = -306 #No sInfoArray. Memory Mgr error.
smDisabledSlot = -305 #This slot is disabled (-305 used to be smLWTstBad)
smNoDir = -304 #Directory offset is Nil
smRevisionErr = -303 #Wrong revision level
smFormatErr = -302 #FHeader Format is not Apple's
smCRCFail = -301 #CRC check failed for declaration data
smEmptySlot = -300 #No card in slot
nmTypErr = -299 #Notification Manager: wrong queue type
smPriInitErr = -293 #Error; Cards could not be initialized.
smPRAMInitErr = -292 #Error; Slot Resource Table could not be initialized.
smSRTInitErr = -291 #Error; Slot Resource Table could not be initialized.
smSDMInitErr = -290 #Error; SDM could not be initialized.
midiInvalidCmdErr = -261 #command not supported for port type
midiDupIDErr = -260 #duplicate client ID
midiNameLenErr = -259 #name supplied is longer than 31 characters
midiWriteErr = -258 #MIDIWritePacket couldn't write to all connected ports
midiNoConErr = -257 #no connection exists between specified ports
midiVConnectRmvd = -256 #pending virtual connection removed
midiVConnectMade = -255 #pending virtual connection resolved
midiVConnectErr = -254 #pending virtual connection created
midiTooManyConsErr = -253 #too many connections made
midiTooManyPortsErr = -252 #too many ports already installed in the system
midiNoPortErr = -251 #no port with that ID found
midiNoClientErr = -250 #no client with that ID found
badInputText = -247 #badInputText
badDictFormat = -246 #badDictFormat
incompatibleVoice = -245 #incompatibleVoice
voiceNotFound = -244 #voiceNotFound
bufTooSmall = -243 #bufTooSmall
synthNotReady = -242 #synthNotReady
synthOpenFailed = -241 #synthOpenFailed
noSynthFound = -240 #noSynthFound
siUnknownQuality = -232 #invalid quality selector (returned by driver)
siUnknownInfoType = -231 #invalid info type selector (returned by driver)
siInputDeviceErr = -230 #input device hardware failure
siBadRefNum = -229 #invalid input device reference number
siBadDeviceName = -228 #input device could not be opened
siDeviceBusyErr = -227 #input device already in use
siInvalidSampleSize = -226 #invalid sample size
siInvalidSampleRate = -225 #invalid sample rate
siHardDriveTooSlow = -224 #hard drive too slow to record to disk
siInvalidCompression = -223 #invalid compression type
siNoBufferSpecified = -222 #returned by synchronous SPBRecord if nil buffer passed
siBadSoundInDevice = -221 #invalid index passed to SoundInGetIndexedDevice
siNoSoundInHardware = -220 #no Sound Input hardware
siVBRCompressionNotSupported = -213 #vbr audio compression not supported for this operation
noMoreRealTime = -212 #not enough CPU cycles left to add another task
channelNotBusy = -211 #channelNotBusy
buffersTooSmall = -210 #can not operate in the memory allowed
channelBusy = -209 #the Channel is being used for a PFD already
badFileFormat = -208 #was not type AIFF or was of bad format,corrupt
notEnoughBufferSpace = -207 #could not allocate enough memory
badFormat = -206 #Sound Manager Error Returns
badChannel = -205 #Sound Manager Error Returns
resProblem = -204 #Sound Manager Error Returns
queueFull = -203 #Sound Manager Error Returns
notEnoughHardwareErr = -201 #Sound Manager Error Returns
noHardwareErr = -200 #Sound Manager Error Returns
mapReadErr = -199 #map inconsistent with operation
resAttrErr = -198 #attribute inconsistent with operation
rmvRefFailed = -197 #RmveReference failed
rmvResFailed = -196 #RmveResource failed
addRefFailed = -195 #AddReference failed
addResFailed = -194 #AddResource failed
resFNotFound = -193 #Resource file not found
resNotFound = -192 #Resource not found
inputOutOfBounds = -190 #Offset or Count out of bounds
writingPastEnd = -189 #Writing past end of file
resourceInMemory = -188 #Resource already in memory
CantDecompress = -186 #resource bent ("the bends") - can't decompress a compressed resource
badExtResource = -185 #extended resource has a bad format.
cmNoCurrentProfile = -182 #Responder error
cmUnsupportedDataType = -181 #Responder error
cmCantDeleteProfile = -180 #Responder error
cmCantXYZ = -179 #CMM cant handle XYZ space
cmCantConcatenateError = -178 #Profile can't be concatenated
cmProfilesIdentical = -177 #Profiles the same
cmProfileNotFound = -176 #Responder error
cmMethodNotFound = -175 #CMM not present
cmMethodError = -171 #cmMethodError
cmProfileError = -170 #cmProfileError
cDepthErr = -157 #invalid pixel depth
cResErr = -156 #invalid resolution for MakeITable
cDevErr = -155 #invalid type of graphics device
cProtectErr = -154 #colorTable entry protection violation
cRangeErr = -153 #range error on colorTable request
cNoMemErr = -152 #failed to allocate memory for structure
cTempMemErr = -151 #failed to allocate memory for temporary structures
cMatchErr = -150 #Color2Index failed to find an index
insufficientStackErr = -149 #insufficientStackErr
pixMapTooDeepErr = -148 #pixMapTooDeepErr
rgnOverflowErr = -147 #rgnOverflowErr
noMemForPictPlaybackErr = -145 #noMemForPictPlaybackErr
userCanceledErr = -128 #userCanceledErr
hMenuFindErr = -127 #could not find HMenu's parent in MenuKey (wrong error code - obsolete)
mBarNFnd = -126 #system error code for MBDF not found
updPixMemErr = -125 #insufficient memory to update a pixmap
volGoneErr = -124 #Server volume has been disconnected.
wrgVolTypErr = -123 #Wrong volume type error [operation not supported for MFS]
badMovErr = -122 #Move into offspring error
tmwdoErr = -121 #No free WDCB available
dirNFErr = -120 #Directory not found
memLockedErr = -117 #trying to move a locked block (MoveHHi)
memSCErr = -116 #Size Check failed
memBCErr = -115 #Block Check failed
memPCErr = -114 #Pointer Check failed
memAZErr = -113 #Address in zone check failed
memPurErr = -112 #trying to purge a locked or non-purgeable block
memWZErr = -111 #WhichZone failed (applied to free block)
memAdrErr = -110 #address was odd; or out of range
nilHandleErr = -109 #Master Pointer was NIL in HandleZone or other
memFullErr = -108 #Not enough room in heap zone
noTypeErr = -102 #No object of that type in scrap
noScrapErr = -100 #No scrap exists error
memROZWarn = -99 #soft error in ROZ
portNotCf = -98 #driver Open error code (parameter RAM not configured for this connection)
portInUse = -97 #driver Open error code (port is in use)
portNotPwr = -96 #serial port not currently powered
excessCollsns = -95 #excessive collisions on write
lapProtErr = -94 #error in attaching/detaching protocol
noBridgeErr = -93 #no network bridge for non-local send
eLenErr = -92 #Length error ddpLenErr
eMultiErr = -91 #Multicast address error ddpSktErr
breakRecd = -90 #Break received (SCC)
rcvrErr = -89 #SCC receiver error (framing; parity; OR)
prInitErr = -88 #InitUtil found the parameter ram uninitialized
prWrErr = -87 #parameter ram written didn't read-verify
clkWrErr = -86 #time written did not verify
clkRdErr = -85 #unable to read same clock value twice
verErr = -84 #track failed to verify
fmt2Err = -83 #can't get enough sync
fmt1Err = -82 #can't find sector 0 after track format
sectNFErr = -81 #sector number never found on a track
seekErr = -80 #track number wrong on address mark
spdAdjErr = -79 #unable to correctly adjust disk speed
twoSideErr = -78 #tried to read 2nd side on a 1-sided drive
initIWMErr = -77 #unable to initialize IWM
tk0BadErr = -76 #track 0 detect doesn't change
cantStepErr = -75 #step handshake failed
wrUnderrun = -74 #write underrun occurred
badDBtSlp = -73 #bad data mark bit slip nibbles
badDCksum = -72 #bad data mark checksum
noDtaMkErr = -71 #couldn't find a data mark header
badBtSlpErr = -70 #bad addr mark bit slip nibbles
badCksmErr = -69 #addr mark checksum didn't check
dataVerErr = -68 #read verify compare failed
noAdrMkErr = -67 #couldn't find valid addr mark
noNybErr = -66 #couldn't find 5 nybbles in 200 tries
offLinErr = -65 #r/w requested for an off-line drive
fontDecError = -64 #error during font declaration
wrPermErr = -61 #write permissions error
badMDBErr = -60 #bad master directory block
fsRnErr = -59 #file system internal error: during rename the old entry was deleted but could not be restored.
extFSErr = -58 #volume in question belongs to an external fs
noMacDskErr = -57 #not a mac diskette (sig bytes are wrong)
nsDrvErr = -56 #no such drive (tried to mount a bad drive num)
volOnLinErr = -55 #drive volume already on-line at MountVol
permErr = -54 #permissions error (on file open)
volOffLinErr = -53 #volume not on line error (was Ejected)
gfpErr = -52 #get file position error
rfNumErr = -51 #refnum error
paramErr = -50 #error in user parameter list
opWrErr = -49 #file already open with write permission
dupFNErr = -48 #duplicate filename (rename)
fBsyErr = -47 #File is busy (delete)
vLckdErr = -46 #volume is locked
fLckdErr = -45 #file is locked
wPrErr = -44 #diskette is write protected.
fnfErr = -43 #File not found
tmfoErr = -42 #too many files open
mFulErr = -41 #memory full (open) or file won't fit (load)
posErr = -40 #tried to position to before start of file (r/w)
eofErr = -39 #End of file
fnOpnErr = -38 #File not open
bdNamErr = -37 #there may be no bad names in the final system!
ioErr = -36 #I/O error (bummers)
nsvErr = -35 #no such volume
dskFulErr = -34 #disk full
dirFulErr = -33 #Directory full
dceExtErr = -30 #dce extension error
unitTblFullErr = -29 #unit table has no more entries
notOpenErr = -28 #Couldn't rd/wr/ctl/sts cause driver not opened
iIOAbortErr = -27 #IO abort error (Printing Manager)
dInstErr = -26 #DrvrInstall couldn't find driver in resources
dRemovErr = -25 #tried to remove an open driver
closErr = -24 #I/O System Errors
openErr = -23 #I/O System Errors
unitEmptyErr = -22 #I/O System Errors
badUnitErr = -21 #I/O System Errors
writErr = -20 #I/O System Errors
readErr = -19 #I/O System Errors
statusErr = -18 #I/O System Errors
controlErr = -17 #I/O System Errors
dsExtensionsDisabled = -13 #say Extensions Disabled
dsHD20Installed = -12 #say HD20 Startup
dsDisassemblerInstalled = -11 #say Disassembler Installed
dsMacsBugInstalled = -10 #say MacsBug Installed
seNoDB = -8 #no debugger installed to handle debugger command
SlpTypeErr = -5 #invalid queue element
unimpErr = -4 #unimplemented core routine
corErr = -3 #core routine number out of range
dsNoExtsDisassembler = -2 #not a SysErr, just a placeholder
qErr = -1 #queue element not found during deletion
tsmComponentNoErr = 0 #component result = no error
EPERM = 1 #Operation not permitted
ENOENT = 2 #No such file or directory
ESRCH = 3 #No such process
EINTR = 4 #Interrupted system call
EIO = 5 #Input/output error
ENXIO = 6 #Device not configured
E2BIG = 7 #Argument list too long
ENOEXEC = 8 #Exec format error
EBADF = 9 #Bad file descriptor
ECHILD = 10 #No child processes
EDEADLK = 11 #Resource deadlock avoided
ENOMEM = 12 #Cannot allocate memory
EACCES = 13 #Permission denied
EFAULT = 14 #Bad address
ECANCELED = 15 #Operation cancelled
EBUSY = 16 #Device busy
EEXIST = 17 #File exists
EXDEV = 18 #Cross-device link
ENODEV = 19 #Operation not supported by device
ENOTDIR = 20 #Not a directory
EISDIR = 21 #Is a directory
EINVAL = 22 #Invalid argument
ENFILE = 23 #Too many open files in system
EMFILE = 24 #Too many open files
ENOTTY = 25 #Inappropriate ioctl for device
ESIGPARM = 26 #Signal error
EFBIG = 27 #File too large
ENOSPC = 28 #No space left on device
ESPIPE = 29 #Illegal seek
EROFS = 30 #Read-only file system
EMLINK = 31 #Too many links
EPIPE = 32 #Broken pipe
EDOM = 33 #Numerical argument out of domain
ERANGE = 34 #Result too large
EAGAIN = 35 #Resource temporarily unavailable
EINPROGRESS = 36 #Operation now in progress
EALREADY = 37 #Operation already in progress
ENOTSOCK = 38 #Socket operation on non-socket
EDESTADDRREQ = 39 #Destination address required
EMSGSIZE = 40 #Message too long
EPROTOTYPE = 41 #Protocol wrong type for socket
ENOPROTOOPT = 42 #Protocol not available
EPROTONOSUPPORT = 43 #Protocol not supported
ESOCKTNOSUPPORT = 44 #Socket type not supported
EOPNOTSUPP = 45 #Operation not supported
EPFNOSUPPORT = 46 #Protocol family not supported
EAFNOSUPPORT = 47 #Address family not supported by protocol family
EADDRINUSE = 48 #Address already in use
EADDRNOTAVAIL = 49 #Can't assign requested address
ENETDOWN = 50 #Network is down
ENETUNREACH = 51 #Network is unreachable
ENETRESET = 52 #Network dropped connection on reset
ECONNABORTED = 53 #Software caused connection abort
ECONNRESET = 54 #Connection reset by peer
ENOBUFS = 55 #No buffer space available
EISCONN = 56 #Socket is already connected
ENOTCONN = 57 #Socket is not connected
ESHUTDOWN = 58 #Can't send after socket shutdown
ETOOMANYREFS = 59 #Too many references: can't splice
ETIMEDOUT = 60 #Operation timed out
ECONNREFUSED = 61 #Connection refused
ELOOP = 62 #Too many levels of symbolic links
ENAMETOOLONG = 63 #File name too long
EHOSTDOWN = 64 #Host is down
EHOSTUNREACH = 65 #No route to host
ENOTEMPTY = 66 #Directory not empty
ELOOK = 67 #Internal mapping for kOTLookErr, don't return to client
ENOLCK = 77 #No locks available
ENOSYS = 78 #Function not implemented
EILSEQ = 88 #Wide character encoding error
EUNKNOWN = 99 #Unknown error
| bsd-2-clause |
yashodhank/frappe | frappe/commands/__init__.py | 10 | 1452 | # Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, absolute_import
import sys
import click
import cProfile
import StringIO
import pstats
import frappe
import frappe.utils
from functools import wraps
click.disable_unicode_literals_warning = True
def pass_context(f):
@wraps(f)
def _func(ctx, *args, **kwargs):
profile = ctx.obj['profile']
if profile:
pr = cProfile.Profile()
pr.enable()
ret = f(frappe._dict(ctx.obj), *args, **kwargs)
if profile:
pr.disable()
s = StringIO.StringIO()
ps = pstats.Stats(pr, stream=s)\
.sort_stats('cumtime', 'tottime', 'ncalls')
ps.print_stats()
print s.getvalue()
return ret
return click.pass_context(_func)
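# A minimal usage sketch (hypothetical command, not part of this module):
# pass_context hands the command ctx.obj as a frappe._dict and, when the
# --profile option was set on the group, wraps the call in cProfile and
# prints the cumulative stats afterwards.
#
# @click.command('ping')
# @pass_context
# def ping(context):
#     print 'pong'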
def get_site(context):
try:
site = context.sites[0]
return site
except (IndexError, TypeError):
print 'Please specify --site sitename'
sys.exit(1)
def call_command(cmd, context):
return click.Context(cmd, obj=context).forward(cmd)
def get_commands():
# prevent circular imports
from .docs import commands as doc_commands
from .scheduler import commands as scheduler_commands
from .site import commands as site_commands
from .translate import commands as translate_commands
from .utils import commands as utils_commands
return list(set(doc_commands + scheduler_commands + site_commands + translate_commands + utils_commands))
commands = get_commands()
| mit |
erwinsanchez/bitcoinwithkeccak | contrib/bitrpc/bitrpc.py | 84 | 9663 | from jsonrpc import ServiceProxy
import sys
import string
import getpass
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
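# Example invocations (assumed; the command name is read from sys.argv below
# and dispatched to the matching RPC call):
#   python bitrpc.py getinfo
#   python bitrpc.py getbalance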
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:8332")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:8332")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "encryptwallet":
try:
pwd = getpass.getpass(prompt="Enter passphrase: ")
pwd2 = getpass.getpass(prompt="Repeat passphrase: ")
if pwd == pwd2:
access.encryptwallet(pwd)
print "\n---Wallet encrypted. Server stopping, restart to run with encrypted wallet---\n"
else:
print "\n---Passphrases do not match---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.getwork(data)
except:
print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = getpass.getpass(prompt="Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = getpass.getpass(prompt="Enter old wallet passphrase: ")
pwd2 = getpass.getpass(prompt="Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| mit |
titasakgm/brc-stock | openerp/addons/report_geraldo/lib/geraldo/site/newsite/django_1_0/tests/regressiontests/forms/error_messages.py | 13 | 10396 | # -*- coding: utf-8 -*-
tests = r"""
>>> from django.forms import *
>>> from django.core.files.uploadedfile import SimpleUploadedFile
# CharField ###################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['min_length'] = 'LENGTH %(length)s, MIN LENGTH %(min)s'
>>> e['max_length'] = 'LENGTH %(length)s, MAX LENGTH %(max)s'
>>> f = CharField(min_length=5, max_length=10, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('1234')
Traceback (most recent call last):
...
ValidationError: [u'LENGTH 4, MIN LENGTH 5']
>>> f.clean('12345678901')
Traceback (most recent call last):
...
ValidationError: [u'LENGTH 11, MAX LENGTH 10']
# IntegerField ################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['min_value'] = 'MIN VALUE IS %s'
>>> e['max_value'] = 'MAX VALUE IS %s'
>>> f = IntegerField(min_value=5, max_value=10, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean('4')
Traceback (most recent call last):
...
ValidationError: [u'MIN VALUE IS 5']
>>> f.clean('11')
Traceback (most recent call last):
...
ValidationError: [u'MAX VALUE IS 10']
# FloatField ##################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['min_value'] = 'MIN VALUE IS %s'
>>> e['max_value'] = 'MAX VALUE IS %s'
>>> f = FloatField(min_value=5, max_value=10, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean('4')
Traceback (most recent call last):
...
ValidationError: [u'MIN VALUE IS 5']
>>> f.clean('11')
Traceback (most recent call last):
...
ValidationError: [u'MAX VALUE IS 10']
# DecimalField ################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['min_value'] = 'MIN VALUE IS %s'
>>> e['max_value'] = 'MAX VALUE IS %s'
>>> e['max_digits'] = 'MAX DIGITS IS %s'
>>> e['max_decimal_places'] = 'MAX DP IS %s'
>>> e['max_whole_digits'] = 'MAX DIGITS BEFORE DP IS %s'
>>> f = DecimalField(min_value=5, max_value=10, error_messages=e)
>>> f2 = DecimalField(max_digits=4, decimal_places=2, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean('4')
Traceback (most recent call last):
...
ValidationError: [u'MIN VALUE IS 5']
>>> f.clean('11')
Traceback (most recent call last):
...
ValidationError: [u'MAX VALUE IS 10']
>>> f2.clean('123.45')
Traceback (most recent call last):
...
ValidationError: [u'MAX DIGITS IS 4']
>>> f2.clean('1.234')
Traceback (most recent call last):
...
ValidationError: [u'MAX DP IS 2']
>>> f2.clean('123.4')
Traceback (most recent call last):
...
ValidationError: [u'MAX DIGITS BEFORE DP IS 2']
# DateField ###################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> f = DateField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
# TimeField ###################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> f = TimeField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
# DateTimeField ###############################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> f = DateTimeField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
# RegexField ##################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['min_length'] = 'LENGTH %(length)s, MIN LENGTH %(min)s'
>>> e['max_length'] = 'LENGTH %(length)s, MAX LENGTH %(max)s'
>>> f = RegexField(r'^\d+$', min_length=5, max_length=10, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abcde')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean('1234')
Traceback (most recent call last):
...
ValidationError: [u'LENGTH 4, MIN LENGTH 5']
>>> f.clean('12345678901')
Traceback (most recent call last):
...
ValidationError: [u'LENGTH 11, MAX LENGTH 10']
# EmailField ##################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['min_length'] = 'LENGTH %(length)s, MIN LENGTH %(min)s'
>>> e['max_length'] = 'LENGTH %(length)s, MAX LENGTH %(max)s'
>>> f = EmailField(min_length=8, max_length=10, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abcdefgh')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean('[email protected]')
Traceback (most recent call last):
...
ValidationError: [u'LENGTH 7, MIN LENGTH 8']
>>> f.clean('[email protected]')
Traceback (most recent call last):
...
ValidationError: [u'LENGTH 11, MAX LENGTH 10']
# FileField ##################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['missing'] = 'MISSING'
>>> e['empty'] = 'EMPTY FILE'
>>> f = FileField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean(SimpleUploadedFile('name', None))
Traceback (most recent call last):
...
ValidationError: [u'EMPTY FILE']
>>> f.clean(SimpleUploadedFile('name', ''))
Traceback (most recent call last):
...
ValidationError: [u'EMPTY FILE']
# URLField ##################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID'
>>> e['invalid_link'] = 'INVALID LINK'
>>> f = URLField(verify_exists=True, error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('abc.c')
Traceback (most recent call last):
...
ValidationError: [u'INVALID']
>>> f.clean('http://www.broken.djangoproject.com')
Traceback (most recent call last):
...
ValidationError: [u'INVALID LINK']
# BooleanField ################################################################
>>> e = {'required': 'REQUIRED'}
>>> f = BooleanField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
# ChoiceField #################################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid_choice'] = '%(value)s IS INVALID CHOICE'
>>> f = ChoiceField(choices=[('a', 'aye')], error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('b')
Traceback (most recent call last):
...
ValidationError: [u'b IS INVALID CHOICE']
# MultipleChoiceField #########################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid_choice'] = '%(value)s IS INVALID CHOICE'
>>> e['invalid_list'] = 'NOT A LIST'
>>> f = MultipleChoiceField(choices=[('a', 'aye')], error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('b')
Traceback (most recent call last):
...
ValidationError: [u'NOT A LIST']
>>> f.clean(['b'])
Traceback (most recent call last):
...
ValidationError: [u'b IS INVALID CHOICE']
# SplitDateTimeField ##########################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid_date'] = 'INVALID DATE'
>>> e['invalid_time'] = 'INVALID TIME'
>>> f = SplitDateTimeField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean(['a', 'b'])
Traceback (most recent call last):
...
ValidationError: [u'INVALID DATE', u'INVALID TIME']
# IPAddressField ##############################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid'] = 'INVALID IP ADDRESS'
>>> f = IPAddressField(error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('127.0.0')
Traceback (most recent call last):
...
ValidationError: [u'INVALID IP ADDRESS']
###############################################################################
# Create choices for the model choice field tests below.
>>> from regressiontests.forms.models import ChoiceModel
>>> ChoiceModel.objects.create(pk=1, name='a')
<ChoiceModel: ChoiceModel object>
>>> ChoiceModel.objects.create(pk=2, name='b')
<ChoiceModel: ChoiceModel object>
>>> ChoiceModel.objects.create(pk=3, name='c')
<ChoiceModel: ChoiceModel object>
# ModelChoiceField ############################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid_choice'] = 'INVALID CHOICE'
>>> f = ModelChoiceField(queryset=ChoiceModel.objects.all(), error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('4')
Traceback (most recent call last):
...
ValidationError: [u'INVALID CHOICE']
# ModelMultipleChoiceField ####################################################
>>> e = {'required': 'REQUIRED'}
>>> e['invalid_choice'] = '%s IS INVALID CHOICE'
>>> e['list'] = 'NOT A LIST OF VALUES'
>>> f = ModelMultipleChoiceField(queryset=ChoiceModel.objects.all(), error_messages=e)
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'REQUIRED']
>>> f.clean('3')
Traceback (most recent call last):
...
ValidationError: [u'NOT A LIST OF VALUES']
>>> f.clean(['4'])
Traceback (most recent call last):
...
ValidationError: [u'4 IS INVALID CHOICE']
"""
| agpl-3.0 |
theyaa/Impala | thirdparty/hive-1.1.0-cdh5.7.0-SNAPSHOT/lib/py/thrift/transport/THttpClient.py | 71 | 2916 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TTransport import *
from cStringIO import StringIO
import urlparse
import httplib
import warnings
class THttpClient(TTransportBase):
"""Http implementation of TTransport base."""
def __init__(self, uri_or_host, port=None, path=None):
"""THttpClient supports two different types constructor parameters.
THttpClient(host, port, path) - deprecated
THttpClient(uri)
Only the second supports https."""
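# A sketch of the two accepted forms (hypothetical endpoint):
#   THttpClient('http://localhost:9090/service')  # preferred; also handles https
#   THttpClient('localhost', 9090, '/service')    # deprecated three-argument form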
if port is not None:
warnings.warn("Please use the THttpClient('http://host:port/path') syntax", DeprecationWarning, stacklevel=2)
self.host = uri_or_host
self.port = port
assert path
self.path = path
self.scheme = 'http'
else:
parsed = urlparse.urlparse(uri_or_host)
self.scheme = parsed.scheme
assert self.scheme in ('http', 'https')
if self.scheme == 'http':
self.port = parsed.port or httplib.HTTP_PORT
elif self.scheme == 'https':
self.port = parsed.port or httplib.HTTPS_PORT
self.host = parsed.hostname
self.path = parsed.path
self.__wbuf = StringIO()
self.__http = None
def open(self):
if self.scheme == 'http':
self.__http = httplib.HTTP(self.host, self.port)
else:
self.__http = httplib.HTTPS(self.host, self.port)
def close(self):
self.__http.close()
self.__http = None
def isOpen(self):
return self.__http is not None
def read(self, sz):
return self.__http.file.read(sz)
def write(self, buf):
self.__wbuf.write(buf)
def flush(self):
if self.isOpen():
self.close()
self.open()
# Pull data out of buffer
data = self.__wbuf.getvalue()
self.__wbuf = StringIO()
# HTTP request
self.__http.putrequest('POST', self.path)
# Write headers
self.__http.putheader('Host', self.host)
self.__http.putheader('Content-Type', 'application/x-thrift')
self.__http.putheader('Content-Length', str(len(data)))
self.__http.endheaders()
# Write payload
self.__http.send(data)
# Get reply to flush the request
self.code, self.message, self.headers = self.__http.getreply()
| apache-2.0 |
eagleamon/home-assistant | tests/components/sensor/test_random.py | 17 | 1067 | """The test for the random number sensor platform."""
import unittest
from homeassistant.bootstrap import setup_component
from tests.common import get_test_home_assistant
class TestRandomSensor(unittest.TestCase):
"""Test the Random number sensor."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_random_sensor(self):
"""Test the Randowm number sensor."""
config = {
'sensor': {
'platform': 'random',
'name': 'test',
'minimum': 10,
'maximum': 20,
}
}
assert setup_component(self.hass, 'sensor', config)
state = self.hass.states.get('sensor.test')
self.assertLessEqual(int(state.state), config['sensor']['maximum'])
self.assertGreaterEqual(int(state.state), config['sensor']['minimum'])
| apache-2.0 |
analyseuc3m/ANALYSE-v1 | lms/djangoapps/course_api/blocks/transformers/navigation.py | 35 | 3103 | """
Transformer that annotates course blocks with navigation (descendant) data for the Blocks API.
"""
from openedx.core.lib.block_structure.transformer import BlockStructureTransformer
from .block_depth import BlockDepthTransformer
class DescendantList(object):
"""
Holds a shared list of descendant block ids collected during traversal.
"""
def __init__(self):
self.items = []
class BlockNavigationTransformer(BlockStructureTransformer):
"""
Creates a table of contents for the course.
Prerequisites: BlockDepthTransformer must be run before this in the
transform phase.
"""
VERSION = 1
BLOCK_NAVIGATION = 'block_nav'
BLOCK_NAVIGATION_FOR_CHILDREN = 'children_block_nav'
def __init__(self, nav_depth):
self.nav_depth = nav_depth
@classmethod
def name(cls):
return "blocks_api:block_navigation"
@classmethod
def collect(cls, block_structure):
"""
Collects any information that's necessary to execute this transformer's
transform method.
"""
# collect basic xblock fields
block_structure.request_xblock_fields('hide_from_toc')
def transform(self, usage_info, block_structure):
"""
Mutates block_structure based on the given usage_info.
"""
if self.nav_depth is None:
return
for block_key in block_structure.topological_traversal():
parents = block_structure.get_parents(block_key)
parents_descendants_list = set()
for parent_key in parents:
parent_nav = block_structure.get_transformer_block_field(
parent_key,
self,
self.BLOCK_NAVIGATION_FOR_CHILDREN,
)
if parent_nav is not None:
parents_descendants_list |= parent_nav
children_descendants_list = None
if (
not block_structure.get_xblock_field(block_key, 'hide_from_toc', False) and (
not parents or
any(parent_desc_list is not None for parent_desc_list in parents_descendants_list)
)
):
# add self to parent's descendants
for parent_desc_list in parents_descendants_list:
if parent_desc_list is not None:
parent_desc_list.items.append(unicode(block_key))
if BlockDepthTransformer.get_block_depth(block_structure, block_key) > self.nav_depth:
children_descendants_list = parents_descendants_list
else:
block_nav_list = DescendantList()
children_descendants_list = {block_nav_list}
block_structure.set_transformer_block_field(
block_key,
self,
self.BLOCK_NAVIGATION,
block_nav_list.items
)
block_structure.set_transformer_block_field(
block_key,
self,
self.BLOCK_NAVIGATION_FOR_CHILDREN,
children_descendants_list
)
| agpl-3.0 |
DVegaCapital/zipline | zipline/data/treasuries_can.py | 32 | 4188 | #
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import requests
from . loader_utils import (
source_to_records
)
from zipline.data.treasuries import (
treasury_mappings, get_treasury_date, get_treasury_rate
)
_CURVE_MAPPINGS = {
'date': (get_treasury_date, "Date"),
'1month': (get_treasury_rate, "V39063"),
'3month': (get_treasury_rate, "V39065"),
'6month': (get_treasury_rate, "V39066"),
'1year': (get_treasury_rate, "V39067"),
'2year': (get_treasury_rate, "V39051"),
'3year': (get_treasury_rate, "V39052"),
'5year': (get_treasury_rate, "V39053"),
'7year': (get_treasury_rate, "V39054"),
'10year': (get_treasury_rate, "V39055"),
# Bank of Canada refers to this as 'Long' Rate, approximately 30 years.
'30year': (get_treasury_rate, "V39056"),
}
BILLS = ['V39063', 'V39065', 'V39066', 'V39067']
BONDS = ['V39051', 'V39052', 'V39053', 'V39054', 'V39055', 'V39056']
def get_treasury_source(start_date=None, end_date=None):
today = datetime.date.today()
# Bank of Canada only has 10 years of data and has this in the URL.
restriction = datetime.date(today.year - 10, today.month, today.day)
if not end_date:
end_date = today
if not start_date:
start_date = restriction
bill_url = (
"http://www.bankofcanada.ca/stats/results/csv?"
"lP=lookup_tbill_yields.php&sR={restrict}&se="
"L_V39063-L_V39065-L_V39066-L_V39067&dF={start}&dT={end}"
.format(restrict=restriction.strftime("%Y-%m-%d"),
start=start_date.strftime("%Y-%m-%d"),
end=end_date.strftime("%Y-%m-%d"),
)
)
bond_url = (
"http://www.bankofcanada.ca/stats/results/csv?"
"lP=lookup_bond_yields.php&sR={restrict}&se="
"L_V39051-L_V39052-L_V39053-L_V39054-L_V39055-L_V39056"
"&dF={start}&dT={end}"
.format(restrict=restriction.strftime("%Y-%m-%d"),
start=start_date.strftime("%Y-%m-%d"),
end=end_date.strftime("%Y-%m-%d")
)
)
res_bill = requests.get(bill_url, stream=True)
res_bond = requests.get(bond_url, stream=True)
bill_iter = res_bill.iter_lines()
bond_iter = res_bond.iter_lines()
bill_row = ""
while ",".join(BILLS) not in bill_row:
bill_row = bill_iter.next()
if 'Daily series:' in bill_row:
bill_end_date = datetime.datetime.strptime(
bill_row.split(' - ')[1].strip(),
"%Y-%m-%d").date()
bill_header = bill_row.split(",")
bond_row = ""
while ",".join(BONDS) not in bond_row:
bond_row = bond_iter.next()
if 'Daily series:' in bond_row:
bond_end_date = datetime.datetime.strptime(
bond_row.split(' - ')[1].strip(),
"%Y-%m-%d").date()
bond_header = bond_row.split(",")
# Line up the two dates
if bill_end_date > bond_end_date:
bill_iter.next()
elif bond_end_date > bill_end_date:
bond_iter.next()
for bill_row in bill_iter:
bond_row = bond_iter.next()
bill_dict = dict(zip(bill_header, bill_row.split(",")))
bond_dict = dict(zip(bond_header, bond_row.split(",")))
if ' Bank holiday' in bond_row.split(",") + bill_row.split(","):
continue
if ' Not available' in bond_row.split(",") + bill_row.split(","):
continue
bill_dict.update(bond_dict)
yield bill_dict
def get_treasury_data():
mappings = treasury_mappings(_CURVE_MAPPINGS)
source = get_treasury_source()
return source_to_records(mappings, source)
| apache-2.0 |
snyderr/robotframework | src/robot/output/librarylogger.py | 3 | 2191 | # Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the public test library logging API.
This is exposed via :py:mod:`robot.api.logger`. Implementation must reside
here to avoid cyclic imports.
"""
import sys
import threading
from robot.errors import DataError
from robot.utils import unic, console_encode
from .logger import LOGGER
from .loggerhelper import Message
LOGGING_THREADS = ('MainThread', 'RobotFrameworkTimeoutThread')
def write(msg, level, html=False):
# Callable messages allow lazy logging internally, but we don't want to
# expose this functionality publicly. See the following issue for details:
# https://github.com/robotframework/robotframework/issues/1505
if callable(msg):
msg = unic(msg)
if level.upper() not in ('TRACE', 'DEBUG', 'INFO', 'HTML', 'WARN', 'ERROR'):
raise DataError("Invalid log level '%s'." % level)
if threading.currentThread().getName() in LOGGING_THREADS:
LOGGER.log_message(Message(msg, level, html))
def trace(msg, html=False):
write(msg, 'TRACE', html)
def debug(msg, html=False):
write(msg, 'DEBUG', html)
def info(msg, html=False, also_console=False):
write(msg, 'INFO', html)
if also_console:
console(msg)
def warn(msg, html=False):
write(msg, 'WARN', html)
def error(msg, html=False):
write(msg, 'ERROR', html)
def console(msg, newline=True, stream='stdout'):
msg = unic(msg)
if newline:
msg += '\n'
stream = sys.__stdout__ if stream.lower() != 'stderr' else sys.__stderr__
stream.write(console_encode(msg, stream=stream))
stream.flush()
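# A minimal usage sketch (these functions back robot.api.logger, which is the
# public entry point for test libraries):
#   info('Keyword finished', also_console=True)
#   console('still running...', newline=False)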
| apache-2.0 |
Edu-Glez/Bank_sentiment_analysis | env/lib/python3.6/site-packages/zmq/eventloop/minitornado/platform/auto.py | 50 | 1424 | #!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of platform-specific functionality.
For each function or class described in `tornado.platform.interface`,
the appropriate platform-specific implementation exists in this module.
Most code that needs access to this functionality should do e.g.::
from tornado.platform.auto import set_close_exec
"""
from __future__ import absolute_import, division, print_function, with_statement
import os
if os.name == 'nt':
from .common import Waker
from .windows import set_close_exec
else:
from .posix import set_close_exec, Waker
try:
# monotime monkey-patches the time module to have a monotonic function
# in versions of python before 3.3.
import monotime
except ImportError:
pass
try:
from time import monotonic as monotonic_time
except ImportError:
monotonic_time = None
| apache-2.0 |
candrews/portage | pym/portage/dbapi/_SyncfsProcess.py | 8 | 1169 | # Copyright 2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from portage import os
from portage.util._ctypes import find_library, LoadLibrary
from portage.util._async.ForkProcess import ForkProcess
class SyncfsProcess(ForkProcess):
"""
Isolate ctypes usage in a subprocess, in order to avoid
potential problems with stale cached libraries as
described in bug #448858, comment #14 (also see
http://bugs.python.org/issue14597).
"""
__slots__ = ('paths',)
@staticmethod
def _get_syncfs():
filename = find_library("c")
if filename is not None:
library = LoadLibrary(filename)
if library is not None:
try:
return library.syncfs
except AttributeError:
pass
return None
def _run(self):
syncfs_failed = False
syncfs = self._get_syncfs()
if syncfs is not None:
for path in self.paths:
try:
fd = os.open(path, os.O_RDONLY)
except OSError:
pass
else:
try:
if syncfs(fd) != 0:
# Happens with PyPy (bug #446610)
syncfs_failed = True
finally:
os.close(fd)
if syncfs is None or syncfs_failed:
return 1
return os.EX_OK
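# A minimal usage sketch (the scheduler wiring is an assumption; ForkProcess
# runs _run() in a forked child, so stale ctypes state never touches the main
# process):
#   proc = SyncfsProcess(paths=['/usr', '/var'], scheduler=event_loop)
#   proc.start()
#   proc.wait()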
| gpl-2.0 |
MiLk/youtube-dl | youtube_dl/extractor/wimp.py | 13 | 1787 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from .youtube import YoutubeIE
class WimpIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?wimp\.com/([^/]+)/'
_TESTS = [{
'url': 'http://www.wimp.com/maruexhausted/',
'md5': 'f1acced123ecb28d9bb79f2479f2b6a1',
'info_dict': {
'id': 'maruexhausted',
'ext': 'flv',
'title': 'Maru is exhausted.',
'description': 'md5:57e099e857c0a4ea312542b684a869b8',
}
}, {
# youtube video
'url': 'http://www.wimp.com/clowncar/',
'info_dict': {
'id': 'cG4CEr2aiSg',
'ext': 'mp4',
'title': 'Basset hound clown car...incredible!',
'description': 'md5:8d228485e0719898c017203f900b3a35',
'uploader': 'Gretchen Hoey',
'uploader_id': 'gretchenandjeff1',
'upload_date': '20140303',
},
'add_ie': ['Youtube'],
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group(1)
webpage = self._download_webpage(url, video_id)
video_url = self._search_regex(
r's1\.addVariable\("file",\s*"([^"]+)"\);', webpage, 'video URL')
if YoutubeIE.suitable(video_url):
self.to_screen('Found YouTube video')
return {
'_type': 'url',
'url': video_url,
'ie_key': YoutubeIE.ie_key(),
}
return {
'id': video_id,
'url': video_url,
'title': self._og_search_title(webpage),
'thumbnail': self._og_search_thumbnail(webpage),
'description': self._og_search_description(webpage),
}
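# A minimal usage sketch (youtube-dl normally drives extractors through its
# core; extract_info is the supported entry point):
#   from youtube_dl import YoutubeDL
#   ydl = YoutubeDL()
#   info = ydl.extract_info('http://www.wimp.com/maruexhausted/', download=False)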
| unlicense |
benoitsteiner/tensorflow | tensorflow/python/ops/math_grad.py | 5 | 33823 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in math_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
def _safe_shape_div(x, y):
"""Divides `x / y` assuming `x, y >= 0`, treating `0 / 0 = 0`."""
return x // math_ops.maximum(y, 1)
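# For example (a worked case): _safe_shape_div([6, 0], [3, 0]) evaluates to
# [2, 0], because every zero divisor is clamped to 1 before the floor division.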
@ops.RegisterGradient("Sum")
def _SumGrad(op, grad):
"""Gradient for Sum."""
# Fast path for when reducing to a scalar and ndims is known: adds only
# Reshape and Tile ops (and possibly a Shape).
if (op.inputs[0].get_shape().ndims is not None and
op.inputs[1].op.type == "Const"):
rank = op.inputs[0].get_shape().ndims
axes = tensor_util.MakeNdarray(op.inputs[1].op.get_attr("value"))
if np.array_equal(axes, np.arange(rank)): # Reduce all dims.
grad = array_ops.reshape(grad, [1] * rank)
# If shape is not fully defined (but rank is), we use Shape.
if op.inputs[0].get_shape().is_fully_defined():
input_shape = op.inputs[0].get_shape().as_list()
else:
input_shape = array_ops.shape(op.inputs[0])
return [array_ops.tile(grad, input_shape), None]
input_shape = array_ops.shape(op.inputs[0])
output_shape_kept_dims = math_ops.reduced_shape(input_shape, op.inputs[1])
tile_scaling = _safe_shape_div(input_shape, output_shape_kept_dims)
grad = array_ops.reshape(grad, output_shape_kept_dims)
return [array_ops.tile(grad, tile_scaling), None]
def _MinOrMaxGrad(op, grad):
"""Gradient for Min or Max. Amazingly it's precisely the same code."""
input_shape = array_ops.shape(op.inputs[0])
output_shape_kept_dims = math_ops.reduced_shape(input_shape, op.inputs[1])
y = op.outputs[0]
y = array_ops.reshape(y, output_shape_kept_dims)
grad = array_ops.reshape(grad, output_shape_kept_dims)
# Compute the number of selected (maximum or minimum) elements in each
# reduction dimension. If there are multiple minimum or maximum elements
# then the gradient will be divided between them.
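# For example, reducing x = [3., 1., 3.] with Max yields y = 3.; the two
# selected entries each receive grad / 2 and the middle entry receives 0.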
indicators = math_ops.cast(math_ops.equal(y, op.inputs[0]), grad.dtype)
num_selected = array_ops.reshape(
math_ops.reduce_sum(indicators, op.inputs[1]), output_shape_kept_dims)
return [math_ops.div(indicators, num_selected) * grad, None]
@ops.RegisterGradient("Max")
def _MaxGrad(op, grad):
"""Gradient for Max."""
return _MinOrMaxGrad(op, grad)
@ops.RegisterGradient("Min")
def _MinGrad(op, grad):
return _MinOrMaxGrad(op, grad)
@ops.RegisterGradient("Mean")
def _MeanGrad(op, grad):
"""Gradient for Mean."""
sum_grad = _SumGrad(op, grad)[0]
input_shape = array_ops.shape(op.inputs[0])
output_shape = array_ops.shape(op.outputs[0])
factor = _safe_shape_div(
math_ops.reduce_prod(input_shape), math_ops.reduce_prod(output_shape))
return sum_grad / math_ops.cast(factor, sum_grad.dtype), None
@ops.RegisterGradient("Prod")
def _ProdGrad(op, grad):
"""Gradient for Prod."""
# The gradient can be expressed by dividing the product by each entry of the
# input tensor, but this approach can't deal with zeros in the input.
# Here, we avoid this problem by composing the output as a product of two
# cumprod operations.
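# For example, with x = [a, b, c] the exclusive cumprod from the left is
# [1, a, a*b] and from the right is [b*c, c, 1]; their elementwise product
# [b*c, a*c, a*b] is exactly d(prod)/dx, computed without dividing by x.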
input_shape = array_ops.shape(op.inputs[0])
# Reshape reduction indices for the case where the parameter is a scalar
reduction_indices = array_ops.reshape(op.inputs[1], [-1])
# Expand grad to full input shape
output_shape_kept_dims = math_ops.reduced_shape(input_shape, op.inputs[1])
tile_scaling = _safe_shape_div(input_shape, output_shape_kept_dims)
grad = array_ops.reshape(grad, output_shape_kept_dims)
grad = array_ops.tile(grad, tile_scaling)
# Pack all reduced dimensions into a single one, so we can perform the
# cumprod ops. If the reduction dims list is empty, it defaults to float32,
# so we need to cast here. We put all the shape-related ops on CPU to avoid
# copying back and forth, and since listdiff is CPU only.
with ops.device("/cpu:0"):
reduced = math_ops.cast(reduction_indices, dtypes.int32)
idx = math_ops.range(0, array_ops.rank(op.inputs[0]))
other, _ = array_ops.setdiff1d(idx, reduced)
perm = array_ops.concat([reduced, other], 0)
reduced_num = math_ops.reduce_prod(array_ops.gather(input_shape, reduced))
other_num = math_ops.reduce_prod(array_ops.gather(input_shape, other))
permuted = array_ops.transpose(op.inputs[0], perm)
permuted_shape = array_ops.shape(permuted)
reshaped = array_ops.reshape(permuted, (reduced_num, other_num))
# Calculate product, leaving out the current entry
left = math_ops.cumprod(reshaped, axis=0, exclusive=True)
right = math_ops.cumprod(reshaped, axis=0, exclusive=True, reverse=True)
y = array_ops.reshape(left * right, permuted_shape)
# Invert the transpose and reshape operations.
# Make sure to set the statically known shape information through a reshape.
out = grad * array_ops.transpose(y, array_ops.invert_permutation(perm))
return array_ops.reshape(out, input_shape), None
@ops.RegisterGradient("SegmentSum")
def _SegmentSumGrad(op, grad):
"""Gradient for SegmentSum."""
return array_ops.gather(grad, op.inputs[1]), None
@ops.RegisterGradient("SegmentMean")
def _SegmentMeanGrad(op, grad):
"""Gradient for SegmentMean."""
input_rank = array_ops.rank(op.inputs[0])
ones_shape = array_ops.concat([
array_ops.shape(op.inputs[1]),
array_ops.fill(array_ops.expand_dims(input_rank - 1, 0), 1)
], 0)
ones = array_ops.fill(ones_shape,
constant_op.constant(1, dtype=grad.dtype))
scaled_grad = math_ops.div(grad, math_ops.segment_sum(ones, op.inputs[1]))
return array_ops.gather(scaled_grad, op.inputs[1]), None
@ops.RegisterGradient("SparseSegmentSum")
def _SparseSegmentSumGrad(op, grad):
"""Gradient for SparseSegmentSum."""
input_rows = array_ops.shape(op.inputs[0])[0]
return (math_ops.unsorted_segment_sum(
array_ops.gather(grad, op.inputs[2]), op.inputs[1], input_rows), None,
None)
@ops.RegisterGradient("SparseSegmentMean")
def _SparseSegmentMeanGrad(op, grad):
"""Gradient for SparseSegmentMean."""
dim0 = array_ops.shape(op.inputs[0])[0]
return (math_ops.sparse_segment_mean_grad(grad, op.inputs[1], op.inputs[2],
dim0), None, None)
@ops.RegisterGradient("SparseSegmentSqrtN")
def _SparseSegmentSqrtNGrad(op, grad):
"""Gradient for SparseSegmentSqrtN."""
dim0 = array_ops.shape(op.inputs[0])[0]
return (math_ops.sparse_segment_sqrt_n_grad(grad, op.inputs[1], op.inputs[2],
dim0), None, None)
def _SegmentMinOrMaxGrad(op, grad, is_sorted):
"""Gradient for SegmentMin and (unsorted) SegmentMax. They share similar code."""
zeros = array_ops.zeros(array_ops.shape(op.inputs[0]),
dtype=op.inputs[0].dtype)
# Get the number of selected (minimum or maximum) elements in each segment.
gathered_outputs = array_ops.gather(op.outputs[0], op.inputs[1])
is_selected = math_ops.equal(op.inputs[0], gathered_outputs)
if is_sorted:
num_selected = math_ops.segment_sum(math_ops.cast(is_selected, grad.dtype),
op.inputs[1])
else:
num_selected = math_ops.unsorted_segment_sum(math_ops.cast(is_selected, grad.dtype),
op.inputs[1], op.inputs[2])
# Compute the gradient for each segment. The gradient for the ith segment is
# divided evenly among the selected elements in that segment.
weighted_grads = math_ops.div(grad, num_selected)
gathered_grads = array_ops.gather(weighted_grads, op.inputs[1])
if is_sorted:
return array_ops.where(is_selected, gathered_grads, zeros), None
else:
return array_ops.where(is_selected, gathered_grads, zeros), None, None
@ops.RegisterGradient("SegmentMin")
def _SegmentMinGrad(op, grad):
"""Gradient for SegmentMin."""
return _SegmentMinOrMaxGrad(op, grad, True)
@ops.RegisterGradient("SegmentMax")
def _SegmentMaxGrad(op, grad):
"""Gradient for SegmentMax."""
return _SegmentMinOrMaxGrad(op, grad, True)
@ops.RegisterGradient("UnsortedSegmentSum")
def _UnsortedSegmentSumGrad(op, grad):
"""Gradient for SegmentSum."""
return array_ops.gather(grad, op.inputs[1]), None, None
@ops.RegisterGradient("UnsortedSegmentMax")
def _UnsortedSegmentMaxGrad(op, grad):
return _SegmentMinOrMaxGrad(op, grad, False)
@ops.RegisterGradient("Abs")
def _AbsGrad(op, grad):
x = op.inputs[0]
return grad * math_ops.sign(x)
@ops.RegisterGradient("Neg")
def _NegGrad(_, grad):
"""Returns -grad."""
return -grad
@ops.RegisterGradient("Inv")
def _InvGrad(op, grad):
"""Returns -grad * (1 / x^2)."""
y = op.outputs[0] # y = 1 / x
# pylint: disable=protected-access
return gen_math_ops._reciprocal_grad(y, grad)
@ops.RegisterGradient("Reciprocal")
def _ReciprocalGrad(op, grad):
"""Returns -grad * (1 / x^2)."""
y = op.outputs[0] # y = 1 / x
# pylint: disable=protected-access
return gen_math_ops._reciprocal_grad(y, grad)
@ops.RegisterGradient("InvGrad")
def _InvGradGrad(op, grad):
b = op.inputs[1]
  # op.outputs[0]: y = -b * conj(a)^2
with ops.control_dependencies([grad.op]):
ca = math_ops.conj(op.inputs[0])
cg = math_ops.conj(grad)
# pylint: disable=protected-access
return cg * -2.0 * b * ca, gen_math_ops._reciprocal_grad(ca, grad)
@ops.RegisterGradient("ReciprocalGrad")
def _ReciprocalGradGrad(op, grad):
b = op.inputs[1]
  # op.outputs[0]: y = -b * conj(a)^2
with ops.control_dependencies([grad.op]):
ca = math_ops.conj(op.inputs[0])
cg = math_ops.conj(grad)
# pylint: disable=protected-access
return cg * -2.0 * b * ca, gen_math_ops._reciprocal_grad(ca, grad)
@ops.RegisterGradient("Square")
def _SquareGrad(op, grad):
x = op.inputs[0]
# Added control dependencies to prevent 2*x from being computed too early.
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * (2.0 * x)
@ops.RegisterGradient("Sqrt")
def _SqrtGrad(op, grad):
y = op.outputs[0] # y = x^(1/2)
return gen_math_ops._sqrt_grad(y, grad)
@ops.RegisterGradient("SqrtGrad")
def _SqrtGradGrad(op, grad):
a = op.inputs[0]
y = op.outputs[0] # y = 0.5 * b / conj(a)
with ops.control_dependencies([grad.op]):
ga = grad / a
return -math_ops.conj(ga) * y, 0.5 * ga
@ops.RegisterGradient("Rsqrt")
def _RsqrtGrad(op, grad):
"""Returns -0.5 * grad * conj(y)^3."""
y = op.outputs[0] # y = x^(-1/2)
return gen_math_ops._rsqrt_grad(y, grad)
@ops.RegisterGradient("RsqrtGrad")
def _RsqrtGradGrad(op, grad):
"""Returns backprop gradient for f(a,b) = -0.5 * b * conj(a)^3."""
a = op.inputs[0] # a = x^{-1/2}
b = op.inputs[1] # backprop gradient for a
with ops.control_dependencies([grad.op]):
ca = math_ops.conj(a)
cg = math_ops.conj(grad)
grad_a = -1.5 * cg * b * math_ops.square(ca)
# pylint: disable=protected-access
grad_b = gen_math_ops._rsqrt_grad(ca, grad)
return grad_a, grad_b
@ops.RegisterGradient("Exp")
def _ExpGrad(op, grad):
"""Returns grad * exp(x)."""
y = op.outputs[0] # y = e^x
with ops.control_dependencies([grad.op]):
y = math_ops.conj(y)
return grad * y
@ops.RegisterGradient("Expm1")
def _Expm1Grad(op, grad):
"""Returns grad * exp(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
y = math_ops.exp(x)
return grad * y
@ops.RegisterGradient("Log")
def _LogGrad(op, grad):
"""Returns grad * (1/x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.reciprocal(x)
@ops.RegisterGradient("Log1p")
def _Log1pGrad(op, grad):
"""Returns grad * (1/(1 + x))."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.reciprocal(1 + x)
@ops.RegisterGradient("Sinh")
def _SinhGrad(op, grad):
"""Returns grad * cosh(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.cosh(x)
@ops.RegisterGradient("Cosh")
def _CoshGrad(op, grad):
"""Returns grad * sinh(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.sinh(x)
@ops.RegisterGradient("Tanh")
def _TanhGrad(op, grad):
"""Returns grad * (1 - tanh(x) * tanh(x))."""
y = op.outputs[0] # y = tanh(x)
with ops.control_dependencies([grad.op]):
y = math_ops.conj(y)
# pylint: disable=protected-access
return gen_math_ops._tanh_grad(y, grad)
@ops.RegisterGradient("TanhGrad")
def _TanhGradGrad(op, grad):
with ops.control_dependencies([grad.op]):
a = math_ops.conj(op.inputs[0])
b = math_ops.conj(op.inputs[1])
# pylint: disable=protected-access
return grad * -2.0 * b * a, gen_math_ops._tanh_grad(a, grad)
@ops.RegisterGradient("Erf")
def _ErfGrad(op, grad):
"""Returns grad * 2/sqrt(pi) * exp(-x**2)."""
x = op.inputs[0]
two_over_root_pi = constant_op.constant(2 / np.sqrt(np.pi), dtype=grad.dtype)
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * two_over_root_pi * math_ops.exp(-math_ops.square(x))
@ops.RegisterGradient("Erfc")
def _ErfcGrad(op, grad):
"""Returns -grad * 2/sqrt(pi) * exp(-x**2)."""
x = op.inputs[0]
minus_two_over_root_pi = constant_op.constant(
-2 / np.sqrt(np.pi), dtype=grad.dtype)
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * minus_two_over_root_pi * math_ops.exp(-math_ops.square(x))
@ops.RegisterGradient("Lgamma")
def _LgammaGrad(op, grad):
"""Returns grad * digamma(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.digamma(x)
@ops.RegisterGradient("Digamma")
def _DigammaGrad(op, grad):
"""Compute gradient of the digamma function with respect to its argument."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.polygamma(array_ops.constant(1, dtype=x.dtype), x)
@ops.RegisterGradient("Igamma")
def _IgammaGrad(op, grad):
"""Returns gradient of igamma(a, x) with respect to x."""
# TODO(ebrevdo): Perhaps add the derivative w.r.t. a
a = op.inputs[0]
x = op.inputs[1]
sa = array_ops.shape(a)
sx = array_ops.shape(x)
unused_ra, rx = gen_array_ops._broadcast_gradient_args(sa, sx)
# Perform operations in log space before summing, because Gamma(a)
# and Gamma'(a) can grow large.
partial_x = math_ops.exp(-x + (a - 1) * math_ops.log(x) - math_ops.lgamma(a))
# TODO(b/36815900): Mark None return values as NotImplemented
return (None,
array_ops.reshape(math_ops.reduce_sum(partial_x * grad, rx), sx))
@ops.RegisterGradient("Igammac")
def _IgammacGrad(op, grad):
"""Returns gradient of igammac(a, x) = 1 - igamma(a, x) w.r.t. x."""
_, igamma_grad_x = _IgammaGrad(op, grad)
return None, -igamma_grad_x
@ops.RegisterGradient("Betainc")
def _BetaincGrad(op, grad):
"""Returns gradient of betainc(a, b, x) with respect to x."""
# TODO(ebrevdo): Perhaps add the derivative w.r.t. a, b
a, b, x = op.inputs
  # two cases: x is a scalar and a/b are same-shaped tensors, or vice
  # versa; so it's sufficient to check against shape(a).
sa = array_ops.shape(a)
sx = array_ops.shape(x)
# pylint: disable=protected-access
_, rx = gen_array_ops._broadcast_gradient_args(sa, sx)
# pylint: enable=protected-access
# Perform operations in log space before summing, because terms
# can grow large.
log_beta = (gen_math_ops.lgamma(a) + gen_math_ops.lgamma(b)
- gen_math_ops.lgamma(a + b))
partial_x = math_ops.exp(
(b - 1) * math_ops.log(1 - x) + (a - 1) * math_ops.log(x) - log_beta)
# TODO(b/36815900): Mark None return values as NotImplemented
return (None, # da
None, # db
array_ops.reshape(math_ops.reduce_sum(partial_x * grad, rx), sx))
@ops.RegisterGradient("Zeta")
def _ZetaGrad(op, grad):
"""Returns gradient of zeta(x, q) with respect to x and q."""
# TODO(tillahoffmann): Add derivative with respect to x
x = op.inputs[0]
q = op.inputs[1]
# Broadcast gradients
sx = array_ops.shape(x)
sq = array_ops.shape(q)
unused_rx, rq = gen_array_ops._broadcast_gradient_args(sx, sq)
# Evaluate gradient
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
q = math_ops.conj(q)
partial_q = -x * math_ops.zeta(x + 1, q)
# TODO(b/36815900): Mark None return values as NotImplemented
return (None,
array_ops.reshape(math_ops.reduce_sum(partial_q * grad, rq), sq))
@ops.RegisterGradient("Polygamma")
def _PolygammaGrad(op, grad):
"""Returns gradient of psi(n, x) with respect to n and x."""
# TODO(tillahoffmann): Add derivative with respect to n
n = op.inputs[0]
x = op.inputs[1]
# Broadcast gradients
sn = array_ops.shape(n)
sx = array_ops.shape(x)
unused_rn, rx = gen_array_ops._broadcast_gradient_args(sn, sx)
# Evaluate gradient
with ops.control_dependencies([grad.op]):
n = math_ops.conj(n)
x = math_ops.conj(x)
partial_x = math_ops.polygamma(n + 1, x)
# TODO(b/36815900): Mark None return values as NotImplemented
return (None,
array_ops.reshape(math_ops.reduce_sum(partial_x * grad, rx), sx))
@ops.RegisterGradient("Sigmoid")
def _SigmoidGrad(op, grad):
"""Returns grad * sigmoid(x) * (1 - sigmoid(x))."""
y = op.outputs[0] # y = sigmoid(x)
with ops.control_dependencies([grad.op]):
y = math_ops.conj(y)
# pylint: disable=protected-access
return gen_math_ops._sigmoid_grad(y, grad)
@ops.RegisterGradient("SigmoidGrad")
def _SigmoidGradGrad(op, grad):
with ops.control_dependencies([grad.op]):
a = math_ops.conj(op.inputs[0])
b = math_ops.conj(op.inputs[1])
gb = grad * b
# pylint: disable=protected-access
return gb - 2.0 * gb * a, gen_math_ops._sigmoid_grad(a, grad)
@ops.RegisterGradient("Sign")
def _SignGrad(op, _):
"""Returns 0."""
x = op.inputs[0]
return array_ops.zeros(array_ops.shape(x), dtype=x.dtype)
@ops.RegisterGradient("Sin")
def _SinGrad(op, grad):
"""Returns grad * cos(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return grad * math_ops.cos(x)
@ops.RegisterGradient("Cos")
def _CosGrad(op, grad):
"""Returns grad * -sin(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
return -grad * math_ops.sin(x)
@ops.RegisterGradient("Tan")
def _TanGrad(op, grad):
"""Returns grad * 1/sec^2(x)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
secx = math_ops.reciprocal(math_ops.cos(x))
secx2 = math_ops.square(secx)
return grad * secx2
@ops.RegisterGradient("Asin")
def _AsinGrad(op, grad):
"""Returns grad * 1/sqrt(1-x^2)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
den = math_ops.sqrt(math_ops.subtract(one, x2))
inv = math_ops.reciprocal(den)
return grad * inv
@ops.RegisterGradient("Acos")
def _AcosGrad(op, grad):
"""Returns grad * -1/sqrt(1-x^2)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
den = math_ops.sqrt(math_ops.subtract(one, x2))
inv = math_ops.reciprocal(den)
return -grad * inv
@ops.RegisterGradient("Atan")
def _AtanGrad(op, grad):
"""Returns grad * 1/ (1 + x^2)."""
x = op.inputs[0]
with ops.control_dependencies([grad.op]):
x = math_ops.conj(x)
x2 = math_ops.square(x)
one = constant_op.constant(1, dtype=grad.dtype)
inv = math_ops.reciprocal(math_ops.add(one, x2))
return grad * inv
@ops.RegisterGradient("Atan2")
def _Atan2Grad(op, grad):
"""Returns grad * x / (x^2 + y^2), grad * -y / (x^2 + y^2)."""
y = op.inputs[0]
x = op.inputs[1]
with ops.control_dependencies([grad.op]):
grad_inv = grad / (math_ops.square(x) + math_ops.square(y))
return x * grad_inv, -y * grad_inv
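# Note (added for clarity, not in the original file): both partial
# derivatives of atan2 share one denominator,
#   d/dy atan2(y, x) = x / (x^2 + y^2),  d/dx atan2(y, x) = -y / (x^2 + y^2),
# which is why _Atan2Grad computes a single grad_inv term and reuses it.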
@ops.RegisterGradient("AddN")
def _AddNGrad(op, grad):
"""Copies the gradient to all inputs."""
# Not broadcasting.
return [grad] * len(op.inputs)
@ops.RegisterGradient("Add")
def _AddGrad(op, grad):
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
return (array_ops.reshape(math_ops.reduce_sum(grad, rx), sx),
array_ops.reshape(math_ops.reduce_sum(grad, ry), sy))
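# Illustrative sketch (not part of the original file): the reduce_sum /
# reshape pattern in _AddGrad (and the other binary gradients below) undoes
# broadcasting. If x is [2, 3] and y is [3], _broadcast_gradient_args
# reports the axes each operand was broadcast over (rx = [], ry = [0]), so
# y's gradient is summed back down to shape [3]. Assuming TF 1.x:
#
#   x = tf.ones([2, 3])
#   y = tf.ones([3])
#   gx, gy = tf.gradients(tf.reduce_sum(x + y), [x, y])
#   # gx: shape [2, 3], all ones; gy: shape [3], all twos.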
@ops.RegisterGradient("Sub")
def _SubGrad(op, grad):
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
return (array_ops.reshape(math_ops.reduce_sum(grad, rx), sx),
array_ops.reshape(-math_ops.reduce_sum(grad, ry), sy))
@ops.RegisterGradient("Mul")
def _MulGrad(op, grad):
"""The gradient of scalar multiplication."""
x = op.inputs[0]
y = op.inputs[1]
assert x.dtype.base_dtype == y.dtype.base_dtype, (x.dtype, " vs. ", y.dtype)
sx = array_ops.shape(x)
sy = array_ops.shape(y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
return (array_ops.reshape(math_ops.reduce_sum(grad * y, rx), sx),
array_ops.reshape(math_ops.reduce_sum(x * grad, ry), sy))
@ops.RegisterGradient("Div")
def _DivGrad(op, grad):
"""The gradient for the Div operator."""
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
# pylint: disable=protected-access
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
# pylint: enable=protected-access
x = math_ops.conj(x)
y = math_ops.conj(y)
return (array_ops.reshape(math_ops.reduce_sum(math_ops.div(grad, y), rx), sx),
array_ops.reshape(
math_ops.reduce_sum(grad * math_ops.div(math_ops.div(-x, y), y),
ry), sy))
@ops.RegisterGradient("FloorDiv")
def _FloorDivGrad(_, unused_grad):
"""The gradient for the FloorDiv operator."""
return None, None
@ops.RegisterGradient("TruncateDiv")
def _TruncateDivGrad(_, unused_grad):
return None, None
@ops.RegisterGradient("RealDiv")
def _RealDivGrad(op, grad):
"""RealDiv op gradient."""
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
# pylint: disable=protected-access
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
# pylint: enable=protected-access
x = math_ops.conj(x)
y = math_ops.conj(y)
return (array_ops.reshape(
math_ops.reduce_sum(math_ops.realdiv(grad, y), rx),
sx), array_ops.reshape(
math_ops.reduce_sum(grad * math_ops.realdiv(math_ops.realdiv(-x, y), y),
ry), sy))
@ops.RegisterGradient("Pow")
def _PowGrad(op, grad):
"""Returns grad * (y*x^(y-1), z*log(x))."""
x = op.inputs[0]
y = op.inputs[1]
z = op.outputs[0]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
x = math_ops.conj(x)
y = math_ops.conj(y)
z = math_ops.conj(z)
gx = array_ops.reshape(
math_ops.reduce_sum(grad * y * math_ops.pow(x, y - 1), rx), sx)
# Avoid false singularity at x = 0
if x.dtype.is_complex:
# real(x) < 0 is fine for the complex case
log_x = array_ops.where(
math_ops.not_equal(x, 0), math_ops.log(x), array_ops.zeros_like(x))
else:
# There's no sensible real value to return if x < 0, so return 0
log_x = array_ops.where(x > 0, math_ops.log(x), array_ops.zeros_like(x))
gy = array_ops.reshape(math_ops.reduce_sum(grad * z * log_x, ry), sy)
return gx, gy
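# Illustrative sketch (not part of the original file): the where() in
# _PowGrad keeps the y-gradient finite at x == 0 by substituting 0 for
# log(0); without it, the gradient would contain 0 * (-inf) = nan.
# Assuming the TF 1.x graph-mode API:
#
#   x = tf.constant([0., 2.])
#   y = tf.constant([3., 3.])
#   gy = tf.gradients(tf.reduce_sum(tf.pow(x, y)), y)[0]
#   # -> [0., 8. * log(2.)] instead of [nan, ...]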
def _MaximumMinimumGrad(op, grad, selector_op):
"""Factor out the code for the gradient of Maximum or Minimum."""
x = op.inputs[0]
y = op.inputs[1]
gdtype = grad.dtype
sx = array_ops.shape(x)
sy = array_ops.shape(y)
gradshape = array_ops.shape(grad)
zeros = array_ops.zeros(gradshape, gdtype)
xmask = selector_op(x, y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
xgrad = array_ops.where(xmask, grad, zeros)
ygrad = array_ops.where(math_ops.logical_not(xmask), grad, zeros)
gx = array_ops.reshape(math_ops.reduce_sum(xgrad, rx), sx)
gy = array_ops.reshape(math_ops.reduce_sum(ygrad, ry), sy)
return (gx, gy)
@ops.RegisterGradient("Maximum")
def _MaximumGrad(op, grad):
"""Returns grad*(x > y, x <= y) with type of grad."""
return _MaximumMinimumGrad(op, grad, math_ops.greater_equal)
@ops.RegisterGradient("Minimum")
def _MinimumGrad(op, grad):
"""Returns grad*(x < y, x >= y) with type of grad."""
return _MaximumMinimumGrad(op, grad, math_ops.less_equal)
@ops.RegisterGradient("SquaredDifference")
def _SquaredDifferenceGrad(op, grad):
"""Returns the gradient for (x-y)^2."""
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
# pylint: disable=protected-access
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
# pylint: enable=protected-access
# .op works with Tensors or IndexedSlices
with ops.control_dependencies([grad.op]):
    # The parens ensure that if grad is IndexedSlices, it'll get multiplied by
    # a Tensor (not a Python number like 2.0), which causes it to be converted
    # to a Tensor.
x_grad = math_ops.scalar_mul(2.0, grad) * (x - y)
return (array_ops.reshape(math_ops.reduce_sum(x_grad, rx), sx),
-array_ops.reshape(math_ops.reduce_sum(x_grad, ry), sy))
# Logical operations have no gradients.
ops.NotDifferentiable("Less")
ops.NotDifferentiable("LessEqual")
ops.NotDifferentiable("Greater")
ops.NotDifferentiable("GreaterEqual")
ops.NotDifferentiable("Equal")
ops.NotDifferentiable("ApproximateEqual")
ops.NotDifferentiable("NotEqual")
ops.NotDifferentiable("LogicalAnd")
ops.NotDifferentiable("LogicalOr")
ops.NotDifferentiable("LogicalNot")
@ops.RegisterGradient("Select")
def _SelectGrad(op, grad):
c = op.inputs[0]
x = op.inputs[1]
zeros = array_ops.zeros_like(x)
return (None, array_ops.where(c, grad, zeros),
array_ops.where(c, zeros, grad))
@ops.RegisterGradient("MatMul")
def _MatMulGrad(op, grad):
"""Gradient for MatMul."""
t_a = op.get_attr("transpose_a")
t_b = op.get_attr("transpose_b")
a = math_ops.conj(op.inputs[0])
b = math_ops.conj(op.inputs[1])
if not t_a and not t_b:
grad_a = math_ops.matmul(grad, b, transpose_b=True)
grad_b = math_ops.matmul(a, grad, transpose_a=True)
elif not t_a and t_b:
grad_a = math_ops.matmul(grad, b)
grad_b = math_ops.matmul(grad, a, transpose_a=True)
elif t_a and not t_b:
grad_a = math_ops.matmul(b, grad, transpose_b=True)
grad_b = math_ops.matmul(a, grad)
elif t_a and t_b:
grad_a = math_ops.matmul(b, grad, transpose_a=True, transpose_b=True)
grad_b = math_ops.matmul(grad, a, transpose_a=True, transpose_b=True)
return grad_a, grad_b
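# Note (added for clarity, not in the original file): for the default case
# (no transposes) with C = A @ B, the rules above are the standard
# dL/dA = grad @ B^T and dL/dB = A^T @ grad -- the only pairing whose
# shapes line up:
#   A: [m, k], B: [k, n], grad: [m, n]
#   grad @ B^T -> [m, k] (matches A);  A^T @ grad -> [k, n] (matches B)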
@ops.RegisterGradient("SparseMatMul")
def _SparseMatMulGrad(op, grad):
"""Gradient for SparseMatMul."""
t_a = op.get_attr("transpose_a")
t_b = op.get_attr("transpose_b")
is_sparse = {
op.inputs[0]: op.get_attr("a_is_sparse"),
op.inputs[1]: op.get_attr("b_is_sparse"),
# Use heuristic to figure out if grad might be sparse
grad: (grad.op.type == "ReluGrad")
}
def _SparseMatMul(t1, t2, out_dtype, transpose_a=False, transpose_b=False):
"""Helper function to create SparseMatMul op."""
assert t1 in is_sparse and t2 in is_sparse
t1_sparse = is_sparse[t1]
t2_sparse = is_sparse[t2]
if transpose_b:
t2 = array_ops.transpose(t2)
transpose_b = False
prod = math_ops.matmul(
t1,
t2,
transpose_a=transpose_a,
transpose_b=transpose_b,
a_is_sparse=t1_sparse,
b_is_sparse=t2_sparse)
if prod.dtype != out_dtype:
prod = math_ops.cast(prod, out_dtype)
return prod
dtype_a = op.inputs[0].dtype
dtype_b = op.inputs[1].dtype
if not t_a and not t_b:
return (_SparseMatMul(
grad, op.inputs[1], dtype_a, transpose_b=True), _SparseMatMul(
op.inputs[0], grad, dtype_b, transpose_a=True))
elif not t_a and t_b:
return (_SparseMatMul(grad, op.inputs[1], dtype_a), _SparseMatMul(
grad, op.inputs[0], dtype_b, transpose_a=True))
elif t_a and not t_b:
return (_SparseMatMul(
op.inputs[1], grad, dtype_a, transpose_b=True),
_SparseMatMul(op.inputs[0], grad, dtype_b))
elif t_a and t_b:
return (_SparseMatMul(
op.inputs[1], grad, dtype_a, transpose_a=True,
transpose_b=True), _SparseMatMul(
grad, op.inputs[0], dtype_b, transpose_a=True, transpose_b=True))
@ops.RegisterGradient("Floor")
def _FloorGrad(_, unused_grad):
return [None]
@ops.RegisterGradient("Ceil")
def _CeilGrad(_, unused_grad):
return [None]
@ops.RegisterGradient("Round")
def _RoundGrad(_, unused_grad):
return [None]
@ops.RegisterGradient("Rint")
def _RintGrad(_, unused_grad):
# the gradient of Rint is zero
return [None]
@ops.RegisterGradient("BatchMatMul")
def _BatchMatMul(op, grad):
"""Returns the gradient of x and y given the gradient of x * y."""
x = op.inputs[0]
y = op.inputs[1]
adj_x = op.get_attr("adj_x")
adj_y = op.get_attr("adj_y")
if not adj_x:
if not adj_y:
grad_x = math_ops.matmul(grad, y, adjoint_a=False, adjoint_b=True)
grad_y = math_ops.matmul(x, grad, adjoint_a=True, adjoint_b=False)
else:
grad_x = math_ops.matmul(grad, y, adjoint_a=False, adjoint_b=False)
grad_y = math_ops.matmul(grad, x, adjoint_a=True, adjoint_b=False)
else:
if not adj_y:
grad_x = math_ops.matmul(y, grad, adjoint_a=False, adjoint_b=True)
grad_y = math_ops.matmul(x, grad, adjoint_a=False, adjoint_b=False)
else:
grad_x = math_ops.matmul(y, grad, adjoint_a=True, adjoint_b=True)
grad_y = math_ops.matmul(grad, x, adjoint_a=True, adjoint_b=True)
return grad_x, grad_y
ops.NotDifferentiable("Range")
ops.NotDifferentiable("LinSpace")
@ops.RegisterGradient("Complex")
def _ComplexGrad(op, grad):
"""Returns the real and imaginary components of 'grad', respectively."""
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
return (array_ops.reshape(math_ops.reduce_sum(math_ops.real(grad), rx), sx),
array_ops.reshape(math_ops.reduce_sum(math_ops.imag(grad), ry), sy))
@ops.RegisterGradient("Real")
def _RealGrad(_, grad):
"""Returns 'grad' as the real part and set the imaginary part 0."""
zero = constant_op.constant(0, dtype=grad.dtype)
return math_ops.complex(grad, zero)
@ops.RegisterGradient("Imag")
def _ImagGrad(_, grad):
"""Returns 'grad' as the imaginary part and set the real part 0."""
zero = constant_op.constant(0, dtype=grad.dtype)
return math_ops.complex(zero, grad)
@ops.RegisterGradient("Conj")
def _ConjGrad(_, grad):
"""Returns the complex conjugate of grad."""
return math_ops.conj(grad)
@ops.RegisterGradient("ComplexAbs")
def _ComplexAbsGrad(op, grad):
"""Returns the gradient of ComplexAbs."""
# TODO(b/27786104): The cast to complex could be removed once arithmetic
# supports mixtures of complex64 and real values.
return (math_ops.complex(grad, array_ops.zeros_like(grad)) *
math_ops.sign(op.inputs[0]))
@ops.RegisterGradient("Cast")
def _CastGrad(op, grad):
t = [
dtypes.float16, dtypes.float32, dtypes.float64, dtypes.bfloat16,
dtypes.complex64, dtypes.complex128
]
src_type = op.inputs[0].dtype.base_dtype
dst_type = grad.dtype.base_dtype
if src_type in t and dst_type in t:
return math_ops.cast(grad, src_type)
else:
return None
@ops.RegisterGradient("Cross")
def _CrossGrad(op, grad):
u = op.inputs[0]
v = op.inputs[1]
return (math_ops.cross(v, grad), math_ops.cross(grad, u))
@ops.RegisterGradient("Cumsum")
def _CumsumGrad(op, grad):
axis = op.inputs[1]
exclusive = op.get_attr("exclusive")
reverse = op.get_attr("reverse")
return [
math_ops.cumsum(
grad, axis, exclusive=exclusive, reverse=not reverse), None
]
@ops.RegisterGradient("Cumprod")
def _CumprodGrad(op, grad):
x = op.inputs[0]
axis = op.inputs[1]
exclusive = op.get_attr("exclusive")
reverse = op.get_attr("reverse")
# TODO This fails when x contains 0 and should be fixed
prod = math_ops.cumprod(x, axis, exclusive=exclusive, reverse=reverse)
out = math_ops.cumsum(
prod * grad, axis, exclusive=exclusive, reverse=not reverse)
return [out / x, None]
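# Note (added for clarity, not in the original file): the out / x division
# above is where the TODO bites -- for x = [0., 2.] the forward cumprod is
# [0., 0.], and the accumulated sum divided by x gives 0/0 = nan for the
# zero entry, even though the true derivative there (1 + x[1] = 3) is finite.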
| apache-2.0 |
amyvmiwei/kbengine | kbe/src/lib/python/Lib/traceback.py | 87 | 11167 | """Extract, format and print information about Python stack traces."""
import linecache
import sys
import operator
__all__ = ['extract_stack', 'extract_tb', 'format_exception',
'format_exception_only', 'format_list', 'format_stack',
'format_tb', 'print_exc', 'format_exc', 'print_exception',
'print_last', 'print_stack', 'print_tb',
'clear_frames']
#
# Formatting and printing lists of traceback lines.
#
def _format_list_iter(extracted_list):
for filename, lineno, name, line in extracted_list:
item = ' File "{}", line {}, in {}\n'.format(filename, lineno, name)
if line:
item = item + ' {}\n'.format(line.strip())
yield item
def print_list(extracted_list, file=None):
"""Print the list of tuples as returned by extract_tb() or
extract_stack() as a formatted stack trace to the given file."""
if file is None:
file = sys.stderr
for item in _format_list_iter(extracted_list):
print(item, file=file, end="")
def format_list(extracted_list):
"""Format a list of traceback entry tuples for printing.
Given a list of tuples as returned by extract_tb() or
extract_stack(), return a list of strings ready for printing.
Each string in the resulting list corresponds to the item with the
same index in the argument list. Each string ends in a newline;
the strings may contain internal newlines as well, for those items
whose source text line is not None.
"""
return list(_format_list_iter(extracted_list))
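# Illustrative sketch (not part of the original module): format_list
# consumes the 4-tuples produced by extract_tb() or extract_stack(), e.g.
#
#   frames = [("example.py", 3, "<module>", "1 / 0")]
#   print("".join(format_list(frames)), end="")
#   #   File "example.py", line 3, in <module>
#   #     1 / 0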
#
# Printing and Extracting Tracebacks.
#
# extractor takes curr and needs to return a tuple of:
# - Frame object
# - Line number
# - Next item (same type as curr)
# In practice, curr is either a traceback or a frame.
def _extract_tb_or_stack_iter(curr, limit, extractor):
if limit is None:
limit = getattr(sys, 'tracebacklimit', None)
n = 0
while curr is not None and (limit is None or n < limit):
f, lineno, next_item = extractor(curr)
co = f.f_code
filename = co.co_filename
name = co.co_name
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
yield (filename, lineno, name, line)
curr = next_item
n += 1
def _extract_tb_iter(tb, limit):
return _extract_tb_or_stack_iter(
tb, limit,
operator.attrgetter("tb_frame", "tb_lineno", "tb_next"))
def print_tb(tb, limit=None, file=None):
"""Print up to 'limit' stack trace entries from the traceback 'tb'.
If 'limit' is omitted or None, all entries are printed. If 'file'
is omitted or None, the output goes to sys.stderr; otherwise
'file' should be an open file or file-like object with a write()
method.
"""
print_list(extract_tb(tb, limit=limit), file=file)
def format_tb(tb, limit=None):
"""A shorthand for 'format_list(extract_tb(tb, limit))'."""
return format_list(extract_tb(tb, limit=limit))
def extract_tb(tb, limit=None):
"""Return list of up to limit pre-processed entries from traceback.
This is useful for alternate formatting of stack traces. If
'limit' is omitted or None, all entries are extracted. A
pre-processed stack trace entry is a quadruple (filename, line
number, function name, text) representing the information that is
usually printed for a stack trace. The text is a string with
leading and trailing whitespace stripped; if the source is not
available it is None.
"""
return list(_extract_tb_iter(tb, limit=limit))
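# Illustrative sketch (not part of the original module):
#
#   try:
#       1 / 0
#   except ZeroDivisionError:
#       tb = sys.exc_info()[2]
#       extract_tb(tb)   # e.g. [('demo.py', 2, '<module>', '1 / 0')]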
#
# Exception formatting and output.
#
_cause_message = (
"\nThe above exception was the direct cause "
"of the following exception:\n")
_context_message = (
"\nDuring handling of the above exception, "
"another exception occurred:\n")
def _iter_chain(exc, custom_tb=None, seen=None):
if seen is None:
seen = set()
seen.add(exc)
its = []
context = exc.__context__
cause = exc.__cause__
if cause is not None and cause not in seen:
its.append(_iter_chain(cause, False, seen))
its.append([(_cause_message, None)])
elif (context is not None and
not exc.__suppress_context__ and
context not in seen):
its.append(_iter_chain(context, None, seen))
its.append([(_context_message, None)])
its.append([(exc, custom_tb or exc.__traceback__)])
# itertools.chain is in an extension module and may be unavailable
for it in its:
yield from it
def _format_exception_iter(etype, value, tb, limit, chain):
if chain:
values = _iter_chain(value, tb)
else:
values = [(value, tb)]
for value, tb in values:
if isinstance(value, str):
# This is a cause/context message line
yield value + '\n'
continue
if tb:
yield 'Traceback (most recent call last):\n'
yield from _format_list_iter(_extract_tb_iter(tb, limit=limit))
yield from _format_exception_only_iter(type(value), value)
def print_exception(etype, value, tb, limit=None, file=None, chain=True):
"""Print exception up to 'limit' stack trace entries from 'tb' to 'file'.
This differs from print_tb() in the following ways: (1) if
traceback is not None, it prints a header "Traceback (most recent
call last):"; (2) it prints the exception type and value after the
stack trace; (3) if type is SyntaxError and value has the
appropriate format, it prints the line where the syntax error
occurred with a caret on the next line indicating the approximate
position of the error.
"""
if file is None:
file = sys.stderr
for line in _format_exception_iter(etype, value, tb, limit, chain):
print(line, file=file, end="")
def format_exception(etype, value, tb, limit=None, chain=True):
"""Format a stack trace and the exception information.
The arguments have the same meaning as the corresponding arguments
to print_exception(). The return value is a list of strings, each
ending in a newline and some containing internal newlines. When
these lines are concatenated and printed, exactly the same text is
printed as does print_exception().
"""
return list(_format_exception_iter(etype, value, tb, limit, chain))
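# Illustrative usage (not part of the original module): inside an except
# block, "".join(format_exception(*sys.exc_info())) reproduces exactly the
# text the interpreter would have printed for the exception, including any
# chained __cause__/__context__ exceptions when chain is true.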
def format_exception_only(etype, value):
"""Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.last_type and sys.last_value. The return value is a list of
strings, each ending in a newline.
Normally, the list contains a single string; however, for
SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax
error occurred.
The message indicating which exception occurred is always the last
string in the list.
"""
return list(_format_exception_only_iter(etype, value))
def _format_exception_only_iter(etype, value):
# Gracefully handle (the way Python 2.4 and earlier did) the case of
# being called with (None, None).
if etype is None:
yield _format_final_exc_line(etype, value)
return
stype = etype.__name__
smod = etype.__module__
if smod not in ("__main__", "builtins"):
stype = smod + '.' + stype
if not issubclass(etype, SyntaxError):
yield _format_final_exc_line(stype, value)
return
# It was a syntax error; show exactly where the problem was found.
filename = value.filename or "<string>"
lineno = str(value.lineno) or '?'
yield ' File "{}", line {}\n'.format(filename, lineno)
badline = value.text
offset = value.offset
if badline is not None:
yield ' {}\n'.format(badline.strip())
if offset is not None:
caretspace = badline.rstrip('\n')
offset = min(len(caretspace), offset) - 1
caretspace = caretspace[:offset].lstrip()
            # non-space whitespace (like tabs) must be kept for alignment
caretspace = ((c.isspace() and c or ' ') for c in caretspace)
yield ' {}^\n'.format(''.join(caretspace))
msg = value.msg or "<no detail available>"
yield "{}: {}\n".format(stype, msg)
def _format_final_exc_line(etype, value):
valuestr = _some_str(value)
if value is None or not valuestr:
line = "%s\n" % etype
else:
line = "%s: %s\n" % (etype, valuestr)
return line
def _some_str(value):
try:
return str(value)
except:
return '<unprintable %s object>' % type(value).__name__
def print_exc(limit=None, file=None, chain=True):
"""Shorthand for 'print_exception(*sys.exc_info(), limit, file)'."""
print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain)
def format_exc(limit=None, chain=True):
"""Like print_exc() but return a string."""
return "".join(format_exception(*sys.exc_info(), limit=limit, chain=chain))
def print_last(limit=None, file=None, chain=True):
"""This is a shorthand for 'print_exception(sys.last_type,
sys.last_value, sys.last_traceback, limit, file)'."""
if not hasattr(sys, "last_type"):
raise ValueError("no last exception")
print_exception(sys.last_type, sys.last_value, sys.last_traceback,
limit, file, chain)
#
# Printing and Extracting Stacks.
#
def _extract_stack_iter(f, limit=None):
return _extract_tb_or_stack_iter(
f, limit, lambda f: (f, f.f_lineno, f.f_back))
def _get_stack(f):
if f is None:
f = sys._getframe().f_back.f_back
return f
def print_stack(f=None, limit=None, file=None):
"""Print a stack trace from its invocation point.
The optional 'f' argument can be used to specify an alternate
stack frame at which to start. The optional 'limit' and 'file'
arguments have the same meaning as for print_exception().
"""
print_list(extract_stack(_get_stack(f), limit=limit), file=file)
def format_stack(f=None, limit=None):
"""Shorthand for 'format_list(extract_stack(f, limit))'."""
return format_list(extract_stack(_get_stack(f), limit=limit))
def extract_stack(f=None, limit=None):
"""Extract the raw traceback from the current stack frame.
The return value has the same format as for extract_tb(). The
optional 'f' and 'limit' arguments have the same meaning as for
print_stack(). Each item in the list is a quadruple (filename,
line number, function name, text), and the entries are in order
from oldest to newest stack frame.
"""
stack = list(_extract_stack_iter(_get_stack(f), limit=limit))
stack.reverse()
return stack
def clear_frames(tb):
"Clear all references to local variables in the frames of a traceback."
while tb is not None:
try:
tb.tb_frame.clear()
except RuntimeError:
# Ignore the exception raised if the frame is still executing.
pass
tb = tb.tb_next
| lgpl-3.0 |
40223249-1/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/_socket.py | 742 | 6431 | """Implementation module for socket operations.
See the socket module for documentation."""
AF_APPLETALK = 16
AF_DECnet = 12
AF_INET = 2
AF_INET6 = 23
AF_IPX = 6
AF_IRDA = 26
AF_SNA = 11
AF_UNSPEC = 0
AI_ADDRCONFIG = 1024
AI_ALL = 256
AI_CANONNAME = 2
AI_NUMERICHOST = 4
AI_NUMERICSERV = 8
AI_PASSIVE = 1
AI_V4MAPPED = 2048
CAPI = '<capsule object "_socket.CAPI" at 0x00BC4F38>'
EAI_AGAIN = 11002
EAI_BADFLAGS = 10022
EAI_FAIL = 11003
EAI_FAMILY = 10047
EAI_MEMORY = 8
EAI_NODATA = 11001
EAI_NONAME = 11001
EAI_SERVICE = 10109
EAI_SOCKTYPE = 10044
INADDR_ALLHOSTS_GROUP = -536870911
INADDR_ANY = 0
INADDR_BROADCAST = -1
INADDR_LOOPBACK = 2130706433
INADDR_MAX_LOCAL_GROUP = -536870657
INADDR_NONE = -1
INADDR_UNSPEC_GROUP = -536870912
IPPORT_RESERVED = 1024
IPPORT_USERRESERVED = 5000
IPPROTO_ICMP = 1
IPPROTO_IP = 0
IPPROTO_RAW = 255
IPPROTO_TCP = 6
IPPROTO_UDP = 17
IPV6_CHECKSUM = 26
IPV6_DONTFRAG = 14
IPV6_HOPLIMIT = 21
IPV6_HOPOPTS = 1
IPV6_JOIN_GROUP = 12
IPV6_LEAVE_GROUP = 13
IPV6_MULTICAST_HOPS = 10
IPV6_MULTICAST_IF = 9
IPV6_MULTICAST_LOOP = 11
IPV6_PKTINFO = 19
IPV6_RECVRTHDR = 38
IPV6_RECVTCLASS = 40
IPV6_RTHDR = 32
IPV6_TCLASS = 39
IPV6_UNICAST_HOPS = 4
IPV6_V6ONLY = 27
IP_ADD_MEMBERSHIP = 12
IP_DROP_MEMBERSHIP = 13
IP_HDRINCL = 2
IP_MULTICAST_IF = 9
IP_MULTICAST_LOOP = 11
IP_MULTICAST_TTL = 10
IP_OPTIONS = 1
IP_RECVDSTADDR = 25
IP_TOS = 3
IP_TTL = 4
MSG_BCAST = 1024
MSG_CTRUNC = 512
MSG_DONTROUTE = 4
MSG_MCAST = 2048
MSG_OOB = 1
MSG_PEEK = 2
MSG_TRUNC = 256
NI_DGRAM = 16
NI_MAXHOST = 1025
NI_MAXSERV = 32
NI_NAMEREQD = 4
NI_NOFQDN = 1
NI_NUMERICHOST = 2
NI_NUMERICSERV = 8
RCVALL_MAX = 3
RCVALL_OFF = 0
RCVALL_ON = 1
RCVALL_SOCKETLEVELONLY = 2
SHUT_RD = 0
SHUT_RDWR = 2
SHUT_WR = 1
SIO_KEEPALIVE_VALS = 2550136836
SIO_RCVALL = 2550136833
SOCK_DGRAM = 2
SOCK_RAW = 3
SOCK_RDM = 4
SOCK_SEQPACKET = 5
SOCK_STREAM = 1
SOL_IP = 0
SOL_SOCKET = 65535
SOL_TCP = 6
SOL_UDP = 17
SOMAXCONN = 2147483647
SO_ACCEPTCONN = 2
SO_BROADCAST = 32
SO_DEBUG = 1
SO_DONTROUTE = 16
SO_ERROR = 4103
SO_EXCLUSIVEADDRUSE = -5
SO_KEEPALIVE = 8
SO_LINGER = 128
SO_OOBINLINE = 256
SO_RCVBUF = 4098
SO_RCVLOWAT = 4100
SO_RCVTIMEO = 4102
SO_REUSEADDR = 4
SO_SNDBUF = 4097
SO_SNDLOWAT = 4099
SO_SNDTIMEO = 4101
SO_TYPE = 4104
SO_USELOOPBACK = 64
class SocketType:
pass
TCP_MAXSEG = 4
TCP_NODELAY = 1
__loader__ = '<_frozen_importlib.ExtensionFileLoader object at 0x00CA2D90>'
def dup(*args,**kw):
"""dup(integer) -> integer
Duplicate an integer socket file descriptor. This is like os.dup(), but for
sockets; on some platforms os.dup() won't work for socket file descriptors."""
pass
class error:
pass
class gaierror:
pass
def getaddrinfo(*args,**kw):
"""getaddrinfo(host, port [, family, socktype, proto, flags]) -> list of (family, socktype, proto, canonname, sockaddr)
Resolve host and port into addrinfo struct."""
pass
def getdefaulttimeout(*args,**kw):
"""getdefaulttimeout() -> timeout
Returns the default timeout in seconds (float) for new socket objects.
A value of None indicates that new socket objects have no timeout.
When the socket module is first imported, the default is None."""
pass
def gethostbyaddr(*args,**kw):
"""gethostbyaddr(host) -> (name, aliaslist, addresslist)
Return the true host name, a list of aliases, and a list of IP addresses,
for a host. The host argument is a string giving a host name or IP number."""
pass
def gethostbyname(*args,**kw):
"""gethostbyname(host) -> address
Return the IP address (a string of the form '255.255.255.255') for a host."""
pass
def gethostbyname_ex(*args,**kw):
"""gethostbyname_ex(host) -> (name, aliaslist, addresslist)
Return the true host name, a list of aliases, and a list of IP addresses,
for a host. The host argument is a string giving a host name or IP number."""
pass
def gethostname(*args,**kw):
"""gethostname() -> string
Return the current host name."""
pass
def getnameinfo(*args,**kw):
"""getnameinfo(sockaddr, flags) --> (host, port)
Get host and port for a sockaddr."""
pass
def getprotobyname(*args,**kw):
"""getprotobyname(name) -> integer
Return the protocol number for the named protocol. (Rarely used.)"""
pass
def getservbyname(*args,**kw):
"""getservbyname(servicename[, protocolname]) -> integer
Return a port number from a service name and protocol name.
The optional protocol name, if given, should be 'tcp' or 'udp',
otherwise any protocol will match."""
pass
def getservbyport(*args,**kw):
"""getservbyport(port[, protocolname]) -> string
Return the service name from a port number and protocol name.
The optional protocol name, if given, should be 'tcp' or 'udp',
otherwise any protocol will match."""
pass
has_ipv6 = True
class herror:
pass
def htonl(*args,**kw):
"""htonl(integer) -> integer
Convert a 32-bit integer from host to network byte order."""
pass
def htons(*args,**kw):
"""htons(integer) -> integer
Convert a 16-bit integer from host to network byte order."""
pass
def inet_aton(*args,**kw):
"""inet_aton(string) -> bytes giving packed 32-bit IP representation
Convert an IP address in string format (123.45.67.89) to the 32-bit packed
binary format used in low-level network functions."""
pass
def inet_ntoa(*args,**kw):
"""inet_ntoa(packed_ip) -> ip_address_string
Convert an IP address from 32-bit packed binary format to string format"""
pass
def ntohl(*args,**kw):
"""ntohl(integer) -> integer
Convert a 32-bit integer from network to host byte order."""
pass
def ntohs(*args,**kw):
"""ntohs(integer) -> integer
Convert a 16-bit integer from network to host byte order."""
pass
def setdefaulttimeout(*args,**kw):
"""setdefaulttimeout(timeout)
Set the default timeout in seconds (float) for new socket objects.
A value of None indicates that new socket objects have no timeout.
When the socket module is first imported, the default is None."""
pass
class socket:
def __init__(self,*args,**kw):
pass
def bind(self,*args,**kw):
pass
def close(self):
pass
class timeout:
pass
| agpl-3.0 |
AIFDR/inasafe | test_suite.py | 3 | 3260 | # coding=utf-8
"""
Test Suite for InaSAFE.
Contact : etienne at kartoza dot com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
import sys
import os
import unittest
import qgis # NOQA Forces the SIP API to V2 if run outside of QGIS
try:
from pip import main as pipmain
except ImportError:
from pip._internal import main as pipmain
try:
import coverage
except ImportError:
pipmain(['install', 'coverage'])
import coverage
import tempfile
from osgeo import gdal
from qgis.PyQt import Qt
from safe.utilities.gis import qgis_version
__author__ = 'etiennetrimaille'
__revision__ = '$Format:%H$'
__date__ = '14/06/2016'
__copyright__ = (
'Copyright 2012, Australia Indonesia Facility for Disaster Reduction')
def _run_tests(test_suite, package_name, with_coverage=False):
"""Core function to test a test suite."""
count = test_suite.countTestCases()
print('########')
print('%s tests has been discovered in %s' % (count, package_name))
print('QGIS : %s' % qgis_version())
print('Python GDAL : %s' % gdal.VersionInfo('VERSION_NUM'))
print('QT : %s' % Qt.QT_VERSION_STR)
print('Run slow tests : %s' % (not os.environ.get('ON_TRAVIS', False)))
print('########')
if with_coverage:
cov = coverage.Coverage(
source=['safe/'],
omit=['*/test/*', 'safe/definitions/*'],
)
cov.start()
unittest.TextTestRunner(verbosity=3, stream=sys.stdout).run(test_suite)
if with_coverage:
cov.stop()
cov.save()
report = tempfile.NamedTemporaryFile(delete=False)
cov.report(file=report)
# Produce HTML reports in the `htmlcov` folder and open index.html
# cov.html_report()
report.close()
with open(report.name, 'r') as fin:
print(fin.read())
def test_package(package='safe'):
"""Test package.
This function is called by travis without arguments.
:param package: The package to test.
:type package: str
"""
test_loader = unittest.defaultTestLoader
try:
test_suite = test_loader.discover(package)
except ImportError:
test_suite = unittest.TestSuite()
_run_tests(test_suite, package)
def test_environment():
"""Test package with an environment variable."""
package = os.environ.get('TESTING_PACKAGE', 'safe')
test_loader = unittest.defaultTestLoader
test_suite = test_loader.discover(package)
_run_tests(test_suite, package)
def test_manually():
"""Test manually a test class.
You can change this function as much as you want.
"""
from safe.test.test_init import TestInit
test_suite = unittest.makeSuite(TestInit, 'test')
_run_tests(test_suite, 'custom test class')
def test_one():
"""Run a single test"""
from safe.gui.tools.test.test_extent_selector import ExtentSelectorTest
test_runner = unittest.TextTestRunner(verbosity=3, stream=sys.stdout)
test_runner.run(unittest.makeSuite(ExtentSelectorTest, 'test'))
if __name__ == '__main__':
test_package()
| gpl-3.0 |
Venturi/oldcms | env/lib/python2.7/site-packages/cmsplugin_filer_image/migrations_django/0001_initial.py | 8 | 4643 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import filer.fields.file
import filer.fields.image
import cms.models.fields
class Migration(migrations.Migration):
dependencies = [
('cms', '0003_auto_20140926_2347'),
('filer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FilerImage',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('style', models.CharField(max_length=50, verbose_name='Style', default=settings.CMSPLUGIN_FILER_IMAGE_DEFAULT_STYLE, blank=True, choices=settings.CMSPLUGIN_FILER_IMAGE_STYLE_CHOICES)),
('caption_text', models.CharField(max_length=255, null=True, verbose_name='caption text', blank=True)),
('image_url', models.URLField(default=None, null=True, verbose_name='alternative image url', blank=True)),
('alt_text', models.CharField(max_length=255, null=True, verbose_name='alt text', blank=True)),
('use_original_image', models.BooleanField(default=False, help_text='do not resize the image. use the original image instead.', verbose_name='use the original image')),
('use_autoscale', models.BooleanField(default=False, help_text='tries to auto scale the image based on the placeholder context', verbose_name='use automatic scaling')),
('width', models.PositiveIntegerField(null=True, verbose_name='width', blank=True)),
('height', models.PositiveIntegerField(null=True, verbose_name='height', blank=True)),
('crop', models.BooleanField(default=True, verbose_name='crop')),
('upscale', models.BooleanField(default=True, verbose_name='upscale')),
('alignment', models.CharField(blank=True, max_length=10, null=True, verbose_name='image alignment', choices=[('left', 'left'), ('right', 'right')])),
('free_link', models.CharField(help_text='if present image will be clickable', max_length=255, null=True, verbose_name='link', blank=True)),
('original_link', models.BooleanField(default=False, help_text='if present image will be clickable', verbose_name='link original image')),
('description', models.TextField(null=True, verbose_name='description', blank=True)),
('target_blank', models.BooleanField(default=False, verbose_name='Open link in new window')),
('file_link', filer.fields.file.FilerFileField(related_name='+', default=None, to='filer.File', blank=True, help_text='if present image will be clickable', null=True, verbose_name='file link')),
('image', filer.fields.image.FilerImageField(default=None, blank=True, to='filer.Image', null=True, verbose_name='image')),
('page_link', cms.models.fields.PageField(blank=True, to='cms.Page', help_text='if present image will be clickable', null=True, verbose_name='page link')),
],
options={
'verbose_name': 'filer image',
'verbose_name_plural': 'filer images',
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='ThumbnailOption',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100, verbose_name='name')),
('width', models.IntegerField(help_text='width in pixel.', verbose_name='width')),
('height', models.IntegerField(help_text='height in pixel.', verbose_name='height')),
('crop', models.BooleanField(default=True, verbose_name='crop')),
('upscale', models.BooleanField(default=True, verbose_name='upscale')),
],
options={
'ordering': ('width', 'height'),
'verbose_name': 'thumbnail option',
'verbose_name_plural': 'thumbnail options',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='filerimage',
name='thumbnail_option',
field=models.ForeignKey(blank=True, to='cmsplugin_filer_image.ThumbnailOption', help_text='overrides width, height, crop and upscale with values from the selected thumbnail option', null=True, verbose_name='thumbnail option'),
preserve_default=True,
),
]
| apache-2.0 |
jschueller/numpy | numpy/core/tests/test_records.py | 42 | 11668 | from __future__ import division, absolute_import, print_function
import sys
import collections
import pickle
from os import path
import numpy as np
from numpy.compat import asbytes
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_array_equal,
assert_array_almost_equal, assert_raises
)
class TestFromrecords(TestCase):
def test_fromrecords(self):
r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]],
names='col1,col2,col3')
assert_equal(r[0].item(), (456, 'dbe', 1.2))
assert_equal(r['col1'].dtype.kind, 'i')
if sys.version_info[0] >= 3:
assert_equal(r['col2'].dtype.kind, 'U')
assert_equal(r['col2'].dtype.itemsize, 12)
else:
assert_equal(r['col2'].dtype.kind, 'S')
assert_equal(r['col2'].dtype.itemsize, 3)
assert_equal(r['col3'].dtype.kind, 'f')
def test_method_array(self):
r = np.rec.array(asbytes('abcdefg') * 100, formats='i2,a3,i4', shape=3, byteorder='big')
assert_equal(r[1].item(), (25444, asbytes('efg'), 1633837924))
def test_method_array2(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1].item(), (2, 22.0, asbytes('b')))
def test_recarray_slices(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1::2][1].item(), (4, 44.0, asbytes('d')))
def test_recarray_fromarrays(self):
x1 = np.array([1, 2, 3, 4])
x2 = np.array(['a', 'dd', 'xyz', '12'])
x3 = np.array([1.1, 2, 3, 4])
r = np.rec.fromarrays([x1, x2, x3], names='a,b,c')
assert_equal(r[1].item(), (2, 'dd', 2.0))
x1[1] = 34
assert_equal(r.a, np.array([1, 2, 3, 4]))
def test_recarray_fromfile(self):
data_dir = path.join(path.dirname(__file__), 'data')
filename = path.join(data_dir, 'recarray_from_file.fits')
fd = open(filename, 'rb')
fd.seek(2880 * 2)
r1 = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.seek(2880 * 2)
r2 = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.close()
assert_equal(r1, r2)
def test_recarray_from_obj(self):
count = 10
a = np.zeros(count, dtype='O')
b = np.zeros(count, dtype='f8')
c = np.zeros(count, dtype='f8')
for i in range(len(a)):
a[i] = list(range(1, 10))
mine = np.rec.fromarrays([a, b, c], names='date,data1,data2')
for i in range(len(a)):
assert_((mine.date[i] == list(range(1, 10))))
assert_((mine.data1[i] == 0.0))
assert_((mine.data2[i] == 0.0))
def test_recarray_from_repr(self):
a = np.array([(1,'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
recordarr = np.rec.array(a)
recarr = a.view(np.recarray)
recordview = a.view(np.dtype((np.record, a.dtype)))
recordarr_r = eval("numpy." + repr(recordarr), {'numpy': np})
recarr_r = eval("numpy." + repr(recarr), {'numpy': np})
recordview_r = eval("numpy." + repr(recordview), {'numpy': np})
assert_equal(type(recordarr_r), np.recarray)
assert_equal(recordarr_r.dtype.type, np.record)
assert_equal(recordarr, recordarr_r)
assert_equal(type(recarr_r), np.recarray)
assert_equal(recarr_r.dtype.type, np.record)
assert_equal(recarr, recarr_r)
assert_equal(type(recordview_r), np.ndarray)
assert_equal(recordview.dtype.type, np.record)
assert_equal(recordview, recordview_r)
def test_recarray_views(self):
a = np.array([(1,'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
b = np.array([1,2,3,4,5], dtype=np.int64)
#check that np.rec.array gives right dtypes
assert_equal(np.rec.array(a).dtype.type, np.record)
assert_equal(type(np.rec.array(a)), np.recarray)
assert_equal(np.rec.array(b).dtype.type, np.int64)
assert_equal(type(np.rec.array(b)), np.recarray)
#check that viewing as recarray does the same
assert_equal(a.view(np.recarray).dtype.type, np.record)
assert_equal(type(a.view(np.recarray)), np.recarray)
assert_equal(b.view(np.recarray).dtype.type, np.int64)
assert_equal(type(b.view(np.recarray)), np.recarray)
#check that view to non-structured dtype preserves type=np.recarray
r = np.rec.array(np.ones(4, dtype="f4,i4"))
rv = r.view('f8').view('f4,i4')
assert_equal(type(rv), np.recarray)
assert_equal(rv.dtype.type, np.record)
#check that we can undo the view
arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')]
for arr in arrs:
rec = np.rec.array(arr)
# recommended way to view as an ndarray:
arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray)
assert_equal(arr2.dtype.type, arr.dtype.type)
assert_equal(type(arr2), type(arr))
def test_recarray_repr(self):
# make sure non-structured dtypes also show up as rec.array
a = np.array(np.ones(4, dtype='f8'))
assert_(repr(np.rec.array(a)).startswith('rec.array'))
def test_recarray_from_names(self):
ra = np.rec.array([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
pa = np.rec.fromrecords([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
assert_(ra.dtype == pa.dtype)
assert_(ra.shape == pa.shape)
for k in range(len(ra)):
assert_(ra[k].item() == pa[k].item())
def test_recarray_conflict_fields(self):
ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2),
(3, 'wrs', 1.3)],
names='field, shape, mean')
ra.mean = [1.1, 2.2, 3.3]
assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3])
assert_(type(ra.mean) is type(ra.var))
ra.shape = (1, 3)
assert_(ra.shape == (1, 3))
ra.shape = ['A', 'B', 'C']
assert_array_equal(ra['shape'], [['A', 'B', 'C']])
ra.field = 5
assert_array_equal(ra['field'], [[5, 5, 5]])
assert_(isinstance(ra.field, collections.Callable))
def test_fromrecords_with_explicit_dtype(self):
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')],
dtype=[('a', int), ('b', np.object)])
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
#
ndtype = np.dtype([('a', int), ('b', np.object)])
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype)
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
def test_recarray_stringtypes(self):
# Issue #3993
a = np.array([('abc ', 1), ('abc', 2)],
dtype=[('foo', 'S4'), ('bar', int)])
a = a.view(np.recarray)
assert_equal(a.foo[0] == a.foo[1], False)
def test_recarray_returntypes(self):
qux_fields = {'C': (np.dtype('S5'), 0), 'D': (np.dtype('S5'), 6)}
a = np.rec.array([('abc ', (1,1), 1, ('abcde', 'fgehi')),
('abc', (2,3), 1, ('abcde', 'jklmn'))],
dtype=[('foo', 'S4'),
('bar', [('A', int), ('B', int)]),
('baz', int), ('qux', qux_fields)])
assert_equal(type(a.foo), np.ndarray)
assert_equal(type(a['foo']), np.ndarray)
assert_equal(type(a.bar), np.recarray)
assert_equal(type(a['bar']), np.recarray)
assert_equal(a.bar.dtype.type, np.record)
assert_equal(type(a['qux']), np.recarray)
assert_equal(a.qux.dtype.type, np.record)
assert_equal(dict(a.qux.dtype.fields), qux_fields)
assert_equal(type(a.baz), np.ndarray)
assert_equal(type(a['baz']), np.ndarray)
assert_equal(type(a[0].bar), np.record)
assert_equal(type(a[0]['bar']), np.record)
assert_equal(a[0].bar.A, 1)
assert_equal(a[0].bar['A'], 1)
assert_equal(a[0]['bar'].A, 1)
assert_equal(a[0]['bar']['A'], 1)
assert_equal(a[0].qux.D, asbytes('fgehi'))
assert_equal(a[0].qux['D'], asbytes('fgehi'))
assert_equal(a[0]['qux'].D, asbytes('fgehi'))
assert_equal(a[0]['qux']['D'], asbytes('fgehi'))
class TestRecord(TestCase):
def setUp(self):
self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)],
dtype=[("col1", "<i4"),
("col2", "<i4"),
("col3", "<i4")])
def test_assignment1(self):
a = self.data
assert_equal(a.col1[0], 1)
a[0].col1 = 0
assert_equal(a.col1[0], 0)
def test_assignment2(self):
a = self.data
assert_equal(a.col1[0], 1)
a.col1[0] = 0
assert_equal(a.col1[0], 0)
def test_invalid_assignment(self):
a = self.data
def assign_invalid_column(x):
x[0].col5 = 1
self.assertRaises(AttributeError, assign_invalid_column, a)
def test_out_of_order_fields(self):
"""Ticket #1431."""
x = self.data[['col1', 'col2']]
y = self.data[['col2', 'col1']]
assert_equal(x[0][0], y[0][1])
def test_pickle_1(self):
# Issue #1529
a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)])
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_pickle_2(self):
a = self.data
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_objview_record(self):
# https://github.com/numpy/numpy/issues/2599
dt = np.dtype([('foo', 'i8'), ('bar', 'O')])
r = np.zeros((1,3), dtype=dt).view(np.recarray)
r.foo = np.array([1, 2, 3]) # TypeError?
# https://github.com/numpy/numpy/issues/3256
ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)])
ra[['x','y']] # TypeError?
def test_record_scalar_setitem(self):
# https://github.com/numpy/numpy/issues/3561
rec = np.recarray(1, dtype=[('x', float, 5)])
rec[0].x = 1
assert_equal(rec[0].x, np.ones(5))
def test_missing_field(self):
# https://github.com/numpy/numpy/issues/4806
arr = np.zeros((3,), dtype=[('x', int), ('y', int)])
assert_raises(ValueError, lambda: arr[['nofield']])
def test_find_duplicate():
l1 = [1, 2, 3, 4, 5, 6]
assert_(np.rec.find_duplicate(l1) == [])
l2 = [1, 2, 1, 4, 5, 6]
assert_(np.rec.find_duplicate(l2) == [1])
l3 = [1, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [1, 2])
l3 = [2, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [2, 1])
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause |
blarghmatey/pip | pip/_vendor/html5lib/treewalkers/__init__.py | 499 | 5766 | """A collection of modules for iterating through different kinds of
tree, generating tokens identical to those produced by the tokenizer
module.
To create a tree walker for a new type of tree, you need to
implement a tree walker object (called TreeWalker by convention) that
implements a 'serialize' method taking a tree as sole argument and
returning an iterator generating tokens.
"""
from __future__ import absolute_import, division, unicode_literals
__all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshistream", "lxmletree",
"pulldom"]
import sys
from .. import constants
from ..utils import default_etree
treeWalkerCache = {}
def getTreeWalker(treeType, implementation=None, **kwargs):
"""Get a TreeWalker class for various types of tree with built-in support
treeType - the name of the tree type required (case-insensitive). Supported
values are:
"dom" - The xml.dom.minidom DOM implementation
"pulldom" - The xml.dom.pulldom event stream
"etree" - A generic walker for tree implementations exposing an
elementtree-like interface (known to work with
ElementTree, cElementTree and lxml.etree).
"lxml" - Optimized walker for lxml.etree
"genshi" - a Genshi stream
implementation - (Currently applies to the "etree" tree type only). A module
implementing the tree type e.g. xml.etree.ElementTree or
cElementTree."""
treeType = treeType.lower()
if treeType not in treeWalkerCache:
if treeType in ("dom", "pulldom"):
name = "%s.%s" % (__name__, treeType)
__import__(name)
mod = sys.modules[name]
treeWalkerCache[treeType] = mod.TreeWalker
elif treeType == "genshi":
from . import genshistream
treeWalkerCache[treeType] = genshistream.TreeWalker
elif treeType == "lxml":
from . import lxmletree
treeWalkerCache[treeType] = lxmletree.TreeWalker
elif treeType == "etree":
from . import etree
if implementation is None:
implementation = default_etree
# XXX: NEVER cache here, caching is done in the etree submodule
return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
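# Illustrative sketch (not part of the original module): the returned class
# is instantiated with a tree and then iterated for token dicts, e.g.
# (assuming the stdlib ElementTree implementation):
#
#   import xml.etree.ElementTree as ET
#   TreeWalker = getTreeWalker("etree", ET)
#   doc = ET.fromstring("<p>hi</p>")
#   print(pprint(TreeWalker(doc)))   # pretty-printed token stream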
def concatenateCharacterTokens(tokens):
pendingCharacters = []
for token in tokens:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
pendingCharacters.append(token["data"])
else:
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
pendingCharacters = []
yield token
if pendingCharacters:
yield {"type": "Characters", "data": "".join(pendingCharacters)}
def pprint(walker):
"""Pretty printer for tree walkers"""
output = []
indent = 0
for token in concatenateCharacterTokens(walker):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
# tag name
if token["namespace"] and token["namespace"] != constants.namespaces["html"]:
if token["namespace"] in constants.prefixes:
ns = constants.prefixes[token["namespace"]]
else:
ns = token["namespace"]
name = "%s %s" % (ns, token["name"])
else:
name = token["name"]
output.append("%s<%s>" % (" " * indent, name))
indent += 2
# attributes (sorted for consistent ordering)
attrs = token["data"]
for (namespace, localname), value in sorted(attrs.items()):
if namespace:
if namespace in constants.prefixes:
ns = constants.prefixes[namespace]
else:
ns = namespace
name = "%s %s" % (ns, localname)
else:
name = localname
output.append("%s%s=\"%s\"" % (" " * indent, name, value))
# self-closing
if type == "EmptyTag":
indent -= 2
elif type == "EndTag":
indent -= 2
elif type == "Comment":
output.append("%s<!-- %s -->" % (" " * indent, token["data"]))
elif type == "Doctype":
if token["name"]:
if token["publicId"]:
output.append("""%s<!DOCTYPE %s "%s" "%s">""" %
(" " * indent,
token["name"],
token["publicId"],
token["systemId"] if token["systemId"] else ""))
elif token["systemId"]:
output.append("""%s<!DOCTYPE %s "" "%s">""" %
(" " * indent,
token["name"],
token["systemId"]))
else:
output.append("%s<!DOCTYPE %s>" % (" " * indent,
token["name"]))
else:
output.append("%s<!DOCTYPE >" % (" " * indent,))
elif type == "Characters":
output.append("%s\"%s\"" % (" " * indent, token["data"]))
elif type == "SpaceCharacters":
assert False, "concatenateCharacterTokens should have got rid of all Space tokens"
else:
raise ValueError("Unknown token type, %s" % type)
return "\n".join(output)
| mit |
olexiim/edx-platform | lms/djangoapps/instructor/enrollment.py | 2 | 13610 | """
Enrollment operations for use by instructor APIs.
Does not include any access control; be sure to check access before calling.
"""
import json
from django.contrib.auth.models import User
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from courseware.models import StudentModule
from edxmako.shortcuts import render_to_string
from submissions import api as sub_api # installed from the edx-submissions repository
from student.models import anonymous_id_for_user
from microsite_configuration import microsite
class EmailEnrollmentState(object):
""" Store the complete enrollment state of an email in a class """
def __init__(self, course_id, email):
exists_user = User.objects.filter(email=email).exists()
if exists_user:
user = User.objects.get(email=email)
mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_id)
# is_active is `None` if the user is not enrolled in the course
exists_ce = is_active is not None and is_active
full_name = user.profile.name
else:
mode = None
exists_ce = False
full_name = None
ceas = CourseEnrollmentAllowed.objects.filter(course_id=course_id, email=email).all()
exists_allowed = ceas.exists()
state_auto_enroll = exists_allowed and ceas[0].auto_enroll
self.user = exists_user
self.enrollment = exists_ce
self.allowed = exists_allowed
self.auto_enroll = bool(state_auto_enroll)
self.full_name = full_name
self.mode = mode
def __repr__(self):
return "{}(user={}, enrollment={}, allowed={}, auto_enroll={})".format(
self.__class__.__name__,
self.user,
self.enrollment,
self.allowed,
self.auto_enroll,
)
def to_dict(self):
"""
example: {
'user': False,
'enrollment': False,
'allowed': True,
'auto_enroll': True,
}
"""
return {
'user': self.user,
'enrollment': self.enrollment,
'allowed': self.allowed,
'auto_enroll': self.auto_enroll,
}
def enroll_email(course_id, student_email, auto_enroll=False, email_students=False, email_params=None):
"""
Enroll a student by email.
    `student_email` is the student's email address, e.g. "[email protected]"
`auto_enroll` determines what is put in CourseEnrollmentAllowed.auto_enroll
        if auto_enroll is set, then when a user registers with that email, they will be
enrolled in the course automatically.
`email_students` determines if student should be notified of action by email.
`email_params` parameters used while parsing email templates (a `dict`).
    returns two EmailEnrollmentState objects
representing state before and after the action.
"""
previous_state = EmailEnrollmentState(course_id, student_email)
if previous_state.user:
# if the student is currently unenrolled, don't enroll them in their
# previous mode
course_mode = u"honor"
if previous_state.enrollment:
course_mode = previous_state.mode
CourseEnrollment.enroll_by_email(student_email, course_id, course_mode)
if email_students:
email_params['message'] = 'enrolled_enroll'
email_params['email_address'] = student_email
email_params['full_name'] = previous_state.full_name
send_mail_to_student(student_email, email_params)
else:
cea, _ = CourseEnrollmentAllowed.objects.get_or_create(course_id=course_id, email=student_email)
cea.auto_enroll = auto_enroll
cea.save()
if email_students:
email_params['message'] = 'allowed_enroll'
email_params['email_address'] = student_email
send_mail_to_student(student_email, email_params)
after_state = EmailEnrollmentState(course_id, student_email)
return previous_state, after_state
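# Hedged usage sketch: `course` is assumed to be a course descriptor with an
# `id`; callers typically build the template parameters once with
# get_email_params (defined below) and pass them through.
def _example_enroll(course, student_email):
    params = get_email_params(course, auto_enroll=True)
    before, after = enroll_email(
        course.id, student_email,
        auto_enroll=True, email_students=True, email_params=params,
    )
    return before.to_dict(), after.to_dict()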
def unenroll_email(course_id, student_email, email_students=False, email_params=None):
"""
Unenroll a student by email.
    `student_email` is the student's email address, e.g. "[email protected]"
`email_students` determines if student should be notified of action by email.
`email_params` parameters used while parsing email templates (a `dict`).
    returns two EmailEnrollmentState objects
representing state before and after the action.
"""
previous_state = EmailEnrollmentState(course_id, student_email)
if previous_state.enrollment:
CourseEnrollment.unenroll_by_email(student_email, course_id)
if email_students:
email_params['message'] = 'enrolled_unenroll'
email_params['email_address'] = student_email
email_params['full_name'] = previous_state.full_name
send_mail_to_student(student_email, email_params)
if previous_state.allowed:
CourseEnrollmentAllowed.objects.get(course_id=course_id, email=student_email).delete()
if email_students:
email_params['message'] = 'allowed_unenroll'
email_params['email_address'] = student_email
# Since no User object exists for this student there is no "full_name" available.
send_mail_to_student(student_email, email_params)
after_state = EmailEnrollmentState(course_id, student_email)
return previous_state, after_state
def send_beta_role_email(action, user, email_params):
"""
Send an email to a user added or removed as a beta tester.
`action` is one of 'add' or 'remove'
`user` is the User affected
`email_params` parameters used while parsing email templates (a `dict`).
"""
if action == 'add':
email_params['message'] = 'add_beta_tester'
email_params['email_address'] = user.email
email_params['full_name'] = user.profile.name
elif action == 'remove':
email_params['message'] = 'remove_beta_tester'
email_params['email_address'] = user.email
email_params['full_name'] = user.profile.name
else:
raise ValueError("Unexpected action received '{}' - expected 'add' or 'remove'".format(action))
send_mail_to_student(user.email, email_params)
def reset_student_attempts(course_id, student, module_state_key, delete_module=False):
"""
Reset student attempts for a problem. Optionally deletes all student state for the specified problem.
In the previous instructor dashboard it was possible to modify/delete
modules that were not problems. That has been disabled for safety.
`student` is a User
    `module_state_key` identifies the problem whose attempts/state should be
        reset (historically built from a problem name such as 'L2Node1' by
        prepending 'problem/' and course information).
Raises:
ValueError: `problem_state` is invalid JSON.
StudentModule.DoesNotExist: could not load the student module.
submissions.SubmissionError: unexpected error occurred while resetting the score in the submissions API.
"""
# Reset the student's score in the submissions API
# Currently this is used only by open assessment (ORA 2)
# We need to do this *before* retrieving the `StudentModule` model,
# because it's possible for a score to exist even if no student module exists.
if delete_module:
sub_api.reset_score(
anonymous_id_for_user(student, course_id),
course_id.to_deprecated_string(),
module_state_key.to_deprecated_string(),
)
module_to_reset = StudentModule.objects.get(
student_id=student.id,
course_id=course_id,
module_state_key=module_state_key
)
if delete_module:
module_to_reset.delete()
else:
_reset_module_attempts(module_to_reset)
def _reset_module_attempts(studentmodule):
"""
Reset the number of attempts on a studentmodule.
Throws ValueError if `problem_state` is invalid JSON.
"""
# load the state json
problem_state = json.loads(studentmodule.state)
# old_number_of_attempts = problem_state["attempts"]
problem_state["attempts"] = 0
# save
studentmodule.state = json.dumps(problem_state)
studentmodule.save()
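# Illustrative sketch of the transform above: only the "attempts" counter is
# zeroed; all other problem state in the JSON blob is preserved.
def _example_reset_state(state_json='{"attempts": 3, "seed": 1}'):
    state = json.loads(state_json)
    state["attempts"] = 0
    return json.dumps(state)  # e.g. '{"attempts": 0, "seed": 1}'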
def get_email_params(course, auto_enroll, secure=True):
"""
Generate parameters used when parsing email templates.
    `auto_enroll` is a flag for auto-enrolling non-registered students (a `boolean`)
Returns a dict of parameters
"""
protocol = 'https' if secure else 'http'
stripped_site_name = microsite.get_value(
'SITE_NAME',
settings.SITE_NAME
)
# TODO: Use request.build_absolute_uri rather than '{proto}://{site}{path}'.format
# and check with the Services team that this works well with microsites
registration_url = u'{proto}://{site}{path}'.format(
proto=protocol,
site=stripped_site_name,
path=reverse('student.views.register_user')
)
course_url = u'{proto}://{site}{path}'.format(
proto=protocol,
site=stripped_site_name,
path=reverse('course_root', kwargs={'course_id': course.id.to_deprecated_string()})
)
# We can't get the url to the course's About page if the marketing site is enabled.
course_about_url = None
if not settings.FEATURES.get('ENABLE_MKTG_SITE', False):
course_about_url = u'{proto}://{site}{path}'.format(
proto=protocol,
site=stripped_site_name,
path=reverse('about_course', kwargs={'course_id': course.id.to_deprecated_string()})
)
is_shib_course = uses_shib(course)
# Composition of email
email_params = {
'site_name': stripped_site_name,
'registration_url': registration_url,
'course': course,
'auto_enroll': auto_enroll,
'course_url': course_url,
'course_about_url': course_about_url,
'is_shib_course': is_shib_course,
}
return email_params
def send_mail_to_student(student, param_dict):
"""
Construct the email using templates and then send it.
`student` is the student's email address (a `str`),
`param_dict` is a `dict` with keys
[
`site_name`: name given to edX instance (a `str`)
`registration_url`: url for registration (a `str`)
`course_id`: id of course (a `str`)
`auto_enroll`: user input option (a `str`)
`course_url`: url of course (a `str`)
`email_address`: email of student (a `str`)
`full_name`: student full name (a `str`)
`message`: type of email to send and template to use (a `str`)
`is_shib_course`: (a `boolean`)
]
Returns a boolean indicating whether the email was sent successfully.
"""
    # add some helpers and microsite configuration substitutions
if 'course' in param_dict:
param_dict['course_name'] = param_dict['course'].display_name_with_default
param_dict['site_name'] = microsite.get_value(
'SITE_NAME',
param_dict['site_name']
)
subject = None
message = None
    # See if we are running in a microsite and there is an activation email
    # template definition available as configuration; if so, render that.
message_type = param_dict['message']
email_template_dict = {
'allowed_enroll': (
'emails/enroll_email_allowedsubject.txt',
'emails/enroll_email_allowedmessage.txt'
),
'enrolled_enroll': (
'emails/enroll_email_enrolledsubject.txt',
'emails/enroll_email_enrolledmessage.txt'
),
'allowed_unenroll': (
'emails/unenroll_email_subject.txt',
'emails/unenroll_email_allowedmessage.txt'
),
'enrolled_unenroll': (
'emails/unenroll_email_subject.txt',
'emails/unenroll_email_enrolledmessage.txt'
),
'add_beta_tester': (
'emails/add_beta_tester_email_subject.txt',
'emails/add_beta_tester_email_message.txt'
),
'remove_beta_tester': (
'emails/remove_beta_tester_email_subject.txt',
'emails/remove_beta_tester_email_message.txt'
),
'account_creation_and_enrollment': (
'emails/enroll_email_enrolledsubject.txt',
'emails/account_creation_and_enroll_emailMessage.txt'
),
}
subject_template, message_template = email_template_dict.get(message_type, (None, None))
if subject_template is not None and message_template is not None:
subject = render_to_string(subject_template, param_dict)
message = render_to_string(message_template, param_dict)
if subject and message:
# Remove leading and trailing whitespace from body
message = message.strip()
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
from_address = microsite.get_value(
'email_from_address',
settings.DEFAULT_FROM_EMAIL
)
send_mail(subject, message, from_address, [student], fail_silently=False)
def uses_shib(course):
"""
    Return whether the course has Shibboleth as its enrollment domain
    (a boolean indicating if Shibboleth authentication is set for this course).
"""
return course.enrollment_domain and course.enrollment_domain.startswith(settings.SHIBBOLETH_DOMAIN_PREFIX)
| agpl-3.0 |
akarol/cfme_tests | cfme/tests/distributed/test_appliance_replication.py | 1 | 12939 | # -*- coding: utf-8 -*-
import pytest
from time import sleep
from six.moves.urllib.parse import urlparse
from cfme.base.ui import ServerView
from cfme.common.vm import VM
from cfme.infrastructure.provider import wait_for_a_provider
from cfme.utils.appliance import provision_appliance
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.conf import credentials
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
from cfme.utils.ssh import SSHClient
from cfme.utils.wait import wait_for
from cfme import test_requirements
pytestmark = [
pytest.mark.long_running,
test_requirements.distributed,
pytest.mark.uncollect(reason="test framework broke browser_steal"),
]
def get_ssh_client(hostname):
""" Returns fresh ssh client connected to given server using given credentials
"""
hostname = urlparse('scheme://' + hostname).netloc
connect_kwargs = {
'username': credentials['ssh']['username'],
'password': credentials['ssh']['password'],
'hostname': hostname,
}
return SSHClient(**connect_kwargs)
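# Usage sketch ('appliance.example.com' is a placeholder): prefixing a dummy
# scheme lets urlparse reduce any trailing port or path to the netloc, e.g.
# 'appliance.example.com:22/extra' -> 'appliance.example.com:22'.
def _example_get_ssh_client():
    return get_ssh_client('appliance.example.com')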
def get_replication_appliances(appliance):
"""Returns two database-owning appliances configured
with unique region numbers.
"""
ver_to_prov = str(appliance.version)
appl1 = provision_appliance(ver_to_prov, 'long-test_repl_A')
appl2 = provision_appliance(ver_to_prov, 'long-test_repl_B')
appl1.configure(region=1)
appl1.ipapp.wait_for_web_ui()
appl2.update_guid()
appl2.configure(region=2, key_address=appl1.hostname)
appl2.ipapp.wait_for_web_ui()
return appl1, appl2
def get_distributed_appliances(appliance):
"""Returns one database-owning appliance, and a second appliance
that connects to the database of the first.
"""
ver_to_prov = str(appliance.version)
appl1 = provision_appliance(ver_to_prov, 'long-test_childDB_A')
appl2 = provision_appliance(ver_to_prov, 'long-test_childDB_B')
appl1.configure(region=1)
appl1.ipapp.wait_for_web_ui()
appl2.configure(region=1, key_address=appl1.hostname, db_address=appl1.hostname)
appl2.ipapp.wait_for_web_ui()
return appl1, appl2
def configure_db_replication(db_address, appliance):
"""Enables the sync role and configures the appliance to replicate to
the db_address specified. Then, it waits for the UI to show the replication
as active and the backlog as empty.
"""
replication_conf = appliance.server.zone.region.replication
replication_conf.set_replication(
{'host': db_address}, 'global')
view = appliance.server.browser.create_view(ServerView)
view.flash.assert_message("Configuration settings saved for CFME Server") # may be partial
appliance.server.settings.enable_server_roles('database_synchronization')
rep_status, _ = wait_for(replication_conf.get_replication_status, fail_condition=False,
num_sec=360, delay=10,
fail_func=appliance.server.browser.refresh,
message="get_replication_status")
assert rep_status
    wait_for(lambda: replication_conf.get_global_replication_backlog() == 0, fail_condition=False,
num_sec=120, delay=10,
fail_func=appliance.server.browser.refresh, message="get_replication_backlog")
@pytest.yield_fixture(scope="module")
def test_vm(virtualcenter_provider):
"""Fixture to provision appliance to the provider being tested if necessary"""
vm_name = random_vm_name('distpwr')
vm = VM.factory(vm_name, virtualcenter_provider)
if not virtualcenter_provider.mgmt.does_vm_exist(vm_name):
logger.info("deploying %r on provider %r", vm_name, virtualcenter_provider.key)
vm.create_on_provider(find_in_cfme=True, allow_skip="default")
else:
logger.info("recycling deployed vm %r on provider %r", vm_name, virtualcenter_provider.key)
yield vm
try:
virtualcenter_provider.mgmt.delete_vm(vm_name=vm_name)
except Exception:
logger.exception('Failed deleting VM "%r" on "%r"', vm_name, virtualcenter_provider.name)
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream")
def test_appliance_replicate_between_regions(request, virtualcenter_provider, appliance):
"""Tests that a provider added to an appliance in one region
is replicated to the parent appliance in another region.
Metadata:
test_flag: replication
"""
    appl1, appl2 = get_replication_appliances(appliance)
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
        configure_db_replication(appl2.hostname, appliance)
virtualcenter_provider.create()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
wait_for_a_provider()
assert virtualcenter_provider.exists
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream")
def test_external_database_appliance(request, virtualcenter_provider, appliance):
"""Tests that one appliance can externally
connect to the database of another appliance.
Metadata:
test_flag: replication
"""
appl1, appl2 = get_distributed_appliances(appliance)
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
virtualcenter_provider.create()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
wait_for_a_provider()
assert virtualcenter_provider.exists
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream")
def test_appliance_replicate_sync_role_change(request, virtualcenter_provider, appliance):
"""Tests that a role change is replicated
Metadata:
test_flag: replication
"""
    appl1, appl2 = get_replication_appliances(appliance)
replication_conf = appliance.server.zone.region.replication
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
server_settings = appliance.server.settings
        configure_db_replication(appl2.hostname, appliance)
# Replication is up and running, now disable DB sync role
server_settings.disable_server_roles('database_synchronization')
wait_for(replication_conf.get_replication_status, fail_condition=True, num_sec=360,
delay=10, fail_func=appl1.server.browser.refresh, message="get_replication_status")
server_settings.enable_server_roles('database_synchronization')
wait_for(replication_conf.get_replication_status, fail_condition=False, num_sec=360,
delay=10, fail_func=appl1.server.browser.refresh, message="get_replication_status")
assert replication_conf.get_replication_status()
virtualcenter_provider.create()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
wait_for_a_provider()
assert virtualcenter_provider.exists
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream", "5.7") # no config->diagnostics->replication tab in 5.7
def test_appliance_replicate_sync_role_change_with_backlog(request, virtualcenter_provider,
appliance):
"""Tests that a role change is replicated with backlog
Metadata:
test_flag: replication
"""
    appl1, appl2 = get_replication_appliances(appliance)
replication_conf = appliance.server.zone.region.replication
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
server_settings = appliance.server.settings
        configure_db_replication(appl2.hostname, appliance)
# Replication is up and running, now disable DB sync role
virtualcenter_provider.create()
server_settings.disable_server_roles('database_synchronization')
wait_for(replication_conf.get_replication_status, fail_condition=True, num_sec=360,
delay=10, fail_func=appl1.server.browser.refresh, message="get_replication_status")
server_settings.enable_server_roles('database_synchronization')
wait_for(replication_conf.get_replication_status, fail_condition=False, num_sec=360,
delay=10, fail_func=appl1.server.browser.refresh, message="get_replication_status")
assert replication_conf.get_replication_status()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
wait_for_a_provider()
assert virtualcenter_provider.exists
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream", "5.7") # no config->diagnostics->replication tab in 5.7
def test_appliance_replicate_database_disconnection(request, virtualcenter_provider, appliance):
"""Tests a database disconnection
Metadata:
test_flag: replication
"""
    appl1, appl2 = get_replication_appliances(appliance)
replication_conf = appliance.server.zone.region.replication
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
        configure_db_replication(appl2.hostname, appliance)
# Replication is up and running, now stop the DB on the replication parent
appl2.db.stop_db_service()
sleep(60)
appl2.db.start_db_service()
wait_for(replication_conf.get_replication_status, fail_condition=False, num_sec=360,
delay=10, fail_func=appl1.server.browser.refresh, message="get_replication_status")
assert replication_conf.get_replication_status()
virtualcenter_provider.create()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
wait_for_a_provider()
assert virtualcenter_provider.exists
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream", "5.7") # no config->diagnostics->replication tab in 5.7
def test_appliance_replicate_database_disconnection_with_backlog(request, virtualcenter_provider,
appliance):
"""Tests a database disconnection with backlog
Metadata:
test_flag: replication
"""
    appl1, appl2 = get_replication_appliances(appliance)
replication_conf = appliance.server.zone.region.replication
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
        configure_db_replication(appl2.hostname, appliance)
# Replication is up and running, now stop the DB on the replication parent
virtualcenter_provider.create()
appl2.db.stop_db_service()
sleep(60)
appl2.db.start_db_service()
wait_for(replication_conf.get_replication_status, fail_condition=False, num_sec=360,
delay=10, fail_func=appl1.server.browser.refresh, message="get_replication_status")
assert replication_conf.get_replication_status()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
wait_for_a_provider()
assert virtualcenter_provider.exists
@pytest.mark.tier(2)
@pytest.mark.ignore_stream("upstream", "5.7") # no config->diagnostics->replication tab in 5.7
def test_distributed_vm_power_control(request, test_vm, virtualcenter_provider, verify_vm_running,
                                      register_event, soft_assert, appliance):
"""Tests that a replication parent appliance can control the power state of a
VM being managed by a replication child appliance.
Metadata:
test_flag: replication
"""
    appl1, appl2 = get_replication_appliances(appliance)
def finalize():
appl1.destroy()
appl2.destroy()
request.addfinalizer(finalize)
appl1.ipapp.browser_steal = True
with appl1.ipapp:
        configure_db_replication(appl2.hostname, appliance)
virtualcenter_provider.create()
wait_for_a_provider()
appl2.ipapp.browser_steal = True
with appl2.ipapp:
register_event(target_type='VmOrTemplate', target_name=test_vm.name,
event_type='request_vm_poweroff')
register_event(target_type='VmOrTemplate', target_name=test_vm.name,
event_type='vm_poweroff')
test_vm.power_control_from_cfme(option=test_vm.POWER_OFF, cancel=False)
navigate_to(test_vm.provider, 'Details')
test_vm.wait_for_vm_state_change(desired_state=test_vm.STATE_OFF, timeout=900)
soft_assert(test_vm.find_quadicon().data['state'] == 'currentstate-off')
soft_assert(
not test_vm.provider.mgmt.is_vm_running(test_vm.name),
"vm running")
| gpl-2.0 |
patrickhartling/protobuf | python/mox.py | 603 | 38237 | #!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is used for testing. The original is at:
# http://code.google.com/p/pymox/
"""Mox, an object-mocking framework for Python.
Mox works in the record-replay-verify paradigm. When you first create
a mock object, it is in record mode. You then programmatically set
the expected behavior of the mock object (what methods are to be
called on it, with what parameters, what they should return, and in
what order).
Once you have set up the expected mock behavior, you put it in replay
mode. Now the mock responds to method calls just as you told it to.
If an unexpected method (or an expected method with unexpected
parameters) is called, then an exception will be raised.
Once you are done interacting with the mock, you need to verify that
all the expected interactions occurred. (Maybe your code exited
prematurely without calling some cleanup method!) The verify phase
ensures that every expected method was called; otherwise, an exception
will be raised.
Suggested usage / workflow:
# Create Mox factory
my_mox = Mox()
# Create a mock data access object
mock_dao = my_mox.CreateMock(DAOClass)
# Set up expected behavior
mock_dao.RetrievePersonWithIdentifier('1').AndReturn(person)
mock_dao.DeletePerson(person)
# Put mocks in replay mode
my_mox.ReplayAll()
# Inject mock object and run test
controller.SetDao(mock_dao)
controller.DeletePersonById('1')
# Verify all methods were called as expected
my_mox.VerifyAll()
"""
from collections import deque
import re
import types
import unittest
import stubout
class Error(AssertionError):
"""Base exception for this module."""
pass
class ExpectedMethodCallsError(Error):
"""Raised when Verify() is called before all expected methods have been called
"""
def __init__(self, expected_methods):
"""Init exception.
Args:
# expected_methods: A sequence of MockMethod objects that should have been
# called.
expected_methods: [MockMethod]
Raises:
ValueError: if expected_methods contains no methods.
"""
if not expected_methods:
raise ValueError("There must be at least one expected method")
Error.__init__(self)
self._expected_methods = expected_methods
def __str__(self):
calls = "\n".join(["%3d. %s" % (i, m)
for i, m in enumerate(self._expected_methods)])
return "Verify: Expected methods never called:\n%s" % (calls,)
class UnexpectedMethodCallError(Error):
"""Raised when an unexpected method is called.
This can occur if a method is called with incorrect parameters, or out of the
specified order.
"""
def __init__(self, unexpected_method, expected):
"""Init exception.
Args:
# unexpected_method: MockMethod that was called but was not at the head of
# the expected_method queue.
# expected: MockMethod or UnorderedGroup the method should have
# been in.
unexpected_method: MockMethod
expected: MockMethod or UnorderedGroup
"""
Error.__init__(self)
self._unexpected_method = unexpected_method
self._expected = expected
def __str__(self):
return "Unexpected method call: %s. Expecting: %s" % \
(self._unexpected_method, self._expected)
class UnknownMethodCallError(Error):
"""Raised if an unknown method is requested of the mock object."""
def __init__(self, unknown_method_name):
"""Init exception.
Args:
# unknown_method_name: Method call that is not part of the mocked class's
# public interface.
unknown_method_name: str
"""
Error.__init__(self)
self._unknown_method_name = unknown_method_name
def __str__(self):
return "Method called is not a member of the object: %s" % \
self._unknown_method_name
class Mox(object):
"""Mox: a factory for creating mock objects."""
# A list of types that should be stubbed out with MockObjects (as
# opposed to MockAnythings).
_USE_MOCK_OBJECT = [types.ClassType, types.InstanceType, types.ModuleType,
types.ObjectType, types.TypeType]
def __init__(self):
"""Initialize a new Mox."""
self._mock_objects = []
self.stubs = stubout.StubOutForTesting()
def CreateMock(self, class_to_mock):
"""Create a new mock object.
Args:
# class_to_mock: the class to be mocked
class_to_mock: class
Returns:
MockObject that can be used as the class_to_mock would be.
"""
new_mock = MockObject(class_to_mock)
self._mock_objects.append(new_mock)
return new_mock
def CreateMockAnything(self):
"""Create a mock that will accept any method calls.
This does not enforce an interface.
"""
new_mock = MockAnything()
self._mock_objects.append(new_mock)
return new_mock
def ReplayAll(self):
"""Set all mock objects to replay mode."""
for mock_obj in self._mock_objects:
mock_obj._Replay()
def VerifyAll(self):
"""Call verify on all mock objects created."""
for mock_obj in self._mock_objects:
mock_obj._Verify()
def ResetAll(self):
"""Call reset on all mock objects. This does not unset stubs."""
for mock_obj in self._mock_objects:
mock_obj._Reset()
def StubOutWithMock(self, obj, attr_name, use_mock_anything=False):
"""Replace a method, attribute, etc. with a Mock.
This will replace a class or module with a MockObject, and everything else
(method, function, etc) with a MockAnything. This can be overridden to
always use a MockAnything by setting use_mock_anything to True.
Args:
obj: A Python object (class, module, instance, callable).
attr_name: str. The name of the attribute to replace with a mock.
use_mock_anything: bool. True if a MockAnything should be used regardless
of the type of attribute.
"""
attr_to_replace = getattr(obj, attr_name)
if type(attr_to_replace) in self._USE_MOCK_OBJECT and not use_mock_anything:
stub = self.CreateMock(attr_to_replace)
else:
stub = self.CreateMockAnything()
self.stubs.Set(obj, attr_name, stub)
def UnsetStubs(self):
"""Restore stubs to their original state."""
self.stubs.UnsetAll()
def Replay(*args):
"""Put mocks into Replay mode.
Args:
# args is any number of mocks to put into replay mode.
"""
for mock in args:
mock._Replay()
def Verify(*args):
"""Verify mocks.
Args:
# args is any number of mocks to be verified.
"""
for mock in args:
mock._Verify()
def Reset(*args):
"""Reset mocks.
Args:
# args is any number of mocks to be reset.
"""
for mock in args:
mock._Reset()
class MockAnything:
"""A mock that can be used to mock anything.
This is helpful for mocking classes that do not provide a public interface.
"""
def __init__(self):
""" """
self._Reset()
def __getattr__(self, method_name):
"""Intercept method calls on this object.
A new MockMethod is returned that is aware of the MockAnything's
state (record or replay). The call will be recorded or replayed
by the MockMethod's __call__.
Args:
# method name: the name of the method being called.
method_name: str
Returns:
A new MockMethod aware of MockAnything's state (record or replay).
"""
return self._CreateMockMethod(method_name)
def _CreateMockMethod(self, method_name):
"""Create a new mock method call and return it.
Args:
# method name: the name of the method being called.
method_name: str
Returns:
A new MockMethod aware of MockAnything's state (record or replay).
"""
return MockMethod(method_name, self._expected_calls_queue,
self._replay_mode)
def __nonzero__(self):
"""Return 1 for nonzero so the mock can be used as a conditional."""
return 1
def __eq__(self, rhs):
"""Provide custom logic to compare objects."""
return (isinstance(rhs, MockAnything) and
self._replay_mode == rhs._replay_mode and
self._expected_calls_queue == rhs._expected_calls_queue)
def __ne__(self, rhs):
"""Provide custom logic to compare objects."""
return not self == rhs
def _Replay(self):
"""Start replaying expected method calls."""
self._replay_mode = True
def _Verify(self):
"""Verify that all of the expected calls have been made.
Raises:
ExpectedMethodCallsError: if there are still more method calls in the
expected queue.
"""
# If the list of expected calls is not empty, raise an exception
if self._expected_calls_queue:
# The last MultipleTimesGroup is not popped from the queue.
if (len(self._expected_calls_queue) == 1 and
isinstance(self._expected_calls_queue[0], MultipleTimesGroup) and
self._expected_calls_queue[0].IsSatisfied()):
pass
else:
raise ExpectedMethodCallsError(self._expected_calls_queue)
def _Reset(self):
"""Reset the state of this mock to record mode with an empty queue."""
# Maintain a list of method calls we are expecting
self._expected_calls_queue = deque()
# Make sure we are in setup mode, not replay mode
self._replay_mode = False
class MockObject(MockAnything, object):
"""A mock object that simulates the public/protected interface of a class."""
def __init__(self, class_to_mock):
"""Initialize a mock object.
This determines the methods and properties of the class and stores them.
Args:
# class_to_mock: class to be mocked
class_to_mock: class
"""
# This is used to hack around the mixin/inheritance of MockAnything, which
# is not a proper object (it can be anything. :-)
MockAnything.__dict__['__init__'](self)
# Get a list of all the public and special methods we should mock.
self._known_methods = set()
self._known_vars = set()
self._class_to_mock = class_to_mock
for method in dir(class_to_mock):
if callable(getattr(class_to_mock, method)):
self._known_methods.add(method)
else:
self._known_vars.add(method)
def __getattr__(self, name):
"""Intercept attribute request on this object.
If the attribute is a public class variable, it will be returned and not
recorded as a call.
If the attribute is not a variable, it is handled like a method
call. The method name is checked against the set of mockable
methods, and a new MockMethod is returned that is aware of the
MockObject's state (record or replay). The call will be recorded
or replayed by the MockMethod's __call__.
Args:
# name: the name of the attribute being requested.
name: str
Returns:
Either a class variable or a new MockMethod that is aware of the state
of the mock (record or replay).
Raises:
UnknownMethodCallError if the MockObject does not mock the requested
method.
"""
if name in self._known_vars:
return getattr(self._class_to_mock, name)
if name in self._known_methods:
return self._CreateMockMethod(name)
raise UnknownMethodCallError(name)
def __eq__(self, rhs):
"""Provide custom logic to compare objects."""
return (isinstance(rhs, MockObject) and
self._class_to_mock == rhs._class_to_mock and
self._replay_mode == rhs._replay_mode and
self._expected_calls_queue == rhs._expected_calls_queue)
def __setitem__(self, key, value):
"""Provide custom logic for mocking classes that support item assignment.
Args:
key: Key to set the value for.
value: Value to set.
Returns:
Expected return value in replay mode. A MockMethod object for the
__setitem__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class does not support item assignment.
UnexpectedMethodCallError if the object does not expect the call to
__setitem__.
"""
setitem = self._class_to_mock.__dict__.get('__setitem__', None)
# Verify the class supports item assignment.
if setitem is None:
raise TypeError('object does not support item assignment')
# If we are in replay mode then simply call the mock __setitem__ method.
if self._replay_mode:
return MockMethod('__setitem__', self._expected_calls_queue,
self._replay_mode)(key, value)
# Otherwise, create a mock method __setitem__.
return self._CreateMockMethod('__setitem__')(key, value)
def __getitem__(self, key):
"""Provide custom logic for mocking classes that are subscriptable.
Args:
key: Key to return the value for.
Returns:
Expected return value in replay mode. A MockMethod object for the
__getitem__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class is not subscriptable.
UnexpectedMethodCallError if the object does not expect the call to
__setitem__.
"""
getitem = self._class_to_mock.__dict__.get('__getitem__', None)
# Verify the class supports item assignment.
if getitem is None:
raise TypeError('unsubscriptable object')
# If we are in replay mode then simply call the mock __getitem__ method.
if self._replay_mode:
return MockMethod('__getitem__', self._expected_calls_queue,
self._replay_mode)(key)
# Otherwise, create a mock method __getitem__.
return self._CreateMockMethod('__getitem__')(key)
def __call__(self, *params, **named_params):
"""Provide custom logic for mocking classes that are callable."""
# Verify the class we are mocking is callable
callable = self._class_to_mock.__dict__.get('__call__', None)
if callable is None:
raise TypeError('Not callable')
# Because the call is happening directly on this object instead of a method,
# the call on the mock method is made right here
mock_method = self._CreateMockMethod('__call__')
return mock_method(*params, **named_params)
@property
def __class__(self):
"""Return the class that is being mocked."""
return self._class_to_mock
class MockMethod(object):
"""Callable mock method.
A MockMethod should act exactly like the method it mocks, accepting parameters
and returning a value, or throwing an exception (as specified). When this
method is called, it can optionally verify whether the called method (name and
signature) matches the expected method.
"""
def __init__(self, method_name, call_queue, replay_mode):
"""Construct a new mock method.
Args:
# method_name: the name of the method
# call_queue: deque of calls, verify this call against the head, or add
# this call to the queue.
# replay_mode: False if we are recording, True if we are verifying calls
# against the call queue.
method_name: str
call_queue: list or deque
replay_mode: bool
"""
self._name = method_name
self._call_queue = call_queue
if not isinstance(call_queue, deque):
self._call_queue = deque(self._call_queue)
self._replay_mode = replay_mode
self._params = None
self._named_params = None
self._return_value = None
self._exception = None
self._side_effects = None
def __call__(self, *params, **named_params):
"""Log parameters and return the specified return value.
If the Mock(Anything/Object) associated with this call is in record mode,
this MockMethod will be pushed onto the expected call queue. If the mock
is in replay mode, this will pop a MockMethod off the top of the queue and
verify this call is equal to the expected call.
Raises:
UnexpectedMethodCall if this call is supposed to match an expected method
call and it does not.
"""
self._params = params
self._named_params = named_params
if not self._replay_mode:
self._call_queue.append(self)
return self
expected_method = self._VerifyMethodCall()
if expected_method._side_effects:
expected_method._side_effects(*params, **named_params)
if expected_method._exception:
raise expected_method._exception
return expected_method._return_value
def __getattr__(self, name):
"""Raise an AttributeError with a helpful message."""
raise AttributeError('MockMethod has no attribute "%s". '
'Did you remember to put your mocks in replay mode?' % name)
def _PopNextMethod(self):
"""Pop the next method from our call queue."""
try:
return self._call_queue.popleft()
except IndexError:
raise UnexpectedMethodCallError(self, None)
def _VerifyMethodCall(self):
"""Verify the called method is expected.
This can be an ordered method, or part of an unordered set.
Returns:
The expected mock method.
Raises:
UnexpectedMethodCall if the method called was not expected.
"""
expected = self._PopNextMethod()
# Loop here, because we might have a MethodGroup followed by another
# group.
while isinstance(expected, MethodGroup):
expected, method = expected.MethodCalled(self)
if method is not None:
return method
# This is a mock method, so just check equality.
if expected != self:
raise UnexpectedMethodCallError(self, expected)
return expected
def __str__(self):
params = ', '.join(
[repr(p) for p in self._params or []] +
['%s=%r' % x for x in sorted((self._named_params or {}).items())])
desc = "%s(%s) -> %r" % (self._name, params, self._return_value)
return desc
def __eq__(self, rhs):
"""Test whether this MockMethod is equivalent to another MockMethod.
Args:
# rhs: the right hand side of the test
rhs: MockMethod
"""
return (isinstance(rhs, MockMethod) and
self._name == rhs._name and
self._params == rhs._params and
self._named_params == rhs._named_params)
def __ne__(self, rhs):
"""Test whether this MockMethod is not equivalent to another MockMethod.
Args:
# rhs: the right hand side of the test
rhs: MockMethod
"""
return not self == rhs
def GetPossibleGroup(self):
"""Returns a possible group from the end of the call queue or None if no
other methods are on the stack.
"""
# Remove this method from the tail of the queue so we can add it to a group.
this_method = self._call_queue.pop()
assert this_method == self
# Determine if the tail of the queue is a group, or just a regular ordered
# mock method.
group = None
try:
group = self._call_queue[-1]
except IndexError:
pass
return group
def _CheckAndCreateNewGroup(self, group_name, group_class):
"""Checks if the last method (a possible group) is an instance of our
group_class. Adds the current method to this group or creates a new one.
Args:
group_name: the name of the group.
group_class: the class used to create instance of this new group
"""
group = self.GetPossibleGroup()
# If this is a group, and it is the correct group, add the method.
if isinstance(group, group_class) and group.group_name() == group_name:
group.AddMethod(self)
return self
# Create a new group and add the method.
new_group = group_class(group_name)
new_group.AddMethod(self)
self._call_queue.append(new_group)
return self
def InAnyOrder(self, group_name="default"):
"""Move this method into a group of unordered calls.
A group of unordered calls must be defined together, and must be executed
in full before the next expected method can be called. There can be
multiple groups that are expected serially, if they are given
different group names. The same group name can be reused if there is a
standard method call, or a group with a different name, spliced between
usages.
Args:
group_name: the name of the unordered group.
Returns:
self
"""
return self._CheckAndCreateNewGroup(group_name, UnorderedGroup)
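  # Hedged usage sketch (Update is a hypothetical mocked method). In record
  # mode, these expectations may later be replayed in any order, but all must
  # occur before the next ordered expectation:
  #   mock_dao.Update(1).InAnyOrder()
  #   mock_dao.Update(2).InAnyOrder()
  #   mock_dao.Update(3).InAnyOrder()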
def MultipleTimes(self, group_name="default"):
"""Move this method into group of calls which may be called multiple times.
A group of repeating calls must be defined together, and must be executed in
    full before the next expected method can be called.
Args:
group_name: the name of the unordered group.
Returns:
self
"""
return self._CheckAndCreateNewGroup(group_name, MultipleTimesGroup)
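  # Hedged usage sketch (Log is a hypothetical mocked method). The call must
  # happen at least once and may then repeat any number of times:
  #   mock_logger.Log(IgnoreArg()).MultipleTimes()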
def AndReturn(self, return_value):
"""Set the value to return when this method is called.
Args:
# return_value can be anything.
"""
self._return_value = return_value
return return_value
def AndRaise(self, exception):
"""Set the exception to raise when this method is called.
Args:
# exception: the exception to raise when this method is called.
exception: Exception
"""
self._exception = exception
def WithSideEffects(self, side_effects):
"""Set the side effects that are simulated when this method is called.
Args:
side_effects: A callable which modifies the parameters or other relevant
state which a given test case depends on.
Returns:
Self for chaining with AndReturn and AndRaise.
"""
self._side_effects = side_effects
return self
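# Hedged usage sketch (Fetch is a hypothetical mocked method): WithSideEffects
# returns self, so it chains with AndReturn; the callable mutates the
# caller-supplied list when the expectation is replayed.
def _example_with_side_effects(mock_fetcher):
  def fill(results):
    results.extend([1, 2, 3])
  mock_fetcher.Fetch(IsA(list)).WithSideEffects(fill).AndReturn(3)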
class Comparator:
"""Base class for all Mox comparators.
A Comparator can be used as a parameter to a mocked method when the exact
value is not known. For example, the code you are testing might build up a
  long SQL string that is passed to your mock DAO. You're only interested in
  whether the IN clause contains the proper primary keys, so you can set your mock
up as follows:
mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result)
Now whatever query is passed in must contain the string 'IN (1, 2, 4, 5)'.
A Comparator may replace one or more parameters, for example:
# return at most 10 rows
mock_dao.RunQuery(StrContains('SELECT'), 10)
or
# Return some non-deterministic number of rows
mock_dao.RunQuery(StrContains('SELECT'), IsA(int))
"""
def equals(self, rhs):
"""Special equals method that all comparators must implement.
Args:
rhs: any python object
"""
    raise NotImplementedError('method must be implemented by a subclass.')
def __eq__(self, rhs):
return self.equals(rhs)
def __ne__(self, rhs):
return not self.equals(rhs)
class IsA(Comparator):
"""This class wraps a basic Python type or class. It is used to verify
that a parameter is of the given type or class.
Example:
mock_dao.Connect(IsA(DbConnectInfo))
"""
def __init__(self, class_name):
"""Initialize IsA
Args:
class_name: basic python type or a class
"""
self._class_name = class_name
def equals(self, rhs):
"""Check to see if the RHS is an instance of class_name.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return isinstance(rhs, self._class_name)
except TypeError:
# Check raw types if there was a type error. This is helpful for
# things like cStringIO.StringIO.
return type(rhs) == type(self._class_name)
def __repr__(self):
return str(self._class_name)
class IsAlmost(Comparator):
"""Comparison class used to check whether a parameter is nearly equal
to a given value. Generally useful for floating point numbers.
Example mock_dao.SetTimeout((IsAlmost(3.9)))
"""
def __init__(self, float_value, places=7):
"""Initialize IsAlmost.
Args:
float_value: The value for making the comparison.
places: The number of decimal places to round to.
"""
self._float_value = float_value
self._places = places
def equals(self, rhs):
"""Check to see if RHS is almost equal to float_value
Args:
rhs: the value to compare to float_value
Returns:
bool
"""
try:
return round(rhs-self._float_value, self._places) == 0
except TypeError:
# This is probably because either float_value or rhs is not a number.
return False
def __repr__(self):
return str(self._float_value)
class StrContains(Comparator):
"""Comparison class used to check whether a substring exists in a
string parameter. This can be useful in mocking a database with SQL
passed in as a string parameter, for example.
Example:
mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result)
"""
def __init__(self, search_string):
"""Initialize.
Args:
# search_string: the string you are searching for
search_string: str
"""
self._search_string = search_string
def equals(self, rhs):
"""Check to see if the search_string is contained in the rhs string.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return rhs.find(self._search_string) > -1
except Exception:
return False
def __repr__(self):
return '<str containing \'%s\'>' % self._search_string
class Regex(Comparator):
"""Checks if a string matches a regular expression.
This uses a given regular expression to determine equality.
"""
def __init__(self, pattern, flags=0):
"""Initialize.
Args:
# pattern is the regular expression to search for
pattern: str
# flags passed to re.compile function as the second argument
flags: int
"""
self.regex = re.compile(pattern, flags=flags)
def equals(self, rhs):
"""Check to see if rhs matches regular expression pattern.
Returns:
bool
"""
return self.regex.search(rhs) is not None
def __repr__(self):
s = '<regular expression \'%s\'' % self.regex.pattern
if self.regex.flags:
s += ', flags=%d' % self.regex.flags
s += '>'
return s
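# Small self-checking sketch: Regex compares with re.search, so a match
# anywhere in the string succeeds.
def _example_regex():
  assert Regex(r'IN \(1, 2\)').equals('DELETE FROM t WHERE id IN (1, 2)')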
class In(Comparator):
"""Checks whether an item (or key) is in a list (or dict) parameter.
Example:
mock_dao.GetUsersInfo(In('expectedUserName')).AndReturn(mock_result)
"""
def __init__(self, key):
"""Initialize.
Args:
# key is any thing that could be in a list or a key in a dict
"""
self._key = key
def equals(self, rhs):
"""Check to see whether key is in rhs.
Args:
rhs: dict
Returns:
bool
"""
return self._key in rhs
def __repr__(self):
return '<sequence or map containing \'%s\'>' % self._key
class ContainsKeyValue(Comparator):
"""Checks whether a key/value pair is in a dict parameter.
Example:
mock_dao.UpdateUsers(ContainsKeyValue('stevepm', stevepm_user_info))
"""
def __init__(self, key, value):
"""Initialize.
Args:
# key: a key in a dict
# value: the corresponding value
"""
self._key = key
self._value = value
def equals(self, rhs):
"""Check whether the given key/value pair is in the rhs dict.
Returns:
bool
"""
try:
return rhs[self._key] == self._value
except Exception:
return False
def __repr__(self):
return '<map containing the entry \'%s: %s\'>' % (self._key, self._value)
class SameElementsAs(Comparator):
"""Checks whether iterables contain the same elements (ignoring order).
Example:
mock_dao.ProcessUsers(SameElementsAs('stevepm', 'salomaki'))
"""
def __init__(self, expected_seq):
"""Initialize.
Args:
expected_seq: a sequence
"""
self._expected_seq = expected_seq
def equals(self, actual_seq):
"""Check to see whether actual_seq has same elements as expected_seq.
Args:
actual_seq: sequence
Returns:
bool
"""
try:
expected = dict([(element, None) for element in self._expected_seq])
actual = dict([(element, None) for element in actual_seq])
except TypeError:
# Fall back to slower list-compare if any of the objects are unhashable.
expected = list(self._expected_seq)
actual = list(actual_seq)
expected.sort()
actual.sort()
return expected == actual
def __repr__(self):
return '<sequence with same elements as \'%s\'>' % self._expected_seq
class And(Comparator):
"""Evaluates one or more Comparators on RHS and returns an AND of the results.
"""
def __init__(self, *args):
"""Initialize.
Args:
*args: One or more Comparator
"""
self._comparators = args
def equals(self, rhs):
"""Checks whether all Comparators are equal to rhs.
Args:
# rhs: can be anything
Returns:
bool
"""
for comparator in self._comparators:
if not comparator.equals(rhs):
return False
return True
def __repr__(self):
return '<AND %s>' % str(self._comparators)
class Or(Comparator):
"""Evaluates one or more Comparators on RHS and returns an OR of the results.
"""
def __init__(self, *args):
"""Initialize.
Args:
*args: One or more Mox comparators
"""
self._comparators = args
def equals(self, rhs):
"""Checks whether any Comparator is equal to rhs.
Args:
# rhs: can be anything
Returns:
bool
"""
for comparator in self._comparators:
if comparator.equals(rhs):
return True
return False
def __repr__(self):
return '<OR %s>' % str(self._comparators)
class Func(Comparator):
"""Call a function that should verify the parameter passed in is correct.
You may need the ability to perform more advanced operations on the parameter
in order to validate it. You can use this to have a callable validate any
parameter. The callable should return either True or False.
Example:
def myParamValidator(param):
# Advanced logic here
return True
mock_dao.DoSomething(Func(myParamValidator), true)
"""
def __init__(self, func):
"""Initialize.
Args:
func: callable that takes one parameter and returns a bool
"""
self._func = func
def equals(self, rhs):
"""Test whether rhs passes the function test.
rhs is passed into func.
Args:
rhs: any python object
Returns:
the result of func(rhs)
"""
return self._func(rhs)
def __repr__(self):
return str(self._func)
class IgnoreArg(Comparator):
"""Ignore an argument.
This can be used when we don't care about an argument of a method call.
Example:
# Check if CastMagic is called with 3 as first arg and 'disappear' as third.
mymock.CastMagic(3, IgnoreArg(), 'disappear')
"""
def equals(self, unused_rhs):
"""Ignores arguments and returns True.
Args:
unused_rhs: any python object
Returns:
always returns True
"""
return True
def __repr__(self):
return '<IgnoreArg>'
class MethodGroup(object):
"""Base class containing common behaviour for MethodGroups."""
def __init__(self, group_name):
self._group_name = group_name
def group_name(self):
return self._group_name
def __str__(self):
return '<%s "%s">' % (self.__class__.__name__, self._group_name)
def AddMethod(self, mock_method):
raise NotImplementedError
def MethodCalled(self, mock_method):
raise NotImplementedError
def IsSatisfied(self):
raise NotImplementedError
class UnorderedGroup(MethodGroup):
"""UnorderedGroup holds a set of method calls that may occur in any order.
This construct is helpful for non-deterministic events, such as iterating
over the keys of a dict.
"""
def __init__(self, group_name):
super(UnorderedGroup, self).__init__(group_name)
self._methods = []
def AddMethod(self, mock_method):
"""Add a method to this group.
Args:
mock_method: A mock method to be added to this group.
"""
self._methods.append(mock_method)
def MethodCalled(self, mock_method):
"""Remove a method call from the group.
If the method is not in the set, an UnexpectedMethodCallError will be
raised.
Args:
mock_method: a mock method that should be equal to a method in the group.
Returns:
The mock method from the group
Raises:
UnexpectedMethodCallError if the mock_method was not in the group.
"""
# Check to see if this method exists, and if so, remove it from the set
# and return it.
for method in self._methods:
if method == mock_method:
# Remove the called mock_method instead of the method in the group.
# The called method will match any comparators when equality is checked
# during removal. The method in the group could pass a comparator to
# another comparator during the equality check.
self._methods.remove(mock_method)
# If this group is not empty, put it back at the head of the queue.
if not self.IsSatisfied():
mock_method._call_queue.appendleft(self)
return self, method
raise UnexpectedMethodCallError(mock_method, self)
def IsSatisfied(self):
"""Return True if there are not any methods in this group."""
return len(self._methods) == 0
class MultipleTimesGroup(MethodGroup):
"""MultipleTimesGroup holds methods that may be called any number of times.
Note: Each method must be called at least once.
  This is helpful if you don't know or care how many times a method is called.
"""
def __init__(self, group_name):
super(MultipleTimesGroup, self).__init__(group_name)
self._methods = set()
self._methods_called = set()
def AddMethod(self, mock_method):
"""Add a method to this group.
Args:
mock_method: A mock method to be added to this group.
"""
self._methods.add(mock_method)
def MethodCalled(self, mock_method):
"""Remove a method call from the group.
If the method is not in the set, an UnexpectedMethodCallError will be
raised.
Args:
mock_method: a mock method that should be equal to a method in the group.
Returns:
The mock method from the group
Raises:
UnexpectedMethodCallError if the mock_method was not in the group.
"""
# Check to see if this method exists, and if so add it to the set of
# called methods.
for method in self._methods:
if method == mock_method:
self._methods_called.add(mock_method)
# Always put this group back on top of the queue, because we don't know
# when we are done.
mock_method._call_queue.appendleft(self)
return self, method
if self.IsSatisfied():
      next_method = mock_method._PopNextMethod()
return next_method, None
else:
raise UnexpectedMethodCallError(mock_method, self)
def IsSatisfied(self):
"""Return True if all methods in this group are called at least once."""
# NOTE(psycho): We can't use the simple set difference here because we want
# to match different parameters which are considered the same e.g. IsA(str)
# and some string. This solution is O(n^2) but n should be small.
tmp = self._methods.copy()
for called in self._methods_called:
for expected in tmp:
if called == expected:
tmp.remove(expected)
if not tmp:
return True
break
return False
class MoxMetaTestBase(type):
"""Metaclass to add mox cleanup and verification to every test.
As the mox unit testing class is being constructed (MoxTestBase or a
subclass), this metaclass will modify all test functions to call the
CleanUpMox method of the test class after they finish. This means that
unstubbing and verifying will happen for every test with no additional code,
and any failures will result in test failures as opposed to errors.
"""
def __init__(cls, name, bases, d):
type.__init__(cls, name, bases, d)
# Also get all the attributes from the base classes to account for the
# case when the test class is not an immediate child of MoxTestBase.
for base in bases:
for attr_name in dir(base):
d[attr_name] = getattr(base, attr_name)
for func_name, func in d.items():
if func_name.startswith('test') and callable(func):
setattr(cls, func_name, MoxMetaTestBase.CleanUpTest(cls, func))
@staticmethod
def CleanUpTest(cls, func):
"""Adds Mox cleanup code to any MoxTestBase method.
Always unsets stubs after a test. Will verify all mocks for tests that
otherwise pass.
Args:
cls: MoxTestBase or subclass; the class whose test method we are altering.
func: method; the method of the MoxTestBase test class we wish to alter.
Returns:
The modified method.
"""
def new_method(self, *args, **kwargs):
mox_obj = getattr(self, 'mox', None)
cleanup_mox = False
if mox_obj and isinstance(mox_obj, Mox):
cleanup_mox = True
try:
func(self, *args, **kwargs)
finally:
if cleanup_mox:
mox_obj.UnsetStubs()
if cleanup_mox:
mox_obj.VerifyAll()
new_method.__name__ = func.__name__
new_method.__doc__ = func.__doc__
new_method.__module__ = func.__module__
return new_method
class MoxTestBase(unittest.TestCase):
"""Convenience test class to make stubbing easier.
Sets up a "mox" attribute which is an instance of Mox - any mox tests will
want this. Also automatically unsets any stubs and verifies that all mock
methods have been called at the end of each test, eliminating boilerplate
code.
"""
__metaclass__ = MoxMetaTestBase
def setUp(self):
self.mox = Mox()
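# Illustrative usage (hedged sketch): the test class MyTest and the
# os.path.exists stub below are hypothetical, but StubOutWithMock,
# ReplayAll, AndReturn and the automatic cleanup are the behaviors the
# classes above provide.
#
#   import os
#
#   class MyTest(MoxTestBase):
#       def test_exists(self):
#           self.mox.StubOutWithMock(os.path, 'exists')
#           os.path.exists('/tmp/x').AndReturn(True)
#           self.mox.ReplayAll()
#           self.assertTrue(os.path.exists('/tmp/x'))
#           # UnsetStubs() and VerifyAll() run automatically via
#           # MoxMetaTestBase, so no tearDown boilerplate is needed.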
| bsd-3-clause |
anudr01d/anudr01d.github.io | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/lexers/_asybuiltins.py | 369 | 27319 | # -*- coding: utf-8 -*-
"""
pygments.lexers._asybuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the asy-function names and asy-variable names of
Asymptote.
Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
ASYFUNCNAME = set([
'AND',
'Arc',
'ArcArrow',
'ArcArrows',
'Arrow',
'Arrows',
'Automatic',
'AvantGarde',
'BBox',
'BWRainbow',
'BWRainbow2',
'Bar',
'Bars',
'BeginArcArrow',
'BeginArrow',
'BeginBar',
'BeginDotMargin',
'BeginMargin',
'BeginPenMargin',
'Blank',
'Bookman',
'Bottom',
'BottomTop',
'Bounds',
'Break',
'Broken',
'BrokenLog',
'Ceil',
'Circle',
'CircleBarIntervalMarker',
'Cos',
'Courier',
'CrossIntervalMarker',
'DefaultFormat',
'DefaultLogFormat',
'Degrees',
'Dir',
'DotMargin',
'DotMargins',
'Dotted',
'Draw',
'Drawline',
'Embed',
'EndArcArrow',
'EndArrow',
'EndBar',
'EndDotMargin',
'EndMargin',
'EndPenMargin',
'Fill',
'FillDraw',
'Floor',
'Format',
'Full',
'Gaussian',
'Gaussrand',
'Gaussrandpair',
'Gradient',
'Grayscale',
'Helvetica',
'Hermite',
'HookHead',
'InOutTicks',
'InTicks',
'J',
'Label',
'Landscape',
'Left',
'LeftRight',
'LeftTicks',
'Legend',
'Linear',
'Link',
'Log',
'LogFormat',
'Margin',
'Margins',
'Mark',
'MidArcArrow',
'MidArrow',
'NOT',
'NewCenturySchoolBook',
'NoBox',
'NoMargin',
'NoModifier',
'NoTicks',
'NoTicks3',
'NoZero',
'NoZeroFormat',
'None',
'OR',
'OmitFormat',
'OmitTick',
'OutTicks',
'Ox',
'Oy',
'Palatino',
'PaletteTicks',
'Pen',
'PenMargin',
'PenMargins',
'Pentype',
'Portrait',
'RadialShade',
'Rainbow',
'Range',
'Relative',
'Right',
'RightTicks',
'Rotate',
'Round',
'SQR',
'Scale',
'ScaleX',
'ScaleY',
'ScaleZ',
'Seascape',
'Shift',
'Sin',
'Slant',
'Spline',
'StickIntervalMarker',
'Straight',
'Symbol',
'Tan',
'TeXify',
'Ticks',
'Ticks3',
'TildeIntervalMarker',
'TimesRoman',
'Top',
'TrueMargin',
'UnFill',
'UpsideDown',
'Wheel',
'X',
'XEquals',
'XOR',
'XY',
'XYEquals',
'XYZero',
'XYgrid',
'XZEquals',
'XZZero',
'XZero',
'XZgrid',
'Y',
'YEquals',
'YXgrid',
'YZ',
'YZEquals',
'YZZero',
'YZero',
'YZgrid',
'Z',
'ZX',
'ZXgrid',
'ZYgrid',
'ZapfChancery',
'ZapfDingbats',
'_cputime',
'_draw',
'_eval',
'_image',
'_labelpath',
'_projection',
'_strokepath',
'_texpath',
'aCos',
'aSin',
'aTan',
'abort',
'abs',
'accel',
'acos',
'acosh',
'acot',
'acsc',
'add',
'addArrow',
'addMargins',
'addSaveFunction',
'addnode',
'addnodes',
'addpenarc',
'addpenline',
'addseg',
'adjust',
'alias',
'align',
'all',
'altitude',
'angabscissa',
'angle',
'angpoint',
'animate',
'annotate',
'anticomplementary',
'antipedal',
'apply',
'approximate',
'arc',
'arcarrowsize',
'arccircle',
'arcdir',
'arcfromcenter',
'arcfromfocus',
'arclength',
'arcnodesnumber',
'arcpoint',
'arcsubtended',
'arcsubtendedcenter',
'arctime',
'arctopath',
'array',
'arrow',
'arrow2',
'arrowbase',
'arrowbasepoints',
'arrowsize',
'asec',
'asin',
'asinh',
'ask',
'assert',
'asy',
'asycode',
'asydir',
'asyfigure',
'asyfilecode',
'asyinclude',
'asywrite',
'atan',
'atan2',
'atanh',
'atbreakpoint',
'atexit',
'atime',
'attach',
'attract',
'atupdate',
'autoformat',
'autoscale',
'autoscale3',
'axes',
'axes3',
'axialshade',
'axis',
'axiscoverage',
'azimuth',
'babel',
'background',
'bangles',
'bar',
'barmarksize',
'barsize',
'basealign',
'baseline',
'bbox',
'beep',
'begin',
'beginclip',
'begingroup',
'beginpoint',
'between',
'bevel',
'bezier',
'bezierP',
'bezierPP',
'bezierPPP',
'bezulate',
'bibliography',
'bibliographystyle',
'binarytree',
'binarytreeNode',
'binomial',
'binput',
'bins',
'bisector',
'bisectorpoint',
'blend',
'boutput',
'box',
'bqe',
'breakpoint',
'breakpoints',
'brick',
'buildRestoreDefaults',
'buildRestoreThunk',
'buildcycle',
'bulletcolor',
'canonical',
'canonicalcartesiansystem',
'cartesiansystem',
'case1',
'case2',
'case3',
'cbrt',
'cd',
'ceil',
'center',
'centerToFocus',
'centroid',
'cevian',
'change2',
'changecoordsys',
'checkSegment',
'checkconditionlength',
'checker',
'checklengths',
'checkposition',
'checktriangle',
'choose',
'circle',
'circlebarframe',
'circlemarkradius',
'circlenodesnumber',
'circumcenter',
'circumcircle',
'clamped',
'clear',
'clip',
'clipdraw',
'close',
'cmyk',
'code',
'colatitude',
'collect',
'collinear',
'color',
'colorless',
'colors',
'colorspace',
'comma',
'compassmark',
'complement',
'complementary',
'concat',
'concurrent',
'cone',
'conic',
'conicnodesnumber',
'conictype',
'conj',
'connect',
'containmentTree',
'contains',
'contour',
'contour3',
'controlSpecifier',
'convert',
'coordinates',
'coordsys',
'copy',
'cos',
'cosh',
'cot',
'countIntersections',
'cputime',
'crop',
'cropcode',
'cross',
'crossframe',
'crosshatch',
'crossmarksize',
'csc',
'cubicroots',
'curabscissa',
'curlSpecifier',
'curpoint',
'currentarrow',
'currentexitfunction',
'currentmomarrow',
'currentpolarconicroutine',
'curve',
'cut',
'cutafter',
'cutbefore',
'cyclic',
'cylinder',
'debugger',
'deconstruct',
'defaultdir',
'defaultformat',
'defaultpen',
'defined',
'degenerate',
'degrees',
'delete',
'deletepreamble',
'determinant',
'diagonal',
'diamond',
'diffdiv',
'dir',
'dirSpecifier',
'dirtime',
'display',
'distance',
'divisors',
'do_overpaint',
'dot',
'dotframe',
'dotsize',
'downcase',
'draw',
'drawAll',
'drawDoubleLine',
'drawFermion',
'drawGhost',
'drawGluon',
'drawMomArrow',
'drawPhoton',
'drawScalar',
'drawVertex',
'drawVertexBox',
'drawVertexBoxO',
'drawVertexBoxX',
'drawVertexO',
'drawVertexOX',
'drawVertexTriangle',
'drawVertexTriangleO',
'drawVertexX',
'drawarrow',
'drawarrow2',
'drawline',
'drawtick',
'duplicate',
'elle',
'ellipse',
'ellipsenodesnumber',
'embed',
'embed3',
'empty',
'enclose',
'end',
'endScript',
'endclip',
'endgroup',
'endl',
'endpoint',
'endpoints',
'eof',
'eol',
'equation',
'equations',
'erase',
'erasestep',
'erf',
'erfc',
'error',
'errorbar',
'errorbars',
'eval',
'excenter',
'excircle',
'exit',
'exitXasyMode',
'exitfunction',
'exp',
'expfactors',
'expi',
'expm1',
'exradius',
'extend',
'extension',
'extouch',
'fabs',
'factorial',
'fermat',
'fft',
'fhorner',
'figure',
'file',
'filecode',
'fill',
'filldraw',
'filloutside',
'fillrule',
'filltype',
'find',
'finite',
'finiteDifferenceJacobian',
'firstcut',
'firstframe',
'fit',
'fit2',
'fixedscaling',
'floor',
'flush',
'fmdefaults',
'fmod',
'focusToCenter',
'font',
'fontcommand',
'fontsize',
'foot',
'format',
'frac',
'frequency',
'fromCenter',
'fromFocus',
'fspline',
'functionshade',
'gamma',
'generate_random_backtrace',
'generateticks',
'gergonne',
'getc',
'getint',
'getpair',
'getreal',
'getstring',
'gettriple',
'gluon',
'gouraudshade',
'graph',
'graphic',
'gray',
'grestore',
'grid',
'grid3',
'gsave',
'halfbox',
'hatch',
'hdiffdiv',
'hermite',
'hex',
'histogram',
'history',
'hline',
'hprojection',
'hsv',
'hyperbola',
'hyperbolanodesnumber',
'hyperlink',
'hypot',
'identity',
'image',
'incenter',
'incentral',
'incircle',
'increasing',
'incrementposition',
'indexedTransform',
'indexedfigure',
'initXasyMode',
'initdefaults',
'input',
'inradius',
'insert',
'inside',
'integrate',
'interactive',
'interior',
'interp',
'interpolate',
'intersect',
'intersection',
'intersectionpoint',
'intersectionpoints',
'intersections',
'intouch',
'inverse',
'inversion',
'invisible',
'is3D',
'isDuplicate',
'isogonal',
'isogonalconjugate',
'isotomic',
'isotomicconjugate',
'isparabola',
'italic',
'item',
'key',
'kurtosis',
'kurtosisexcess',
'label',
'labelaxis',
'labelmargin',
'labelpath',
'labels',
'labeltick',
'labelx',
'labelx3',
'labely',
'labely3',
'labelz',
'labelz3',
'lastcut',
'latex',
'latitude',
'latticeshade',
'layer',
'layout',
'ldexp',
'leastsquares',
'legend',
'legenditem',
'length',
'lift',
'light',
'limits',
'line',
'linear',
'linecap',
'lineinversion',
'linejoin',
'linemargin',
'lineskip',
'linetype',
'linewidth',
'link',
'list',
'lm_enorm',
'lm_evaluate_default',
'lm_lmdif',
'lm_lmpar',
'lm_minimize',
'lm_print_default',
'lm_print_quiet',
'lm_qrfac',
'lm_qrsolv',
'locale',
'locate',
'locatefile',
'location',
'log',
'log10',
'log1p',
'logaxiscoverage',
'longitude',
'lookup',
'magnetize',
'makeNode',
'makedraw',
'makepen',
'map',
'margin',
'markangle',
'markangleradius',
'markanglespace',
'markarc',
'marker',
'markinterval',
'marknodes',
'markrightangle',
'markuniform',
'mass',
'masscenter',
'massformat',
'math',
'max',
'max3',
'maxbezier',
'maxbound',
'maxcoords',
'maxlength',
'maxratio',
'maxtimes',
'mean',
'medial',
'median',
'midpoint',
'min',
'min3',
'minbezier',
'minbound',
'minipage',
'minratio',
'mintimes',
'miterlimit',
'momArrowPath',
'momarrowsize',
'monotonic',
'multifigure',
'nativeformat',
'natural',
'needshipout',
'newl',
'newpage',
'newslide',
'newton',
'newtree',
'nextframe',
'nextnormal',
'nextpage',
'nib',
'nodabscissa',
'none',
'norm',
'normalvideo',
'notaknot',
'nowarn',
'numberpage',
'nurb',
'object',
'offset',
'onpath',
'opacity',
'opposite',
'orientation',
'orig_circlenodesnumber',
'orig_circlenodesnumber1',
'orig_draw',
'orig_ellipsenodesnumber',
'orig_ellipsenodesnumber1',
'orig_hyperbolanodesnumber',
'orig_parabolanodesnumber',
'origin',
'orthic',
'orthocentercenter',
'outformat',
'outline',
'outprefix',
'output',
'overloadedMessage',
'overwrite',
'pack',
'pad',
'pairs',
'palette',
'parabola',
'parabolanodesnumber',
'parallel',
'partialsum',
'path',
'path3',
'pattern',
'pause',
'pdf',
'pedal',
'periodic',
'perp',
'perpendicular',
'perpendicularmark',
'phantom',
'phi1',
'phi2',
'phi3',
'photon',
'piecewisestraight',
'point',
'polar',
'polarconicroutine',
'polargraph',
'polygon',
'postcontrol',
'postscript',
'pow10',
'ppoint',
'prc',
'prc0',
'precision',
'precontrol',
'prepend',
'print_random_addresses',
'project',
'projection',
'purge',
'pwhermite',
'quadrant',
'quadraticroots',
'quantize',
'quarticroots',
'quotient',
'radialshade',
'radians',
'radicalcenter',
'radicalline',
'radius',
'rand',
'randompath',
'rd',
'readline',
'realmult',
'realquarticroots',
'rectangle',
'rectangular',
'rectify',
'reflect',
'relabscissa',
'relative',
'relativedistance',
'reldir',
'relpoint',
'reltime',
'remainder',
'remark',
'removeDuplicates',
'rename',
'replace',
'report',
'resetdefaultpen',
'restore',
'restoredefaults',
'reverse',
'reversevideo',
'rf',
'rfind',
'rgb',
'rgba',
'rgbint',
'rms',
'rotate',
'rotateO',
'rotation',
'round',
'roundbox',
'roundedpath',
'roundrectangle',
'samecoordsys',
'sameside',
'sample',
'save',
'savedefaults',
'saveline',
'scale',
'scale3',
'scaleO',
'scaleT',
'scaleless',
'scientific',
'search',
'searchtree',
'sec',
'secondaryX',
'secondaryY',
'seconds',
'section',
'sector',
'seek',
'seekeof',
'segment',
'sequence',
'setpens',
'sgn',
'sgnd',
'sharpangle',
'sharpdegrees',
'shift',
'shiftless',
'shipout',
'shipout3',
'show',
'side',
'simeq',
'simpson',
'sin',
'single',
'sinh',
'size',
'size3',
'skewness',
'skip',
'slant',
'sleep',
'slope',
'slopefield',
'solve',
'solveBVP',
'sort',
'sourceline',
'sphere',
'split',
'sqrt',
'square',
'srand',
'standardizecoordsys',
'startScript',
'startTrembling',
'stdev',
'step',
'stickframe',
'stickmarksize',
'stickmarkspace',
'stop',
'straight',
'straightness',
'string',
'stripdirectory',
'stripextension',
'stripfile',
'strokepath',
'subdivide',
'subitem',
'subpath',
'substr',
'sum',
'surface',
'symmedial',
'symmedian',
'system',
'tab',
'tableau',
'tan',
'tangent',
'tangential',
'tangents',
'tanh',
'tell',
'tensionSpecifier',
'tensorshade',
'tex',
'texcolor',
'texify',
'texpath',
'texpreamble',
'texreset',
'texshipout',
'texsize',
'textpath',
'thick',
'thin',
'tick',
'tickMax',
'tickMax3',
'tickMin',
'tickMin3',
'ticklabelshift',
'ticklocate',
'tildeframe',
'tildemarksize',
'tile',
'tiling',
'time',
'times',
'title',
'titlepage',
'topbox',
'transform',
'transformation',
'transpose',
'tremble',
'trembleFuzz',
'tremble_circlenodesnumber',
'tremble_circlenodesnumber1',
'tremble_draw',
'tremble_ellipsenodesnumber',
'tremble_ellipsenodesnumber1',
'tremble_hyperbolanodesnumber',
'tremble_marknodes',
'tremble_markuniform',
'tremble_parabolanodesnumber',
'triangle',
'triangleAbc',
'triangleabc',
'triangulate',
'tricoef',
'tridiagonal',
'trilinear',
'trim',
'trueMagnetize',
'truepoint',
'tube',
'uncycle',
'unfill',
'uniform',
'unit',
'unitrand',
'unitsize',
'unityroot',
'unstraighten',
'upcase',
'updatefunction',
'uperiodic',
'upscale',
'uptodate',
'usepackage',
'usersetting',
'usetypescript',
'usleep',
'value',
'variance',
'variancebiased',
'vbox',
'vector',
'vectorfield',
'verbatim',
'view',
'vline',
'vperiodic',
'vprojection',
'warn',
'warning',
'windingnumber',
'write',
'xaxis',
'xaxis3',
'xaxis3At',
'xaxisAt',
'xequals',
'xinput',
'xlimits',
'xoutput',
'xpart',
'xscale',
'xscaleO',
'xtick',
'xtick3',
'xtrans',
'yaxis',
'yaxis3',
'yaxis3At',
'yaxisAt',
'yequals',
'ylimits',
'ypart',
'yscale',
'yscaleO',
'ytick',
'ytick3',
'ytrans',
'zaxis3',
'zaxis3At',
'zero',
'zero3',
'zlimits',
'zpart',
'ztick',
'ztick3',
'ztrans'
])
ASYVARNAME = set([
'AliceBlue',
'Align',
'Allow',
'AntiqueWhite',
'Apricot',
'Aqua',
'Aquamarine',
'Aspect',
'Azure',
'BeginPoint',
'Beige',
'Bisque',
'Bittersweet',
'Black',
'BlanchedAlmond',
'Blue',
'BlueGreen',
'BlueViolet',
'Both',
'Break',
'BrickRed',
'Brown',
'BurlyWood',
'BurntOrange',
'CCW',
'CW',
'CadetBlue',
'CarnationPink',
'Center',
'Centered',
'Cerulean',
'Chartreuse',
'Chocolate',
'Coeff',
'Coral',
'CornflowerBlue',
'Cornsilk',
'Crimson',
'Crop',
'Cyan',
'Dandelion',
'DarkBlue',
'DarkCyan',
'DarkGoldenrod',
'DarkGray',
'DarkGreen',
'DarkKhaki',
'DarkMagenta',
'DarkOliveGreen',
'DarkOrange',
'DarkOrchid',
'DarkRed',
'DarkSalmon',
'DarkSeaGreen',
'DarkSlateBlue',
'DarkSlateGray',
'DarkTurquoise',
'DarkViolet',
'DeepPink',
'DeepSkyBlue',
'DefaultHead',
'DimGray',
'DodgerBlue',
'Dotted',
'Draw',
'E',
'ENE',
'EPS',
'ESE',
'E_Euler',
'E_PC',
'E_RK2',
'E_RK3BS',
'Emerald',
'EndPoint',
'Euler',
'Fill',
'FillDraw',
'FireBrick',
'FloralWhite',
'ForestGreen',
'Fuchsia',
'Gainsboro',
'GhostWhite',
'Gold',
'Goldenrod',
'Gray',
'Green',
'GreenYellow',
'Honeydew',
'HookHead',
'Horizontal',
'HotPink',
'I',
'IgnoreAspect',
'IndianRed',
'Indigo',
'Ivory',
'JOIN_IN',
'JOIN_OUT',
'JungleGreen',
'Khaki',
'LM_DWARF',
'LM_MACHEP',
'LM_SQRT_DWARF',
'LM_SQRT_GIANT',
'LM_USERTOL',
'Label',
'Lavender',
'LavenderBlush',
'LawnGreen',
'LeftJustified',
'LeftSide',
'LemonChiffon',
'LightBlue',
'LightCoral',
'LightCyan',
'LightGoldenrodYellow',
'LightGreen',
'LightGrey',
'LightPink',
'LightSalmon',
'LightSeaGreen',
'LightSkyBlue',
'LightSlateGray',
'LightSteelBlue',
'LightYellow',
'Lime',
'LimeGreen',
'Linear',
'Linen',
'Log',
'Logarithmic',
'Magenta',
'Mahogany',
'Mark',
'MarkFill',
'Maroon',
'Max',
'MediumAquamarine',
'MediumBlue',
'MediumOrchid',
'MediumPurple',
'MediumSeaGreen',
'MediumSlateBlue',
'MediumSpringGreen',
'MediumTurquoise',
'MediumVioletRed',
'Melon',
'MidPoint',
'MidnightBlue',
'Min',
'MintCream',
'MistyRose',
'Moccasin',
'Move',
'MoveQuiet',
'Mulberry',
'N',
'NE',
'NNE',
'NNW',
'NW',
'NavajoWhite',
'Navy',
'NavyBlue',
'NoAlign',
'NoCrop',
'NoFill',
'NoSide',
'OldLace',
'Olive',
'OliveDrab',
'OliveGreen',
'Orange',
'OrangeRed',
'Orchid',
'Ox',
'Oy',
'PC',
'PaleGoldenrod',
'PaleGreen',
'PaleTurquoise',
'PaleVioletRed',
'PapayaWhip',
'Peach',
'PeachPuff',
'Periwinkle',
'Peru',
'PineGreen',
'Pink',
'Plum',
'PowderBlue',
'ProcessBlue',
'Purple',
'RK2',
'RK3',
'RK3BS',
'RK4',
'RK5',
'RK5DP',
'RK5F',
'RawSienna',
'Red',
'RedOrange',
'RedViolet',
'Rhodamine',
'RightJustified',
'RightSide',
'RosyBrown',
'RoyalBlue',
'RoyalPurple',
'RubineRed',
'S',
'SE',
'SSE',
'SSW',
'SW',
'SaddleBrown',
'Salmon',
'SandyBrown',
'SeaGreen',
'Seashell',
'Sepia',
'Sienna',
'Silver',
'SimpleHead',
'SkyBlue',
'SlateBlue',
'SlateGray',
'Snow',
'SpringGreen',
'SteelBlue',
'Suppress',
'SuppressQuiet',
'Tan',
'TeXHead',
'Teal',
'TealBlue',
'Thistle',
'Ticksize',
'Tomato',
'Turquoise',
'UnFill',
'VERSION',
'Value',
'Vertical',
'Violet',
'VioletRed',
'W',
'WNW',
'WSW',
'Wheat',
'White',
'WhiteSmoke',
'WildStrawberry',
'XYAlign',
'YAlign',
'Yellow',
'YellowGreen',
'YellowOrange',
'addpenarc',
'addpenline',
'align',
'allowstepping',
'angularsystem',
'animationdelay',
'appendsuffix',
'arcarrowangle',
'arcarrowfactor',
'arrow2sizelimit',
'arrowangle',
'arrowbarb',
'arrowdir',
'arrowfactor',
'arrowhookfactor',
'arrowlength',
'arrowsizelimit',
'arrowtexfactor',
'authorpen',
'axis',
'axiscoverage',
'axislabelfactor',
'background',
'backgroundcolor',
'backgroundpen',
'barfactor',
'barmarksizefactor',
'basealign',
'baselinetemplate',
'beveljoin',
'bigvertexpen',
'bigvertexsize',
'black',
'blue',
'bm',
'bottom',
'bp',
'brown',
'bullet',
'byfoci',
'byvertices',
'camerafactor',
'chartreuse',
'circlemarkradiusfactor',
'circlenodesnumberfactor',
'circleprecision',
'circlescale',
'cm',
'codefile',
'codepen',
'codeskip',
'colorPen',
'coloredNodes',
'coloredSegments',
'conditionlength',
'conicnodesfactor',
'count',
'cputimeformat',
'crossmarksizefactor',
'currentcoordsys',
'currentlight',
'currentpatterns',
'currentpen',
'currentpicture',
'currentposition',
'currentprojection',
'curvilinearsystem',
'cuttings',
'cyan',
'darkblue',
'darkbrown',
'darkcyan',
'darkgray',
'darkgreen',
'darkgrey',
'darkmagenta',
'darkolive',
'darkred',
'dashdotted',
'dashed',
'datepen',
'dateskip',
'debuggerlines',
'debugging',
'deepblue',
'deepcyan',
'deepgray',
'deepgreen',
'deepgrey',
'deepmagenta',
'deepred',
'default',
'defaultControl',
'defaultS',
'defaultbackpen',
'defaultcoordsys',
'defaultfilename',
'defaultformat',
'defaultmassformat',
'defaultpen',
'diagnostics',
'differentlengths',
'dot',
'dotfactor',
'dotframe',
'dotted',
'doublelinepen',
'doublelinespacing',
'down',
'duplicateFuzz',
'ellipsenodesnumberfactor',
'eps',
'epsgeo',
'epsilon',
'evenodd',
'extendcap',
'fermionpen',
'figureborder',
'figuremattpen',
'firstnode',
'firststep',
'foregroundcolor',
'fuchsia',
'fuzz',
'gapfactor',
'ghostpen',
'gluonamplitude',
'gluonpen',
'gluonratio',
'gray',
'green',
'grey',
'hatchepsilon',
'havepagenumber',
'heavyblue',
'heavycyan',
'heavygray',
'heavygreen',
'heavygrey',
'heavymagenta',
'heavyred',
'hline',
'hwratio',
'hyperbolanodesnumberfactor',
'identity4',
'ignore',
'inXasyMode',
'inch',
'inches',
'includegraphicscommand',
'inf',
'infinity',
'institutionpen',
'intMax',
'intMin',
'invert',
'invisible',
'itempen',
'itemskip',
'itemstep',
'labelmargin',
'landscape',
'lastnode',
'left',
'legendhskip',
'legendlinelength',
'legendmargin',
'legendmarkersize',
'legendmaxrelativewidth',
'legendvskip',
'lightblue',
'lightcyan',
'lightgray',
'lightgreen',
'lightgrey',
'lightmagenta',
'lightolive',
'lightred',
'lightyellow',
'linemargin',
'lm_infmsg',
'lm_shortmsg',
'longdashdotted',
'longdashed',
'magenta',
'magneticPoints',
'magneticRadius',
'mantissaBits',
'markangleradius',
'markangleradiusfactor',
'markanglespace',
'markanglespacefactor',
'mediumblue',
'mediumcyan',
'mediumgray',
'mediumgreen',
'mediumgrey',
'mediummagenta',
'mediumred',
'mediumyellow',
'middle',
'minDistDefault',
'minblockheight',
'minblockwidth',
'mincirclediameter',
'minipagemargin',
'minipagewidth',
'minvertexangle',
'miterjoin',
'mm',
'momarrowfactor',
'momarrowlength',
'momarrowmargin',
'momarrowoffset',
'momarrowpen',
'monoPen',
'morepoints',
'nCircle',
'newbulletcolor',
'ngraph',
'nil',
'nmesh',
'nobasealign',
'nodeMarginDefault',
'nodesystem',
'nomarker',
'nopoint',
'noprimary',
'nullpath',
'nullpen',
'numarray',
'ocgindex',
'oldbulletcolor',
'olive',
'orange',
'origin',
'overpaint',
'page',
'pageheight',
'pagemargin',
'pagenumberalign',
'pagenumberpen',
'pagenumberposition',
'pagewidth',
'paleblue',
'palecyan',
'palegray',
'palegreen',
'palegrey',
'palemagenta',
'palered',
'paleyellow',
'parabolanodesnumberfactor',
'perpfactor',
'phi',
'photonamplitude',
'photonpen',
'photonratio',
'pi',
'pink',
'plain',
'plus',
'preamblenodes',
'pt',
'purple',
'r3',
'r4a',
'r4b',
'randMax',
'realDigits',
'realEpsilon',
'realMax',
'realMin',
'red',
'relativesystem',
'reverse',
'right',
'roundcap',
'roundjoin',
'royalblue',
'salmon',
'saveFunctions',
'scalarpen',
'sequencereal',
'settings',
'shipped',
'signedtrailingzero',
'solid',
'springgreen',
'sqrtEpsilon',
'squarecap',
'squarepen',
'startposition',
'stdin',
'stdout',
'stepfactor',
'stepfraction',
'steppagenumberpen',
'stepping',
'stickframe',
'stickmarksizefactor',
'stickmarkspacefactor',
'textpen',
'ticksize',
'tildeframe',
'tildemarksizefactor',
'tinv',
'titlealign',
'titlepagepen',
'titlepageposition',
'titlepen',
'titleskip',
'top',
'trailingzero',
'treeLevelStep',
'treeMinNodeWidth',
'treeNodeStep',
'trembleAngle',
'trembleFrequency',
'trembleRandom',
'tremblingMode',
'undefined',
'unitcircle',
'unitsquare',
'up',
'urlpen',
'urlskip',
'version',
'vertexpen',
'vertexsize',
'viewportmargin',
'viewportsize',
'vline',
'white',
'wye',
'xformStack',
'yellow',
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
])
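# Illustrative consumer (hedged sketch): the real Asymptote lexer lives
# elsewhere in pygments and its exact token mapping may differ; classify()
# below is a hypothetical helper showing how these sets are typically used.
#
#   from pygments.token import Name
#
#   def classify(word):
#       if word in ASYFUNCNAME:
#           return Name.Function
#       if word in ASYVARNAME:
#           return Name.Variable
#       return Name.Other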
| mit |
jumpstarter-io/horizon | openstack_dashboard/dashboards/project/data_processing/jobs/tests.py | 11 | 1751 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:data_processing.jobs:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.jobs:details', args=['id'])
class DataProcessingJobTests(test.TestCase):
@test.create_stubs({api.sahara: ('job_list',)})
def test_index(self):
api.sahara.job_list(IsA(http.HttpRequest)) \
.AndReturn(self.jobs.list())
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res,
'project/data_processing.jobs/jobs.html')
self.assertContains(res, 'Jobs')
self.assertContains(res, 'Name')
@test.create_stubs({api.sahara: ('job_get',)})
def test_details(self):
api.sahara.job_get(IsA(http.HttpRequest), IsA(unicode)) \
.AndReturn(self.jobs.list()[0])
self.mox.ReplayAll()
res = self.client.get(DETAILS_URL)
self.assertTemplateUsed(res,
'project/data_processing.jobs/details.html')
self.assertContains(res, 'pigjob')
| apache-2.0 |
Weicong-Lin/pymo-global | android/pgs4a-0.9.6/python-install/lib/python2.7/encodings/cp858.py | 416 | 34271 | """ Python Character Mapping Codec for CP858, modified from cp850.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp858',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
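# Illustrative note (hedged): the stdlib encodings package imports this
# module through its codec search function and calls getregentry() once,
# caching the returned CodecInfo, roughly:
#
#   import encodings
#   info = encodings.search_function('cp858')   # -> the CodecInfo above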
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x00ae, # REGISTERED SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00b7: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
0x00b8: 0x00a9, # COPYRIGHT SIGN
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x00a2, # CENT SIGN
0x00be: 0x00a5, # YEN SIGN
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
0x00c7: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x00f0, # LATIN SMALL LETTER ETH
0x00d1: 0x00d0, # LATIN CAPITAL LETTER ETH
0x00d2: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00d4: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
0x00d5: 0x20ac, # EURO SIGN
0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00d8: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x00a6, # BROKEN BAR
0x00de: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00e3: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x00fe, # LATIN SMALL LETTER THORN
0x00e8: 0x00de, # LATIN CAPITAL LETTER THORN
0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00ea: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00eb: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00ee: 0x00af, # MACRON
0x00ef: 0x00b4, # ACUTE ACCENT
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2017, # DOUBLE LOW LINE
0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
0x00f4: 0x00b6, # PILCROW SIGN
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x00b8, # CEDILLA
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x00a8, # DIAERESIS
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x00b9, # SUPERSCRIPT ONE
0x00fc: 0x00b3, # SUPERSCRIPT THREE
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
u'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xec' # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
u'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
u'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
u'\xff' # 0x0098 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
u'\xa3' # 0x009c -> POUND SIGN
u'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd7' # 0x009e -> MULTIPLICATION SIGN
u'\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\xae' # 0x00a9 -> REGISTERED SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc0' # 0x00b7 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xa9' # 0x00b8 -> COPYRIGHT SIGN
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\xa2' # 0x00bd -> CENT SIGN
u'\xa5' # 0x00be -> YEN SIGN
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\xe3' # 0x00c6 -> LATIN SMALL LETTER A WITH TILDE
u'\xc3' # 0x00c7 -> LATIN CAPITAL LETTER A WITH TILDE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\xf0' # 0x00d0 -> LATIN SMALL LETTER ETH
u'\xd0' # 0x00d1 -> LATIN CAPITAL LETTER ETH
u'\xca' # 0x00d2 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x00d4 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\u20ac' # 0x00d5 -> EURO SIGN
u'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x00d8 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\xa6' # 0x00dd -> BROKEN BAR
u'\xcc' # 0x00de -> LATIN CAPITAL LETTER I WITH GRAVE
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd2' # 0x00e3 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
u'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\xfe' # 0x00e7 -> LATIN SMALL LETTER THORN
u'\xde' # 0x00e8 -> LATIN CAPITAL LETTER THORN
u'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0x00ea -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0x00eb -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
u'\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xaf' # 0x00ee -> MACRON
u'\xb4' # 0x00ef -> ACUTE ACCENT
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u2017' # 0x00f2 -> DOUBLE LOW LINE
u'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
u'\xb6' # 0x00f4 -> PILCROW SIGN
u'\xa7' # 0x00f5 -> SECTION SIGN
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\xb8' # 0x00f7 -> CEDILLA
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\xa8' # 0x00f9 -> DIAERESIS
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\xb9' # 0x00fb -> SUPERSCRIPT ONE
u'\xb3' # 0x00fc -> SUPERSCRIPT THREE
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a2: 0x00bd, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00a4: 0x00cf, # CURRENCY SIGN
0x00a5: 0x00be, # YEN SIGN
0x00a6: 0x00dd, # BROKEN BAR
0x00a7: 0x00f5, # SECTION SIGN
0x00a8: 0x00f9, # DIAERESIS
0x00a9: 0x00b8, # COPYRIGHT SIGN
0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00ad: 0x00f0, # SOFT HYPHEN
0x00ae: 0x00a9, # REGISTERED SIGN
0x00af: 0x00ee, # MACRON
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b3: 0x00fc, # SUPERSCRIPT THREE
0x00b4: 0x00ef, # ACUTE ACCENT
0x00b5: 0x00e6, # MICRO SIGN
0x00b6: 0x00f4, # PILCROW SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00b8: 0x00f7, # CEDILLA
0x00b9: 0x00fb, # SUPERSCRIPT ONE
0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00be: 0x00f3, # VULGAR FRACTION THREE QUARTERS
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c0: 0x00b7, # LATIN CAPITAL LETTER A WITH GRAVE
0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00c3: 0x00c7, # LATIN CAPITAL LETTER A WITH TILDE
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c8: 0x00d4, # LATIN CAPITAL LETTER E WITH GRAVE
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00ca: 0x00d2, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00cc: 0x00de, # LATIN CAPITAL LETTER I WITH GRAVE
0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00cf: 0x00d8, # LATIN CAPITAL LETTER I WITH DIAERESIS
0x00d0: 0x00d1, # LATIN CAPITAL LETTER ETH
0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
0x00d2: 0x00e3, # LATIN CAPITAL LETTER O WITH GRAVE
0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00d5: 0x00e5, # LATIN CAPITAL LETTER O WITH TILDE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00d7: 0x009e, # MULTIPLICATION SIGN
0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
0x00d9: 0x00eb, # LATIN CAPITAL LETTER U WITH GRAVE
0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
0x00db: 0x00ea, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00dd: 0x00ed, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00de: 0x00e8, # LATIN CAPITAL LETTER THORN
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e3: 0x00c6, # LATIN SMALL LETTER A WITH TILDE
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f0: 0x00d0, # LATIN SMALL LETTER ETH
0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f5: 0x00e4, # LATIN SMALL LETTER O WITH TILDE
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00fd: 0x00ec, # LATIN SMALL LETTER Y WITH ACUTE
0x00fe: 0x00e7, # LATIN SMALL LETTER THORN
0x00ff: 0x0098, # LATIN SMALL LETTER Y WITH DIAERESIS
0x20ac: 0x00d5, # EURO SIGN
0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK
0x2017: 0x00f2, # DOUBLE LOW LINE
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
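# Illustrative round-trip (hedged; assumes the encodings package has
# registered this module under the name 'cp858'). CP858 is CP850 with the
# euro sign at 0xD5, as the tables above show:
#
#   >>> u'\u20ac'.encode('cp858')
#   '\xd5'
#   >>> '\xd5'.decode('cp858')
#   u'\u20ac'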
| mit |
rombie/contrail-controller | src/dns/scripts/del_virtual_dns.py | 22 | 2658 | #!/usr/bin/python
#
#Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import argparse
import ConfigParser
from provision_dns import DnsProvisioner
from requests.exceptions import ConnectionError
class DelVirtualDns(object):
def __init__(self, args_str = None):
self._args = None
if not args_str:
args_str = ' '.join(sys.argv[1:])
self._parse_args(args_str)
try:
dp_obj = DnsProvisioner(self._args.admin_user, self._args.admin_password,
self._args.admin_tenant_name,
self._args.api_server_ip, self._args.api_server_port)
except ConnectionError:
print 'Connection to API server failed'
return
dp_obj.del_virtual_dns(self._args.fq_name)
#end __init__
def _parse_args(self, args_str):
'''
Eg. python del_virtual_dns.py --fq_name default-domain:vdns1
'''
# Source any specified config/ini file
# Turn off help, so we print all options in response to -h
conf_parser = argparse.ArgumentParser(add_help = False)
args, remaining_argv = conf_parser.parse_known_args(args_str.split())
defaults = {
'api_server_ip' : '127.0.0.1',
'api_server_port' : '8082',
'admin_user': None,
'admin_password': None,
'admin_tenant_name': None
}
# Don't suppress add_help here so it will handle -h
parser = argparse.ArgumentParser(
# Inherit options from config_parser
parents=[conf_parser],
# print script description with -h/--help
description=__doc__,
# Don't mess with format of description
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.set_defaults(**defaults)
parser.add_argument("--fq_name", help = "Fully qualified Virtual DNS Name")
parser.add_argument("--api_server_ip", help = "IP address of api server")
parser.add_argument("--api_server_port", help = "Port of api server")
parser.add_argument("--admin_user", help = "Name of keystone admin user")
parser.add_argument("--admin_password", help = "Password of keystone admin user")
parser.add_argument("--admin_tenant_name", help = "Tenant name for keystone admin user")
self._args = parser.parse_args(remaining_argv)
#end _parse_args
# end class DelVirtualDns
def main(args_str = None):
DelVirtualDns(args_str)
#end main
if __name__ == "__main__":
main()
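# Example invocation (illustrative values; requires a reachable API server
# and valid keystone admin credentials -- all flags below are the ones
# defined in _parse_args above):
#
#   python del_virtual_dns.py --fq_name default-domain:vdns1 \
#       --api_server_ip 127.0.0.1 --api_server_port 8082 \
#       --admin_user admin --admin_password secret --admin_tenant_name admin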
| apache-2.0 |
daenamkim/ansible | test/units/module_utils/network/nso/test_nso.py | 2 | 6821 | # Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
import json
from ansible.compat.tests.mock import patch
from ansible.compat.tests import unittest
from ansible.module_utils.network.nso import nso
MODULE_PREFIX_MAP = '''
{
"ansible-nso": "an",
"tailf-ncs": "ncs"
}
'''
SCHEMA_DATA = {
'/an:id-name-leaf': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-leaf"
},
"data": {
"kind": "leaf",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
},
"name": "id-name-leaf",
"qname": "an:id-name-leaf"
}
}''',
'/an:id-name-values': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {},
"keypath": "/an:id-name-values"
},
"data": {
"kind": "container",
"name": "id-name-values",
"qname": "an:id-name-values",
"children": [
{
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
]
}
]
}
}
''',
'/an:id-name-values/id-name-value': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-values/id-name-value"
},
"data": {
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
],
"children": [
{
"kind": "key",
"name": "name",
"qname": "an:name",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
}
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "value",
"qname": "an:value"
}
]
}
}
'''
}
class MockResponse(object):
def __init__(self, method, params, code, body, headers=None):
if headers is None:
headers = {}
self.method = method
self.params = params
self.code = code
self.body = body
self.headers = dict(headers)
def read(self):
return self.body
def mock_call(calls, url, data=None, headers=None, method=None):
result = calls[0]
del calls[0]
request = json.loads(data)
if result.method != request['method']:
raise ValueError('expected method {0}({1}), got {2}({3})'.format(
result.method, result.params,
request['method'], request['params']))
for key, value in result.params.items():
if key not in request['params']:
raise ValueError('{0} not in parameters'.format(key))
if value != request['params'][key]:
raise ValueError('expected {0} to be {1}, got {2}'.format(
key, value, request['params'][key]))
return result
def get_schema_response(path):
return MockResponse(
'get_schema', {'path': path}, 200, '{{"result": {0}}}'.format(
SCHEMA_DATA[path]))
class TestValueBuilder(unittest.TestCase):
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_identityref_leaf(self, open_url_mock):
calls = [
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/an:id-name-leaf'),
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP))
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/an:id-name-leaf"
schema_data = json.loads(
SCHEMA_DATA['/an:id-name-leaf'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc'))
vb.build(parent, None, 'ansible-nso:id-two', schema)
        self.assertEqual(1, len(vb.values))
        value = vb.values[0]
        self.assertEqual(parent, value.path)
        self.assertEqual('set', value.state)
        self.assertEqual('an:id-two', value.value)
self.assertEqual(0, len(calls))
@patch('ansible.module_utils.network.nso.nso.open_url')
def test_identityref_key(self, open_url_mock):
calls = [
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/an:id-name-values/id-name-value'),
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP)),
MockResponse('exists', {'path': '/an:id-name-values/id-name-value{an:id-one}'}, 200, '{"result": {"exists": true}}'),
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/an:id-name-values"
schema_data = json.loads(
SCHEMA_DATA['/an:id-name-values/id-name-value'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc'))
vb.build(parent, 'id-name-value', [{'name': 'ansible-nso:id-one', 'value': '1'}], schema)
        self.assertEqual(1, len(vb.values))
        value = vb.values[0]
        self.assertEqual('{0}/id-name-value{{an:id-one}}/value'.format(parent), value.path)
        self.assertEqual('set', value.state)
        self.assertEqual('1', value.value)
self.assertEqual(0, len(calls))
| gpl-3.0 |
patmcb/odoo | addons/payment_paypal/models/res_company.py | 422 | 1752 | # -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class ResCompany(osv.Model):
_inherit = "res.company"
def _get_paypal_account(self, cr, uid, ids, name, arg, context=None):
Acquirer = self.pool['payment.acquirer']
company_id = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.id
paypal_ids = Acquirer.search(cr, uid, [
('website_published', '=', True),
('name', 'ilike', 'paypal'),
('company_id', '=', company_id),
], limit=1, context=context)
if paypal_ids:
paypal = Acquirer.browse(cr, uid, paypal_ids[0], context=context)
return dict.fromkeys(ids, paypal.paypal_email_account)
return dict.fromkeys(ids, False)
def _set_paypal_account(self, cr, uid, id, name, value, arg, context=None):
Acquirer = self.pool['payment.acquirer']
company_id = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.id
paypal_account = self.browse(cr, uid, id, context=context).paypal_account
paypal_ids = Acquirer.search(cr, uid, [
('website_published', '=', True),
('paypal_email_account', '=', paypal_account),
('company_id', '=', company_id),
], context=context)
if paypal_ids:
Acquirer.write(cr, uid, paypal_ids, {'paypal_email_account': value}, context=context)
return True
_columns = {
'paypal_account': fields.function(
_get_paypal_account,
fnct_inv=_set_paypal_account,
nodrop=True,
type='char', string='Paypal Account',
help="Paypal username (usually email) for receiving online payments."
),
}
| agpl-3.0 |
darkleons/BE | addons/mrp/stock.py | 24 | 18483 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, float_compare
class StockMove(osv.osv):
_inherit = 'stock.move'
_columns = {
'production_id': fields.many2one('mrp.production', 'Production Order for Produced Products', select=True, copy=False),
'raw_material_production_id': fields.many2one('mrp.production', 'Production Order for Raw Materials', select=True),
'consumed_for': fields.many2one('stock.move', 'Consumed for', help='Technical field used to make the traceability of produced products'),
}
def check_tracking(self, cr, uid, move, lot_id, context=None):
super(StockMove, self).check_tracking(cr, uid, move, lot_id, context=context)
if move.product_id.track_production and (move.location_id.usage == 'production' or move.location_dest_id.usage == 'production') and not lot_id:
raise osv.except_osv(_('Warning!'), _('You must assign a serial number for the product %s') % (move.product_id.name))
if move.raw_material_production_id and move.location_dest_id.usage == 'production' and move.raw_material_production_id.product_id.track_production and not move.consumed_for:
raise osv.except_osv(_('Warning!'), _("Because the product %s requires it, you must assign a serial number to your raw material %s to proceed further in your production. Please use the 'Produce' button to do so.") % (move.raw_material_production_id.product_id.name, move.product_id.name))
def _check_phantom_bom(self, cr, uid, move, context=None):
"""check if product associated to move has a phantom bom
return list of ids of mrp.bom for that product """
user_company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
#doing the search as SUPERUSER because a user with the permission to write on a stock move should be able to explode it
#without giving him the right to read the boms.
domain = [
'|', ('product_id', '=', move.product_id.id),
'&', ('product_id', '=', False), ('product_tmpl_id.product_variant_ids', '=', move.product_id.id),
('type', '=', 'phantom'),
'|', ('date_start', '=', False), ('date_start', '<=', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
'|', ('date_stop', '=', False), ('date_stop', '>=', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
('company_id', '=', user_company)]
return self.pool.get('mrp.bom').search(cr, SUPERUSER_ID, domain, context=context)
def _action_explode(self, cr, uid, move, context=None):
""" Explodes pickings.
@param move: Stock moves
@return: True
"""
bom_obj = self.pool.get('mrp.bom')
move_obj = self.pool.get('stock.move')
prod_obj = self.pool.get("product.product")
proc_obj = self.pool.get("procurement.order")
uom_obj = self.pool.get("product.uom")
to_explode_again_ids = []
processed_ids = []
bis = self._check_phantom_bom(cr, uid, move, context=context)
if bis:
bom_point = bom_obj.browse(cr, SUPERUSER_ID, bis[0], context=context)
factor = uom_obj._compute_qty(cr, SUPERUSER_ID, move.product_uom.id, move.product_uom_qty, bom_point.product_uom.id) / bom_point.product_qty
res = bom_obj._bom_explode(cr, SUPERUSER_ID, bom_point, move.product_id, factor, [], context=context)
state = 'confirmed'
if move.state == 'assigned':
state = 'assigned'
for line in res[0]:
product = prod_obj.browse(cr, uid, line['product_id'], context=context)
if product.type != 'service':
valdef = {
'picking_id': move.picking_id.id if move.picking_id else False,
'product_id': line['product_id'],
'product_uom': line['product_uom'],
'product_uom_qty': line['product_qty'],
'product_uos': line['product_uos'],
'product_uos_qty': line['product_uos_qty'],
'state': state,
'name': line['name'],
'procurement_id': move.procurement_id.id,
'split_from': move.id, #Needed in order to keep sale connection, but will be removed by unlink
}
mid = move_obj.copy(cr, uid, move.id, default=valdef, context=context)
to_explode_again_ids.append(mid)
else:
if prod_obj.need_procurement(cr, uid, [product.id], context=context):
valdef = {
'name': move.rule_id and move.rule_id.name or "/",
'origin': move.origin,
'company_id': move.company_id and move.company_id.id or False,
'date_planned': move.date,
'product_id': line['product_id'],
'product_qty': line['product_qty'],
'product_uom': line['product_uom'],
'product_uos_qty': line['product_uos_qty'],
'product_uos': line['product_uos'],
'group_id': move.group_id.id,
'priority': move.priority,
'partner_dest_id': move.partner_id.id,
}
if move.procurement_id:
proc = proc_obj.copy(cr, uid, move.procurement_id.id, default=valdef, context=context)
else:
proc = proc_obj.create(cr, uid, valdef, context=context)
proc_obj.run(cr, uid, [proc], context=context) #could be omitted
#check if new moves needs to be exploded
if to_explode_again_ids:
for new_move in self.browse(cr, uid, to_explode_again_ids, context=context):
processed_ids.extend(self._action_explode(cr, uid, new_move, context=context))
if not move.split_from and move.procurement_id:
# Check if procurements have been made to wait for
moves = move.procurement_id.move_ids
if len(moves) == 1:
proc_obj.write(cr, uid, [move.procurement_id.id], {'state': 'done'}, context=context)
#delete the move with original product which is not relevant anymore
move_obj.unlink(cr, SUPERUSER_ID, [move.id], context=context)
#return list of newly created move or the move id otherwise, unless there is no move anymore
return processed_ids or (not bis and [move.id]) or []
def action_confirm(self, cr, uid, ids, context=None):
move_ids = []
for move in self.browse(cr, uid, ids, context=context):
#in order to explode a move, we must have a picking_type_id on that move because otherwise the move
#won't be assigned to a picking and it would be weird to explode a move into several if they aren't
#all grouped in the same picking.
if move.picking_type_id:
move_ids.extend(self._action_explode(cr, uid, move, context=context))
else:
move_ids.append(move.id)
#we go further with the list of ids potentially changed by action_explode
return super(StockMove, self).action_confirm(cr, uid, move_ids, context=context)
def action_consume(self, cr, uid, ids, product_qty, location_id=False, restrict_lot_id=False, restrict_partner_id=False,
consumed_for=False, context=None):
""" Consumed product with specific quantity from specific source location.
@param product_qty: Consumed/produced product quantity (= in quantity of UoM of product)
@param location_id: Source location
@param restrict_lot_id: optionnal parameter that allows to restrict the choice of quants on this specific lot
@param restrict_partner_id: optionnal parameter that allows to restrict the choice of quants to this specific partner
@param consumed_for: optionnal parameter given to this function to make the link between raw material consumed and produced product, for a better traceability
@return: New lines created if not everything was consumed for this line
"""
if context is None:
context = {}
res = []
production_obj = self.pool.get('mrp.production')
if product_qty <= 0:
raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
#because of the action_confirm that can create extra moves in case of phantom bom, we need to make 2 loops
ids2 = []
for move in self.browse(cr, uid, ids, context=context):
if move.state == 'draft':
ids2.extend(self.action_confirm(cr, uid, [move.id], context=context))
else:
ids2.append(move.id)
prod_orders = set()
for move in self.browse(cr, uid, ids2, context=context):
prod_orders.add(move.raw_material_production_id.id or move.production_id.id)
move_qty = move.product_qty
if move_qty <= 0:
raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
quantity_rest = move_qty - product_qty
# Compare with numbers of move uom as we want to avoid a split with 0 qty
quantity_rest_uom = move.product_uom_qty - self.pool.get("product.uom")._compute_qty_obj(cr, uid, move.product_id.uom_id, product_qty, move.product_uom)
if float_compare(quantity_rest_uom, 0, precision_rounding=move.product_uom.rounding) != 0:
new_mov = self.split(cr, uid, move, quantity_rest, context=context)
res.append(new_mov)
vals = {'restrict_lot_id': restrict_lot_id,
'restrict_partner_id': restrict_partner_id,
'consumed_for': consumed_for}
if location_id:
vals.update({'location_id': location_id})
self.write(cr, uid, [move.id], vals, context=context)
# Original moves will be the quantities consumed, so they need to be done
self.action_done(cr, uid, ids2, context=context)
if res:
self.action_assign(cr, uid, res, context=context)
if prod_orders:
production_obj.signal_workflow(cr, uid, list(prod_orders), 'button_produce')
return res
def action_scrap(self, cr, uid, ids, product_qty, location_id, restrict_lot_id=False, restrict_partner_id=False, context=None):
""" Move the scrap/damaged product into scrap location
@param product_qty: Scraped product quantity
@param location_id: Scrap location
@return: Scraped lines
"""
res = []
production_obj = self.pool.get('mrp.production')
for move in self.browse(cr, uid, ids, context=context):
new_moves = super(StockMove, self).action_scrap(cr, uid, [move.id], product_qty, location_id,
restrict_lot_id=restrict_lot_id,
restrict_partner_id=restrict_partner_id, context=context)
#If we are not scrapping our whole move, tracking and lot references must not be removed
production_ids = production_obj.search(cr, uid, [('move_lines', 'in', [move.id])])
for prod_id in production_ids:
production_obj.signal_workflow(cr, uid, [prod_id], 'button_produce')
for new_move in new_moves:
production_obj.write(cr, uid, production_ids, {'move_lines': [(4, new_move)]})
res.append(new_move)
return res
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
res = super(StockMove, self).write(cr, uid, ids, vals, context=context)
from openerp import workflow
if vals.get('state') == 'assigned':
moves = self.browse(cr, uid, ids, context=context)
orders = list(set([x.raw_material_production_id.id for x in moves if x.raw_material_production_id and x.raw_material_production_id.state == 'confirmed']))
for order_id in orders:
if self.pool.get('mrp.production').test_ready(cr, uid, [order_id]):
workflow.trg_validate(uid, 'mrp.production', order_id, 'moves_ready', cr)
return res
class stock_warehouse(osv.osv):
_inherit = 'stock.warehouse'
_columns = {
'manufacture_to_resupply': fields.boolean('Manufacture in this Warehouse',
help="When products are manufactured, they can be manufactured in this warehouse."),
'manufacture_pull_id': fields.many2one('procurement.rule', 'Manufacture Rule'),
}
def _get_manufacture_pull_rule(self, cr, uid, warehouse, context=None):
route_obj = self.pool.get('stock.location.route')
data_obj = self.pool.get('ir.model.data')
try:
manufacture_route_id = data_obj.get_object_reference(cr, uid, 'stock', 'route_warehouse0_manufacture')[1]
except:
manufacture_route_id = route_obj.search(cr, uid, [('name', 'like', _('Manufacture'))], context=context)
manufacture_route_id = manufacture_route_id and manufacture_route_id[0] or False
if not manufacture_route_id:
raise osv.except_osv(_('Error!'), _('Can\'t find any generic Manufacture route.'))
return {
'name': self._format_routename(cr, uid, warehouse, _(' Manufacture'), context=context),
'location_id': warehouse.lot_stock_id.id,
'route_id': manufacture_route_id,
'action': 'manufacture',
'picking_type_id': warehouse.int_type_id.id,
'propagate': False,
'warehouse_id': warehouse.id,
}
def create_routes(self, cr, uid, ids, warehouse, context=None):
pull_obj = self.pool.get('procurement.rule')
res = super(stock_warehouse, self).create_routes(cr, uid, ids, warehouse, context=context)
if warehouse.manufacture_to_resupply:
manufacture_pull_vals = self._get_manufacture_pull_rule(cr, uid, warehouse, context=context)
manufacture_pull_id = pull_obj.create(cr, uid, manufacture_pull_vals, context=context)
res['manufacture_pull_id'] = manufacture_pull_id
return res
def write(self, cr, uid, ids, vals, context=None):
pull_obj = self.pool.get('procurement.rule')
if isinstance(ids, (int, long)):
ids = [ids]
if 'manufacture_to_resupply' in vals:
if vals.get("manufacture_to_resupply"):
for warehouse in self.browse(cr, uid, ids, context=context):
if not warehouse.manufacture_pull_id:
manufacture_pull_vals = self._get_manufacture_pull_rule(cr, uid, warehouse, context=context)
manufacture_pull_id = pull_obj.create(cr, uid, manufacture_pull_vals, context=context)
vals['manufacture_pull_id'] = manufacture_pull_id
else:
for warehouse in self.browse(cr, uid, ids, context=context):
if warehouse.manufacture_pull_id:
pull_obj.unlink(cr, uid, warehouse.manufacture_pull_id.id, context=context)
        return super(stock_warehouse, self).write(cr, uid, ids, vals, context=context)
def get_all_routes_for_wh(self, cr, uid, warehouse, context=None):
all_routes = super(stock_warehouse, self).get_all_routes_for_wh(cr, uid, warehouse, context=context)
if warehouse.manufacture_to_resupply and warehouse.manufacture_pull_id and warehouse.manufacture_pull_id.route_id:
all_routes += [warehouse.manufacture_pull_id.route_id.id]
return all_routes
def _handle_renaming(self, cr, uid, warehouse, name, code, context=None):
res = super(stock_warehouse, self)._handle_renaming(cr, uid, warehouse, name, code, context=context)
pull_obj = self.pool.get('procurement.rule')
#change the manufacture pull rule name
if warehouse.manufacture_pull_id:
pull_obj.write(cr, uid, warehouse.manufacture_pull_id.id, {'name': warehouse.manufacture_pull_id.name.replace(warehouse.name, name, 1)}, context=context)
return res
def _get_all_products_to_resupply(self, cr, uid, warehouse, context=None):
res = super(stock_warehouse, self)._get_all_products_to_resupply(cr, uid, warehouse, context=context)
if warehouse.manufacture_pull_id and warehouse.manufacture_pull_id.route_id:
for product_id in res:
for route in self.pool.get('product.product').browse(cr, uid, product_id, context=context).route_ids:
if route.id == warehouse.manufacture_pull_id.route_id.id:
res.remove(product_id)
break
return res
| agpl-3.0 |
hn8841182/20150623-test02 | static/Brython3.1.0-20150301-090019/Lib/_strptime.py | 518 | 21683 | """Strptime-related classes and functions.
CLASSES:
LocaleTime -- Discovers and stores locale-specific time information
TimeRE -- Creates regexes for pattern matching a string of text containing
time information
FUNCTIONS:
_getlang -- Figure out what language is being used for the locale
strptime -- Calculates the time struct represented by the passed-in string
"""
import time
import locale
import calendar
from re import compile as re_compile
from re import IGNORECASE
from re import escape as re_escape
from datetime import (date as datetime_date,
timedelta as datetime_timedelta,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
__all__ = []
def _getlang():
# Figure out what the current language is set to.
return locale.getlocale(locale.LC_TIME)
class LocaleTime(object):
"""Stores and handles locale-specific information related to time.
ATTRIBUTES:
f_weekday -- full weekday names (7-item list)
a_weekday -- abbreviated weekday names (7-item list)
f_month -- full month names (13-item list; dummy value in [0], which
is added by code)
a_month -- abbreviated month names (13-item list, dummy value in
[0], which is added by code)
am_pm -- AM/PM representation (2-item list)
LC_date_time -- format string for date/time representation (string)
LC_date -- format string for date representation (string)
LC_time -- format string for time representation (string)
timezone -- daylight- and non-daylight-savings timezone representation
(2-item list of sets)
lang -- Language used by instance (2-item tuple)
"""
def __init__(self):
"""Set all attributes.
Order of methods called matters for dependency reasons.
The locale language is set at the offset and then checked again before
exiting. This is to make sure that the attributes were not set with a
mix of information from more than one locale. This would most likely
happen when using threads where one thread calls a locale-dependent
function while another thread changes the locale while the function in
the other thread is still running. Proper coding would call for
locks to prevent changing the locale while locale-dependent code is
running. The check here is done in case someone does not think about
doing this.
        Only other possible issue is if someone changed the timezone and did
        not call time.tzset(). That is an issue for the programmer, though,
        since changing the timezone is worthless without that call.
"""
self.lang = _getlang()
self.__calc_weekday()
self.__calc_month()
self.__calc_am_pm()
self.__calc_timezone()
self.__calc_date_time()
if _getlang() != self.lang:
raise ValueError("locale changed during initialization")
def __pad(self, seq, front):
        # Add '' to seq, either at the front (if front is True) or at the back.
seq = list(seq)
if front:
seq.insert(0, '')
else:
seq.append('')
return seq
def __calc_weekday(self):
# Set self.a_weekday and self.f_weekday using the calendar
# module.
a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
f_weekday = [calendar.day_name[i].lower() for i in range(7)]
self.a_weekday = a_weekday
self.f_weekday = f_weekday
def __calc_month(self):
# Set self.f_month and self.a_month using the calendar module.
a_month = [calendar.month_abbr[i].lower() for i in range(13)]
f_month = [calendar.month_name[i].lower() for i in range(13)]
self.a_month = a_month
self.f_month = f_month
def __calc_am_pm(self):
# Set self.am_pm by using time.strftime().
# The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
# magical; just happened to have used it everywhere else where a
# static date was needed.
am_pm = []
for hour in (1, 22):
time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
am_pm.append(time.strftime("%p", time_tuple).lower())
self.am_pm = am_pm
def __calc_date_time(self):
# Set self.date_time, self.date, & self.time by using
# time.strftime().
# Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
        # overloaded numbers is minimized. The order in which values are
        # searched for within the format string is very important; it
        # eliminates possible ambiguity for what something represents.
time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
date_time = [None, None, None]
date_time[0] = time.strftime("%c", time_tuple).lower()
date_time[1] = time.strftime("%x", time_tuple).lower()
date_time[2] = time.strftime("%X", time_tuple).lower()
replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
(self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
(self.a_month[3], '%b'), (self.am_pm[1], '%p'),
('1999', '%Y'), ('99', '%y'), ('22', '%H'),
('44', '%M'), ('55', '%S'), ('76', '%j'),
('17', '%d'), ('03', '%m'), ('3', '%m'),
# '3' needed for when no leading zero.
('2', '%w'), ('10', '%I')]
replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
for tz in tz_values])
for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
current_format = date_time[offset]
for old, new in replacement_pairs:
# Must deal with possible lack of locale info
# manifesting itself as the empty string (e.g., Swedish's
# lack of AM/PM info) or a platform returning a tuple of empty
# strings (e.g., MacOS 9 having timezone as ('','')).
if old:
current_format = current_format.replace(old, new)
# If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
# 2005-01-03 occurs before the first Monday of the year. Otherwise
# %U is used.
time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
if '00' in time.strftime(directive, time_tuple):
U_W = '%W'
else:
U_W = '%U'
date_time[offset] = current_format.replace('11', U_W)
self.LC_date_time = date_time[0]
self.LC_date = date_time[1]
self.LC_time = date_time[2]
def __calc_timezone(self):
# Set self.timezone by using time.tzname.
        # Do not worry about possibility of time.tzname[0] == time.tzname[1]
# and time.daylight; handle that in strptime .
#try:
#time.tzset()
#except AttributeError:
#pass
no_saving = frozenset(["utc", "gmt", time.tzname[0].lower()])
if time.daylight:
has_saving = frozenset([time.tzname[1].lower()])
else:
has_saving = frozenset()
self.timezone = (no_saving, has_saving)
class TimeRE(dict):
"""Handle conversion from format directives to regexes."""
def __init__(self, locale_time=None):
"""Create keys/values.
Order of execution is important for dependency reasons.
"""
if locale_time:
self.locale_time = locale_time
else:
self.locale_time = LocaleTime()
base = super()
base.__init__({
# The " \d" part of the regex is to make %c from ANSI C work
'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
'f': r"(?P<f>[0-9]{1,6})",
'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
'M': r"(?P<M>[0-5]\d|\d)",
'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
'w': r"(?P<w>[0-6])",
# W is set below by using 'U'
'y': r"(?P<y>\d\d)",
#XXX: Does 'Y' need to worry about having less or more than
# 4 digits?
'Y': r"(?P<Y>\d\d\d\d)",
'z': r"(?P<z>[+-]\d\d[0-5]\d)",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
for tz in tz_names),
'Z'),
'%': '%'})
base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
base.__setitem__('x', self.pattern(self.locale_time.LC_date))
base.__setitem__('X', self.pattern(self.locale_time.LC_time))
def __seqToRE(self, to_convert, directive):
"""Convert a list to a regex string for matching a directive.
Want possible matching values to be from longest to shortest. This
        prevents the possibility of a match occurring for a value that is also
a substring of a larger value that should have matched (e.g., 'abc'
matching when 'abcdef' should have been the match).
"""
to_convert = sorted(to_convert, key=len, reverse=True)
for value in to_convert:
if value != '':
break
else:
return ''
regex = '|'.join(re_escape(stuff) for stuff in to_convert)
regex = '(?P<%s>%s' % (directive, regex)
return '%s)' % regex
def pattern(self, format):
"""Return regex pattern for the format string.
Need to make sure that any characters that might be interpreted as
regex syntax are escaped.
"""
processed_format = ''
# The sub() call escapes all characters that might be misconstrued
# as regex syntax. Cannot use re.escape since we have to deal with
# format directives (%m, etc.).
regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
format = regex_chars.sub(r"\\\1", format)
        whitespace_replacement = re_compile(r'\s+')
        format = whitespace_replacement.sub(r'\\s+', format)
while '%' in format:
directive_index = format.index('%')+1
processed_format = "%s%s%s" % (processed_format,
format[:directive_index-1],
self[format[directive_index]])
format = format[directive_index+1:]
return "%s%s" % (processed_format, format)
def compile(self, format):
"""Return a compiled re object for the format string."""
return re_compile(self.pattern(format), IGNORECASE)
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
"""Calculate the Julian day based on the year, week of the year, and day of
the week, with week_start_day representing whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0)."""
first_weekday = datetime_date(year, 1, 1).weekday()
# If we are dealing with the %U directive (week starts on Sunday), it's
# easier to just shift the view to Sunday being the first day of the
# week.
if not week_starts_Mon:
first_weekday = (first_weekday + 1) % 7
day_of_week = (day_of_week + 1) % 7
# Need to watch out for a week 0 (when the first day of the year is not
# the same as that specified by %U or %W).
week_0_length = (7 - first_weekday) % 7
if week_of_year == 0:
return 1 + day_of_week - first_weekday
else:
days_to_week = week_0_length + (7 * (week_of_year - 1))
return 1 + days_to_week + day_of_week
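# Worked example (a sketch, not part of the original module): 2005-01-01
# fell on a Saturday, so for %W (weeks starting on Monday) Jan 1-2 form
# week 0 and _calc_julian_from_U_or_W(2005, 1, 0, True) returns 3, i.e.
# Monday 2005-01-03, the first day of week 1.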
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a 2-tuple consisting of a time struct and an int containing
the number of microseconds based on the input string and the
format string."""
for index, arg in enumerate([data_string, format]):
if not isinstance(arg, str):
msg = "strptime() argument {} must be str, not {}"
raise TypeError(msg.format(index, type(arg)))
global _TimeRE_cache, _regex_cache
with _cache_lock:
if _getlang() != _TimeRE_cache.locale_time.lang:
_TimeRE_cache = TimeRE()
_regex_cache.clear()
if len(_regex_cache) > _CACHE_MAX_SIZE:
_regex_cache.clear()
locale_time = _TimeRE_cache.locale_time
format_regex = _regex_cache.get(format)
if not format_regex:
try:
format_regex = _TimeRE_cache.compile(format)
# KeyError raised when a bad format is found; can be specified as
# \\, in which case it was a stray % but with a space after it
except KeyError as err:
bad_directive = err.args[0]
if bad_directive == "\\":
bad_directive = "%"
del err
raise ValueError("'%s' is a bad directive in format '%s'" %
(bad_directive, format)) from None
# IndexError only occurs when the format string is "%"
except IndexError:
raise ValueError("stray %% in format '%s'" % format) from None
_regex_cache[format] = format_regex
found = format_regex.match(data_string)
if not found:
raise ValueError("time data %r does not match format %r" %
(data_string, format))
if len(data_string) != found.end():
raise ValueError("unconverted data remains: %s" %
data_string[found.end():])
year = None
month = day = 1
hour = minute = second = fraction = 0
tz = -1
tzoffset = None
# Default to -1 to signify that values not known; not critical to have,
# though
week_of_year = -1
week_of_year_start = -1
# weekday and julian defaulted to -1 so as to signal need to calculate
# values
weekday = julian = -1
found_dict = found.groupdict()
for group_key in found_dict.keys():
# Directives not explicitly handled below:
# c, x, X
# handled by making out of other directives
# U, W
# worthless without day of the week
if group_key == 'y':
year = int(found_dict['y'])
# Open Group specification for strptime() states that a %y
#value in the range of [00, 68] is in the century 2000, while
#[69,99] is in the century 1900
if year <= 68:
year += 2000
else:
year += 1900
elif group_key == 'Y':
year = int(found_dict['Y'])
elif group_key == 'm':
month = int(found_dict['m'])
elif group_key == 'B':
month = locale_time.f_month.index(found_dict['B'].lower())
elif group_key == 'b':
month = locale_time.a_month.index(found_dict['b'].lower())
elif group_key == 'd':
day = int(found_dict['d'])
elif group_key == 'H':
hour = int(found_dict['H'])
elif group_key == 'I':
hour = int(found_dict['I'])
ampm = found_dict.get('p', '').lower()
# If there was no AM/PM indicator, we'll treat this like AM
if ampm in ('', locale_time.am_pm[0]):
# We're in AM so the hour is correct unless we're
# looking at 12 midnight.
# 12 midnight == 12 AM == hour 0
if hour == 12:
hour = 0
elif ampm == locale_time.am_pm[1]:
# We're in PM so we need to add 12 to the hour unless
# we're looking at 12 noon.
# 12 noon == 12 PM == hour 12
if hour != 12:
hour += 12
elif group_key == 'M':
minute = int(found_dict['M'])
elif group_key == 'S':
second = int(found_dict['S'])
elif group_key == 'f':
s = found_dict['f']
# Pad to always return microseconds.
s += "0" * (6 - len(s))
fraction = int(s)
elif group_key == 'A':
weekday = locale_time.f_weekday.index(found_dict['A'].lower())
elif group_key == 'a':
weekday = locale_time.a_weekday.index(found_dict['a'].lower())
elif group_key == 'w':
weekday = int(found_dict['w'])
if weekday == 0:
weekday = 6
else:
weekday -= 1
elif group_key == 'j':
julian = int(found_dict['j'])
elif group_key in ('U', 'W'):
week_of_year = int(found_dict[group_key])
if group_key == 'U':
# U starts week on Sunday.
week_of_year_start = 6
else:
# W starts week on Monday.
week_of_year_start = 0
elif group_key == 'z':
z = found_dict['z']
tzoffset = int(z[1:3]) * 60 + int(z[3:5])
if z.startswith("-"):
tzoffset = -tzoffset
elif group_key == 'Z':
# Since -1 is default value only need to worry about setting tz if
# it can be something other than -1.
found_zone = found_dict['Z'].lower()
for value, tz_values in enumerate(locale_time.timezone):
if found_zone in tz_values:
# Deal with bad locale setup where timezone names are the
# same and yet time.daylight is true; too ambiguous to
# be able to tell what timezone has daylight savings
if (time.tzname[0] == time.tzname[1] and
time.daylight and found_zone not in ("utc", "gmt")):
break
else:
tz = value
break
leap_year_fix = False
if year is None and month == 2 and day == 29:
year = 1904 # 1904 is first leap year of 20th century
leap_year_fix = True
elif year is None:
year = 1900
# If we know the week of the year and what day of that week, we can figure
# out the Julian day of the year.
if julian == -1 and week_of_year != -1 and weekday != -1:
week_starts_Mon = True if week_of_year_start == 0 else False
julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
week_starts_Mon)
# Cannot pre-calculate datetime_date() since can change in Julian
# calculation and thus could have different value for the day of the week
# calculation.
if julian == -1:
# Need to add 1 to result since first day of the year is 1, not 0.
julian = datetime_date(year, month, day).toordinal() - \
datetime_date(year, 1, 1).toordinal() + 1
else: # Assume that if they bothered to include Julian day it will
# be accurate.
datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
year = datetime_result.year
month = datetime_result.month
day = datetime_result.day
if weekday == -1:
weekday = datetime_date(year, month, day).weekday()
# Add timezone info
tzname = found_dict.get("Z")
if tzoffset is not None:
gmtoff = tzoffset * 60
else:
gmtoff = None
if leap_year_fix:
# the caller didn't supply a year but asked for Feb 29th. We couldn't
# use the default of 1900 for computations. We set it back to ensure
# that February 29th is smaller than March 1st.
year = 1900
return (year, month, day,
hour, minute, second,
weekday, julian, tz, tzname, gmtoff), fraction
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a time struct based on the input string and the
format string."""
tt = _strptime(data_string, format)[0]
return time.struct_time(tt[:time._STRUCT_TM_ITEMS])
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a class cls instance based on the input string and the
format string."""
tt, fraction = _strptime(data_string, format)
tzname, gmtoff = tt[-2:]
args = tt[:6] + (fraction,)
if gmtoff is not None:
tzdelta = datetime_timedelta(seconds=gmtoff)
if tzname:
tz = datetime_timezone(tzdelta, tzname)
else:
tz = datetime_timezone(tzdelta)
args += (tz,)
return cls(*args)
| gpl-3.0 |
BambooHR/rapid | rapid/master/controllers/api/upgrade_controller.py | 1 | 1295 | """
Copyright (c) 2015 Michael Bright and Bamboo HR LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask import Response
from rapid.lib.version import Version
from rapid.lib import api_key_required
from rapid.lib.utils import UpgradeUtil
class UpgradeController(object):
def __init__(self, flask_app):
self.flask_app = flask_app
def configure_routing(self):
self.flask_app.add_url_rule('/api/upgrade/<path:version>', 'upgrade_master', api_key_required(self.upgrade_master), methods=['POST'])
def upgrade_master(self, version):
worked = UpgradeUtil.upgrade_version(version, self.flask_app.rapid_config)
return Response("It worked!" if worked else "It didn't work, version {} restored!".format(Version.get_version()), status=200 if worked else 505)
| apache-2.0 |
leilihh/novaha | nova/keymgr/single_key_mgr.py | 10 | 2555 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
An implementation of a key manager that returns a single key in response to
all invocations of get_key.
"""
from nova import exception
from nova.keymgr import mock_key_mgr
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class SingleKeyManager(mock_key_mgr.MockKeyManager):
"""This key manager implementation supports all the methods specified by
the key manager interface. This implementation creates a single key in
response to all invocations of create_key. Side effects
(e.g., raising exceptions) for each method are handled as specified by
the key manager interface.
"""
def __init__(self):
LOG.warn(_('This key manager is insecure and is not recommended for '
'production deployments'))
super(SingleKeyManager, self).__init__()
self.key_id = '00000000-0000-0000-0000-000000000000'
self.key = self._generate_key(key_length=256)
# key should exist by default
self.keys[self.key_id] = self.key
def _generate_hex_key(self, **kwargs):
key_length = kwargs.get('key_length', 256)
return '0' * (key_length / 4) # hex digit => 4 bits
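    # e.g. the default key_length of 256 bits yields '0' * 64, a
    # 64-character hex string, since each hex digit encodes 4 bits.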
def _generate_key_id(self):
return self.key_id
def store_key(self, ctxt, key, **kwargs):
if key != self.key:
raise exception.KeyManagerError(
reason="cannot store arbitrary keys")
return super(SingleKeyManager, self).store_key(ctxt, key, **kwargs)
def delete_key(self, ctxt, key_id, **kwargs):
if ctxt is None:
raise exception.NotAuthorized()
if key_id != self.key_id:
raise exception.KeyManagerError(
reason="cannot delete non-existent key")
LOG.warn(_("Not deleting key %s"), key_id)
| apache-2.0 |
DietPawel/playhistory | app/api.py | 1 | 5417 | from flask import Flask, jsonify, make_response, abort,request, send_from_directory, redirect, render_template, Response
import db, datetime, csv
from time import time
from io import StringIO
DEBUG = False
#path to file to be displayed on index page
INDEX = '/opt/index.html'
app = Flask(__name__, static_url_path='')
@app.route('/', methods=['GET'])
def root():
contents="<p>Set home page in %s !</p>" % (str(INDEX) )
try:
with open(INDEX, 'r') as indexfile:
contents = indexfile.readlines()
except:
pass
return render_template('index.html', index=contents)
@app.route('/hist/', methods=['GET'])
def history_data():
return render_template('hist.html')
@app.route('/day/', methods=['GET'])
def show_day_template():
t = request.args.get('t')
day_name="Dzisiaj"
if t is None:
t = time()
else:
try:
t = int(t)
day_name = datetime.datetime.fromtimestamp(t).strftime("%d.%m.%Y r.")
except:
t = time()
return render_template('day.html', songs = db.get_day(t), debug=DEBUG, t=t, day_name=day_name)
@app.route('/edit/<int:playid>', methods=['GET'])
def edit_play_object(playid):
play = db.get_play_id(playid)
if play is None:
abort(404)
ret = request.args.get('ret')
if(ret is None):
ret = '/day/'
return render_template('edit.html', play = play, ret=ret, debug=DEBUG)
@app.route('/stats/', methods=['GET'])
def stats():
start = request.args.get('startts')
stop = request.args.get('stopts')
if(start is None or stop is None):
start=stop=0
else:
try:
start = int(start)
stop = int(stop) + 86399
except:
start = 0
stop = 0
return render_template('stats.html', data=db.get_stats(start,stop), date_start=start, date_stop=stop)
## db export
@app.route('/download/', methods=['GET'])
def download():
si = StringIO()
cw = csv.writer(si)
cw.writerows(db.generate_all())
output = make_response(si.getvalue())
output.headers["Content-Disposition"] = "attachment; filename=db_%s.csv" % datetime.datetime.fromtimestamp(time()).strftime("%d_%m_%Y")
output.headers["Content-type"] = "text/csv"
return output
## raport generator
@app.route('/report/', methods=['GET'])
def get_day_report():
t = request.args.get('t')
    if t is None:
        t = time()
        print("t is None")
    else:
        try:
            t = int(t)
            print("t is orig")
        except:
            t = time()
            print("t is Str")
content = render_template('report.txt', songs = db.get_day(t), date=t)
return Response(content, mimetype="text/plain", headers={"Content-disposition":"attachment;filename=report_%s.txt"%datetime.datetime.fromtimestamp(t).strftime("%d_%m_%Y")})
## api methods
"""@app.route('/api/v1/day/', methods=['GET'])
def current_day():
return jsonify({'plays':db.get_day(), })
@app.route('/api/v1/day/<int:timestamp>', methods=['GET'])
def day_from_timestamp(timestamp):
return jsonify({'plays':db.get_day(timestamp)})
@app.route('/api/v1/play/<int:play_id>', methods=['GET'])
def get_play_by_id(play_id):
play = db.get_play_id(play_id)
if play is None:
abort(404)
return jsonify({'play':db.get_play_id(play_id).__dict__})
"""
@app.route('/api/v1/play/', methods=['POST'])
def add_new_play():
    if not request.json or 'DJ' not in request.json or 'song' not in request.json:
abort(400)
play = db.Play(DJ=request.json['DJ'], song=request.json['song'])
play.save()
return jsonify({'status':play.check()})
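# Illustrative request (a sketch, not part of the original API docs; host
# and payload values are assumptions):
#     curl -X POST http://localhost/api/v1/play/ \
#          -H 'Content-Type: application/json' \
#          -d '{"DJ": "someone", "song": "Artist - Title"}'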
@app.route('/api/v1/play/<int:play_id>', methods=['DELETE'])
def del_play_id(play_id):
play = db.get_play_id(play_id)
print(1)
if play is None:
abort(404)
print(2)
play.delete()
return jsonify({'status':play.check()})
@app.route('/api/v1/play/<int:play_id>', methods=['PUT'])
def pul_play_id(play_id):
play = db.get_play_id(play_id)
if play is None:
abort(404)
if 'DJ' in request.json and type(request.json['DJ']) != str:
abort(400)
if 'song' in request.json and type(request.json['song']) != str:
abort(400)
if 'date' in request.json and type(request.json['date']) != int:
abort(400)
play.DJ = request.json.get('DJ', play.DJ)
play.song = request.json.get('song', play.song)
play.date = request.json.get('date', play.date)
play.save()
return jsonify({'status':play.check(), 'play':play.__dict__})
### static files
@app.route('/static/<path:path>')
def send_static_www(path):
return send_from_directory('static', path)
### template_tags
@app.template_filter('display')
def display_date_from_timestamp(ts):
return datetime.datetime.fromtimestamp(ts).strftime("%d.%m.%Y r.")
### other
@app.errorhandler(404)
def not_found(error):
#return make_response(jsonify({'error': 'Not found'}), 404)
return make_response('<center style="font-size:6vh;margin-top:20vh;"> 404 </center>')
if __name__ == '__main__':
print("""PlayHistory Copyright (C) 2017 Paweł Dietrich
This program comes with ABSOLUTELY NO WARRANTY; for details check LICENSE file.
This is free software, and you are welcome to redistribute it
under certain conditions.""")
app.run(debug=DEBUG,host="localhost", port=int("80"))
| gpl-3.0 |
dannyperry571/theapprentice | plugin.video.youtube/resources/lib/kodion/utils/search_history.py | 26 | 1063 | import hashlib
from storage import Storage
from .methods import to_utf8
class SearchHistory(Storage):
def __init__(self, filename, max_items=10):
Storage.__init__(self, filename, max_item_count=max_items)
pass
def is_empty(self):
return self._is_empty()
def list(self):
result = []
keys = self._get_ids(oldest_first=False)
for key in keys:
item = self._get(key)
result.append(item[0])
pass
return result
def clear(self):
self._clear()
pass
def _make_id(self, search_text):
m = hashlib.md5()
m.update(to_utf8(search_text))
return m.hexdigest()
def rename(self, old_search_text, new_search_text):
self.remove(old_search_text)
self.update(new_search_text)
pass
def remove(self, search_text):
self._remove(self._make_id(search_text))
pass
def update(self, search_text):
self._set(self._make_id(search_text), search_text)
pass
pass | gpl-2.0 |
SaturdayNeighborhoodHealthClinic/osler | appointment/migrations/0001_initial.py | 2 | 4147 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2018-08-17 03:20
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
initial = True
dependencies = [
('pttrack', '0005_simplehistory_add_change_reason'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Appointment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('written_datetime', models.DateTimeField(auto_now_add=True)),
('last_modified', models.DateTimeField(auto_now=True)),
('clindate', models.DateField(verbose_name=b'Appointment Date')),
('clintime', models.TimeField(default=datetime.datetime(2018, 8, 17, 9, 0, tzinfo=utc), verbose_name=b'Time of Appointment')),
('appointment_type', models.CharField(choices=[(b'PSYCH_NIGHT', b'Psych Night'), (b'ACUTE_FOLLOWUP', b'Acute Followup'), (b'CHRONIC_CARE', b'Chronic Care')], default=b'CHRONIC_CARE', max_length=15, verbose_name=b'Appointment Type')),
('comment', models.TextField(help_text=b'What should happen at this appointment?')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pttrack.Provider')),
('author_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pttrack.ProviderType')),
('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pttrack.Patient')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='HistoricalAppointment',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('written_datetime', models.DateTimeField(blank=True, editable=False)),
('last_modified', models.DateTimeField(blank=True, editable=False)),
('clindate', models.DateField(verbose_name=b'Appointment Date')),
('clintime', models.TimeField(default=datetime.datetime(2018, 8, 17, 9, 0, tzinfo=utc), verbose_name=b'Time of Appointment')),
('appointment_type', models.CharField(choices=[(b'PSYCH_NIGHT', b'Psych Night'), (b'ACUTE_FOLLOWUP', b'Acute Followup'), (b'CHRONIC_CARE', b'Chronic Care')], default=b'CHRONIC_CARE', max_length=15, verbose_name=b'Appointment Type')),
('comment', models.TextField(help_text=b'What should happen at this appointment?')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('author', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pttrack.Provider')),
('author_type', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pttrack.ProviderType')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('patient', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pttrack.Patient')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical appointment',
},
),
]
| gpl-3.0 |
IntelLabs/numba | numba/cuda/tests/cudapy/test_nondet.py | 5 | 1378 | import numpy as np
from numba import cuda, float32, void
from numba.cuda.testing import unittest, CUDATestCase
def generate_input(n):
A = np.array(np.arange(n * n).reshape(n, n), dtype=np.float32)
B = np.array(np.arange(n) + 0, dtype=A.dtype)
return A, B
class TestCudaNonDet(CUDATestCase):
def test_for_pre(self):
"""Test issue with loop not running due to bad sign-extension at the for loop
precondition.
"""
@cuda.jit(void(float32[:, :], float32[:, :], float32[:]))
def diagproduct(c, a, b):
startX, startY = cuda.grid(2)
gridX = cuda.gridDim.x * cuda.blockDim.x
gridY = cuda.gridDim.y * cuda.blockDim.y
height = c.shape[0]
width = c.shape[1]
for x in range(startX, width, (gridX)):
for y in range(startY, height, (gridY)):
c[y, x] = a[y, x] * b[x]
N = 8
A, B = generate_input(N)
F = np.empty(A.shape, dtype=A.dtype)
blockdim = (32, 8)
griddim = (1, 1)
dA = cuda.to_device(A)
dB = cuda.to_device(B)
dF = cuda.to_device(F, copy=False)
diagproduct[griddim, blockdim](dF, dA, dB)
E = np.dot(A, np.diag(B))
np.testing.assert_array_almost_equal(dF.copy_to_host(), E)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause |
lmtierney/selenium | py/test/unit/selenium/webdriver/edge/edge_options_tests.py | 7 | 1807 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.webdriver.edge.options import Options
@pytest.fixture
def options():
return Options()
def test_raises_exception_with_invalid_page_load_strategy(options):
with pytest.raises(ValueError):
options.page_load_strategy = 'never'
def test_set_page_load_strategy(options):
options.page_load_strategy = 'normal'
assert options._page_load_strategy == 'normal'
def test_get_page_load_strategy(options):
options._page_load_strategy = 'normal'
assert options.page_load_strategy == 'normal'
def test_creates_capabilities(options):
options.page_load_strategy = 'eager'
caps = options.to_capabilities()
assert caps['pageLoadStrategy'] == 'eager'
def test_starts_with_default_capabilities(options):
from selenium.webdriver import DesiredCapabilities
assert options._caps == DesiredCapabilities.EDGE
def test_is_a_baseoptions(options):
from selenium.webdriver.common.options import BaseOptions
assert isinstance(options, BaseOptions)
| apache-2.0 |
gmr/helper | helper/setupext.py | 2 | 2299 | """Add a setuptools command that runs a helper-based application."""
try:
from setuptools import Command
except ImportError:
from distutils.core import Command
try:
from functools import reduce
except ImportError:
pass # use the builtin for py 2.x
from . import parser
from . import platform
class RunCommand(Command):
"""Run a helper-based application.
This extension is installed as a ``distutils.commands``
entry point that provides the *run_helper* command. When
run, it imports a :class:`helper.Controller` subclass by
name, creates a new instance, and runs it in the foreground
until interrupted. The dotted-name of the controller class
and an optional configuration file are provided as command
line parameters.
:param str configuration: the name of a configuration file
to pass to the application *(optional)*
:param str controller: the dotted-name of the Python class
to load and run
"""
description = 'run a helper.Controller'
user_options = [
('configuration=', 'c', 'path to application configuration file'),
('controller=', 'C', 'controller to run'),
]
def initialize_options(self):
"""Initialize parameters."""
self.configuration = None
self.controller = None
def finalize_options(self):
"""Required override that does nothing."""
pass
def run(self):
"""Import the controller and run it.
This mimics the processing done by :func:`helper.start`
when a controller is run in the foreground. A new instance
of ``self.controller`` is created and run until a keyboard
interrupt occurs or the controller stops on its own accord.
"""
segments = self.controller.split('.')
controller_class = reduce(getattr, segments[1:],
__import__('.'.join(segments[:-1])))
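        # e.g. for 'myapp.ctl.Main': __import__('myapp.ctl') returns the
        # top-level package 'myapp', and the getattr chain then walks
        # ['ctl', 'Main'] down to the controller class.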
cmd_line = ['-f']
if self.configuration is not None:
cmd_line.extend(['-c', self.configuration])
args = parser.get().parse_args(cmd_line)
controller_instance = controller_class(args, platform)
try:
controller_instance.start()
except KeyboardInterrupt:
controller_instance.stop()
| bsd-3-clause |
divya-csekar/flask-microblog-server | flask/Lib/site-packages/flask/config.py | 781 | 6234 | # -*- coding: utf-8 -*-
"""
flask.config
~~~~~~~~~~~~
Implements the configuration related objects.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import imp
import os
import errno
from werkzeug.utils import import_string
from ._compat import string_types
class ConfigAttribute(object):
"""Makes an attribute forward to the config"""
def __init__(self, name, get_converter=None):
self.__name__ = name
self.get_converter = get_converter
def __get__(self, obj, type=None):
if obj is None:
return self
rv = obj.config[self.__name__]
if self.get_converter is not None:
rv = self.get_converter(rv)
return rv
def __set__(self, obj, value):
obj.config[self.__name__] = value
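# Illustrative use (a sketch, not from the original source): Flask exposes
# selected config keys as attributes through this descriptor, e.g.
#     class Flask(object):
#         testing = ConfigAttribute('TESTING')
# so reading or assigning app.testing forwards to app.config['TESTING'].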
class Config(dict):
"""Works exactly like a dict but provides ways to fill it from files
or special dictionaries. There are two common patterns to populate the
config.
Either you can fill the config from a config file::
app.config.from_pyfile('yourconfig.cfg')
Or alternatively you can define the configuration options in the
module that calls :meth:`from_object` or provide an import path to
a module that should be loaded. It is also possible to tell it to
use the same module and with that provide the configuration values
just before the call::
DEBUG = True
SECRET_KEY = 'development key'
app.config.from_object(__name__)
In both cases (loading from any Python file or loading from modules),
only uppercase keys are added to the config. This makes it possible to use
lowercase values in the config file for temporary values that are not added
to the config or to define the config keys in the same file that implements
the application.
Probably the most interesting way to load configurations is from an
environment variable pointing to a file::
app.config.from_envvar('YOURAPPLICATION_SETTINGS')
In this case before launching the application you have to set this
environment variable to the file you want to use. On Linux and OS X
use the export statement::
export YOURAPPLICATION_SETTINGS='/path/to/config/file'
On windows use `set` instead.
:param root_path: path to which files are read relative from. When the
config object is created by the application, this is
the application's :attr:`~flask.Flask.root_path`.
:param defaults: an optional dictionary of default values
"""
def __init__(self, root_path, defaults=None):
dict.__init__(self, defaults or {})
self.root_path = root_path
def from_envvar(self, variable_name, silent=False):
"""Loads a configuration from an environment variable pointing to
a configuration file. This is basically just a shortcut with nicer
error messages for this line of code::
app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])
:param variable_name: name of the environment variable
:param silent: set to `True` if you want silent failure for missing
files.
:return: bool. `True` if able to load config, `False` otherwise.
"""
rv = os.environ.get(variable_name)
if not rv:
if silent:
return False
raise RuntimeError('The environment variable %r is not set '
'and as such configuration could not be '
'loaded. Set this variable and make it '
'point to a configuration file' %
variable_name)
return self.from_pyfile(rv, silent=silent)
def from_pyfile(self, filename, silent=False):
"""Updates the values in the config from a Python file. This function
behaves as if the file was imported as module with the
:meth:`from_object` function.
:param filename: the filename of the config. This can either be an
absolute filename or a filename relative to the
root path.
:param silent: set to `True` if you want silent failure for missing
files.
.. versionadded:: 0.7
`silent` parameter.
"""
filename = os.path.join(self.root_path, filename)
d = imp.new_module('config')
d.__file__ = filename
try:
with open(filename) as config_file:
exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise
self.from_object(d)
return True
def from_object(self, obj):
"""Updates the values from the given object. An object can be of one
of the following two types:
- a string: in this case the object with that name will be imported
- an actual object reference: that object is used directly
Objects are usually either modules or classes.
Just the uppercase variables in that object are stored in the config.
Example usage::
app.config.from_object('yourapplication.default_config')
from yourapplication import default_config
app.config.from_object(default_config)
You should not use this function to load the actual configuration but
rather configuration defaults. The actual config should be loaded
with :meth:`from_pyfile` and ideally from a location not within the
package because the package might be installed system wide.
:param obj: an import name or object
"""
if isinstance(obj, string_types):
obj = import_string(obj)
for key in dir(obj):
if key.isupper():
self[key] = getattr(obj, key)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
| bsd-3-clause |
smallyear/linuxLearn | salt/salt/states/keyboard.py | 3 | 2081 | # -*- coding: utf-8 -*-
'''
Management of keyboard layouts
==============================
The keyboard layout can be managed for the system:
.. code-block:: yaml
us:
keyboard.system
Or it can be managed for XOrg:
.. code-block:: yaml
us:
keyboard.xorg
'''
def __virtual__():
'''
Only load if the keyboard module is available in __salt__
'''
return 'keyboard.get_sys' in __salt__
def system(name):
'''
Set the keyboard layout for the system
name
The keyboard layout to use
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
if __salt__['keyboard.get_sys']() == name:
ret['result'] = True
ret['comment'] = 'System layout {0} already set'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'System layout {0} needs to be set'.format(name)
return ret
if __salt__['keyboard.set_sys'](name):
ret['changes'] = {'layout': name}
ret['result'] = True
ret['comment'] = 'Set system keyboard layout {0}'.format(name)
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set system keyboard layout'
return ret
def xorg(name):
'''
Set the keyboard layout for XOrg
layout
The keyboard layout to use
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
if __salt__['keyboard.get_x']() == name:
ret['result'] = True
ret['comment'] = 'XOrg layout {0} already set'.format(name)
return ret
if __opts__['test']:
ret['comment'] = 'XOrg layout {0} needs to be set'.format(name)
return ret
if __salt__['keyboard.set_x'](name):
ret['changes'] = {'layout': name}
ret['result'] = True
ret['comment'] = 'Set XOrg keyboard layout {0}'.format(name)
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set XOrg keyboard layout'
return ret
| apache-2.0 |
newerthcom/savagerebirth | libs/python-2.72/Lib/plat-mac/Carbon/CarbonEvents.py | 81 | 17904 | # Generated from 'CarbonEvents.h'
def FOUR_CHAR_CODE(x): return x
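# FOUR_CHAR_CODE is the identity function here: the generated constants below
# simply keep the literal four-character tags from the C header, e.g.
# FOUR_CHAR_CODE('mous') == 'mous'.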
false = 0
true = 1
keyAEEventClass = FOUR_CHAR_CODE('evcl')
keyAEEventID = FOUR_CHAR_CODE('evti')
eventAlreadyPostedErr = -9860
eventTargetBusyErr = -9861
eventClassInvalidErr = -9862
eventClassIncorrectErr = -9864
eventHandlerAlreadyInstalledErr = -9866
eventInternalErr = -9868
eventKindIncorrectErr = -9869
eventParameterNotFoundErr = -9870
eventNotHandledErr = -9874
eventLoopTimedOutErr = -9875
eventLoopQuitErr = -9876
eventNotInQueueErr = -9877
eventHotKeyExistsErr = -9878
eventHotKeyInvalidErr = -9879
kEventPriorityLow = 0
kEventPriorityStandard = 1
kEventPriorityHigh = 2
kEventLeaveInQueue = false
kEventRemoveFromQueue = true
kTrackMouseLocationOptionDontConsumeMouseUp = (1 << 0)
kMouseTrackingMouseDown = 1
kMouseTrackingMouseUp = 2
kMouseTrackingMouseExited = 3
kMouseTrackingMouseEntered = 4
kMouseTrackingMouseDragged = 5
kMouseTrackingKeyModifiersChanged = 6
kMouseTrackingUserCancelled = 7
kMouseTrackingTimedOut = 8
kMouseTrackingMouseMoved = 9
kEventAttributeNone = 0
kEventAttributeUserEvent = (1 << 0)
kEventClassMouse = FOUR_CHAR_CODE('mous')
kEventClassKeyboard = FOUR_CHAR_CODE('keyb')
kEventClassTextInput = FOUR_CHAR_CODE('text')
kEventClassApplication = FOUR_CHAR_CODE('appl')
kEventClassAppleEvent = FOUR_CHAR_CODE('eppc')
kEventClassMenu = FOUR_CHAR_CODE('menu')
kEventClassWindow = FOUR_CHAR_CODE('wind')
kEventClassControl = FOUR_CHAR_CODE('cntl')
kEventClassCommand = FOUR_CHAR_CODE('cmds')
kEventClassTablet = FOUR_CHAR_CODE('tblt')
kEventClassVolume = FOUR_CHAR_CODE('vol ')
kEventClassAppearance = FOUR_CHAR_CODE('appm')
kEventClassService = FOUR_CHAR_CODE('serv')
kEventMouseDown = 1
kEventMouseUp = 2
kEventMouseMoved = 5
kEventMouseDragged = 6
kEventMouseWheelMoved = 10
kEventMouseButtonPrimary = 1
kEventMouseButtonSecondary = 2
kEventMouseButtonTertiary = 3
kEventMouseWheelAxisX = 0
kEventMouseWheelAxisY = 1
kEventTextInputUpdateActiveInputArea = 1
kEventTextInputUnicodeForKeyEvent = 2
kEventTextInputOffsetToPos = 3
kEventTextInputPosToOffset = 4
kEventTextInputShowHideBottomWindow = 5
kEventTextInputGetSelectedText = 6
kEventRawKeyDown = 1
kEventRawKeyRepeat = 2
kEventRawKeyUp = 3
kEventRawKeyModifiersChanged = 4
kEventHotKeyPressed = 5
kEventHotKeyReleased = 6
kEventKeyModifierNumLockBit = 16
kEventKeyModifierFnBit = 17
kEventKeyModifierNumLockMask = 1L << kEventKeyModifierNumLockBit
kEventKeyModifierFnMask = 1L << kEventKeyModifierFnBit
kEventAppActivated = 1
kEventAppDeactivated = 2
kEventAppQuit = 3
kEventAppLaunchNotification = 4
kEventAppLaunched = 5
kEventAppTerminated = 6
kEventAppFrontSwitched = 7
kEventAppGetDockTileMenu = 20
kEventAppleEvent = 1
kEventWindowUpdate = 1
kEventWindowDrawContent = 2
kEventWindowActivated = 5
kEventWindowDeactivated = 6
kEventWindowGetClickActivation = 7
kEventWindowShowing = 22
kEventWindowHiding = 23
kEventWindowShown = 24
kEventWindowHidden = 25
kEventWindowCollapsing = 86
kEventWindowCollapsed = 67
kEventWindowExpanding = 87
kEventWindowExpanded = 70
kEventWindowZoomed = 76
kEventWindowBoundsChanging = 26
kEventWindowBoundsChanged = 27
kEventWindowResizeStarted = 28
kEventWindowResizeCompleted = 29
kEventWindowDragStarted = 30
kEventWindowDragCompleted = 31
kEventWindowClosed = 73
kWindowBoundsChangeUserDrag = (1 << 0)
kWindowBoundsChangeUserResize = (1 << 1)
kWindowBoundsChangeSizeChanged = (1 << 2)
kWindowBoundsChangeOriginChanged = (1 << 3)
kWindowBoundsChangeZoom = (1 << 4)
kEventWindowClickDragRgn = 32
kEventWindowClickResizeRgn = 33
kEventWindowClickCollapseRgn = 34
kEventWindowClickCloseRgn = 35
kEventWindowClickZoomRgn = 36
kEventWindowClickContentRgn = 37
kEventWindowClickProxyIconRgn = 38
kEventWindowClickToolbarButtonRgn = 41
kEventWindowClickStructureRgn = 42
kEventWindowCursorChange = 40
kEventWindowCollapse = 66
kEventWindowCollapseAll = 68
kEventWindowExpand = 69
kEventWindowExpandAll = 71
kEventWindowClose = 72
kEventWindowCloseAll = 74
kEventWindowZoom = 75
kEventWindowZoomAll = 77
kEventWindowContextualMenuSelect = 78
kEventWindowPathSelect = 79
kEventWindowGetIdealSize = 80
kEventWindowGetMinimumSize = 81
kEventWindowGetMaximumSize = 82
kEventWindowConstrain = 83
kEventWindowHandleContentClick = 85
kEventWindowProxyBeginDrag = 128
kEventWindowProxyEndDrag = 129
kEventWindowToolbarSwitchMode = 150
kDockChangedUser = 1
kDockChangedOrientation = 2
kDockChangedAutohide = 3
kDockChangedDisplay = 4
kDockChangedItems = 5
kDockChangedUnknown = 6
kEventWindowFocusAcquired = 200
kEventWindowFocusRelinquish = 201
kEventWindowDrawFrame = 1000
kEventWindowDrawPart = 1001
kEventWindowGetRegion = 1002
kEventWindowHitTest = 1003
kEventWindowInit = 1004
kEventWindowDispose = 1005
kEventWindowDragHilite = 1006
kEventWindowModified = 1007
kEventWindowSetupProxyDragImage = 1008
kEventWindowStateChanged = 1009
kEventWindowMeasureTitle = 1010
kEventWindowDrawGrowBox = 1011
kEventWindowGetGrowImageRegion = 1012
kEventWindowPaint = 1013
kEventMenuBeginTracking = 1
kEventMenuEndTracking = 2
kEventMenuChangeTrackingMode = 3
kEventMenuOpening = 4
kEventMenuClosed = 5
kEventMenuTargetItem = 6
kEventMenuMatchKey = 7
kEventMenuEnableItems = 8
kEventMenuPopulate = 9
kEventMenuMeasureItemWidth = 100
kEventMenuMeasureItemHeight = 101
kEventMenuDrawItem = 102
kEventMenuDrawItemContent = 103
kEventMenuDispose = 1001
kMenuContextMenuBar = 1 << 0
kMenuContextPullDown = 1 << 8
kMenuContextPopUp = 1 << 9
kMenuContextSubmenu = 1 << 10
kMenuContextMenuBarTracking = 1 << 16
kMenuContextPopUpTracking = 1 << 17
kMenuContextKeyMatching = 1 << 18
kMenuContextMenuEnabling = 1 << 19
kMenuContextCommandIDSearch = 1 << 20
kEventProcessCommand = 1
kEventCommandProcess = 1
kEventCommandUpdateStatus = 2
kHICommandOK = FOUR_CHAR_CODE('ok ')
kHICommandCancel = FOUR_CHAR_CODE('not!')
kHICommandQuit = FOUR_CHAR_CODE('quit')
kHICommandUndo = FOUR_CHAR_CODE('undo')
kHICommandRedo = FOUR_CHAR_CODE('redo')
kHICommandCut = FOUR_CHAR_CODE('cut ')
kHICommandCopy = FOUR_CHAR_CODE('copy')
kHICommandPaste = FOUR_CHAR_CODE('past')
kHICommandClear = FOUR_CHAR_CODE('clea')
kHICommandSelectAll = FOUR_CHAR_CODE('sall')
kHICommandHide = FOUR_CHAR_CODE('hide')
kHICommandHideOthers = FOUR_CHAR_CODE('hido')
kHICommandShowAll = FOUR_CHAR_CODE('shal')
kHICommandPreferences = FOUR_CHAR_CODE('pref')
kHICommandZoomWindow = FOUR_CHAR_CODE('zoom')
kHICommandMinimizeWindow = FOUR_CHAR_CODE('mini')
kHICommandMinimizeAll = FOUR_CHAR_CODE('mina')
kHICommandMaximizeWindow = FOUR_CHAR_CODE('maxi')
kHICommandMaximizeAll = FOUR_CHAR_CODE('maxa')
kHICommandArrangeInFront = FOUR_CHAR_CODE('frnt')
kHICommandBringAllToFront = FOUR_CHAR_CODE('bfrt')
kHICommandWindowListSeparator = FOUR_CHAR_CODE('wldv')
kHICommandWindowListTerminator = FOUR_CHAR_CODE('wlst')
kHICommandSelectWindow = FOUR_CHAR_CODE('swin')
kHICommandAbout = FOUR_CHAR_CODE('abou')
kHICommandNew = FOUR_CHAR_CODE('new ')
kHICommandOpen = FOUR_CHAR_CODE('open')
kHICommandClose = FOUR_CHAR_CODE('clos')
kHICommandSave = FOUR_CHAR_CODE('save')
kHICommandSaveAs = FOUR_CHAR_CODE('svas')
kHICommandRevert = FOUR_CHAR_CODE('rvrt')
kHICommandPrint = FOUR_CHAR_CODE('prnt')
kHICommandPageSetup = FOUR_CHAR_CODE('page')
kHICommandAppHelp = FOUR_CHAR_CODE('ahlp')
kHICommandFromMenu = (1L << 0)
kHICommandFromControl = (1L << 1)
kHICommandFromWindow = (1L << 2)
kEventControlInitialize = 1000
kEventControlDispose = 1001
kEventControlGetOptimalBounds = 1003
kEventControlDefInitialize = kEventControlInitialize
kEventControlDefDispose = kEventControlDispose
kEventControlHit = 1
kEventControlSimulateHit = 2
kEventControlHitTest = 3
kEventControlDraw = 4
kEventControlApplyBackground = 5
kEventControlApplyTextColor = 6
kEventControlSetFocusPart = 7
kEventControlGetFocusPart = 8
kEventControlActivate = 9
kEventControlDeactivate = 10
kEventControlSetCursor = 11
kEventControlContextualMenuClick = 12
kEventControlClick = 13
kEventControlTrack = 51
kEventControlGetScrollToHereStartPoint = 52
kEventControlGetIndicatorDragConstraint = 53
kEventControlIndicatorMoved = 54
kEventControlGhostingFinished = 55
kEventControlGetActionProcPart = 56
kEventControlGetPartRegion = 101
kEventControlGetPartBounds = 102
kEventControlSetData = 103
kEventControlGetData = 104
kEventControlValueFieldChanged = 151
kEventControlAddedSubControl = 152
kEventControlRemovingSubControl = 153
kEventControlBoundsChanged = 154
kEventControlOwningWindowChanged = 159
kEventControlArbitraryMessage = 201
kControlBoundsChangeSizeChanged = (1 << 2)
kControlBoundsChangePositionChanged = (1 << 3)
kEventTabletPoint = 1
kEventTabletProximity = 2
kEventTabletPointer = 1
kEventVolumeMounted = 1
kEventVolumeUnmounted = 2
typeFSVolumeRefNum = FOUR_CHAR_CODE('voln')
kEventAppearanceScrollBarVariantChanged = 1
kEventServiceCopy = 1
kEventServicePaste = 2
kEventServiceGetTypes = 3
kEventServicePerform = 4
kEventParamDirectObject = FOUR_CHAR_CODE('----')
kEventParamPostTarget = FOUR_CHAR_CODE('ptrg')
typeEventTargetRef = FOUR_CHAR_CODE('etrg')
kEventParamWindowRef = FOUR_CHAR_CODE('wind')
kEventParamGrafPort = FOUR_CHAR_CODE('graf')
kEventParamDragRef = FOUR_CHAR_CODE('drag')
kEventParamMenuRef = FOUR_CHAR_CODE('menu')
kEventParamEventRef = FOUR_CHAR_CODE('evnt')
kEventParamControlRef = FOUR_CHAR_CODE('ctrl')
kEventParamRgnHandle = FOUR_CHAR_CODE('rgnh')
kEventParamEnabled = FOUR_CHAR_CODE('enab')
kEventParamDimensions = FOUR_CHAR_CODE('dims')
kEventParamAvailableBounds = FOUR_CHAR_CODE('avlb')
kEventParamAEEventID = keyAEEventID
kEventParamAEEventClass = keyAEEventClass
kEventParamCGContextRef = FOUR_CHAR_CODE('cntx')
kEventParamDeviceDepth = FOUR_CHAR_CODE('devd')
kEventParamDeviceColor = FOUR_CHAR_CODE('devc')
typeWindowRef = FOUR_CHAR_CODE('wind')
typeGrafPtr = FOUR_CHAR_CODE('graf')
typeGWorldPtr = FOUR_CHAR_CODE('gwld')
typeDragRef = FOUR_CHAR_CODE('drag')
typeMenuRef = FOUR_CHAR_CODE('menu')
typeControlRef = FOUR_CHAR_CODE('ctrl')
typeCollection = FOUR_CHAR_CODE('cltn')
typeQDRgnHandle = FOUR_CHAR_CODE('rgnh')
typeOSStatus = FOUR_CHAR_CODE('osst')
typeCFStringRef = FOUR_CHAR_CODE('cfst')
typeCFIndex = FOUR_CHAR_CODE('cfix')
typeCFTypeRef = FOUR_CHAR_CODE('cfty')
typeCGContextRef = FOUR_CHAR_CODE('cntx')
typeHIPoint = FOUR_CHAR_CODE('hipt')
typeHISize = FOUR_CHAR_CODE('hisz')
typeHIRect = FOUR_CHAR_CODE('hirc')
kEventParamMouseLocation = FOUR_CHAR_CODE('mloc')
kEventParamMouseButton = FOUR_CHAR_CODE('mbtn')
kEventParamClickCount = FOUR_CHAR_CODE('ccnt')
kEventParamMouseWheelAxis = FOUR_CHAR_CODE('mwax')
kEventParamMouseWheelDelta = FOUR_CHAR_CODE('mwdl')
kEventParamMouseDelta = FOUR_CHAR_CODE('mdta')
kEventParamMouseChord = FOUR_CHAR_CODE('chor')
kEventParamTabletEventType = FOUR_CHAR_CODE('tblt')
typeMouseButton = FOUR_CHAR_CODE('mbtn')
typeMouseWheelAxis = FOUR_CHAR_CODE('mwax')
kEventParamKeyCode = FOUR_CHAR_CODE('kcod')
kEventParamKeyMacCharCodes = FOUR_CHAR_CODE('kchr')
kEventParamKeyModifiers = FOUR_CHAR_CODE('kmod')
kEventParamKeyUnicodes = FOUR_CHAR_CODE('kuni')
kEventParamKeyboardType = FOUR_CHAR_CODE('kbdt')
typeEventHotKeyID = FOUR_CHAR_CODE('hkid')
kEventParamTextInputSendRefCon = FOUR_CHAR_CODE('tsrc')
kEventParamTextInputSendComponentInstance = FOUR_CHAR_CODE('tsci')
kEventParamTextInputSendSLRec = FOUR_CHAR_CODE('tssl')
kEventParamTextInputReplySLRec = FOUR_CHAR_CODE('trsl')
kEventParamTextInputSendText = FOUR_CHAR_CODE('tstx')
kEventParamTextInputReplyText = FOUR_CHAR_CODE('trtx')
kEventParamTextInputSendUpdateRng = FOUR_CHAR_CODE('tsup')
kEventParamTextInputSendHiliteRng = FOUR_CHAR_CODE('tshi')
kEventParamTextInputSendClauseRng = FOUR_CHAR_CODE('tscl')
kEventParamTextInputSendPinRng = FOUR_CHAR_CODE('tspn')
kEventParamTextInputSendFixLen = FOUR_CHAR_CODE('tsfx')
kEventParamTextInputSendLeadingEdge = FOUR_CHAR_CODE('tsle')
kEventParamTextInputReplyLeadingEdge = FOUR_CHAR_CODE('trle')
kEventParamTextInputSendTextOffset = FOUR_CHAR_CODE('tsto')
kEventParamTextInputReplyTextOffset = FOUR_CHAR_CODE('trto')
kEventParamTextInputReplyRegionClass = FOUR_CHAR_CODE('trrg')
kEventParamTextInputSendCurrentPoint = FOUR_CHAR_CODE('tscp')
kEventParamTextInputSendDraggingMode = FOUR_CHAR_CODE('tsdm')
kEventParamTextInputReplyPoint = FOUR_CHAR_CODE('trpt')
kEventParamTextInputReplyFont = FOUR_CHAR_CODE('trft')
kEventParamTextInputReplyFMFont = FOUR_CHAR_CODE('trfm')
kEventParamTextInputReplyPointSize = FOUR_CHAR_CODE('trpz')
kEventParamTextInputReplyLineHeight = FOUR_CHAR_CODE('trlh')
kEventParamTextInputReplyLineAscent = FOUR_CHAR_CODE('trla')
kEventParamTextInputReplyTextAngle = FOUR_CHAR_CODE('trta')
kEventParamTextInputSendShowHide = FOUR_CHAR_CODE('tssh')
kEventParamTextInputReplyShowHide = FOUR_CHAR_CODE('trsh')
kEventParamTextInputSendKeyboardEvent = FOUR_CHAR_CODE('tske')
kEventParamTextInputSendTextServiceEncoding = FOUR_CHAR_CODE('tsse')
kEventParamTextInputSendTextServiceMacEncoding = FOUR_CHAR_CODE('tssm')
kEventParamHICommand = FOUR_CHAR_CODE('hcmd')
typeHICommand = FOUR_CHAR_CODE('hcmd')
kEventParamWindowFeatures = FOUR_CHAR_CODE('wftr')
kEventParamWindowDefPart = FOUR_CHAR_CODE('wdpc')
kEventParamCurrentBounds = FOUR_CHAR_CODE('crct')
kEventParamOriginalBounds = FOUR_CHAR_CODE('orct')
kEventParamPreviousBounds = FOUR_CHAR_CODE('prct')
kEventParamClickActivation = FOUR_CHAR_CODE('clac')
kEventParamWindowRegionCode = FOUR_CHAR_CODE('wshp')
kEventParamWindowDragHiliteFlag = FOUR_CHAR_CODE('wdhf')
kEventParamWindowModifiedFlag = FOUR_CHAR_CODE('wmff')
kEventParamWindowProxyGWorldPtr = FOUR_CHAR_CODE('wpgw')
kEventParamWindowProxyImageRgn = FOUR_CHAR_CODE('wpir')
kEventParamWindowProxyOutlineRgn = FOUR_CHAR_CODE('wpor')
kEventParamWindowStateChangedFlags = FOUR_CHAR_CODE('wscf')
kEventParamWindowTitleFullWidth = FOUR_CHAR_CODE('wtfw')
kEventParamWindowTitleTextWidth = FOUR_CHAR_CODE('wttw')
kEventParamWindowGrowRect = FOUR_CHAR_CODE('grct')
kEventParamAttributes = FOUR_CHAR_CODE('attr')
kEventParamDockChangedReason = FOUR_CHAR_CODE('dcrs')
kEventParamPreviousDockRect = FOUR_CHAR_CODE('pdrc')
kEventParamCurrentDockRect = FOUR_CHAR_CODE('cdrc')
typeWindowRegionCode = FOUR_CHAR_CODE('wshp')
typeWindowDefPartCode = FOUR_CHAR_CODE('wdpt')
typeClickActivationResult = FOUR_CHAR_CODE('clac')
kEventParamControlPart = FOUR_CHAR_CODE('cprt')
kEventParamInitCollection = FOUR_CHAR_CODE('icol')
kEventParamControlMessage = FOUR_CHAR_CODE('cmsg')
kEventParamControlParam = FOUR_CHAR_CODE('cprm')
kEventParamControlResult = FOUR_CHAR_CODE('crsl')
kEventParamControlRegion = FOUR_CHAR_CODE('crgn')
kEventParamControlAction = FOUR_CHAR_CODE('caup')
kEventParamControlIndicatorDragConstraint = FOUR_CHAR_CODE('cidc')
kEventParamControlIndicatorRegion = FOUR_CHAR_CODE('cirn')
kEventParamControlIsGhosting = FOUR_CHAR_CODE('cgst')
kEventParamControlIndicatorOffset = FOUR_CHAR_CODE('ciof')
kEventParamControlClickActivationResult = FOUR_CHAR_CODE('ccar')
kEventParamControlSubControl = FOUR_CHAR_CODE('csub')
kEventParamControlOptimalBounds = FOUR_CHAR_CODE('cobn')
kEventParamControlOptimalBaselineOffset = FOUR_CHAR_CODE('cobo')
kEventParamControlDataTag = FOUR_CHAR_CODE('cdtg')
kEventParamControlDataBuffer = FOUR_CHAR_CODE('cdbf')
kEventParamControlDataBufferSize = FOUR_CHAR_CODE('cdbs')
kEventParamControlDrawDepth = FOUR_CHAR_CODE('cddp')
kEventParamControlDrawInColor = FOUR_CHAR_CODE('cdic')
kEventParamControlFeatures = FOUR_CHAR_CODE('cftr')
kEventParamControlPartBounds = FOUR_CHAR_CODE('cpbd')
kEventParamControlOriginalOwningWindow = FOUR_CHAR_CODE('coow')
kEventParamControlCurrentOwningWindow = FOUR_CHAR_CODE('ccow')
typeControlActionUPP = FOUR_CHAR_CODE('caup')
typeIndicatorDragConstraint = FOUR_CHAR_CODE('cidc')
typeControlPartCode = FOUR_CHAR_CODE('cprt')
kEventParamCurrentMenuTrackingMode = FOUR_CHAR_CODE('cmtm')
kEventParamNewMenuTrackingMode = FOUR_CHAR_CODE('nmtm')
kEventParamMenuFirstOpen = FOUR_CHAR_CODE('1sto')
kEventParamMenuItemIndex = FOUR_CHAR_CODE('item')
kEventParamMenuCommand = FOUR_CHAR_CODE('mcmd')
kEventParamEnableMenuForKeyEvent = FOUR_CHAR_CODE('fork')
kEventParamMenuEventOptions = FOUR_CHAR_CODE('meop')
kEventParamMenuContext = FOUR_CHAR_CODE('mctx')
kEventParamMenuItemBounds = FOUR_CHAR_CODE('mitb')
kEventParamMenuMarkBounds = FOUR_CHAR_CODE('mmkb')
kEventParamMenuIconBounds = FOUR_CHAR_CODE('micb')
kEventParamMenuTextBounds = FOUR_CHAR_CODE('mtxb')
kEventParamMenuTextBaseline = FOUR_CHAR_CODE('mtbl')
kEventParamMenuCommandKeyBounds = FOUR_CHAR_CODE('mcmb')
kEventParamMenuVirtualTop = FOUR_CHAR_CODE('mvrt')
kEventParamMenuVirtualBottom = FOUR_CHAR_CODE('mvrb')
kEventParamMenuDrawState = FOUR_CHAR_CODE('mdrs')
kEventParamMenuItemType = FOUR_CHAR_CODE('mitp')
kEventParamMenuItemWidth = FOUR_CHAR_CODE('mitw')
kEventParamMenuItemHeight = FOUR_CHAR_CODE('mith')
typeMenuItemIndex = FOUR_CHAR_CODE('midx')
typeMenuCommand = FOUR_CHAR_CODE('mcmd')
typeMenuTrackingMode = FOUR_CHAR_CODE('mtmd')
typeMenuEventOptions = FOUR_CHAR_CODE('meop')
typeThemeMenuState = FOUR_CHAR_CODE('tmns')
typeThemeMenuItemType = FOUR_CHAR_CODE('tmit')
kEventParamProcessID = FOUR_CHAR_CODE('psn ')
kEventParamLaunchRefCon = FOUR_CHAR_CODE('lref')
kEventParamLaunchErr = FOUR_CHAR_CODE('err ')
kEventParamTabletPointRec = FOUR_CHAR_CODE('tbrc')
kEventParamTabletProximityRec = FOUR_CHAR_CODE('tbpx')
typeTabletPointRec = FOUR_CHAR_CODE('tbrc')
typeTabletProximityRec = FOUR_CHAR_CODE('tbpx')
kEventParamTabletPointerRec = FOUR_CHAR_CODE('tbrc')
typeTabletPointerRec = FOUR_CHAR_CODE('tbrc')
kEventParamNewScrollBarVariant = FOUR_CHAR_CODE('nsbv')
kEventParamScrapRef = FOUR_CHAR_CODE('scrp')
kEventParamServiceCopyTypes = FOUR_CHAR_CODE('svsd')
kEventParamServicePasteTypes = FOUR_CHAR_CODE('svpt')
kEventParamServiceMessageName = FOUR_CHAR_CODE('svmg')
kEventParamServiceUserData = FOUR_CHAR_CODE('svud')
typeScrapRef = FOUR_CHAR_CODE('scrp')
typeCFMutableArrayRef = FOUR_CHAR_CODE('cfma')
# sHandler = NewEventHandlerUPP( x )
kMouseTrackingMousePressed = kMouseTrackingMouseDown
kMouseTrackingMouseReleased = kMouseTrackingMouseUp
| gpl-2.0 |
angelapper/edx-platform | lms/djangoapps/survey/tests/test_utils.py | 8 | 4057 | """
Python tests for the survey utility functions
"""
from collections import OrderedDict
from django.contrib.auth.models import User
from django.test.client import Client
from survey.models import SurveyForm
from survey.utils import is_survey_required_for_course, is_survey_required_and_unanswered
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class SurveyModelsTests(ModuleStoreTestCase):
"""
All tests for the utils.py file
"""
def setUp(self):
"""
Set up the test data used in the specific tests
"""
super(SurveyModelsTests, self).setUp()
self.client = Client()
# Create three accounts: two students and one staff user
self.password = 'abc'
self.student = User.objects.create_user('student', '[email protected]', self.password)
self.student2 = User.objects.create_user('student2', '[email protected]', self.password)
self.staff = User.objects.create_user('staff', '[email protected]', self.password)
self.staff.is_staff = True
self.staff.save()
self.test_survey_name = 'TestSurvey'
self.test_form = '<input name="foo"></input>'
self.student_answers = OrderedDict({
'field1': 'value1',
'field2': 'value2',
})
self.student2_answers = OrderedDict({
'field1': 'value3'
})
self.course = CourseFactory.create(
course_survey_required=True,
course_survey_name=self.test_survey_name
)
self.survey = SurveyForm.create(self.test_survey_name, self.test_form)
def test_is_survey_required_for_course(self):
"""
Assert that a course survey is required when both the flag is set and a
survey name is set on the course descriptor
"""
self.assertTrue(is_survey_required_for_course(self.course))
def test_is_survey_not_required_for_course(self):
"""
Assert that if various data is not available or if the survey is not found
then the survey is not considered required
"""
course = CourseFactory.create()
self.assertFalse(is_survey_required_for_course(course))
course = CourseFactory.create(
course_survey_required=False
)
self.assertFalse(is_survey_required_for_course(course))
course = CourseFactory.create(
course_survey_required=True,
course_survey_name="NonExisting"
)
self.assertFalse(is_survey_required_for_course(course))
course = CourseFactory.create(
course_survey_required=False,
course_survey_name=self.test_survey_name
)
self.assertFalse(is_survey_required_for_course(course))
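# In short: the survey is required only when course_survey_required is True
# and course_survey_name names an existing SurveyForm; every other
# combination of flag and name is treated as not required.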
def test_user_not_yet_answered_required_survey(self):
"""
Assert that a course with a required survey reports it as unanswered when the user has not yet responded
"""
self.assertTrue(is_survey_required_and_unanswered(self.student, self.course))
temp_course = CourseFactory.create(
course_survey_required=False
)
self.assertFalse(is_survey_required_and_unanswered(self.student, temp_course))
temp_course = CourseFactory.create(
course_survey_required=True,
course_survey_name="NonExisting"
)
self.assertFalse(is_survey_required_and_unanswered(self.student, temp_course))
def test_user_has_answered_required_survey(self):
"""
Assert that a required course survey is no longer flagged once the user has submitted answers for it
"""
self.survey.save_user_answers(self.student, self.student_answers, None)
self.assertFalse(is_survey_required_and_unanswered(self.student, self.course))
def test_staff_does_not_need_to_answer_survey(self):
"""
Assert that someone with staff level permissions does not have to answer the survey
"""
self.assertFalse(is_survey_required_and_unanswered(self.staff, self.course))
| agpl-3.0 |
glovebx/zulip | zilencer/management/commands/populate_db.py | 113 | 34944 | from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.utils.timezone import now
from django.contrib.sites.models import Site
from zerver.models import Message, UserProfile, Stream, Recipient, Client, \
Subscription, Huddle, get_huddle, Realm, UserMessage, \
get_huddle_hash, clear_database, get_client, get_user_profile_by_id, \
split_email_to_domain, email_to_username
from zerver.lib.actions import do_send_message, set_default_streams, \
do_activate_user, do_deactivate_user, do_change_password, do_change_is_admin
from zerver.lib.parallel import run_parallel
from django.db.models import Count
from django.conf import settings
from zerver.lib.bulk_create import bulk_create_realms, \
bulk_create_streams, bulk_create_users, bulk_create_huddles, \
bulk_create_clients
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import MAX_MESSAGE_LENGTH
from zerver.models import DefaultStream, get_stream
from zilencer.models import Deployment
import ujson
import datetime
import random
import glob
import os
from optparse import make_option
settings.TORNADO_SERVER = None
def create_users(realms, name_list, bot=False):
user_set = set()
for full_name, email in name_list:
short_name = email_to_username(email)
user_set.add((email, full_name, short_name, True))
bulk_create_users(realms, user_set, bot)
def create_streams(realms, realm, stream_list):
stream_set = set()
for stream_name in stream_list:
stream_set.add((realm.domain, stream_name))
bulk_create_streams(realms, stream_set)
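# For example, create_streams(realms, zulip_realm, ['Verona']) hands
# bulk_create_streams the set {(zulip_realm.domain, 'Verona')}: streams are
# keyed by (domain, name) pairs.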
class Command(BaseCommand):
help = "Populate a test database"
option_list = BaseCommand.option_list + (
make_option('-n', '--num-messages',
dest='num_messages',
type='int',
default=600,
help='The number of messages to create.'),
make_option('--extra-users',
dest='extra_users',
type='int',
default=0,
help='The number of extra users to create'),
make_option('--huddles',
dest='num_huddles',
type='int',
default=3,
help='The number of huddles to create.'),
make_option('--personals',
dest='num_personals',
type='int',
default=6,
help='The number of personal pairs to create.'),
make_option('--threads',
dest='threads',
type='int',
default=10,
help='The number of threads to use.'),
make_option('--percent-huddles',
dest='percent_huddles',
type='float',
default=15,
help='The percent of messages to be huddles.'),
make_option('--percent-personals',
dest='percent_personals',
type='float',
default=15,
help='The percent of messages to be personals.'),
make_option('--stickyness',
dest='stickyness',
type='float',
default=20,
help='The percent of messages to repeat recent folks.'),
make_option('--nodelete',
action="store_false",
default=True,
dest='delete',
help='Whether to delete all the existing messages.'),
make_option('--test-suite',
default=False,
action="store_true",
help='Whether to configure the database for the test suite.'),
make_option('--replay-old-messages',
action="store_true",
default=False,
dest='replay_old_messages',
help='Whether to replay the log of old messages.'),
)
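# Example invocation (hypothetical numbers):
#   python manage.py populate_db -n 1000 --extra-users=5 --threads=4 --huddles=2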
def handle(self, **options):
if options["percent_huddles"] + options["percent_personals"] > 100:
self.stderr.write("Error! More than 100% of messages allocated.\n")
return
if options["delete"]:
# Start by clearing all the data in our database
clear_database()
# Create our two default realms
zulip_realm = Realm.objects.create(domain="zulip.com", name="Zulip Dev")
if options["test_suite"]:
Realm.objects.create(domain="mit.edu")
realms = {}
for realm in Realm.objects.all():
realms[realm.domain] = realm
# Create test Users (UserProfiles are automatically created,
# as are the subscriptions that let them receive personals).
names = [("Othello, the Moor of Venice", "[email protected]"), ("Iago", "[email protected]"),
("Prospero from The Tempest", "[email protected]"),
("Cordelia Lear", "[email protected]"), ("King Hamlet", "[email protected]")]
for i in xrange(options["extra_users"]):
names.append(('Extra User %d' % (i,), 'extrauser%[email protected]' % (i,)))
create_users(realms, names)
iago = UserProfile.objects.get(email="[email protected]")
do_change_is_admin(iago, True)
# Create public streams.
stream_list = ["Verona", "Denmark", "Scotland", "Venice", "Rome"]
create_streams(realms, zulip_realm, stream_list)
recipient_streams = [Stream.objects.get(name=name, realm=zulip_realm).id for name in stream_list]
# Create subscriptions to streams
subscriptions_to_add = []
profiles = UserProfile.objects.select_related().all()
for i, profile in enumerate(profiles):
# Subscribe to some streams.
for type_id in recipient_streams[:int(len(recipient_streams) *
float(i)/len(profiles)) + 1]:
r = Recipient.objects.get(type=Recipient.STREAM, type_id=type_id)
s = Subscription(recipient=r, user_profile=profile)
subscriptions_to_add.append(s)
Subscription.objects.bulk_create(subscriptions_to_add)
else:
zulip_realm = Realm.objects.get(domain="zulip.com")
recipient_streams = [klass.type_id for klass in
Recipient.objects.filter(type=Recipient.STREAM)]
# Extract a list of all users
user_profiles = [user_profile.id for user_profile in UserProfile.objects.all()]
# Create several initial huddles
for i in xrange(options["num_huddles"]):
get_huddle(random.sample(user_profiles, random.randint(3, 4)))
# Create several initial pairs for personals
personals_pairs = [random.sample(user_profiles, 2)
for i in xrange(options["num_personals"])]
threads = options["threads"]
jobs = []
for i in xrange(threads):
count = options["num_messages"] / threads
if i < options["num_messages"] % threads:
count += 1
jobs.append((count, personals_pairs, options, self.stdout.write))
for job in jobs:
send_messages(job)
if options["delete"]:
# Create the "website" and "API" clients; if we don't, the
# default values in zerver/decorators.py will not work
# with the Django test suite.
get_client("website")
get_client("API")
if options["test_suite"]:
# Create test users; the MIT ones are needed to test
# the Zephyr mirroring codepaths.
testsuite_mit_users = [
("Fred Sipb (MIT)", "[email protected]"),
("Athena Consulting Exchange User (MIT)", "[email protected]"),
("Esp Classroom (MIT)", "[email protected]"),
]
create_users(realms, testsuite_mit_users)
# These bots are directly referenced from code and thus
# are needed for the test suite.
all_realm_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,))
for bot in settings.INTERNAL_BOTS]
zulip_realm_bots = [
("Zulip New User Bot", "[email protected]"),
("Zulip Error Bot", "[email protected]"),
]
zulip_realm_bots.extend(all_realm_bots)
create_users(realms, zulip_realm_bots, bot=True)
if not options["test_suite"]:
# To keep the messages.json fixtures file for the test
# suite fast, don't add these users and subscriptions
# when running populate_db for the test suite
zulip_stream_list = ["devel", "all", "zulip", "design", "support", "social", "test",
"errors", "sales"]
create_streams(realms, zulip_realm, zulip_stream_list)
# Add a few default streams
for stream_name in ["design", "devel", "social", "support"]:
DefaultStream.objects.create(realm=zulip_realm, stream=get_stream(stream_name, zulip_realm))
# Now subscribe everyone to these streams
subscriptions_to_add = []
profiles = UserProfile.objects.select_related().filter(realm=zulip_realm)
for cls in zulip_stream_list:
stream = Stream.objects.get(name=cls, realm=zulip_realm)
recipient = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
for profile in profiles:
# Subscribe to some streams.
s = Subscription(recipient=recipient, user_profile=profile)
subscriptions_to_add.append(s)
Subscription.objects.bulk_create(subscriptions_to_add)
# These bots are not needed by the test suite
internal_zulip_users_nosubs = [
("Zulip Commit Bot", "[email protected]"),
("Zulip Trac Bot", "[email protected]"),
("Zulip Nagios Bot", "[email protected]"),
("Zulip Feedback Bot", "[email protected]"),
]
create_users(realms, internal_zulip_users_nosubs, bot=True)
# Mark all messages as read
UserMessage.objects.all().update(flags=UserMessage.flags.read)
self.stdout.write("Successfully populated test database.\n")
if options["replay_old_messages"]:
restore_saved_messages()
recipient_hash = {}
def get_recipient_by_id(rid):
if rid in recipient_hash:
return recipient_hash[rid]
return Recipient.objects.get(id=rid)
def restore_saved_messages():
old_messages = []
duplicate_suppression_hash = {}
stream_dict = {}
user_set = set()
email_set = set([u.email for u in UserProfile.objects.all()])
realm_set = set()
# Initial client_set is nonempty temporarily because we don't have
# clients in logs at all right now -- later we can start with nothing.
client_set = set(["populate_db", "website", "zephyr_mirror"])
huddle_user_set = set()
# First, determine all the objects our messages will need.
print datetime.datetime.now(), "Creating realms/streams/etc..."
def process_line(line):
old_message_json = line.strip()
# Due to populate_db's shakespeare mode, we have a lot of
# duplicate messages in our log that only differ in their
# logged ID numbers (same timestamp, content, etc.). With
# sqlite, bulk creating those messages won't work properly: in
# particular, the first 100 messages will actually only result
# in 20 rows ending up in the target table, which screws up
# the accounting below: to handle changing subscriptions, we assume
# that the Nth row populate_db created goes with the Nth
# non-subscription row of the input. So suppress the duplicates
# when using sqlite.
if "sqlite" in settings.DATABASES["default"]["ENGINE"]:
tmp_message = ujson.loads(old_message_json)
tmp_message['id'] = '1'
duplicate_suppression_key = ujson.dumps(tmp_message)
if duplicate_suppression_key in duplicate_suppression_hash:
return
duplicate_suppression_hash[duplicate_suppression_key] = True
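# The suppression key is the message JSON with its id normalized to '1',
# so two log lines that differ only in their logged id collapse to the
# same key and the later one is skipped.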
old_message = ujson.loads(old_message_json)
message_type = old_message["type"]
# Lower case emails and domains; it will screw up
# deduplication if we don't
def fix_email(email):
return email.strip().lower()
if message_type in ["stream", "huddle", "personal"]:
old_message["sender_email"] = fix_email(old_message["sender_email"])
# Fix the length on too-long messages before we start processing them
if len(old_message["content"]) > MAX_MESSAGE_LENGTH:
old_message["content"] = "[ This message was deleted because it was too long ]"
if message_type in ["subscription_added", "subscription_removed"]:
old_message["domain"] = old_message["domain"].lower()
old_message["user"] = fix_email(old_message["user"])
elif message_type == "subscription_property":
old_message["user"] = fix_email(old_message["user"])
elif message_type == "user_email_changed":
old_message["old_email"] = fix_email(old_message["old_email"])
old_message["new_email"] = fix_email(old_message["new_email"])
elif message_type.startswith("user_"):
old_message["user"] = fix_email(old_message["user"])
elif message_type.startswith("enable_"):
old_message["user"] = fix_email(old_message["user"])
if message_type == 'personal':
old_message["recipient"][0]["email"] = fix_email(old_message["recipient"][0]["email"])
elif message_type == "huddle":
for i in xrange(len(old_message["recipient"])):
old_message["recipient"][i]["email"] = fix_email(old_message["recipient"][i]["email"])
old_messages.append(old_message)
if message_type in ["subscription_added", "subscription_removed"]:
stream_name = old_message["name"].strip()
canon_stream_name = stream_name.lower()
if (old_message["domain"], canon_stream_name) not in stream_dict:
stream_dict[(old_message["domain"], canon_stream_name)] = \
(old_message["domain"], stream_name)
elif message_type == "user_created":
user_set.add((old_message["user"], old_message["full_name"], old_message["short_name"], False))
elif message_type == "realm_created":
realm_set.add(old_message["domain"])
if message_type not in ["stream", "huddle", "personal"]:
return
sender_email = old_message["sender_email"]
domain = split_email_to_domain(sender_email)
realm_set.add(domain)
if old_message["sender_email"] not in email_set:
user_set.add((old_message["sender_email"],
old_message["sender_full_name"],
old_message["sender_short_name"],
False))
if 'sending_client' in old_message:
client_set.add(old_message['sending_client'])
if message_type == 'stream':
stream_name = old_message["recipient"].strip()
canon_stream_name = stream_name.lower()
if (domain, canon_stream_name) not in stream_dict:
stream_dict[(domain, canon_stream_name)] = (domain, stream_name)
elif message_type == 'personal':
u = old_message["recipient"][0]
if u["email"] not in email_set:
user_set.add((u["email"], u["full_name"], u["short_name"], False))
email_set.add(u["email"])
elif message_type == 'huddle':
for u in old_message["recipient"]:
user_set.add((u["email"], u["full_name"], u["short_name"], False))
if u["email"] not in email_set:
user_set.add((u["email"], u["full_name"], u["short_name"], False))
email_set.add(u["email"])
huddle_user_set.add(tuple(sorted(set(u["email"] for u in old_message["recipient"]))))
else:
raise ValueError('Bad message type')
event_glob = os.path.join(settings.EVENT_LOG_DIR, 'events.*')
for filename in sorted(glob.glob(event_glob)):
with file(filename, "r") as message_log:
for line in message_log.readlines():
process_line(line)
stream_recipients = {}
user_recipients = {}
huddle_recipients = {}
# Then, create the objects our messages need.
print datetime.datetime.now(), "Creating realms..."
bulk_create_realms(realm_set)
realms = {}
for realm in Realm.objects.all():
realms[realm.domain] = realm
print datetime.datetime.now(), "Creating clients..."
bulk_create_clients(client_set)
clients = {}
for client in Client.objects.all():
clients[client.name] = client
print datetime.datetime.now(), "Creating streams..."
bulk_create_streams(realms, stream_dict.values())
streams = {}
for stream in Stream.objects.all():
streams[stream.id] = stream
for recipient in Recipient.objects.filter(type=Recipient.STREAM):
stream_recipients[(streams[recipient.type_id].realm_id,
streams[recipient.type_id].name.lower())] = recipient
print datetime.datetime.now(), "Creating users..."
bulk_create_users(realms, user_set)
users = {}
users_by_id = {}
for user_profile in UserProfile.objects.select_related().all():
users[user_profile.email] = user_profile
users_by_id[user_profile.id] = user_profile
for recipient in Recipient.objects.filter(type=Recipient.PERSONAL):
user_recipients[users_by_id[recipient.type_id].email] = recipient
print datetime.datetime.now(), "Creating huddles..."
bulk_create_huddles(users, huddle_user_set)
huddles_by_id = {}
for huddle in Huddle.objects.all():
huddles_by_id[huddle.id] = huddle
for recipient in Recipient.objects.filter(type=Recipient.HUDDLE):
huddle_recipients[huddles_by_id[recipient.type_id].huddle_hash] = recipient
# TODO: Add a special entry type in the log that is a subscription
# change and import those as we go to make subscription changes
# take effect!
print datetime.datetime.now(), "Importing subscriptions..."
subscribers = {}
for s in Subscription.objects.select_related().all():
if s.active:
subscribers.setdefault(s.recipient.id, set()).add(s.user_profile.id)
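# subscribers maps recipient id -> set of subscribed user-profile ids,
# e.g. {recipient.id: set([42, 43])}; the per-message fan-out below
# iterates over these sets.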
# Then create all the messages, without talking to the DB!
print datetime.datetime.now(), "Importing messages, part 1..."
first_message_id = None
if Message.objects.exists():
first_message_id = Message.objects.all().order_by("-id")[0].id + 1
messages_to_create = []
for idx, old_message in enumerate(old_messages):
message_type = old_message["type"]
if message_type not in ["stream", "huddle", "personal"]:
continue
message = Message()
sender_email = old_message["sender_email"]
domain = split_email_to_domain(sender_email)
realm = realms[domain]
message.sender = users[sender_email]
type_hash = {"stream": Recipient.STREAM,
"huddle": Recipient.HUDDLE,
"personal": Recipient.PERSONAL}
if 'sending_client' in old_message:
message.sending_client = clients[old_message['sending_client']]
elif sender_email in ["[email protected]", "[email protected]", "[email protected]",
"[email protected]", "[email protected]"]:
message.sending_client = clients['populate_db']
elif realm.domain == "zulip.com":
message.sending_client = clients["website"]
elif realm.domain == "mit.edu":
message.sending_client = clients['zephyr_mirror']
else:
message.sending_client = clients['populate_db']
message.type = type_hash[message_type]
message.content = old_message["content"]
message.subject = old_message["subject"]
message.pub_date = timestamp_to_datetime(old_message["timestamp"])
if message.type == Recipient.PERSONAL:
message.recipient = user_recipients[old_message["recipient"][0]["email"]]
elif message.type == Recipient.STREAM:
message.recipient = stream_recipients[(realm.id,
old_message["recipient"].lower())]
elif message.type == Recipient.HUDDLE:
huddle_hash = get_huddle_hash([users[u["email"]].id
for u in old_message["recipient"]])
message.recipient = huddle_recipients[huddle_hash]
else:
raise ValueError('Bad message type')
messages_to_create.append(message)
print datetime.datetime.now(), "Importing messages, part 2..."
Message.objects.bulk_create(messages_to_create)
messages_to_create = []
# Finally, create all the UserMessage objects
print datetime.datetime.now(), "Importing usermessages, part 1..."
personal_recipients = {}
for r in Recipient.objects.filter(type = Recipient.PERSONAL):
personal_recipients[r.id] = True
all_messages = Message.objects.all()
user_messages_to_create = []
messages_by_id = {}
for message in all_messages:
messages_by_id[message.id] = message
if len(messages_by_id) == 0:
print datetime.datetime.now(), "No old messages to replay"
return
if first_message_id is None:
first_message_id = min(messages_by_id.keys())
tot_user_messages = 0
pending_subs = {}
current_message_id = first_message_id
pending_colors = {}
for old_message in old_messages:
message_type = old_message["type"]
if message_type == 'subscription_added':
stream_key = (realms[old_message["domain"]].id, old_message["name"].strip().lower())
subscribers.setdefault(stream_recipients[stream_key].id,
set()).add(users[old_message["user"]].id)
pending_subs[(stream_recipients[stream_key].id,
users[old_message["user"]].id)] = True
continue
elif message_type == "subscription_removed":
stream_key = (realms[old_message["domain"]].id, old_message["name"].strip().lower())
user_id = users[old_message["user"]].id
subscribers.setdefault(stream_recipients[stream_key].id, set())
try:
subscribers[stream_recipients[stream_key].id].remove(user_id)
except KeyError:
print "Error unsubscribing %s from %s: not subscribed" % (
old_message["user"], old_message["name"])
pending_subs[(stream_recipients[stream_key].id,
users[old_message["user"]].id)] = False
continue
elif message_type == "user_activated" or message_type == "user_created":
# These are rare, so just handle them the slow way
user_profile = users[old_message["user"]]
join_date = timestamp_to_datetime(old_message['timestamp'])
do_activate_user(user_profile, log=False, join_date=join_date)
# Update the cache of users to show this user as activated
users_by_id[user_profile.id] = user_profile
users[old_message["user"]] = user_profile
continue
elif message_type == "user_deactivated":
user_profile = users[old_message["user"]]
do_deactivate_user(user_profile, log=False)
continue
elif message_type == "user_change_password":
# Just handle these the slow way
user_profile = users[old_message["user"]]
do_change_password(user_profile, old_message["pwhash"], log=False,
hashed_password=True)
continue
elif message_type == "user_change_full_name":
# Just handle these the slow way
user_profile = users[old_message["user"]]
user_profile.full_name = old_message["full_name"]
user_profile.save(update_fields=["full_name"])
continue
elif message_type == "enable_desktop_notifications_changed":
# Just handle these the slow way
user_profile = users[old_message["user"]]
user_profile.enable_desktop_notifications = (old_message["enable_desktop_notifications"] != "false")
user_profile.save(update_fields=["enable_desktop_notifications"])
continue
elif message_type == "enable_sounds_changed":
user_profile = users[old_message["user"]]
user_profile.enable_sounds = (old_message["enable_sounds"] != "false")
user_profile.save(update_fields=["enable_sounds"])
elif message_type == "enable_offline_email_notifications_changed":
user_profile = users[old_message["user"]]
user_profile.enable_offline_email_notifications = (old_message["enable_offline_email_notifications"] != "false")
user_profile.save(update_fields=["enable_offline_email_notifications"])
continue
elif message_type == "enable_offline_push_notifications_changed":
user_profile = users[old_message["user"]]
user_profile.enable_offline_push_notifications = (old_message["enable_offline_push_notifications"] != "false")
user_profile.save(update_fields=["enable_offline_push_notifications"])
continue
elif message_type == "default_streams":
set_default_streams(Realm.objects.get(domain=old_message["domain"]),
old_message["streams"])
continue
elif message_type == "subscription_property":
property_name = old_message.get("property")
if property_name == "stream_color" or property_name == "color":
color = old_message.get("color", old_message.get("value"))
pending_colors[(old_message["user"],
old_message["stream_name"].lower())] = color
elif property_name in ["in_home_view", "notifications"]:
# TODO: Handle this
continue
else:
raise RuntimeError("Unknown property %s" % (property_name,))
continue
elif message_type == "realm_created":
# No action required
continue
elif message_type in ["user_email_changed", "update_onboarding", "update_message"]:
# TODO: Handle these
continue
if message_type not in ["stream", "huddle", "personal"]:
raise RuntimeError("Unexpected message type %s" % (message_type,))
message = messages_by_id[current_message_id]
current_message_id += 1
if message.recipient_id not in subscribers:
# Nobody received this message -- probably due to our
# subscriptions being out-of-date.
continue
recipient_user_ids = set()
for user_profile_id in subscribers[message.recipient_id]:
recipient_user_ids.add(user_profile_id)
if message.recipient_id in personal_recipients:
# Include the sender in personal recipients
recipient_user_ids.add(message.sender_id)
for user_profile_id in recipient_user_ids:
if users_by_id[user_profile_id].is_active:
um = UserMessage(user_profile_id=user_profile_id,
message=message)
user_messages_to_create.append(um)
if len(user_messages_to_create) > 100000:
tot_user_messages += len(user_messages_to_create)
UserMessage.objects.bulk_create(user_messages_to_create)
user_messages_to_create = []
print datetime.datetime.now(), "Importing usermessages, part 2..."
tot_user_messages += len(user_messages_to_create)
UserMessage.objects.bulk_create(user_messages_to_create)
print datetime.datetime.now(), "Finalizing subscriptions..."
current_subs = {}
current_subs_obj = {}
for s in Subscription.objects.select_related().all():
current_subs[(s.recipient_id, s.user_profile_id)] = s.active
current_subs_obj[(s.recipient_id, s.user_profile_id)] = s
subscriptions_to_add = []
subscriptions_to_change = []
for pending_sub in pending_subs.keys():
(recipient_id, user_profile_id) = pending_sub
current_state = current_subs.get(pending_sub)
if pending_subs[pending_sub] == current_state:
# Already correct in the database
continue
elif current_state is not None:
subscriptions_to_change.append((pending_sub, pending_subs[pending_sub]))
continue
s = Subscription(recipient_id=recipient_id,
user_profile_id=user_profile_id,
active=pending_subs[pending_sub])
subscriptions_to_add.append(s)
Subscription.objects.bulk_create(subscriptions_to_add)
for (sub, active) in subscriptions_to_change:
current_subs_obj[sub].active = active
current_subs_obj[sub].save(update_fields=["active"])
subs = {}
for sub in Subscription.objects.all():
subs[(sub.user_profile_id, sub.recipient_id)] = sub
# TODO: do restore of subscription colors -- we're currently not
# logging changes so there's little point in having the code :(
print datetime.datetime.now(), "Finished importing %s messages (%s usermessages)" % \
(len(all_messages), tot_user_messages)
site = Site.objects.get_current()
site.domain = 'zulip.com'
site.save()
print datetime.datetime.now(), "Filling in user pointers..."
# Set restored pointers to the very latest messages
for user_profile in UserProfile.objects.all():
try:
top = UserMessage.objects.filter(
user_profile_id=user_profile.id).order_by("-message")[0]
user_profile.pointer = top.message_id
except IndexError:
user_profile.pointer = -1
user_profile.save(update_fields=["pointer"])
print datetime.datetime.now(), "Done replaying old messages"
# Create some test messages, including:
# - multiple streams
# - multiple subjects per stream
# - multiple huddles
# - multiple personals conversations
# - multiple messages per subject
# - both single and multi-line content
def send_messages(data):
(tot_messages, personals_pairs, options, output) = data
random.seed(os.getpid())
texts = file("zilencer/management/commands/test_messages.txt", "r").readlines()
offset = random.randint(0, len(texts) - 1)
recipient_streams = [klass.id for klass in
Recipient.objects.filter(type=Recipient.STREAM)]
recipient_huddles = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)]
huddle_members = {}
for h in recipient_huddles:
huddle_members[h] = [s.user_profile.id for s in
Subscription.objects.filter(recipient_id=h)]
num_messages = 0
random_max = 1000000
recipients = {}
while num_messages < tot_messages:
saved_data = ''
message = Message()
message.sending_client = get_client('populate_db')
length = random.randint(1, 5)
lines = (t.strip() for t in texts[offset: offset + length])
message.content = '\n'.join(lines)
offset += length
offset = offset % len(texts)
randkey = random.randint(1, random_max)
if (num_messages > 0 and
random.randint(1, random_max) * 100. / random_max < options["stickyness"]):
# Use an old recipient
message_type, recipient_id, saved_data = recipients[num_messages - 1]
if message_type == Recipient.PERSONAL:
personals_pair = saved_data
random.shuffle(personals_pair)
elif message_type == Recipient.STREAM:
message.subject = saved_data
message.recipient = get_recipient_by_id(recipient_id)
elif message_type == Recipient.HUDDLE:
message.recipient = get_recipient_by_id(recipient_id)
elif (randkey <= random_max * options["percent_huddles"] / 100.):
message_type = Recipient.HUDDLE
message.recipient = get_recipient_by_id(random.choice(recipient_huddles))
elif (randkey <= random_max * (options["percent_huddles"] + options["percent_personals"]) / 100.):
message_type = Recipient.PERSONAL
personals_pair = random.choice(personals_pairs)
random.shuffle(personals_pair)
elif (randkey <= random_max * 1.0):
message_type = Recipient.STREAM
message.recipient = get_recipient_by_id(random.choice(recipient_streams))
if message_type == Recipient.HUDDLE:
sender_id = random.choice(huddle_members[message.recipient.id])
message.sender = get_user_profile_by_id(sender_id)
elif message_type == Recipient.PERSONAL:
message.recipient = Recipient.objects.get(type=Recipient.PERSONAL,
type_id=personals_pair[0])
message.sender = get_user_profile_by_id(personals_pair[1])
saved_data = personals_pair
elif message_type == Recipient.STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
# Pick a random subscriber to the stream
message.sender = random.choice(Subscription.objects.filter(
recipient=message.recipient)).user_profile
message.subject = stream.name + str(random.randint(1, 3))
saved_data = message.subject
message.pub_date = now()
do_send_message(message)
recipients[num_messages] = [message_type, message.recipient.id, saved_data]
num_messages += 1
return tot_messages
| apache-2.0 |
pratikmallya/hue | desktop/core/ext-py/Django-1.6.10/tests/test_client_regress/views.py | 53 | 4346 | import json
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.core.serializers.json import DjangoJSONEncoder
from django.test.client import CONTENT_TYPE_RE
from django.template import RequestContext
class CustomTestException(Exception):
pass
def no_template_view(request):
"A simple view that expects a GET request, and returns a rendered template"
return HttpResponse("No template used. Sample content: twice once twice. Content ends.")
def staff_only_view(request):
"A view that can only be visited by staff. Non staff members get an exception"
if request.user.is_staff:
return HttpResponse('')
else:
raise CustomTestException()
def get_view(request):
"A simple login protected view"
return HttpResponse("Hello world")
get_view = login_required(get_view)
def request_data(request, template='base.html', data='sausage'):
"A simple view that returns the request data in the context"
return render_to_response(template, {
'get-foo':request.GET.get('foo',None),
'get-bar':request.GET.get('bar',None),
'post-foo':request.POST.get('foo',None),
'post-bar':request.POST.get('bar',None),
'request-foo':request.REQUEST.get('foo',None),
'request-bar':request.REQUEST.get('bar',None),
'data': data,
})
def view_with_argument(request, name):
"""A view that takes a string argument
The purpose of this view is to check that if a space is provided in
the argument, the test framework unescapes the %20 before passing
the value to the view.
"""
if name == 'Arthur Dent':
return HttpResponse('Hi, Arthur')
else:
return HttpResponse('Howdy, %s' % name)
def login_protected_redirect_view(request):
"A view that redirects all requests to the GET view"
return HttpResponseRedirect('/test_client_regress/get_view/')
login_protected_redirect_view = login_required(login_protected_redirect_view)
def set_session_view(request):
"A view that sets a session variable"
request.session['session_var'] = 'YES'
return HttpResponse('set_session')
def check_session_view(request):
"A view that reads a session variable"
return HttpResponse(request.session.get('session_var', 'NO'))
def request_methods_view(request):
"A view that responds with the request method"
return HttpResponse('request method: %s' % request.method)
def return_unicode(request):
return render_to_response('unicode.html')
def return_undecodable_binary(request):
return HttpResponse(
b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e ReportLab Generated PDF document http://www.reportlab.com'
)
def return_json_file(request):
"A view that parses and returns a JSON string as a file."
match = CONTENT_TYPE_RE.match(request.META['CONTENT_TYPE'])
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
# This just checks that the uploaded data is JSON
obj_dict = json.loads(request.body.decode(charset))
obj_json = json.dumps(obj_dict, cls=DjangoJSONEncoder, ensure_ascii=False)
response = HttpResponse(obj_json.encode(charset), status=200,
content_type='application/json; charset=%s' % charset)
response['Content-Disposition'] = 'attachment; filename=testfile.json'
return response
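# Illustrative only (not part of the original test views): return_json_file
# is typically exercised through Django's test client, assuming a urlconf
# entry routes to it, e.g.
#
#     from django.test.client import Client
#     response = Client().post('/test_client_regress/return_json_file/',
#                              data='{"a": 1}',
#                              content_type='application/json; charset=utf-8')
#     # response['Content-Disposition'] == 'attachment; filename=testfile.json'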
def check_headers(request):
"A view that responds with value of the X-ARG-CHECK header"
return HttpResponse('HTTP_X_ARG_CHECK: %s' % request.META.get('HTTP_X_ARG_CHECK', 'Undefined'))
def body(request):
"A view that is requested with GET and accesses request.body. Refs #14753."
return HttpResponse(request.body)
def read_all(request):
"A view that is requested with accesses request.read()."
return HttpResponse(request.read())
def read_buffer(request):
"A view that is requested with accesses request.read(LARGE_BUFFER)."
return HttpResponse(request.read(99999))
def request_context_view(request):
# Special attribute that won't be present on a plain HttpRequest
request.special_path = request.path
return render_to_response('request_context.html', context_instance=RequestContext(request, {}))
| apache-2.0 |
slozier/ironpython2 | Src/StdLib/Lib/site-packages/win32/test/test_pywintypes.py | 2 | 3632 | import sys
import unittest
import pywintypes
import time
from pywin32_testutil import str2bytes, ob2memory
import datetime
import operator
class TestCase(unittest.TestCase):
def testPyTimeFormat(self):
struct_current = time.localtime()
pytime_current = pywintypes.Time(struct_current)
# try and test all the standard parts of the format
# Note we used to include '%Z' testing, but that was pretty useless as
# it always returned the local timezone.
format_strings = "%a %A %b %B %c %d %H %I %j %m %M %p %S %U %w %W %x %X %y %Y"
for fmt in format_strings.split():
v1 = pytime_current.Format(fmt)
v2 = time.strftime(fmt, struct_current)
self.assertEquals(v1, v2, "format %s failed - %r != %r" % (fmt, v1, v2))
def testPyTimePrint(self):
        # This used to crash with an invalid, or too-early, time.
        # We don't really want to assert that it raises a ValueError
        # (as hopefully this won't be true forever). So either working,
        # or raising ValueError, is OK.
try:
t = pywintypes.Time(-2)
t.Format()
except ValueError:
return
def testTimeInDict(self):
d = {}
d['t1'] = pywintypes.Time(1)
self.failUnlessEqual(d['t1'], pywintypes.Time(1))
def testPyTimeCompare(self):
t1 = pywintypes.Time(100)
t1_2 = pywintypes.Time(100)
t2 = pywintypes.Time(101)
self.failUnlessEqual(t1, t1_2)
self.failUnless(t1 <= t1_2)
self.failUnless(t1_2 >= t1)
self.failIfEqual(t1, t2)
self.failUnless(t1 < t2)
self.failUnless(t2 > t1 )
def testTimeTuple(self):
now = datetime.datetime.now() # has usec...
# timetuple() lost usec - pt must be <=...
pt = pywintypes.Time(now.timetuple())
# *sob* - only if we have a datetime object can we compare like this.
if isinstance(pt, datetime.datetime):
self.failUnless(pt <= now)
def testTimeTuplems(self):
now = datetime.datetime.now() # has usec...
tt = now.timetuple() + (now.microsecond // 1000,)
pt = pywintypes.Time(tt)
# we can't compare if using the old type, as it loses all sub-second res.
if isinstance(pt, datetime.datetime):
self.failUnlessEqual(now, pt)
def testPyTimeFromTime(self):
t1 = pywintypes.Time(time.time())
self.failUnless(pywintypes.Time(t1) is t1)
def testGUID(self):
s = "{00020400-0000-0000-C000-000000000046}"
iid = pywintypes.IID(s)
iid2 = pywintypes.IID(ob2memory(iid), True)
self.assertEquals(iid, iid2)
self.assertRaises(ValueError, pywintypes.IID, str2bytes('00'), True) # too short
self.assertRaises(TypeError, pywintypes.IID, 0, True) # no buffer
def testGUIDRichCmp(self):
s = "{00020400-0000-0000-C000-000000000046}"
iid = pywintypes.IID(s)
self.failIf(s==None)
self.failIf(None==s)
self.failUnless(s!=None)
self.failUnless(None!=s)
if sys.version_info > (3,0):
self.assertRaises(TypeError, operator.gt, None, s)
self.assertRaises(TypeError, operator.gt, s, None)
self.assertRaises(TypeError, operator.lt, None, s)
self.assertRaises(TypeError, operator.lt, s, None)
def testGUIDInDict(self):
s = "{00020400-0000-0000-C000-000000000046}"
iid = pywintypes.IID(s)
d = dict(item=iid)
self.failUnlessEqual(d['item'], iid)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
JioCloud/nova | nova/api/openstack/compute/schemas/v3/admin_password.py | 111 | 1078 | # Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
change_password = {
'type': 'object',
'properties': {
'changePassword': {
'type': 'object',
'properties': {
'adminPass': parameter_types.admin_password,
},
'required': ['adminPass'],
'additionalProperties': False,
},
},
'required': ['changePassword'],
'additionalProperties': False,
}
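# Illustrative request body (not part of the original module) accepted by the
# schema above; an extra key at either level is rejected because both objects
# set 'additionalProperties': False.
#
#     {"changePassword": {"adminPass": "new-secret"}}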
| apache-2.0 |
ktnyt/chainer | chainer/distributions/gamma.py | 2 | 2638 | import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.array import broadcast
from chainer.functions.array import where
from chainer.functions.math import digamma
from chainer.functions.math import exponential
from chainer.functions.math import lgamma
class Gamma(distribution.Distribution):
"""Gamma Distribution.
Args:
k(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution.
theta(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
distribution.
"""
def __init__(self, k, theta):
super(Gamma, self).__init__()
self.__k = chainer.as_variable(k)
self.__theta = chainer.as_variable(theta)
@property
def k(self):
return self.__k
@property
def theta(self):
return self.__theta
@property
def batch_shape(self):
return self.k.shape
@property
def entropy(self):
return self.k + exponential.log(self.theta) + lgamma.lgamma(self.k) \
+ (1 - self.k) * digamma.digamma(self.k)
@property
def event_shape(self):
return ()
@property
def _is_gpu(self):
return isinstance(self.k.data, cuda.ndarray)
def log_prob(self, x):
logp = - lgamma.lgamma(self.k) - self.k * exponential.log(self.theta) \
+ (self.k - 1) * exponential.log(x) - x / self.theta
xp = logp.xp
inf = xp.full_like(logp.array, xp.inf)
if isinstance(x, chainer.Variable):
x = x.array
return where.where(xp.asarray(x >= 0), logp, xp.asarray(-inf))
@property
def mean(self):
return self.k * self.theta
def sample_n(self, n):
xp = cuda.get_array_module(self.k)
if xp is cuda.cupy:
eps = xp.random.gamma(
self.k.data, size=(n,) + self.batch_shape, dtype=self.k.dtype)
else:
eps = xp.random.gamma(
self.k.data, size=(n,) + self.batch_shape).astype(self.k.dtype)
noise = broadcast.broadcast_to(self.theta, eps.shape) * eps
return noise
@property
def support(self):
return 'positive'
@property
def variance(self):
return self.k * self.theta * self.theta
@distribution.register_kl(Gamma, Gamma)
def _kl_gamma_gamma(dist1, dist2):
return (dist1.k - dist2.k) * digamma.digamma(dist1.k) \
- (lgamma.lgamma(dist1.k) - lgamma.lgamma(dist2.k)) \
+ dist2.k\
* (exponential.log(dist2.theta) - exponential.log(dist1.theta)) \
+ dist1.k * (dist1.theta / dist2.theta - 1)
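# Illustrative usage sketch (not part of the original module): the function
# registered above is what chainer.kl_divergence dispatches to for a pair of
# Gamma distributions.
#
#     import numpy as np
#     d1 = Gamma(np.array(2.0, np.float32), np.array(1.0, np.float32))
#     d2 = Gamma(np.array(3.0, np.float32), np.array(0.5, np.float32))
#     kl = chainer.kl_divergence(d1, d2)  # non-negative scalar Variable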
| mit |
diegocortassa/TACTIC | src/context/client/tactic-api-python-4.0.api04/Tools/Scripts/ftpmirror.py | 10 | 13254 | #! /usr/bin/env python
"""Mirror a remote ftp subtree into a local directory tree.
usage: ftpmirror [-v] [-q] [-i] [-m] [-n] [-r] [-s pat]
[-l username [-p passwd [-a account]]]
hostname[:port] [remotedir [localdir]]
-v: verbose
-q: quiet
-i: interactive mode
-m: macintosh server (NCSA telnet 2.4) (implies -n -s '*.o')
-n: don't log in
-r: remove local files/directories no longer pertinent
-l username [-p passwd [-a account]]: login info (default .netrc or anonymous)
-s pat: skip files matching pattern
hostname: remote host w/ optional port separated by ':'
remotedir: remote directory (default initial)
localdir: local directory (default current)
"""
import os
import sys
import time
import getopt
import ftplib
import netrc
from fnmatch import fnmatch
# Print usage message and exit
def usage(*args):
sys.stdout = sys.stderr
for msg in args: print msg
print __doc__
sys.exit(2)
verbose = 1 # 0 for -q, 2 for -v
interactive = 0
mac = 0
rmok = 0
nologin = 0
skippats = ['.', '..', '.mirrorinfo']
# Main program: parse command line and start processing
def main():
global verbose, interactive, mac, rmok, nologin
try:
opts, args = getopt.getopt(sys.argv[1:], 'a:bil:mnp:qrs:v')
except getopt.error, msg:
usage(msg)
login = ''
passwd = ''
account = ''
if not args: usage('hostname missing')
host = args[0]
port = 0
if ':' in host:
host, port = host.split(':', 1)
port = int(port)
try:
auth = netrc.netrc().authenticators(host)
if auth is not None:
login, account, passwd = auth
except (netrc.NetrcParseError, IOError):
pass
for o, a in opts:
if o == '-l': login = a
if o == '-p': passwd = a
if o == '-a': account = a
if o == '-v': verbose = verbose + 1
if o == '-q': verbose = 0
if o == '-i': interactive = 1
if o == '-m': mac = 1; nologin = 1; skippats.append('*.o')
if o == '-n': nologin = 1
if o == '-r': rmok = 1
if o == '-s': skippats.append(a)
remotedir = ''
localdir = ''
if args[1:]:
remotedir = args[1]
if args[2:]:
localdir = args[2]
if args[3:]: usage('too many arguments')
#
f = ftplib.FTP()
if verbose: print "Connecting to '%s%s'..." % (host,
(port and ":%d"%port or ""))
f.connect(host,port)
if not nologin:
if verbose:
print 'Logging in as %r...' % (login or 'anonymous')
f.login(login, passwd, account)
if verbose: print 'OK.'
pwd = f.pwd()
if verbose > 1: print 'PWD =', repr(pwd)
if remotedir:
if verbose > 1: print 'cwd(%s)' % repr(remotedir)
f.cwd(remotedir)
if verbose > 1: print 'OK.'
pwd = f.pwd()
if verbose > 1: print 'PWD =', repr(pwd)
#
mirrorsubdir(f, localdir)
# Core logic: mirror one subdirectory (recursively)
def mirrorsubdir(f, localdir):
pwd = f.pwd()
if localdir and not os.path.isdir(localdir):
if verbose: print 'Creating local directory', repr(localdir)
try:
makedir(localdir)
except os.error, msg:
print "Failed to establish local directory", repr(localdir)
return
infofilename = os.path.join(localdir, '.mirrorinfo')
try:
text = open(infofilename, 'r').read()
except IOError, msg:
text = '{}'
try:
info = eval(text)
except (SyntaxError, NameError):
print 'Bad mirror info in', repr(infofilename)
info = {}
subdirs = []
listing = []
if verbose: print 'Listing remote directory %r...' % (pwd,)
f.retrlines('LIST', listing.append)
filesfound = []
for line in listing:
if verbose > 1: print '-->', repr(line)
if mac:
# Mac listing has just filenames;
# trailing / means subdirectory
filename = line.strip()
mode = '-'
if filename[-1:] == '/':
filename = filename[:-1]
mode = 'd'
infostuff = ''
else:
# Parse, assuming a UNIX listing
words = line.split(None, 8)
if len(words) < 6:
if verbose > 1: print 'Skipping short line'
continue
filename = words[-1].lstrip()
i = filename.find(" -> ")
if i >= 0:
# words[0] had better start with 'l'...
if verbose > 1:
print 'Found symbolic link %r' % (filename,)
linkto = filename[i+4:]
filename = filename[:i]
infostuff = words[-5:-1]
mode = words[0]
skip = 0
for pat in skippats:
if fnmatch(filename, pat):
if verbose > 1:
print 'Skip pattern', repr(pat),
print 'matches', repr(filename)
skip = 1
break
if skip:
continue
if mode[0] == 'd':
if verbose > 1:
print 'Remembering subdirectory', repr(filename)
subdirs.append(filename)
continue
filesfound.append(filename)
if info.has_key(filename) and info[filename] == infostuff:
if verbose > 1:
print 'Already have this version of',repr(filename)
continue
fullname = os.path.join(localdir, filename)
tempname = os.path.join(localdir, '@'+filename)
if interactive:
doit = askabout('file', filename, pwd)
if not doit:
if not info.has_key(filename):
info[filename] = 'Not retrieved'
continue
try:
os.unlink(tempname)
except os.error:
pass
if mode[0] == 'l':
if verbose:
print "Creating symlink %r -> %r" % (filename, linkto)
try:
os.symlink(linkto, tempname)
except IOError, msg:
print "Can't create %r: %s" % (tempname, msg)
continue
else:
try:
fp = open(tempname, 'wb')
except IOError, msg:
print "Can't create %r: %s" % (tempname, msg)
continue
if verbose:
print 'Retrieving %r from %r as %r...' % (filename, pwd, fullname)
if verbose:
fp1 = LoggingFile(fp, 1024, sys.stdout)
else:
fp1 = fp
t0 = time.time()
try:
f.retrbinary('RETR ' + filename,
fp1.write, 8*1024)
except ftplib.error_perm, msg:
print msg
t1 = time.time()
bytes = fp.tell()
fp.close()
if fp1 != fp:
fp1.close()
try:
os.unlink(fullname)
except os.error:
pass # Ignore the error
try:
os.rename(tempname, fullname)
except os.error, msg:
print "Can't rename %r to %r: %s" % (tempname, fullname, msg)
continue
info[filename] = infostuff
writedict(info, infofilename)
if verbose and mode[0] != 'l':
dt = t1 - t0
kbytes = bytes / 1024.0
print int(round(kbytes)),
print 'Kbytes in',
print int(round(dt)),
print 'seconds',
if t1 > t0:
print '(~%d Kbytes/sec)' % \
int(round(kbytes/dt),)
print
#
# Remove files from info that are no longer remote
deletions = 0
for filename in info.keys():
if filename not in filesfound:
if verbose:
print "Removing obsolete info entry for",
print repr(filename), "in", repr(localdir or ".")
del info[filename]
deletions = deletions + 1
if deletions:
writedict(info, infofilename)
#
# Remove local files that are no longer in the remote directory
try:
if not localdir: names = os.listdir(os.curdir)
else: names = os.listdir(localdir)
except os.error:
names = []
for name in names:
if name[0] == '.' or info.has_key(name) or name in subdirs:
continue
skip = 0
for pat in skippats:
if fnmatch(name, pat):
if verbose > 1:
print 'Skip pattern', repr(pat),
print 'matches', repr(name)
skip = 1
break
if skip:
continue
fullname = os.path.join(localdir, name)
if not rmok:
if verbose:
print 'Local file', repr(fullname),
print 'is no longer pertinent'
continue
if verbose: print 'Removing local file/dir', repr(fullname)
remove(fullname)
#
# Recursively mirror subdirectories
for subdir in subdirs:
if interactive:
doit = askabout('subdirectory', subdir, pwd)
if not doit: continue
if verbose: print 'Processing subdirectory', repr(subdir)
localsubdir = os.path.join(localdir, subdir)
pwd = f.pwd()
if verbose > 1:
print 'Remote directory now:', repr(pwd)
print 'Remote cwd', repr(subdir)
try:
f.cwd(subdir)
except ftplib.error_perm, msg:
print "Can't chdir to", repr(subdir), ":", repr(msg)
else:
if verbose: print 'Mirroring as', repr(localsubdir)
mirrorsubdir(f, localsubdir)
if verbose > 1: print 'Remote cwd ..'
f.cwd('..')
newpwd = f.pwd()
if newpwd != pwd:
print 'Ended up in wrong directory after cd + cd ..'
print 'Giving up now.'
break
else:
if verbose > 1: print 'OK.'
# Helper to remove a file or directory tree
def remove(fullname):
if os.path.isdir(fullname) and not os.path.islink(fullname):
try:
names = os.listdir(fullname)
except os.error:
names = []
ok = 1
for name in names:
if not remove(os.path.join(fullname, name)):
ok = 0
if not ok:
return 0
try:
os.rmdir(fullname)
except os.error, msg:
print "Can't remove local directory %r: %s" % (fullname, msg)
return 0
else:
try:
os.unlink(fullname)
except os.error, msg:
print "Can't remove local file %r: %s" % (fullname, msg)
return 0
return 1
# Wrapper around a file for writing to write a hash sign every block.
class LoggingFile:
def __init__(self, fp, blocksize, outfp):
self.fp = fp
self.bytes = 0
self.hashes = 0
self.blocksize = blocksize
self.outfp = outfp
def write(self, data):
self.bytes = self.bytes + len(data)
hashes = int(self.bytes) / self.blocksize
while hashes > self.hashes:
self.outfp.write('#')
self.outfp.flush()
self.hashes = self.hashes + 1
self.fp.write(data)
def close(self):
self.outfp.write('\n')
# Ask permission to download a file.
def askabout(filetype, filename, pwd):
prompt = 'Retrieve %s %s from %s ? [ny] ' % (filetype, filename, pwd)
while 1:
reply = raw_input(prompt).strip().lower()
if reply in ['y', 'ye', 'yes']:
return 1
if reply in ['', 'n', 'no', 'nop', 'nope']:
return 0
print 'Please answer yes or no.'
# Create a directory if it doesn't exist. Recursively create the
# parent directory as well if needed.
def makedir(pathname):
if os.path.isdir(pathname):
return
dirname = os.path.dirname(pathname)
if dirname: makedir(dirname)
os.mkdir(pathname, 0777)
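# Stdlib near-equivalent (illustrative, not in the original script):
#     if not os.path.isdir(pathname):
#         os.makedirs(pathname, 0777)
# creates the missing parents in one call; makedirs raises if the leaf
# directory already exists, hence the guard.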
# Write a dictionary to a file in a way that can be read back using
# rval() but is still somewhat readable (i.e. not a single long line).
# Also creates a backup file.
def writedict(dict, filename):
dir, fname = os.path.split(filename)
tempname = os.path.join(dir, '@' + fname)
backup = os.path.join(dir, fname + '~')
try:
os.unlink(backup)
except os.error:
pass
fp = open(tempname, 'w')
fp.write('{\n')
for key, value in dict.items():
fp.write('%r: %r,\n' % (key, value))
fp.write('}\n')
fp.close()
try:
os.rename(filename, backup)
except os.error:
pass
os.rename(tempname, filename)
if __name__ == '__main__':
main()
| epl-1.0 |
jupyter/jupyterlab | conftest.py | 4 | 1210 | # -*- coding: utf-8 -*-
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import pytest
pytest_plugins = [
"jupyter_server.pytest_plugin",
"jupyterlab_server.pytest_plugin",
"jupyterlab.pytest_plugin"
]
def pytest_addoption(parser):
"""
Adds flags for py.test.
This is called by the pytest API
"""
group = parser.getgroup("general")
group.addoption('--quick', action='store_true',
help="Skip slow tests")
group.addoption('--slow', action='store_true',
help="Run only slow tests")
def pytest_configure(config):
config.addinivalue_line("markers", "slow: mark test as slow to run")
def pytest_collection_modifyitems(config, items):
if config.getoption("--quick"):
skip_slow = pytest.mark.skip(reason="skipping slow test")
for item in items:
if "slow" in item.keywords:
item.add_marker(skip_slow)
elif config.getoption("--slow"):
skip_quick = pytest.mark.skip(reason="skipping non-slow test")
for item in items:
if "slow" not in item.keywords:
item.add_marker(skip_quick)
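# Illustrative usage (not from the original file):
#   pytest --quick   # skip tests marked @pytest.mark.slow
#   pytest --slow    # run only tests marked @pytest.mark.slow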
| bsd-3-clause |
TEAM-Gummy/platform_external_chromium_org | third_party/closure_linter/closure_linter/common/matcher.py | 284 | 2158 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regular expression based JavaScript matcher classes."""
__author__ = ('[email protected] (Robert Walker)',
'[email protected] (Andy Perelson)')
from closure_linter.common import position
from closure_linter.common import tokens
# Shorthand
Token = tokens.Token
Position = position.Position
class Matcher(object):
"""A token matcher.
Specifies a pattern to match, the type of token it represents, what mode the
token changes to, and what mode the token applies to.
Modes allow more advanced grammars to be incorporated, and are also necessary
to tokenize line by line. We can have different patterns apply to different
modes - i.e. looking for documentation while in comment mode.
Attributes:
regex: The regular expression representing this matcher.
type: The type of token indicated by a successful match.
result_mode: The mode to move to after a successful match.
"""
def __init__(self, regex, token_type, result_mode=None, line_start=False):
"""Create a new matcher template.
Args:
regex: The regular expression to match.
token_type: The type of token a successful match indicates.
result_mode: What mode to change to after a successful match. Defaults to
None, which means to not change the current mode.
line_start: Whether this matcher should only match string at the start
of a line.
"""
self.regex = regex
self.type = token_type
self.result_mode = result_mode
self.line_start = line_start
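# Illustrative example (regex and token type are placeholders, not from this
# module): a matcher that recognizes a '//' line comment and leaves the
# tokenizer mode unchanged.
#
#     import re
#     comment_matcher = Matcher(re.compile(r'//[^\r\n]*'), 'COMMENT')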
| bsd-3-clause |
PrasannaVenkadesh/portia | slyd/tests/test_repoman.py | 14 | 9855 | import unittest
from tempfile import mkdtemp
from os.path import join
from shutil import rmtree
from json import dumps, loads
import copy
from .settings import SPEC_DATA_DIR
from slyd.gitstorage.repoman import Repoman
def j(json):
return dumps(json, sort_keys=True, indent=4)
class RepomanTest(unittest.TestCase):
def setUp(self):
self.temp_repos_dir = mkdtemp(dir=SPEC_DATA_DIR,
prefix='test-run-')
Repoman.setup(
storage_backend='dulwich.fsrepo.FsRepo',
location=self.temp_repos_dir
)
def tearDown(self):
rmtree(self.temp_repos_dir)
def get_full_name(self, repo_name):
return join(self.temp_repos_dir, repo_name)
def test_create(self):
Repoman.create_repo(self.get_full_name('my_repo'))
self.assertTrue(Repoman.repo_exists(self.get_full_name('my_repo')))
def test_save_file(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
contents = j({'a': 1})
repoman.save_file('f1', contents, 'testbranch')
self.assertEqual(['f1'], repoman.list_files_for_branch('testbranch'))
self.assertEqual(
contents, repoman.file_contents_for_branch('f1', 'testbranch'))
def test_delete_file(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
contents = j({'a': 1})
repoman.save_file('f1', contents, 'testbranch')
repoman.delete_file('f1', 'testbranch')
self.assertEqual([], repoman.list_files_for_branch('testbranch'))
def test_branch_ops(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
repoman.create_branch('b1')
self.assertTrue(repoman.has_branch('b1'))
self.assertEqual(len(repoman.get_branch('b1')), 40)
repoman.delete_branch('b1')
self.assertFalse(repoman.has_branch('b1'))
def test_simple_publish(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1, f2, f3 = j({'a': 1}), j({'b': 2}), j({'c': 3})
repoman.save_file('f1', f1, 'b1')
repoman.save_file('f2', f2, 'b1')
repoman.save_file('x/f3', f3, 'b1')
repoman.save_file('f4', '{}', 'b1')
repoman.delete_file('f4', 'b1')
self.assertTrue(repoman.has_branch('b1'))
self.assertTrue(repoman.has_branch('master'))
self.assertEqual([], repoman.list_files_for_branch('master'))
self.assertTrue(repoman.publish_branch('b1'))
self.assertItemsEqual(['f1', 'f2', 'x/f3'],
repoman.list_files_for_branch('master'))
self.assertEqual([f1, f2, f3],
[repoman.file_contents_for_branch(x, 'b1')
for x in ('f1', 'f2', 'x/f3')])
self.assertEqual([f1, f2, f3],
[repoman.file_contents_for_branch(x, 'master')
for x in ('f1', 'f2', 'x/f3')])
# Only one published revision
self.assertEqual(len(repoman.get_published_revisions()), 1)
# 6 checkpoints, 1 per operation (5) + 1 for the original state.
self.assertEqual(len(repoman.get_branch_checkpoints('b1')), 6)
def test_sequential_publishes(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1, f2 = j({'a': 1}), j({'b': 2})
repoman.save_file('f1', f1, 'b1')
repoman.save_file('x/f2', f2, 'b1')
repoman.publish_branch('b1')
repoman.delete_branch('b1')
# f1 is modified in branch b2
f1 = j({'a': 3})
repoman.save_file('f1', f1, 'b2')
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual([f1, f2],
[repoman.file_contents_for_branch(x, 'master')
for x in ('f1', 'x/f2')])
self.assertEqual(len(repoman.get_published_revisions()), 2)
def test_two_interleaved_publishes_1(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1, f2 = j({'a': 1}), j({'b': 2})
repoman.save_file('f1', f1, 'b1')
repoman.save_file('x/f2', f2, 'b1')
# branch b2 modifies the same files concurrently
f1, f2 = j({'c': 3}), j({'d': 4})
repoman.save_file('f1', f1, 'b2')
repoman.save_file('x/f2', f2, 'b2')
# both publish their changes, but the automerge should solve conflicts
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual(j({'a': 1, 'c': 3}),
repoman.file_contents_for_branch('f1', 'master'))
self.assertEqual(j({'b': 2, 'd': 4}),
repoman.file_contents_for_branch('x/f2', 'master'))
self.assertEqual(len(repoman.get_published_revisions()), 2)
def test_two_interleaved_publishes_2(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1 = j({'a': 1, 'c': 3})
repoman.save_file('f1', f1, 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# b1 adds x/f2.
f2 = j({'b': 2})
repoman.save_file('x/f2', f2, 'b1')
# branch b2 adds a file with the same name but different content
f2 = j({'a': 2, 'c': {'d': 1}})
repoman.save_file('x/f2', f2, 'b2')
repoman.delete_file('f1', 'b2')
# both publish their changes, but the automerge should solve conflicts
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual(j({'a': 2, 'b': 2, 'c': {'d': 1}}),
repoman.file_contents_for_branch('x/f2', 'master'))
self.assertEqual(len(repoman.get_published_revisions()), 3)
@unittest.skip('Broken, TODO check') # TODO
def test_two_interleaved_publishes_3(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
f1 = j({'a': 1, 'c': 3, 'd': 4, 'e': 5})
repoman.save_file('f1', f1, 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# b1 heavily edits f1
repoman.save_file('f1', j({'b': 2, 'e': 5}), 'b1')
# this case is VERY tricky. branch 2 renames f1 to f2 and changes
# it a bit. The merge algorithm detects the rename and the merged
# output ends up containing all b1 changes + all b2 changes, and the
# file is stored under the name given by branch2
repoman.delete_file('f1', 'b2')
repoman.save_file('f2', j({'a': 1, 'c': 3, 'd': 4, 'e': 6}), 'b2')
# both publish their changes, but the automerge should solve conflicts
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
self.assertEqual(j({'b': 2, 'e': 6}),
repoman.file_contents_for_branch('f2', 'master'))
self.assertEqual(len(repoman.get_published_revisions()), 3)
def test_modify_delete(self):
# Although this is usually treated as a conflict, here we just keep the
# modified version and ignore the delete.
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
repoman.save_file('f1', j({'a': 1}), 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# b1 deletes f1 and b2 modifies it.
repoman.delete_file('f1', 'b1')
repoman.save_file('f1', j({'a': 2, 'c': 3}), 'b2')
self.assertTrue(repoman.publish_branch('b1'))
self.assertTrue(repoman.publish_branch('b2'))
# master has f1.
self.assertEqual(['f1'], repoman.list_files_for_branch('master'))
self.assertEqual(j({'a': 2, 'c': 3}),
repoman.file_contents_for_branch('f1', 'master'))
def test_unresolved_conflicts_both_modify(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
repoman.save_file('f1', j({'a': 1}), 'b1')
self.assertTrue(repoman.publish_branch('b1'))
repoman.delete_branch('b1')
# both branches update the same key of the same file with different
# values. This conflict must be manually resolved
repoman.save_file('f1', j({'a': 2}), 'b1')
repoman.save_file('f1', j({'a': 3}), 'b2')
self.assertTrue(repoman.publish_branch('b1'))
self.assertFalse(repoman.publish_branch('b2'))
# the file appears as published by b1 in the master branch
self.assertEqual(j({'a': 2}),
repoman.file_contents_for_branch('f1', 'master'))
# the file in b2 has an unresolved conflict
self.assertIn('__CONFLICT',
j(repoman.file_contents_for_branch('f1', 'b2')))
# b2 solves the conflict, saves again and forces the publish
repoman.save_file('f1', j({'a': 3}), 'b2')
self.assertTrue(repoman.publish_branch('b2', force=True))
self.assertEqual(j({'a': 3}),
repoman.file_contents_for_branch('f1', 'master'))
def test_unresolved_conflicts_both_add(self):
repoman = Repoman.create_repo(self.get_full_name('my_repo'))
# both add the same file with a conflicting key
repoman.save_file('f1', j({'a': 1}), 'b1')
repoman.save_file('f1', j({'a': 2}), 'b2')
self.assertTrue(repoman.publish_branch('b1'))
self.assertFalse(repoman.publish_branch('b2'))
# the file appears as published by b1 in the master branch
self.assertEqual(j({'a': 1}),
repoman.file_contents_for_branch('f1', 'master'))
# the file in b2 has an unresolved conflict
self.assertIn('__CONFLICT',
j(repoman.file_contents_for_branch('f1', 'b2')))
| bsd-3-clause |
0x20c24/linux-psec | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
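	# Illustrative round trip (not in the original): with the default
	# zoom of 0.5, us_to_px(2000) == 1.0 pixel and px_to_us(1.0) == 2000.0
	# microseconds, so px_to_us inverts us_to_px (up to truncation, since
	# "/" floors on two ints under Python 2).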
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r ,g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
StepicOrg/Stepic-API | examples/get_courses_by_params.py | 1 | 2154 | import json
import requests
def get_token():
client_id = "..."
client_secret = "..."
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepik.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth)
token = json.loads(resp.text)['access_token']
return token
def get_data(pageNum):
api_url = 'https://stepik.org/api/courses?page={}'.format(pageNum)
course = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer ' + get_token()}).text)
return course
def get_chosen_courses(amountOfUnits, courseLang, amountOfDiscuss):
pageNum = 0
hasNextPage = True
listOfChoices = []
while hasNextPage:
try:
pageNum += 1
pageContent = get_data(pageNum)
hasNextPage = pageContent['meta']['has_next']
courses = pageContent['courses']
for course in courses: # Select only active courses (courses with active session)
if ((course['total_units']) > amountOfUnits and (course['language'] == courseLang)
and (course['is_active'] == True) and (course['discussions_count'] > amountOfDiscuss)):
listOfChoices.append({
'course_name': course['slug'],
'amount_of_units': course['total_units'],
'language': course['language'],
'create_date': course['create_date'],
'discussions_count': course['discussions_count']
})
except:
print("Error exception: something was broken!")
print(listOfChoices)
def main():
# Choose values of parameters for a course choice
# Example:
amountOfUnits = 5 # amount of units in a course
courseLang = 'ru' # language of the chosen course
amountOfDiscuss = 30 # number of discussions in a course (as an indicator of the popularity)
get_chosen_courses(amountOfUnits, courseLang, amountOfDiscuss)
main()
| mit |
ajaali/django | django/core/handlers/base.py | 234 | 13346 | from __future__ import unicode_literals
import logging
import sys
import types
import warnings
from django import http
from django.conf import settings
from django.core import signals, urlresolvers
from django.core.exceptions import (
MiddlewareNotUsed, PermissionDenied, SuspiciousOperation,
)
from django.db import connections, transaction
from django.http.multipartparser import MultiPartParserError
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.module_loading import import_string
from django.views import debug
logger = logging.getLogger('django.request')
class BaseHandler(object):
# Changes that are always applied to a response (in this order).
response_fixes = [
http.conditional_content_removal,
]
def __init__(self):
self._request_middleware = None
self._view_middleware = None
self._template_response_middleware = None
self._response_middleware = None
self._exception_middleware = None
def load_middleware(self):
"""
Populate middleware lists from settings.MIDDLEWARE_CLASSES.
Must be called after the environment is fixed (see __call__ in subclasses).
"""
self._view_middleware = []
self._template_response_middleware = []
self._response_middleware = []
self._exception_middleware = []
request_middleware = []
for middleware_path in settings.MIDDLEWARE_CLASSES:
mw_class = import_string(middleware_path)
try:
mw_instance = mw_class()
except MiddlewareNotUsed as exc:
if settings.DEBUG:
if six.text_type(exc):
logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc)
else:
logger.debug('MiddlewareNotUsed: %r', middleware_path)
continue
if hasattr(mw_instance, 'process_request'):
request_middleware.append(mw_instance.process_request)
if hasattr(mw_instance, 'process_view'):
self._view_middleware.append(mw_instance.process_view)
if hasattr(mw_instance, 'process_template_response'):
self._template_response_middleware.insert(0, mw_instance.process_template_response)
if hasattr(mw_instance, 'process_response'):
self._response_middleware.insert(0, mw_instance.process_response)
if hasattr(mw_instance, 'process_exception'):
self._exception_middleware.insert(0, mw_instance.process_exception)
# We only assign to this when initialization is complete as it is used
# as a flag for initialization being complete.
self._request_middleware = request_middleware
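    # Illustrative sketch (not part of the original module): the hooks
    # collected above are plain methods on the classes named in
    # settings.MIDDLEWARE_CLASSES, e.g.
    #
    #     class ExampleMiddleware(object):
    #         def process_request(self, request):
    #             return None  # returning None lets processing continue
    #
    #         def process_response(self, request, response):
    #             return response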
def make_view_atomic(self, view):
non_atomic_requests = getattr(view, '_non_atomic_requests', set())
for db in connections.all():
if (db.settings_dict['ATOMIC_REQUESTS']
and db.alias not in non_atomic_requests):
view = transaction.atomic(using=db.alias)(view)
return view
def get_exception_response(self, request, resolver, status_code, exception):
try:
callback, param_dict = resolver.resolve_error_handler(status_code)
# Unfortunately, inspect.getargspec result is not trustable enough
# depending on the callback wrapping in decorators (frequent for handlers).
# Falling back on try/except:
try:
response = callback(request, **dict(param_dict, exception=exception))
except TypeError:
warnings.warn(
"Error handlers should accept an exception parameter. Update "
"your code as this parameter will be required in Django 2.0",
RemovedInDjango20Warning, stacklevel=2
)
response = callback(request, **param_dict)
except:
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
return response
def get_response(self, request):
"Returns an HttpResponse object for the given HttpRequest"
# Setup default url resolver for this thread, this code is outside
# the try/except so we don't get a spurious "unbound local
# variable" exception in the event an exception is raised before
# resolver is set
urlconf = settings.ROOT_URLCONF
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.get_resolver(urlconf)
# Use a flag to check if the response was rendered to prevent
# multiple renderings or to force rendering if necessary.
response_is_rendered = False
try:
response = None
# Apply request middleware
for middleware_method in self._request_middleware:
response = middleware_method(request)
if response:
break
if response is None:
if hasattr(request, 'urlconf'):
# Reset url resolver with a custom urlconf.
urlconf = request.urlconf
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.get_resolver(urlconf)
resolver_match = resolver.resolve(request.path_info)
callback, callback_args, callback_kwargs = resolver_match
request.resolver_match = resolver_match
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
try:
response = wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = self.process_exception_by_middleware(e, request)
# Complain if the view returned None (a common error).
if response is None:
if isinstance(callback, types.FunctionType): # FBV
view_name = callback.__name__
else: # CBV
view_name = callback.__class__.__name__ + '.__call__'
raise ValueError("The view %s.%s didn't return an HttpResponse object. It returned None instead."
% (callback.__module__, view_name))
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
# Complain if the template response middleware returned None (a common error).
if response is None:
raise ValueError(
"%s.process_template_response didn't return an "
"HttpResponse object. It returned None instead."
% (middleware_method.__self__.__class__.__name__))
try:
response = response.render()
except Exception as e:
response = self.process_exception_by_middleware(e, request)
response_is_rendered = True
except http.Http404 as exc:
logger.warning('Not Found: %s', request.path,
extra={
'status_code': 404,
'request': request
})
if settings.DEBUG:
response = debug.technical_404_response(request, exc)
else:
response = self.get_exception_response(request, resolver, 404, exc)
except PermissionDenied as exc:
logger.warning(
'Forbidden (Permission denied): %s', request.path,
extra={
'status_code': 403,
'request': request
})
response = self.get_exception_response(request, resolver, 403, exc)
except MultiPartParserError as exc:
logger.warning(
'Bad request (Unable to parse request body): %s', request.path,
extra={
'status_code': 400,
'request': request
})
response = self.get_exception_response(request, resolver, 400, exc)
except SuspiciousOperation as exc:
# The request logger receives events for any problematic request
# The security logger receives events for all SuspiciousOperations
security_logger = logging.getLogger('django.security.%s' %
exc.__class__.__name__)
security_logger.error(
force_text(exc),
extra={
'status_code': 400,
'request': request
})
if settings.DEBUG:
return debug.technical_500_response(request, *sys.exc_info(), status_code=400)
response = self.get_exception_response(request, resolver, 400, exc)
except SystemExit:
# Allow sys.exit() to actually exit. See tickets #1023 and #4701
raise
except: # Handle everything else.
# Get the exception info now, in case another exception is thrown later.
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
try:
# Apply response middleware, regardless of the response
for middleware_method in self._response_middleware:
response = middleware_method(request, response)
# Complain if the response middleware returned None (a common error).
if response is None:
raise ValueError(
"%s.process_response didn't return an "
"HttpResponse object. It returned None instead."
% (middleware_method.__self__.__class__.__name__))
response = self.apply_response_fixes(request, response)
except: # Any exception should be gathered and handled
signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
response._closable_objects.append(request)
# If the exception handler returns a TemplateResponse that has not
# been rendered, force it to be rendered.
if not response_is_rendered and callable(getattr(response, 'render', None)):
response = response.render()
return response
def process_exception_by_middleware(self, exception, request):
"""
Pass the exception to the exception middleware. If no middleware
return a response for this exception, raise it.
"""
for middleware_method in self._exception_middleware:
response = middleware_method(request, exception)
if response:
return response
raise
def handle_uncaught_exception(self, request, resolver, exc_info):
"""
Processing for any otherwise uncaught exceptions (those that will
generate HTTP 500 responses). Can be overridden by subclasses who want
customised 500 handling.
Be *very* careful when overriding this because the error could be
caused by anything, so assuming something like the database is always
available would be an error.
"""
if settings.DEBUG_PROPAGATE_EXCEPTIONS:
raise
logger.error('Internal Server Error: %s', request.path,
exc_info=exc_info,
extra={
'status_code': 500,
'request': request
}
)
if settings.DEBUG:
return debug.technical_500_response(request, *exc_info)
# If Http500 handler is not installed, re-raise last exception
if resolver.urlconf_module is None:
six.reraise(*exc_info)
# Return an HttpResponse that displays a friendly error message.
callback, param_dict = resolver.resolve_error_handler(500)
return callback(request, **param_dict)
def apply_response_fixes(self, request, response):
"""
Applies each of the functions in self.response_fixes to the request and
response, modifying the response in the process. Returns the new
response.
"""
for func in self.response_fixes:
response = func(request, response)
return response
| bsd-3-clause |
hackendless/heekscnc | roughing_funcs.py | 24 | 13251 | import kurve
import area
from nc.nc import *
import math
# roughing_funcs.py - intended to be used for lathe roughing,
# adapted from area_funcs.py and turning.py,
# and possibly for roughing a profile, approaching the part from the side.
# some globals, to save passing variables as parameters too much
area_for_feed_possible = None
tool_radius_for_pocket = None
def make_area_for_roughing(k):
num_spans = kurve.num_spans(k)
if num_spans == 0:
raise "sketch has no spans!"
d, startx, starty, ex, ey, cx, cy = kurve.get_span(k, 0)
d, sx, sy, endx, endy, cx, cy = kurve.get_span(k, num_spans - 1)
a = area.Area()
c = area.Curve()
largey = 7
for span in range(0, num_spans):
d, sx, sy, ex, ey, cx, cy = kurve.get_span(k, span)
if span == 0:# first span
c.append(area.Vertex(0, area.Point(startx, largey), area.Point(0, 0)))
c.append(area.Vertex(d, area.Point(ex, ey), area.Point(cx, cy)))
# close the area
c.append(area.Vertex(0, area.Point(endx, largey), area.Point(0, 0)))
c.append(area.Vertex(0, area.Point(startx, largey), area.Point(0, 0)))
a.append(c)
return a
def cut_curve(curve, need_rapid, p, rapid_down_to_height, final_depth):
prev_p = p
first = True
for vertex in curve.getVertices():
if need_rapid and first:
# rapid across
rapid(vertex.p.x, vertex.p.y)
##rapid down
rapid(z = rapid_down_to_height)
#feed down
feed(z = final_depth)
#x_first=vertex.p.x;y_first=vertex.p.y
first = False
else:
dc = vertex.c - prev_p
if vertex.type == 1:
arc_ccw(vertex.p.x, vertex.p.y, i = dc.x, j = dc.y)
elif vertex.type == -1:
arc_cw(vertex.p.x, vertex.p.y, i = dc.x, j = dc.y)
else:
feed(vertex.p.x, vertex.p.y)
#rapid(x_first,y_first)
#rapid(x_first)
#rapid(vertex.p.y)
#x_first=vertex.p.x;y_first=vertex.p.y
#rapid(x=(vertex.p.x+1))
prev_p = vertex.p
return prev_p
def cut_curve_lathe(curve, need_rapid, p, rapid_down_to_height, final_depth):
prev_p = p
first = True
l = []
feed(z=0)
for vertex in curve.getVertices():
if need_rapid and first:
# rapid across
rapid(vertex.p.x, vertex.p.y)
first = False
l.append((vertex.p.x,vertex.p.y))
feed(x=l[0][0])
feed(y=l[0][1])
feed(x=l[1][0])
#pull tool away from profile at 45 degree angle- back towards Y+ and X start point
rapid(x=(l[1][0]+(l[2][1]-l[0][1])),y=l[2][1])
rapid(x=l[3][0])
rapid(y=l[0][1])
prev_p = vertex.p
return prev_p
def area_distance(a, old_area):
best_dist = None
for curve in a.getCurves():
for vertex in curve.getVertices():
c = old_area.NearestPoint(vertex.p)
d = c.dist(vertex.p)
if best_dist == None or d < best_dist:
best_dist = d
for curve in old_area.getCurves():
for vertex in curve.getVertices():
c = a.NearestPoint(vertex.p)
d = c.dist(vertex.p)
if best_dist == None or d < best_dist:
best_dist = d
return best_dist
def make_obround(p0, p1, radius):
dir = p1 - p0
d = dir.length()
dir.normalize()
right = area.Point(dir.y, -dir.x)
obround = area.Area()
c = area.Curve()
vt0 = p0 + right * radius
vt1 = p1 + right * radius
vt2 = p1 - right * radius
vt3 = p0 - right * radius
c.append(area.Vertex(0, vt0, area.Point(0, 0)))
c.append(area.Vertex(0, vt1, area.Point(0, 0)))
c.append(area.Vertex(1, vt2, p1))
c.append(area.Vertex(0, vt3, area.Point(0, 0)))
c.append(area.Vertex(1, vt0, p0))
obround.append(c)
return obround
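# Illustrative usage (not part of the original module): the obround for a
# straight move from (0, 0) to (10, 0) with a radius-3 cutter is a slot ten
# units long capped by two radius-3 arcs.
#
#     slot = make_obround(area.Point(0, 0), area.Point(10, 0), 3.0)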
def feed_possible(p0, p1):
obround = make_obround(p0, p1, tool_radius_for_pocket)
a = area.Area(area_for_feed_possible)
obround.Subtract(a)
if obround.num_curves() > 0:
return False
return True
def cut_curvelist(curve_list, rapid_down_to_height, depth, clearance_height, keep_tool_down_if_poss):
p = area.Point(0, 0)
first = True
for curve in curve_list:
need_rapid = True
if first == False:
s = curve.FirstVertex().p
if keep_tool_down_if_poss == True:
# see if we can feed across
if feed_possible(p, s):
need_rapid = False
elif s.x == p.x and s.y == p.y:
need_rapid = False
#rapid(p.x,p.y)
if need_rapid:
rapid(z = clearance_height)
p = cut_curve_lathe(curve, need_rapid, p, rapid_down_to_height, depth)
first = False
rapid(z = clearance_height)
def get_curve_list(arealist):
curve_list = list()
for a in arealist:
for curve in a.getCurves():
curve_list.append(curve)
return curve_list
curve_list_for_zigs = []
rightward_for_zigs = True
sin_angle_for_zigs = 0.0
cos_angle_for_zigs = 1.0
sin_minus_angle_for_zigs = 0.0
cos_minus_angle_for_zigs = 1.0
test_count = 0
def make_zig_curve(curve, y0, y):
global test_count
if rightward_for_zigs:
curve.Reverse()
zig = area.Curve()
zig_started = False
zag_found = False
prev_p = None
for vertex in curve.getVertices():
if prev_p != None:
if math.fabs(vertex.p.y - y0) < 0.002:
if zig_started:
zig.append(unrotated_vertex(vertex))
elif math.fabs(prev_p.y - y0) < 0.002 and vertex.type == 0:
zig.append(area.Vertex(0, unrotated_point(prev_p), area.Point(0, 0)))
zig.append(unrotated_vertex(vertex))
zig_started = True
elif zig_started:
zig.append(unrotated_vertex(vertex))
if math.fabs(vertex.p.y - y) < 0.002:
zag_found = True
break
prev_p = vertex.p
if zig_started:
curve_list_for_zigs.append(zig)
def make_zig(a, y0, y):
for curve in a.getCurves():
make_zig_curve(curve, y0, y)
reorder_zig_list_list = []
def add_reorder_zig(curve):
global reorder_zig_list_list
# look in existing lists
s = curve.FirstVertex().p
for curve_list in reorder_zig_list_list:
last_curve = curve_list[len(curve_list) - 1]
e = last_curve.LastVertex().p
if math.fabs(s.x - e.x) < 0.002 and math.fabs(s.y - e.y) < 0.002:
curve_list.append(curve)
return
# else add a new list
curve_list = []
curve_list.append(curve)
reorder_zig_list_list.append(curve_list)
def reorder_zigs():
global curve_list_for_zigs
global reorder_zig_list_list
reorder_zig_list_list = []
for curve in curve_list_for_zigs:
add_reorder_zig(curve)
curve_list_for_zigs = []
for curve_list in reorder_zig_list_list:
for curve in curve_list:
curve_list_for_zigs.append(curve)
def rotated_point(p):
return area.Point(p.x * cos_angle_for_zigs - p.y * sin_angle_for_zigs, p.x * sin_angle_for_zigs + p.y * cos_angle_for_zigs)
def unrotated_point(p):
return area.Point(p.x * cos_minus_angle_for_zigs - p.y * sin_minus_angle_for_zigs, p.x * sin_minus_angle_for_zigs + p.y * cos_minus_angle_for_zigs)
def rotated_vertex(v):
if v.type:
return area.Vertex(v.type, rotated_point(v.p), rotated_point(v.c))
return area.Vertex(v.type, rotated_point(v.p), area.Point(0, 0))
def unrotated_vertex(v):
if v.type:
return area.Vertex(v.type, unrotated_point(v.p), unrotated_point(v.c))
return area.Vertex(v.type, unrotated_point(v.p), area.Point(0, 0))
def rotated_area(a):
an = area.Area()
for curve in a.getCurves():
curve_new = area.Curve()
for v in curve.getVertices():
curve_new.append(rotated_vertex(v))
an.append(curve_new)
return an
def zigzag(a, a_firstoffset, stepover):
if a.num_curves() == 0:
return
global rightward_for_zigs
global curve_list_for_zigs
global test_count
global sin_angle_for_zigs
global cos_angle_for_zigs
global sin_minus_angle_for_zigs
global cos_minus_angle_for_zigs
a = rotated_area(a)
b = area.Box()
a.GetBox(b)
#x0 = b.MinX() - 1.0
#x1 = b.MaxX() + 1.0
x1 = b.MinX() - 1.0
x0 = b.MaxX() + 1.0
height = b.MaxY() - b.MinY()
num_steps = int(height / stepover + 1)
#y = b.MinY() + 0.1
y = b.MaxY() - 0.1
null_point = area.Point(0, 0)
rightward_for_zigs = True
curve_list_for_zigs = []
test_count = 0
for i in range(0, num_steps):
#collect vertices for a box shape from X+,Y+ toward the curve
#then move the tool Y+ and then back toward the X start position
# ------->
# |
# -------<
test_count = test_count + 1
y0 = y
#y = y + stepover
y = y - stepover
p0 = area.Point(x0, y0)
p1 = area.Point(x0, y)
p2 = area.Point(x1, y)
p3 = area.Point(x1, y0)
c = area.Curve()
c.append(area.Vertex(0, p0, null_point, 0))
c.append(area.Vertex(0, p1, null_point, 0))
c.append(area.Vertex(0, p2, null_point, 1))
c.append(area.Vertex(0, p3, null_point, 0))
c.append(area.Vertex(0, p0, null_point, 1))
a2 = area.Area()
a2.append(c)
a2.Intersect(a)
rightward_for_zigs = (rightward_for_zigs == False)
y10 = y + stepover
#y = y + stepover
y2 = y + stepover*2
p10 = area.Point(x0, y10)
p11 = area.Point(x0, y2)
p12 = area.Point(x1, y2)
p13 = area.Point(x1, y10)
c2 = area.Curve()
c2.append(area.Vertex(0, p10, null_point, 0))
c2.append(area.Vertex(0, p11, null_point, 0))
c2.append(area.Vertex(0, p12, null_point, 1))
c2.append(area.Vertex(0, p13, null_point, 0))
c2.append(area.Vertex(0, p10, null_point, 1))
a3 = area.Area()
a3.append(c2)
a3.Intersect(a)
make_zig(a3, y0, y)
rightward_for_zigs = (rightward_for_zigs == False)
reorder_zigs()
def pocket(a, tool_radius, extra_offset, rapid_down_to_height, start_depth, final_depth, stepover, stepdown, round_corner_factor, clearance_height, from_center, keep_tool_down_if_poss, use_zig_zag, zig_angle):
global area_for_feed_possible
global tool_radius_for_pocket
global sin_angle_for_zigs
global cos_angle_for_zigs
global sin_minus_angle_for_zigs
global cos_minus_angle_for_zigs
tool_radius_for_pocket = tool_radius
radians_angle = zig_angle * math.pi / 180
sin_angle_for_zigs = math.sin(-radians_angle)
cos_angle_for_zigs = math.cos(-radians_angle)
sin_minus_angle_for_zigs = math.sin(radians_angle)
cos_minus_angle_for_zigs = math.cos(radians_angle)
if rapid_down_to_height > clearance_height:
rapid_down_to_height = clearance_height
area.set_round_corner_factor(round_corner_factor)
arealist = list()
area_for_feed_possible = area.Area(a)
area_for_feed_possible.Offset(extra_offset - 0.01)
a_firstoffset = area.Area(a)
a_firstoffset.Offset(tool_radius + extra_offset)
if use_zig_zag:
zigzag(a_firstoffset, a_firstoffset, stepover)
curve_list = curve_list_for_zigs
else:
pass #we're just using zig_zag for roughing
layer_count = int((start_depth - final_depth) / stepdown)
if layer_count * stepdown + 0.00001 < start_depth - final_depth:
layer_count += 1
for i in range(1, layer_count+1):
if i == layer_count:
depth = final_depth
else:
depth = start_depth - i * stepdown
cut_curvelist(curve_list, rapid_down_to_height, depth, clearance_height, keep_tool_down_if_poss)
def rough_open_prof(k,tool_diameter, extra_offset, rapid_down_to_height, start_depth, final_depth, stepover, stepdown, round_corner_factor, clearance_height):
pass
a = make_area_for_roughing(k)
pocket(a, tool_diameter/2, extra_offset, rapid_down_to_height, start_depth, final_depth, stepover, stepdown, round_corner_factor, clearance_height, 1, True, True, 0)
#pocket(a7, tool_diameter/2, 0.05, rapid_down_to_height, start_depth, final_depth, 0.075, step_down, 1, clearance, 1, True, True, 0)
| bsd-3-clause |
alexforencich/python-ivi | ivi/agilent/agilentMSOX92004A.py | 2 | 1692 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentMSOX92004A(agilent90000):
"Agilent Infiniium MSOX92004A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'MSOX92004A')
super(agilentMSOX92004A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 16
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 20e9
self._init_channels()
| mit |
DemocracyClub/yournextrepresentative | ynr/apps/cached_counts/views.py | 1 | 3171 | import json
from django.db.models import Count
from django.http import HttpResponse
from django.views.generic import TemplateView
from candidates.models import Ballot
from elections.mixins import ElectionMixin
from elections.models import Election
from parties.models import Party
from popolo.models import Membership
from .models import get_attention_needed_posts
def get_counts(for_json=True):
election_id_to_candidates = {}
qs = (
Membership.objects.all()
.values("ballot__election")
.annotate(count=Count("ballot__election"))
.order_by()
)
for d in qs:
election_id_to_candidates[d["ballot__election"]] = d["count"]
grouped_elections = Election.group_and_order_elections(for_json=for_json)
for era_data in grouped_elections:
for date, elections in era_data["dates"].items():
for role_data in elections:
for election_data in role_data["elections"]:
e = election_data["election"]
total = election_id_to_candidates.get(e.id, 0)
election_counts = {
"id": e.slug,
"html_id": e.slug.replace(".", "-"),
"name": e.name,
"total": total,
}
election_data.update(election_counts)
del election_data["election"]
return grouped_elections
class ReportsHomeView(TemplateView):
template_name = "reports.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["all_elections"] = get_counts()
return context
def get(self, *args, **kwargs):
if self.request.GET.get("format") == "json":
return HttpResponse(
json.dumps(get_counts(for_json=True)),
content_type="application/json",
)
return super().get(*args, **kwargs)
class PartyCountsView(ElectionMixin, TemplateView):
template_name = "party_counts.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
qs = Party.objects.filter(
membership__ballot__election=self.election_data
)
qs = qs.annotate(count=Count("membership"))
qs = qs.order_by("-count", "name")
context["party_counts"] = qs
return context
class ConstituencyCountsView(ElectionMixin, TemplateView):
template_name = "constituency_counts.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
qs = Ballot.objects.filter(election=self.election_data).annotate(
count=Count("membership")
)
qs = qs.select_related("post", "election")
qs = qs.order_by("-count")
context["post_counts"] = qs
return context
class AttentionNeededView(TemplateView):
template_name = "attention_needed.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["post_counts"] = get_attention_needed_posts()
return context
| agpl-3.0 |
olt/mapproxy | mapproxy/service/ows.py | 13 | 1357 | # This file is part of the MapProxy project.
# Copyright (C) 2011 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper service handler for all OWS services (/service?).
"""
class OWSServer(object):
"""
Wraps all OWS services (/service?, /ows?, /wms?, /wmts?) and dispatches requests
based on the ``services`` query argument.
"""
def __init__(self, services):
self.names = ['service', 'ows']
self.services = {}
for service in services:
if service.service == 'wms' and 'wms' not in self.names:
self.names.append('wms')
self.services[service.service] = service
def handle(self, req):
service = req.args.get('service', 'wms').lower()
assert service in self.services
return self.services[service].handle(req)
| apache-2.0 |
rfleschenberg/djangocms-cascade | cmsplugin_cascade/migrations/0009_cascadepage.py | 1 | 1447 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-07 22:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('cms', '0013_urlconfrevision'),
('cmsplugin_cascade', '0008_sortableinlinecascadeelement'),
]
operations = [
migrations.CreateModel(
name='CascadePage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('settings', jsonfield.fields.JSONField(blank=True, default={}, help_text='User editable settings for this page.')),
('glossary', jsonfield.fields.JSONField(blank=True, default={}, help_text='Store for arbitrary page data.')),
('extended_object', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='cms.Page')),
('public_extension', models.OneToOneField(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='draft_extension', to='cmsplugin_cascade.CascadePage')),
],
options={
'db_table': 'cmsplugin_cascade_page',
'verbose_name': 'Cascade Page Settings',
'verbose_name_plural': 'Cascade Page Settings',
},
),
]
| mit |
softak/webfaction_demo | vendor-local/lib/python/django/core/management/base.py | 248 | 16452 | """
Base classes for writing management commands (named commands which can
be executed through ``django-admin.py`` or ``manage.py``).
"""
import os
import sys
from optparse import make_option, OptionParser
import django
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style
from django.utils.encoding import smart_str
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
pass
def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
class BaseCommand(object):
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin.py`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``OptionParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` raised a ``CommandError``, ``execute()`` will
instead print an error message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``args``
A string listing the arguments accepted by the command,
suitable for use in help messages; e.g., a command which takes
a list of application names might set this to '<appname
appname ...>'.
``can_import_settings``
A boolean indicating whether the command needs to be able to
import Django settings; if ``True``, ``execute()`` will verify
that this is possible before proceeding. Default value is
``True``.
``help``
A short description of the command, which will be printed in
help messages.
``option_list``
This is the list of ``optparse`` options which will be fed
into the command's ``OptionParser`` for parsing arguments.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_model_validation``
A boolean; if ``True``, validation of installed models will be
performed prior to executing the command. Default value is
``True``. To validate an individual application's models
rather than all applications' models, call
``self.validate(app)`` from ``handle()``, where ``app`` is the
application's Python module.
"""
# Metadata about this command.
option_list = (
make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
type='choice', choices=['0', '1', '2', '3'],
help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
make_option('--settings',
help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
make_option('--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
make_option('--traceback', action='store_true',
help='Print traceback on exception'),
)
help = ''
args = ''
# Configuration shortcuts that alter various logic.
can_import_settings = True
requires_model_validation = True
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
def __init__(self):
self.style = color_style()
def get_version(self):
"""
Return the Django version, which should be correct for all
built-in Django commands. User-supplied commands should
override this method.
"""
return django.get_version()
def usage(self, subcommand):
"""
Return a brief description of how to use this command, by
default from the attribute ``self.help``.
"""
usage = '%%prog %s [options] %s' % (subcommand, self.args)
if self.help:
return '%s\n\n%s' % (usage, self.help)
else:
return usage
def create_parser(self, prog_name, subcommand):
"""
Create and return the ``OptionParser`` which will be used to
parse the arguments to this command.
"""
return OptionParser(prog=prog_name,
usage=self.usage(subcommand),
version=self.get_version(),
option_list=self.option_list)
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command.
"""
parser = self.create_parser(argv[0], argv[1])
options, args = parser.parse_args(argv[2:])
handle_default_options(options)
self.execute(*args, **options.__dict__)
def execute(self, *args, **options):
"""
Try to execute this command, performing model validation if
needed (as controlled by the attribute
``self.requires_model_validation``). If the command raises a
``CommandError``, intercept it and print it sensibly to
stderr.
"""
# Switch to English, because django-admin.py creates database content
# like permissions, and those shouldn't contain any translations.
# But only do this if we can assume we have a working settings file,
# because django.utils.translation requires settings.
if self.can_import_settings:
try:
from django.utils import translation
translation.activate('en-us')
except ImportError, e:
# If settings should be available, but aren't,
# raise the error and quit.
sys.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
sys.exit(1)
try:
self.stdout = options.get('stdout', sys.stdout)
self.stderr = options.get('stderr', sys.stderr)
if self.requires_model_validation:
self.validate()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
# This needs to be imported here, because it relies on
# settings.
from django.db import connections, DEFAULT_DB_ALIAS
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
if connection.ops.start_transaction_sql():
self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()) + '\n')
self.stdout.write(output)
if self.output_transaction:
self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;") + '\n')
except CommandError, e:
self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
sys.exit(1)
def validate(self, app=None, display_num_errors=False):
"""
Validates the given app, raising CommandError for any errors.
If app is None, then this will validate all installed apps.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
s = StringIO()
num_errors = get_validation_errors(s, app)
if num_errors:
s.seek(0)
error_text = s.read()
raise CommandError("One or more models did not validate:\n%s" % error_text)
if display_num_errors:
self.stdout.write("%s error%s found\n" % (num_errors, num_errors != 1 and 's' or ''))
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError()
class AppCommand(BaseCommand):
"""
A management command which takes one or more installed application
names as arguments, and does something with each of them.
Rather than implementing ``handle()``, subclasses must implement
``handle_app()``, which will be called once for each application.
"""
args = '<appname appname ...>'
def handle(self, *app_labels, **options):
from django.db import models
if not app_labels:
raise CommandError('Enter at least one appname.')
try:
app_list = [models.get_app(app_label) for app_label in app_labels]
except (ImproperlyConfigured, ImportError), e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
output = []
for app in app_list:
app_output = self.handle_app(app, **options)
if app_output:
output.append(app_output)
return '\n'.join(output)
def handle_app(self, app, **options):
"""
Perform the command's actions for ``app``, which will be the
Python module corresponding to an application name given on
the command line.
"""
raise NotImplementedError()
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
args = '<label label ...>'
label = 'label'
def handle(self, *labels, **options):
if not labels:
raise CommandError('Enter at least one %s.' % self.label)
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError()
class NoArgsCommand(BaseCommand):
"""
A command which takes no arguments on the command line.
Rather than implementing ``handle()``, subclasses must implement
``handle_noargs()``; ``handle()`` itself is overridden to ensure
no arguments are passed to the command.
Attempting to pass arguments will raise ``CommandError``.
"""
args = ''
def handle(self, *args, **options):
if args:
raise CommandError("Command doesn't accept any arguments")
return self.handle_noargs(**options)
def handle_noargs(self, **options):
"""
Perform this command's actions.
"""
raise NotImplementedError()
def copy_helper(style, app_or_project, name, directory, other_name=''):
"""
Copies either a Django application layout template or a Django project
layout template into the specified directory.
"""
# style -- A color style object (see django.core.management.color).
# app_or_project -- The string 'app' or 'project'.
# name -- The name of the application or project.
# directory -- The directory to which the layout template should be copied.
# other_name -- When copying an application layout, this should be the name
# of the project.
import re
import shutil
other = {'project': 'app', 'app': 'project'}[app_or_project]
if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name.
# Provide a smart error message, depending on the error.
if not re.search(r'^[_a-zA-Z]', name):
message = 'make sure the name begins with a letter or underscore'
else:
message = 'use only numbers, letters and underscores'
raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message))
top_dir = os.path.join(directory, name)
try:
os.mkdir(top_dir)
except OSError, e:
raise CommandError(e)
# Determine where the app or project templates are. Use
# django.__path__[0] because we don't know into which directory
# django has been installed.
template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project)
for d, subdirs, files in os.walk(template_dir):
relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
if relative_dir:
os.mkdir(os.path.join(top_dir, relative_dir))
for subdir in subdirs[:]:
if subdir.startswith('.'):
subdirs.remove(subdir)
for f in files:
if not f.endswith('.py'):
# Ignore .pyc, .pyo, .py.class etc, as they cause various
# breakages.
continue
path_old = os.path.join(d, f)
path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
fp_old = open(path_old, 'r')
fp_new = open(path_new, 'w')
fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
fp_old.close()
fp_new.close()
try:
shutil.copymode(path_old, path_new)
_make_writeable(path_new)
except OSError:
sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
def _make_writeable(filename):
"""
Make sure that the file is writeable. Useful if our source is
read-only.
"""
import stat
if sys.platform.startswith('java'):
# On Jython there is no os.access()
return
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
| bsd-3-clause |
Changaco/oh-mainline | vendor/packages/gdata/src/gdata/tlslite/X509CertChain.py | 238 | 6861 | """Class representing an X.509 certificate chain."""
from utils import cryptomath
class X509CertChain:
"""This class represents a chain of X.509 certificates.
@type x509List: list
@ivar x509List: A list of L{tlslite.X509.X509} instances,
starting with the end-entity certificate and with every
subsequent certificate certifying the previous.
"""
def __init__(self, x509List=None):
"""Create a new X509CertChain.
@type x509List: list
@param x509List: A list of L{tlslite.X509.X509} instances,
starting with the end-entity certificate and with every
subsequent certificate certifying the previous.
"""
if x509List:
self.x509List = x509List
else:
self.x509List = []
def getNumCerts(self):
"""Get the number of certificates in this chain.
@rtype: int
"""
return len(self.x509List)
def getEndEntityPublicKey(self):
"""Get the public key from the end-entity certificate.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].publicKey
def getFingerprint(self):
"""Get the hex-encoded fingerprint of the end-entity certificate.
@rtype: str
@return: A hex-encoded fingerprint.
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].getFingerprint()
def getCommonName(self):
"""Get the Subject's Common Name from the end-entity certificate.
The cryptlib_py module must be installed in order to use this
function.
@rtype: str or None
@return: The CN component of the certificate's subject DN, if
present.
"""
if self.getNumCerts() == 0:
raise AssertionError()
return self.x509List[0].getCommonName()
def validate(self, x509TrustList):
"""Check the validity of the certificate chain.
This checks that every certificate in the chain validates with
the subsequent one, until some certificate validates with (or
is identical to) one of the passed-in root certificates.
The cryptlib_py module must be installed in order to use this
function.
@type x509TrustList: list of L{tlslite.X509.X509}
@param x509TrustList: A list of trusted root certificates. The
certificate chain must extend to one of these certificates to
be considered valid.
"""
import cryptlib_py
c1 = None
c2 = None
lastC = None
rootC = None
try:
rootFingerprints = [c.getFingerprint() for c in x509TrustList]
#Check that every certificate in the chain validates with the
#next one
for cert1, cert2 in zip(self.x509List, self.x509List[1:]):
#If we come upon a root certificate, we're done.
if cert1.getFingerprint() in rootFingerprints:
return True
c1 = cryptlib_py.cryptImportCert(cert1.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
c2 = cryptlib_py.cryptImportCert(cert2.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
try:
cryptlib_py.cryptCheckCert(c1, c2)
except:
return False
cryptlib_py.cryptDestroyCert(c1)
c1 = None
cryptlib_py.cryptDestroyCert(c2)
c2 = None
#If the last certificate is one of the root certificates, we're
#done.
if self.x509List[-1].getFingerprint() in rootFingerprints:
return True
#Otherwise, find a root certificate that the last certificate
#chains to, and validate them.
lastC = cryptlib_py.cryptImportCert(self.x509List[-1].writeBytes(),
cryptlib_py.CRYPT_UNUSED)
for rootCert in x509TrustList:
rootC = cryptlib_py.cryptImportCert(rootCert.writeBytes(),
cryptlib_py.CRYPT_UNUSED)
if self._checkChaining(lastC, rootC):
try:
cryptlib_py.cryptCheckCert(lastC, rootC)
return True
except:
return False
return False
finally:
if not (c1 is None):
cryptlib_py.cryptDestroyCert(c1)
if not (c2 is None):
cryptlib_py.cryptDestroyCert(c2)
if not (lastC is None):
cryptlib_py.cryptDestroyCert(lastC)
if not (rootC is None):
cryptlib_py.cryptDestroyCert(rootC)
def _checkChaining(self, lastC, rootC):
import cryptlib_py
import array
def compareNames(name):
try:
length = cryptlib_py.cryptGetAttributeString(lastC, name, None)
lastName = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(lastC, name, lastName)
lastName = lastName.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
lastName = None
try:
length = cryptlib_py.cryptGetAttributeString(rootC, name, None)
rootName = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(rootC, name, rootName)
rootName = rootName.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
rootName = None
return lastName == rootName
cryptlib_py.cryptSetAttribute(lastC,
cryptlib_py.CRYPT_CERTINFO_ISSUERNAME,
cryptlib_py.CRYPT_UNUSED)
if not compareNames(cryptlib_py.CRYPT_CERTINFO_COUNTRYNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_LOCALITYNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_ORGANIZATIONALUNITNAME):
return False
if not compareNames(cryptlib_py.CRYPT_CERTINFO_COMMONNAME):
return False
return True
| agpl-3.0 |
mszewczy/odoo | addons/account/wizard/account_open_closed_fiscalyear.py | 237 | 2537 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_open_closed_fiscalyear(osv.osv_memory):
_name = "account.open.closed.fiscalyear"
_description = "Choose Fiscal Year"
_columns = {
'fyear_id': fields.many2one('account.fiscalyear', \
'Fiscal Year', required=True, help='Select Fiscal Year which you want to remove entries for its End of year entries journal'),
}
def remove_entries(self, cr, uid, ids, context=None):
move_obj = self.pool.get('account.move')
data = self.browse(cr, uid, ids, context=context)[0]
period_journal = data.fyear_id.end_journal_period_id or False
if not period_journal:
raise osv.except_osv(_('Error!'), _("You have to set the 'End of Year Entries Journal' for this Fiscal Year which is set after generating opening entries from 'Generate Opening Entries'."))
if period_journal.period_id.state == 'done':
raise osv.except_osv(_('Error!'), _("You can not cancel closing entries if the 'End of Year Entries Journal' period is closed."))
ids_move = move_obj.search(cr, uid, [('journal_id','=',period_journal.journal_id.id),('period_id','=',period_journal.period_id.id)])
if ids_move:
cr.execute('delete from account_move where id IN %s', (tuple(ids_move),))
self.invalidate_cache(cr, uid, context=context)
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jendap/tensorflow | tensorflow/contrib/distributions/python/ops/moving_stats.py | 42 | 10125 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for computing moving statistics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
__all__ = [
"assign_moving_mean_variance",
"assign_log_moving_mean_exp",
"moving_mean_variance",
]
def assign_moving_mean_variance(
mean_var, variance_var, value, decay, name=None):
"""Compute exponentially weighted moving {mean,variance} of a streaming value.
The `value`-updated exponentially weighted moving `mean_var` and
`variance_var` are given by the following recurrence relations:
```python
variance_var = decay * (variance_var + (1-decay) * (value - mean_var)**2)
mean_var = decay * mean_var + (1 - decay) * value
```
Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses
the lag-1 mean.
For derivation justification, see [Finch (2009; Eq. 143)][1].
Args:
mean_var: `float`-like `Variable` representing the exponentially weighted
moving mean. Same shape as `variance_var` and `value`.
variance_var: `float`-like `Variable` representing the
exponentially weighted moving variance. Same shape as `mean_var` and
`value`.
value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`.
decay: A `float`-like `Tensor`. The moving mean decay. Typically close to
`1.`, e.g., `0.999`.
name: Optional name of the returned operation.
Returns:
mean_var: `Variable` representing the `value`-updated exponentially weighted
moving mean.
variance_var: `Variable` representing the `value`-updated
exponentially weighted moving variance.
Raises:
TypeError: if `mean_var` does not have float type `dtype`.
TypeError: if `mean_var`, `variance_var`, `value`, `decay` have different
`base_dtype`.
#### References
[1]: Tony Finch. Incremental calculation of weighted mean and variance.
_Technical Report_, 2009.
http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf
"""
with ops.name_scope(name, "assign_moving_mean_variance",
[variance_var, mean_var, value, decay]):
with ops.colocate_with(variance_var):
with ops.colocate_with(mean_var):
base_dtype = mean_var.dtype.base_dtype
if not base_dtype.is_floating:
raise TypeError(
"mean_var.base_dtype({}) does not have float type "
"`dtype`.".format(base_dtype.name))
if base_dtype != variance_var.dtype.base_dtype:
raise TypeError(
"mean_var.base_dtype({}) != variance_var.base_dtype({})".format(
base_dtype.name,
variance_var.dtype.base_dtype.name))
value = ops.convert_to_tensor(value, dtype=base_dtype, name="value")
decay = ops.convert_to_tensor(decay, dtype=base_dtype, name="decay")
delta = value - mean_var
with ops.control_dependencies([delta]):
mean_var = state_ops.assign_add(
mean_var,
(1. - decay) * delta)
variance_var = state_ops.assign_sub(
variance_var,
(1. - decay) * (variance_var - decay * math_ops.square(delta)))
return mean_var, variance_var
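# Usage sketch (variable names and the tf import are assumptions, not part
# of this module):
#
#   mean_var = tf.Variable(0., trainable=False)
#   variance_var = tf.Variable(0., trainable=False)
#   mean_var, variance_var = assign_moving_mean_variance(
#       mean_var, variance_var, value=sample, decay=0.999)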
def assign_log_moving_mean_exp(
log_mean_exp_var, log_value, decay, name=None):
"""Compute the log of the exponentially weighted moving mean of the exp.
If `log_value` is a draw from a stationary random variable, this function
approximates `log(E[exp(log_value)])`, i.e., a weighted log-sum-exp. More
precisely, a `tf.Variable`, `log_mean_exp_var`, is updated by `log_value`
using the following identity:
```none
log_mean_exp_var =
= log(decay exp(log_mean_exp_var) + (1 - decay) exp(log_value))
= log(exp(log_mean_exp_var + log(decay)) + exp(log_value + log1p(-decay)))
= log_mean_exp_var
+ log( exp(log_mean_exp_var - log_mean_exp_var + log(decay))
+ exp(log_value - log_mean_exp_var + log1p(-decay)))
= log_mean_exp_var
+ log_sum_exp([log(decay), log_value - log_mean_exp_var + log1p(-decay)]).
```
In addition to numerical stability, this formulation is advantageous because
`log_mean_exp_var` can be updated in a lock-free manner, i.e., using
`assign_add`. (Note: the updates are not thread-safe; it's just that the
update to the tf.Variable is presumed efficient due to being lock-free.)
Args:
log_mean_exp_var: `float`-like `Variable` representing the log of the
exponentially weighted moving mean of the exp. Same shape as `log_value`.
log_value: `float`-like `Tensor` representing a new (streaming) observation.
Same shape as `log_mean_exp_var`.
decay: A `float`-like `Tensor`. The moving mean decay. Typically close to
`1.`, e.g., `0.999`.
name: Optional name of the returned operation.
Returns:
log_mean_exp_var: A reference to the input 'Variable' tensor with the
`log_value`-updated log of the exponentially weighted moving mean of exp.
Raises:
TypeError: if `log_mean_exp_var` does not have float type `dtype`.
TypeError: if `log_mean_exp_var`, `log_value`, `decay` have different
`base_dtype`.
"""
with ops.name_scope(name, "assign_log_moving_mean_exp",
[log_mean_exp_var, log_value, decay]):
# We want to update the variable in a numerically stable and lock-free way.
# To do this, observe that variable `x` updated by `v` is:
# x = log(w exp(x) + (1-w) exp(v))
# = log(exp(x + log(w)) + exp(v + log1p(-w)))
# = x + log(exp(x - x + log(w)) + exp(v - x + log1p(-w)))
# = x + lse([log(w), v - x + log1p(-w)])
with ops.colocate_with(log_mean_exp_var):
base_dtype = log_mean_exp_var.dtype.base_dtype
if not base_dtype.is_floating:
raise TypeError(
"log_mean_exp_var.base_dtype({}) does not have float type "
"`dtype`.".format(base_dtype.name))
log_value = ops.convert_to_tensor(log_value, dtype=base_dtype,
name="log_value")
decay = ops.convert_to_tensor(decay, dtype=base_dtype, name="decay")
delta = (log_value - log_mean_exp_var)[array_ops.newaxis, ...]
x = array_ops.concat([
math_ops.log(decay) * array_ops.ones_like(delta),
delta + math_ops.log1p(-decay)
], axis=0)
x = math_ops.reduce_logsumexp(x, axis=0)
return log_mean_exp_var.assign_add(x)
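# Numerical sketch (values assumed): with decay = 0.9, log_mean_exp_var = 0.0
# and log_value = 1.0, the update adds
#   logsumexp([log(0.9), 1.0 + log1p(-0.9)]) = log(0.9*e**0 + 0.1*e**1)
# which is approximately 0.1586, matching log(decay*exp(x) + (1-decay)*exp(v)).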
def moving_mean_variance(value, decay, collections=None, name=None):
"""Compute exponentially weighted moving {mean,variance} of a streaming value.
The exponentially-weighting moving `mean_var` and `variance_var` are updated
by `value` according to the following recurrence:
```python
variance_var = decay * (variance_var + (1-decay) * (value - mean_var)**2)
mean_var = decay * mean_var + (1 - decay) * value
```
Note: `mean_var` is updated *after* `variance_var`, i.e., `variance_var` uses
the lag-`1` mean.
For derivation justification, see [Finch (2009; Eq. 143)][1].
Unlike `assign_moving_mean_variance`, this function handles
variable creation.
Args:
value: `float`-like `Tensor`. Same shape as `mean_var` and `variance_var`.
decay: A `float`-like `Tensor`. The moving mean decay. Typically close to
`1.`, e.g., `0.999`.
collections: Python list of graph-collections keys to which the internal
variables `mean_var` and `variance_var` are added.
Default value is `[GraphKeys.GLOBAL_VARIABLES]`.
name: Optional name of the returned operation.
Returns:
mean_var: `Variable` representing the `value`-updated exponentially weighted
moving mean.
variance_var: `Variable` representing the `value`-updated
exponentially weighted moving variance.
Raises:
TypeError: if `value_var` does not have float type `dtype`.
TypeError: if `value`, `decay` have different `base_dtype`.
#### References
[1]: Tony Finch. Incremental calculation of weighted mean and variance.
_Technical Report_, 2009.
http://people.ds.cam.ac.uk/fanf2/hermes/doc/antiforgery/stats.pdf
"""
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
with variable_scope.variable_scope(
name, "moving_mean_variance", [value, decay]):
value = ops.convert_to_tensor(value, name="value")
base_dtype = value.dtype.base_dtype
if not base_dtype.is_floating:
raise TypeError(
"value.base_dtype({}) does not have float type `dtype`.".format(
base_dtype.name))
decay = ops.convert_to_tensor(decay, dtype=base_dtype, name="decay")
variance_var = variable_scope.get_variable(
"moving_variance",
shape=value.shape,
dtype=value.dtype,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=collections)
mean_var = variable_scope.get_variable(
"moving_mean",
shape=value.shape,
dtype=value.dtype,
initializer=init_ops.zeros_initializer(),
trainable=False,
collections=collections)
return assign_moving_mean_variance(
mean_var, variance_var, value, decay)
| apache-2.0 |
apache/incubator-airflow | tests/www/test_security.py | 3 | 19165 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import unittest
from unittest import mock
from flask_appbuilder import SQLA, Model, expose, has_access
from flask_appbuilder.security.sqla import models as sqla_models
from flask_appbuilder.views import BaseView, ModelView
from sqlalchemy import Column, Date, Float, Integer, String
from airflow import settings
from airflow.exceptions import AirflowException
from airflow.models import DagModel
from airflow.security import permissions
from airflow.www import app as application
from airflow.www.utils import CustomSQLAInterface
from tests.test_utils import fab_utils
from tests.test_utils.db import clear_db_dags, clear_db_runs
from tests.test_utils.mock_security_manager import MockSecurityManager
READ_WRITE = {permissions.ACTION_CAN_READ, permissions.ACTION_CAN_EDIT}
READ_ONLY = {permissions.ACTION_CAN_READ}
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
log = logging.getLogger(__name__)
class SomeModel(Model):
id = Column(Integer, primary_key=True)
field_string = Column(String(50), unique=True, nullable=False)
field_integer = Column(Integer())
field_float = Column(Float())
field_date = Column(Date())
def __repr__(self):
return str(self.field_string)
class SomeModelView(ModelView):
datamodel = CustomSQLAInterface(SomeModel)
base_permissions = [
'can_list',
'can_show',
'can_add',
permissions.ACTION_CAN_EDIT,
permissions.ACTION_CAN_DELETE,
]
list_columns = ['field_string', 'field_integer', 'field_float', 'field_date']
class SomeBaseView(BaseView):
route_base = ''
@expose('/some_action')
@has_access
def some_action(self):
return "action!"
class TestSecurity(unittest.TestCase):
@classmethod
def setUpClass(cls):
settings.configure_orm()
cls.session = settings.Session
cls.app = application.create_app(testing=True)
cls.appbuilder = cls.app.appbuilder # pylint: disable=no-member
cls.app.config['WTF_CSRF_ENABLED'] = False
cls.security_manager = cls.appbuilder.sm
cls.delete_roles()
def setUp(self):
clear_db_runs()
clear_db_dags()
self.db = SQLA(self.app)
self.appbuilder.add_view(SomeBaseView, "SomeBaseView", category="BaseViews")
self.appbuilder.add_view(SomeModelView, "SomeModelView", category="ModelViews")
log.debug("Complete setup!")
@classmethod
def delete_roles(cls):
for role_name in ['team-a', 'MyRole1', 'MyRole5', 'Test_Role', 'MyRole3', 'MyRole2']:
fab_utils.delete_role(cls.app, role_name)
def expect_user_is_in_role(self, user, rolename):
self.security_manager.init_role(rolename, [])
role = self.security_manager.find_role(rolename)
if not role:
self.security_manager.add_role(rolename)
role = self.security_manager.find_role(rolename)
user.roles = [role]
self.security_manager.update_user(user)
def assert_user_has_dag_perms(self, perms, dag_id, user=None):
for perm in perms:
self.assertTrue(
self._has_dag_perm(perm, dag_id, user),
f"User should have '{perm}' on DAG '{dag_id}'",
)
def assert_user_does_not_have_dag_perms(self, dag_id, perms, user=None):
for perm in perms:
self.assertFalse(
self._has_dag_perm(perm, dag_id, user),
f"User should not have '{perm}' on DAG '{dag_id}'",
)
def _has_dag_perm(self, perm, dag_id, user):
# if not user:
# user = self.user
return self.security_manager.has_access(perm, self.security_manager.prefixed_dag_id(dag_id), user)
def tearDown(self):
clear_db_runs()
clear_db_dags()
self.appbuilder = None
self.app = None
self.db = None
log.debug("Complete teardown!")
def test_init_role_baseview(self):
role_name = 'MyRole3'
role_perms = [('can_some_action', 'SomeBaseView')]
self.security_manager.init_role(role_name, perms=role_perms)
role = self.appbuilder.sm.find_role(role_name)
self.assertIsNotNone(role)
self.assertEqual(len(role_perms), len(role.permissions))
def test_init_role_modelview(self):
role_name = 'MyRole2'
role_perms = [
('can_list', 'SomeModelView'),
('can_show', 'SomeModelView'),
('can_add', 'SomeModelView'),
(permissions.ACTION_CAN_EDIT, 'SomeModelView'),
(permissions.ACTION_CAN_DELETE, 'SomeModelView'),
]
self.security_manager.init_role(role_name, role_perms)
role = self.appbuilder.sm.find_role(role_name)
self.assertIsNotNone(role)
self.assertEqual(len(role_perms), len(role.permissions))
def test_update_and_verify_permission_role(self):
role_name = 'Test_Role'
self.security_manager.init_role(role_name, [])
role = self.security_manager.find_role(role_name)
perm = self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_EDIT, 'RoleModelView')
self.security_manager.add_permission_role(role, perm)
role_perms_len = len(role.permissions)
self.security_manager.init_role(role_name, [])
new_role_perms_len = len(role.permissions)
self.assertEqual(role_perms_len, new_role_perms_len)
def test_get_user_roles(self):
user = mock.MagicMock()
user.is_anonymous = False
roles = self.appbuilder.sm.find_role('Admin')
user.roles = roles
self.assertEqual(self.security_manager.get_user_roles(user), roles)
def test_get_user_roles_for_anonymous_user(self):
viewer_role_perms = {
(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONFIG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_IMPORT_ERROR),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_AUDIT_LOG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_JOB),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_SLA_MISS),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_BROWSE_MENU),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DAG_RUN),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DOCS_LINK),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_DOCS_MENU),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_JOB),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_AUDIT_LOG),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_PLUGIN),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_SLA_MISS),
(permissions.ACTION_CAN_ACCESS_MENU, permissions.RESOURCE_TASK_INSTANCE),
(permissions.ACTION_CAN_THIS_FORM_GET, permissions.RESOURCE_RESET_MY_PASSWORD_VIEW),
(permissions.ACTION_CAN_THIS_FORM_POST, permissions.RESOURCE_RESET_MY_PASSWORD_VIEW),
(permissions.ACTION_RESETMYPASSWORD, permissions.RESOURCE_USER_DB_MODELVIEW),
(permissions.ACTION_CAN_THIS_FORM_GET, permissions.RESOURCE_USERINFO_EDIT_VIEW),
(permissions.ACTION_CAN_THIS_FORM_POST, permissions.RESOURCE_USERINFO_EDIT_VIEW),
(permissions.ACTION_USERINFOEDIT, permissions.RESOURCE_USER_DB_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_DB_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_OID_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_LDAP_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_OAUTH_MODELVIEW),
(permissions.ACTION_CAN_USERINFO, permissions.RESOURCE_USER_REMOTEUSER_MODELVIEW),
}
self.app.config['AUTH_ROLE_PUBLIC'] = 'Viewer'
with self.app.app_context():
user = mock.MagicMock()
user.is_anonymous = True
perms_views = set()
for role in self.security_manager.get_user_roles(user):
perms_views.update(
{(perm_view.permission.name, perm_view.view_menu.name) for perm_view in role.permissions}
)
self.assertEqual(perms_views, viewer_role_perms)
@mock.patch('airflow.www.security.AirflowSecurityManager.get_user_roles')
def test_get_all_permissions_views(self, mock_get_user_roles):
role_name = 'MyRole5'
role_perm = 'can_some_action'
role_vm = 'SomeBaseView'
username = 'get_all_permissions_views'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(role_perm, role_vm),
],
)
role = user.roles[0]
mock_get_user_roles.return_value = [role]
self.assertEqual(self.security_manager.get_all_permissions_views(), {(role_perm, role_vm)})
mock_get_user_roles.return_value = []
self.assertEqual(len(self.security_manager.get_all_permissions_views()), 0)
def test_get_accessible_dag_ids(self):
role_name = 'MyRole1'
permission_action = [permissions.ACTION_CAN_READ]
dag_id = 'dag_id'
username = "ElUser"
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
],
)
dag_model = DagModel(dag_id=dag_id, fileloc="/tmp/dag_.py", schedule_interval="2 2 * * *")
self.session.add(dag_model)
self.session.commit()
self.security_manager.sync_perm_for_dag( # type: ignore # pylint: disable=no-member
dag_id, access_control={role_name: permission_action}
)
self.assertEqual(self.security_manager.get_accessible_dag_ids(user), {'dag_id'})
def test_dont_get_inaccessible_dag_ids_for_dag_resource_permission(self):
# In this test case,
# get_readable_dag_ids() don't return DAGs to which the user has CAN_EDIT permission
username = "Monsieur User"
role_name = "MyRole1"
permission_action = [permissions.ACTION_CAN_EDIT]
dag_id = "dag_id"
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
],
)
dag_model = DagModel(dag_id=dag_id, fileloc="/tmp/dag_.py", schedule_interval="2 2 * * *")
self.session.add(dag_model)
self.session.commit()
self.security_manager.sync_perm_for_dag( # type: ignore # pylint: disable=no-member
dag_id, access_control={role_name: permission_action}
)
self.assertEqual(self.security_manager.get_readable_dag_ids(user), set())
@mock.patch('airflow.www.security.AirflowSecurityManager._has_view_access')
def test_has_access(self, mock_has_view_access):
user = mock.MagicMock()
user.is_anonymous = False
mock_has_view_access.return_value = True
self.assertTrue(self.security_manager.has_access('perm', 'view', user))
def test_sync_perm_for_dag_creates_permissions_on_view_menus(self):
test_dag_id = 'TEST_DAG'
prefixed_test_dag_id = f'DAG:{test_dag_id}'
self.security_manager.sync_perm_for_dag(test_dag_id, access_control=None)
self.assertIsNotNone(
self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_READ, prefixed_test_dag_id)
)
self.assertIsNotNone(
self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_EDIT, prefixed_test_dag_id)
)
@mock.patch('airflow.www.security.AirflowSecurityManager._has_perm')
@mock.patch('airflow.www.security.AirflowSecurityManager._has_role')
def test_has_all_dag_access(self, mock_has_role, mock_has_perm):
mock_has_role.return_value = True
self.assertTrue(self.security_manager.has_all_dags_access())
mock_has_role.return_value = False
mock_has_perm.return_value = False
self.assertFalse(self.security_manager.has_all_dags_access())
mock_has_perm.return_value = True
self.assertTrue(self.security_manager.has_all_dags_access())
def test_access_control_with_non_existent_role(self):
with self.assertRaises(AirflowException) as context:
self.security_manager.sync_perm_for_dag(
dag_id='access-control-test',
access_control={
'this-role-does-not-exist': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]
},
)
self.assertIn("role does not exist", str(context.exception))
def test_all_dag_access_doesnt_give_non_dag_access(self):
username = 'dag_access_user'
role_name = 'dag_access_role'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
],
)
self.assertTrue(
self.security_manager.has_access(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG, user)
)
self.assertFalse(
self.security_manager.has_access(
permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE, user
)
)
def test_access_control_with_invalid_permission(self):
invalid_permissions = [
'can_varimport', # a real permission, but not a member of DAG_PERMS
'can_eat_pudding', # clearly not a real permission
]
username = "LaUser"
user = fab_utils.create_user(
self.app,
username=username,
role_name='team-a',
)
for permission in invalid_permissions:
self.expect_user_is_in_role(user, rolename='team-a')
with self.assertRaises(AirflowException) as context:
self.security_manager.sync_perm_for_dag(
'access_control_test', access_control={'team-a': {permission}}
)
self.assertIn("invalid permissions", str(context.exception))
def test_access_control_is_set_on_init(self):
username = 'access_control_is_set_on_init'
role_name = 'team-a'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[],
)
self.expect_user_is_in_role(user, rolename='team-a')
self.security_manager.sync_perm_for_dag(
'access_control_test',
access_control={'team-a': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]},
)
self.assert_user_has_dag_perms(
perms=[permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ],
dag_id='access_control_test',
user=user,
)
self.expect_user_is_in_role(user, rolename='NOT-team-a')
self.assert_user_does_not_have_dag_perms(
perms=[permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ],
dag_id='access_control_test',
user=user,
)
def test_access_control_stale_perms_are_revoked(self):
username = 'access_control_stale_perms_are_revoked'
role_name = 'team-a'
with self.app.app_context():
user = fab_utils.create_user(
self.app,
username,
role_name,
permissions=[],
)
self.expect_user_is_in_role(user, rolename='team-a')
self.security_manager.sync_perm_for_dag(
'access_control_test', access_control={'team-a': READ_WRITE}
)
self.assert_user_has_dag_perms(perms=READ_WRITE, dag_id='access_control_test', user=user)
self.security_manager.sync_perm_for_dag(
'access_control_test', access_control={'team-a': READ_ONLY}
)
self.assert_user_has_dag_perms(
perms=[permissions.ACTION_CAN_READ], dag_id='access_control_test', user=user
)
self.assert_user_does_not_have_dag_perms(
perms=[permissions.ACTION_CAN_EDIT], dag_id='access_control_test', user=user
)
def test_no_additional_dag_permission_views_created(self):
ab_perm_view_role = sqla_models.assoc_permissionview_role
self.security_manager.sync_roles()
num_pv_before = self.db.session().query(ab_perm_view_role).count()
self.security_manager.sync_roles()
num_pv_after = self.db.session().query(ab_perm_view_role).count()
self.assertEqual(num_pv_before, num_pv_after)
def test_override_role_vm(self):
test_security_manager = MockSecurityManager(appbuilder=self.appbuilder)
self.assertEqual(len(test_security_manager.VIEWER_VMS), 1)
self.assertEqual(test_security_manager.VIEWER_VMS, {'Airflow'})
| apache-2.0 |
marratj/ansible | lib/ansible/plugins/connection/lxd.py | 44 | 4475 | # (c) 2016 Matt Clay <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Matt Clay <[email protected]>
connection: lxd
short_description: Run tasks in lxc containers via lxc CLI
description:
- Run commands or put/fetch files to an existing lxc container using lxc CLI
version_added: "2.0"
options:
remote_addr:
description:
- Container identifier
default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_lxd_host
executable:
description:
- shell to use for execution inside container
default: /bin/sh
vars:
- name: ansible_executable
- name: ansible_lxd_executable
"""
import os
from distutils.spawn import find_executable
from subprocess import call, Popen, PIPE
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
""" lxd based connections """
transport = "lxd"
has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self._host = self._play_context.remote_addr
self._lxc_cmd = find_executable("lxc")
if not self._lxc_cmd:
raise AnsibleError("lxc command not found in PATH")
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
self._display.warning('lxd does not support remote_user, using container default: root')
def _connect(self):
"""connect to lxd (nothing to do here) """
super(Connection, self)._connect()
if not self._connected:
self._display.vvv(u"ESTABLISH LXD CONNECTION FOR USER: root", host=self._host)
self._connected = True
def exec_command(self, cmd, in_data=None, sudoable=True):
""" execute a command on the lxd host """
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
self._display.vvv(u"EXEC {0}".format(cmd), host=self._host)
local_cmd = [self._lxc_cmd, "exec", self._host, "--", self._play_context.executable, "-c", cmd]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(in_data)
stdout = to_text(stdout)
stderr = to_text(stderr)
if stderr == "error: Container is not running.\n":
raise AnsibleConnectionFailure("container not running: %s" % self._host)
if stderr == "error: not found\n":
raise AnsibleConnectionFailure("container not found: %s" % self._host)
return process.returncode, stdout, stderr
def put_file(self, in_path, out_path):
""" put a file from local to lxd """
super(Connection, self).put_file(in_path, out_path)
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._host)
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
local_cmd = [self._lxc_cmd, "file", "push", in_path, self._host + "/" + out_path]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
def fetch_file(self, in_path, out_path):
""" fetch a file from lxd to local """
super(Connection, self).fetch_file(in_path, out_path)
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self._host)
local_cmd = [self._lxc_cmd, "file", "pull", self._host + "/" + in_path, out_path]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
def close(self):
""" close the connection (nothing to do here) """
super(Connection, self).close()
self._connected = False
| gpl-3.0 |
golharam/StarCluster | starcluster/sshutils.py | 14 | 32130 | # Copyright 2009-2014 Justin Riley
#
# This file is part of StarCluster.
#
# StarCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# StarCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with StarCluster. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import sys
import stat
import glob
import atexit
import string
import socket
import fnmatch
import hashlib
import warnings
import posixpath
import scp
import paramiko
from Crypto.PublicKey import RSA
from Crypto.PublicKey import DSA
# windows does not have termios...
try:
import termios
import tty
HAS_TERMIOS = True
except ImportError:
HAS_TERMIOS = False
from starcluster import exception
from starcluster import progressbar
from starcluster.logger import log
class SSHClient(object):
"""
Establishes an SSH connection to a remote host using either password or
private key authentication. Once established, this object allows executing
commands, copying files to/from the remote host, various file querying
similar to os.path.*, and much more.
"""
def __init__(self,
host,
username=None,
password=None,
private_key=None,
private_key_pass=None,
compress=False,
port=22,
timeout=30):
self._host = host
self._port = port
self._pkey = None
self._username = username or os.environ['LOGNAME']
self._password = password
self._timeout = timeout
self._sftp = None
self._scp = None
self._transport = None
self._progress_bar = None
self._compress = compress
if private_key:
self._pkey = self.load_private_key(private_key, private_key_pass)
elif not password:
raise exception.SSHNoCredentialsError()
self._glob = SSHGlob(self)
self.__last_status = None
atexit.register(self.close)
def load_private_key(self, private_key, private_key_pass=None):
# Use Private Key.
log.debug('loading private key %s' % private_key)
if private_key.endswith('rsa') or private_key.count('rsa'):
pkey = self._load_rsa_key(private_key, private_key_pass)
elif private_key.endswith('dsa') or private_key.count('dsa'):
pkey = self._load_dsa_key(private_key, private_key_pass)
else:
log.debug(
"specified key does not end in either rsa or dsa, trying both")
pkey = self._load_rsa_key(private_key, private_key_pass)
if pkey is None:
pkey = self._load_dsa_key(private_key, private_key_pass)
return pkey
def connect(self, host=None, username=None, password=None,
private_key=None, private_key_pass=None, port=None, timeout=30,
compress=None):
host = host or self._host
username = username or self._username
password = password or self._password
compress = compress or self._compress
port = port if port is not None else self._port
pkey = self._pkey
if private_key:
pkey = self.load_private_key(private_key, private_key_pass)
log.debug("connecting to host %s on port %d as user %s" % (host, port,
username))
try:
sock = self._get_socket(host, port)
transport = paramiko.Transport(sock)
transport.banner_timeout = timeout
except socket.error:
raise exception.SSHConnectionError(host, port)
# Enable/disable compression
transport.use_compression(compress)
# Authenticate the transport.
try:
transport.connect(username=username, pkey=pkey, password=password)
except paramiko.AuthenticationException:
raise exception.SSHAuthException(username, host)
except paramiko.SSHException, e:
msg = e.args[0]
raise exception.SSHError(msg)
except socket.error:
raise exception.SSHConnectionError(host, port)
except EOFError:
raise exception.SSHConnectionError(host, port)
except Exception, e:
raise exception.SSHError(str(e))
self.close()
self._transport = transport
try:
assert self.sftp is not None
except paramiko.SFTPError, e:
if 'Garbage packet received' in e:
log.debug("Garbage packet received", exc_info=True)
raise exception.SSHAccessDeniedViaAuthKeys(username)
raise
return self
@property
def transport(self):
"""
This property attempts to return an active SSH transport
"""
if not self._transport or not self._transport.is_active():
self.connect(self._host, self._username, self._password,
port=self._port, timeout=self._timeout,
compress=self._compress)
return self._transport
def get_server_public_key(self):
return self.transport.get_remote_server_key()
def is_active(self):
if self._transport:
return self._transport.is_active()
return False
def _get_socket(self, hostname, port):
addrinfo = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC,
socket.SOCK_STREAM)
for (family, socktype, proto, canonname, sockaddr) in addrinfo:
if socktype == socket.SOCK_STREAM:
af = family
break
else:
raise exception.SSHError(
'No suitable address family for %s' % hostname)
sock = socket.socket(af, socket.SOCK_STREAM)
sock.settimeout(self._timeout)
sock.connect((hostname, port))
return sock
def _load_rsa_key(self, private_key, private_key_pass=None):
private_key_file = os.path.expanduser(private_key)
try:
rsa_key = get_rsa_key(key_location=private_key_file,
passphrase=private_key_pass)
log.debug("Using private key %s (RSA)" % private_key)
return rsa_key
except (paramiko.SSHException, exception.SSHError):
log.error('invalid rsa key or passphrase specified')
def _load_dsa_key(self, private_key, private_key_pass=None):
private_key_file = os.path.expanduser(private_key)
try:
dsa_key = get_dsa_key(key_location=private_key_file,
passphrase=private_key_pass)
log.info("Using private key %s (DSA)" % private_key)
return dsa_key
except (paramiko.SSHException, exception.SSHError):
log.error('invalid dsa key or passphrase specified')
@property
def sftp(self):
"""Establish the SFTP connection."""
if not self._sftp or self._sftp.sock.closed:
log.debug("creating sftp connection")
self._sftp = paramiko.SFTPClient.from_transport(self.transport)
return self._sftp
@property
def scp(self):
"""Initialize the SCP client."""
if not self._scp or not self._scp.transport.is_active():
log.debug("creating scp connection")
self._scp = scp.SCPClient(self.transport,
progress=self._file_transfer_progress,
socket_timeout=self._timeout)
return self._scp
def generate_rsa_key(self):
warnings.warn("This method is deprecated: please use "
"starcluster.sshutils.generate_rsa_key instead")
return generate_rsa_key()
def get_public_key(self, key):
warnings.warn("This method is deprecated: please use "
"starcluster.sshutils.get_public_key instead")
return get_public_key(key)
def load_remote_rsa_key(self, remote_filename):
"""
Returns paramiko.RSAKey object for an RSA key located on the remote
machine
"""
rfile = self.remote_file(remote_filename, 'r')
key = get_rsa_key(key_file_obj=rfile)
rfile.close()
return key
def makedirs(self, path, mode=0755):
"""
Same as os.makedirs - makes a new directory and automatically creates
all parent directories if they do not exist.
mode specifies unix permissions to apply to the new dir
"""
head, tail = posixpath.split(path)
if not tail:
head, tail = posixpath.split(head)
if head and tail and not self.path_exists(head):
try:
self.makedirs(head, mode)
except OSError, e:
# be happy if someone already created the path
if e.errno != os.errno.EEXIST:
raise
# xxx/newdir/. exists if xxx/newdir exists
if tail == posixpath.curdir:
return
self.mkdir(path, mode)
def mkdir(self, path, mode=0755, ignore_failure=False):
"""
Make a new directory on the remote machine
        If ignore_failure is True, errors raised while creating the directory are suppressed
mode specifies unix permissions to apply to the new dir
"""
try:
return self.sftp.mkdir(path, mode)
except IOError:
if not ignore_failure:
raise
def get_remote_file_lines(self, remote_file, regex=None, matching=True):
"""
Returns list of lines in a remote_file
If regex is passed only lines that contain a pattern that matches
regex will be returned
If matching is set to False then only lines *not* containing a pattern
that matches regex will be returned
"""
f = self.remote_file(remote_file, 'r')
flines = f.readlines()
f.close()
if regex is None:
return flines
r = re.compile(regex)
lines = []
for line in flines:
match = r.search(line)
if matching and match:
lines.append(line)
elif not matching and not match:
lines.append(line)
return lines
def remove_lines_from_file(self, remote_file, regex):
"""
Removes lines matching regex from remote_file
"""
if regex in [None, '']:
log.debug('no regex supplied...returning')
return
lines = self.get_remote_file_lines(remote_file, regex, matching=False)
log.debug("new %s after removing regex (%s) matches:\n%s" %
(remote_file, regex, ''.join(lines)))
f = self.remote_file(remote_file)
f.writelines(lines)
f.close()
def unlink(self, remote_file):
return self.sftp.unlink(remote_file)
def remote_file(self, file, mode='w'):
"""
Returns a remote file descriptor
"""
rfile = self.sftp.open(file, mode)
rfile.name = file
return rfile
def path_exists(self, path):
"""
Test whether a remote path exists.
Returns False for broken symbolic links
"""
try:
self.stat(path)
return True
except IOError:
return False
def lpath_exists(self, path):
"""
Test whether a remote path exists.
Returns True for broken symbolic links
"""
try:
self.lstat(path)
return True
except IOError:
return False
def chown(self, uid, gid, remote_path):
"""
Set user (uid) and group (gid) owner for remote_path
"""
return self.sftp.chown(remote_path, uid, gid)
def chmod(self, mode, remote_path):
"""
Apply permissions (mode) to remote_path
"""
return self.sftp.chmod(remote_path, mode)
def ls(self, path):
"""
Return a list containing the names of the entries in the remote path.
"""
return [posixpath.join(path, f) for f in self.sftp.listdir(path)]
def glob(self, pattern):
return self._glob.glob(pattern)
def isdir(self, path):
"""
Return true if the remote path refers to an existing directory.
"""
try:
s = self.stat(path)
except IOError:
return False
return stat.S_ISDIR(s.st_mode)
def isfile(self, path):
"""
Return true if the remote path refers to an existing file.
"""
try:
s = self.stat(path)
except IOError:
return False
return stat.S_ISREG(s.st_mode)
def stat(self, path):
"""
Perform a stat system call on the given remote path.
"""
return self.sftp.stat(path)
def lstat(self, path):
"""
Same as stat but doesn't follow symlinks
"""
return self.sftp.lstat(path)
@property
def progress_bar(self):
if not self._progress_bar:
widgets = ['FileTransfer: ', ' ', progressbar.Percentage(), ' ',
progressbar.Bar(marker=progressbar.RotatingMarker()),
' ', progressbar.ETA(), ' ',
progressbar.FileTransferSpeed()]
pbar = progressbar.ProgressBar(widgets=widgets,
maxval=1,
force_update=True)
self._progress_bar = pbar
return self._progress_bar
def _file_transfer_progress(self, filename, size, sent):
pbar = self.progress_bar
pbar.widgets[0] = filename
pbar.maxval = size
pbar.update(sent)
if pbar.finished:
pbar.reset()
def _make_list(self, obj):
if not isinstance(obj, (list, tuple)):
return [obj]
return obj
def get(self, remotepaths, localpath=''):
"""
Copies one or more files from the remote host to the local host.
"""
remotepaths = self._make_list(remotepaths)
localpath = localpath or os.getcwd()
globs = []
noglobs = []
for rpath in remotepaths:
if glob.has_magic(rpath):
globs.append(rpath)
else:
noglobs.append(rpath)
globresults = [self.glob(g) for g in globs]
remotepaths = noglobs
for globresult in globresults:
remotepaths.extend(globresult)
recursive = False
for rpath in remotepaths:
if not self.path_exists(rpath):
raise exception.BaseException(
"Remote file or directory does not exist: %s" % rpath)
for rpath in remotepaths:
if self.isdir(rpath):
recursive = True
break
try:
self.scp.get(remotepaths, local_path=localpath,
recursive=recursive)
except Exception, e:
log.debug("get failed: remotepaths=%s, localpath=%s",
str(remotepaths), localpath)
raise exception.SCPException(str(e))
def put(self, localpaths, remotepath='.'):
"""
Copies one or more files from the local host to the remote host.
"""
localpaths = self._make_list(localpaths)
recursive = False
for lpath in localpaths:
if os.path.isdir(lpath):
recursive = True
break
try:
self.scp.put(localpaths, remote_path=remotepath,
recursive=recursive)
except Exception, e:
log.debug("put failed: localpaths=%s, remotepath=%s",
str(localpaths), remotepath)
raise exception.SCPException(str(e))
def execute_async(self, command, source_profile=True):
"""
Executes a remote command so that it continues running even after this
SSH connection closes. The remote process will be put into the
background via nohup. Does not return output or check for non-zero exit
status.
"""
return self.execute(command, detach=True,
source_profile=source_profile)
def get_last_status(self):
return self.__last_status
def get_status(self, command, source_profile=True):
"""
Execute a remote command and return the exit status
"""
channel = self.transport.open_session()
if source_profile:
command = "source /etc/profile && %s" % command
channel.exec_command(command)
self.__last_status = channel.recv_exit_status()
return self.__last_status
def _get_output(self, channel, silent=True, only_printable=False):
"""
Returns the stdout/stderr output from a ssh channel as a list of
strings (non-interactive only)
"""
# stdin = channel.makefile('wb', -1)
stdout = channel.makefile('rb', -1)
stderr = channel.makefile_stderr('rb', -1)
if silent:
output = stdout.readlines() + stderr.readlines()
else:
output = []
line = None
while line != '':
line = stdout.readline()
if only_printable:
line = ''.join(c for c in line if c in string.printable)
if line != '':
output.append(line)
print line,
for line in stderr.readlines():
output.append(line)
print line,
if only_printable:
output = map(lambda line: ''.join(c for c in line if c in
string.printable), output)
output = map(lambda line: line.strip(), output)
return output
def execute(self, command, silent=True, only_printable=False,
ignore_exit_status=False, log_output=True, detach=False,
source_profile=True, raise_on_failure=True):
"""
Execute a remote command and return stdout/stderr
NOTE: this function blocks until the process finishes
kwargs:
silent - don't print the command's output to the console
only_printable - filter the command's output to allow only printable
characters
ignore_exit_status - don't warn about non-zero exit status
log_output - log all remote output to the debug file
detach - detach the remote process so that it continues to run even
after the SSH connection closes (does NOT return output or
check for non-zero exit status if detach=True)
source_profile - if True prefix the command with "source /etc/profile"
raise_on_failure - raise exception.SSHError if command fails
returns List of output lines
"""
channel = self.transport.open_session()
if detach:
command = "nohup %s &" % command
if source_profile:
command = "source /etc/profile && %s" % command
channel.exec_command(command)
channel.close()
self.__last_status = None
return
if source_profile:
command = "source /etc/profile && %s" % command
log.debug("executing remote command: %s" % command)
channel.exec_command(command)
output = self._get_output(channel, silent=silent,
only_printable=only_printable)
exit_status = channel.recv_exit_status()
self.__last_status = exit_status
out_str = '\n'.join(output)
if exit_status != 0:
msg = "remote command '%s' failed with status %d"
msg %= (command, exit_status)
if log_output:
msg += ":\n%s" % out_str
else:
msg += " (no output log requested)"
if not ignore_exit_status:
if raise_on_failure:
raise exception.RemoteCommandFailed(
msg, command, exit_status, out_str)
else:
log.error(msg)
else:
log.debug("(ignored) " + msg)
else:
if log_output:
log.debug("output of '%s':\n%s" % (command, out_str))
else:
log.debug("output of '%s' has been hidden" % command)
return output
def has_required(self, progs):
"""
Same as check_required but returns False if not all commands exist
"""
try:
return self.check_required(progs)
except exception.RemoteCommandNotFound:
return False
def check_required(self, progs):
"""
Checks that all commands in the progs list exist on the remote system.
Returns True if all commands exist and raises exception.CommandNotFound
if not.
"""
for prog in progs:
if not self.which(prog):
raise exception.RemoteCommandNotFound(prog)
return True
def which(self, prog):
return self.execute('which %s' % prog, ignore_exit_status=True)
def get_path(self):
"""Returns the PATH environment variable on the remote machine"""
return self.get_env()['PATH']
def get_env(self):
"""Returns the remote machine's environment as a dictionary"""
env = {}
for line in self.execute('env'):
key, val = line.split('=', 1)
env[key] = val
return env
def close(self):
"""Closes the connection and cleans up."""
if self._sftp:
self._sftp.close()
if self._transport:
self._transport.close()
def _invoke_shell(self, term='screen', cols=80, lines=24):
chan = self.transport.open_session()
chan.get_pty(term, cols, lines)
chan.invoke_shell()
return chan
def get_current_user(self):
if not self.is_active():
return
return self.transport.get_username()
def switch_user(self, user):
"""
Reconnect, if necessary, to host as user
"""
if not self.is_active() or user and self.get_current_user() != user:
self.connect(username=user)
else:
user = user or self._username
log.debug("already connected as user %s" % user)
def interactive_shell(self, user='root'):
orig_user = self.get_current_user()
self.switch_user(user)
chan = self._invoke_shell()
log.info('Starting Pure-Python SSH shell...')
if HAS_TERMIOS:
self._posix_shell(chan)
else:
self._windows_shell(chan)
chan.close()
self.switch_user(orig_user)
def _posix_shell(self, chan):
import select
oldtty = termios.tcgetattr(sys.stdin)
try:
tty.setraw(sys.stdin.fileno())
tty.setcbreak(sys.stdin.fileno())
chan.settimeout(0.0)
            # needs to be sent so that vim and other full-screen programs
            # get the correct terminal size (FIXME)
chan.send('eval $(resize)\n')
while True:
r, w, e = select.select([chan, sys.stdin], [], [])
if chan in r:
try:
x = chan.recv(1024)
if len(x) == 0:
print '\r\n*** EOF\r\n',
break
sys.stdout.write(x)
sys.stdout.flush()
except socket.timeout:
pass
if sys.stdin in r:
# fixes up arrow problem
x = os.read(sys.stdin.fileno(), 1)
if len(x) == 0:
break
chan.send(x)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty)
# thanks to Mike Looijmans for this code
def _windows_shell(self, chan):
import threading
sys.stdout.write("Line-buffered terminal emulation. "
"Press F6 or ^Z to send EOF.\r\n\r\n")
def writeall(sock):
while True:
data = sock.recv(256)
if not data:
sys.stdout.write('\r\n*** EOF ***\r\n\r\n')
sys.stdout.flush()
break
sys.stdout.write(data)
sys.stdout.flush()
writer = threading.Thread(target=writeall, args=(chan,))
writer.start()
        # needs to be sent so that vim and other full-screen programs
        # get the correct terminal size (FIXME)
chan.send('eval $(resize)\n')
try:
while True:
d = sys.stdin.read(1)
if not d:
break
chan.send(d)
except EOFError:
# user hit ^Z or F6
pass
def __del__(self):
"""Attempt to clean up if not explicitly closed."""
log.debug('__del__ called')
self.close()
# for backwards compatibility
Connection = SSHClient
class SSHGlob(object):
def __init__(self, ssh_client):
self.ssh = ssh_client
def glob(self, pathname):
return list(self.iglob(pathname))
def iglob(self, pathname):
"""
Return an iterator which yields the paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la fnmatch.
"""
if not glob.has_magic(pathname):
if self.ssh.lpath_exists(pathname):
yield pathname
return
dirname, basename = posixpath.split(pathname)
if not dirname:
for name in self.glob1(posixpath.curdir, basename):
yield name
return
if glob.has_magic(dirname):
dirs = self.iglob(dirname)
else:
dirs = [dirname]
if glob.has_magic(basename):
glob_in_dir = self.glob1
else:
glob_in_dir = self.glob0
for dirname in dirs:
for name in glob_in_dir(dirname, basename):
yield posixpath.join(dirname, name)
def glob0(self, dirname, basename):
if basename == '':
# `os.path.split()` returns an empty basename for paths ending with
# a directory separator. 'q*x/' should match only directories.
if self.ssh.isdir(dirname):
return [basename]
else:
            if self.ssh.lpath_exists(posixpath.join(dirname, basename)):
return [basename]
return []
def glob1(self, dirname, pattern):
if not dirname:
dirname = posixpath.curdir
if isinstance(pattern, unicode) and not isinstance(dirname, unicode):
# enc = sys.getfilesystemencoding() or sys.getdefaultencoding()
# dirname = unicode(dirname, enc)
dirname = unicode(dirname, 'UTF-8')
try:
names = [posixpath.basename(n) for n in self.ssh.ls(dirname)]
except os.error:
return []
if pattern[0] != '.':
names = filter(lambda x: x[0] != '.', names)
return fnmatch.filter(names, pattern)
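# Example (illustrative): insert_char_every_n_chars('aabbcc', ':', 2) == 'aa:bb:cc'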
def insert_char_every_n_chars(string, char='\n', every=64):
return char.join(
string[i:i + every] for i in xrange(0, len(string), every))
def get_rsa_key(key_location=None, key_file_obj=None, passphrase=None,
use_pycrypto=False):
key_fobj = key_file_obj or open(key_location)
try:
if use_pycrypto:
key = RSA.importKey(key_fobj, passphrase=passphrase)
else:
key = paramiko.RSAKey.from_private_key(key_fobj,
password=passphrase)
return key
except (paramiko.SSHException, ValueError):
raise exception.SSHError(
"Invalid RSA private key file or missing passphrase: %s" %
key_location)
def get_dsa_key(key_location=None, key_file_obj=None, passphrase=None,
use_pycrypto=False):
key_fobj = key_file_obj or open(key_location)
try:
key = paramiko.DSSKey.from_private_key(key_fobj,
password=passphrase)
if use_pycrypto:
key = DSA.construct((key.y, key.g, key.p, key.q, key.x))
return key
except (paramiko.SSHException, ValueError):
raise exception.SSHError(
"Invalid DSA private key file or missing passphrase: %s" %
key_location)
def get_public_key(key):
return ' '.join([key.get_name(), key.get_base64()])
def generate_rsa_key():
return paramiko.RSAKey.generate(2048)
def get_private_rsa_fingerprint(key_location=None, key_file_obj=None,
passphrase=None):
"""
Returns the fingerprint of a private RSA key as a 59-character string (40
characters separated every 2 characters by a ':'). The fingerprint is
computed using the SHA1 (hex) digest of the DER-encoded (pkcs8) RSA private
key.
"""
k = get_rsa_key(key_location=key_location, key_file_obj=key_file_obj,
passphrase=passphrase, use_pycrypto=True)
sha1digest = hashlib.sha1(k.exportKey('DER', pkcs=8)).hexdigest()
fingerprint = insert_char_every_n_chars(sha1digest, ':', 2)
key = key_location or key_file_obj
log.debug("rsa private key fingerprint (%s): %s" % (key, fingerprint))
return fingerprint
def get_public_rsa_fingerprint(key_location=None, key_file_obj=None,
passphrase=None):
"""
Returns the fingerprint of the public portion of an RSA key as a
47-character string (32 characters separated every 2 characters by a ':').
The fingerprint is computed using the MD5 (hex) digest of the DER-encoded
RSA public key.
"""
privkey = get_rsa_key(key_location=key_location, key_file_obj=key_file_obj,
passphrase=passphrase, use_pycrypto=True)
pubkey = privkey.publickey()
md5digest = hashlib.md5(pubkey.exportKey('DER')).hexdigest()
fingerprint = insert_char_every_n_chars(md5digest, ':', 2)
key = key_location or key_file_obj
log.debug("rsa public key fingerprint (%s): %s" % (key, fingerprint))
return fingerprint
def test_create_keypair_fingerprint(keypair=None):
"""
TODO: move this to 'live' tests
"""
from starcluster import config
cfg = config.StarClusterConfig().load()
ec2 = cfg.get_easy_ec2()
if keypair is None:
keypair = cfg.keys.keys()[0]
key_location = cfg.get_key(keypair).key_location
localfprint = get_private_rsa_fingerprint(key_location)
ec2fprint = ec2.get_keypair(keypair).fingerprint
print 'local fingerprint: %s' % localfprint
print ' ec2 fingerprint: %s' % ec2fprint
assert localfprint == ec2fprint
def test_import_keypair_fingerprint(keypair):
"""
TODO: move this to 'live' tests
"""
from starcluster import config
cfg = config.StarClusterConfig().load()
ec2 = cfg.get_easy_ec2()
key_location = cfg.get_key(keypair).key_location
localfprint = get_public_rsa_fingerprint(key_location)
ec2fprint = ec2.get_keypair(keypair).fingerprint
print 'local fingerprint: %s' % localfprint
print ' ec2 fingerprint: %s' % ec2fprint
assert localfprint == ec2fprint
| gpl-3.0 |
DARKPOP/external_chromium_org | build/android/avd.py | 46 | 3735 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Launches Android Virtual Devices with a set configuration for testing Chrome.
The script will launch a specified number of Android Virtual Devices (AVDs).
"""
import install_emulator_deps
import logging
import optparse
import os
import re
import sys
from pylib import cmd_helper
from pylib import constants
from pylib.utils import emulator
def main(argv):
# ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
# the emulator to find the system images upon launch.
emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
os.environ['ANDROID_SDK_ROOT'] = emulator_sdk
opt_parser = optparse.OptionParser(description='AVD script.')
  opt_parser.add_option('--name', help='Optionally, the name of an existing '
                        'AVD to launch. If not specified, new AVDs will be '
                        'created')
opt_parser.add_option('-n', '--num', dest='emulator_count',
help='Number of emulators to launch (default is 1).',
type='int', default='1')
opt_parser.add_option('--abi', default='x86',
help='Platform of emulators to launch (x86 default).')
opt_parser.add_option('--api-level', dest='api_level',
help='API level for the image, e.g. 19 for Android 4.4',
type='int', default=constants.ANDROID_SDK_VERSION)
options, _ = opt_parser.parse_args(argv[1:])
logging.basicConfig(level=logging.INFO,
format='# %(asctime)-15s: %(message)s')
logging.root.setLevel(logging.INFO)
# Check if KVM is enabled for x86 AVD's and check for x86 system images.
# TODO(andrewhayden) Since we can fix all of these with install_emulator_deps
# why don't we just run it?
if options.abi == 'x86':
if not install_emulator_deps.CheckKVM():
logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
'Enable KVM in BIOS and run install_emulator_deps.py')
return 1
elif not install_emulator_deps.CheckX86Image(options.api_level):
logging.critical('ERROR: System image for x86 AVD not installed. Run '
'install_emulator_deps.py')
return 1
if not install_emulator_deps.CheckSDK():
logging.critical('ERROR: Emulator SDK not installed. Run '
'install_emulator_deps.py.')
return 1
# If AVD is specified, check that the SDK has the required target. If not,
# check that the SDK has the desired target for the temporary AVD's.
api_level = options.api_level
if options.name:
android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
'android')
avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
names = re.findall('Name: (\w+)', avds_output)
api_levels = re.findall('API level (\d+)', avds_output)
try:
avd_index = names.index(options.name)
except ValueError:
logging.critical('ERROR: Specified AVD %s does not exist.' % options.name)
return 1
api_level = int(api_levels[avd_index])
if not install_emulator_deps.CheckSDKPlatform(api_level):
    logging.critical('ERROR: Emulator SDK missing required target for API %d. '
                     'Run install_emulator_deps.py.' % api_level)
return 1
if options.name:
emulator.LaunchEmulator(options.name, options.abi)
else:
emulator.LaunchTempEmulators(options.emulator_count, options.abi,
options.api_level, True)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
jakdept/pythonbook | ch4/picture_grid_test.py | 1 | 1147 | #!/usr/bin/env python3.5
'''
Automate the Boring Stuff with Python
generic testing for chapter 4 projects
Jack Hayhurst
'''
from io import StringIO
import unittest
from unittest.mock import patch
import picture_grid
class TestPictureGrid(unittest.TestCase):
'''tests the picture_grid.py script'''
def test_picture_grid(self):
'''single test to verify picture transfer'''
grid = [['.', '.', '.', '.', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['O', 'O', 'O', 'O', 'O', '.'],
['.', 'O', 'O', 'O', 'O', 'O'],
['O', 'O', 'O', 'O', 'O', '.'],
['O', 'O', 'O', 'O', '.', '.'],
['.', 'O', 'O', '.', '.', '.'],
['.', '.', '.', '.', '.', '.']]
output = """
..OO.OO..
.OOOOOOO.
.OOOOOOO.
..OOOOO..
...OOO...
....O....
""".strip()
with patch('sys.stdout', new=StringIO()) as fakeOutput:
picture_grid.picture_grid(grid)
self.assertEqual(fakeOutput.getvalue().strip(), output)
if __name__ == "__main__":
unittest.main()
| mit |
myerpengine/odoo | addons/calendar/controllers/main.py | 36 | 3503 | import simplejson
import openerp
import openerp.addons.web.http as http
from openerp.addons.web.http import request
import openerp.addons.web.controllers.main as webmain
import json
class meeting_invitation(http.Controller):
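    # The routes below are typically reached from links embedded in meeting
    # invitation e-mails. Illustrative URL shape (placeholder values):
    #     /calendar/meeting/accept?db=mydb&token=<access_token>&action=<action_id>&id=<event_id>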
@http.route('/calendar/meeting/accept', type='http', auth="calendar")
def accept(self, db, token, action, id, **kwargs):
registry = openerp.modules.registry.RegistryManager.get(db)
attendee_pool = registry.get('calendar.attendee')
with registry.cursor() as cr:
attendee_id = attendee_pool.search(cr, openerp.SUPERUSER_ID, [('access_token', '=', token), ('state', '!=', 'accepted')])
if attendee_id:
attendee_pool.do_accept(cr, openerp.SUPERUSER_ID, attendee_id)
return self.view(db, token, action, id, view='form')
@http.route('/calendar/meeting/decline', type='http', auth="calendar")
def declined(self, db, token, action, id):
registry = openerp.modules.registry.RegistryManager.get(db)
attendee_pool = registry.get('calendar.attendee')
with registry.cursor() as cr:
attendee_id = attendee_pool.search(cr, openerp.SUPERUSER_ID, [('access_token', '=', token), ('state', '!=', 'declined')])
if attendee_id:
attendee_pool.do_decline(cr, openerp.SUPERUSER_ID, attendee_id)
return self.view(db, token, action, id, view='form')
@http.route('/calendar/meeting/view', type='http', auth="calendar")
def view(self, db, token, action, id, view='calendar'):
registry = openerp.modules.registry.RegistryManager.get(db)
meeting_pool = registry.get('calendar.event')
attendee_pool = registry.get('calendar.attendee')
partner_pool = registry.get('res.partner')
with registry.cursor() as cr:
attendee = attendee_pool.search_read(cr, openerp.SUPERUSER_ID, [('access_token', '=', token)], [])
if attendee and attendee[0] and attendee[0].get('partner_id'):
partner_id = int(attendee[0].get('partner_id')[0])
tz = partner_pool.read(cr, openerp.SUPERUSER_ID, partner_id, ['tz'])['tz']
else:
tz = False
attendee_data = meeting_pool.get_attendee(cr, openerp.SUPERUSER_ID, id, dict(tz=tz))
if attendee:
attendee_data['current_attendee'] = attendee[0]
values = dict(init="s.calendar.event('%s', '%s', '%s', '%s' , '%s');" % (db, action, id, 'form', json.dumps(attendee_data)))
return request.render('web.webclient_bootstrap', values)
    # Called via JSON-RPC every 5 minutes to check whether there are
    # notifications due for any event.
@http.route('/calendar/notify', type='json', auth="none")
def notify(self):
registry = openerp.modules.registry.RegistryManager.get(request.session.db)
uid = request.session.uid
context = request.session.context
with registry.cursor() as cr:
res = registry.get("calendar.alarm_manager").get_next_notif(cr, uid, context=context)
return res
@http.route('/calendar/notify_ack', type='json', auth="none")
def notify_ack(self, type=''):
registry = openerp.modules.registry.RegistryManager.get(request.session.db)
uid = request.session.uid
context = request.session.context
with registry.cursor() as cr:
res = registry.get("res.partner").calendar_last_notif_ack(cr, uid, context=context)
return res
| agpl-3.0 |
s20121035/rk3288_android5.1_repo | external/mesa3d/scons/crossmingw.py | 13 | 8201 | """SCons.Tool.crossmingw
Tool-specific initialization for MinGW (http://www.mingw.org/)
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
See also http://www.scons.org/wiki/CrossCompilingMingw
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import os.path
import string
import SCons.Action
import SCons.Builder
import SCons.Tool
import SCons.Util
# This is what we search for to find mingw:
prefixes32 = SCons.Util.Split("""
mingw32-
mingw32msvc-
i386-mingw32-
i486-mingw32-
i586-mingw32-
i686-mingw32-
i386-mingw32msvc-
i486-mingw32msvc-
i586-mingw32msvc-
i686-mingw32msvc-
i686-pc-mingw32-
i686-w64-mingw32-
""")
prefixes64 = SCons.Util.Split("""
x86_64-w64-mingw32-
amd64-mingw32-
amd64-mingw32msvc-
amd64-pc-mingw32-
""")
def find(env):
if env['machine'] == 'x86_64':
prefixes = prefixes64
else:
prefixes = prefixes32
for prefix in prefixes:
# First search in the SCons path and then the OS path:
if env.WhereIs(prefix + 'gcc') or SCons.Util.WhereIs(prefix + 'gcc'):
return prefix
return ''
def shlib_generator(target, source, env, for_signature):
cmd = SCons.Util.CLVar(['$SHLINK', '$SHLINKFLAGS'])
dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
if dll: cmd.extend(['-o', dll])
cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS'])
implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
if implib: cmd.append('-Wl,--out-implib,'+implib.get_string(for_signature))
def_target = env.FindIxes(target, 'WIN32DEFPREFIX', 'WIN32DEFSUFFIX')
if def_target: cmd.append('-Wl,--output-def,'+def_target.get_string(for_signature))
return [cmd]
def shlib_emitter(target, source, env):
dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
no_import_lib = env.get('no_import_lib', 0)
if not dll:
raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX")
if not no_import_lib and \
not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'):
# Append an import library to the list of targets.
target.append(env.ReplaceIxes(dll,
'SHLIBPREFIX', 'SHLIBSUFFIX',
'LIBPREFIX', 'LIBSUFFIX'))
# Append a def file target if there isn't already a def file target
# or a def file source. There is no option to disable def file
# target emitting, because I can't figure out why someone would ever
# want to turn it off.
def_source = env.FindIxes(source, 'WIN32DEFPREFIX', 'WIN32DEFSUFFIX')
def_target = env.FindIxes(target, 'WIN32DEFPREFIX', 'WIN32DEFSUFFIX')
if not def_source and not def_target:
target.append(env.ReplaceIxes(dll,
'SHLIBPREFIX', 'SHLIBSUFFIX',
'WIN32DEFPREFIX', 'WIN32DEFSUFFIX'))
return (target, source)
shlib_action = SCons.Action.Action(shlib_generator, '$SHLINKCOMSTR', generator=1)
res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR')
res_builder = SCons.Builder.Builder(action=res_action, suffix='.o',
source_scanner=SCons.Tool.SourceFileScanner)
SCons.Tool.SourceFileScanner.add_scanner('.rc', SCons.Defaults.CScan)
def compile_without_gstabs(env, sources, c_file):
'''This is a hack used to compile some source files without the
-gstabs option.
It seems that some versions of mingw32's gcc (4.4.2 at least) die
when compiling large files with the -gstabs option. -gstabs is
    related to debug symbols and can be omitted from the affected
files.
This function compiles the given c_file without -gstabs, removes
the c_file from the sources list, then appends the new .o file to
    sources, and returns the new sources list.
'''
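    # Illustrative use (the function is attached to the construction
    # environment via AddMethod() in generate() below):
    #     sources = env.compile_without_gstabs(sources, 'huge_generated.cpp')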
# Modify CCFLAGS to not have -gstabs option:
env2 = env.Clone()
flags = str(env2['CCFLAGS'])
flags = flags.replace("-gstabs", "")
env2['CCFLAGS'] = SCons.Util.CLVar(flags)
# Build the special-case files:
obj_file = env2.SharedObject(c_file)
# Replace ".cpp" or ".c" with ".o"
o_file = c_file.replace(".cpp", ".o")
o_file = o_file.replace(".c", ".o")
# Replace the .c files with the specially-compiled .o file
sources.remove(c_file)
sources.append(o_file)
return sources
def generate(env):
mingw_prefix = find(env)
if mingw_prefix:
dir = os.path.dirname(env.WhereIs(mingw_prefix + 'gcc') or SCons.Util.WhereIs(mingw_prefix + 'gcc'))
# The mingw bin directory must be added to the path:
path = env['ENV'].get('PATH', [])
if not path:
path = []
if SCons.Util.is_String(path):
path = string.split(path, os.pathsep)
env['ENV']['PATH'] = string.join([dir] + path, os.pathsep)
# Most of mingw is the same as gcc and friends...
gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas']
for tool in gnu_tools:
SCons.Tool.Tool(tool)(env)
#... but a few things differ:
env['CC'] = mingw_prefix + 'gcc'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['CXX'] = mingw_prefix + 'g++'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = shlib_action
env.Append(SHLIBEMITTER = [shlib_emitter])
env['LINK'] = mingw_prefix + 'g++'
env['AR'] = mingw_prefix + 'ar'
env['RANLIB'] = mingw_prefix + 'ranlib'
env['LINK'] = mingw_prefix + 'g++'
env['AS'] = mingw_prefix + 'as'
env['WIN32DEFPREFIX'] = ''
env['WIN32DEFSUFFIX'] = '.def'
env['SHOBJSUFFIX'] = '.o'
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = mingw_prefix + 'windres'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS ${INCPREFIX}${SOURCE.dir} $RCFLAGS -i $SOURCE -o $TARGET'
env['BUILDERS']['RES'] = res_builder
# Some setting from the platform also have to be overridden:
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.o'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
env['PROGPREFIX'] = ''
env['PROGSUFFIX'] = '.exe'
env['LIBPREFIX'] = 'lib'
env['LIBSUFFIX'] = '.a'
env['SHLIBPREFIX'] = ''
env['SHLIBSUFFIX'] = '.dll'
env['LIBPREFIXES'] = [ 'lib', '' ]
env['LIBSUFFIXES'] = [ '.a', '.lib' ]
# MinGW x86 port of gdb does not handle well dwarf debug info which is the
# default in recent gcc versions. The x64 port gdb from mingw-w64 seems to
# handle it fine though, so stick with the default there.
if env['machine'] != 'x86_64':
env.AppendUnique(CCFLAGS = ['-gstabs'])
env.AddMethod(compile_without_gstabs, 'compile_without_gstabs')
def exists(env):
return find(env)
| gpl-3.0 |
eleonrk/SickRage | lib/pbr/tests/test_version.py | 13 | 14100 | # Copyright 2012 Red Hat, Inc.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from testtools import matchers
from pbr.tests import base
from pbr import version
from_pip_string = version.SemanticVersion.from_pip_string
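# e.g. (mirrors the assertions in the tests below):
#     from_pip_string('1.2.4.0a1') == version.SemanticVersion(1, 2, 4, 'a', 1)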
class TestSemanticVersion(base.BaseTestCase):
def test_ordering(self):
ordered_versions = [
"1.2.3.dev6",
"1.2.3.dev7",
"1.2.3.a4.dev12",
"1.2.3.a4.dev13",
"1.2.3.a4",
"1.2.3.a5.dev1",
"1.2.3.a5",
"1.2.3.b3.dev1",
"1.2.3.b3",
"1.2.3.rc2.dev1",
"1.2.3.rc2",
"1.2.3.rc3.dev1",
"1.2.3",
"1.2.4",
"1.3.3",
"2.2.3",
]
for v in ordered_versions:
sv = version.SemanticVersion.from_pip_string(v)
self.expectThat(sv, matchers.Equals(sv))
for left, right in itertools.combinations(ordered_versions, 2):
l_pos = ordered_versions.index(left)
r_pos = ordered_versions.index(right)
if l_pos < r_pos:
m1 = matchers.LessThan
m2 = matchers.GreaterThan
else:
m1 = matchers.GreaterThan
m2 = matchers.LessThan
left_sv = version.SemanticVersion.from_pip_string(left)
right_sv = version.SemanticVersion.from_pip_string(right)
self.expectThat(left_sv, m1(right_sv))
self.expectThat(right_sv, m2(left_sv))
def test_from_pip_string_legacy_alpha(self):
expected = version.SemanticVersion(
1, 2, 0, prerelease_type='rc', prerelease=1)
parsed = from_pip_string('1.2.0rc1')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_postN(self):
# When pbr trunk was incompatible with PEP-440, a stable release was
# made that used postN versions to represent developer builds. As
# we expect only to be parsing versions of our own, we map those
# into dev builds of the next version.
expected = version.SemanticVersion(1, 2, 4, dev_count=5)
parsed = from_pip_string('1.2.3.post5')
self.expectThat(expected, matchers.Equals(parsed))
expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6)
parsed = from_pip_string('1.2.3.0a4.post6')
self.expectThat(expected, matchers.Equals(parsed))
# We can't define a mapping for .postN.devM, so it should raise.
self.expectThat(
lambda: from_pip_string('1.2.3.post5.dev6'),
matchers.raises(ValueError))
def test_from_pip_string_v_version(self):
parsed = from_pip_string('v1.2.3')
expected = version.SemanticVersion(1, 2, 3)
self.expectThat(expected, matchers.Equals(parsed))
expected = version.SemanticVersion(1, 2, 3, 'a', 5, dev_count=6)
parsed = from_pip_string('V1.2.3.0a4.post6')
self.expectThat(expected, matchers.Equals(parsed))
self.expectThat(
lambda: from_pip_string('x1.2.3'),
matchers.raises(ValueError))
def test_from_pip_string_legacy_nonzero_lead_in(self):
# reported in bug 1361251
expected = version.SemanticVersion(
0, 0, 1, prerelease_type='a', prerelease=2)
parsed = from_pip_string('0.0.1a2')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_short_nonzero_lead_in(self):
expected = version.SemanticVersion(
0, 1, 0, prerelease_type='a', prerelease=2)
parsed = from_pip_string('0.1a2')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_no_0_prerelease(self):
expected = version.SemanticVersion(
2, 1, 0, prerelease_type='rc', prerelease=1)
parsed = from_pip_string('2.1.0.rc1')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_no_0_prerelease_2(self):
expected = version.SemanticVersion(
2, 0, 0, prerelease_type='rc', prerelease=1)
parsed = from_pip_string('2.0.0.rc1')
self.assertEqual(expected, parsed)
def test_from_pip_string_legacy_non_440_beta(self):
expected = version.SemanticVersion(
2014, 2, prerelease_type='b', prerelease=2)
parsed = from_pip_string('2014.2.b2')
self.assertEqual(expected, parsed)
def test_from_pip_string_pure_git_hash(self):
self.assertRaises(ValueError, from_pip_string, '6eed5ae')
def test_from_pip_string_non_digit_start(self):
self.assertRaises(ValueError, from_pip_string,
'non-release-tag/2014.12.16-1')
def test_final_version(self):
semver = version.SemanticVersion(1, 2, 3)
self.assertEqual((1, 2, 3, 'final', 0), semver.version_tuple())
self.assertEqual("1.2.3", semver.brief_string())
self.assertEqual("1.2.3", semver.debian_string())
self.assertEqual("1.2.3", semver.release_string())
self.assertEqual("1.2.3", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.3"))
def test_parsing_short_forms(self):
semver = version.SemanticVersion(1, 0, 0)
self.assertEqual(semver, from_pip_string("1"))
self.assertEqual(semver, from_pip_string("1.0"))
self.assertEqual(semver, from_pip_string("1.0.0"))
def test_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, dev_count=5)
self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~dev5", semver.debian_string())
self.assertEqual("1.2.4.dev5", semver.release_string())
self.assertEqual("1.2.3.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.dev5"))
def test_dev_no_git_version(self):
semver = version.SemanticVersion(1, 2, 4, dev_count=5)
self.assertEqual((1, 2, 4, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~dev5", semver.debian_string())
self.assertEqual("1.2.4.dev5", semver.release_string())
self.assertEqual("1.2.3.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.dev5"))
def test_dev_zero_version(self):
semver = version.SemanticVersion(1, 2, 0, dev_count=5)
self.assertEqual((1, 2, 0, 'dev', 4), semver.version_tuple())
self.assertEqual("1.2.0", semver.brief_string())
self.assertEqual("1.2.0~dev5", semver.debian_string())
self.assertEqual("1.2.0.dev5", semver.release_string())
self.assertEqual("1.1.9999.dev5", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.0.dev5"))
def test_alpha_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a', 1, 12)
self.assertEqual((1, 2, 4, 'alphadev', 12), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~a1.dev12", semver.debian_string())
self.assertEqual("1.2.4.0a1.dev12", semver.release_string())
self.assertEqual("1.2.3.a1.dev12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0a1.dev12"))
def test_alpha_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a', 1)
self.assertEqual((1, 2, 4, 'alpha', 1), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~a1", semver.debian_string())
self.assertEqual("1.2.4.0a1", semver.release_string())
self.assertEqual("1.2.3.a1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0a1"))
def test_alpha_zero_version(self):
semver = version.SemanticVersion(1, 2, 0, 'a', 1)
self.assertEqual((1, 2, 0, 'alpha', 1), semver.version_tuple())
self.assertEqual("1.2.0", semver.brief_string())
self.assertEqual("1.2.0~a1", semver.debian_string())
self.assertEqual("1.2.0.0a1", semver.release_string())
self.assertEqual("1.1.9999.a1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.0.0a1"))
def test_alpha_major_zero_version(self):
semver = version.SemanticVersion(1, 0, 0, 'a', 1)
self.assertEqual((1, 0, 0, 'alpha', 1), semver.version_tuple())
self.assertEqual("1.0.0", semver.brief_string())
self.assertEqual("1.0.0~a1", semver.debian_string())
self.assertEqual("1.0.0.0a1", semver.release_string())
self.assertEqual("0.9999.9999.a1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.0.0.0a1"))
def test_alpha_default_version(self):
semver = version.SemanticVersion(1, 2, 4, 'a')
self.assertEqual((1, 2, 4, 'alpha', 0), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~a0", semver.debian_string())
self.assertEqual("1.2.4.0a0", semver.release_string())
self.assertEqual("1.2.3.a0", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0a0"))
def test_beta_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, 'b', 1, 12)
self.assertEqual((1, 2, 4, 'betadev', 12), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~b1.dev12", semver.debian_string())
self.assertEqual("1.2.4.0b1.dev12", semver.release_string())
self.assertEqual("1.2.3.b1.dev12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0b1.dev12"))
def test_beta_version(self):
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
self.assertEqual((1, 2, 4, 'beta', 1), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~b1", semver.debian_string())
self.assertEqual("1.2.4.0b1", semver.release_string())
self.assertEqual("1.2.3.b1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0b1"))
def test_decrement_nonrelease(self):
# The prior version of any non-release is a release
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
self.assertEqual(
version.SemanticVersion(1, 2, 3), semver.decrement())
def test_decrement_nonrelease_zero(self):
# We set an arbitrary max version of 9999 when decrementing versions
# - this is part of handling rpm support.
semver = version.SemanticVersion(1, 0, 0)
self.assertEqual(
version.SemanticVersion(0, 9999, 9999), semver.decrement())
def test_decrement_release(self):
# The next patch version of a release version requires a change to the
# patch level.
semver = version.SemanticVersion(2, 2, 5)
self.assertEqual(
version.SemanticVersion(2, 2, 4), semver.decrement())
def test_increment_nonrelease(self):
# The next patch version of a non-release version is another
# non-release version as the next release doesn't need to be
# incremented.
semver = version.SemanticVersion(1, 2, 4, 'b', 1)
self.assertEqual(
version.SemanticVersion(1, 2, 4, 'b', 2), semver.increment())
# Major and minor increments however need to bump things.
self.assertEqual(
version.SemanticVersion(1, 3, 0), semver.increment(minor=True))
self.assertEqual(
version.SemanticVersion(2, 0, 0), semver.increment(major=True))
def test_increment_release(self):
# The next patch version of a release version requires a change to the
# patch level.
semver = version.SemanticVersion(1, 2, 5)
self.assertEqual(
version.SemanticVersion(1, 2, 6), semver.increment())
self.assertEqual(
version.SemanticVersion(1, 3, 0), semver.increment(minor=True))
self.assertEqual(
version.SemanticVersion(2, 0, 0), semver.increment(major=True))
def test_rc_dev_version(self):
semver = version.SemanticVersion(1, 2, 4, 'rc', 1, 12)
self.assertEqual((1, 2, 4, 'candidatedev', 12), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~rc1.dev12", semver.debian_string())
self.assertEqual("1.2.4.0rc1.dev12", semver.release_string())
self.assertEqual("1.2.3.rc1.dev12", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0rc1.dev12"))
def test_rc_version(self):
semver = version.SemanticVersion(1, 2, 4, 'rc', 1)
self.assertEqual((1, 2, 4, 'candidate', 1), semver.version_tuple())
self.assertEqual("1.2.4", semver.brief_string())
self.assertEqual("1.2.4~rc1", semver.debian_string())
self.assertEqual("1.2.4.0rc1", semver.release_string())
self.assertEqual("1.2.3.rc1", semver.rpm_string())
self.assertEqual(semver, from_pip_string("1.2.4.0rc1"))
def test_to_dev(self):
self.assertEqual(
version.SemanticVersion(1, 2, 3, dev_count=1),
version.SemanticVersion(1, 2, 3).to_dev(1))
self.assertEqual(
version.SemanticVersion(1, 2, 3, 'rc', 1, dev_count=1),
version.SemanticVersion(1, 2, 3, 'rc', 1).to_dev(1))
| gpl-3.0 |
hynnet/hiwifi-openwrt-HC5661-HC5761 | staging_dir/host/lib/python2.7/idlelib/WidgetRedirector.py | 143 | 4476 | from Tkinter import *
class WidgetRedirector:
"""Support for redirecting arbitrary widget subcommands.
Some Tk operations don't normally pass through Tkinter. For example, if a
character is inserted into a Text widget by pressing a key, a default Tk
binding to the widget's 'insert' operation is activated, and the Tk library
processes the insert without calling back into Tkinter.
Although a binding to <Key> could be made via Tkinter, what we really want
to do is to hook the Tk 'insert' operation itself.
When a widget is instantiated, a Tcl command is created whose name is the
same as the pathname widget._w. This command is used to invoke the various
widget operations, e.g. insert (for a Text widget). We are going to hook
this command and provide a facility ('register') to intercept the widget
operation.
In IDLE, the function being registered provides access to the top of a
Percolator chain. At the bottom of the chain is a call to the original
Tk widget operation.
"""
def __init__(self, widget):
self._operations = {}
self.widget = widget # widget instance
self.tk = tk = widget.tk # widget's root
w = widget._w # widget's (full) Tk pathname
self.orig = w + "_orig"
# Rename the Tcl command within Tcl:
tk.call("rename", w, self.orig)
# Create a new Tcl command whose name is the widget's pathname, and
# whose action is to dispatch on the operation passed to the widget:
tk.createcommand(w, self.dispatch)
def __repr__(self):
return "WidgetRedirector(%s<%s>)" % (self.widget.__class__.__name__,
self.widget._w)
def close(self):
for operation in list(self._operations):
self.unregister(operation)
widget = self.widget; del self.widget
orig = self.orig; del self.orig
tk = widget.tk
w = widget._w
tk.deletecommand(w)
# restore the original widget Tcl command:
tk.call("rename", orig, w)
def register(self, operation, function):
self._operations[operation] = function
setattr(self.widget, operation, function)
return OriginalCommand(self, operation)
def unregister(self, operation):
if operation in self._operations:
function = self._operations[operation]
del self._operations[operation]
if hasattr(self.widget, operation):
delattr(self.widget, operation)
return function
else:
return None
def dispatch(self, operation, *args):
'''Callback from Tcl which runs when the widget is referenced.
If an operation has been registered in self._operations, apply the
associated function to the args passed into Tcl. Otherwise, pass the
operation through to Tk via the original Tcl function.
Note that if a registered function is called, the operation is not
passed through to Tk. Apply the function returned by self.register()
to *args to accomplish that. For an example, see ColorDelegator.py.
'''
m = self._operations.get(operation)
try:
if m:
return m(*args)
else:
return self.tk.call((self.orig, operation) + args)
except TclError:
return ""
class OriginalCommand:
def __init__(self, redir, operation):
self.redir = redir
self.operation = operation
self.tk = redir.tk
self.orig = redir.orig
self.tk_call = self.tk.call
self.orig_and_operation = (self.orig, self.operation)
def __repr__(self):
return "OriginalCommand(%r, %r)" % (self.redir, self.operation)
def __call__(self, *args):
return self.tk_call(self.orig_and_operation + args)
def main():
root = Tk()
root.wm_protocol("WM_DELETE_WINDOW", root.quit)
text = Text()
text.pack()
text.focus_set()
redir = WidgetRedirector(text)
global previous_tcl_fcn
def my_insert(*args):
print "insert", args
previous_tcl_fcn(*args)
previous_tcl_fcn = redir.register("insert", my_insert)
root.mainloop()
redir.unregister("insert") # runs after first 'close window'
redir.close()
root.mainloop()
root.destroy()
if __name__ == "__main__":
main()
| gpl-2.0 |
eusi/MissionPlanerHM | Lib/site-packages/scipy/constants/__init__.py | 55 | 1121 | """
Various useful constants and conversion formulae
Modules
-------
.. autosummary::
:toctree: generated/
   codata - CODATA Recommended Values of Fundamental Physical Constants (2006)
constants - Collection of physical constants and conversion factors
Functions
---------
.. autosummary::
:toctree: generated/
C2F - Convert Celsius to Fahrenheit
C2K - Convert Celsius to Kelvin
F2C - Convert Fahrenheit to Celsius
F2K - Convert Fahrenheit to Kelvin
K2C - Convert Kelvin to Celsius
K2F - Convert Kelvin to Fahrenheit
find - Find the codata.physical_constant keys containing a given string
lambda2nu - Convert wavelength to optical frequency
nu2lambda - Convert optical frequency to wavelength
precision - Relative precision in physical_constants indexed by key
unit - Unit in physical_constants indexed by key
value - Value in physical_constants indexed by key
"""
# Modules contributed by BasSw ([email protected])
from codata import *
from constants import *
__all__ = filter(lambda s:not s.startswith('_'),dir())
from numpy.testing import Tester
test = Tester().test
| gpl-3.0 |
indico/indico | indico/modules/rb/operations/rooms.py | 4 | 9012 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import date, datetime, time
from dateutil.relativedelta import relativedelta
from flask import session
from sqlalchemy.orm import joinedload, load_only
from indico.core.db import db
from indico.core.db.sqlalchemy.principals import PrincipalType
from indico.core.db.sqlalchemy.util.queries import db_dates_overlap, escape_like
from indico.modules.rb import rb_settings
from indico.modules.rb.models.equipment import EquipmentType, RoomEquipmentAssociation
from indico.modules.rb.models.favorites import favorite_room_table
from indico.modules.rb.models.principals import RoomPrincipal
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.reservations import Reservation
from indico.modules.rb.models.room_features import RoomFeature
from indico.modules.rb.models.rooms import Room
from indico.modules.rb.statistics import calculate_rooms_occupancy
from indico.modules.rb.util import rb_is_admin
from indico.util.caching import memoize_redis
def _filter_coordinates(query, filters):
try:
sw_lat = filters['sw_lat']
sw_lng = filters['sw_lng']
ne_lat = filters['ne_lat']
ne_lng = filters['ne_lng']
except KeyError:
return query
return query.filter(Room.latitude >= sw_lat,
Room.latitude <= ne_lat,
Room.longitude >= sw_lng,
Room.longitude <= ne_lng)
def _make_room_text_filter(text):
text = f'%{escape_like(text)}%'
columns = ('site', 'division', 'building', 'floor', 'number', 'comments', 'full_name')
return db.or_(getattr(Room, col).ilike(text) for col in columns)
def _query_managed_rooms(user):
criteria = [db.and_(RoomPrincipal.type == PrincipalType.user,
RoomPrincipal.user_id == user.id,
RoomPrincipal.has_management_permission())]
for group in user.local_groups:
criteria.append(db.and_(RoomPrincipal.type == PrincipalType.local_group,
RoomPrincipal.local_group_id == group.id,
RoomPrincipal.has_management_permission()))
for group in user.iter_all_multipass_groups():
criteria.append(db.and_(RoomPrincipal.type == PrincipalType.multipass_group,
RoomPrincipal.multipass_group_provider == group.provider.name,
db.func.lower(RoomPrincipal.multipass_group_name) == group.name.lower(),
RoomPrincipal.has_management_permission()))
return Room.query.filter(~Room.is_deleted, Room.acl_entries.any(db.or_(*criteria)) | (Room.owner == user))
def _query_all_rooms_for_acl_check():
return (Room.query
.filter(~Room.is_deleted)
.options(load_only('id', 'protection_mode', 'reservations_need_confirmation'),
joinedload('owner').load_only('id'),
joinedload('acl_entries')))
@memoize_redis(900)
def has_managed_rooms(user):
if user.can_get_all_multipass_groups:
return _query_managed_rooms(user).has_rows()
else:
query = _query_all_rooms_for_acl_check()
return any(r.can_manage(user, allow_admin=False) for r in query)
@memoize_redis(900)
def get_managed_room_ids(user):
if user.can_get_all_multipass_groups:
return {id_ for id_, in _query_managed_rooms(user).with_entities(Room.id)}
else:
query = _query_all_rooms_for_acl_check()
return {r.id for r in query if r.can_manage(user, allow_admin=False)}
@memoize_redis(3600)
def get_room_statistics(room):
data = {
'count': {
'id': 'times_booked',
'values': [],
'note': False
},
'percentage': {
'id': 'occupancy',
'values': [],
'note': True
}
}
ranges = [7, 30, 365]
end_date = date.today()
for days in ranges:
start_date = date.today() - relativedelta(days=days)
count = (ReservationOccurrence.query
.join(ReservationOccurrence.reservation)
.join(Reservation.room)
.filter(Room.id == room.id,
ReservationOccurrence.is_valid,
db_dates_overlap(ReservationOccurrence,
'start_dt', datetime.combine(start_date, time()),
'end_dt', datetime.combine(end_date, time.max)))
.count())
percentage = calculate_rooms_occupancy([room], start_date, end_date) * 100
if count > 0 or percentage > 0:
data['count']['values'].append({'days': days, 'value': count})
data['percentage']['values'].append({'days': days, 'value': percentage})
return data
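# Shape of the dict returned above, sketched with made-up numbers:
#
#   {'count': {'id': 'times_booked', 'note': False,
#              'values': [{'days': 7, 'value': 3}, {'days': 30, 'value': 11}]},
#    'percentage': {'id': 'occupancy', 'note': True,
#                   'values': [{'days': 7, 'value': 42.5}, ...]}}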
def search_for_rooms(filters, allow_admin=False, availability=None):
"""Search for a room, using the provided filters.
:param filters: The filters, provided as a dictionary
:param allow_admin: A boolean specifying whether admins have override privileges
    :param availability: A boolean specifying whether only available (`True`)
                         or only unavailable (`False`) rooms should be returned,
                         or `None` to return all rooms.
"""
query = (Room.query
.outerjoin(favorite_room_table, db.and_(favorite_room_table.c.user_id == session.user.id,
favorite_room_table.c.room_id == Room.id))
.reset_joinpoint() # otherwise filter_by() would apply to the favorite table
.filter(~Room.is_deleted)
.order_by(favorite_room_table.c.user_id.is_(None), db.func.indico.natsort(Room.full_name)))
criteria = {}
if 'capacity' in filters:
query = query.filter(Room.capacity >= filters['capacity'])
if 'building' in filters:
criteria['building'] = filters['building']
if 'division' in filters:
criteria['division'] = filters['division']
query = query.filter_by(**criteria)
if 'text' in filters:
text = ' '.join(filters['text'].strip().split())
if text.startswith('#') and text[1:].isdigit():
query = query.filter(Room.id == int(text[1:]))
else:
query = query.filter(_make_room_text_filter(text))
if filters.get('equipment'):
subquery = (db.session.query(RoomEquipmentAssociation)
.with_entities(db.func.count(RoomEquipmentAssociation.c.room_id))
.filter(RoomEquipmentAssociation.c.room_id == Room.id,
EquipmentType.name.in_(filters['equipment']))
.join(EquipmentType, RoomEquipmentAssociation.c.equipment_id == EquipmentType.id)
.correlate(Room)
.scalar_subquery())
query = query.filter(subquery == len(filters['equipment']))
if filters.get('features'):
for feature in filters['features']:
query = query.filter(Room.available_equipment.any(EquipmentType.features.any(RoomFeature.name == feature)))
if filters.get('favorite'):
query = query.filter(favorite_room_table.c.user_id.isnot(None))
if filters.get('mine'):
ids = get_managed_room_ids(session.user)
query = query.filter(Room.id.in_(ids))
query = _filter_coordinates(query, filters)
if availability is None:
return query
start_dt, end_dt = filters['start_dt'], filters['end_dt']
repeatability = (filters['repeat_frequency'], filters['repeat_interval'])
availability_filters = [Room.filter_available(start_dt, end_dt, repeatability, include_blockings=False,
include_pre_bookings=False)]
if not (allow_admin and rb_is_admin(session.user)):
selected_period_days = (filters['end_dt'] - filters['start_dt']).days
booking_limit_days = db.func.coalesce(Room.booking_limit_days, rb_settings.get('booking_limit'))
criterion = db.and_(Room.filter_bookable_hours(start_dt.time(), end_dt.time()),
Room.filter_nonbookable_periods(start_dt, end_dt),
db.or_(booking_limit_days.is_(None),
selected_period_days <= booking_limit_days))
unbookable_ids = [room.id
for room in query.filter(db.and_(*availability_filters), ~criterion)
if not room.can_override(session.user, allow_admin=False)]
availability_filters.append(~Room.id.in_(unbookable_ids))
availability_criterion = db.and_(*availability_filters)
if availability is False:
availability_criterion = ~availability_criterion
return query.filter(availability_criterion)
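# Minimal usage sketch (assumes a request context with a logged-in session
# user; `RepeatFrequency` stands in for whatever repetition enum the caller
# uses for the last two filter keys):
#
#   filters = {'capacity': 10, 'building': '28',
#              'start_dt': start, 'end_dt': end,
#              'repeat_frequency': RepeatFrequency.NEVER, 'repeat_interval': 0}
#   rooms = search_for_rooms(filters, availability=True).all()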
| mit |
pschmitt/home-assistant | homeassistant/components/radarr/sensor.py | 16 | 7642 | """Support for Radarr."""
from datetime import datetime, timedelta
import logging
import time
from pytz import timezone
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_PORT,
CONF_SSL,
DATA_BYTES,
DATA_EXABYTES,
DATA_GIGABYTES,
DATA_KILOBYTES,
DATA_MEGABYTES,
DATA_PETABYTES,
DATA_TERABYTES,
DATA_YOTTABYTES,
DATA_ZETTABYTES,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_DAYS = "days"
CONF_INCLUDED = "include_paths"
CONF_UNIT = "unit"
CONF_URLBASE = "urlbase"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 7878
DEFAULT_URLBASE = ""
DEFAULT_DAYS = "1"
DEFAULT_UNIT = DATA_GIGABYTES
SCAN_INTERVAL = timedelta(minutes=10)
SENSOR_TYPES = {
"diskspace": ["Disk Space", DATA_GIGABYTES, "mdi:harddisk"],
"upcoming": ["Upcoming", "Movies", "mdi:television"],
"wanted": ["Wanted", "Movies", "mdi:television"],
"movies": ["Movies", "Movies", "mdi:television"],
"commands": ["Commands", "Commands", "mdi:code-braces"],
"status": ["Status", "Status", "mdi:information"],
}
ENDPOINTS = {
"diskspace": "{0}://{1}:{2}/{3}api/diskspace",
"upcoming": "{0}://{1}:{2}/{3}api/calendar?start={4}&end={5}",
"movies": "{0}://{1}:{2}/{3}api/movie",
"commands": "{0}://{1}:{2}/{3}api/command",
"status": "{0}://{1}:{2}/{3}api/system/status",
}
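# For illustration, formatting the "upcoming" template with hypothetical
# connection details (scheme, host, port, urlbase, start, end) yields:
#
#   ENDPOINTS["upcoming"].format("http", "localhost", 7878, "",
#                                "2021-01-01", "2021-01-02")
#   # -> "http://localhost:7878/api/calendar?start=2021-01-01&end=2021-01-02"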
# Support up to yottabytes for the future, why not
BYTE_SIZES = [
DATA_BYTES,
DATA_KILOBYTES,
DATA_MEGABYTES,
DATA_GIGABYTES,
DATA_TERABYTES,
DATA_PETABYTES,
DATA_EXABYTES,
DATA_ZETTABYTES,
DATA_YOTTABYTES,
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_DAYS, default=DEFAULT_DAYS): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_INCLUDED, default=[]): cv.ensure_list,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["movies"]): vol.All(
cv.ensure_list, [vol.In(list(SENSOR_TYPES))]
),
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): vol.In(BYTE_SIZES),
vol.Optional(CONF_URLBASE, default=DEFAULT_URLBASE): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Radarr platform."""
conditions = config.get(CONF_MONITORED_CONDITIONS)
add_entities([RadarrSensor(hass, config, sensor) for sensor in conditions], True)
class RadarrSensor(Entity):
"""Implementation of the Radarr sensor."""
def __init__(self, hass, conf, sensor_type):
"""Create Radarr entity."""
self.conf = conf
self.host = conf.get(CONF_HOST)
self.port = conf.get(CONF_PORT)
self.urlbase = conf.get(CONF_URLBASE)
if self.urlbase:
self.urlbase = f"{self.urlbase.strip('/')}/"
self.apikey = conf.get(CONF_API_KEY)
self.included = conf.get(CONF_INCLUDED)
self.days = int(conf.get(CONF_DAYS))
self.ssl = "https" if conf.get(CONF_SSL) else "http"
self._state = None
self.data = []
self._tz = timezone(str(hass.config.time_zone))
self.type = sensor_type
self._name = SENSOR_TYPES[self.type][0]
if self.type == "diskspace":
self._unit = conf.get(CONF_UNIT)
else:
self._unit = SENSOR_TYPES[self.type][1]
self._icon = SENSOR_TYPES[self.type][2]
self._available = False
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format("Radarr", self._name)
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def available(self):
"""Return sensor availability."""
return self._available
@property
def unit_of_measurement(self):
"""Return the unit of the sensor."""
return self._unit
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
attributes = {}
if self.type == "upcoming":
for movie in self.data:
attributes[to_key(movie)] = get_release_date(movie)
elif self.type == "commands":
for command in self.data:
attributes[command["name"]] = command["state"]
elif self.type == "diskspace":
for data in self.data:
free_space = to_unit(data["freeSpace"], self._unit)
total_space = to_unit(data["totalSpace"], self._unit)
percentage_used = (
0 if total_space == 0 else free_space / total_space * 100
)
attributes[data["path"]] = "{:.2f}/{:.2f}{} ({:.2f}%)".format(
free_space, total_space, self._unit, percentage_used
)
elif self.type == "movies":
for movie in self.data:
attributes[to_key(movie)] = movie["downloaded"]
elif self.type == "status":
attributes = self.data
return attributes
@property
def icon(self):
"""Return the icon of the sensor."""
return self._icon
def update(self):
"""Update the data for the sensor."""
start = get_date(self._tz)
end = get_date(self._tz, self.days)
try:
res = requests.get(
ENDPOINTS[self.type].format(
self.ssl, self.host, self.port, self.urlbase, start, end
),
headers={"X-Api-Key": self.apikey},
timeout=10,
)
except OSError:
_LOGGER.warning("Host %s is not available", self.host)
self._available = False
self._state = None
return
if res.status_code == HTTP_OK:
if self.type in ["upcoming", "movies", "commands"]:
self.data = res.json()
self._state = len(self.data)
elif self.type == "diskspace":
# If included paths are not provided, use all data
if self.included == []:
self.data = res.json()
else:
# Filter to only show lists that are included
self.data = list(
filter(lambda x: x["path"] in self.included, res.json())
)
self._state = "{:.2f}".format(
to_unit(sum([data["freeSpace"] for data in self.data]), self._unit)
)
elif self.type == "status":
self.data = res.json()
self._state = self.data["version"]
self._available = True
def get_date(zone, offset=0):
"""Get date based on timezone and offset of days."""
day = 60 * 60 * 24
    return datetime.fromtimestamp(time.time() + day * offset, tz=zone).date()
def get_release_date(data):
"""Get release date."""
date = data.get("physicalRelease")
if not date:
date = data.get("inCinemas")
return date
def to_key(data):
"""Get key."""
return "{} ({})".format(data["title"], data["year"])
def to_unit(value, unit):
"""Convert bytes to give unit."""
return value / 1024 ** BYTE_SIZES.index(unit)
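# Sanity-check sketch for the conversion above (1 GiB expressed in bytes):
#
#   to_unit(1024 ** 3, DATA_GIGABYTES)  # -> 1.0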
| apache-2.0 |
andyrooger/OAT | src/interactive/commandui.py | 1 | 6086 | """
Command based UI for the obfuscator.
"""
# OAT - Obfuscation and Analysis Tool
# Copyright (C) 2011 Andy Gurden
#
# This file is part of OAT.
#
# OAT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OAT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OAT. If not, see <http://www.gnu.org/licenses/>.
import cmd
import os
try:
import argparse
except ImportError:
from thirdparty import argparse
class CommandUI(cmd.Cmd):
"""Command UI base class, use self.cmdloop() to run."""
def __init__(self):
cmd.Cmd.__init__(self)
self.prompt = "--) "
self.intro = ("Welcome to OAT - Obfuscation and Analysis Tool!\n"
"If you are confused, type help.")
self._commands = {}
def cmdloop(self, intro = None):
"""Un-KeyboardInterrup-able cmdloop."""
try:
super().cmdloop(intro)
except KeyboardInterrupt:
print()
self.cmdloop("")
def postcmd(self, stop, line):
print()
return stop
def emptyline(self):
pass
def _split_line(self, line):
command, ignore, params = line.partition(" ")
params = params.lstrip()
return (command, params)
def default(self, line):
# Should look through added commands and call the correct one
command, params = self._split_line(line)
try:
todo = self._commands[command]
except KeyError:
return cmd.Cmd.default(self, line)
else:
return todo.do(params)
def do_quit(self, line):
"""Exit the program."""
return True
def do_EOF(self, line):
"""Exit the program. Use CTRL^D."""
print()
return True
def completedefault(self, text, line, begidx, endidx):
# Should look through added commands and call the correct one
command, params = self._split_line(line)
try:
todo = self._commands[command]
except KeyError:
return cmd.Cmd.completedefault(self, text, line, begidx, endidx)
else:
return todo.complete(text, params, begidx, endidx)
def do_help(self, line):
"""Get help on a given subject."""
if not line:
return self.help_topics()
# Should check for help in our added commands or fall back
try:
todo = self._commands[line]
except KeyError:
return cmd.Cmd.do_help(self, line)
else:
return todo.help()
def help_topics(self):
"""Print topics for help. This uses the code from Cmd's implementation."""
cmds_doc = ["help", "quit", "status"] + list(self._commands.keys())
self.stdout.write("%s\n"%str(self.doc_leader))
self.print_topics(self.doc_header, cmds_doc, 15,80)
def completenames(self, text, *ignored):
        return cmd.Cmd.completenames(self, text, *ignored) + [name for name in self._commands.keys() if name.startswith(text)]
def add_command(self, command : "interactive.Command to add to the console."):
"""Add a command to the console."""
self._commands[command.id] = command
def do_status(self, line):
"""Show status for the current session."""
for command in self._commands:
self._commands[command].status()
class Command:
"""Base class for any commands to add to the console."""
def __init__(self, id : "Name of the command"):
self._opts = Command.CommandArgs(description = self.run.__doc__,
add_help = False,
prog = id)
self.id = id
def do(self, line):
try:
args = self._opts.parse_args(line.split())
except ValueError as exc:
print("Problem: " + str(exc))
print()
self.help()
return False
except IOError as exc:
print(exc.strerror + ": " + exc.filename)
else:
return self.run(args)
def complete(self, text, line, begidx, endidx):
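        # begidx/endidx index into the full input line (command name
        # included); the offsets below shift them so they index into just
        # the argument string that was split off after the command name.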
beg = begidx - len(self.id) - 1
end = endidx - len(self.id) - 1
begarg = line.rfind(" ", None, end) + 1
endarg = end #line.rfind(" ", beg, None)
if begarg == -1:
begarg = 0
if endarg == -1:
endarg = len(line)
arg = line[begarg:endarg]
before = line[:begarg].split()
after = line[endarg:].split()
completions = self.autocomplete(before, arg, after)
return [completion[len(arg)-len(text):] for completion in completions]
def run(self, args): raise NotImplementedError
def autocomplete(self, before, arg, after): return []
def status(self): pass
def help(self):
self._opts.print_help()
class CommandArgs(argparse.ArgumentParser):
"""Child of OptionParser tailored to be used in the command interface."""
def __init__(self, *args, **kwargs):
argparse.ArgumentParser.__init__(self, *args, **kwargs)
def error(self, msg):
raise ValueError(msg)
def path_completer(path : "Path to complete"):
"""Completer for file paths."""
directory, base = os.path.split(path)
entries = []
try:
if directory:
entries = os.listdir(directory)
else:
entries = os.listdir(os.getcwd())
except OSError:
entries = []
suggestions = [os.path.join(directory, file) for file in entries if file.startswith(base)]
return suggestions
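# Illustrative call (results depend on the current working directory):
#
#   path_completer("src/co")   # -> e.g. ['src/commandui.py', 'src/console']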
| gpl-3.0 |
jeffrey4l/nova | nova/api/openstack/compute/contrib/used_limits.py | 62 | 3281 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import quota
QUOTAS = quota.QUOTAS
XMLNS = "http://docs.openstack.org/compute/ext/used_limits/api/v1.1"
ALIAS = "os-used-limits"
authorize = extensions.soft_extension_authorizer('compute', 'used_limits')
authorize_for_admin = extensions.extension_authorizer('compute',
'used_limits_for_admin')
class UsedLimitsController(wsgi.Controller):
def __init__(self, ext_mgr):
self.ext_mgr = ext_mgr
@staticmethod
def _reserved(req):
try:
return int(req.GET['reserved'])
except (ValueError, KeyError):
return False
@wsgi.extends
def index(self, req, resp_obj):
context = req.environ['nova.context']
project_id = self._project_id(context, req)
quotas = QUOTAS.get_project_quotas(context, project_id, usages=True)
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalFloatingIpsUsed': 'floating_ips',
'totalSecurityGroupsUsed': 'security_groups',
}
if self.ext_mgr.is_loaded('os-server-group-quotas'):
quota_map['totalServerGroupsUsed'] = 'server_groups'
used_limits = {}
for display_name, key in six.iteritems(quota_map):
if key in quotas:
reserved = (quotas[key]['reserved']
if self._reserved(req) else 0)
used_limits[display_name] = quotas[key]['in_use'] + reserved
resp_obj.obj['limits']['absolute'].update(used_limits)
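        # Illustration of the update above, with hypothetical numbers: given
        #   quotas = {'ram': {'in_use': 2048, 'reserved': 512}}
        # and '?reserved=1' in the query string, used_limits becomes
        #   {'totalRAMUsed': 2560}
        # before being merged into the 'absolute' limits.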
def _project_id(self, context, req):
if self.ext_mgr.is_loaded('os-used-limits-for-admin'):
if 'tenant_id' in req.GET:
tenant_id = req.GET.get('tenant_id')
target = {
'project_id': tenant_id,
'user_id': context.user_id
}
authorize_for_admin(context, target=target)
return tenant_id
return context.project_id
class Used_limits(extensions.ExtensionDescriptor):
"""Provide data on limited resources that are being used."""
name = "UsedLimits"
alias = ALIAS
namespace = XMLNS
updated = "2012-07-13T00:00:00Z"
def get_controller_extensions(self):
controller = UsedLimitsController(self.ext_mgr)
limits_ext = extensions.ControllerExtension(self, 'limits',
controller=controller)
return [limits_ext]
| apache-2.0 |
jeffery9/mixprint_addons | project_long_term/wizard/project_compute_tasks.py | 63 | 2545 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class project_compute_tasks(osv.osv_memory):
_name = 'project.compute.tasks'
_description = 'Project Compute Tasks'
_columns = {
'project_id': fields.many2one('project.project', 'Project', required=True)
}
def compute_date(self, cr, uid, ids, context=None):
"""
Schedule the tasks according to users and priority.
"""
project_pool = self.pool.get('project.project')
task_pool = self.pool.get('project.task')
if context is None:
context = {}
context['compute_by'] = 'project'
data = self.read(cr, uid, ids, [])[0]
project_id = data['project_id'][0]
project_pool.schedule_tasks(cr, uid, [project_id], context=context)
return self._open_task_list(cr, uid, data, context=context)
def _open_task_list(self, cr, uid, data, context=None):
"""
Return the scheduled task list.
"""
if context is None:
context = {}
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj._get_id(cr, uid, 'project_long_term', 'act_resouce_allocation')
id = mod_obj.read(cr, uid, [result], ['res_id'])[0]['res_id']
result = {}
if not id:
return result
result = act_obj.read(cr, uid, [id], context=context)[0]
result['target'] = 'current'
return result
project_compute_tasks()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kleientertainment/ds_mod_tools | pkg/win32/Python27/Lib/htmlentitydefs.py | 65 | 18327 | """HTML character entity references."""
# maps the HTML entity name to the Unicode codepoint
name2codepoint = {
'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1
'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1
'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1
'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1
'Alpha': 0x0391, # greek capital letter alpha, U+0391
'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1
'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1
'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1
'Beta': 0x0392, # greek capital letter beta, U+0392
'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1
'Chi': 0x03a7, # greek capital letter chi, U+03A7
'Dagger': 0x2021, # double dagger, U+2021 ISOpub
'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3
'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1
'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1
'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1
'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1
'Epsilon': 0x0395, # greek capital letter epsilon, U+0395
'Eta': 0x0397, # greek capital letter eta, U+0397
'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1
'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3
'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1
'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1
'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1
'Iota': 0x0399, # greek capital letter iota, U+0399
'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1
'Kappa': 0x039a, # greek capital letter kappa, U+039A
'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3
'Mu': 0x039c, # greek capital letter mu, U+039C
'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1
'Nu': 0x039d, # greek capital letter nu, U+039D
'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2
'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1
'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1
'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1
'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3
'Omicron': 0x039f, # greek capital letter omicron, U+039F
'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1
'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1
'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1
'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3
'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3
'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech
'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3
'Rho': 0x03a1, # greek capital letter rho, U+03A1
'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2
'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3
'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1
'Tau': 0x03a4, # greek capital letter tau, U+03A4
'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3
'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1
'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1
'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1
'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3
'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1
'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3
'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1
'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2
'Zeta': 0x0396, # greek capital letter zeta, U+0396
'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1
'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1
'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia
'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1
'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1
'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW
'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3
'amp': 0x0026, # ampersand, U+0026 ISOnum
'and': 0x2227, # logical and = wedge, U+2227 ISOtech
'ang': 0x2220, # angle, U+2220 ISOamso
'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1
'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr
'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1
'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1
'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW
'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3
'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum
'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub
'cap': 0x2229, # intersection = cap, U+2229 ISOtech
'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1
'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia
'cent': 0x00a2, # cent sign, U+00A2 ISOnum
'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3
'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub
'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub
'cong': 0x2245, # approximately equal to, U+2245 ISOtech
'copy': 0x00a9, # copyright sign, U+00A9 ISOnum
'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW
'cup': 0x222a, # union = cup, U+222A ISOtech
'curren': 0x00a4, # currency sign, U+00A4 ISOnum
'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa
'dagger': 0x2020, # dagger, U+2020 ISOpub
'darr': 0x2193, # downwards arrow, U+2193 ISOnum
'deg': 0x00b0, # degree sign, U+00B0 ISOnum
'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3
'diams': 0x2666, # black diamond suit, U+2666 ISOpub
'divide': 0x00f7, # division sign, U+00F7 ISOnum
'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1
'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1
'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1
'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso
'emsp': 0x2003, # em space, U+2003 ISOpub
'ensp': 0x2002, # en space, U+2002 ISOpub
'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3
'equiv': 0x2261, # identical to, U+2261 ISOtech
'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3
'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1
'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1
'euro': 0x20ac, # euro sign, U+20AC NEW
'exist': 0x2203, # there exists, U+2203 ISOtech
'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech
'forall': 0x2200, # for all, U+2200 ISOtech
'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum
'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum
'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum
'frasl': 0x2044, # fraction slash, U+2044 NEW
'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3
'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech
'gt': 0x003e, # greater-than sign, U+003E ISOnum
'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa
'harr': 0x2194, # left right arrow, U+2194 ISOamsa
'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub
'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub
'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1
'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1
'iexcl': 0x00a1, # inverted exclamation mark, U+00A1 ISOnum
'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1
'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso
'infin': 0x221e, # infinity, U+221E ISOtech
'int': 0x222b, # integral, U+222B ISOtech
'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3
'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum
'isin': 0x2208, # element of, U+2208 ISOtech
'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1
'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3
'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech
'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3
'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech
'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum
'larr': 0x2190, # leftwards arrow, U+2190 ISOnum
'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc
'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum
'le': 0x2264, # less-than or equal to, U+2264 ISOtech
'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc
'lowast': 0x2217, # asterisk operator, U+2217 ISOtech
'loz': 0x25ca, # lozenge, U+25CA ISOpub
'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070
'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed
'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum
'lt': 0x003c, # less-than sign, U+003C ISOnum
'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia
'mdash': 0x2014, # em dash, U+2014 ISOpub
'micro': 0x00b5, # micro sign, U+00B5 ISOnum
'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum
'minus': 0x2212, # minus sign, U+2212 ISOtech
'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3
'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech
'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum
'ndash': 0x2013, # en dash, U+2013 ISOpub
'ne': 0x2260, # not equal to, U+2260 ISOtech
'ni': 0x220b, # contains as member, U+220B ISOtech
'not': 0x00ac, # not sign, U+00AC ISOnum
'notin': 0x2209, # not an element of, U+2209 ISOtech
'nsub': 0x2284, # not a subset of, U+2284 ISOamsn
'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1
'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3
'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1
'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1
'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2
'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1
'oline': 0x203e, # overline = spacing overscore, U+203E NEW
'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3
'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW
'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb
'or': 0x2228, # logical or = vee, U+2228 ISOtech
'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum
'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum
'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1
'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1
'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb
'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1
'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum
'part': 0x2202, # partial differential, U+2202 ISOtech
'permil': 0x2030, # per mille sign, U+2030 ISOtech
'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech
'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3
'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3
'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3
'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum
'pound': 0x00a3, # pound sign, U+00A3 ISOnum
'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech
'prod': 0x220f, # n-ary product = product sign, U+220F ISOamsb
'prop': 0x221d, # proportional to, U+221D ISOtech
'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3
'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum
'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech
'radic': 0x221a, # square root = radical sign, U+221A ISOtech
'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech
'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum
'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum
'rceil': 0x2309, # right ceiling, U+2309 ISOamsc
'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum
'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso
'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum
'rfloor': 0x230b, # right floor, U+230B ISOamsc
'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3
'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070
'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed
'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum
'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW
'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2
'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb
'sect': 0x00a7, # section sign, U+00A7 ISOnum
'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum
'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3
'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3
'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech
'spades': 0x2660, # black spade suit, U+2660 ISOpub
'sub': 0x2282, # subset of, U+2282 ISOtech
'sube': 0x2286, # subset of or equal to, U+2286 ISOtech
'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb
'sup': 0x2283, # superset of, U+2283 ISOtech
'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum
'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum
'sup3': 0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum
'supe': 0x2287, # superset of or equal to, U+2287 ISOtech
'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1
'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3
'there4': 0x2234, # therefore, U+2234 ISOtech
'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3
'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW
'thinsp': 0x2009, # thin space, U+2009 ISOpub
'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1
'tilde': 0x02dc, # small tilde, U+02DC ISOdia
'times': 0x00d7, # multiplication sign, U+00D7 ISOnum
'trade': 0x2122, # trade mark sign, U+2122 ISOnum
'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa
'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1
'uarr': 0x2191, # upwards arrow, U+2191 ISOnum
'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1
'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1
'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia
'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW
'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3
'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1
'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso
'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3
'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1
'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum
'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1
'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3
'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070
'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070
}
# maps the Unicode codepoint to the HTML entity name
codepoint2name = {}
# maps the HTML entity name to the character
# (or a character reference if the character is outside the Latin-1 range)
entitydefs = {}
for (name, codepoint) in name2codepoint.iteritems():
codepoint2name[codepoint] = name
if codepoint <= 0xff:
entitydefs[name] = chr(codepoint)
else:
entitydefs[name] = '&#%d;' % codepoint
del name, codepoint
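# Illustrative lookups, all derived from the tables above:
#
#   name2codepoint['amp']    # -> 0x26
#   codepoint2name[0x00e9]   # -> 'eacute'
#   entitydefs['gt']         # -> '>'
#   entitydefs['alpha']      # -> '&#945;' (outside Latin-1, so a char reference)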
| mit |
skevy/django | tests/regressiontests/utils/text.py | 51 | 2200 | import unittest
from django.utils import text
class TestUtilsText(unittest.TestCase):
def test_truncate_words(self):
self.assertEqual(u'The quick brown fox jumped over the lazy dog.',
text.truncate_words(u'The quick brown fox jumped over the lazy dog.', 10))
self.assertEqual(u'The quick brown fox ...',
text.truncate_words('The quick brown fox jumped over the lazy dog.', 4))
self.assertEqual(u'The quick brown fox ....',
text.truncate_words('The quick brown fox jumped over the lazy dog.', 4, '....'))
def test_truncate_html_words(self):
self.assertEqual(u'<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 10))
self.assertEqual(u'<p><strong><em>The quick brown fox ...</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 4))
self.assertEqual(u'<p><strong><em>The quick brown fox ....</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 4, '....'))
self.assertEqual(u'<p><strong><em>The quick brown fox</em></strong></p>',
text.truncate_html_words('<p><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', 4, None))
def test_wrap(self):
digits = '1234 67 9'
self.assertEqual(text.wrap(digits, 100), u'1234 67 9')
self.assertEqual(text.wrap(digits, 9), u'1234 67 9')
self.assertEqual(text.wrap(digits, 8), u'1234 67\n9')
self.assertEqual(text.wrap('short\na long line', 7),
u'short\na long\nline')
self.assertEqual(text.wrap('do-not-break-long-words please? ok', 8),
u'do-not-break-long-words\nplease?\nok')
long_word = 'l%sng' % ('o' * 20)
self.assertEqual(text.wrap(long_word, 20), long_word)
self.assertEqual(text.wrap('a %s word' % long_word, 10),
u'a\n%s\nword' % long_word)
| bsd-3-clause |
fafaman/django | tests/db_functions/models.py | 245 | 1332 | """
Tests for built in Function expressions.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=50)
alias = models.CharField(max_length=50, null=True, blank=True)
goes_by = models.CharField(max_length=50, null=True, blank=True)
age = models.PositiveSmallIntegerField(default=30)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Article(models.Model):
authors = models.ManyToManyField(Author, related_name='articles')
title = models.CharField(max_length=50)
summary = models.CharField(max_length=200, null=True, blank=True)
text = models.TextField()
written = models.DateTimeField()
published = models.DateTimeField(null=True, blank=True)
updated = models.DateTimeField(null=True, blank=True)
views = models.PositiveIntegerField(default=0)
def __str__(self):
return self.title
@python_2_unicode_compatible
class Fan(models.Model):
name = models.CharField(max_length=50)
age = models.PositiveSmallIntegerField(default=30)
author = models.ForeignKey(Author, models.CASCADE, related_name='fans')
def __str__(self):
return self.name
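# A query sketch exercising these models with a built-in database function
# (Coalesce is one of the functions these tests target; field names are as
# defined above):
#
#   from django.db.models.functions import Coalesce
#   Author.objects.annotate(screen_name=Coalesce('alias', 'name'))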
| bsd-3-clause |
batermj/algorithm-challenger | code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Tools/pynche/TypeinViewer.py | 116 | 6102 | """TypeinViewer class.
The TypeinViewer is what you see at the lower right of the main Pynche
widget. It contains three text entry fields, one each for red, green, blue.
Input into these windows is highly constrained; it only allows you to enter
values that are legal for a color axis. This usually means 0-255 for decimal
input and 0x0 - 0xff for hex input.
You can toggle whether you want to view and input the values in either decimal
or hex by clicking on Hexadecimal. By clicking on Update while typing, the
color selection will be made on every change to the text field. Otherwise,
you must hit Return or Tab to select the color.
"""
from tkinter import *
class TypeinViewer:
def __init__(self, switchboard, master=None):
# non-gui ivars
self.__sb = switchboard
optiondb = switchboard.optiondb()
self.__hexp = BooleanVar()
self.__hexp.set(optiondb.get('HEXTYPE', 0))
self.__uwtyping = BooleanVar()
self.__uwtyping.set(optiondb.get('UPWHILETYPE', 0))
# create the gui
self.__frame = Frame(master, relief=RAISED, borderwidth=1)
self.__frame.grid(row=3, column=1, sticky='NSEW')
# Red
self.__xl = Label(self.__frame, text='Red:')
self.__xl.grid(row=0, column=0, sticky=E)
subframe = Frame(self.__frame)
subframe.grid(row=0, column=1)
self.__xox = Label(subframe, text='0x')
self.__xox.grid(row=0, column=0, sticky=E)
self.__xox['font'] = 'courier'
self.__x = Entry(subframe, width=3)
self.__x.grid(row=0, column=1)
self.__x.bindtags(self.__x.bindtags() + ('Normalize', 'Update'))
self.__x.bind_class('Normalize', '<Key>', self.__normalize)
self.__x.bind_class('Update' , '<Key>', self.__maybeupdate)
# Green
self.__yl = Label(self.__frame, text='Green:')
self.__yl.grid(row=1, column=0, sticky=E)
subframe = Frame(self.__frame)
subframe.grid(row=1, column=1)
self.__yox = Label(subframe, text='0x')
self.__yox.grid(row=0, column=0, sticky=E)
self.__yox['font'] = 'courier'
self.__y = Entry(subframe, width=3)
self.__y.grid(row=0, column=1)
self.__y.bindtags(self.__y.bindtags() + ('Normalize', 'Update'))
# Blue
self.__zl = Label(self.__frame, text='Blue:')
self.__zl.grid(row=2, column=0, sticky=E)
subframe = Frame(self.__frame)
subframe.grid(row=2, column=1)
self.__zox = Label(subframe, text='0x')
self.__zox.grid(row=0, column=0, sticky=E)
self.__zox['font'] = 'courier'
self.__z = Entry(subframe, width=3)
self.__z.grid(row=0, column=1)
self.__z.bindtags(self.__z.bindtags() + ('Normalize', 'Update'))
# Update while typing?
self.__uwt = Checkbutton(self.__frame,
text='Update while typing',
variable=self.__uwtyping)
self.__uwt.grid(row=3, column=0, columnspan=2, sticky=W)
# Hex/Dec
self.__hex = Checkbutton(self.__frame,
text='Hexadecimal',
variable=self.__hexp,
command=self.__togglehex)
self.__hex.grid(row=4, column=0, columnspan=2, sticky=W)
def __togglehex(self, event=None):
red, green, blue = self.__sb.current_rgb()
if self.__hexp.get():
label = '0x'
else:
label = ' '
self.__xox['text'] = label
self.__yox['text'] = label
self.__zox['text'] = label
self.update_yourself(red, green, blue)
def __normalize(self, event=None):
ew = event.widget
contents = ew.get()
icursor = ew.index(INSERT)
if contents and contents[0] in 'xX' and self.__hexp.get():
contents = '0' + contents
# Figure out the contents in the current base.
try:
if self.__hexp.get():
v = int(contents, 16)
else:
v = int(contents)
except ValueError:
v = None
# If value is not legal, or empty, delete the last character inserted
# and ring the bell. Don't ring the bell if the field is empty (it'll
        # just equal zero).
if v is None:
pass
elif v < 0 or v > 255:
i = ew.index(INSERT)
if event.char:
contents = contents[:i-1] + contents[i:]
icursor -= 1
ew.bell()
elif self.__hexp.get():
contents = hex(v)[2:]
else:
            contents = str(v)
ew.delete(0, END)
ew.insert(0, contents)
ew.icursor(icursor)
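    # Behaviour sketch for __normalize: in hex mode a keystroke that would
    # push the value past 0xff is dropped and the bell rings; legal values
    # are re-rendered via hex(v)[2:]. In decimal mode the bound is 255.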
def __maybeupdate(self, event=None):
if self.__uwtyping.get() or event.keysym in ('Return', 'Tab'):
self.__update(event)
def __update(self, event=None):
redstr = self.__x.get() or '0'
greenstr = self.__y.get() or '0'
bluestr = self.__z.get() or '0'
if self.__hexp.get():
base = 16
else:
base = 10
red, green, blue = [int(x, base) for x in (redstr, greenstr, bluestr)]
self.__sb.update_views(red, green, blue)
def update_yourself(self, red, green, blue):
if self.__hexp.get():
sred, sgreen, sblue = [hex(x)[2:] for x in (red, green, blue)]
else:
sred, sgreen, sblue = red, green, blue
x, y, z = self.__x, self.__y, self.__z
xicursor = x.index(INSERT)
yicursor = y.index(INSERT)
zicursor = z.index(INSERT)
x.delete(0, END)
y.delete(0, END)
z.delete(0, END)
x.insert(0, sred)
y.insert(0, sgreen)
z.insert(0, sblue)
x.icursor(xicursor)
y.icursor(yicursor)
z.icursor(zicursor)
def hexp_var(self):
return self.__hexp
def save_options(self, optiondb):
optiondb['HEXTYPE'] = self.__hexp.get()
optiondb['UPWHILETYPE'] = self.__uwtyping.get()
| apache-2.0 |
ThePletch/ansible | lib/ansible/modules/network/ipinfoio_facts.py | 39 | 4067 | #!/usr/bin/python
#
# (c) 2016, Aleksei Kostiuk <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ipinfoio_facts
short_description: "Retrieve IP geolocation facts of a host's IP address"
description:
- "Gather IP geolocation facts of a host's IP address using ipinfo.io API"
version_added: "2.3"
author: "Aleksei Kostiuk (@akostyuk)"
options:
timeout:
description:
- HTTP connection timeout in seconds
required: false
default: 10
http_agent:
description:
- Set http user agent
required: false
default: "ansible-ipinfoio-module/0.0.1"
notes:
- "Check http://ipinfo.io/ for more information"
'''
EXAMPLES = '''
# Retrieve geolocation data of a host's IP address
- name: get IP geolocation data
ipinfoio_facts:
'''
RETURN = '''
ansible_facts:
description: "Dictionary of ip geolocation facts for a host's IP address"
returned: changed
type: dictionary
contains:
ip:
description: "Public IP address of a host"
type: string
sample: "8.8.8.8"
hostname:
description: Domain name
type: string
sample: "google-public-dns-a.google.com"
country:
description: ISO 3166-1 alpha-2 country code
type: string
sample: "US"
region:
description: State or province name
type: string
sample: "California"
city:
description: City name
type: string
sample: "Mountain View"
loc:
description: Latitude and Longitude of the location
type: string
sample: "37.3860,-122.0838"
org:
description: "organization's name"
type: string
sample: "AS3356 Level 3 Communications, Inc."
postal:
description: Postal code
type: string
sample: "94035"
'''
USER_AGENT = 'ansible-ipinfoio-module/0.0.1'
class IpinfoioFacts(object):
def __init__(self, module):
self.url = 'https://ipinfo.io/json'
self.timeout = module.params.get('timeout')
self.module = module
def get_geo_data(self):
response, info = fetch_url(self.module, self.url, force=True, # NOQA
timeout=self.timeout)
try:
            assert info['status'] == 200
except AssertionError:
self.module.fail_json(msg='Could not get {} page, '
'check for connectivity!'.format(self.url))
else:
try:
content = response.read()
result = self.module.from_json(content.decode('utf8'))
except ValueError:
self.module.fail_json(
msg='Failed to parse the ipinfo.io response: '
'{0} {1}'.format(self.url, content))
else:
return result
def main():
module = AnsibleModule( # NOQA
argument_spec=dict(
http_agent=dict(default=USER_AGENT),
timeout=dict(type='int', default=10),
),
supports_check_mode=True,
)
ipinfoio = IpinfoioFacts(module)
ipinfoio_result = dict(
changed=False, ansible_facts=ipinfoio.get_geo_data())
module.exit_json(**ipinfoio_result)
from ansible.module_utils.basic import * # NOQA
from ansible.module_utils.urls import * # NOQA
if __name__ == '__main__':
main()
| gpl-3.0 |
hiepthai/django-activity-stream | actstream/migrations/0001_initial.py | 8 | 7969 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from actstream.compat import user_model_label
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Follow'
db.create_table('actstream_follow', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[user_model_label])),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('actstream', ['Follow'])
# Adding unique constraint on 'Follow', fields ['user', 'content_type', 'object_id']
db.create_unique('actstream_follow', ['user_id', 'content_type_id', 'object_id'])
# Adding model 'Action'
db.create_table('actstream_action', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('actor_content_type', self.gf('django.db.models.fields.related.ForeignKey')(related_name='actor', to=orm['contenttypes.ContentType'])),
('actor_object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('verb', self.gf('django.db.models.fields.CharField')(max_length=255)),
('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('target_content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='target', null=True, to=orm['contenttypes.ContentType'])),
('target_object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('action_object_content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='action_object', null=True, to=orm['contenttypes.ContentType'])),
('action_object_object_id', self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('public', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal('actstream', ['Action'])
def backwards(self, orm):
# Removing unique constraint on 'Follow', fields ['user', 'content_type', 'object_id']
db.delete_unique('actstream_follow', ['user_id', 'content_type_id', 'object_id'])
# Deleting model 'Follow'
db.delete_table('actstream_follow')
# Deleting model 'Action'
db.delete_table('actstream_action')
models = {
'actstream.action': {
'Meta': {'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'actstream.follow': {
'Meta': {'unique_together': "(('user', 'content_type', 'object_id'),)", 'object_name': 'Follow'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_model_label})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['actstream']
| bsd-3-clause |
jbaiter/spreads | spreadsplug/intervaltrigger.py | 2 | 2100 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Johannes Baiter <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import threading
import time
from spreads.config import OptionTemplate
from spreads.plugin import HookPlugin, TriggerHooksMixin
logger = logging.getLogger('spreadsplug.intervaltrigger')
class IntervalTrigger(HookPlugin, TriggerHooksMixin):
__name__ = 'intervaltrigger'
_loop_thread = None
_exit_event = None
@classmethod
def configuration_template(cls):
return {'interval': OptionTemplate(5.0, "Interval between captures"
" (in seconds)")}
def start_trigger_loop(self, capture_callback):
logger.debug("Starting event loop")
self._exit_event = threading.Event()
self._loop_thread = threading.Thread(target=self._trigger_loop,
args=(capture_callback, ))
self._loop_thread.start()
def stop_trigger_loop(self):
logger.debug("Stopping event loop")
self._exit_event.set()
self._loop_thread.join()
def _trigger_loop(self, capture_func):
interval = self.config['interval'].get(float)
while True:
sleep_time = 0
while sleep_time < interval:
if self._exit_event.is_set():
return
time.sleep(0.01)
sleep_time += 0.01
capture_func()
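# Hedged usage sketch, not part of the plugin itself: driving the trigger
# loop with a stand-in capture callback. How a configured `plugin` instance
# is obtained (spreads' plugin machinery) is assumed, not shown here.
def _example_trigger_run(plugin, duration=15):
    """Illustrative only: run the interval trigger for `duration` seconds."""
    capture_times = []
    # start_trigger_loop() spawns the background thread defined above
    plugin.start_trigger_loop(lambda: capture_times.append(time.time()))
    time.sleep(duration)
    # stop_trigger_loop() sets the exit event and joins the thread
    plugin.stop_trigger_loop()
    return capture_times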
| agpl-3.0 |
kumar303/rockit | vendor-local/boto/ec2/instance.py | 2 | 17445 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Instance
"""
import boto
from boto.ec2.ec2object import EC2Object, TaggedEC2Object
from boto.resultset import ResultSet
from boto.ec2.address import Address
from boto.ec2.blockdevicemapping import BlockDeviceMapping
from boto.ec2.image import ProductCodes
from boto.ec2.networkinterface import NetworkInterface
from boto.ec2.group import Group
import base64
class Reservation(EC2Object):
"""
Represents a Reservation response object.
:ivar id: The unique ID of the Reservation.
:ivar owner_id: The unique ID of the owner of the Reservation.
:ivar groups: A list of Group objects representing the security
groups associated with launched instances.
:ivar instances: A list of Instance objects launched in this
Reservation.
"""
def __init__(self, connection=None):
EC2Object.__init__(self, connection)
self.id = None
self.owner_id = None
self.groups = []
self.instances = []
def __repr__(self):
return 'Reservation:%s' % self.id
def startElement(self, name, attrs, connection):
if name == 'instancesSet':
self.instances = ResultSet([('item', Instance)])
return self.instances
elif name == 'groupSet':
self.groups = ResultSet([('item', Group)])
return self.groups
else:
return None
def endElement(self, name, value, connection):
if name == 'reservationId':
self.id = value
elif name == 'ownerId':
self.owner_id = value
else:
setattr(self, name, value)
def stop_all(self):
for instance in self.instances:
instance.stop()
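# Hedged usage sketch, not part of boto: walking the Reservation objects
# returned by an assumed EC2Connection `conn` (e.g. from boto.ec2.connection).
def _example_running_instance_ids(conn):
    """Illustrative only: collect the IDs of all running instances."""
    ids = []
    for reservation in conn.get_all_instances():
        for instance in reservation.instances:
            if instance.state == 'running':
                ids.append(instance.id)
    return ids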
class Instance(TaggedEC2Object):
"""
Represents an instance.
:ivar id: The unique ID of the Instance.
:ivar groups: A list of Group objects representing the security
groups associated with the instance.
:ivar public_dns_name: The public dns name of the instance.
:ivar private_dns_name: The private dns name of the instance.
    :ivar state: The string representation of the instance's current state.
    :ivar state_code: An integer representation of the instance's current state.
:ivar key_name: The name of the SSH key associated with the instance.
:ivar instance_type: The type of instance (e.g. m1.small).
:ivar launch_time: The time the instance was launched.
:ivar image_id: The ID of the AMI used to launch this instance.
:ivar placement: The availability zone in which the instance is running.
:ivar kernel: The kernel associated with the instance.
:ivar ramdisk: The ramdisk associated with the instance.
:ivar architecture: The architecture of the image (i386|x86_64).
:ivar hypervisor: The hypervisor used.
:ivar virtualization_type: The type of virtualization used.
:ivar product_codes: A list of product codes associated with this instance.
    :ivar ami_launch_index: This instance's position within its launch group.
:ivar monitored: A boolean indicating whether monitoring is enabled or not.
:ivar spot_instance_request_id: The ID of the spot instance request
if this is a spot instance.
:ivar subnet_id: The VPC Subnet ID, if running in VPC.
:ivar vpc_id: The VPC ID, if running in VPC.
:ivar private_ip_address: The private IP address of the instance.
:ivar ip_address: The public IP address of the instance.
:ivar platform: Platform of the instance (e.g. Windows)
:ivar root_device_name: The name of the root device.
:ivar root_device_type: The root device type (ebs|instance-store).
:ivar block_device_mapping: The Block Device Mapping for the instance.
:ivar state_reason: The reason for the most recent state transition.
:ivar groups: List of security Groups associated with the instance.
:ivar interfaces: List of Elastic Network Interfaces associated with
this instance.
"""
def __init__(self, connection=None):
TaggedEC2Object.__init__(self, connection)
self.id = None
self.dns_name = None
self.public_dns_name = None
self.private_dns_name = None
self.state = None
self.state_code = None
self.key_name = None
self.shutdown_state = None
self.previous_state = None
self.instance_type = None
self.launch_time = None
self.image_id = None
self.placement = None
self.kernel = None
self.ramdisk = None
self.product_codes = ProductCodes()
self.ami_launch_index = None
self.monitored = False
self.spot_instance_request_id = None
self.subnet_id = None
self.vpc_id = None
self.private_ip_address = None
self.ip_address = None
self.requester_id = None
self._in_monitoring_element = False
self.persistent = False
self.root_device_name = None
self.root_device_type = None
self.block_device_mapping = None
self.state_reason = None
self.group_name = None
self.client_token = None
self.eventsSet = None
self.groups = []
self.platform = None
self.interfaces = []
self.hypervisor = None
self.virtualization_type = None
self.architecture = None
def __repr__(self):
return 'Instance:%s' % self.id
def startElement(self, name, attrs, connection):
retval = TaggedEC2Object.startElement(self, name, attrs, connection)
if retval is not None:
return retval
if name == 'monitoring':
self._in_monitoring_element = True
elif name == 'blockDeviceMapping':
self.block_device_mapping = BlockDeviceMapping()
return self.block_device_mapping
elif name == 'productCodes':
return self.product_codes
elif name == 'stateReason':
self.state_reason = SubParse('stateReason')
return self.state_reason
elif name == 'groupSet':
self.groups = ResultSet([('item', Group)])
return self.groups
elif name == "eventsSet":
self.eventsSet = SubParse('eventsSet')
return self.eventsSet
elif name == 'networkInterfaceSet':
            self.interfaces = ResultSet([('item', NetworkInterface)])
            # return the ResultSet so the XML parser delegates the child
            # elements to it; without this, `interfaces` stays empty
            return self.interfaces
        return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.id = value
elif name == 'imageId':
self.image_id = value
elif name == 'dnsName' or name == 'publicDnsName':
self.dns_name = value # backwards compatibility
self.public_dns_name = value
elif name == 'privateDnsName':
self.private_dns_name = value
elif name == 'keyName':
self.key_name = value
elif name == 'amiLaunchIndex':
self.ami_launch_index = value
elif name == 'shutdownState':
self.shutdown_state = value
elif name == 'previousState':
self.previous_state = value
elif name == 'name':
self.state = value
elif name == 'code':
try:
self.state_code = int(value)
except ValueError:
boto.log.warning('Error converting code (%s) to int' % value)
self.state_code = value
elif name == 'instanceType':
self.instance_type = value
elif name == 'rootDeviceName':
self.root_device_name = value
elif name == 'rootDeviceType':
self.root_device_type = value
elif name == 'launchTime':
self.launch_time = value
elif name == 'availabilityZone':
self.placement = value
elif name == 'platform':
self.platform = value
elif name == 'placement':
pass
elif name == 'kernelId':
self.kernel = value
elif name == 'ramdiskId':
self.ramdisk = value
elif name == 'state':
if self._in_monitoring_element:
if value == 'enabled':
self.monitored = True
self._in_monitoring_element = False
elif name == 'spotInstanceRequestId':
self.spot_instance_request_id = value
elif name == 'subnetId':
self.subnet_id = value
elif name == 'vpcId':
self.vpc_id = value
elif name == 'privateIpAddress':
self.private_ip_address = value
elif name == 'ipAddress':
self.ip_address = value
elif name == 'requesterId':
self.requester_id = value
elif name == 'persistent':
if value == 'true':
self.persistent = True
else:
self.persistent = False
elif name == 'groupName':
if self._in_monitoring_element:
self.group_name = value
elif name == 'clientToken':
self.client_token = value
elif name == "eventsSet":
self.events = value
elif name == 'hypervisor':
self.hypervisor = value
elif name == 'virtualizationType':
self.virtualization_type = value
elif name == 'architecture':
self.architecture = value
else:
setattr(self, name, value)
def _update(self, updated):
self.__dict__.update(updated.__dict__)
def update(self, validate=False):
"""
Update the instance's state information by making a call to fetch
the current instance attributes from the service.
:type validate: bool
:param validate: By default, if EC2 returns no data about the
instance the update method returns quietly. If
the validate param is True, however, it will
raise a ValueError exception if no data is
returned from EC2.
"""
rs = self.connection.get_all_instances([self.id])
if len(rs) > 0:
r = rs[0]
for i in r.instances:
if i.id == self.id:
self._update(i)
elif validate:
raise ValueError('%s is not a valid Instance ID' % self.id)
return self.state
def terminate(self):
"""
Terminate the instance
"""
rs = self.connection.terminate_instances([self.id])
if len(rs) > 0:
self._update(rs[0])
def stop(self, force=False):
"""
Stop the instance
:type force: bool
:param force: Forces the instance to stop
:rtype: list
:return: A list of the instances stopped
"""
rs = self.connection.stop_instances([self.id], force)
if len(rs) > 0:
self._update(rs[0])
def start(self):
"""
Start the instance.
"""
rs = self.connection.start_instances([self.id])
if len(rs) > 0:
self._update(rs[0])
def reboot(self):
return self.connection.reboot_instances([self.id])
def get_console_output(self):
"""
Retrieves the console output for the instance.
:rtype: :class:`boto.ec2.instance.ConsoleOutput`
:return: The console output as a ConsoleOutput object
"""
return self.connection.get_console_output(self.id)
def confirm_product(self, product_code):
return self.connection.confirm_product_instance(self.id, product_code)
def use_ip(self, ip_address):
if isinstance(ip_address, Address):
ip_address = ip_address.public_ip
return self.connection.associate_address(self.id, ip_address)
def monitor(self):
return self.connection.monitor_instance(self.id)
def unmonitor(self):
return self.connection.unmonitor_instance(self.id)
def get_attribute(self, attribute):
"""
Gets an attribute from this instance.
:type attribute: string
:param attribute: The attribute you need information about
Valid choices are:
instanceType|kernel|ramdisk|userData|
disableApiTermination|
instanceInitiatedShutdownBehavior|
rootDeviceName|blockDeviceMapping
:rtype: :class:`boto.ec2.image.InstanceAttribute`
:return: An InstanceAttribute object representing the value of the
attribute requested
"""
return self.connection.get_instance_attribute(self.id, attribute)
def modify_attribute(self, attribute, value):
"""
Changes an attribute of this instance
:type attribute: string
:param attribute: The attribute you wish to change.
AttributeName - Expected value (default)
instanceType - A valid instance type (m1.small)
kernel - Kernel ID (None)
ramdisk - Ramdisk ID (None)
userData - Base64 encoded String (None)
disableApiTermination - Boolean (true)
instanceInitiatedShutdownBehavior - stop|terminate
rootDeviceName - device name (None)
:type value: string
:param value: The new value for the attribute
:rtype: bool
:return: Whether the operation succeeded or not
"""
return self.connection.modify_instance_attribute(self.id, attribute,
value)
def reset_attribute(self, attribute):
"""
Resets an attribute of this instance to its default value.
:type attribute: string
:param attribute: The attribute to reset. Valid values are:
kernel|ramdisk
:rtype: bool
:return: Whether the operation succeeded or not
"""
return self.connection.reset_instance_attribute(self.id, attribute)
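# Hedged usage sketch, not part of boto: polling update() until the instance
# is running, then reading one of the attributes listed in get_attribute()
# above. `conn` is an assumed EC2Connection.
def _example_wait_then_inspect(conn, instance_id):
    """Illustrative only: wait for 'running', then fetch instanceType."""
    import time
    instance = conn.get_all_instances([instance_id])[0].instances[0]
    while instance.update(validate=True) != 'running':
        time.sleep(5)
    # get_attribute() returns an InstanceAttribute (a dict subclass)
    return instance.get_attribute('instanceType').get('instanceType')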
class ConsoleOutput:
def __init__(self, parent=None):
self.parent = parent
self.instance_id = None
self.timestamp = None
self.output = None
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.instance_id = value
elif name == 'timestamp':
self.timestamp = value
elif name == 'output':
self.output = base64.b64decode(value)
else:
setattr(self, name, value)
class InstanceAttribute(dict):
ValidValues = ['instanceType', 'kernel', 'ramdisk', 'userData',
'disableApiTermination', 'instanceInitiatedShutdownBehavior',
'rootDeviceName', 'blockDeviceMapping', 'sourceDestCheck',
'groupSet']
def __init__(self, parent=None):
dict.__init__(self)
self.instance_id = None
self.request_id = None
self._current_value = None
def startElement(self, name, attrs, connection):
if name == 'blockDeviceMapping':
self[name] = BlockDeviceMapping()
return self[name]
elif name == 'groupSet':
self[name] = ResultSet([('item', Group)])
return self[name]
else:
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.instance_id = value
elif name == 'requestId':
self.request_id = value
elif name == 'value':
self._current_value = value
elif name in self.ValidValues:
self[name] = self._current_value
class SubParse(dict):
def __init__(self, section, parent=None):
dict.__init__(self)
self.section = section
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name != self.section:
self[name] = value
| bsd-3-clause |
jsquare/hikeplanner | hikes/views.py | 1 | 4162 | from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from hikes.models import Hike
from django.shortcuts import render_to_response, render
from django.contrib.gis import forms
from django.contrib import auth
from django.contrib.auth.forms import UserCreationForm
from django.core.urlresolvers import reverse
from urllib import urlencode
from django.contrib.gis.measure import D
from django.contrib.gis.geos import fromstr, Point
from django.template import RequestContext
from django.core.context_processors import csrf
# Create your views here.
class SearchForm(forms.Form):
start_location = forms.CharField()
start_latitude = forms.FloatField(widget=forms.HiddenInput())
start_longitude = forms.FloatField(widget=forms.HiddenInput())
min_radius = forms.IntegerField(widget=forms.HiddenInput(),initial=0)
max_radius = forms.IntegerField(widget=forms.HiddenInput(),initial=1)
min_length = forms.IntegerField(widget=forms.HiddenInput(),initial=1)
max_length = forms.IntegerField(widget=forms.HiddenInput(),initial=2)
min_terrain = forms.IntegerField(widget=forms.HiddenInput(),initial=0)
max_terrain = forms.IntegerField(widget=forms.HiddenInput(), initial=1)
def home(request):
if request.GET:
form = SearchForm(request.GET)
else:
form = SearchForm()
context = {
'form' : form
}
    return render_to_response('search.html', context,
                              context_instance=RequestContext(request))
def results(request):
form = SearchForm(request.GET)
if not form.is_valid():
url = reverse('home')
params = urlencode(request.GET)
return HttpResponseRedirect('%s?%s' % (url,params))
# Request hikes from db within min and max day limits
min_days = form.cleaned_data['min_length'] # TODO: change db fields to be min/max length instead of days. TODO: convert 'lengths' ==> days
max_days = form.cleaned_data['max_length']
radius = form.cleaned_data['max_radius'] # TODO: support min radius
start_latitude = form.cleaned_data['start_latitude']
start_longitude = form.cleaned_data['start_longitude']
start_location = Point(start_longitude,start_latitude)
hike_list = Hike.objects.filter(days__gte=min_days, days__lte=max_days,location__distance_lt=(start_location, D(km=radius)))
context = {
'hike_list' : hike_list,
'page_title' : 'Hike Results'
}
hike_str = "Here are all the hikes within your limits: {}".format([hike.__unicode__() for hike in hike_list])
return render_to_response('results.html', context)
def hike_detail(request, hike_id, slug=''):
'''
The general information page about a hike.
@param slug: optional, ignored (allows StackOverflow-style URL)
'''
try:
hike = Hike.objects.get(id=hike_id)
except Hike.DoesNotExist:
return HttpResponseNotFound() # TODO
context = {
'hike': hike,
'page_title': hike.name,
}
return render_to_response('hike_detail.html', context)
#Gets called when a user submits login info. Authenticates and redirects user.
def login(request):
username = request.POST.get('username', '')
password = request.POST.get('password', '')
user = auth.authenticate(username=username, password=password)
if user is not None and user.is_active:
        #Verified correct password, user is marked as active, so log them in
auth.login(request, user)
#Redirect to success page
return HttpResponseRedirect("/account/loggedin")
else:
#Show error page
return HttpResponseRedirect("/account/invalid")
#Gets called when user clicks on logout
def logout(request):
auth.logout(request)
    #Redirect to success page
return HttpResponseRedirect("/")
def register(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect("/")
else:
args = {}
args.update(csrf(request))
args['form'] = UserCreationForm()
return render_to_response('register.html', args)
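# Hedged wiring sketch (an assumption, not part of this file): a urls.py
# matching the reverse('home') call and the hike_detail signature above
# might look like this under the Django version these imports target:
#
#     from django.conf.urls import patterns, url
#
#     urlpatterns = patterns('hikes.views',
#         url(r'^$', 'home', name='home'),
#         url(r'^results/$', 'results', name='results'),
#         url(r'^hikes/(?P<hike_id>\d+)(?:/(?P<slug>[\w-]+))?/$',
#             'hike_detail', name='hike_detail'),
#     )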
| gpl-2.0 |
da-nrw/DNSCore | 3rdParty/fido/fido/argparselocal.py | 6 | 87524 | # -*- coding: utf-8 -*-
# Copyright © 2006-2009 Steven J. Bethard <[email protected]>.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
__version__ = '1.1'
__all__ = [
'ArgumentParser',
'ArgumentError',
'Namespace',
'Action',
'FileType',
'HelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
'ArgumentDefaultsHelpFormatter',
]
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
try:
_set = set
except NameError:
from sets import Set as _set
try:
_basestring = basestring
except NameError:
_basestring = str
try:
_sorted = sorted
except NameError:
def _sorted(iterable, reverse=False):
result = list(iterable)
result.sort()
if reverse:
result.reverse()
return result
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
# silence Python 2.6 buggy warnings about Exception.message
if _sys.version_info[:2] == (2, 6):
import warnings
warnings.filterwarnings(
action='ignore',
message='BaseException.message has been deprecated as of Python 2.6',
category=DeprecationWarning,
module='argparse')
SUPPRESS = '==SUPPRESS=='
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder(object):
"""Abstract base class that provides __repr__.
The __repr__ method returns a string in the format::
ClassName(attr=name, attr=name, ...)
The attributes are determined either by a class-level attribute,
'_kwarg_names', or by inspecting the instance __dict__.
"""
def __repr__(self):
type_name = type(self).__name__
arg_strings = []
for arg in self._get_args():
arg_strings.append(repr(arg))
for name, value in self._get_kwargs():
arg_strings.append('%s=%r' % (name, value))
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
return _sorted(self.__dict__.items())
def _get_args(self):
return []
def _ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
# ===============
# Formatting Help
# ===============
class HelpFormatter(object):
"""Formatter for generating usage messages and argument help strings.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def __init__(self,
prog,
indent_increment=2,
max_help_position=24,
width=None):
# default setting for width
if width is None:
try:
width = int(_os.environ['COLUMNS'])
except (KeyError, ValueError):
width = 80
width -= 2
self._prog = prog
self._indent_increment = indent_increment
self._max_help_position = max_help_position
self._width = width
self._current_indent = 0
self._level = 0
self._action_max_length = 0
self._root_section = self._Section(self, None)
self._current_section = self._root_section
self._whitespace_matcher = _re.compile(r'\s+')
self._long_break_matcher = _re.compile(r'\n\n\n+')
# ===============================
# Section and indentation methods
# ===============================
def _indent(self):
self._current_indent += self._indent_increment
self._level += 1
def _dedent(self):
self._current_indent -= self._indent_increment
assert self._current_indent >= 0, 'Indent decreased below 0.'
self._level -= 1
class _Section(object):
def __init__(self, formatter, parent, heading=None):
self.formatter = formatter
self.parent = parent
self.heading = heading
self.items = []
def format_help(self):
# format the indented section
if self.parent is not None:
self.formatter._indent()
join = self.formatter._join_parts
item_help = join([func(*args) for func, args in self.items])
if self.parent is not None:
self.formatter._dedent()
# return nothing if the section was empty
if not item_help:
return ''
# add the heading if the section was non-empty
if self.heading is not SUPPRESS and self.heading is not None:
current_indent = self.formatter._current_indent
heading = '%*s%s:\n' % (current_indent, '', self.heading)
else:
heading = ''
# join the section-initial newline, the heading and the help
return join(['\n', heading, item_help, '\n'])
def _add_item(self, func, args):
self._current_section.items.append((func, args))
# ========================
# Message building methods
# ========================
def start_section(self, heading):
self._indent()
section = self._Section(self, self._current_section, heading)
self._add_item(section.format_help, [])
self._current_section = section
def end_section(self):
self._current_section = self._current_section.parent
self._dedent()
def add_text(self, text):
if text is not SUPPRESS and text is not None:
self._add_item(self._format_text, [text])
def add_usage(self, usage, actions, groups, prefix=None):
if usage is not SUPPRESS:
args = usage, actions, groups, prefix
self._add_item(self._format_usage, args)
def add_argument(self, action):
if action.help is not SUPPRESS:
# find all invocations
get_invocation = self._format_action_invocation
invocations = [get_invocation(action)]
for subaction in self._iter_indented_subactions(action):
invocations.append(get_invocation(subaction))
# update the maximum item length
invocation_length = max([len(s) for s in invocations])
action_length = invocation_length + self._current_indent
self._action_max_length = max(self._action_max_length,
action_length)
# add the item to the list
self._add_item(self._format_action, [action])
def add_arguments(self, actions):
for action in actions:
self.add_argument(action)
# =======================
# Help-formatting methods
# =======================
def format_help(self):
help = self._root_section.format_help()
if help:
help = self._long_break_matcher.sub('\n\n', help)
help = help.strip('\n') + '\n'
return help
def _join_parts(self, part_strings):
return ''.join([part
for part in part_strings
if part and part is not SUPPRESS])
def _format_usage(self, usage, actions, groups, prefix):
if prefix is None:
prefix = _('usage: ')
# if usage is specified, use that
if usage is not None:
usage = usage % dict(prog=self._prog)
# if no optionals or positionals are available, usage is just prog
elif usage is None and not actions:
usage = '%(prog)s' % dict(prog=self._prog)
# if optionals and positionals are available, calculate usage
elif usage is None:
prog = '%(prog)s' % dict(prog=self._prog)
# split optionals from positionals
optionals = []
positionals = []
for action in actions:
if action.option_strings:
optionals.append(action)
else:
positionals.append(action)
# build full usage string
format = self._format_actions_usage
action_usage = format(optionals + positionals, groups)
usage = ' '.join([s for s in [prog, action_usage] if s])
# wrap the usage parts if it's too long
text_width = self._width - self._current_indent
if len(prefix) + len(usage) > text_width:
# break usage into wrappable parts
part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
opt_usage = format(optionals, groups)
pos_usage = format(positionals, groups)
opt_parts = _re.findall(part_regexp, opt_usage)
pos_parts = _re.findall(part_regexp, pos_usage)
assert ' '.join(opt_parts) == opt_usage
assert ' '.join(pos_parts) == pos_usage
# helper for wrapping lines
def get_lines(parts, indent, prefix=None):
lines = []
line = []
if prefix is not None:
line_len = len(prefix) - 1
else:
line_len = len(indent) - 1
for part in parts:
if line_len + 1 + len(part) > text_width:
lines.append(indent + ' '.join(line))
line = []
line_len = len(indent) - 1
line.append(part)
line_len += len(part) + 1
if line:
lines.append(indent + ' '.join(line))
if prefix is not None:
lines[0] = lines[0][len(indent):]
return lines
# if prog is short, follow it with optionals or positionals
if len(prefix) + len(prog) <= 0.75 * text_width:
indent = ' ' * (len(prefix) + len(prog) + 1)
if opt_parts:
lines = get_lines([prog] + opt_parts, indent, prefix)
lines.extend(get_lines(pos_parts, indent))
elif pos_parts:
lines = get_lines([prog] + pos_parts, indent, prefix)
else:
lines = [prog]
# if prog is long, put it on its own line
else:
indent = ' ' * len(prefix)
parts = opt_parts + pos_parts
lines = get_lines(parts, indent)
if len(lines) > 1:
lines = []
lines.extend(get_lines(opt_parts, indent))
lines.extend(get_lines(pos_parts, indent))
lines = [prog] + lines
# join lines into usage
usage = '\n'.join(lines)
# prefix with 'usage:'
return '%s%s\n\n' % (prefix, usage)
def _format_actions_usage(self, actions, groups):
# find group indices and identify actions in groups
group_actions = _set()
inserts = {}
for group in groups:
try:
start = actions.index(group._group_actions[0])
except ValueError:
continue
else:
end = start + len(group._group_actions)
if actions[start:end] == group._group_actions:
for action in group._group_actions:
group_actions.add(action)
if not group.required:
inserts[start] = '['
inserts[end] = ']'
else:
inserts[start] = '('
inserts[end] = ')'
for i in range(start + 1, end):
inserts[i] = '|'
# collect all actions format strings
parts = []
for i, action in enumerate(actions):
# suppressed arguments are marked with None
# remove | separators for suppressed arguments
if action.help is SUPPRESS:
parts.append(None)
if inserts.get(i) == '|':
inserts.pop(i)
elif inserts.get(i + 1) == '|':
inserts.pop(i + 1)
# produce all arg strings
elif not action.option_strings:
part = self._format_args(action, action.dest)
# if it's in a group, strip the outer []
if action in group_actions:
if part[0] == '[' and part[-1] == ']':
part = part[1:-1]
# add the action string to the list
parts.append(part)
# produce the first way to invoke the option in brackets
else:
option_string = action.option_strings[0]
# if the Optional doesn't take a value, format is:
# -s or --long
if action.nargs == 0:
part = '%s' % option_string
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
part = '%s %s' % (option_string, args_string)
# make it look optional if it's not required or in a group
if not action.required and action not in group_actions:
part = '[%s]' % part
# add the action string to the list
parts.append(part)
# insert things at the necessary indices
for i in _sorted(inserts, reverse=True):
parts[i:i] = [inserts[i]]
# join all the action items with spaces
text = ' '.join([item for item in parts if item is not None])
# clean up separators for mutually exclusive groups
open = r'[\[(]'
close = r'[\])]'
text = _re.sub(r'(%s) ' % open, r'\1', text)
text = _re.sub(r' (%s)' % close, r'\1', text)
text = _re.sub(r'%s *%s' % (open, close), r'', text)
text = _re.sub(r'\(([^|]*)\)', r'\1', text)
text = text.strip()
# return the text
return text
def _format_text(self, text):
if '%(prog)' in text:
text = text % dict(prog=self._prog)
text_width = self._width - self._current_indent
indent = ' ' * self._current_indent
return self._fill_text(text, text_width, indent) + '\n\n'
def _format_action(self, action):
# determine the required width and the entry label
help_position = min(self._action_max_length + 2,
self._max_help_position)
help_width = self._width - help_position
action_width = help_position - self._current_indent - 2
action_header = self._format_action_invocation(action)
        # no help; start on same line and add a final newline
if not action.help:
tup = self._current_indent, '', action_header
action_header = '%*s%s\n' % tup
# short action name; start on the same line and pad two spaces
elif len(action_header) <= action_width:
tup = self._current_indent, '', action_width, action_header
action_header = '%*s%-*s ' % tup
indent_first = 0
# long action name; start on the next line
else:
tup = self._current_indent, '', action_header
action_header = '%*s%s\n' % tup
indent_first = help_position
# collect the pieces of the action help
parts = [action_header]
# if there was help for the action, add lines of help text
if action.help:
help_text = self._expand_help(action)
help_lines = self._split_lines(help_text, help_width)
parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
for line in help_lines[1:]:
parts.append('%*s%s\n' % (help_position, '', line))
# or add a newline if the description doesn't end with one
elif not action_header.endswith('\n'):
parts.append('\n')
# if there are any sub-actions, add their help as well
for subaction in self._iter_indented_subactions(action):
parts.append(self._format_action(subaction))
# return a single string
return self._join_parts(parts)
def _format_action_invocation(self, action):
if not action.option_strings:
metavar, = self._metavar_formatter(action, action.dest)(1)
return metavar
else:
parts = []
# if the Optional doesn't take a value, format is:
# -s, --long
if action.nargs == 0:
parts.extend(action.option_strings)
# if the Optional takes a value, format is:
# -s ARGS, --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
for option_string in action.option_strings:
parts.append('%s %s' % (option_string, args_string))
return ', '.join(parts)
def _metavar_formatter(self, action, default_metavar):
if action.metavar is not None:
result = action.metavar
elif action.choices is not None:
choice_strs = [str(choice) for choice in action.choices]
result = '{%s}' % ','.join(choice_strs)
else:
result = default_metavar
def format(tuple_size):
if isinstance(result, tuple):
return result
else:
return (result, ) * tuple_size
return format
def _format_args(self, action, default_metavar):
get_metavar = self._metavar_formatter(action, default_metavar)
if action.nargs is None:
result = '%s' % get_metavar(1)
elif action.nargs == OPTIONAL:
result = '[%s]' % get_metavar(1)
elif action.nargs == ZERO_OR_MORE:
result = '[%s [%s ...]]' % get_metavar(2)
elif action.nargs == ONE_OR_MORE:
result = '%s [%s ...]' % get_metavar(2)
elif action.nargs == REMAINDER:
result = '...'
elif action.nargs == PARSER:
result = '%s ...' % get_metavar(1)
else:
formats = ['%s' for _ in range(action.nargs)]
result = ' '.join(formats) % get_metavar(action.nargs)
return result
def _expand_help(self, action):
params = dict(vars(action), prog=self._prog)
for name in list(params):
if params[name] is SUPPRESS:
del params[name]
for name in list(params):
if hasattr(params[name], '__name__'):
params[name] = params[name].__name__
if params.get('choices') is not None:
choices_str = ', '.join([str(c) for c in params['choices']])
params['choices'] = choices_str
return self._get_help_string(action) % params
def _iter_indented_subactions(self, action):
try:
get_subactions = action._get_subactions
except AttributeError:
pass
else:
self._indent()
for subaction in get_subactions():
yield subaction
self._dedent()
def _split_lines(self, text, width):
text = self._whitespace_matcher.sub(' ', text).strip()
return _textwrap.wrap(text, width)
def _fill_text(self, text, width, indent):
text = self._whitespace_matcher.sub(' ', text).strip()
return _textwrap.fill(text, width, initial_indent=indent,
subsequent_indent=indent)
def _get_help_string(self, action):
return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
"""Help message formatter which retains any formatting in descriptions.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def _fill_text(self, text, width, indent):
return ''.join([indent + line for line in text.splitlines(True)])
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
"""Help message formatter which retains formatting of all help text.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def _split_lines(self, text, width):
return text.splitlines()
class ArgumentDefaultsHelpFormatter(HelpFormatter):
"""Help message formatter which adds default values to argument help.
Only the name of this class is considered a public API. All the methods
provided by the class are considered an implementation detail.
"""
def _get_help_string(self, action):
help = action.help
if '%(default)' not in action.help:
if action.default is not SUPPRESS:
defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
if action.option_strings or action.nargs in defaulting_nargs:
help += ' (default: %(default)s)'
return help
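# Hedged usage sketch, not part of the library proper: selecting the
# formatter above so argument defaults are appended to --help text.
# ArgumentParser is defined further down this module; the name is only
# resolved when the function is called.
def _example_defaults_in_help():
    """Illustrative only: the --retries help gains ' (default: 3)'."""
    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument('--retries', type=int, default=3,
                        help='number of attempts')
    return parser.format_help()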
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
if argument is None:
return None
elif argument.option_strings:
return '/'.join(argument.option_strings)
elif argument.metavar not in (None, SUPPRESS):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
else:
return None
class ArgumentError(Exception):
"""An error from creating or using an argument (optional or positional).
The string value of this exception is the message, augmented with
information about the argument that caused it.
"""
def __init__(self, argument, message):
self.argument_name = _get_action_name(argument)
self.message = message
def __str__(self):
if self.argument_name is None:
format = '%(message)s'
else:
format = 'argument %(argument_name)s: %(message)s'
return format % dict(message=self.message,
argument_name=self.argument_name)
class ArgumentTypeError(Exception):
"""An error from trying to convert a command line string to a type."""
pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
"""Information about how to convert command line strings to Python objects.
Action objects are used by an ArgumentParser to represent the information
needed to parse a single argument from one or more strings from the
command line. The keyword arguments to the Action constructor are also
all attributes of Action instances.
Keyword Arguments:
- option_strings -- A list of command-line option strings which
should be associated with this action.
- dest -- The name of the attribute to hold the created object(s)
- nargs -- The number of command-line arguments that should be
consumed. By default, one argument will be consumed and a single
value will be produced. Other values include:
- N (an integer) consumes N arguments (and produces a list)
- '?' consumes zero or one arguments
- '*' consumes zero or more arguments (and produces a list)
- '+' consumes one or more arguments (and produces a list)
Note that the difference between the default and nargs=1 is that
with the default, a single value will be produced, while with
nargs=1, a list containing a single value will be produced.
- const -- The value to be produced if the option is specified and the
option uses an action that takes no values.
- default -- The value to be produced if the option is not specified.
- type -- The type which the command-line arguments should be converted
to, should be one of 'string', 'int', 'float', 'complex' or a
callable object that accepts a single string argument. If None,
'string' is assumed.
- choices -- A container of values that should be allowed. If not None,
after a command-line argument has been converted to the appropriate
type, an exception will be raised if it is not a member of this
collection.
- required -- True if the action must always be specified at the
command line. This is only meaningful for optional command-line
arguments.
- help -- The help string describing the argument.
- metavar -- The name to be used for the option's argument with the
help string. If None, the 'dest' value will be used as the name.
"""
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
self.option_strings = option_strings
self.dest = dest
self.nargs = nargs
self.const = const
self.default = default
self.type = type
self.choices = choices
self.required = required
self.help = help
self.metavar = metavar
def _get_kwargs(self):
names = [
'option_strings',
'dest',
'nargs',
'const',
'default',
'type',
'choices',
'help',
'metavar',
]
return [(name, getattr(self, name)) for name in names]
def __call__(self, parser, namespace, values, option_string=None):
raise NotImplementedError(_('.__call__() not defined'))
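# Hedged usage sketch, not part of the library proper: the nargs and const
# semantics documented in the Action docstring above, exercised through
# add_argument(). ArgumentParser is defined later in this module and is
# only resolved when this function is called.
def _example_nargs_and_const():
    """Illustrative only: nargs=None gives a value, nargs=1 gives a list."""
    parser = ArgumentParser()
    parser.add_argument('--plain')             # nargs=None -> 'a'
    parser.add_argument('--listed', nargs=1)   # nargs=1    -> ['a']
    parser.add_argument('--maybe', nargs='?', const='fallback')
    # '--maybe' with no value consumes zero arguments and stores the const
    return parser.parse_args(['--plain', 'a', '--listed', 'a', '--maybe'])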
class _StoreAction(Action):
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
if nargs == 0:
raise ValueError('nargs for store actions must be > 0; if you '
'have nothing to store, actions such as store '
'true or store const may be more appropriate')
if const is not None and nargs != OPTIONAL:
raise ValueError('nargs must be %r to supply const' % OPTIONAL)
super(_StoreAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=nargs,
const=const,
default=default,
type=type,
choices=choices,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
def __init__(self,
option_strings,
dest,
const,
default=None,
required=False,
help=None,
metavar=None):
super(_StoreConstAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
const=const,
default=default,
required=required,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
def __init__(self,
option_strings,
dest,
default=False,
required=False,
help=None):
super(_StoreTrueAction, self).__init__(
option_strings=option_strings,
dest=dest,
const=True,
default=default,
required=required,
help=help)
class _StoreFalseAction(_StoreConstAction):
def __init__(self,
option_strings,
dest,
default=True,
required=False,
help=None):
super(_StoreFalseAction, self).__init__(
option_strings=option_strings,
dest=dest,
const=False,
default=default,
required=required,
help=help)
class _AppendAction(Action):
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
if nargs == 0:
raise ValueError('nargs for append actions must be > 0; if arg '
'strings are not supplying the value to append, '
'the append const action may be more appropriate')
if const is not None and nargs != OPTIONAL:
raise ValueError('nargs must be %r to supply const' % OPTIONAL)
super(_AppendAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=nargs,
const=const,
default=default,
type=type,
choices=choices,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
items = _copy.copy(_ensure_value(namespace, self.dest, []))
items.append(values)
setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
def __init__(self,
option_strings,
dest,
const,
default=None,
required=False,
help=None,
metavar=None):
super(_AppendConstAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
const=const,
default=default,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
items = _copy.copy(_ensure_value(namespace, self.dest, []))
items.append(self.const)
setattr(namespace, self.dest, items)
class _CountAction(Action):
def __init__(self,
option_strings,
dest,
default=None,
required=False,
help=None):
super(_CountAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=0,
default=default,
required=required,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
new_count = _ensure_value(namespace, self.dest, 0) + 1
setattr(namespace, self.dest, new_count)
class _HelpAction(Action):
def __init__(self,
option_strings,
dest=SUPPRESS,
default=SUPPRESS,
help=None):
super(_HelpAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
parser.print_help()
parser.exit()
class _VersionAction(Action):
def __init__(self,
option_strings,
version=None,
dest=SUPPRESS,
default=SUPPRESS,
help=None):
super(_VersionAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
self.version = version
def __call__(self, parser, namespace, values, option_string=None):
version = self.version
if version is None:
version = parser.version
formatter = parser._get_formatter()
formatter.add_text(version)
parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
class _ChoicesPseudoAction(Action):
def __init__(self, name, help):
sup = super(_SubParsersAction._ChoicesPseudoAction, self)
sup.__init__(option_strings=[], dest=name, help=help)
def __init__(self,
option_strings,
prog,
parser_class,
dest=SUPPRESS,
help=None,
metavar=None):
self._prog_prefix = prog
self._parser_class = parser_class
self._name_parser_map = {}
self._choices_actions = []
super(_SubParsersAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=PARSER,
choices=self._name_parser_map,
help=help,
metavar=metavar)
def add_parser(self, name, **kwargs):
# set prog from the existing prefix
if kwargs.get('prog') is None:
kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
# create a pseudo-action to hold the choice help
if 'help' in kwargs:
help = kwargs.pop('help')
choice_action = self._ChoicesPseudoAction(name, help)
self._choices_actions.append(choice_action)
# create the parser and add it to the map
parser = self._parser_class(**kwargs)
self._name_parser_map[name] = parser
return parser
def _get_subactions(self):
return self._choices_actions
def __call__(self, parser, namespace, values, option_string=None):
parser_name = values[0]
arg_strings = values[1:]
# set the parser name if requested
if self.dest is not SUPPRESS:
setattr(namespace, self.dest, parser_name)
# select the parser
try:
parser = self._name_parser_map[parser_name]
except KeyError:
tup = parser_name, ', '.join(self._name_parser_map)
msg = _('unknown parser %r (choices: %s)' % tup)
raise ArgumentError(self, msg)
# parse all the remaining options into the namespace
parser.parse_args(arg_strings, namespace)
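# Hedged usage sketch, not part of the library proper: reaching the
# add_parser() API above via ArgumentParser.add_subparsers().
def _example_subcommands():
    """Illustrative only: dispatch between 'checkout' and 'commit'."""
    parser = ArgumentParser(prog='vcs')
    subparsers = parser.add_subparsers(dest='command')
    checkout = subparsers.add_parser('checkout', help='switch branches')
    checkout.add_argument('branch')
    subparsers.add_parser('commit', help='record changes')
    # namespace.command == 'checkout', namespace.branch == 'master'
    return parser.parse_args(['checkout', 'master'])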
# ==============
# Type classes
# ==============
class FileType(object):
"""Factory for creating file object types
Instances of FileType are typically passed as type= arguments to the
ArgumentParser add_argument() method.
Keyword Arguments:
- mode -- A string indicating how the file is to be opened. Accepts the
same values as the builtin open() function.
- bufsize -- The file's desired buffer size. Accepts the same values as
the builtin open() function.
"""
def __init__(self, mode='r', bufsize=None):
self._mode = mode
self._bufsize = bufsize
def __call__(self, string):
# the special argument "-" means sys.std{in,out}
if string == '-':
if 'r' in self._mode:
return _sys.stdin
elif 'w' in self._mode:
return _sys.stdout
else:
msg = _('argument "-" with mode %r' % self._mode)
raise ValueError(msg)
# all other arguments are used as file names
if self._bufsize:
return open(string, self._mode, self._bufsize)
else:
return open(string, self._mode)
def __repr__(self):
args = [self._mode, self._bufsize]
args_str = ', '.join([repr(arg) for arg in args if arg is not None])
return '%s(%s)' % (type(self).__name__, args_str)
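# Hedged usage sketch, not part of the library proper: FileType as a type=
# argument, including the '-' convention implemented in __call__ above.
def _example_filetype():
    """Illustrative only: '-' maps to sys.stdout for a 'w'-mode FileType."""
    parser = ArgumentParser()
    parser.add_argument('--out', type=FileType('w'))
    args = parser.parse_args(['--out', '-'])
    return args.out  # sys.stdout here; a filename would have been open()ed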
# ===========================
# Optional and Positional Parsing
# ===========================
class Namespace(_AttributeHolder):
"""Simple object for storing attributes.
Implements equality by attribute names and values, and provides a simple
string representation.
"""
def __init__(self, **kwargs):
for name in kwargs:
setattr(self, name, kwargs[name])
def __eq__(self, other):
return vars(self) == vars(other)
def __ne__(self, other):
return not (self == other)
def __contains__(self, key):
return key in self.__dict__
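# Hedged usage sketch, not part of the library proper: the attribute-based
# equality and membership test defined on Namespace above.
def _example_namespace():
    """Illustrative only: equal attributes mean equal namespaces."""
    left = Namespace(foo=1, bar='x')
    right = Namespace(bar='x', foo=1)
    return (left == right, 'foo' in left)  # (True, True)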
class _ActionsContainer(object):
def __init__(self,
description,
prefix_chars,
argument_default,
conflict_handler):
super(_ActionsContainer, self).__init__()
self.description = description
self.argument_default = argument_default
self.prefix_chars = prefix_chars
self.conflict_handler = conflict_handler
# set up registries
self._registries = {}
# register actions
self.register('action', None, _StoreAction)
self.register('action', 'store', _StoreAction)
self.register('action', 'store_const', _StoreConstAction)
self.register('action', 'store_true', _StoreTrueAction)
self.register('action', 'store_false', _StoreFalseAction)
self.register('action', 'append', _AppendAction)
self.register('action', 'append_const', _AppendConstAction)
self.register('action', 'count', _CountAction)
self.register('action', 'help', _HelpAction)
self.register('action', 'version', _VersionAction)
self.register('action', 'parsers', _SubParsersAction)
# raise an exception if the conflict handler is invalid
self._get_handler()
# action storage
self._actions = []
self._option_string_actions = {}
# groups
self._action_groups = []
self._mutually_exclusive_groups = []
# defaults storage
self._defaults = {}
# determines whether an "option" looks like a negative number
self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
# whether or not there are any optionals that look like negative
# numbers -- uses a list so it can be shared and edited
self._has_negative_number_optionals = []
# ====================
# Registration methods
# ====================
def register(self, registry_name, value, object):
registry = self._registries.setdefault(registry_name, {})
registry[value] = object
def _registry_get(self, registry_name, value, default=None):
return self._registries[registry_name].get(value, default)
# ==================================
# Namespace default accessor methods
# ==================================
def set_defaults(self, **kwargs):
self._defaults.update(kwargs)
# if these defaults match any existing arguments, replace
# the previous default on the object with the new one
for action in self._actions:
if action.dest in kwargs:
action.default = kwargs[action.dest]
def get_default(self, dest):
for action in self._actions:
if action.dest == dest and action.default is not None:
return action.default
return self._defaults.get(dest, None)
# =======================
# Adding argument actions
# =======================
def add_argument(self, *args, **kwargs):
"""
add_argument(dest, ..., name=value, ...)
add_argument(option_string, option_string, ..., name=value, ...)
"""
# if no positional args are supplied or only one is supplied and
# it doesn't look like an option string, parse a positional
# argument
chars = self.prefix_chars
if not args or len(args) == 1 and args[0][0] not in chars:
if args and 'dest' in kwargs:
raise ValueError('dest supplied twice for positional argument')
kwargs = self._get_positional_kwargs(*args, **kwargs)
# otherwise, we're adding an optional argument
else:
kwargs = self._get_optional_kwargs(*args, **kwargs)
# if no default was supplied, use the parser-level default
if 'default' not in kwargs:
dest = kwargs['dest']
if dest in self._defaults:
kwargs['default'] = self._defaults[dest]
elif self.argument_default is not None:
kwargs['default'] = self.argument_default
# create the action object, and add it to the parser
action_class = self._pop_action_class(kwargs)
if not _callable(action_class):
raise ValueError('unknown action "%s"' % action_class)
action = action_class(**kwargs)
# raise an error if the action type is not callable
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
raise ValueError('%r is not callable' % type_func)
return self._add_action(action)
def add_argument_group(self, *args, **kwargs):
group = _ArgumentGroup(self, *args, **kwargs)
self._action_groups.append(group)
return group
def add_mutually_exclusive_group(self, **kwargs):
group = _MutuallyExclusiveGroup(self, **kwargs)
self._mutually_exclusive_groups.append(group)
return group
def _add_action(self, action):
# resolve any conflicts
self._check_conflict(action)
# add to actions list
self._actions.append(action)
action.container = self
# index the action by any option strings it has
for option_string in action.option_strings:
self._option_string_actions[option_string] = action
# set the flag if any option strings look like negative numbers
for option_string in action.option_strings:
if self._negative_number_matcher.match(option_string):
if not self._has_negative_number_optionals:
self._has_negative_number_optionals.append(True)
# return the created action
return action
def _remove_action(self, action):
self._actions.remove(action)
def _add_container_actions(self, container):
# collect groups by titles
title_group_map = {}
for group in self._action_groups:
if group.title in title_group_map:
msg = _('cannot merge actions - two groups are named %r')
raise ValueError(msg % (group.title))
title_group_map[group.title] = group
# map each action to its group
group_map = {}
for group in container._action_groups:
# if a group with the title exists, use that, otherwise
# create a new group matching the container's group
if group.title not in title_group_map:
title_group_map[group.title] = self.add_argument_group(
title=group.title,
description=group.description,
conflict_handler=group.conflict_handler)
# map the actions to their new group
for action in group._group_actions:
group_map[action] = title_group_map[group.title]
# add container's mutually exclusive groups
# NOTE: if add_mutually_exclusive_group ever gains title= and
# description= then this code will need to be expanded as above
for group in container._mutually_exclusive_groups:
mutex_group = self.add_mutually_exclusive_group(
required=group.required)
# map the actions to their new mutex group
for action in group._group_actions:
group_map[action] = mutex_group
# add all actions to this container or their group
for action in container._actions:
group_map.get(action, self)._add_action(action)
def _get_positional_kwargs(self, dest, **kwargs):
# make sure required is not specified
if 'required' in kwargs:
msg = _("'required' is an invalid argument for positionals")
raise TypeError(msg)
# mark positional arguments as required if at least one is
# always required
if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
kwargs['required'] = True
if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
kwargs['required'] = True
# return the keyword arguments with no option strings
return dict(kwargs, dest=dest, option_strings=[])
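    # Net effect of the rules above on positionals, for reference:
    #   nargs None, an integer, or '+'  -> required
    #   nargs '?'                       -> not required
    #   nargs '*' with a default        -> not required
    #   nargs '*' without a default     -> required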
def _get_optional_kwargs(self, *args, **kwargs):
# determine short and long option strings
option_strings = []
long_option_strings = []
for option_string in args:
# error on strings that don't start with an appropriate prefix
if not option_string[0] in self.prefix_chars:
msg = _('invalid option string %r: '
'must start with a character %r')
tup = option_string, self.prefix_chars
raise ValueError(msg % tup)
# strings starting with two prefix characters are long options
option_strings.append(option_string)
if option_string[0] in self.prefix_chars:
if len(option_string) > 1:
if option_string[1] in self.prefix_chars:
long_option_strings.append(option_string)
# infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
dest = kwargs.pop('dest', None)
if dest is None:
if long_option_strings:
dest_option_string = long_option_strings[0]
else:
dest_option_string = option_strings[0]
dest = dest_option_string.lstrip(self.prefix_chars)
if not dest:
msg = _('dest= is required for options like %r')
raise ValueError(msg % option_string)
dest = dest.replace('-', '_')
# return the updated keyword arguments
return dict(kwargs, dest=dest, option_strings=option_strings)
def _pop_action_class(self, kwargs, default=None):
action = kwargs.pop('action', default)
return self._registry_get('action', action, action)
def _get_handler(self):
# determine function from conflict handler string
handler_func_name = '_handle_conflict_%s' % self.conflict_handler
try:
return getattr(self, handler_func_name)
except AttributeError:
msg = _('invalid conflict_resolution value: %r')
raise ValueError(msg % self.conflict_handler)
def _check_conflict(self, action):
# find all options that conflict with this option
confl_optionals = []
for option_string in action.option_strings:
if option_string in self._option_string_actions:
confl_optional = self._option_string_actions[option_string]
confl_optionals.append((option_string, confl_optional))
# resolve any conflicts
if confl_optionals:
conflict_handler = self._get_handler()
conflict_handler(action, confl_optionals)
def _handle_conflict_error(self, action, conflicting_actions):
message = _('conflicting option string(s): %s')
conflict_string = ', '.join([option_string
for option_string, action
in conflicting_actions])
raise ArgumentError(action, message % conflict_string)
def _handle_conflict_resolve(self, action, conflicting_actions):
# remove all conflicting options
for option_string, action in conflicting_actions:
# remove the conflicting option
action.option_strings.remove(option_string)
self._option_string_actions.pop(option_string, None)
# if the option now has no option string, remove it from the
# container holding it
if not action.option_strings:
action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
def __init__(self, container, title=None, description=None, **kwargs):
# add any missing keyword arguments by checking the container
update = kwargs.setdefault
update('conflict_handler', container.conflict_handler)
update('prefix_chars', container.prefix_chars)
update('argument_default', container.argument_default)
super_init = super(_ArgumentGroup, self).__init__
super_init(description=description, **kwargs)
# group attributes
self.title = title
self._group_actions = []
# share most attributes with the container
self._registries = container._registries
self._actions = container._actions
self._option_string_actions = container._option_string_actions
self._defaults = container._defaults
self._has_negative_number_optionals = \
container._has_negative_number_optionals
def _add_action(self, action):
action = super(_ArgumentGroup, self)._add_action(action)
self._group_actions.append(action)
return action
def _remove_action(self, action):
super(_ArgumentGroup, self)._remove_action(action)
self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
def __init__(self, container, required=False):
super(_MutuallyExclusiveGroup, self).__init__(container)
self.required = required
self._container = container
def _add_action(self, action):
if action.required:
msg = _('mutually exclusive arguments must be optional')
raise ValueError(msg)
action = self._container._add_action(action)
self._group_actions.append(action)
return action
def _remove_action(self, action):
self._container._remove_action(action)
self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
        - add_help -- Add a -h/--help option
"""
def __init__(self,
prog=None,
usage=None,
description=None,
epilog=None,
version=None,
parents=[],
formatter_class=HelpFormatter,
prefix_chars='-',
fromfile_prefix_chars=None,
argument_default=None,
conflict_handler='error',
add_help=True):
if version is not None:
import warnings
warnings.warn(
"""The "version" argument to ArgumentParser is deprecated. """
"""Please use """
""""add_argument(..., action='version', version="N", ...)" """
"""instead""", DeprecationWarning)
superinit = super(ArgumentParser, self).__init__
superinit(description=description,
prefix_chars=prefix_chars,
argument_default=argument_default,
conflict_handler=conflict_handler)
# default setting for prog
if prog is None:
prog = _os.path.basename(_sys.argv[0])
self.prog = prog
self.usage = usage
self.epilog = epilog
self.version = version
self.formatter_class = formatter_class
self.fromfile_prefix_chars = fromfile_prefix_chars
self.add_help = add_help
add_group = self.add_argument_group
self._positionals = add_group(_('positional arguments'))
self._optionals = add_group(_('optional arguments'))
self._subparsers = None
# register types
def identity(string):
return string
self.register('type', None, identity)
# add help and version arguments if necessary
# (using explicit default to override global argument_default)
if self.add_help:
self.add_argument(
'-h', '--help', action='help', default=SUPPRESS,
help=_('show this help message and exit'))
if self.version:
self.add_argument(
'-v', '--version', action='version', default=SUPPRESS,
version=self.version,
help=_("show program's version number and exit"))
# add parent arguments and defaults
for parent in parents:
self._add_container_actions(parent)
try:
defaults = parent._defaults
except AttributeError:
pass
else:
self._defaults.update(defaults)
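    # Hedged sketch of the `parents` mechanics handled just above: each
    # parent's actions and defaults are copied into this parser, so shared
    # options can live in a bare parser (add_help=False avoids adding a
    # second -h/--help):
    #
    #   base = ArgumentParser(add_help=False)
    #   base.add_argument('--config')
    #   child = ArgumentParser(parents=[base])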
# =======================
# Pretty __repr__ methods
# =======================
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'version',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
# ==================================
# Optional/Positional adding methods
# ==================================
def add_subparsers(self, **kwargs):
if self._subparsers is not None:
self.error(_('cannot have multiple subparser arguments'))
# add the parser class to the arguments if it's not present
kwargs.setdefault('parser_class', type(self))
if 'title' in kwargs or 'description' in kwargs:
title = _(kwargs.pop('title', 'subcommands'))
description = _(kwargs.pop('description', None))
self._subparsers = self.add_argument_group(title, description)
else:
self._subparsers = self._positionals
# prog defaults to the usage message of this parser, skipping
# optional arguments and with no "usage:" prefix
if kwargs.get('prog') is None:
formatter = self._get_formatter()
positionals = self._get_positional_actions()
groups = self._mutually_exclusive_groups
formatter.add_usage(self.usage, positionals, groups, '')
kwargs['prog'] = formatter.format_help().strip()
# create the parsers action and add it to the positionals list
parsers_class = self._pop_action_class(kwargs, 'parsers')
action = parsers_class(option_strings=[], **kwargs)
self._subparsers._add_action(action)
# return the created parsers action
return action
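    # Usage sketch (sub-command names hypothetical): the returned parsers
    # action exposes add_parser() for building the individual sub-commands:
    #
    #   subparsers = parser.add_subparsers()
    #   fetch = subparsers.add_parser('fetch')
    #   fetch.add_argument('remote')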
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
# =====================================
# Command line argument parsing methods
# =====================================
def parse_args(self, args=None, namespace=None):
args, argv = self.parse_known_args(args, namespace)
if argv:
msg = _('unrecognized arguments: %s')
self.error(msg % ' '.join(argv))
return args
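    # Behavioural sketch (argument names hypothetical): parse_args() errors
    # on leftovers that parse_known_args() would simply hand back:
    #
    #   ns = parser.parse_args(['--verbose', 'in.txt'])
    #   ns, extras = parser.parse_known_args(['--verbose', '--unknown'])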
def parse_known_args(self, args=None, namespace=None):
# args default to the system args
if args is None:
args = _sys.argv[1:]
# default Namespace built from parser defaults
if namespace is None:
namespace = Namespace()
# add any action defaults that aren't present
for action in self._actions:
if action.dest is not SUPPRESS:
if not hasattr(namespace, action.dest):
if action.default is not SUPPRESS:
default = action.default
if isinstance(action.default, _basestring):
default = self._get_value(action, default)
setattr(namespace, action.dest, default)
# add any parser defaults that aren't present
for dest in self._defaults:
if not hasattr(namespace, dest):
setattr(namespace, dest, self._defaults[dest])
# parse the arguments and exit if there are any errors
try:
return self._parse_known_args(args, namespace)
except ArgumentError:
err = _sys.exc_info()[1]
self.error(str(err))
def _parse_known_args(self, arg_strings, namespace):
# replace arg strings that are file references
if self.fromfile_prefix_chars is not None:
arg_strings = self._read_args_from_files(arg_strings)
# map all mutually exclusive arguments to the other arguments
# they can't occur with
action_conflicts = {}
for mutex_group in self._mutually_exclusive_groups:
group_actions = mutex_group._group_actions
for i, mutex_action in enumerate(mutex_group._group_actions):
conflicts = action_conflicts.setdefault(mutex_action, [])
conflicts.extend(group_actions[:i])
conflicts.extend(group_actions[i + 1:])
# find all option indices, and determine the arg_string_pattern
# which has an 'O' if there is an option at an index,
# an 'A' if there is an argument, or a '-' if there is a '--'
option_string_indices = {}
arg_string_pattern_parts = []
arg_strings_iter = iter(arg_strings)
for i, arg_string in enumerate(arg_strings_iter):
# all args after -- are non-options
if arg_string == '--':
arg_string_pattern_parts.append('-')
for arg_string in arg_strings_iter:
arg_string_pattern_parts.append('A')
# otherwise, add the arg to the arg strings
# and note the index if it was an option
else:
option_tuple = self._parse_optional(arg_string)
if option_tuple is None:
pattern = 'A'
else:
option_string_indices[i] = option_tuple
pattern = 'O'
arg_string_pattern_parts.append(pattern)
# join the pieces together to form the pattern
arg_strings_pattern = ''.join(arg_string_pattern_parts)
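        # e.g. ['-v', 'x', '--', '-y'] yields the pattern 'OA-A'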
        # converts arg strings to the appropriate values and then takes the action
seen_actions = _set()
seen_non_default_actions = _set()
def take_action(action, argument_strings, option_string=None):
seen_actions.add(action)
argument_values = self._get_values(action, argument_strings)
# error if this argument is not allowed with other previously
# seen arguments, assuming that actions that use the default
# value don't really count as "present"
if argument_values is not action.default:
seen_non_default_actions.add(action)
for conflict_action in action_conflicts.get(action, []):
if conflict_action in seen_non_default_actions:
msg = _('not allowed with argument %s')
action_name = _get_action_name(conflict_action)
raise ArgumentError(action, msg % action_name)
# take the action if we didn't receive a SUPPRESS value
# (e.g. from a default)
if argument_values is not SUPPRESS:
action(self, namespace, argument_values, option_string)
# function to convert arg_strings into an optional action
def consume_optional(start_index):
# get the optional identified at this index
option_tuple = option_string_indices[start_index]
action, option_string, explicit_arg = option_tuple
# identify additional optionals in the same arg string
# (e.g. -xyz is the same as -x -y -z if no args are required)
match_argument = self._match_argument
action_tuples = []
while True:
# if we found no optional action, skip it
if action is None:
extras.append(arg_strings[start_index])
return start_index + 1
# if there is an explicit argument, try to match the
# optional's string arguments to only this
if explicit_arg is not None:
arg_count = match_argument(action, 'A')
# if the action is a single-dash option and takes no
# arguments, try to parse more single-dash options out
# of the tail of the option string
chars = self.prefix_chars
if arg_count == 0 and option_string[1] not in chars:
action_tuples.append((action, [], option_string))
for char in self.prefix_chars:
option_string = char + explicit_arg[0]
explicit_arg = explicit_arg[1:] or None
optionals_map = self._option_string_actions
if option_string in optionals_map:
action = optionals_map[option_string]
break
else:
msg = _('ignored explicit argument %r')
raise ArgumentError(action, msg % explicit_arg)
                    # if the action expects exactly one argument, we've
# successfully matched the option; exit the loop
elif arg_count == 1:
stop = start_index + 1
args = [explicit_arg]
action_tuples.append((action, args, option_string))
break
# error if a double-dash option did not use the
# explicit argument
else:
msg = _('ignored explicit argument %r')
raise ArgumentError(action, msg % explicit_arg)
# if there is no explicit argument, try to match the
# optional's string arguments with the following strings
# if successful, exit the loop
else:
start = start_index + 1
selected_patterns = arg_strings_pattern[start:]
arg_count = match_argument(action, selected_patterns)
stop = start + arg_count
args = arg_strings[start:stop]
action_tuples.append((action, args, option_string))
break
# add the Optional to the list and return the index at which
# the Optional's string args stopped
assert action_tuples
for action, args, option_string in action_tuples:
take_action(action, args, option_string)
return stop
# the list of Positionals left to be parsed; this is modified
# by consume_positionals()
positionals = self._get_positional_actions()
# function to convert arg_strings into positional actions
def consume_positionals(start_index):
# match as many Positionals as possible
match_partial = self._match_arguments_partial
selected_pattern = arg_strings_pattern[start_index:]
arg_counts = match_partial(positionals, selected_pattern)
# slice off the appropriate arg strings for each Positional
# and add the Positional and its args to the list
for action, arg_count in zip(positionals, arg_counts):
args = arg_strings[start_index: start_index + arg_count]
start_index += arg_count
take_action(action, args)
# slice off the Positionals that we just parsed and return the
# index at which the Positionals' string args stopped
positionals[:] = positionals[len(arg_counts):]
return start_index
# consume Positionals and Optionals alternately, until we have
# passed the last option string
extras = []
start_index = 0
if option_string_indices:
max_option_string_index = max(option_string_indices)
else:
max_option_string_index = -1
while start_index <= max_option_string_index:
# consume any Positionals preceding the next option
next_option_string_index = min([
index
for index in option_string_indices
if index >= start_index])
if start_index != next_option_string_index:
positionals_end_index = consume_positionals(start_index)
# only try to parse the next optional if we didn't consume
# the option string during the positionals parsing
if positionals_end_index > start_index:
start_index = positionals_end_index
continue
else:
start_index = positionals_end_index
# if we consumed all the positionals we could and we're not
# at the index of an option string, there were extra arguments
if start_index not in option_string_indices:
strings = arg_strings[start_index:next_option_string_index]
extras.extend(strings)
start_index = next_option_string_index
# consume the next optional and any arguments for it
start_index = consume_optional(start_index)
# consume any positionals following the last Optional
stop_index = consume_positionals(start_index)
# if we didn't consume all the argument strings, there were extras
extras.extend(arg_strings[stop_index:])
# if we didn't use all the Positional objects, there were too few
# arg strings supplied.
if positionals:
self.error(_('too few arguments'))
# make sure all required actions were present
for action in self._actions:
if action.required:
if action not in seen_actions:
name = _get_action_name(action)
self.error(_('argument %s is required') % name)
# make sure all required groups had one option present
for group in self._mutually_exclusive_groups:
if group.required:
for action in group._group_actions:
if action in seen_non_default_actions:
break
# if no actions were used, report the error
else:
names = [_get_action_name(action)
for action in group._group_actions
if action.help is not SUPPRESS]
msg = _('one of the arguments %s is required')
self.error(msg % ' '.join(names))
# return the updated namespace and the extra arguments
return namespace, extras
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
new_arg_strings = []
for arg_string in arg_strings:
# for regular arguments, just add them back into the list
            # AF PATCH: arg_string[0] raised an IndexError for empty string
            # AF PATCH: values such as '', so a len == 0 guard was added
if len(arg_string) == 0 or arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
else:
try:
args_file = open(arg_string[1:])
try:
arg_strings = []
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
finally:
args_file.close()
except IOError:
err = _sys.exc_info()[1]
self.error(str(err))
# return the modified argument list
return new_arg_strings
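    # Behavioural sketch (file name hypothetical): with
    # fromfile_prefix_chars='@', the argument '@args.txt' is replaced by
    # one argument per line of args.txt, expanded recursively above.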
def convert_arg_line_to_args(self, arg_line):
return [arg_line]
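    # A common override, paraphrased from the argparse documentation, that
    # splits each line of an arguments file on whitespace instead:
    #
    #   def convert_arg_line_to_args(self, arg_line):
    #       return arg_line.split()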
def _match_argument(self, action, arg_strings_pattern):
# match the pattern for this action to the arg strings
nargs_pattern = self._get_nargs_pattern(action)
match = _re.match(nargs_pattern, arg_strings_pattern)
# raise an exception if we weren't able to find a match
if match is None:
nargs_errors = {
None: _('expected one argument'),
OPTIONAL: _('expected at most one argument'),
ONE_OR_MORE: _('expected at least one argument'),
}
default = _('expected %s argument(s)') % action.nargs
msg = nargs_errors.get(action.nargs, default)
raise ArgumentError(action, msg)
# return the number of arguments matched
return len(match.group(1))
def _match_arguments_partial(self, actions, arg_strings_pattern):
# progressively shorten the actions list by slicing off the
# final actions until we find a match
result = []
for i in range(len(actions), 0, -1):
actions_slice = actions[:i]
pattern = ''.join([self._get_nargs_pattern(action)
for action in actions_slice])
match = _re.match(pattern, arg_strings_pattern)
if match is not None:
result.extend([len(string) for string in match.groups()])
break
# return the list of arg string counts
return result
def _parse_optional(self, arg_string):
# if it's an empty string, it was meant to be a positional
if not arg_string:
return None
# if it doesn't start with a prefix, it was meant to be positional
if not arg_string[0] in self.prefix_chars:
return None
# if the option string is present in the parser, return the action
if arg_string in self._option_string_actions:
action = self._option_string_actions[arg_string]
return action, arg_string, None
# if it's just a single character, it was meant to be positional
if len(arg_string) == 1:
return None
# if the option string before the "=" is present, return the action
if '=' in arg_string:
option_string, explicit_arg = arg_string.split('=', 1)
if option_string in self._option_string_actions:
action = self._option_string_actions[option_string]
return action, option_string, explicit_arg
# search through all possible prefixes of the option string
# and all actions in the parser for possible interpretations
option_tuples = self._get_option_tuples(arg_string)
# if multiple actions match, the option string was ambiguous
if len(option_tuples) > 1:
options = ', '.join([option_string
for action, option_string, explicit_arg in option_tuples])
tup = arg_string, options
self.error(_('ambiguous option: %s could match %s') % tup)
# if exactly one action matched, this segmentation is good,
# so return the parsed action
elif len(option_tuples) == 1:
option_tuple, = option_tuples
return option_tuple
# if it was not found as an option, but it looks like a negative
# number, it was meant to be positional
# unless there are negative-number-like options
if self._negative_number_matcher.match(arg_string):
if not self._has_negative_number_optionals:
return None
# if it contains a space, it was meant to be a positional
if ' ' in arg_string:
return None
# it was meant to be an optional but there is no such option
# in this parser (though it might be a valid option in a subparser)
return None, arg_string, None
def _get_option_tuples(self, option_string):
result = []
# option strings starting with two prefix characters are only
# split at the '='
chars = self.prefix_chars
if option_string[0] in chars and option_string[1] in chars:
if '=' in option_string:
option_prefix, explicit_arg = option_string.split('=', 1)
else:
option_prefix = option_string
explicit_arg = None
for option_string in self._option_string_actions:
if option_string.startswith(option_prefix):
action = self._option_string_actions[option_string]
tup = action, option_string, explicit_arg
result.append(tup)
# single character options can be concatenated with their arguments
# but multiple character options always have to have their argument
# separate
elif option_string[0] in chars and option_string[1] not in chars:
option_prefix = option_string
explicit_arg = None
short_option_prefix = option_string[:2]
short_explicit_arg = option_string[2:]
for option_string in self._option_string_actions:
if option_string == short_option_prefix:
action = self._option_string_actions[option_string]
tup = action, option_string, short_explicit_arg
result.append(tup)
elif option_string.startswith(option_prefix):
action = self._option_string_actions[option_string]
tup = action, option_string, explicit_arg
result.append(tup)
# shouldn't ever get here
else:
self.error(_('unexpected option string: %s') % option_string)
# return the collected option tuples
return result
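    # Worked example (option names hypothetical): with '--verbose' and
    # '--version' both registered, the prefix '--ver' collects two tuples
    # here, which _parse_optional then reports as an ambiguous option.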
def _get_nargs_pattern(self, action):
# in all examples below, we have to allow for '--' args
# which are represented as '-' in the pattern
nargs = action.nargs
# the default (None) is assumed to be a single argument
if nargs is None:
nargs_pattern = '(-*A-*)'
# allow zero or one arguments
elif nargs == OPTIONAL:
nargs_pattern = '(-*A?-*)'
# allow zero or more arguments
elif nargs == ZERO_OR_MORE:
nargs_pattern = '(-*[A-]*)'
# allow one or more arguments
elif nargs == ONE_OR_MORE:
nargs_pattern = '(-*A[A-]*)'
# allow any number of options or arguments
elif nargs == REMAINDER:
nargs_pattern = '([-AO]*)'
# allow one argument followed by any number of options or arguments
elif nargs == PARSER:
nargs_pattern = '(-*A[-AO]*)'
# all others should be integers
else:
nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
# if this is an optional action, -- is not allowed
if action.option_strings:
nargs_pattern = nargs_pattern.replace('-*', '')
nargs_pattern = nargs_pattern.replace('-', '')
# return the pattern
return nargs_pattern
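    # Worked example: a positional with nargs='+' yields '(-*A[A-]*)';
    # matched against a remaining pattern 'AAO' it captures 'AA', so
    # _match_argument reports two consumed argument strings.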
# ========================
# Value conversion methods
# ========================
def _get_values(self, action, arg_strings):
# for everything but PARSER args, strip out '--'
if action.nargs not in [PARSER, REMAINDER]:
arg_strings = [s for s in arg_strings if s != '--']
# optional argument produces a default when not present
if not arg_strings and action.nargs == OPTIONAL:
if action.option_strings:
value = action.const
else:
value = action.default
if isinstance(value, _basestring):
value = self._get_value(action, value)
self._check_value(action, value)
# when nargs='*' on a positional, if there were no command-line
# args, use the default if it is anything other than None
elif (not arg_strings and action.nargs == ZERO_OR_MORE and
not action.option_strings):
if action.default is not None:
value = action.default
else:
value = arg_strings
self._check_value(action, value)
# single argument or optional argument produces a single value
elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
arg_string, = arg_strings
value = self._get_value(action, arg_string)
self._check_value(action, value)
# REMAINDER arguments convert all values, checking none
elif action.nargs == REMAINDER:
value = [self._get_value(action, v) for v in arg_strings]
# PARSER arguments convert all values, but check only the first
elif action.nargs == PARSER:
value = [self._get_value(action, v) for v in arg_strings]
self._check_value(action, value[0])
# all other types of nargs produce a list
else:
value = [self._get_value(action, v) for v in arg_strings]
for v in value:
self._check_value(action, v)
# return the converted value
return value
def _get_value(self, action, arg_string):
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
msg = _('%r is not callable')
raise ArgumentError(action, msg % type_func)
# convert the value to the appropriate type
try:
result = type_func(arg_string)
# ArgumentTypeErrors indicate errors
except ArgumentTypeError:
name = getattr(action.type, '__name__', repr(action.type))
msg = str(_sys.exc_info()[1])
raise ArgumentError(action, msg)
# TypeErrors or ValueErrors also indicate errors
except (TypeError, ValueError):
name = getattr(action.type, '__name__', repr(action.type))
msg = _('invalid %s value: %r')
raise ArgumentError(action, msg % (name, arg_string))
# return the converted value
return result
def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
tup = value, ', '.join(map(repr, action.choices))
msg = _('invalid choice: %r (choose from %s)') % tup
raise ArgumentError(action, msg)
# =======================
# Help-formatting methods
# =======================
def format_usage(self):
formatter = self._get_formatter()
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
return formatter.format_help()
def format_help(self):
formatter = self._get_formatter()
# usage
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
# description
formatter.add_text(self.description)
# positionals, optionals and user-defined groups
for action_group in self._action_groups:
formatter.start_section(action_group.title)
formatter.add_text(action_group.description)
formatter.add_arguments(action_group._group_actions)
formatter.end_section()
# epilog
formatter.add_text(self.epilog)
# determine help from format above
return formatter.format_help()
def format_version(self):
import warnings
warnings.warn(
'The format_version method is deprecated -- the "version" '
'argument to ArgumentParser is no longer supported.',
DeprecationWarning)
formatter = self._get_formatter()
formatter.add_text(self.version)
return formatter.format_help()
def _get_formatter(self):
return self.formatter_class(prog=self.prog)
# =====================
# Help-printing methods
# =====================
def print_usage(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_usage(), file)
def print_help(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_help(), file)
def print_version(self, file=None):
import warnings
warnings.warn(
'The print_version method is deprecated -- the "version" '
'argument to ArgumentParser is no longer supported.',
DeprecationWarning)
self._print_message(self.format_version(), file)
def _print_message(self, message, file=None):
if message:
if file is None:
file = _sys.stderr
file.write(message)
# ===============
# Exiting methods
# ===============
def exit(self, status=0, message=None):
if message:
self._print_message(message, _sys.stderr)
_sys.exit(status)
def error(self, message):
"""error(message: string)
Prints a usage message incorporating the message to stderr and
exits.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""
self.print_usage(_sys.stderr)
self.exit(2, _('%s: error: %s\n') % (self.prog, message))
| gpl-3.0 |
bdyetton/prettychart | website/addons/forward/tests/test_models.py | 44 | 1805 | # -*- coding: utf-8 -*-
from nose.tools import * # PEP8 asserts
from modularodm.exceptions import ValidationError
from tests.base import OsfTestCase
from website.addons.forward.tests.factories import ForwardSettingsFactory
class TestSettingsValidation(OsfTestCase):
def setUp(self):
super(TestSettingsValidation, self).setUp()
self.settings = ForwardSettingsFactory()
def test_validate_url_bad(self):
self.settings.url = 'badurl'
with assert_raises(ValidationError):
self.settings.save()
def test_validate_url_good(self):
self.settings.url = 'http://frozen.pizza.reviews/'
try:
self.settings.save()
except ValidationError:
assert 0
def test_validate_redirect_bool_bad(self):
self.settings.redirect_bool = 'notabool'
with assert_raises(ValidationError):
self.settings.save()
def test_validate_redirect_bool_good(self):
self.settings.redirect_bool = False
try:
self.settings.save()
except ValidationError:
assert 0
def test_validate_redirect_secs_bad(self):
self.settings.redirect_secs = -2
with assert_raises(ValidationError):
self.settings.save()
def test_validate_redirect_secs_good(self):
self.settings.redirect_secs = 20
try:
self.settings.save()
except ValidationError:
assert 0
def test_label_sanitary(self):
self.settings.label = 'safe'
try:
self.settings.save()
except ValidationError:
assert False
def test_label_unsanitary(self):
self.settings.label = 'un<br />safe'
with assert_raises(ValidationError):
self.settings.save()
| apache-2.0 |
weitengchu/rt-thread | components/external/freetype/src/tools/docmaker/formatter.py | 132 | 6019 | #
# formatter.py
#
# Convert parsed content blocks to a structured document (library file).
#
# Copyright 2002, 2004, 2007, 2008, 2014 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
#
# This is the base Formatter class. Its purpose is to convert a content
# processor's data into specific documents (i.e., table of contents, global
# index, and individual API reference indices).
#
# You need to sub-class it to output anything sensible. For example, the
# file `tohtml.py' contains the definition of the `HtmlFormatter' sub-class
# to output HTML.
#
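#
#  A minimal subclass sketch (hypothetical; `tohtml.py' is the real
#  reference implementation):
#
#    class TextFormatter( Formatter ):
#
#        def section_enter( self, section ):
#            print "section: " + str( section.title )
#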
from sources import *
from content import *
from utils import *
################################################################
##
## FORMATTER CLASS
##
class Formatter:
def __init__( self, processor ):
self.processor = processor
self.identifiers = {}
self.chapters = processor.chapters
self.sections = processor.sections.values()
self.block_index = []
        # index all blocks by name
self.blocks = []
for section in self.sections:
for block in section.blocks.values():
self.add_identifier( block.name, block )
# add enumeration values to the index, since this is useful
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
self.add_identifier( field.name, block )
self.block_index = self.identifiers.keys()
self.block_index.sort( key = index_key )
def add_identifier( self, name, block ):
if name in self.identifiers:
# duplicate name!
sys.stderr.write( "WARNING: duplicate definition for"
+ " '" + name + "' "
+ "in " + block.location() + ", "
+ "previous definition in "
+ self.identifiers[name].location()
+ "\n" )
else:
self.identifiers[name] = block
#
# formatting the table of contents
#
def toc_enter( self ):
pass
def toc_chapter_enter( self, chapter ):
pass
def toc_section_enter( self, section ):
pass
def toc_section_exit( self, section ):
pass
def toc_chapter_exit( self, chapter ):
pass
def toc_index( self, index_filename ):
pass
def toc_exit( self ):
pass
def toc_dump( self, toc_filename = None, index_filename = None ):
output = None
if toc_filename:
output = open_output( toc_filename )
self.toc_enter()
for chap in self.processor.chapters:
self.toc_chapter_enter( chap )
for section in chap.sections:
self.toc_section_enter( section )
self.toc_section_exit( section )
self.toc_chapter_exit( chap )
self.toc_index( index_filename )
self.toc_exit()
if output:
close_output( output )
#
# formatting the index
#
def index_enter( self ):
pass
def index_name_enter( self, name ):
pass
def index_name_exit( self, name ):
pass
def index_exit( self ):
pass
def index_dump( self, index_filename = None ):
output = None
if index_filename:
output = open_output( index_filename )
self.index_enter()
for name in self.block_index:
self.index_name_enter( name )
self.index_name_exit( name )
self.index_exit()
if output:
close_output( output )
#
# formatting a section
#
def section_enter( self, section ):
pass
def block_enter( self, block ):
pass
def markup_enter( self, markup, block = None ):
pass
def field_enter( self, field, markup = None, block = None ):
pass
def field_exit( self, field, markup = None, block = None ):
pass
def markup_exit( self, markup, block = None ):
pass
def block_exit( self, block ):
pass
def section_exit( self, section ):
pass
def section_dump( self, section, section_filename = None ):
output = None
if section_filename:
output = open_output( section_filename )
self.section_enter( section )
for name in section.block_names:
skip_entry = 0
try:
block = self.identifiers[name]
# `block_names' can contain field names also,
# which we filter out
for markup in block.markups:
if markup.tag == 'values':
for field in markup.fields:
if field.name == name:
skip_entry = 1
except:
skip_entry = 1 # this happens e.g. for `/empty/' entries
if skip_entry:
continue
self.block_enter( block )
for markup in block.markups[1:]: # always ignore first markup!
self.markup_enter( markup, block )
for field in markup.fields:
self.field_enter( field, markup, block )
self.field_exit( field, markup, block )
self.markup_exit( markup, block )
self.block_exit( block )
self.section_exit( section )
if output:
close_output( output )
def section_dump_all( self ):
for section in self.sections:
self.section_dump( section )
# eof
| gpl-2.0 |
nccgroup/typofinder | howoldisdomain/whois.py | 2 | 10182 | #
# Typofinder for domain typo discovery
#
# Released as open source by NCC Group Plc - http://www.nccgroup.com/
#
# Simple whois query function
#
# Based on RFC3912
#
# Developed by Matt Summers, matt dot summers at nccgroup dot com
# and Stephen Tomkinson
#
# http://www.github.com/nccgroup/typofinder
#
# Released under AGPL see LICENSE for more information
#
import socket
import codecs
import re
from publicsuffix import PublicSuffixList
import datetime
import pprint
import sys
#The TLD -> whois server map below is seeded from the bundled
#whois-servers.txt list (based on http://www.nirsoft.net/whois-servers.txt);
#TLDs missing from it are resolved via whois.iana.org at query time
FIELD_SEPERATOR = ', '
RATE_LIMITTED_RESPONSES = ("WHOIS LIMIT EXCEEDED",
"Too many simulataneous connections from your host",
"Please try again later.",
"You have been banned for abuse.",
"has exceeded the established limit",
"WHOIS LIMI",
"Still in grace period, wait",
"Permission denied.")
_tld_to_whois = dict()
with open("datasources/whois-servers.txt", "r") as whois_servers:
for line in whois_servers:
if line.startswith(';'):
continue
parts = line.split(' ')
_tld_to_whois['.' + parts[0].strip()] = parts[1].strip()
_psl = PublicSuffixList(input_file=codecs.open("datasources/effective_tld_names.dat", "r", "utf8"))
def _whois_lookup(sServer, sDomain):
"""
Perform the network connection to the Whois Server and query for the given domain.
@param sServer: The hostname of the whois server to query.
@param sDomain: The domain to query for.
@return: The whois result string.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(5)
try:
s.connect((sServer, 43))
except socket.timeout:
return "Timeout connecting to " + sServer
except socket.error:
return "Unable to connect to " + sServer
try:
query = str(codecs.encode(sDomain, "idna"), "ascii") + '\r\n'
except:
#Assumes an encoding error, just send the raw string instead.
query = sDomain + '\r\n'
response = ''
try:
s.send(query.encode())
while len(response) < 10000:
bytes = s.recv(1000)
try:
block = bytes.decode("utf-8")
except:
#If it's not UTF-8, the second most popular encoding appears to be iso-8859-1
block = bytes.decode("iso-8859-1")
if block == '':
break
response = response + block
except socket.error:
pass
finally:
try:
s.shutdown(socket.SHUT_RDWR)
except socket.error:
#Not much more we can do here
pass
finally:
s.close()
return response
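# Hedged usage sketch (server and domain are examples only): an RFC 3912
# query is just the domain sent over TCP port 43, answered with free text:
#
#   text = _whois_lookup('whois.verisign-grs.com', 'example.com')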
def whois(sDomain):
"""
Entry point for this package, which fetches whois data from the appropriate server.
@param sDomain: The domain to query whois for.
@return: The whois result.
"""
sDomain = _psl.get_public_suffix(sDomain)
sLDot = sDomain.find(".")
tld = sDomain[sLDot:]
if tld in _tld_to_whois:
sServer = _tld_to_whois[tld]
else:
sServer = "whois.iana.org"
try:
for sLine in _whois_lookup(sServer, tld).split('\n'):
if "refer:" in sLine or "whois:" in sLine:
sServer = sLine[6:].lstrip()
_tld_to_whois[tld] = sServer
break
except:
pass
result = _recursive_whois(sServer, sDomain)
#Special case to handle the fuzzy matching at the ICANN whois server
if 'To single out one record, look it up with "xxx", where xxx is one of the' in result:
all_domain_records = _whois_lookup(sServer, '=' + sDomain)
all_whois_servers = _extract_field(all_domain_records, "Whois Server")
if all_whois_servers != None:
next_whois_server = all_whois_servers.split(', ')[-1]
return _recursive_whois(next_whois_server, sDomain)
else:
return result
else:
return result
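# Top-level usage sketch (domain is an example): the raw record returned
# here can be fed to parse() below for structured fields:
#
#   raw = whois('example.com')
#   fields = parse(raw)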
def _recursive_whois(sServer, sDomain):
"""
A recursive whois function which will follow the "Whois Server:" referals.
@param sServer: The hostname of the whois server to query.
@param sDomain: The domain to query for.
@return: The whois result string.
"""
result = _whois_lookup(sServer, sDomain)
next_whois_server = _extract_field(result, "Whois Server")
if next_whois_server and next_whois_server != sServer and not next_whois_server.startswith("http"):
return _recursive_whois(next_whois_server, sDomain)
for error_message in RATE_LIMITTED_RESPONSES:
if error_message in result:
return "Rate limited by " + sServer
if result.strip() == '':
return "Empty response from " + sServer
return result.lstrip()
def _extract_field(whois_blob, *args):
"""
Extract from the given WHOIS result blob the value that is associated with the given field name.
@param whois_blob The whois data to search for the value
@param *args One or more field names (interpreted as regexes) that the requested value may be referred to as.
"""
result = list()
if len(args) == 1:
field_name = args[0]
else:
field_name = "(?:"
field_list = list()
for arg in args:
field_list.append("(?:" + arg + ")")
field_name += "|".join(field_list)
field_name += ")"
regex = field_name + r"\.*:(?: |\t)*(.+)\n"
match_list = re.finditer(regex, whois_blob, flags=re.IGNORECASE)
for match in match_list:
if match.group(1):
value = match.group(1).strip()
if value and value != "null":
result.append(value)
if not result:
return None
else:
return FIELD_SEPERATOR.join(result)
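# Illustrative example (blob is made up): field names are treated as
# regexes and multiple matches are joined with FIELD_SEPERATOR:
#
#   _extract_field("Whois Server: whois.example.net\n", "Whois Server")
#   # -> 'whois.example.net'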
def _date_parse(date_string):
"""
Date parser which attempts to work with a range of date formats.
@param date_string The string representing a date or date/time.
@return A datetime object if one could be parsed, or None
"""
if not date_string:
return None
date_string = date_string.rstrip('.')
    date_string = re.sub(r'(\d)T(\d)', r'\g<1>\g<2>', date_string)
date_string = date_string.replace(' ', '')
date_string = date_string.replace('.', '-')
date_string = date_string.rstrip('Z')
#Handle timezones ourselves on python 2.X because the native datetime won't parse them
tz_match = None
if sys.version_info < (3,0):
tz_match = re.match(r"(.*)(\+|-)(\d{2}):?(\d{2})$", date_string)
if tz_match:
date_string = tz_match.group(1)
result = None
for format in ("%Y-%m-%d%H:%M:%S", "%Y-%m-%d%H:%M:%S%z", "%Y-%m-%d", "%d-%b-%Y", "%a%b%d%H:%M:%S%Z%Y", "%Y-%d-%m", "%Y-%m-%d%H:%M:%S-%f", "%d-%b-%Y%H:%M:%S%Z"):
try:
result = datetime.datetime.strptime(date_string, format)
break
except ValueError:
#Attempt the next format
continue
if result and tz_match:
#Manipulate the datetime into UTC if we don't have timezone support
delta = datetime.timedelta(hours=int(tz_match.group(3)), minutes=int(tz_match.group(4)))
if tz_match.group(2) == '-':
result += delta
else:
result -= delta
return result
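# Examples of inputs the normalisation above is intended to accept
# (illustrative, not exhaustive):
#
#   _date_parse('2014-06-01T12:30:00Z')   # ISO 8601 'T'/'Z' forms
#   _date_parse('01-Jun-2014')            # registrar-style '%d-%b-%Y'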
contact_types = {"registrant": "(?:Registrant|Owner)(?: Contact)?",
"tech": "Tech(?:nical)?(?: Contact)?",
"admin": "Admin(?:istrative)?(?: Contact)?"}
contact_fields = {"name": "(?:Name)?",
"organization": "Organi[sz]ation",
"street": "(?:(?:Street)|(?:Add?ress ?)1?)",
"city": "City",
"state": "State(?:/Province)?",
"country": "Country(?:/Economy)?",
"post_code": "Postal ?Code|zip",
"email": "E-?mail",
"phone": "(?:tele)?phone(?: Number)?",
"phone_ext": "Phone Ext",
"fax": "(?:Fax|Facsimile)[ -]?(?:Number|No)?",
"fax_ext": "Fax Ext"}
registrar_fields = {"name": "Registrar(?: Name)?",
"url": "Registrar (?:(?:URL)|(?:Homepage))",
"abuse_email": "Abuse Contact Email",
"abuse_phone": "Abuse Contact Phone",
"iana_id": "Registrar IANA ID"}
date_fields = {"created": ("Creation Date", "(?:Date )?created(?: on)?", "Registered(?: on)?", "Registration Date"),
"updated": ("(?:Last )?Modified", "Updated Date", "(?:last )?updated?(?: on)?"),
"expires": ("Expiration Date", "Expiry Date", "renewal date", "Expires(?: on)?", "Expire Date")}
def parse(whois_str):
"""
Parses the given whois result string in an attempt to extract common fields.
@param whois_str The raw WHOIS result
@return A dictionary of dictionaries containing the parsed data.
"""
result_dict = {}
for type in contact_types.keys():
person_dict = dict()
for field in contact_fields.keys():
person_dict[field] = _extract_field(whois_str, contact_types[type] + "(?: |-)" + contact_fields[field])
result_dict[type] = person_dict
registrar_dict = dict()
for field in registrar_fields.keys():
registrar_dict[field] = _extract_field(whois_str, registrar_fields[field])
result_dict['registrar'] = registrar_dict
result_dict['reseller'] = {'name': _extract_field(whois_str, "Reseller")}
dates_dict = {}
for field in date_fields.keys():
date_str = _extract_field(whois_str, *date_fields[field])
if date_str:
date_str = date_str.split(FIELD_SEPERATOR)[0]
dates_dict[field] = _date_parse(date_str)
else:
dates_dict[field] = None
result_dict['date'] = dates_dict
    return result_dict
| agpl-3.0 |
r03ert0/ldsc | test/test_sumstats.py | 3 | 16976 | from __future__ import division
import ldscore.sumstats as s
import ldscore.parse as ps
import unittest
import numpy as np
import pandas as pd
from pandas.util.testing import assert_series_equal, assert_frame_equal
from nose.tools import *
from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_allclose
from nose.plugins.attrib import attr
import os
from ldsc import parser
DIR = os.path.dirname(__file__)
N_REP = 200
s._N_CHR = 2 # having to mock 22 files is annoying
class Mock(object):
'''
Dumb object for mocking args and log
'''
def __init__(self):
pass
def log(self, x):
# pass
print x
log = Mock()
args = Mock()
t = lambda attr: lambda obj: getattr(obj, attr, float('nan'))
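# t('attr') builds a NaN-tolerant accessor: map(t('rg_ratio'), results)
# yields NaN for any run whose result lacks that attribute.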
def test_check_condnum():
x = np.ones((2, 2))
x[1, 1] += 1e-5
args.invert_anyway = False
assert_raises(ValueError, s._check_ld_condnum, args, log, x)
args.invert_anyway = True
s._check_ld_condnum(args, log, x) # no error
def test_check_variance():
ld = pd.DataFrame({'SNP': ['a', 'b', 'c'],
'LD1': np.ones(3).astype(float),
'LD2': np.arange(3).astype(float)})
ld = ld[['SNP', 'LD1', 'LD2']]
M_annot = np.array([[1, 2]])
M_annot, ld, novar_col = s._check_variance(log, M_annot, ld)
assert_array_equal(M_annot.shape, (1, 1))
assert_array_equal(M_annot, [[2]])
assert_allclose(ld.iloc[:, 1], [0, 1, 2])
assert_array_equal(novar_col, [True, False])
def test_align_alleles():
beta = pd.Series(np.ones(6))
alleles = pd.Series(['ACAC', 'TGTG', 'GTGT', 'AGCT', 'AGTC', 'TCTC'])
beta = s._align_alleles(beta, alleles)
assert_series_equal(beta, pd.Series([1.0, 1, 1, -1, 1, 1]))
def test_filter_bad_alleles():
alleles = pd.Series(['ATAT', 'ATAG', 'DIID', 'ACAC'])
bad_alleles = s._filter_alleles(alleles)
print bad_alleles
assert_series_equal(bad_alleles, pd.Series([False, False, False, True]))
def test_read_annot():
ref_ld_chr = None
ref_ld = os.path.join(DIR, 'annot_test/test')
overlap_matrix, M_tot = s._read_chr_split_files(ref_ld_chr, ref_ld, log, 'annot matrix',
ps.annot, frqfile=None)
assert_array_equal(overlap_matrix, [[1, 0, 0], [0, 2, 2], [0, 2, 2]])
assert_array_equal(M_tot, 3)
frqfile = os.path.join(DIR, 'annot_test/test1')
overlap_matrix, M_tot = s._read_chr_split_files(ref_ld_chr, ref_ld, log, 'annot matrix',
ps.annot, frqfile=frqfile)
assert_array_equal(overlap_matrix, [[1, 0, 0], [0, 1, 1], [0, 1, 1]])
assert_array_equal(M_tot, 2)
def test_valid_snps():
x = {'AC', 'AG', 'CA', 'CT', 'GA', 'GT', 'TC', 'TG'}
assert_equal(x, s.VALID_SNPS)
def test_bases():
x = set(['A', 'T', 'G', 'C'])
assert_equal(x, set(s.BASES))
def test_complement():
assert_equal(s.COMPLEMENT, {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'})
def test_warn_len():
# nothing to test except that it doesn't throw an error at runtime
s._warn_length(log, [1])
def test_match_alleles():
m = {'ACAC',
'ACCA',
'ACGT',
'ACTG',
'AGAG',
'AGCT',
'AGGA',
'AGTC',
'CAAC',
'CACA',
'CAGT',
'CATG',
'CTAG',
'CTCT',
'CTGA',
'CTTC',
'GAAG',
'GACT',
'GAGA',
'GATC',
'GTAC',
'GTCA',
'GTGT',
'GTTG',
'TCAG',
'TCCT',
'TCGA',
'TCTC',
'TGAC',
'TGCA',
'TGGT',
'TGTG'}
assert_equal(m, s.MATCH_ALLELES)
def test_flip_alleles():
m = {'ACAC': False,
'ACCA': True,
'ACGT': True,
'ACTG': False,
'AGAG': False,
'AGCT': True,
'AGGA': True,
'AGTC': False,
'CAAC': True,
'CACA': False,
'CAGT': False,
'CATG': True,
'CTAG': True,
'CTCT': False,
'CTGA': False,
'CTTC': True,
'GAAG': True,
'GACT': False,
'GAGA': False,
'GATC': True,
'GTAC': True,
'GTCA': False,
'GTGT': False,
'GTTG': True,
'TCAG': False,
'TCCT': True,
'TCGA': True,
'TCTC': False,
'TGAC': False,
'TGCA': True,
'TGGT': True,
'TGTG': False}
assert_equal(m, s.FLIP_ALLELES)
def test_strand_ambiguous():
m = {'AC': False,
'AG': False,
'AT': True,
'CA': False,
'CG': True,
'CT': False,
'GA': False,
'GC': True,
'GT': False,
'TA': True,
'TC': False,
'TG': False}
assert_equal(m, s.STRAND_AMBIGUOUS)
@attr('rg')
@attr('slow')
class Test_RG_Statistical():
@classmethod
def setUpClass(cls):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
(DIR + '/simulate_test/sumstats/' + str(i) for i in xrange(N_REP)))
args.out = DIR + '/simulate_test/1'
x = s.estimate_rg(args, log)
args.intercept_gencov = ','.join(('0' for _ in xrange(N_REP)))
args.intercept_h2 = ','.join(('1' for _ in xrange(N_REP)))
y = s.estimate_rg(args, log)
cls.rg = x
cls.rg_noint = y
def test_rg_ratio(self):
assert_allclose(np.nanmean(map(t('rg_ratio'), self.rg)), 0, atol=0.02)
def test_rg_ratio_noint(self):
assert_allclose(
np.nanmean(map(t('rg_ratio'), self.rg_noint)), 0, atol=0.02)
def test_rg_se(self):
assert_allclose(np.nanmean(map(t('rg_se'), self.rg)), np.nanstd(
map(t('rg_ratio'), self.rg)), atol=0.02)
def test_rg_se_noint(self):
assert_allclose(np.nanmean(map(t('rg_se'), self.rg_noint)), np.nanstd(
map(t('rg_ratio'), self.rg_noint)), atol=0.02)
def test_gencov_tot(self):
assert_allclose(
np.nanmean(map(t('tot'), map(t('gencov'), self.rg))), 0, atol=0.02)
def test_gencov_tot_noint(self):
assert_allclose(
np.nanmean(map(t('tot'), map(t('gencov'), self.rg_noint))), 0, atol=0.02)
def test_gencov_tot_se(self):
assert_allclose(np.nanstd(map(t('tot'), map(t('gencov'), self.rg))), np.nanmean(
map(t('tot_se'), map(t('gencov'), self.rg))), atol=0.02)
def test_gencov_tot_se_noint(self):
assert_allclose(np.nanstd(map(t('tot'), map(t('gencov'), self.rg_noint))), np.nanmean(
map(t('tot_se'), map(t('gencov'), self.rg_noint))), atol=0.02)
def test_gencov_cat(self):
assert_allclose(
np.nanmean(map(t('cat'), map(t('gencov'), self.rg))), [0, 0], atol=0.02)
def test_gencov_cat_noint(self):
assert_allclose(
np.nanmean(map(t('cat'), map(t('gencov'), self.rg_noint))), [0, 0], atol=0.02)
def test_gencov_cat_se(self):
assert_allclose(np.nanstd(map(t('cat'), map(t('gencov'), self.rg))), np.nanmean(
map(t('cat_se'), map(t('gencov'), self.rg))), atol=0.02)
def test_gencov_cat_se_noint(self):
assert_allclose(np.nanstd(map(t('cat'), map(t('gencov'), self.rg_noint))), np.nanmean(
map(t('cat_se'), map(t('gencov'), self.rg_noint))), atol=0.02)
def test_gencov_int(self):
assert_allclose(
np.nanmean(map(t('intercept'), map(t('gencov'), self.rg))), 0, atol=0.1)
def test_gencov_int_se(self):
assert_allclose(np.nanmean(map(t('intercept_se'), map(t('gencov'), self.rg))), np.nanstd(
map(t('intercept'), map(t('gencov'), self.rg))), atol=0.1)
def test_hsq_int(self):
assert_allclose(
np.nanmean(map(t('intercept'), map(t('hsq2'), self.rg))), 1, atol=0.1)
def test_hsq_int_se(self):
assert_allclose(np.nanmean(map(t('intercept_se'), map(t('hsq2'), self.rg))), np.nanstd(
map(t('intercept'), map(t('hsq2'), self.rg))), atol=0.1)
@attr('h2')
@attr('slow')
class Test_H2_Statistical(unittest.TestCase):
@classmethod
def setUpClass(cls):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.chisq_max = 99999
h2 = []
h2_noint = []
for i in xrange(N_REP):
args.intercept_h2 = None
args.h2 = DIR + '/simulate_test/sumstats/' + str(i)
args.out = DIR + '/simulate_test/1'
h2.append(s.estimate_h2(args, log))
args.intercept_h2 = 1
h2_noint.append(s.estimate_h2(args, log))
cls.h2 = h2
cls.h2_noint = h2_noint
def test_tot(self):
assert_allclose(np.nanmean(map(t('tot'), self.h2)), 0.9, atol=0.05)
def test_tot_noint(self):
assert_allclose(
np.nanmean(map(t('tot'), self.h2_noint)), 0.9, atol=0.05)
def test_tot_se(self):
assert_allclose(np.nanmean(map(t('tot_se'), self.h2)), np.nanstd(
map(t('tot'), self.h2)), atol=0.05)
def test_tot_se_noint(self):
assert_allclose(np.nanmean(map(t('tot_se'), self.h2_noint)), np.nanstd(
map(t('tot'), self.h2_noint)), atol=0.05)
def test_cat(self):
x = np.nanmean(map(t('cat'), self.h2_noint), axis=0)
y = np.array((0.3, 0.6)).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_cat_noint(self):
x = np.nanmean(map(t('cat'), self.h2_noint), axis=0)
y = np.array((0.3, 0.6)).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_cat_se(self):
x = np.nanmean(map(t('cat_se'), self.h2), axis=0)
y = np.nanstd(map(t('cat'), self.h2), axis=0).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_cat_se_noint(self):
x = np.nanmean(map(t('cat_se'), self.h2_noint), axis=0)
y = np.nanstd(map(t('cat'), self.h2_noint), axis=0).reshape(x.shape)
assert_allclose(x, y, atol=0.05)
def test_coef(self):
# should be h^2/M = [[0.3, 0.9]] / M
coef = np.array(((0.3, 0.9))) / self.h2[0].M
for h in [self.h2, self.h2_noint]:
assert np.all(np.abs(np.nanmean(map(t('coef'), h), axis=0) - coef) < 1e6)
def test_coef_se(self):
for h in [self.h2, self.h2_noint]:
assert_array_almost_equal(np.nanmean(map(t('coef_se'), h), axis=0),
np.nanstd(map(t('coef'), h), axis=0))
def test_prop(self):
for h in [self.h2, self.h2_noint]:
assert np.all(np.nanmean(map(t('prop'), h), axis=0) - [1/3, 2/3] < 0.02)
def test_prop_se(self):
for h in [self.h2, self.h2_noint]:
assert np.all(np.nanmean(map(t('prop_se'), h), axis=0) - np.nanstd(map(t('prop'), h), axis=0) < 0.02)
def test_int(self):
assert_allclose(np.nanmean(map(t('intercept'), self.h2)), 1, atol=0.1)
def test_int_se(self):
assert_allclose(np.nanstd(map(t('intercept'), self.h2)), np.nanmean(
map(t('intercept_se'), self.h2)), atol=0.1)
class Test_Estimate(unittest.TestCase):
def test_h2_M(self): # check --M works
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.h2 = DIR + '/simulate_test/sumstats/1'
args.out = DIR + '/simulate_test/1'
args.print_cov = True # right now just check no runtime errors
args.print_delete_vals = True
x = s.estimate_h2(args, log)
args.M = str(
float(open(DIR + '/simulate_test/ldscore/oneld_onefile.l2.M_5_50').read()))
y = s.estimate_h2(args, log)
assert_array_almost_equal(x.tot, y.tot)
assert_array_almost_equal(x.tot_se, y.tot_se)
args.M = '1,2'
assert_raises(ValueError, s.estimate_h2, args, log)
args.M = 'foo_bar'
assert_raises(ValueError, s.estimate_h2, args, log)
def test_h2_ref_ld(self): # test different ways of reading ref ld
args = parser.parse_args('')
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.h2 = DIR + '/simulate_test/sumstats/555'
args.out = DIR + '/simulate_test/'
x = s.estimate_h2(args, log)
args.ref_ld = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
y = s.estimate_h2(args, log)
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
z = s.estimate_h2(args, log)
assert_almost_equal(x.tot, y.tot)
assert_array_almost_equal(y.cat, z.cat)
assert_array_almost_equal(x.prop, y.prop)
assert_array_almost_equal(y.coef, z.coef)
assert_array_almost_equal(x.tot_se, y.tot_se)
assert_array_almost_equal(y.cat_se, z.cat_se)
assert_array_almost_equal(x.prop_se, y.prop_se)
assert_array_almost_equal(y.coef_se, z.coef_se)
# test statistical properties (constrain intercept here)
def test_rg_M(self):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/1'
x = s.estimate_rg(args, log)[0]
args.M = open(
DIR + '/simulate_test/ldscore/oneld_onefile.l2.M_5_50', 'rb').read().rstrip('\n')
y = s.estimate_rg(args, log)[0]
assert_array_almost_equal(x.rg_ratio, y.rg_ratio)
assert_array_almost_equal(x.rg_se, y.rg_se)
args.M = '1,2'
assert_raises(ValueError, s.estimate_rg, args, log)
args.M = 'foo_bar'
assert_raises(ValueError, s.estimate_rg, args, log)
def test_rg_ref_ld(self):
args = parser.parse_args('')
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/1'
args.print_cov = True # right now just check no runtime errors
args.print_delete_vals = True
x = s.estimate_rg(args, log)[0]
args.ref_ld = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
y = s.estimate_rg(args, log)[0]
args.ref_ld_chr = DIR + '/simulate_test/ldscore/twold_firstfile,' + \
DIR + '/simulate_test/ldscore/twold_secondfile'
z = s.estimate_rg(args, log)[0]
assert_almost_equal(x.rg_ratio, y.rg_ratio)
assert_almost_equal(y.rg_jknife, z.rg_jknife)
assert_almost_equal(x.rg_se, y.rg_se)
def test_no_check_alleles(self):
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/1'
x = s.estimate_rg(args, log)[0]
args.no_check_alleles = True
y = s.estimate_rg(args, log)[0]
assert_equal(x.rg_ratio, y.rg_ratio)
assert_almost_equal(x.rg_jknife, y.rg_jknife)
assert_equal(x.rg_se, y.rg_se)
def test_twostep_h2(self):
# make sure two step isn't going crazy
args = parser.parse_args('')
args.ref_ld = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.h2 = DIR + '/simulate_test/sumstats/1'
args.out = DIR + '/simulate_test/1'
args.chisq_max = 9999999
args.two_step = 999
x = s.estimate_h2(args, log)
args.chisq_max = 9999
args.two_step = 99999
y = s.estimate_h2(args, log)
assert_allclose(x.tot, y.tot, atol=1e-5)
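# NOTE (added, hedged): in this small simulated dataset both settings
# appear to place the step-1 chi^2 cutoff above every test statistic, so
# the two-step estimator should collapse to the same fit -- hence the
# tight atol on the totals.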
def test_twostep_rg(self):
# make sure two step isn't going crazy
args = parser.parse_args('')
args.ref_ld_chr = DIR + '/simulate_test/ldscore/oneld_onefile'
args.w_ld = DIR + '/simulate_test/ldscore/w'
args.rg = ','.join(
[DIR + '/simulate_test/sumstats/1' for _ in xrange(2)])
args.out = DIR + '/simulate_test/rg'
args.two_step = 999
x = s.estimate_rg(args, log)[0]
args.two_step = 99999
y = s.estimate_rg(args, log)[0]
assert_allclose(x.rg_ratio, y.rg_ratio, atol=1e-5)
assert_allclose(x.gencov.tot, y.gencov.tot, atol=1e-5)
| gpl-3.0 |
PaulVanSchayck/irods | tests/pydevtest/test_resource_types.py | 2 | 222736 | import commands
import getpass
import os
import re
import shutil
import subprocess
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import configuration
import lib
from resource_suite import ResourceSuite, ResourceBase
from test_chunkydevtest import ChunkyDevTest
class Test_Resource_RandomWithinReplication(ResourceSuite, ChunkyDevTest, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc rrResc random", 'STDOUT_SINGLELINE', 'random')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB1 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unixB1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB2 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unixB2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc rrResc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB1")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB2")
super(Test_Resource_RandomWithinReplication, self).setUp()
def tearDown(self):
super(Test_Resource_RandomWithinReplication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB2")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc rrResc")
admin_session.assert_icommand("iadmin rmresc unixB1")
admin_session.assert_icommand("iadmin rmresc unixB2")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc rrResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource repl 0 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource repl 0 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource repl 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource 1 should have double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
class Test_Resource_RoundRobinWithinReplication(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc rrResc roundrobin", 'STDOUT_SINGLELINE', 'roundrobin')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB1 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unixB1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB2 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unixB2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc rrResc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB1")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unixB2")
super(Test_Resource_RoundRobinWithinReplication, self).setUp()
def tearDown(self):
super(Test_Resource_RoundRobinWithinReplication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB2")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unixB1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc rrResc")
admin_session.assert_icommand("iadmin rmresc unixB1")
admin_session.assert_icommand("iadmin rmresc unixB2")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc rrResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
def test_next_child_iteration__2884(self):
filename="foobar"
lib.make_file( filename, 100 )
# extract the next resource in the rr from the context string
_, out, _ =self.admin.assert_icommand('ilsresc -l rrResc', 'STDOUT_SINGLELINE', 'demoResc')
for line in out.split('\n'):
if 'context:' in line:
_, _, next_resc = line.partition('context:')
next_resc = next_resc.strip()
# determine the 'other' resource
resc_set = set(['unixB1', 'unixB2'])
remaining_set = resc_set - set([next_resc])
resc_remaining = remaining_set.pop()
# resources listed should be 'next_resc'
self.admin.assert_icommand('iput ' + filename + ' file0') # put file
self.admin.assert_icommand('ils -L file0', 'STDOUT_SINGLELINE', next_resc) # check resource
# resources listed should be 'resc_remaining'
self.admin.assert_icommand('iput ' + filename + ' file1') # put file
self.admin.assert_icommand('ils -L file1', 'STDOUT_SINGLELINE', resc_remaining) # check resource
# resources listed should be 'next_resc' once again
self.admin.assert_icommand('iput ' + filename + ' file2') # put file
self.admin.assert_icommand('ils -L file2', 'STDOUT_SINGLELINE', next_resc) # check resource
os.remove(filename)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource repl 0 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource repl 0 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource repl 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource 1 should have double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
class Test_Resource_Unixfilesystem(ResourceSuite, ChunkyDevTest, unittest.TestCase):
def setUp(self):
hostname = lib.get_hostname()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc 'unixfilesystem' " + hostname + ":" +
lib.get_irods_top_level_dir() + "/demoRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
super(Test_Resource_Unixfilesystem, self).setUp()
def tearDown(self):
super(Test_Resource_Unixfilesystem, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/demoRescVault", ignore_errors=True)
def test_key_value_passthru(self):
env = os.environ.copy()
env['spLogLevel'] = '11'
lib.restart_irods_server(env=env)
lib.make_file('file.txt', 15)
initial_log_size = lib.get_log_size('server')
self.user0.assert_icommand('iput --kv_pass="put_key=val1" file.txt')
assert lib.count_occurrences_of_string_in_log('server', 'key [put_key] - value [val1]', start_index=initial_log_size) in [1, 2] # double print if collection missing
initial_log_size = lib.get_log_size('server')
self.user0.assert_icommand('iget -f --kv_pass="get_key=val3" file.txt other.txt')
assert lib.count_occurrences_of_string_in_log('server', 'key [get_key] - value [val3]', start_index=initial_log_size) in [1, 2] # double print if collection missing
lib.restart_irods_server()
lib.assert_command('rm -f file.txt other.txt')
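# NOTE (added): the assertions above count plugin log lines, so the test
# bumps spLogLevel to 11 before restarting the server; the --kv_pass
# key=value strings ride along with the put/get operations and surface in
# the server log as 'key [...] - value [...]'.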
@unittest.skipIf(configuration.RUN_IN_TOPOLOGY, "Skip for Topology Testing: Checks local file")
def test_ifsck__2650(self):
# local setup
filename = 'fsckfile.txt'
filepath = lib.create_local_testfile(filename)
full_logical_path = '/' + self.admin.zone_name + '/home/' + self.admin.username + '/' + self.admin._session_id + '/' + filename
# assertions
self.admin.assert_icommand('ils -L ' + filename, 'STDERR_SINGLELINE', 'does not exist') # should not be listed
self.admin.assert_icommand('iput -K ' + self.testfile + ' ' + full_logical_path) # iput
self.admin.assert_icommand('ils -L ' + filename, 'STDOUT_SINGLELINE', filename) # should now be listed
file_vault_full_path = os.path.join(lib.get_vault_session_path(self.admin), filename)
# method 1
self.admin.assert_icommand('ichksum -K ' + full_logical_path, 'STDOUT_MULTILINE',
['Total checksum performed = 1, Failed checksum = 0',
'sha2:0MczF/+UQ4lYmtu417LDmMb4mEarpxPShHfg1PhLtQw=']) # ichksum
# method 2
self.admin.assert_icommand("iquest \"select DATA_CHECKSUM where DATA_NAME = '%s'\"" % filename,
'STDOUT_SINGLELINE', ['DATA_CHECKSUM = sha2:0MczF/+UQ4lYmtu417LDmMb4mEarpxPShHfg1PhLtQw=']) # iquest
# method 3
self.admin.assert_icommand('ils -L', 'STDOUT_SINGLELINE', filename) # ils
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path) # ifsck
# change content in vault
with open(file_vault_full_path, 'r+') as f:
f.seek(0)
f.write("x")
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path, 'STDOUT_SINGLELINE', ['CORRUPTION', 'checksum not consistent with iRODS object']) # ifsck
# change size in vault
lib.cat(file_vault_full_path, 'extra letters')
self.admin.assert_icommand('ifsck ' + file_vault_full_path, 'STDOUT_SINGLELINE', ['CORRUPTION', 'size not consistent with iRODS object']) # ifsck
# unregister, reregister (to update filesize in iCAT), recalculate checksum, and confirm
self.admin.assert_icommand('irm -U ' + full_logical_path)
self.admin.assert_icommand('ireg ' + file_vault_full_path + ' ' + full_logical_path)
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path, 'STDOUT_SINGLELINE', ['WARNING: checksum not available']) # ifsck
self.admin.assert_icommand('ichksum -f ' + full_logical_path, 'STDOUT_MULTILINE',
['Total checksum performed = 1, Failed checksum = 0',
'sha2:zJhArM/en4wfI9lVq+AIFAZa6RTqqdC6LVXf6tPbqxI='])
self.admin.assert_icommand('ifsck -K ' + file_vault_full_path) # ifsck
# local cleanup
os.remove(filepath)
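# NOTE (added sketch): the 'sha2:...' strings asserted above are base64-
# encoded SHA-256 digests of the file contents (the iRODS sha2 scheme).
# A hypothetical local equivalent, not used by the suite:
def _sha2_checksum_sketch(path):
    import base64
    import hashlib
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    return 'sha2:' + base64.b64encode(digest)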
class Test_Resource_Passthru(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc passthru", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
super(Test_Resource_Passthru, self).setUp()
def tearDown(self):
super(Test_Resource_Passthru, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
class Test_Resource_WeightedPassthru(ResourceBase, unittest.TestCase):
def setUp(self):
hostname = lib.get_hostname()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + hostname + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB 'unixfilesystem' " + hostname + ":" +
lib.get_irods_top_level_dir() + "/unixBVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc w_pt passthru '' 'write=1.0;read=1.0'", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc demoResc w_pt")
admin_session.assert_icommand("iadmin addchildtoresc w_pt unixB")
super(Test_Resource_WeightedPassthru, self).setUp()
def tearDown(self):
super(Test_Resource_WeightedPassthru, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc w_pt unixB")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc w_pt")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmresc unixB")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin rmresc w_pt")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixBVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
def test_weighted_passthrough(self):
filename = "some_local_file.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand("iput " + filepath)
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', "local")
# repave a copy in the vault to differentiate
vaultpath = os.path.join(lib.get_irods_top_level_dir(), "unixBVault/home/" + self.admin.username, os.path.basename(self.admin._session_id), filename)
subprocess.check_call("echo 'THISISBROEKN' | cat > %s" % (vaultpath), shell=True)
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=2.0'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "THISISBROEKN")
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=0.01'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "TESTFILE")
def test_weighted_passthrough__2789(self):
### write=1.0;read=1.0
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=1.0'")
filename = "some_local_file_A.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand('icp {0} {1}'.format(self.testfile,filename))
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA',filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB',filename])
self.admin.assert_icommand("irm -f " + filename)
self.admin.assert_icommand("iput " + filepath)
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA',filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB',filename])
# repave a copy in the vault to differentiate
vaultpath = os.path.join(lib.get_irods_top_level_dir(), "unixBVault/home/" + self.admin.username, os.path.basename(self.admin._session_id), filename)
subprocess.check_call("echo 'THISISBROEKN' | cat > %s" % (vaultpath), shell=True)
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=2.0'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "THISISBROEKN")
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=0.01'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "TESTFILE")
self.admin.assert_icommand("irm -f " + filename)
### write=0.9;read=0.0
self.admin.assert_icommand("iadmin modresc w_pt context 'write=0.9;read=0.0'")
filename = "some_local_file_B.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand('icp {0} {1}'.format(self.testfile,filename))
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA',filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB',filename])
self.admin.assert_icommand("irm -f " + filename)
self.admin.assert_icommand("iput " + filepath)
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixA',filename])
self.admin.assert_icommand("ils -L", 'STDOUT_SINGLELINE', ['unixB',filename])
# repave a copy in the vault to differentiate
vaultpath = os.path.join(lib.get_irods_top_level_dir(), "unixBVault/home/" + self.admin.username, os.path.basename(self.admin._session_id), filename)
subprocess.check_call("echo 'THISISBROEKN' | cat > %s" % (vaultpath), shell=True)
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=2.0'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "THISISBROEKN")
self.admin.assert_icommand("iadmin modresc w_pt context 'write=1.0;read=0.01'")
self.admin.assert_icommand("iget " + filename + " - ", 'STDOUT_SINGLELINE', "TESTFILE")
self.admin.assert_icommand("irm -f " + filename)
class Test_Resource_Deferred(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc deferred", 'STDOUT_SINGLELINE', 'deferred')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
super(Test_Resource_Deferred, self).setUp()
def tearDown(self):
super(Test_Resource_Deferred, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
class Test_Resource_Random(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc random", 'STDOUT_SINGLELINE', 'random')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix3Resc 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unix3RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix3Resc")
super(Test_Resource_Random, self).setUp()
def tearDown(self):
super(Test_Resource_Random, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix3Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix3Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix3RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
class Test_Resource_NonBlocking(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc nonblocking " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/nbVault", 'STDOUT_SINGLELINE', 'nonblocking')
super(Test_Resource_NonBlocking, self).setUp()
def tearDown(self):
super(Test_Resource_NonBlocking, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
class Test_Resource_CompoundWithMockarchive(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc mockarchive " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveRescVault univMSSInterface.sh", 'STDOUT_SINGLELINE', 'mockarchive')
admin_session.assert_icommand("iadmin addchildtoresc demoResc cacheResc cache")
admin_session.assert_icommand("iadmin addchildtoresc demoResc archiveResc archive")
super(Test_Resource_CompoundWithMockarchive, self).setUp()
def tearDown(self):
super(Test_Resource_CompoundWithMockarchive, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc archiveResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc cacheResc")
admin_session.assert_icommand("iadmin rmresc archiveResc")
admin_session.assert_icommand("iadmin rmresc cacheResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveRescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheRescVault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("NOTSURE / FIXME ... -K not supported, perhaps")
def test_local_iput_checksum(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
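# The --purgec tests below verify that the 'purge cache' flag trims the
# cache-side replica (replica 0) once the operation completes, leaving the
# archive replica (and, for irepl, the targeted resource's copy) in place.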
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 1 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_CompoundWithUnivmss(ChunkyDevTest, ResourceSuite, unittest.TestCase):
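# This suite exercises a compound resource whose archive child is a univmss
# resource driven by univMSSInterface.sh: setUp renames demoResc aside and
# builds demoResc (compound) -> cacheResc (cache) + archiveResc (archive);
# tearDown unwinds the hierarchy and restores the original demoResc.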
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc univmss " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveRescVault univMSSInterface.sh", 'STDOUT_SINGLELINE', 'univmss')
admin_session.assert_icommand("iadmin addchildtoresc demoResc cacheResc cache")
admin_session.assert_icommand("iadmin addchildtoresc demoResc archiveResc archive")
super(Test_Resource_CompoundWithUnivmss, self).setUp()
def tearDown(self):
super(Test_Resource_CompoundWithUnivmss, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc archiveResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc cacheResc")
admin_session.assert_icommand("iadmin rmresc archiveResc")
admin_session.assert_icommand("iadmin rmresc cacheResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveRescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheRescVault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
# replica 2 should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) #
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 1 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
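# The clean/dirty assertions repeated throughout these suites all follow one
# pattern. A hypothetical helper like the sketch below (not used by the
# suites; it assumes only the assert_icommand/assert_icommand_fail session
# API already shown above) could collapse that repetition:
def _assert_replica_clean(session, filename, replica_num, clean=True):
    # '&' before the name in 'ils -L' output marks the replica as clean
    tokens = [" %d " % replica_num, " & " + filename]
    if clean:
        session.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', tokens)
    else:
        session.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', tokens)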
class Test_Resource_Compound(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveRescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc cacheResc cache")
admin_session.assert_icommand("iadmin addchildtoresc demoResc archiveResc archive")
super(Test_Resource_Compound, self).setUp()
def tearDown(self):
super(Test_Resource_Compound, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc archiveResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc cacheResc")
admin_session.assert_icommand("iadmin rmresc archiveResc")
admin_session.assert_icommand("iadmin rmresc cacheResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveRescVault", ignore_errors=True)
shutil.rmtree("rm -rf " + lib.get_irods_top_level_dir() + "/cacheRescVault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
@unittest.skip("TEMPORARY")
def test_iget_prefer_from_archive__ticket_1660(self):
# define core.re filepath
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
# new file to put and get
filename = "archivepolicyfile.txt"
filepath = lib.create_local_testfile(filename)
# manipulate core.re (leave as 'when_necessary' - default)
# put the file
self.admin.assert_icommand("iput " + filename) # put file
# manually update the replica in archive vault
output = self.admin.run_icommand('ils -L ' + filename)
archivereplicaphypath = output[1].split()[-1] # split into tokens, get the last one
with open(archivereplicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE\n')
# get file
retrievedfile = "retrieved.txt"
os.system("rm -f %s" % retrievedfile)
self.admin.assert_icommand("iget -f %s %s" % (filename, retrievedfile)) # get file from cache
# confirm retrieved file is same as original
assert 0 == os.system("diff %s %s" % (filepath, retrievedfile))
# manipulate the core.re to add the new policy
shutil.copy(corefile, backupcorefile)
with open(corefile, 'a') as f:
f.write('pep_resource_resolve_hierarchy_pre(*OUT){*OUT="compound_resource_cache_refresh_policy=always";}\n')
# restart the server to reread the new core.re
os.system(lib.get_irods_top_level_dir() + "/iRODS/irodsctl stop")
os.system(lib.get_irods_top_level_dir() + "/tests/zombiereaper.sh")
os.system(lib.get_irods_top_level_dir() + "/iRODS/irodsctl start")
# manually update the replica in archive vault
output = self.admin.run_icommand('ils -L ' + filename)
archivereplicaphypath = output[1].split()[-1] # split into tokens, get the last one
with open(archivereplicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE **AGAIN**\n')
# get the file
self.admin.assert_icommand("iget -f %s %s" % (filename, retrievedfile)) # get file from archive
# confirm this is the new archive file
matchfound = False
with open(retrievedfile) as f:
for line in f:
if "**AGAIN**" in line:
matchfound = True
assert matchfound
# restore the original core.re
shutil.copy(backupcorefile, corefile)
os.remove(backupcorefile)
# local cleanup
os.remove(filepath)
os.remove(retrievedfile)
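# The test above demonstrates the compound_resource_cache_refresh_policy:
# under the default 'when_necessary', iget is served from the cache replica,
# so the manual edit to the archive copy is invisible; once core.re forces
# 'always', the cache is re-staged from the archive and the edited content
# ("**AGAIN**") comes back.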
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 2 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 3
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed once - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed only once
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should be listed
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed once
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should not be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 1 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_ReplicationWithinReplication(ChunkyDevTest, ResourceSuite, unittest.TestCase):
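# Here demoResc is a replication resource with a nested replication child:
# demoResc (replication) -> unixA + replResc (replication) -> unixB1 + unixB2.
# A single iput therefore lands three replicas (0, 1, 2), one per leaf, which
# is why the replica numbers in these tests run higher than in the compound
# suites.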
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc replResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc unixA 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unixAVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB1 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unixB1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unixB2 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unixB2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc replResc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unixA")
admin_session.assert_icommand("iadmin addchildtoresc replResc unixB1")
admin_session.assert_icommand("iadmin addchildtoresc replResc unixB2")
super(Test_Resource_ReplicationWithinReplication, self).setUp()
def tearDown(self):
super(Test_Resource_ReplicationWithinReplication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc replResc unixB2")
admin_session.assert_icommand("iadmin rmchildfromresc replResc unixB1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unixA")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc replResc")
admin_session.assert_icommand("iadmin rmresc unixB1")
admin_session.assert_icommand("iadmin rmresc unixB2")
admin_session.assert_icommand("iadmin rmresc unixA")
admin_session.assert_icommand("iadmin rmresc replResc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixB1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unixAVault", ignore_errors=True)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename]) # replica 0 should be trimmed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # replica 1 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # replica 2 should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename]) # replica 0 should be trimmed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # replica 1 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # replica 2 should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename]) # replica 0 should be trimmed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # replica 1 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # replica 2 should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # replica 2 should be listed
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " & " + filename])
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename]) # should not have a replica 4
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl " + filename) # replicate to default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irm_specific_replica(self):
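        # irm -n <N> removes only replica number N; the remaining replicas keep
        # their numbers, and the removed replica should not show up in the
        # trash, per the assertions below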
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from grid
# replica 1 should be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["1 ", self.testfile])
# replica 2 should be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["2 ", self.testfile])
# replica 3 should be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["3 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
class Test_Resource_ReplicationToTwoCompound(ChunkyDevTest, ResourceSuite, unittest.TestCase):
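    # Resource tree built in setUp:
    #
    #   demoResc (replication)
    #   +-- compResc1 (compound) --> cacheResc1 (cache), archiveResc1 (archive)
    #   +-- compResc2 (compound) --> cacheResc2 (cache), archiveResc2 (archive)
    #
    # A single iput through demoResc therefore lands four physical replicas,
    # numbered 0-3 in ils -L output.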
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc compResc1 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc compResc2 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc cacheResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc1")
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc2")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 cacheResc1 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 archiveResc1 archive")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 cacheResc2 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 archiveResc2 archive")
super(Test_Resource_ReplicationToTwoCompound, self).setUp()
def tearDown(self):
super(Test_Resource_ReplicationToTwoCompound, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 archiveResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 cacheResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 archiveResc1")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 cacheResc1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc2")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc1")
admin_session.assert_icommand("iadmin rmresc archiveResc2")
admin_session.assert_icommand("iadmin rmresc cacheResc2")
admin_session.assert_icommand("iadmin rmresc archiveResc1")
admin_session.assert_icommand("iadmin rmresc cacheResc1")
admin_session.assert_icommand("iadmin rmresc compResc2")
admin_session.assert_icommand("iadmin rmresc compResc1")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc2Vault", ignore_errors=True)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
        # replica 4 (the copy on the test resource) should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["4 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
@unittest.skipIf(configuration.RUN_IN_TOPOLOGY, "Skip for Topology Testing")
def test_iget_prefer_from_archive__ticket_1660(self):
# define core.re filepath
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
# new file to put and get
filename = "archivepolicyfile.txt"
filepath = lib.create_local_testfile(filename)
# manipulate core.re (leave as 'when_necessary' - default)
# put the file
self.admin.assert_icommand("iput " + filename) # put file
# manually update the replicas in archive vaults
stdout = self.admin.run_icommand('ils -L ' + filename)[1]
print stdout
archive1replicaphypath = stdout.split()[-19] # split into tokens, get the 19th from the end
archive2replicaphypath = stdout.split()[-1] # split into tokens, get the last one
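        # assumption behind the fixed offsets above: ils -L prints a status line
        # and a vault-path line per replica, so with four replicas the two
        # archive physical paths sit at known token positions from the end of
        # the output; this arithmetic breaks if the ils -L layout changes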
print archive1replicaphypath
print archive2replicaphypath
with open(archive1replicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 1\n')
with open(archive2replicaphypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 2\n')
# get file
retrievedfile = "retrieved.txt"
os.system("rm -f %s" % retrievedfile)
self.admin.assert_icommand("iget -f %s %s" % (filename, retrievedfile)) # get file from cache
# confirm retrieved file is same as original
assert 0 == os.system("diff %s %s" % (filepath, retrievedfile))
print "original file diff confirmed"
# manipulate the core.re to add the new policy
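        # per the test's intent, the appended rule sets
        # compound_resource_cache_refresh_policy=always, forcing each compound
        # resource to restage from its archive on every access, so the iget
        # below should return the manually edited archive content instead of
        # the stale cache copy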
shutil.copy(corefile, backupcorefile)
with open(corefile, 'a') as f:
f.write('pep_resource_resolve_hierarchy_pre(*OUT){*OUT="compound_resource_cache_refresh_policy=always";}\n')
# restart the server to reread the new core.re
lib.restart_irods_server()
# manually update the replicas in archive vaults
stdout = self.admin.run_icommand('ils -L ' + filename)[1]
archivereplica1phypath = stdout.split()[-19] # split into tokens, get the 19th from the end
archivereplica2phypath = stdout.split()[-1] # split into tokens, get the last one
        print archivereplica1phypath
        print archivereplica2phypath
with open(archivereplica1phypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 1 **AGAIN**\n')
with open(archivereplica2phypath, 'w') as f:
f.write('MANUALLY UPDATED ON ARCHIVE 2 **AGAIN**\n')
# confirm the new content is on disk
with open(archivereplica1phypath) as f:
for line in f:
print line
with open(archivereplica2phypath) as f:
for line in f:
print line
# confirm the core file has new policy
print "----- confirm core has new policy ----"
with open(corefile) as f:
for line in f:
if 'pep_' in line:
print line
else:
print '.',
print "----- confirmation done ----"
self.admin.assert_icommand(['iget', '-f', filename, retrievedfile])
# confirm this is the new archive file
with open(retrievedfile) as f:
for line in f:
print line
if 'AGAIN' in line:
break
            else:  # for-else: no line contained 'AGAIN', so the updated archive copy was not served
                assert False
# restore the original core.re
shutil.copy(backupcorefile, corefile)
os.remove(backupcorefile)
# local cleanup
os.remove(filepath)
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource archive 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + filename])
# default resource cache 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource cache 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# default resource archive 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
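    # In this topology a plain iput creates replicas 0-3 (cache and archive
    # under each compound child); a replica on self.testresc then becomes
    # number 4, and further resources continue the sequence.  The replica-index
    # assertions below rely on that ordering.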
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 6
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
# should not have a replica 7
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 7 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource cache 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource cache 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# default resource archive 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# default resource archive 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
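        # --purgec purges the 'cached' replica once the operation completes, so
        # replica 0 (the cache copy) should be trimmed while the remaining
        # replicas survive; the iget and irepl variants below exercise the
        # same flag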
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 4x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 4x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 4x - replica 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename]) # should be listed 4x - replica 4
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_ReplicationToTwoCompoundResourcesWithPreferArchive(ChunkyDevTest, ResourceSuite, unittest.TestCase):
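    # Same two-compound-under-replication tree as the class above, but setUp
    # also appends a core.re rule setting
    # compound_resource_cache_refresh_policy=always, so the entire suite runs
    # with the archive copy preferred on access.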
def setUp(self):
# back up core file
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
shutil.copy(corefile, backupcorefile)
# manipulate the core.re to add the new policy
with open(corefile, 'a') as f:
f.write('pep_resource_resolve_hierarchy_pre(*OUT){*OUT="compound_resource_cache_refresh_policy=always";}\n')
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc compResc1 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc compResc2 compound", 'STDOUT_SINGLELINE', 'compound')
admin_session.assert_icommand("iadmin mkresc cacheResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc1 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc1Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc cacheResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/cacheResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc archiveResc2 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/archiveResc2Vault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc1")
admin_session.assert_icommand("iadmin addchildtoresc demoResc compResc2")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 cacheResc1 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc1 archiveResc1 archive")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 cacheResc2 cache")
admin_session.assert_icommand("iadmin addchildtoresc compResc2 archiveResc2 archive")
super(Test_Resource_ReplicationToTwoCompoundResourcesWithPreferArchive, self).setUp()
def tearDown(self):
super(Test_Resource_ReplicationToTwoCompoundResourcesWithPreferArchive, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 archiveResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc2 cacheResc2")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 archiveResc1")
admin_session.assert_icommand("iadmin rmchildfromresc compResc1 cacheResc1")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc2")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc compResc1")
admin_session.assert_icommand("iadmin rmresc archiveResc2")
admin_session.assert_icommand("iadmin rmresc cacheResc2")
admin_session.assert_icommand("iadmin rmresc archiveResc1")
admin_session.assert_icommand("iadmin rmresc cacheResc1")
admin_session.assert_icommand("iadmin rmresc compResc2")
admin_session.assert_icommand("iadmin rmresc compResc1")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc1Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/archiveResc2Vault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/cacheResc2Vault", ignore_errors=True)
# restore the original core.re
corefile = lib.get_core_re_dir() + "/core.re"
backupcorefile = corefile + "--" + self._testMethodName
shutil.copy(backupcorefile, corefile)
os.remove(backupcorefile)
def test_irm_specific_replica(self):
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed
self.admin.assert_icommand("irepl -R " + self.testresc + " " + self.testfile) # creates replica
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', self.testfile) # should be listed twice
self.admin.assert_icommand("irm -n 0 " + self.testfile) # remove original from cacheResc only
        # replica 4 (the copy on the test resource) should still be there
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', ["4 " + self.testresc, self.testfile])
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica 0 should be gone
trashpath = "/" + self.admin.zone_name + "/trash/home/" + self.admin.username + \
"/" + self.admin._session_id
self.admin.assert_icommand_fail("ils -L " + trashpath + "/" + self.testfile, 'STDOUT_SINGLELINE',
["0 " + self.admin.default_resource, self.testfile]) # replica should not be in trash
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
@unittest.skip("this is tested elsewhere")
def test_iget_prefer_from_archive__ticket_1660(self):
pass
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource cache 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource archive 1 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource cache 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource archive 2 should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + filename])
# default resource cache 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource archive 1 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource cache 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# default resource archive 2 should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
###################
# irepl
###################
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 6
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
# should not have a replica 7
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 7 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource cache 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource cache 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource archive 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource archive 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource cache 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource cache 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# default resource archive 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# default resource archive 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 4 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput --purgec " + filename) # put file
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - replica 3
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 4x - replica 1
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 4x - replica 2
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 4x - replica 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", filename]) # should be listed 4x - replica 4
# should not have any extra replicas
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_RoundRobin(ChunkyDevTest, ResourceSuite, unittest.TestCase):
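    # Resource tree built in setUp: demoResc (roundrobin) with two
    # unixfilesystem children, unix1Resc and unix2Resc; successive creates are
    # expected to rotate between the children.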
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc roundrobin", 'STDOUT_SINGLELINE', 'roundrobin')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix2Resc")
super(Test_Resource_RoundRobin, self).setUp()
def tearDown(self):
super(Test_Resource_RoundRobin, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_round_robin_mechanism(self):
# local setup
filename = "rrfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
self.user1.assert_icommand("iput " + filename + " file0.txt")
self.user1.assert_icommand("iput " + filename + " file1.txt")
self.user1.assert_icommand("ils -l", 'STDOUT_SINGLELINE', "unix1Resc")
self.user1.assert_icommand("ils -l", 'STDOUT_SINGLELINE', "unix2Resc")
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_Replication(ChunkyDevTest, ResourceSuite, unittest.TestCase):
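    # Resource tree built in setUp: demoResc (replication) with three
    # unixfilesystem children (unix1Resc, unix2Resc, unix3Resc), so every iput
    # yields three managed replicas, numbered 0-2.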
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc replication", 'STDOUT_SINGLELINE', 'replication')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix3Resc 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unix3RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin addchildtoresc demoResc unix3Resc")
super(Test_Resource_Replication, self).setUp()
def tearDown(self):
super(Test_Resource_Replication, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix3Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc unix1Resc")
admin_session.assert_icommand("iadmin rmresc unix3Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix3RescVault", ignore_errors=True)
def test_irm_specific_replica(self):
# not allowed here - this is a managed replication resource
# should be listed 3x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 0 ", " & " + self.testfile])
# should be listed 3x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 1 ", " & " + self.testfile])
# should be listed 3x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 2 ", " & " + self.testfile])
self.admin.assert_icommand("irm -n 1 " + self.testfile) # try to remove one of the managed replicas
# should be listed 2x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 0 ", " & " + self.testfile])
# should not be listed
self.admin.assert_icommand_fail("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 1 ", " & " + self.testfile])
# should be listed 2x
self.admin.assert_icommand("ils -L " + self.testfile, 'STDOUT_SINGLELINE', [" 2 ", " & " + self.testfile])
@unittest.skip("--wlock has possible race condition due to Compound/Replication PDMO")
def test_local_iput_collision_with_wlock(self):
pass
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
def test_reliable_iput__ticket_2557(self):
# local setup
# break the second child resource
self.admin.assert_icommand("iadmin modresc unix2Resc path /nopes", 'STDOUT_SINGLELINE', "unix2RescVault")
filename = "reliableputfile.txt"
filepath = lib.create_local_testfile(filename)
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand_fail("iput " + filename, 'STDOUT_SINGLELINE', "put error") # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', "unix1Resc") # should be listed
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', "unix3Resc") # should be listed
# cleanup
oldvault = lib.get_irods_top_level_dir() + "/unix2RescVault"
self.admin.assert_icommand("iadmin modresc unix2Resc path " + oldvault, 'STDOUT_SINGLELINE', "/nopes")
def test_local_iput_with_force_and_destination_resource__ticket_1706(self):
# local setup
filename = "iputwithforceanddestination.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite test repl with different data
self.admin.assert_icommand("iput -f -R %s %s %s" % (self.testresc, doublefile, filename))
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + filename])
# default resource should have dirty copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " " + filename])
# default resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " " + filename])
# targeted resource should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " " + doublesize + " ", "& " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_update_replicas(self):
# local setup
filename = "updatereplicasfile.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("iadmin mkresc fourthresc unixfilesystem %s:/tmp/%s/fourthrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create fourth resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# replicate to third resource
self.admin.assert_icommand("irepl -R thirdresc " + filename)
# replicate to fourth resource
self.admin.assert_icommand("irepl -R fourthresc " + filename)
# repave overtop test resource
self.admin.assert_icommand("iput -f -R " + self.testresc + " " + doublefile + " " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -U " + filename) # update last replica
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a dirty copy
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irepl -aU " + filename) # update all replicas
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# should have a clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
self.admin.assert_icommand("iadmin rmresc fourthresc") # remove third resource
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_irepl_over_existing_second_replica__ticket_1705(self):
# local setup
filename = "secondreplicatest.txt"
filepath = lib.create_local_testfile(filename)
# assertions
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput -R " + self.testresc + " " + filename) # put file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate to default resource
self.admin.assert_icommand("irepl " + filename)
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# replicate overtop default resource
self.admin.assert_icommand("irepl " + filename)
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# replicate overtop test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_third_replica__ticket_1705(self):
# local setup
filename = "thirdreplicatest.txt"
filepath = lib.create_local_testfile(filename)
hostname = lib.get_hostname()
hostuser = getpass.getuser()
# assertions
self.admin.assert_icommand("iadmin mkresc thirdresc unixfilesystem %s:/tmp/%s/thirdrescVault" %
(hostname, hostuser), 'STDOUT_SINGLELINE', "Creating") # create third resource
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist") # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate to test resource
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate to third resource
self.admin.assert_icommand("irepl " + filename) # replicate overtop default resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename) # replicate overtop test resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
self.admin.assert_icommand("irepl -R thirdresc " + filename) # replicate overtop third resource
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # for debugging
# should not have a replica 5
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 5 ", " & " + filename])
# should not have a replica 6
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 6 ", " & " + filename])
self.admin.assert_icommand("irm -f " + filename) # cleanup file
self.admin.assert_icommand("iadmin rmresc thirdresc") # remove third resource
# local cleanup
os.remove(filepath)
def test_irepl_over_existing_bad_replica__ticket_1705(self):
# local setup
filename = "reploverwritebad.txt"
filepath = lib.create_local_testfile(filename)
doublefile = "doublefile.txt"
os.system("cat %s %s > %s" % (filename, filename, doublefile))
doublesize = str(os.stat(doublefile).st_size)
# assertions
# should not be listed
self.admin.assert_icommand("ils -L " + filename, 'STDERR_SINGLELINE', "does not exist")
# put file
self.admin.assert_icommand("iput " + filename)
# for debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# replicate to test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# for debugging
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', filename)
# overwrite default repl with different data
self.admin.assert_icommand("iput -f %s %s" % (doublefile, filename))
# default resource 1 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " & " + filename])
# default resource 1 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", " " + doublesize + " ", " & " + filename])
# default resource 2 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " & " + filename])
# default resource 2 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", " " + doublesize + " ", " & " + filename])
# default resource 3 should have clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " & " + filename])
# default resource 3 should have new double clean copy
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", " " + doublesize + " ", " & " + filename])
# test resource should not have doublesize file
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " " + filename])
# replicate back onto test resource
self.admin.assert_icommand("irepl -R " + self.testresc + " " + filename)
# test resource should have new clean doublesize file
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE',
[" 3 " + self.testresc, " " + doublesize + " ", " & " + filename])
# should not have a replica 4
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 4 ", " & " + filename])
# local cleanup
os.remove(filepath)
os.remove(doublefile)
def test_iput_with_purgec(self):
# local setup
filename = "purgecfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'w') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
# put file, but trim 'cache' copy (purgec) (backwards compatibility)
self.admin.assert_icommand("iput --purgec " + filename)
# should not be listed (trimmed first replica)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
# should be listed twice - replica 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename])
# should be listed twice - replica 3 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename])
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_iget_with_purgec(self):
# local setup
filename = "purgecgetfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'wb') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("iget -f --purgec " + filename) # get file and purge 'cached' replica
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed twice - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed twice - 2 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
def test_irepl_with_purgec(self):
# local setup
filename = "purgecreplfile.txt"
filepath = os.path.abspath(filename)
with open(filepath, 'wb') as f:
f.write("TESTFILE -- [" + filepath + "]")
# assertions
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', filename) # should not be listed
self.admin.assert_icommand("iput " + filename) # put file
self.admin.assert_icommand("irepl -R " + self.testresc + " --purgec " + filename) # replicate to test resource
# should not be listed (trimmed)
self.admin.assert_icommand_fail("ils -L " + filename, 'STDOUT_SINGLELINE', [" 0 ", filename])
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 1 ", filename]) # should be listed 3x - 1 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 2 ", filename]) # should be listed 3x - 2 of 3
self.admin.assert_icommand("ils -L " + filename, 'STDOUT_SINGLELINE', [" 3 ", filename]) # should be listed 3x - 3 of 3
# local cleanup
output = commands.getstatusoutput('rm ' + filepath)
class Test_Resource_MultiLayered(ChunkyDevTest, ResourceSuite, unittest.TestCase):
def setUp(self):
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin modresc demoResc name origResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
admin_session.assert_icommand("iadmin mkresc demoResc passthru", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin mkresc pass2Resc passthru", 'STDOUT_SINGLELINE', 'passthru')
admin_session.assert_icommand("iadmin mkresc rrResc roundrobin", 'STDOUT_SINGLELINE', 'roundrobin')
admin_session.assert_icommand("iadmin mkresc unix1Resc 'unixfilesystem' " + configuration.HOSTNAME_1 + ":" +
lib.get_irods_top_level_dir() + "/unix1RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix2Resc 'unixfilesystem' " + configuration.HOSTNAME_2 + ":" +
lib.get_irods_top_level_dir() + "/unix2RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin mkresc unix3Resc 'unixfilesystem' " + configuration.HOSTNAME_3 + ":" +
lib.get_irods_top_level_dir() + "/unix3RescVault", 'STDOUT_SINGLELINE', 'unixfilesystem')
admin_session.assert_icommand("iadmin addchildtoresc demoResc pass2Resc")
admin_session.assert_icommand("iadmin addchildtoresc pass2Resc rrResc")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unix1Resc")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unix2Resc")
admin_session.assert_icommand("iadmin addchildtoresc rrResc unix3Resc")
super(Test_Resource_MultiLayered, self).setUp()
def tearDown(self):
super(Test_Resource_MultiLayered, self).tearDown()
with lib.make_session_for_existing_admin() as admin_session:
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unix3Resc")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unix2Resc")
admin_session.assert_icommand("iadmin rmchildfromresc rrResc unix1Resc")
admin_session.assert_icommand("iadmin rmchildfromresc pass2Resc rrResc")
admin_session.assert_icommand("iadmin rmchildfromresc demoResc pass2Resc")
admin_session.assert_icommand("iadmin rmresc unix3Resc")
admin_session.assert_icommand("iadmin rmresc unix2Resc")
admin_session.assert_icommand("iadmin rmresc unix1Resc")
admin_session.assert_icommand("iadmin rmresc rrResc")
admin_session.assert_icommand("iadmin rmresc pass2Resc")
admin_session.assert_icommand("iadmin rmresc demoResc")
admin_session.assert_icommand("iadmin modresc origResc name demoResc", 'STDOUT_SINGLELINE', 'rename', stdin_string='yes\n')
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix1RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix2RescVault", ignore_errors=True)
shutil.rmtree(lib.get_irods_top_level_dir() + "/unix3RescVault", ignore_errors=True)
@unittest.skip("EMPTY_RESC_PATH - no vault path for coordinating resources")
def test_ireg_as_rodsuser_in_vault(self):
pass
| bsd-3-clause |
dreal/dreal | benchmarks/network/thermostat/thermostat-double-i-p.py | 11 | 3580 |
from gen import *
##########
# shared #
##########
flow_var[0] = """
(declare-fun tau () Real)
"""
flow_dec[0] = """
(define-ode flow_1 ((= d/dt[tau] 1)))
"""
state_dec[0] = """
(declare-fun time_{0} () Real)
(declare-fun tau_{0}_0 () Real)
(declare-fun tau_{0}_t () Real)
"""
state_val[0] = """
(assert (<= 0 time_{0})) (assert (<= time_{0} 1))
(assert (<= 0 tau_{0}_0)) (assert (<= tau_{0}_0 1))
(assert (<= 0 tau_{0}_t)) (assert (<= tau_{0}_t 1))
(assert (and (not (and (= mode_1_{0} 1) (= mode_1_{0} 2)))
(not (and (= mode_2_{0} 1) (= mode_2_{0} 2)))))
"""
cont_cond[0] = ["""
(assert (and (>= tau_{0}_0 0) (<= tau_{0}_0 1)
(>= tau_{0}_t 0) (<= tau_{0}_t 1)
(forall_t 1 [0 time_{0}] (>= tau_{0}_t 0))
(forall_t 2 [0 time_{0}] (<= tau_{0}_t 1))))
(assert (and (= [x1_{0}_t x2_{0}_t tau_{0}_t]
(pintegral 0. time_{0}
[x1_{0}_0 x2_{0}_0 tau_{0}_0]
[holder_{1} holder_{2} holder_{3}]))
(connect holder_{3} flow_1)))"""]
jump_cond[0] = ["""
(assert (and (= tau_{0}_t 1) (= tau_{1}_0 0)))"""]
################
# thermostat 1 #
################
flow_var[1] = """
(declare-fun x1 () Real)
"""
flow_dec[1] = """
(define-ode flow_2 ((= d/dt[x1] (* 0.015 (- 100 (+ (* (- 1 0.01) x1) (* 0.01 x2)))))))
(define-ode flow_3 ((= d/dt[x1] (* -0.015 (+ (* (- 1 0.01) x1) (* 0.01 x2))))))
"""
state_dec[1] = """
(declare-fun mode_1_{0} () Int)
(declare-fun x1_{0}_0 () Real)
(declare-fun x1_{0}_t () Real)
"""
state_val[1] = """
(assert (<= -20 x1_{0}_0)) (assert (<= x1_{0}_0 100))
(assert (<= -20 x1_{0}_t)) (assert (<= x1_{0}_t 100))
"""
cont_cond[1] = ["""
(assert (or (and (= mode_1_{0} 2) (connect holder_{1} flow_2))
(and (= mode_1_{0} 1) (connect holder_{1} flow_3))))
(assert (not (and (connect holder_{1} flow_2) (connect holder_{1} flow_3))))"""]
jump_cond[1] = ["""
(assert (and (= x1_{1}_0 x1_{0}_t)))
(assert (or (and (<= x1_{0}_t 20) (= mode_1_{1} 2))
(and (> x1_{0}_t 20) (= mode_1_{1} 1))))"""]
################
# thermostat 2 #
################
flow_var[2] = """
(declare-fun x2 () Real)
"""
flow_dec[2] = """
(define-ode flow_4 ((= d/dt[x2] (* 0.045 (- 200 (+ (* (- 1 0.01) x2) (* 0.01 x1)))))))
(define-ode flow_5 ((= d/dt[x2] (* -0.045 (+ (* (- 1 0.01) x2) (* 0.01 x1))))))
"""
state_dec[2] = """
(declare-fun mode_2_{0} () Int)
(declare-fun x2_{0}_0 () Real)
(declare-fun x2_{0}_t () Real)
"""
state_val[2] = """
(assert (<= -20 x2_{0}_0)) (assert (<= x2_{0}_0 100))
(assert (<= -20 x2_{0}_t)) (assert (<= x2_{0}_t 100))
"""
cont_cond[2] = ["""
(assert (or (and (= mode_2_{0} 2) (connect holder_{2} flow_4))
(and (= mode_2_{0} 1) (connect holder_{2} flow_5))))
(assert (not (and (connect holder_{2} flow_4) (connect holder_{2} flow_5))))"""]
jump_cond[2] = ["""
(assert (and (= x2_{1}_0 x2_{0}_t)))
(assert (or (and (<= x2_{0}_t 20) (= mode_2_{1} 2))
(and (> x2_{0}_t 20) (= mode_2_{1} 1))))"""]
#############
# Init/Goal #
#############
init_cond = """
(assert (= tau_{0}_0 0))
(assert (= mode_1_{0} 2))
(assert (and (>= x1_{0}_0 (- 20 1)) (<= x1_{0}_0 (+ 20 1))))
(assert (= mode_2_{0} 2))
(assert (and (>= x2_{0}_0 (- 20 1)) (<= x2_{0}_0 (+ 20 1))))
"""
goal_cond = """
(assert (or (< x1_{0}_t (- 20 5)) (> x1_{0}_t (+ 20 5))))
(assert (or (< x2_{0}_t (- 20 5)) (> x2_{0}_t (+ 20 5))))
"""
import sys
try:
bound = int(sys.argv[1])
except:
print("Usage:", sys.argv[0], "<Bound>")
else:
generate(bound, 1, [0,1,2], 3, init_cond, goal_cond)
| gpl-2.0 |
fingeronthebutton/robotframework | src/robot/variables/variables.py | 20 | 2701 | # Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.utils import is_list_like
from .filesetter import VariableFileSetter
from .finders import VariableFinder
from .replacer import VariableReplacer
from .store import VariableStore
from .tablesetter import VariableTableSetter
class Variables(object):
"""Represents a set of variables.
Contains methods for replacing variables in lists, scalars, and strings.
On top of ${scalar}, @{list} and &{dict} variables, these methods also
handle %{environment} variables.
"""
def __init__(self):
self.store = VariableStore(self)
self._replacer = VariableReplacer(self)
self._finder = VariableFinder(self.store)
def __setitem__(self, name, value):
self.store.add(name, value)
def __getitem__(self, name):
return self._finder.find(name)
def __contains__(self, name):
return name in self.store
def resolve_delayed(self):
self.store.resolve_delayed()
def replace_list(self, items, replace_until=None):
if not is_list_like(items):
raise ValueError("'replace_list' requires list-like input.")
return self._replacer.replace_list(items, replace_until)
def replace_scalar(self, item):
return self._replacer.replace_scalar(item)
def replace_string(self, item, ignore_errors=False):
return self._replacer.replace_string(item, ignore_errors)
def set_from_file(self, path_or_variables, args=None, overwrite=False):
setter = VariableFileSetter(self.store)
return setter.set(path_or_variables, args, overwrite)
def set_from_variable_table(self, variables, overwrite=False):
setter = VariableTableSetter(self.store)
setter.set(variables, overwrite)
def clear(self):
self.store.clear()
def copy(self):
variables = Variables()
variables.store.data = self.store.data.copy()
return variables
def update(self, variables):
self.store.update(variables.store)
def as_dict(self, decoration=True):
return self.store.as_dict(decoration=decoration)
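# A minimal usage sketch (not part of the library API; the variable names are
# illustrative only, and it assumes the decorated '${...}'/'@{...}' name forms
# described in the class docstring). Wrapped in a function that is never
# called, so importing this module stays side-effect free.
def _example_variables_usage():
    variables = Variables()
    variables['${GREETING}'] = 'Hello'
    variables['@{NAMES}'] = ['Alice', 'Bob']
    # Scalar/string replacement resolves '${...}' references in place.
    message = variables.replace_string('${GREETING}, world!')
    # List replacement expands '@{...}' items into the surrounding list.
    items = variables.replace_list(['${GREETING}', '@{NAMES}'])
    return message, items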
| apache-2.0 |
ashleyh/zoo | callgraph/clang/cindex.py | 1 | 42680 | #===- cindex.py - Python Indexing Library Bindings -----------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
r"""
Clang Indexing Library Bindings
===============================
This module provides an interface to the Clang indexing library. It is a
low-level interface to the indexing library which attempts to match the Clang
API directly while also being "pythonic". Notable differences from the C API
are:
* string results are returned as Python strings, not CXString objects.
* null cursors are translated to None.
* access to child cursors is done via iteration, not visitation.
The major indexing objects are:
Index
The top-level object which manages some global library state.
TranslationUnit
High-level object encapsulating the AST for a single translation unit. These
can be loaded from .ast files or parsed on the fly.
Cursor
Generic object for representing a node in the AST.
SourceRange, SourceLocation, and File
Objects representing information about the input source.
Most object information is exposed using properties, when the underlying API
call is efficient.
"""
# TODO
# ====
#
# o API support for invalid translation units. Currently we can't even get the
# diagnostics on failure because they refer to locations in an object that
# will have been invalidated.
#
# o fix memory management issues (currently client must hold on to index and
# translation unit, or risk crashes).
#
# o expose code completion APIs.
#
# o cleanup ctypes wrapping, would be nice to separate the ctypes details more
# clearly, and hide from the external interface (i.e., help(cindex)).
#
# o implement additional SourceLocation, SourceRange, and File methods.
from ctypes import *
def get_cindex_library():
# FIXME: It's probably not the case that the library is actually found in
# this location. We need a better system of identifying and loading the
# CIndex library. It could be on path or elsewhere, or versioned, etc.
import platform
name = platform.system()
if name == 'Darwin':
return cdll.LoadLibrary('libclang.dylib')
elif name == 'Windows':
return cdll.LoadLibrary('libclang.dll')
else:
return cdll.LoadLibrary('libclang.so')
# ctypes doesn't implicitly convert c_void_p to the appropriate wrapper
# object. This is a problem, because it means that from_parameter will see an
# integer and pass the wrong value on platforms where int != void*. Work around
# this by marshalling object arguments as void**.
c_object_p = POINTER(c_void_p)
lib = get_cindex_library()
### Structures and Utility Classes ###
class _CXString(Structure):
"""Helper for transforming CXString results."""
_fields_ = [("spelling", c_char_p), ("free", c_int)]
def __del__(self):
_CXString_dispose(self)
@staticmethod
def from_result(res, fn, args):
assert isinstance(res, _CXString)
return _CXString_getCString(res)
class SourceLocation(Structure):
"""
A SourceLocation represents a particular location within a source file.
"""
_fields_ = [("ptr_data", c_void_p * 2), ("int_data", c_uint)]
_data = None
def _get_instantiation(self):
if self._data is None:
f, l, c, o = c_object_p(), c_uint(), c_uint(), c_uint()
SourceLocation_loc(self, byref(f), byref(l), byref(c), byref(o))
f = File(f) if f else None
self._data = (f, int(l.value), int(c.value), int(o.value))
return self._data
@property
def file(self):
"""Get the file represented by this source location."""
return self._get_instantiation()[0]
@property
def line(self):
"""Get the line represented by this source location."""
return self._get_instantiation()[1]
@property
def column(self):
"""Get the column represented by this source location."""
return self._get_instantiation()[2]
@property
def offset(self):
"""Get the file offset represented by this source location."""
return self._get_instantiation()[3]
def __repr__(self):
return "<SourceLocation file %r, line %r, column %r>" % (
self.file.name if self.file else None, self.line, self.column)
class SourceRange(Structure):
"""
A SourceRange describes a range of source locations within the source
code.
"""
_fields_ = [
("ptr_data", c_void_p * 2),
("begin_int_data", c_uint),
("end_int_data", c_uint)]
# FIXME: Eliminate this and make normal constructor? Requires hiding ctypes
# object.
@staticmethod
def from_locations(start, end):
return SourceRange_getRange(start, end)
@property
def start(self):
"""
Return a SourceLocation representing the first character within a
source range.
"""
return SourceRange_start(self)
@property
def end(self):
"""
Return a SourceLocation representing the last character within a
source range.
"""
return SourceRange_end(self)
def __repr__(self):
return "<SourceRange start %r, end %r>" % (self.start, self.end)
class Diagnostic(object):
"""
A Diagnostic is a single instance of a Clang diagnostic. It includes the
diagnostic severity, the message, the location the diagnostic occurred, as
well as additional source ranges and associated fix-it hints.
"""
Ignored = 0
Note = 1
Warning = 2
Error = 3
Fatal = 4
def __init__(self, ptr):
self.ptr = ptr
def __del__(self):
_clang_disposeDiagnostic(self)
@property
def severity(self):
return _clang_getDiagnosticSeverity(self)
@property
def location(self):
return _clang_getDiagnosticLocation(self)
@property
def spelling(self):
return _clang_getDiagnosticSpelling(self)
@property
def ranges(self):
class RangeIterator:
def __init__(self, diag):
self.diag = diag
def __len__(self):
return int(_clang_getDiagnosticNumRanges(self.diag))
def __getitem__(self, key):
if (key >= len(self)):
raise IndexError
return _clang_getDiagnosticRange(self.diag, key)
return RangeIterator(self)
@property
def fixits(self):
class FixItIterator:
def __init__(self, diag):
self.diag = diag
def __len__(self):
return int(_clang_getDiagnosticNumFixIts(self.diag))
def __getitem__(self, key):
range = SourceRange()
value = _clang_getDiagnosticFixIt(self.diag, key, byref(range))
if len(value) == 0:
raise IndexError
return FixIt(range, value)
return FixItIterator(self)
def __repr__(self):
return "<Diagnostic severity %r, location %r, spelling %r>" % (
self.severity, self.location, self.spelling)
def from_param(self):
return self.ptr
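# A short consumption sketch for the Diagnostic API above (illustrative only;
# it assumes 'tu' is a TranslationUnit, a class defined later in this module).
# Never called here, so it has no import-time effect.
def _example_report_diagnostics(tu):
    for diag in tu.diagnostics:
        print diag.severity, diag.location, diag.spelling
        for fixit in diag.fixits:
            # Each FixIt pairs a source range with its replacement text.
            print '  fix-it:', fixit.range, '->', fixit.value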
class FixIt(object):
"""
A FixIt represents a transformation to be applied to the source to
"fix-it". The fix-it shouldbe applied by replacing the given source range
with the given value.
"""
def __init__(self, range, value):
self.range = range
self.value = value
def __repr__(self):
return "<FixIt range %r, value %r>" % (self.range, self.value)
### Cursor Kinds ###
class CursorKind(object):
"""
A CursorKind describes the kind of entity that a cursor points to.
"""
# The unique kind objects, indexed by id.
_kinds = []
_name_map = None
def __init__(self, value):
if value >= len(CursorKind._kinds):
CursorKind._kinds += [None] * (value - len(CursorKind._kinds) + 1)
if CursorKind._kinds[value] is not None:
raise ValueError,'CursorKind already loaded'
self.value = value
CursorKind._kinds[value] = self
CursorKind._name_map = None
def from_param(self):
return self.value
@property
def name(self):
"""Get the enumeration name of this cursor kind."""
if self._name_map is None:
self._name_map = {}
for key,value in CursorKind.__dict__.items():
if isinstance(value,CursorKind):
self._name_map[value] = key
return self._name_map[self]
@staticmethod
def from_id(id):
if id >= len(CursorKind._kinds) or CursorKind._kinds[id] is None:
raise ValueError,'Unknown cursor kind'
return CursorKind._kinds[id]
@staticmethod
def get_all_kinds():
"""Return all CursorKind enumeration instances."""
return filter(None, CursorKind._kinds)
def is_declaration(self):
"""Test if this is a declaration kind."""
return CursorKind_is_decl(self)
def is_reference(self):
"""Test if this is a reference kind."""
return CursorKind_is_ref(self)
def is_expression(self):
"""Test if this is an expression kind."""
return CursorKind_is_expr(self)
def is_statement(self):
"""Test if this is a statement kind."""
return CursorKind_is_stmt(self)
def is_invalid(self):
"""Test if this is an invalid kind."""
return CursorKind_is_inv(self)
def __repr__(self):
return 'CursorKind.%s' % (self.name,)
# FIXME: Is there a nicer way to expose this enumeration? We could potentially
# represent the nested structure, or even build a class hierarchy. The main
# things we want for sure are (a) simple external access to kinds, (b) a place
# to hang a description and name, (c) easy to keep in sync with Index.h.
###
# Declaration Kinds
# A declaration whose specific kind is not exposed via this interface.
#
# Unexposed declarations have the same operations as any other kind of
# declaration; one can extract their location information, spelling, find their
# definitions, etc. However, the specific kind of the declaration is not
# reported.
CursorKind.UNEXPOSED_DECL = CursorKind(1)
# A C or C++ struct.
CursorKind.STRUCT_DECL = CursorKind(2)
# A C or C++ union.
CursorKind.UNION_DECL = CursorKind(3)
# A C++ class.
CursorKind.CLASS_DECL = CursorKind(4)
# An enumeration.
CursorKind.ENUM_DECL = CursorKind(5)
# A field (in C) or non-static data member (in C++) in a struct, union, or C++
# class.
CursorKind.FIELD_DECL = CursorKind(6)
# An enumerator constant.
CursorKind.ENUM_CONSTANT_DECL = CursorKind(7)
# A function.
CursorKind.FUNCTION_DECL = CursorKind(8)
# A variable.
CursorKind.VAR_DECL = CursorKind(9)
# A function or method parameter.
CursorKind.PARM_DECL = CursorKind(10)
# An Objective-C @interface.
CursorKind.OBJC_INTERFACE_DECL = CursorKind(11)
# An Objective-C @interface for a category.
CursorKind.OBJC_CATEGORY_DECL = CursorKind(12)
# An Objective-C @protocol declaration.
CursorKind.OBJC_PROTOCOL_DECL = CursorKind(13)
# An Objective-C @property declaration.
CursorKind.OBJC_PROPERTY_DECL = CursorKind(14)
# An Objective-C instance variable.
CursorKind.OBJC_IVAR_DECL = CursorKind(15)
# An Objective-C instance method.
CursorKind.OBJC_INSTANCE_METHOD_DECL = CursorKind(16)
# An Objective-C class method.
CursorKind.OBJC_CLASS_METHOD_DECL = CursorKind(17)
# An Objective-C @implementation.
CursorKind.OBJC_IMPLEMENTATION_DECL = CursorKind(18)
# An Objective-C @implementation for a category.
CursorKind.OBJC_CATEGORY_IMPL_DECL = CursorKind(19)
# A typedef.
CursorKind.TYPEDEF_DECL = CursorKind(20)
# A C++ class method.
CursorKind.CXX_METHOD = CursorKind(21)
# A C++ namespace.
CursorKind.NAMESPACE = CursorKind(22)
# A linkage specification, e.g. 'extern "C"'.
CursorKind.LINKAGE_SPEC = CursorKind(23)
# A C++ constructor.
CursorKind.CONSTRUCTOR = CursorKind(24)
# A C++ destructor.
CursorKind.DESTRUCTOR = CursorKind(25)
# A C++ conversion function.
CursorKind.CONVERSION_FUNCTION = CursorKind(26)
# A C++ template type parameter
CursorKind.TEMPLATE_TYPE_PARAMETER = CursorKind(27)
# A C++ non-type template parameter.
CursorKind.TEMPLATE_NON_TYPE_PARAMETER = CursorKind(28)
# A C++ template template parameter.
CursorKind.TEMPLATE_TEMPLATE_PARAMTER = CursorKind(29)
# A C++ function template.
CursorKind.FUNCTION_TEMPLATE = CursorKind(30)
# A C++ class template.
CursorKind.CLASS_TEMPLATE = CursorKind(31)
# A C++ class template partial specialization.
CursorKind.CLASS_TEMPLATE_PARTIAL_SPECIALIZATION = CursorKind(32)
# A C++ namespace alias declaration.
CursorKind.NAMESPACE_ALIAS = CursorKind(33)
# A C++ using directive
CursorKind.USING_DIRECTIVE = CursorKind(34)
# A C++ using declaration
CursorKind.USING_DECLARATION = CursorKind(35)
###
# Reference Kinds
CursorKind.OBJC_SUPER_CLASS_REF = CursorKind(40)
CursorKind.OBJC_PROTOCOL_REF = CursorKind(41)
CursorKind.OBJC_CLASS_REF = CursorKind(42)
# A reference to a type declaration.
#
# A type reference occurs anywhere where a type is named but not
# declared. For example, given:
# typedef unsigned size_type;
# size_type size;
#
# The typedef is a declaration of size_type (CXCursor_TypedefDecl),
# while the type of the variable "size" is referenced. The cursor
# referenced by the type of size is the typedef for size_type.
CursorKind.TYPE_REF = CursorKind(43)
CursorKind.CXX_BASE_SPECIFIER = CursorKind(44)
# A reference to a class template, function template, template
# template parameter, or class template partial specialization.
CursorKind.TEMPLATE_REF = CursorKind(45)
# A reference to a namespace or namespace alias.
CursorKind.NAMESPACE_REF = CursorKind(46)
# A reference to a member of a struct, union, or class that occurs in
# some non-expression context, e.g., a designated initializer.
CursorKind.MEMBER_REF = CursorKind(47)
# A reference to a labeled statement.
CursorKind.LABEL_REF = CursorKind(48)
# A reference to a set of overloaded functions or function templates
# that has not yet been resolved to a specific function or function template.
CursorKind.OVERLOADED_DECL_REF = CursorKind(49)
###
# Invalid/Error Kinds
CursorKind.INVALID_FILE = CursorKind(70)
CursorKind.NO_DECL_FOUND = CursorKind(71)
CursorKind.NOT_IMPLEMENTED = CursorKind(72)
CursorKind.INVALID_CODE = CursorKind(73)
###
# Expression Kinds
# An expression whose specific kind is not exposed via this interface.
#
# Unexposed expressions have the same operations as any other kind of
# expression; one can extract their location information, spelling, children,
# etc. However, the specific kind of the expression is not reported.
CursorKind.UNEXPOSED_EXPR = CursorKind(100)
# An expression that refers to some value declaration, such as a function,
# variable, or enumerator.
CursorKind.DECL_REF_EXPR = CursorKind(101)
# An expression that refers to a member of a struct, union, class, Objective-C
# class, etc.
CursorKind.MEMBER_REF_EXPR = CursorKind(102)
# An expression that calls a function.
CursorKind.CALL_EXPR = CursorKind(103)
# An expression that sends a message to an Objective-C object or class.
CursorKind.OBJC_MESSAGE_EXPR = CursorKind(104)
# An expression that represents a block literal.
CursorKind.BLOCK_EXPR = CursorKind(105)
# A statement whose specific kind is not exposed via this interface.
#
# Unexposed statements have the same operations as any other kind of statement;
# one can extract their location information, spelling, children, etc. However,
# the specific kind of the statement is not reported.
CursorKind.UNEXPOSED_STMT = CursorKind(200)
# A labelled statement in a function.
CursorKind.LABEL_STMT = CursorKind(201)
###
# Other Kinds
# Cursor that represents the translation unit itself.
#
# The translation unit cursor exists primarily to act as the root cursor for
# traversing the contents of a translation unit.
CursorKind.TRANSLATION_UNIT = CursorKind(300)
###
# Attributes
# An attribute whose specific kind is not exposed via this interface
CursorKind.UNEXPOSED_ATTR = CursorKind(400)
CursorKind.IB_ACTION_ATTR = CursorKind(401)
CursorKind.IB_OUTLET_ATTR = CursorKind(402)
CursorKind.IB_OUTLET_COLLECTION_ATTR = CursorKind(403)
###
# Preprocessing
CursorKind.PREPROCESSING_DIRECTIVE = CursorKind(500)
CursorKind.MACRO_DEFINITION = CursorKind(501)
CursorKind.MACRO_INSTANTIATION = CursorKind(502)
CursorKind.INCLUSION_DIRECTIVE = CursorKind(503)
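# A small sketch of the kind registry defined above (illustrative only, and
# never called at import time).
def _example_inspect_kinds():
    # from_id round-trips to the unique kind object registered for that id.
    assert CursorKind.from_id(8) is CursorKind.FUNCTION_DECL
    # Partition the registered kinds using the predicate helpers.
    return [k for k in CursorKind.get_all_kinds() if k.is_declaration()]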
### Cursors ###
class Cursor(Structure):
"""
The Cursor class represents a reference to an element within the AST. It
acts as a kind of iterator.
"""
_fields_ = [("_kind_id", c_int), ("data", c_void_p * 3)]
def __eq__(self, other):
return Cursor_eq(self, other)
def __ne__(self, other):
return not Cursor_eq(self, other)
def is_definition(self):
"""
Returns true if the declaration pointed at by the cursor is also a
definition of that entity.
"""
return Cursor_is_def(self)
def get_definition(self):
"""
If the cursor is a reference to a declaration or a declaration of
some entity, return a cursor that points to the definition of that
entity.
"""
# TODO: Should probably check that this is either a reference or
# declaration prior to issuing the lookup.
return Cursor_def(self)
def get_usr(self):
"""Return the Unified Symbol Resultion (USR) for the entity referenced
by the given cursor (or None).
A Unified Symbol Resolution (USR) is a string that identifies a
particular entity (function, class, variable, etc.) within a
program. USRs can be compared across translation units to determine,
e.g., when references in one translation refer to an entity defined in
another translation unit."""
return Cursor_usr(self)
@property
def kind(self):
"""Return the kind of this cursor."""
return CursorKind.from_id(self._kind_id)
@property
def spelling(self):
"""Return the spelling of the entity pointed at by the cursor."""
if not self.kind.is_declaration():
# FIXME: clang_getCursorSpelling should be fixed to not assert on
# this, for consistency with clang_getCursorUSR.
return None
return Cursor_spelling(self)
@property
def location(self):
"""
Return the source location (the starting character) of the entity
pointed at by the cursor.
"""
return Cursor_loc(self)
@property
def extent(self):
"""
Return the source range (the range of text) occupied by the entity
pointed at by the cursor.
"""
return Cursor_extent(self)
def get_children(self):
"""Return an iterator for accessing the children of this cursor."""
# FIXME: Expose iteration from CIndex, PR6125.
def visitor(child, parent, children):
# FIXME: Document this assertion in API.
# FIXME: There should just be an isNull method.
assert child != Cursor_null()
children.append(child)
return 1 # continue
children = []
Cursor_visit(self, Cursor_visit_callback(visitor), children)
return iter(children)
@staticmethod
def from_result(res, fn, args):
assert isinstance(res, Cursor)
# FIXME: There should just be an isNull method.
if res == Cursor_null():
return None
return res
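# Sketch: chasing references to their definitions and collecting USRs, per the
# get_definition/get_usr docstrings above (illustrative only; 'root' is assumed
# to be a cursor such as TranslationUnit.cursor, and the function is never
# called here).
def _example_collect_definitions(root):
    usrs = {}
    for child in root.get_children():
        defn = child.get_definition()
        if defn is not None and defn.is_definition():
            # A USR identifies the same entity across translation units.
            usrs[defn.get_usr()] = defn.location
    return usrs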
## CIndex Objects ##
# CIndex objects (derived from ClangObject) are essentially lightweight
# wrappers attached to some underlying object, which is exposed via CIndex as
# a void*.
class ClangObject(object):
"""
A helper for Clang objects. This class helps act as an intermediary for
the ctypes library and the Clang CIndex library.
"""
def __init__(self, obj):
assert isinstance(obj, c_object_p) and obj
self.obj = self._as_parameter_ = obj
def from_param(self):
return self._as_parameter_
class _CXUnsavedFile(Structure):
"""Helper for passing unsaved file arguments."""
_fields_ = [("name", c_char_p), ("contents", c_char_p), ('length', c_ulong)]
## Diagnostic Conversion ##
_clang_getNumDiagnostics = lib.clang_getNumDiagnostics
_clang_getNumDiagnostics.argtypes = [c_object_p]
_clang_getNumDiagnostics.restype = c_uint
_clang_getDiagnostic = lib.clang_getDiagnostic
_clang_getDiagnostic.argtypes = [c_object_p, c_uint]
_clang_getDiagnostic.restype = c_object_p
_clang_disposeDiagnostic = lib.clang_disposeDiagnostic
_clang_disposeDiagnostic.argtypes = [Diagnostic]
_clang_getDiagnosticSeverity = lib.clang_getDiagnosticSeverity
_clang_getDiagnosticSeverity.argtypes = [Diagnostic]
_clang_getDiagnosticSeverity.restype = c_int
_clang_getDiagnosticLocation = lib.clang_getDiagnosticLocation
_clang_getDiagnosticLocation.argtypes = [Diagnostic]
_clang_getDiagnosticLocation.restype = SourceLocation
_clang_getDiagnosticSpelling = lib.clang_getDiagnosticSpelling
_clang_getDiagnosticSpelling.argtypes = [Diagnostic]
_clang_getDiagnosticSpelling.restype = _CXString
_clang_getDiagnosticSpelling.errcheck = _CXString.from_result
_clang_getDiagnosticNumRanges = lib.clang_getDiagnosticNumRanges
_clang_getDiagnosticNumRanges.argtypes = [Diagnostic]
_clang_getDiagnosticNumRanges.restype = c_uint
_clang_getDiagnosticRange = lib.clang_getDiagnosticRange
_clang_getDiagnosticRange.argtypes = [Diagnostic, c_uint]
_clang_getDiagnosticRange.restype = SourceRange
_clang_getDiagnosticNumFixIts = lib.clang_getDiagnosticNumFixIts
_clang_getDiagnosticNumFixIts.argtypes = [Diagnostic]
_clang_getDiagnosticNumFixIts.restype = c_uint
_clang_getDiagnosticFixIt = lib.clang_getDiagnosticFixIt
_clang_getDiagnosticFixIt.argtypes = [Diagnostic, c_uint, POINTER(SourceRange)]
_clang_getDiagnosticFixIt.restype = _CXString
_clang_getDiagnosticFixIt.errcheck = _CXString.from_result
###
class CompletionChunk:
class Kind:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __repr__(self):
return "<ChunkKind: %s>" % self
def __init__(self, completionString, key):
self.cs = completionString
self.key = key
def __repr__(self):
return "{'" + self.spelling + "', " + str(self.kind) + "}"
@property
def spelling(self):
return _clang_getCompletionChunkText(self.cs, self.key).spelling
@property
def kind(self):
res = _clang_getCompletionChunkKind(self.cs, self.key)
return completionChunkKindMap[res]
@property
def string(self):
res = _clang_getCompletionChunkCompletionString(self.cs, self.key)
if (res):
return CompletionString(res)
else:
return None
def isKindOptional(self):
return self.kind == completionChunkKindMap[0]
def isKindTypedText(self):
return self.kind == completionChunkKindMap[1]
def isKindPlaceHolder(self):
return self.kind == completionChunkKindMap[3]
def isKindInformative(self):
return self.kind == completionChunkKindMap[4]
def isKindResultType(self):
return self.kind == completionChunkKindMap[15]
completionChunkKindMap = {
0: CompletionChunk.Kind("Optional"),
1: CompletionChunk.Kind("TypedText"),
2: CompletionChunk.Kind("Text"),
3: CompletionChunk.Kind("Placeholder"),
4: CompletionChunk.Kind("Informative"),
5: CompletionChunk.Kind("CurrentParameter"),
6: CompletionChunk.Kind("LeftParen"),
7: CompletionChunk.Kind("RightParen"),
8: CompletionChunk.Kind("LeftBracket"),
9: CompletionChunk.Kind("RightBracket"),
10: CompletionChunk.Kind("LeftBrace"),
11: CompletionChunk.Kind("RightBrace"),
12: CompletionChunk.Kind("LeftAngle"),
13: CompletionChunk.Kind("RightAngle"),
14: CompletionChunk.Kind("Comma"),
15: CompletionChunk.Kind("ResultType"),
16: CompletionChunk.Kind("Colon"),
17: CompletionChunk.Kind("SemiColon"),
18: CompletionChunk.Kind("Equal"),
19: CompletionChunk.Kind("HorizontalSpace"),
20: CompletionChunk.Kind("VerticalSpace")}
class CompletionString(ClangObject):
class Availability:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
def __repr__(self):
return "<Availability: %s>" % self
def __len__(self):
return _clang_getNumCompletionChunks(self.obj)
def __getitem__(self, key):
if len(self) <= key:
raise IndexError
return CompletionChunk(self.obj, key)
@property
def priority(self):
return _clang_getCompletionPriority(self.obj)
@property
def availability(self):
res = _clang_getCompletionAvailability(self.obj)
return availabilityKinds[res]
def __repr__(self):
return " | ".join([str(a) for a in self]) \
+ " || Priority: " + str(self.priority) \
+ " || Availability: " + str(self.availability)
availabilityKinds = {
0: CompletionChunk.Kind("Available"),
1: CompletionChunk.Kind("Deprecated"),
2: CompletionChunk.Kind("NotAvailable")}
class CodeCompletionResult(Structure):
_fields_ = [('cursorKind', c_int), ('completionString', c_object_p)]
def __repr__(self):
return str(CompletionString(self.completionString))
@property
def kind(self):
return CursorKind.from_id(self.cursorKind)
@property
def string(self):
return CompletionString(self.completionString)
class CCRStructure(Structure):
_fields_ = [('results', POINTER(CodeCompletionResult)),
('numResults', c_int)]
def __len__(self):
return self.numResults
def __getitem__(self, key):
if len(self) <= key:
raise IndexError
return self.results[key]
class CodeCompletionResults(ClangObject):
def __init__(self, ptr):
assert isinstance(ptr, POINTER(CCRStructure)) and ptr
self.ptr = self._as_parameter_ = ptr
def from_param(self):
return self._as_parameter_
def __del__(self):
CodeCompletionResults_dispose(self)
@property
def results(self):
return self.ptr.contents
@property
def diagnostics(self):
class DiagnosticsItr:
def __init__(self, ccr):
self.ccr= ccr
def __len__(self):
return int(_clang_codeCompleteGetNumDiagnostics(self.ccr))
def __getitem__(self, key):
return _clang_codeCompleteGetDiagnostic(self.ccr, key)
return DiagnosticsItr(self)
class Index(ClangObject):
"""
The Index type provides the primary interface to the Clang CIndex library,
primarily by providing an interface for reading and parsing translation
units.
"""
@staticmethod
def create(excludeDecls=False):
"""
Create a new Index.
Parameters:
excludeDecls -- Exclude local declarations from translation units.
"""
return Index(Index_create(excludeDecls, 0))
def __del__(self):
Index_dispose(self)
def read(self, path):
"""Load the translation unit from the given AST file."""
ptr = TranslationUnit_read(self, path)
return TranslationUnit(ptr) if ptr else None
def parse(self, path, args = [], unsaved_files = [], options = 0):
"""
Load the translation unit from the given source code file by running
clang and generating the AST before loading. Additional command line
parameters can be passed to clang via the args parameter.
In-memory contents for files can be provided by passing a list of pairs
as unsaved_files; the first item of each pair should be the filename to be
mapped and the second should be the contents to be substituted for that
file. The contents may be passed as strings or file objects.
"""
arg_array = 0
if len(args):
arg_array = (c_char_p * len(args))(* args)
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
if not isinstance(value, str):
raise TypeError,'Unexpected unsaved file contents.'
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = TranslationUnit_parse(self, path, arg_array, len(args),
unsaved_files_array, len(unsaved_files),
options)
return TranslationUnit(ptr) if ptr else None
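# A sketch of the unsaved-files protocol documented in Index.parse above
# (illustrative only; 'main.c' names an in-memory buffer, not an on-disk
# file, and the function is never called here).
def _example_parse_in_memory():
    index = Index.create()
    source = 'int main(void) { return 0; }\n'
    return index.parse('main.c', unsaved_files=[('main.c', source)])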
class TranslationUnit(ClangObject):
"""
The TranslationUnit class represents a source code translation unit and
provides read-only access to its top-level declarations.
"""
def __init__(self, ptr):
ClangObject.__init__(self, ptr)
def __del__(self):
TranslationUnit_dispose(self)
@property
def cursor(self):
"""Retrieve the cursor that represents the given translation unit."""
return TranslationUnit_cursor(self)
@property
def spelling(self):
"""Get the original translation unit source file name."""
return TranslationUnit_spelling(self)
def get_includes(self):
"""
Return an iterable sequence of FileInclusion objects that describe the
sequence of inclusions in a translation unit. The first object in
this sequence is always the input file. Note that this method will not
recursively iterate over header files included through precompiled
headers.
"""
def visitor(fobj, lptr, depth, includes):
loc = lptr.contents
includes.append(FileInclusion(loc.file, File(fobj), loc, depth))
# Automatically adapt CIndex/ctype pointers to python objects
includes = []
TranslationUnit_includes(self,
TranslationUnit_includes_callback(visitor),
includes)
return iter(includes)
@property
def diagnostics(self):
"""
Return an iterable (and indexable) object containing the diagnostics.
"""
class DiagIterator:
def __init__(self, tu):
self.tu = tu
def __len__(self):
return int(_clang_getNumDiagnostics(self.tu))
def __getitem__(self, key):
diag = _clang_getDiagnostic(self.tu, key)
if not diag:
raise IndexError
return Diagnostic(diag)
return DiagIterator(self)
def reparse(self, unsaved_files = [], options = 0):
"""
Reparse an already parsed translation unit.
In-memory contents for files can be provided by passing a list of pairs
as unsaved_files; the first item of each pair should be the filename to be
mapped and the second should be the contents to be substituted for that
file. The contents may be passed as strings or file objects.
"""
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
if not isinstance(value, str):
raise TypeError,'Unexpected unsaved file contents.'
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = TranslationUnit_reparse(self, len(unsaved_files),
unsaved_files_array,
options)
def codeComplete(self, path, line, column, unsaved_files = [], options = 0):
"""
Code complete in this translation unit.
In-memory contents for files can be provided by passing a list of pairs
as unsaved_files; the first item of each pair should be the filename to be
mapped and the second should be the contents to be substituted for that
file. The contents may be passed as strings or file objects.
"""
unsaved_files_array = 0
if len(unsaved_files):
unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))()
for i,(name,value) in enumerate(unsaved_files):
if not isinstance(value, str):
# FIXME: It would be great to support an efficient version
# of this, one day.
value = value.read()
if not isinstance(value, str):
raise TypeError,'Unexpected unsaved file contents.'
unsaved_files_array[i].name = name
unsaved_files_array[i].contents = value
unsaved_files_array[i].length = len(value)
ptr = TranslationUnit_codeComplete(self, path,
line, column,
unsaved_files_array,
len(unsaved_files),
options)
return CodeCompletionResults(ptr) if ptr else None
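# A sketch combining two TranslationUnit facilities defined above (illustrative
# only; the completion line/column values are hypothetical, and the function is
# never called here).
def _example_includes_and_completion(tu):
    for inclusion in tu.get_includes():
        # Depth 0 is the input file itself; deeper entries came from #include.
        print '  ' * inclusion.depth + inclusion.include.name
    results = tu.codeComplete(tu.spelling, 4, 1)
    if results is not None:
        for result in results.results:
            print result.kind, result.string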
class File(ClangObject):
"""
The File class represents a particular source file that is part of a
translation unit.
"""
@property
def name(self):
"""Return the complete file and path name of the file."""
return File_name(self)
@property
def time(self):
"""Return the last modification time of the file."""
return File_time(self)
class FileInclusion(object):
"""
The FileInclusion class represents the inclusion of one source file by
another via a '#include' directive or as the input file for the translation
unit. This class provides information about the included file, the including
file, the location of the '#include' directive and the depth of the included
file in the stack. Note that the input file has depth 0.
"""
def __init__(self, src, tgt, loc, depth):
self.source = src
self.include = tgt
self.location = loc
self.depth = depth
@property
def is_input_file(self):
"""True if the included file is the input file."""
return self.depth == 0
# Additional Functions and Types
# String Functions
_CXString_dispose = lib.clang_disposeString
_CXString_dispose.argtypes = [_CXString]
_CXString_getCString = lib.clang_getCString
_CXString_getCString.argtypes = [_CXString]
_CXString_getCString.restype = c_char_p
# Source Location Functions
SourceLocation_loc = lib.clang_getInstantiationLocation
SourceLocation_loc.argtypes = [SourceLocation, POINTER(c_object_p),
POINTER(c_uint), POINTER(c_uint),
POINTER(c_uint)]
# Source Range Functions
SourceRange_getRange = lib.clang_getRange
SourceRange_getRange.argtypes = [SourceLocation, SourceLocation]
SourceRange_getRange.restype = SourceRange
SourceRange_start = lib.clang_getRangeStart
SourceRange_start.argtypes = [SourceRange]
SourceRange_start.restype = SourceLocation
SourceRange_end = lib.clang_getRangeEnd
SourceRange_end.argtypes = [SourceRange]
SourceRange_end.restype = SourceLocation
# CursorKind Functions
CursorKind_is_decl = lib.clang_isDeclaration
CursorKind_is_decl.argtypes = [CursorKind]
CursorKind_is_decl.restype = bool
CursorKind_is_ref = lib.clang_isReference
CursorKind_is_ref.argtypes = [CursorKind]
CursorKind_is_ref.restype = bool
CursorKind_is_expr = lib.clang_isExpression
CursorKind_is_expr.argtypes = [CursorKind]
CursorKind_is_expr.restype = bool
CursorKind_is_stmt = lib.clang_isStatement
CursorKind_is_stmt.argtypes = [CursorKind]
CursorKind_is_stmt.restype = bool
CursorKind_is_inv = lib.clang_isInvalid
CursorKind_is_inv.argtypes = [CursorKind]
CursorKind_is_inv.restype = bool
# Cursor Functions
# TODO: Implement this function
Cursor_get = lib.clang_getCursor
Cursor_get.argtypes = [TranslationUnit, SourceLocation]
Cursor_get.restype = Cursor
Cursor_null = lib.clang_getNullCursor
Cursor_null.restype = Cursor
Cursor_usr = lib.clang_getCursorUSR
Cursor_usr.argtypes = [Cursor]
Cursor_usr.restype = _CXString
Cursor_usr.errcheck = _CXString.from_result
Cursor_is_def = lib.clang_isCursorDefinition
Cursor_is_def.argtypes = [Cursor]
Cursor_is_def.restype = bool
Cursor_def = lib.clang_getCursorDefinition
Cursor_def.argtypes = [Cursor]
Cursor_def.restype = Cursor
Cursor_def.errcheck = Cursor.from_result
Cursor_eq = lib.clang_equalCursors
Cursor_eq.argtypes = [Cursor, Cursor]
Cursor_eq.restype = c_uint
Cursor_spelling = lib.clang_getCursorSpelling
Cursor_spelling.argtypes = [Cursor]
Cursor_spelling.restype = _CXString
Cursor_spelling.errcheck = _CXString.from_result
Cursor_loc = lib.clang_getCursorLocation
Cursor_loc.argtypes = [Cursor]
Cursor_loc.restype = SourceLocation
Cursor_extent = lib.clang_getCursorExtent
Cursor_extent.argtypes = [Cursor]
Cursor_extent.restype = SourceRange
Cursor_ref = lib.clang_getCursorReferenced
Cursor_ref.argtypes = [Cursor]
Cursor_ref.restype = Cursor
Cursor_ref.errcheck = Cursor.from_result
Cursor_visit_callback = CFUNCTYPE(c_int, Cursor, Cursor, py_object)
Cursor_visit = lib.clang_visitChildren
Cursor_visit.argtypes = [Cursor, Cursor_visit_callback, py_object]
Cursor_visit.restype = c_uint
# Index Functions
Index_create = lib.clang_createIndex
Index_create.argtypes = [c_int, c_int]
Index_create.restype = c_object_p
Index_dispose = lib.clang_disposeIndex
Index_dispose.argtypes = [Index]
# Translation Unit Functions
TranslationUnit_read = lib.clang_createTranslationUnit
TranslationUnit_read.argtypes = [Index, c_char_p]
TranslationUnit_read.restype = c_object_p
TranslationUnit_parse = lib.clang_parseTranslationUnit
TranslationUnit_parse.argtypes = [Index, c_char_p, c_void_p,
c_int, c_void_p, c_int, c_int]
TranslationUnit_parse.restype = c_object_p
TranslationUnit_reparse = lib.clang_reparseTranslationUnit
TranslationUnit_reparse.argtypes = [TranslationUnit, c_int, c_void_p, c_int]
TranslationUnit_reparse.restype = c_int
TranslationUnit_codeComplete = lib.clang_codeCompleteAt
TranslationUnit_codeComplete.argtypes = [TranslationUnit, c_char_p, c_int,
c_int, c_void_p, c_int, c_int]
TranslationUnit_codeComplete.restype = POINTER(CCRStructure)
TranslationUnit_cursor = lib.clang_getTranslationUnitCursor
TranslationUnit_cursor.argtypes = [TranslationUnit]
TranslationUnit_cursor.restype = Cursor
TranslationUnit_cursor.errcheck = Cursor.from_result
TranslationUnit_spelling = lib.clang_getTranslationUnitSpelling
TranslationUnit_spelling.argtypes = [TranslationUnit]
TranslationUnit_spelling.restype = _CXString
TranslationUnit_spelling.errcheck = _CXString.from_result
TranslationUnit_dispose = lib.clang_disposeTranslationUnit
TranslationUnit_dispose.argtypes = [TranslationUnit]
TranslationUnit_includes_callback = CFUNCTYPE(None,
c_object_p,
POINTER(SourceLocation),
c_uint, py_object)
TranslationUnit_includes = lib.clang_getInclusions
TranslationUnit_includes.argtypes = [TranslationUnit,
TranslationUnit_includes_callback,
py_object]
# File Functions
File_name = lib.clang_getFileName
File_name.argtypes = [File]
File_name.restype = c_char_p
File_time = lib.clang_getFileTime
File_time.argtypes = [File]
File_time.restype = c_uint
# Code completion
CodeCompletionResults_dispose = lib.clang_disposeCodeCompleteResults
CodeCompletionResults_dispose.argtypes = [CodeCompletionResults]
_clang_codeCompleteGetNumDiagnostics = lib.clang_codeCompleteGetNumDiagnostics
_clang_codeCompleteGetNumDiagnostics.argtypes = [CodeCompletionResults]
_clang_codeCompleteGetNumDiagnostics.restype = c_int
_clang_codeCompleteGetDiagnostic = lib.clang_codeCompleteGetDiagnostic
_clang_codeCompleteGetDiagnostic.argtypes = [CodeCompletionResults, c_int]
_clang_codeCompleteGetDiagnostic.restype = Diagnostic
_clang_getCompletionChunkText = lib.clang_getCompletionChunkText
_clang_getCompletionChunkText.argtypes = [c_void_p, c_int]
_clang_getCompletionChunkText.restype = _CXString
_clang_getCompletionChunkKind = lib.clang_getCompletionChunkKind
_clang_getCompletionChunkKind.argtypes = [c_void_p, c_int]
_clang_getCompletionChunkKind.restype = c_int
_clang_getCompletionChunkCompletionString = lib.clang_getCompletionChunkCompletionString
_clang_getCompletionChunkCompletionString.argtypes = [c_void_p, c_int]
_clang_getCompletionChunkCompletionString.restype = c_object_p
_clang_getNumCompletionChunks = lib.clang_getNumCompletionChunks
_clang_getNumCompletionChunks.argtypes = [c_void_p]
_clang_getNumCompletionChunks.restype = c_int
_clang_getCompletionAvailability = lib.clang_getCompletionAvailability
_clang_getCompletionAvailability.argtypes = [c_void_p]
_clang_getCompletionAvailability.restype = c_int
_clang_getCompletionPriority = lib.clang_getCompletionPriority
_clang_getCompletionPriority.argtypes = [c_void_p]
_clang_getCompletionPriority.restype = c_int
###
__all__ = ['Index', 'TranslationUnit', 'Cursor', 'CursorKind',
'Diagnostic', 'FixIt', 'CodeCompletionResults', 'SourceRange',
'SourceLocation', 'File']
| gpl-3.0 |
blakfeld/ansible | lib/ansible/plugins/action/script.py | 15 | 4083 | # (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def run(self, tmp=None, task_vars=None):
        ''' handler for transferring a local script to the remote host and running it '''
if self._connection_info.check_mode:
return dict(skipped=True, msg='check mode not supported for this module')
if not tmp:
tmp = self._make_tmp_path()
creates = self._task.args.get('creates')
if creates:
            # Do not run the script if `creates=filename` was given and the
            # file already exists; this makes script executions idempotent.
result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
removes = self._task.args.get('removes')
if removes:
            # Do not run the script if `removes=filename` was given and the
            # file does not exist; this makes script executions idempotent.
result = self._execute_module(module_name='stat', module_args=dict(path=removes), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and not stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
        # The script name is the first item in the raw params; split it out
        # now so we know which file to transfer to the remote. Everything
        # else is an argument to the script and is appended to the remote
        # command later.
parts = self._task.args.get('_raw_params', '').strip().split()
source = parts[0]
args = ' '.join(parts[1:])
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
else:
source = self._loader.path_dwim(source)
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
self._connection.put_file(source, tmp_src)
sudoable = True
# set file permissions, more permissive when the copy is done as a different user
if self._connection_info.become and self._connection_info.become_user != 'root':
chmod_mode = 'a+rx'
sudoable = False
else:
chmod_mode = '+rx'
self._remote_chmod(tmp, chmod_mode, tmp_src, sudoable=sudoable)
# add preparation steps to one ssh roundtrip executing the script
env_string = self._compute_environment_string()
script_cmd = ' '.join([env_string, tmp_src, args])
result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=sudoable)
# clean up after
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
self._remove_tmp_path(tmp)
result['changed'] = True
return result
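# A hedged usage sketch of the creates/removes idempotence checks above
# (playbook task shown as a comment; the script path and marker file are
# hypothetical):
#
#   - name: initialize the app exactly once
#     script: setup.sh --init creates=/etc/myapp/.initialized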
| gpl-3.0 |
Y3K/django | tests/prefetch_related/models.py | 255 | 7972 | import uuid
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Basic tests
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=50, unique=True)
first_book = models.ForeignKey('Book', models.CASCADE, related_name='first_time_authors')
favorite_authors = models.ManyToManyField(
'self', through='FavoriteAuthors', symmetrical=False, related_name='favors_me')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
class AuthorWithAge(Author):
author = models.OneToOneField(Author, models.CASCADE, parent_link=True)
age = models.IntegerField()
class FavoriteAuthors(models.Model):
author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='i_like')
likes_author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='likes_me')
class Meta:
ordering = ['id']
@python_2_unicode_compatible
class AuthorAddress(models.Model):
author = models.ForeignKey(Author, models.CASCADE, to_field='name', related_name='addresses')
address = models.TextField()
class Meta:
ordering = ['id']
def __str__(self):
return self.address
@python_2_unicode_compatible
class Book(models.Model):
title = models.CharField(max_length=255)
authors = models.ManyToManyField(Author, related_name='books')
def __str__(self):
return self.title
class Meta:
ordering = ['id']
class BookWithYear(Book):
book = models.OneToOneField(Book, models.CASCADE, parent_link=True)
published_year = models.IntegerField()
aged_authors = models.ManyToManyField(
AuthorWithAge, related_name='books_with_year')
class Bio(models.Model):
author = models.OneToOneField(Author, models.CASCADE)
books = models.ManyToManyField(Book, blank=True)
@python_2_unicode_compatible
class Reader(models.Model):
name = models.CharField(max_length=50)
books_read = models.ManyToManyField(Book, related_name='read_by')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
class BookReview(models.Model):
book = models.ForeignKey(BookWithYear, models.CASCADE)
notes = models.TextField(null=True, blank=True)
# Models for default manager tests
class Qualification(models.Model):
name = models.CharField(max_length=10)
class Meta:
ordering = ['id']
class TeacherManager(models.Manager):
def get_queryset(self):
return super(TeacherManager, self).get_queryset().prefetch_related('qualifications')
@python_2_unicode_compatible
class Teacher(models.Model):
name = models.CharField(max_length=50)
qualifications = models.ManyToManyField(Qualification)
objects = TeacherManager()
def __str__(self):
return "%s (%s)" % (self.name, ", ".join(q.name for q in self.qualifications.all()))
class Meta:
ordering = ['id']
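# A hedged sketch: because TeacherManager prefetches qualifications in
# get_queryset(), iterating the default manager and formatting each teacher
# does not issue one qualifications query per row:
#
#   for teacher in Teacher.objects.all():  # one batch for qualifications
#       str(teacher)  # "name (qual1, qual2)" from the prefetch cache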
class Department(models.Model):
name = models.CharField(max_length=50)
teachers = models.ManyToManyField(Teacher)
class Meta:
ordering = ['id']
# GenericRelation/GenericForeignKey tests
@python_2_unicode_compatible
class TaggedItem(models.Model):
tag = models.SlugField()
content_type = models.ForeignKey(
ContentType,
models.CASCADE,
related_name="taggeditem_set2",
)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
created_by_ct = models.ForeignKey(
ContentType,
models.SET_NULL,
null=True,
related_name='taggeditem_set3',
)
created_by_fkey = models.PositiveIntegerField(null=True)
created_by = GenericForeignKey('created_by_ct', 'created_by_fkey',)
favorite_ct = models.ForeignKey(
ContentType,
models.SET_NULL,
null=True,
related_name='taggeditem_set4',
)
favorite_fkey = models.CharField(max_length=64, null=True)
favorite = GenericForeignKey('favorite_ct', 'favorite_fkey')
def __str__(self):
return self.tag
class Meta:
ordering = ['id']
class Bookmark(models.Model):
url = models.URLField()
tags = GenericRelation(TaggedItem, related_query_name='bookmarks')
favorite_tags = GenericRelation(TaggedItem,
content_type_field='favorite_ct',
object_id_field='favorite_fkey',
related_query_name='favorite_bookmarks')
class Meta:
ordering = ['id']
class Comment(models.Model):
comment = models.TextField()
# Content-object field
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_pk = models.TextField()
content_object = GenericForeignKey(ct_field="content_type", fk_field="object_pk")
class Meta:
ordering = ['id']
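# A brief usage sketch for the generic relations above (hedged; the objects
# created here are illustrative only):
#
#   bookmark = Bookmark.objects.create(url='http://example.com/')
#   bookmark.tags.create(tag='django')  # content_type/object_id set for us
#   for b in Bookmark.objects.prefetch_related('tags'):
#       list(b.tags.all())  # served from the prefetch cache, no extra query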
# Models for lookup ordering tests
class House(models.Model):
name = models.CharField(max_length=50)
address = models.CharField(max_length=255)
owner = models.ForeignKey('Person', models.SET_NULL, null=True)
main_room = models.OneToOneField('Room', models.SET_NULL, related_name='main_room_of', null=True)
class Meta:
ordering = ['id']
class Room(models.Model):
name = models.CharField(max_length=50)
house = models.ForeignKey(House, models.CASCADE, related_name='rooms')
class Meta:
ordering = ['id']
class Person(models.Model):
name = models.CharField(max_length=50)
houses = models.ManyToManyField(House, related_name='occupants')
@property
def primary_house(self):
# Assume business logic forces every person to have at least one house.
return sorted(self.houses.all(), key=lambda house: -house.rooms.count())[0]
@property
def all_houses(self):
return list(self.houses.all())
class Meta:
ordering = ['id']
# Models for nullable FK tests
@python_2_unicode_compatible
class Employee(models.Model):
name = models.CharField(max_length=50)
boss = models.ForeignKey('self', models.SET_NULL, null=True, related_name='serfs')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
# Ticket #19607
@python_2_unicode_compatible
class LessonEntry(models.Model):
name1 = models.CharField(max_length=200)
name2 = models.CharField(max_length=200)
def __str__(self):
return "%s %s" % (self.name1, self.name2)
@python_2_unicode_compatible
class WordEntry(models.Model):
lesson_entry = models.ForeignKey(LessonEntry, models.CASCADE)
name = models.CharField(max_length=200)
def __str__(self):
return "%s (%s)" % (self.name, self.id)
# Ticket #21410: Regression when related_name="+"
@python_2_unicode_compatible
class Author2(models.Model):
name = models.CharField(max_length=50, unique=True)
first_book = models.ForeignKey('Book', models.CASCADE, related_name='first_time_authors+')
favorite_books = models.ManyToManyField('Book', related_name='+')
def __str__(self):
return self.name
class Meta:
ordering = ['id']
# Models for many-to-many with UUID pk test:
class Pet(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=20)
people = models.ManyToManyField(Person, related_name='pets')
class Flea(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
current_room = models.ForeignKey(Room, models.SET_NULL, related_name='fleas', null=True)
pets_visited = models.ManyToManyField(Pet, related_name='fleas_hosted')
people_visited = models.ManyToManyField(Person, related_name='fleas_hosted')
| bsd-3-clause |
BryanCutler/spark | python/pyspark/tests/test_readwrite.py | 23 | 14386 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import tempfile
import unittest
from pyspark.testing.utils import ReusedPySparkTestCase, SPARK_HOME
class InputFormatTests(ReusedPySparkTestCase):
@classmethod
def setUpClass(cls):
ReusedPySparkTestCase.setUpClass()
cls.tempdir = tempfile.NamedTemporaryFile(delete=False)
os.unlink(cls.tempdir.name)
cls.sc._jvm.WriteInputFormatTestDataGenerator.generateData(cls.tempdir.name, cls.sc._jsc)
@classmethod
def tearDownClass(cls):
ReusedPySparkTestCase.tearDownClass()
shutil.rmtree(cls.tempdir.name)
def test_oldhadoop(self):
basepath = self.tempdir.name
ints = sorted(self.sc.hadoopFile(basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text").collect())
ei = [(1, u'aa'), (1, u'aa'), (2, u'aa'), (2, u'bb'), (2, u'bb'), (3, u'cc')]
self.assertEqual(ints, ei)
hellopath = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
oldconf = {"mapreduce.input.fileinputformat.inputdir": hellopath}
hello = self.sc.hadoopRDD("org.apache.hadoop.mapred.TextInputFormat",
"org.apache.hadoop.io.LongWritable",
"org.apache.hadoop.io.Text",
conf=oldconf).collect()
result = [(0, u'Hello World!')]
self.assertEqual(hello, result)
def test_newhadoop(self):
basepath = self.tempdir.name
ints = sorted(self.sc.newAPIHadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text").collect())
ei = [(1, u'aa'), (1, u'aa'), (2, u'aa'), (2, u'bb'), (2, u'bb'), (3, u'cc')]
self.assertEqual(ints, ei)
hellopath = os.path.join(SPARK_HOME, "python/test_support/hello/hello.txt")
newconf = {"mapreduce.input.fileinputformat.inputdir": hellopath}
hello = self.sc.newAPIHadoopRDD("org.apache.hadoop.mapreduce.lib.input.TextInputFormat",
"org.apache.hadoop.io.LongWritable",
"org.apache.hadoop.io.Text",
conf=newconf).collect()
result = [(0, u'Hello World!')]
self.assertEqual(hello, result)
def test_newolderror(self):
basepath = self.tempdir.name
self.assertRaises(Exception, lambda: self.sc.hadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
self.assertRaises(Exception, lambda: self.sc.newAPIHadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
def test_bad_inputs(self):
basepath = self.tempdir.name
self.assertRaises(Exception, lambda: self.sc.sequenceFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.io.NotValidWritable",
"org.apache.hadoop.io.Text"))
self.assertRaises(Exception, lambda: self.sc.hadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapred.NotValidInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
self.assertRaises(Exception, lambda: self.sc.newAPIHadoopFile(
basepath + "/sftestdata/sfint/",
"org.apache.hadoop.mapreduce.lib.input.NotValidInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text"))
def test_converters(self):
# use of custom converters
basepath = self.tempdir.name
maps = sorted(self.sc.sequenceFile(
basepath + "/sftestdata/sfmap/",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable",
keyConverter="org.apache.spark.api.python.TestInputKeyConverter",
valueConverter="org.apache.spark.api.python.TestInputValueConverter").collect())
em = [(u'\x01', []),
(u'\x01', [3.0]),
(u'\x02', [1.0]),
(u'\x02', [1.0]),
(u'\x03', [2.0])]
self.assertEqual(maps, em)
def test_binary_files(self):
path = os.path.join(self.tempdir.name, "binaryfiles")
os.mkdir(path)
data = b"short binary data"
with open(os.path.join(path, "part-0000"), 'wb') as f:
f.write(data)
[(p, d)] = self.sc.binaryFiles(path).collect()
self.assertTrue(p.endswith("part-0000"))
self.assertEqual(d, data)
def test_binary_records(self):
path = os.path.join(self.tempdir.name, "binaryrecords")
os.mkdir(path)
with open(os.path.join(path, "part-0000"), 'w') as f:
for i in range(100):
f.write('%04d' % i)
result = self.sc.binaryRecords(path, 4).map(int).collect()
self.assertEqual(list(range(100)), result)
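# A hedged standalone sketch of the fixed-width record API exercised in
# test_binary_records above (the path is hypothetical; recordLength must
# evenly divide the file size):
#
#   rdd = sc.binaryRecords('/data/records', recordLength=4)
#   ints = rdd.map(int).collect()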
class OutputFormatTests(ReusedPySparkTestCase):
def setUp(self):
self.tempdir = tempfile.NamedTemporaryFile(delete=False)
os.unlink(self.tempdir.name)
def tearDown(self):
shutil.rmtree(self.tempdir.name, ignore_errors=True)
def test_oldhadoop(self):
basepath = self.tempdir.name
dict_data = [(1, {}),
(1, {"row1": 1.0}),
(2, {"row2": 2.0})]
self.sc.parallelize(dict_data).saveAsHadoopFile(
basepath + "/oldhadoop/",
"org.apache.hadoop.mapred.SequenceFileOutputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable")
result = self.sc.hadoopFile(
basepath + "/oldhadoop/",
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable").collect()
for v in result:
self.assertTrue(v, dict_data)
conf = {
"mapred.output.format.class": "org.apache.hadoop.mapred.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.MapWritable",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/olddataset/"
}
self.sc.parallelize(dict_data).saveAsHadoopDataset(conf)
input_conf = {"mapreduce.input.fileinputformat.inputdir": basepath + "/olddataset/"}
result = self.sc.hadoopRDD(
"org.apache.hadoop.mapred.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.MapWritable",
conf=input_conf).collect()
for v in result:
self.assertTrue(v, dict_data)
def test_newhadoop(self):
basepath = self.tempdir.name
data = [(1, ""),
(1, "a"),
(2, "bcdf")]
self.sc.parallelize(data).saveAsNewAPIHadoopFile(
basepath + "/newhadoop/",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text")
result = sorted(self.sc.newAPIHadoopFile(
basepath + "/newhadoop/",
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text").collect())
self.assertEqual(result, data)
conf = {
"mapreduce.job.outputformat.class":
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.Text",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/newdataset/"
}
self.sc.parallelize(data).saveAsNewAPIHadoopDataset(conf)
input_conf = {"mapreduce.input.fileinputformat.inputdir": basepath + "/newdataset/"}
new_dataset = sorted(self.sc.newAPIHadoopRDD(
"org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
"org.apache.hadoop.io.IntWritable",
"org.apache.hadoop.io.Text",
conf=input_conf).collect())
self.assertEqual(new_dataset, data)
def test_newolderror(self):
basepath = self.tempdir.name
rdd = self.sc.parallelize(range(1, 4)).map(lambda x: (x, "a" * x))
self.assertRaises(Exception, lambda: rdd.saveAsHadoopFile(
basepath + "/newolderror/saveAsHadoopFile/",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat"))
self.assertRaises(Exception, lambda: rdd.saveAsNewAPIHadoopFile(
basepath + "/newolderror/saveAsNewAPIHadoopFile/",
"org.apache.hadoop.mapred.SequenceFileOutputFormat"))
def test_bad_inputs(self):
basepath = self.tempdir.name
rdd = self.sc.parallelize(range(1, 4)).map(lambda x: (x, "a" * x))
self.assertRaises(Exception, lambda: rdd.saveAsHadoopFile(
basepath + "/badinputs/saveAsHadoopFile/",
"org.apache.hadoop.mapred.NotValidOutputFormat"))
self.assertRaises(Exception, lambda: rdd.saveAsNewAPIHadoopFile(
basepath + "/badinputs/saveAsNewAPIHadoopFile/",
"org.apache.hadoop.mapreduce.lib.output.NotValidOutputFormat"))
def test_converters(self):
# use of custom converters
basepath = self.tempdir.name
data = [(1, {3.0: u'bb'}),
(2, {1.0: u'aa'}),
(3, {2.0: u'dd'})]
self.sc.parallelize(data).saveAsNewAPIHadoopFile(
basepath + "/converters/",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
keyConverter="org.apache.spark.api.python.TestOutputKeyConverter",
valueConverter="org.apache.spark.api.python.TestOutputValueConverter")
converted = sorted(self.sc.sequenceFile(basepath + "/converters/").collect())
expected = [(u'1', 3.0),
(u'2', 1.0),
(u'3', 2.0)]
self.assertEqual(converted, expected)
def test_reserialization(self):
basepath = self.tempdir.name
x = range(1, 5)
y = range(1001, 1005)
data = list(zip(x, y))
rdd = self.sc.parallelize(x).zip(self.sc.parallelize(y))
rdd.saveAsSequenceFile(basepath + "/reserialize/sequence")
result1 = sorted(self.sc.sequenceFile(basepath + "/reserialize/sequence").collect())
self.assertEqual(result1, data)
rdd.saveAsHadoopFile(
basepath + "/reserialize/hadoop",
"org.apache.hadoop.mapred.SequenceFileOutputFormat")
result2 = sorted(self.sc.sequenceFile(basepath + "/reserialize/hadoop").collect())
self.assertEqual(result2, data)
rdd.saveAsNewAPIHadoopFile(
basepath + "/reserialize/newhadoop",
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat")
result3 = sorted(self.sc.sequenceFile(basepath + "/reserialize/newhadoop").collect())
self.assertEqual(result3, data)
conf4 = {
"mapred.output.format.class": "org.apache.hadoop.mapred.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/reserialize/dataset"}
rdd.saveAsHadoopDataset(conf4)
result4 = sorted(self.sc.sequenceFile(basepath + "/reserialize/dataset").collect())
self.assertEqual(result4, data)
conf5 = {"mapreduce.job.outputformat.class":
"org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.IntWritable",
"mapreduce.output.fileoutputformat.outputdir": basepath + "/reserialize/newdataset"
}
rdd.saveAsNewAPIHadoopDataset(conf5)
result5 = sorted(self.sc.sequenceFile(basepath + "/reserialize/newdataset").collect())
self.assertEqual(result5, data)
def test_malformed_RDD(self):
basepath = self.tempdir.name
# non-batch-serialized RDD[[(K, V)]] should be rejected
data = [[(1, "a")], [(2, "aa")], [(3, "aaa")]]
rdd = self.sc.parallelize(data, len(data))
self.assertRaises(Exception, lambda: rdd.saveAsSequenceFile(
basepath + "/malformed/sequence"))
if __name__ == "__main__":
from pyspark.tests.test_readwrite import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 |