id
int64 0
843k
| repository_name
stringlengths 7
55
| file_path
stringlengths 9
332
| class_name
stringlengths 3
290
| human_written_code
stringlengths 12
4.36M
| class_skeleton
stringlengths 19
2.2M
| total_program_units
int64 1
9.57k
| total_doc_str
int64 0
4.2k
| AvgCountLine
float64 0
7.89k
| AvgCountLineBlank
float64 0
300
| AvgCountLineCode
float64 0
7.89k
| AvgCountLineComment
float64 0
7.89k
| AvgCyclomatic
float64 0
130
| CommentToCodeRatio
float64 0
176
| CountClassBase
float64 0
48
| CountClassCoupled
float64 0
589
| CountClassCoupledModified
float64 0
581
| CountClassDerived
float64 0
5.37k
| CountDeclInstanceMethod
float64 0
4.2k
| CountDeclInstanceVariable
float64 0
299
| CountDeclMethod
float64 0
4.2k
| CountDeclMethodAll
float64 0
4.2k
| CountLine
float64 1
115k
| CountLineBlank
float64 0
9.01k
| CountLineCode
float64 0
94.4k
| CountLineCodeDecl
float64 0
46.1k
| CountLineCodeExe
float64 0
91.3k
| CountLineComment
float64 0
27k
| CountStmt
float64 1
93.2k
| CountStmtDecl
float64 0
46.1k
| CountStmtExe
float64 0
90.2k
| MaxCyclomatic
float64 0
759
| MaxInheritanceTree
float64 0
16
| MaxNesting
float64 0
34
| SumCyclomatic
float64 0
6k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1,900 |
APSL/transmanager
|
APSL_transmanager/transmanager/permissions.py
|
transmanager.permissions.AuthenticationMixin
|
class AuthenticationMixin(object):
"""
Mixin to check that the user has the translator profile
"""
translator_user = None
def dispatch(self, request, *args, **kwargs):
if request.user.is_superuser:
authorized = True
else:
try:
self.translator_user = request.user.translator_user
authorized = True
except ObjectDoesNotExist:
authorized = False
if not authorized:
return redirect(reverse_lazy('index'))
return super(AuthenticationMixin, self).dispatch(request, *args, **kwargs)
|
class AuthenticationMixin(object):
'''
Mixin to check that the user has the translator profile
'''
def dispatch(self, request, *args, **kwargs):
pass
| 2 | 1 | 15 | 3 | 12 | 0 | 4 | 0.21 | 1 | 1 | 0 | 3 | 1 | 0 | 1 | 1 | 22 | 5 | 14 | 4 | 12 | 3 | 13 | 4 | 11 | 4 | 1 | 2 | 4 |
1,901 |
APSL/transmanager
|
APSL_transmanager/transmanager/search_indexes.py
|
transmanager.search_indexes.TransTaskIndex
|
class TransTaskIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
has_value = indexes.CharField(model_attr='has_value')
language = indexes.CharField(model_attr='language')
user = indexes.CharField(model_attr='user')
def get_model(self):
return TransTask
def index_queryset(self, using=None):
"""
Used when the entire index for model is updated.
"""
return self.get_model().objects.filter(date_creation__lte=datetime.datetime.now())
|
class TransTaskIndex(indexes.SearchIndex, indexes.Indexable):
def get_model(self):
pass
def index_queryset(self, using=None):
'''
Used when the entire index for model is updated.
'''
pass
| 3 | 1 | 4 | 0 | 2 | 2 | 1 | 0.33 | 2 | 2 | 1 | 0 | 2 | 0 | 2 | 2 | 14 | 2 | 9 | 7 | 6 | 3 | 9 | 7 | 6 | 1 | 1 | 0 | 2 |
1,902 |
APSL/transmanager
|
APSL_transmanager/transmanager/serializers.py
|
transmanager.serializers.TaskBulksSerializer
|
class TaskBulksSerializer(serializers.Serializer):
app_label = serializers.CharField(required=True)
model = serializers.CharField(required=True)
languages = ListField(required=True)
ids = ListField(required=True)
class Meta:
fields = ('app_label', 'model', 'languages', 'ids')
def validate(self, attrs):
try:
self.model_class = None
self.app_label = attrs['app_label'].strip()
self.model = attrs['model'].strip()
self.languages = attrs['languages']
self.ids = attrs['ids']
if not self.languages:
raise serializers.ValidationError(detail=_('No se han especificado códigos de idioma'))
if not self.ids:
raise serializers.ValidationError(detail=_('No se han especificado códigos de item'))
ct = ContentType.objects.get_by_natural_key(self.app_label.lower(), self.model.lower())
if not ct:
raise serializers.ValidationError(detail=_('El Content Type no existe'))
self.model_class = ct.model_class()
return attrs
except Exception as e:
raise serializers.ValidationError(detail=str(e))
def save(self, **kwargs):
"""
Method that creates the translations tasks for every selected instance
:param kwargs:
:return:
"""
try:
# result_ids = []
manager = Manager()
for item in self.model_class.objects.language(manager.get_main_language()).filter(pk__in=self.ids).all():
create_translations_for_item_and_its_children.delay(self.model_class, item.pk, self.languages,
update_item_languages=True)
# return TransTaskSerializer(TransTask.objects.filter(pk__in=result_ids), many=True).data
return {'status': 'ok'}
except Exception as e:
raise serializers.ValidationError(detail=str(e))
def delete(self, **kwargs):
"""
Method that deletes the translations tasks for every selected instance
:param kwargs:
:return:
"""
try:
manager = Manager()
for item in self.model_class.objects.language(manager.get_main_language()).filter(pk__in=self.ids).all():
delete_translations_for_item_and_its_children.delay(self.model_class, item.pk, self.languages,
update_item_languages=True)
return
except Exception as e:
raise serializers.ValidationError(detail=str(e))
|
class TaskBulksSerializer(serializers.Serializer):
class Meta:
def validate(self, attrs):
pass
def save(self, **kwargs):
'''
Method that creates the translations tasks for every selected instance
:param kwargs:
:return:
'''
pass
def delete(self, **kwargs):
'''
Method that deletes the translations tasks for every selected instance
:param kwargs:
:return:
'''
pass
| 5 | 2 | 17 | 1 | 12 | 4 | 4 | 0.28 | 1 | 3 | 1 | 0 | 3 | 1 | 3 | 3 | 61 | 6 | 43 | 19 | 38 | 12 | 41 | 16 | 36 | 5 | 1 | 2 | 11 |
1,903 |
APSL/transmanager
|
APSL_transmanager/transmanager/serializers.py
|
transmanager.serializers.TransUserSerializer
|
class TransUserSerializer(serializers.ModelSerializer):
first_name = serializers.CharField(source='user.first_name')
last_name = serializers.CharField(source='user.last_name')
email = serializers.CharField(source='user.email')
languages = serializers.SerializerMethodField()
@staticmethod
def get_languages(obj):
return ', '.join([lang.code for lang in obj.languages.all()])
class Meta:
model = TransUser
fields = ('first_name', 'last_name', 'email', 'languages')
|
class TransUserSerializer(serializers.ModelSerializer):
@staticmethod
def get_languages(obj):
pass
class Meta:
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 14 | 3 | 11 | 10 | 7 | 0 | 10 | 9 | 7 | 1 | 1 | 0 | 1 |
1,904 |
APSL/transmanager
|
APSL_transmanager/transmanager/tables.py
|
transmanager.tables.TaskTable
|
class TaskTable(tables.Table):
detail = tables.Column(accessor=Accessor('pk'), verbose_name=' ')
class Meta:
model = TransTask
fields = ('user', 'language', 'object_name', 'object_pk', 'object_field_label', 'object_field_value',
'number_of_words', 'date_creation', 'date_modification', 'done', 'detail')
empty_text = _('No se han encontrado resultados')
attrs = {'class': 'table table-bordered table-hover table-condensed', 'id': 'taskList'}
template = 'table.html'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if 'request' in kwargs:
self.GET = kwargs['request'].GET
else:
self.GET = None
def render_object_field_value(self, record, value):
return text.Truncator(value).chars(TM_ORIGINAL_VALUE_CHARS_NUMBER, html=True)
def render_detail(self, record, value):
url = reverse('transmanager-task-detail', kwargs={'pk': record.id})
if self.GET:
return mark_safe('<a href="{0}?{1}">'
'<i class="fa fa-pencil-square-o" aria-hidden="true"></i> '
'</a>'.format(url, self.GET.urlencode()))
else:
return mark_safe('<a href="{0}">'
'<i class="fa fa-pencil-square-o" aria-hidden="true"></i> '
'</a>'.format(url))
|
class TaskTable(tables.Table):
class Meta:
def __init__(self, *args, **kwargs):
pass
def render_object_field_value(self, record, value):
pass
def render_detail(self, record, value):
pass
| 5 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 3 | 1 | 3 | 3 | 32 | 5 | 27 | 13 | 22 | 0 | 20 | 13 | 15 | 2 | 1 | 1 | 5 |
1,905 |
APSL/transmanager
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/APSL_transmanager/transmanager/models.py
|
transmanager.models.TransTask.Meta
|
class Meta:
verbose_name = _(u'Tarea')
verbose_name_plural = _(u'Tareas')
# unique_together = ('object_class', 'object_pk', 'object_field')
ordering = ['-id']
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 4 | 4 | 3 | 1 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
1,906 |
APSL/transmanager
|
APSL_transmanager/transmanager/signals.py
|
transmanager.signals.SignalBlocker
|
class SignalBlocker(object):
"""
Class used to block the models signals in certains cases
"""
def __init__(self, signal):
self.signal = signal
self.receivers = signal.receivers
def __enter__(self, *args, **kwargs):
self.signal.receivers = []
def __exit__(self, *args, **kwargs):
self.signal.receivers = self.receivers
|
class SignalBlocker(object):
'''
Class used to block the models signals in certains cases
'''
def __init__(self, signal):
pass
def __enter__(self, *args, **kwargs):
pass
def __exit__(self, *args, **kwargs):
pass
| 4 | 1 | 2 | 0 | 2 | 0 | 1 | 0.38 | 1 | 0 | 0 | 0 | 3 | 2 | 3 | 3 | 13 | 2 | 8 | 6 | 4 | 3 | 8 | 6 | 4 | 1 | 1 | 0 | 3 |
1,907 |
APSL/transmanager
|
APSL_transmanager/transmanager/models.py
|
transmanager.models.TransApplicationLanguage
|
class TransApplicationLanguage(models.Model):
application = models.CharField(max_length=100, verbose_name=_('Aplicación'), unique=True)
languages = models.ManyToManyField(TransLanguage, verbose_name=_('Idiomas'),
help_text=_('Idiomas por defecto de la aplicación'))
class Meta:
verbose_name = _('Idiomas por aplicación')
verbose_name_plural = _('Idiomas por aplicaciones')
def __str__(self):
return self.application
def _languages(self):
return ', '.join([lang.name for lang in self.languages.order_by('name')])
_languages.short_description = _('Idiomas')
|
class TransApplicationLanguage(models.Model):
class Meta:
def __str__(self):
pass
def _languages(self):
pass
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 16 | 4 | 12 | 8 | 8 | 0 | 11 | 8 | 7 | 1 | 1 | 0 | 2 |
1,908 |
APSL/transmanager
|
APSL_transmanager/transmanager/models.py
|
transmanager.models.TransItemLanguage
|
class TransItemLanguage(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, verbose_name=_('Modelo'))
object_id = models.PositiveIntegerField(verbose_name=_('Identificador'))
content_object = GenericForeignKey('content_type', 'object_id')
languages = models.ManyToManyField(TransLanguage, verbose_name=_('Idiomas'),
help_text=_('Idiomas por defecto del item'))
class Meta:
verbose_name = _('Idiomas por item')
verbose_name_plural = _('Idiomas por item')
def __str__(self):
return '{}'.format(self.content_object)
def _languages(self):
return ', '.join([lang.name for lang in self.languages.order_by('name')])
_languages.short_description = _('Idiomas')
|
class TransItemLanguage(models.Model):
class Meta:
def __str__(self):
pass
def _languages(self):
pass
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 18 | 4 | 14 | 10 | 10 | 0 | 13 | 10 | 9 | 1 | 1 | 0 | 2 |
1,909 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.TestMultiFileBumps
|
class TestMultiFileBumps(unittest.TestCase):
call = functools.partial(main, config_path="double_target.toml")
@classmethod
def setUpClass(cls):
dir = os.path.dirname(__file__)
os.chdir(os.path.abspath(dir))
def tearDown(self):
self.call(set_to="19.99.0")
def test_bump_patch(self):
old, new, updates = self.call(bump="patch", release=True)
self.assertEqual(
updates,
{
"RELEASE": True,
"VERSION": "19.99.1",
"VERSION_AGAIN": "19.99.1",
"STRICT_VERSION": "19.99.1",
},
)
with open("example2.py", "r") as f:
second_file = f.read()
self.assertEqual(second_file, '''LOCK = False
RELEASE = True
VERSION = "19.99.1"
VERSION_AGAIN = "19.99.1"
STRICT_VERSION = "19.99.1"
UNRELATED_STRING = "apple"
''')
|
class TestMultiFileBumps(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
def tearDown(self):
pass
def test_bump_patch(self):
pass
| 5 | 0 | 8 | 0 | 8 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 3 | 75 | 31 | 3 | 28 | 9 | 23 | 0 | 13 | 7 | 9 | 1 | 2 | 1 | 3 |
1,910 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.XMLRegexTest
|
class XMLRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".csproj"])
lines = [" <custom_Key>1.2.3.4+dev0</custom_Key>\r\n"]
non_matching = [
'<Project Sdk="Microsoft.NET.Sdk">\r\n',
"""<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">\r\n""",
]
|
class XMLRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 7 | 0 | 7 | 4 | 6 | 0 | 4 | 4 | 3 | 0 | 3 | 0 | 0 |
1,911 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.TestVCSTags
|
class TestVCSTags(unittest.TestCase):
call = functools.partial(main, config_path="example.toml")
@classmethod
def setUpClass(cls):
dir = os.path.dirname(__file__)
os.chdir(os.path.abspath(dir))
@classmethod
def tearDownClass(cls):
cls.call(set_to="19.99.0")
def setUp(self):
cmd = "git tag release/4.5.6"
subprocess.check_call(shlex.split(cmd))
cmd = "git tag release/4.5.7-dev.1"
subprocess.check_call(shlex.split(cmd))
# todo: build a git tree with a branch, release and RC on that branch
# (to distinguish global vs ancestry tests)
self.addCleanup(
subprocess.check_call, shlex.split("git tag --delete release/4.5.7-dev.1")
)
self.addCleanup(
subprocess.check_call, shlex.split("git tag --delete release/4.5.6")
)
def test_from_ancestor_version(self):
bumped = "4.5.7-dev.1"
old, new, updates = self.call(
persist_from=[Constants.FROM_VCS_PREVIOUS_VERSION]
)
self.assertEqual(
updates,
{
"VERSION": bumped,
"VERSION_AGAIN": bumped,
"STRICT_VERSION": semver.finalize_version(bumped),
},
)
def test_from_ancestor_release(self):
bumped = "4.5.6"
old, new, updates = self.call(
persist_from=[Constants.FROM_VCS_PREVIOUS_RELEASE]
)
self.assertEqual(
updates,
{
"VERSION": bumped,
"VERSION_AGAIN": bumped,
"STRICT_VERSION": semver.finalize_version(bumped),
},
)
def test_from_latest_of_all_time(self):
bumped = "4.5.7-dev.1"
old, new, updates = self.call(persist_from=[Constants.FROM_VCS_LATEST_VERSION])
self.assertEqual(
updates,
{
"VERSION": bumped,
"VERSION_AGAIN": bumped,
"STRICT_VERSION": semver.finalize_version(bumped),
},
)
def test_from_latest_of_all_time_release(self):
bumped = "4.5.6"
old, new, updates = self.call(persist_from=[Constants.FROM_VCS_LATEST_RELEASE])
self.assertEqual(
updates,
{
"VERSION": bumped,
"VERSION_AGAIN": bumped,
"STRICT_VERSION": semver.finalize_version(bumped),
},
)
def test_to_tag(self):
"""writes a tag in to git
"""
bumped = "5.0.0-dev.1"
old, new, updates = self.call(
persist_from=[Constants.FROM_VCS_LATEST_VERSION],
persist_to=[Constants.TO_VCS],
bump="major",
)
self.addCleanup(
subprocess.check_call, shlex.split("git tag --delete release/5.0.0-dev.1")
)
self.assertEqual(
updates,
{
"VERSION": bumped,
"VERSION_AGAIN": bumped,
"STRICT_VERSION": semver.finalize_version(bumped),
},
)
version = auto_version_tool.get_dvcs_repo_latest_version_semver()
self.assertEqual(
dict(version._asdict()),
dict(major=5, minor=0, patch=0, build=None, prerelease="dev.1"),
)
|
class TestVCSTags(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def setUpClass(cls):
pass
def test_from_ancestor_version(self):
pass
def test_from_ancestor_release(self):
pass
def test_from_latest_of_all_time(self):
pass
def test_from_latest_of_all_time_release(self):
pass
def test_to_tag(self):
'''writes a tag in to git
'''
pass
| 11 | 1 | 11 | 0 | 11 | 1 | 1 | 0.04 | 1 | 2 | 1 | 0 | 6 | 0 | 8 | 80 | 103 | 8 | 91 | 24 | 80 | 4 | 37 | 22 | 28 | 1 | 2 | 0 | 8 |
1,912 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.TestUtils
|
class TestUtils(unittest.TestCase):
def test_is_release(self):
self.assertTrue(utils.is_release(semver.parse_version_info("1.2.3")))
self.assertFalse(utils.is_release(semver.parse_version_info("1.2.3-RC.1")))
self.assertFalse(utils.is_release(semver.parse_version_info("1.2.3+abc")))
def test_sigfig_max(self):
self.assertEqual("minor", utils.max_sigfig(["minor", "patch"]))
def test_sigfig_min(self):
self.assertEqual("minor", utils.min_sigfig(["minor", "major"]))
def test_sigfig_compare_gt(self):
self.assertFalse(utils.sigfig_gt("minor", "major"))
self.assertFalse(utils.sigfig_gt("minor", "minor"))
self.assertTrue(utils.sigfig_gt("major", "patch"))
def test_sigfig_compare_lt(self):
self.assertTrue(utils.sigfig_lt("minor", "major"))
self.assertFalse(utils.sigfig_lt("minor", "minor"))
self.assertFalse(utils.sigfig_lt("major", "patch"))
def test_semver_diff(self):
self.assertEqual(
"minor",
utils.semver_diff(
semver.parse_version_info("1.2.3"), semver.parse_version_info("1.3.5")
),
)
self.assertEqual(
"patch",
utils.semver_diff(
semver.parse_version_info("1.2.3"),
semver.parse_version_info("1.2.4-RC.1"),
),
)
self.assertEqual(
None,
utils.semver_diff(
semver.parse_version_info("1.2.3"), semver.parse_version_info("1.2.3")
),
)
|
class TestUtils(unittest.TestCase):
def test_is_release(self):
pass
def test_sigfig_max(self):
pass
def test_sigfig_min(self):
pass
def test_sigfig_compare_gt(self):
pass
def test_sigfig_compare_lt(self):
pass
def test_semver_diff(self):
pass
| 7 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 6 | 0 | 6 | 78 | 42 | 5 | 37 | 7 | 30 | 0 | 21 | 7 | 14 | 1 | 2 | 0 | 6 |
1,913 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.TestTagReplacements
|
class TestTagReplacements(unittest.TestCase):
some_tags = [
"0.0.0",
"0.1.0",
"v0.2.0",
"0.3.0v",
"my_project/0.4.0",
"my_project/0.5.0/releases",
"my_project/0.6.0-RC.2+build-99/releases",
r"£*ORWI\H'#[;'Q",
]
@classmethod
def setUpClass(cls):
cls._default_template = config.TAG_TEMPLATE
@classmethod
def tearDownClass(cls):
config.TAG_TEMPLATE = cls._default_template
def eval(self, template, tags, expect):
config.TAG_TEMPLATE = template
self.assertEqual(get_all_versions_from_tags(tags), expect)
def test_empty_tag(self):
self.eval("", self.some_tags, [])
def test_v_tag(self):
self.eval("v{version}", self.some_tags, ["0.2.0"])
def test_plain_tag(self):
self.eval("{version}", self.some_tags, ["0.0.0", "0.1.0"])
def test_prefix_tag(self):
self.eval("my_project/{version}", self.some_tags, ["0.4.0"])
def test_prefix_suffix_tag(self):
self.eval(
"my_project/{version}/releases",
self.some_tags,
["0.5.0", "0.6.0-RC.2+build-99"],
)
|
class TestTagReplacements(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def eval(self, template, tags, expect):
pass
def test_empty_tag(self):
pass
def test_v_tag(self):
pass
def test_plain_tag(self):
pass
def test_prefix_tag(self):
pass
def test_prefix_suffix_tag(self):
pass
| 11 | 0 | 3 | 0 | 3 | 0 | 1 | 0.03 | 1 | 1 | 1 | 0 | 6 | 0 | 8 | 80 | 42 | 8 | 34 | 12 | 23 | 1 | 19 | 10 | 10 | 1 | 2 | 0 | 8 |
1,914 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.TestNewSemVerLogic
|
class TestNewSemVerLogic(unittest.TestCase):
"""Unit testing the core logic that determines a bump"""
@classmethod
def setUpClass(cls):
test_dir = os.path.dirname(__file__)
auto_version_tool.load_config(os.path.join(test_dir, "example.toml"))
def check(self, previous, current, bumps, expect):
previous = semver.parse_version_info(previous) if previous else None
self.assertEqual(
expect,
str(
utils.make_new_semver(
semver.parse_version_info(current), previous, bumps
)
),
)
def test_release_bump(self):
self.check(None, "1.2.3", {"minor"}, "1.3.0-dev.1")
def test_no_history_bump(self):
self.check(None, "1.2.3", {"prerelease"}, "1.2.4-dev.1")
# this would be wrong, because you can't pre-release something that's released
# self.check(None, "1.2.3", ["prerelease"], "1.2.3-dev.1")
def test_no_history_pre_bump(self):
self.check(None, "1.2.3-dev.1", {"prerelease"}, "1.2.3-dev.2")
def test_release_bump_with_history(self):
self.check("1.2.2", "1.2.3", {"minor"}, "1.3.0-dev.1")
def test_candidate_bump_with_history_less(self):
# the bump is less significant than the original RC increment
self.check("1.0.0", "1.1.0-dev.3", {"patch"}, "1.1.0-dev.4")
def test_candidate_bump_with_history_same(self):
# the RC has the same significance from the previous release as the bump
self.check("1.2.2", "1.2.3-dev.1", {"patch"}, "1.2.3-dev.2")
def test_candidate_bump_with_history_more(self):
# the bump is more significant than the previous release, so perform that bump
self.check("1.2.2", "1.2.3-dev.1", {"minor"}, "1.3.0-dev.1")
|
class TestNewSemVerLogic(unittest.TestCase):
'''Unit testing the core logic that determines a bump'''
@classmethod
def setUpClass(cls):
pass
def check(self, previous, current, bumps, expect):
pass
def test_release_bump(self):
pass
def test_no_history_bump(self):
pass
def test_no_history_pre_bump(self):
pass
def test_release_bump_with_history(self):
pass
def test_candidate_bump_with_history_less(self):
pass
def test_candidate_bump_with_history_same(self):
pass
def test_candidate_bump_with_history_more(self):
pass
| 11 | 1 | 3 | 0 | 3 | 0 | 1 | 0.21 | 1 | 1 | 0 | 0 | 8 | 0 | 9 | 81 | 45 | 10 | 29 | 12 | 18 | 6 | 21 | 11 | 11 | 2 | 2 | 0 | 10 |
1,915 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.TestBumps
|
class TestBumps(unittest.TestCase):
call = functools.partial(main, config_path="example.toml")
@classmethod
def setUpClass(cls):
dir = os.path.dirname(__file__)
os.chdir(os.path.abspath(dir))
def tearDown(self):
self.call(set_to="19.99.0")
def test_bump_patch(self):
old, new, updates = self.call(bump="patch", release=True)
self.assertEqual(
updates,
{
"RELEASE": True,
"VERSION": "19.99.1",
"VERSION_AGAIN": "19.99.1",
"STRICT_VERSION": "19.99.1",
},
)
def test_bump_major(self):
old, new, updates = self.call(bump="major", release=True)
self.assertEqual(
updates,
{
"RELEASE": True,
"VERSION": "20.0.0",
"VERSION_AGAIN": "20.0.0",
"STRICT_VERSION": "20.0.0",
},
)
def test_bump_news(self):
old, new, updates = self.call(enable_file_triggers=True, release=True)
self.assertEqual(
updates,
{
"RELEASE": True,
"VERSION": "19.100.0",
"VERSION_AGAIN": "19.100.0",
"STRICT_VERSION": "19.100.0",
},
)
def test_dev(self):
old, new, updates = self.call(bump="prerelease")
self.assertEqual(
updates,
{
"VERSION": "19.99.1-dev.1",
"VERSION_AGAIN": "19.99.1-dev.1",
"STRICT_VERSION": "19.99.1",
},
)
def test_build(self):
# can't just tag a build onto something that's already a release version
self.call(set_to="19.99.0+build.1")
old, new, updates = self.call(bump="build")
self.assertEqual(
updates,
{
"VERSION": "19.99.0+build.2",
"VERSION_AGAIN": "19.99.0+build.2",
"STRICT_VERSION": "19.99.0",
},
)
def test_non_release_bump(self):
old, new, updates = self.call(bump="minor")
self.assertEqual(
updates,
{
"VERSION": "19.100.0-dev.1",
"VERSION_AGAIN": "19.100.0-dev.1",
"STRICT_VERSION": "19.100.0",
},
)
def test_invalid_bump(self):
with self.assertRaises(KeyError):
self.call(bump="banana")
def test_increment_existing_prerelease(self):
old, new, updates = self.call(set_to="1.2.3-RC.1")
self.assertEqual(new, "1.2.3-RC.1")
old, new, updates = self.call(bump="prerelease")
self.assertEqual(new, "1.2.3-RC.2")
def test_end_to_end(self):
self.call(bump="major")
filepath = os.path.join(os.path.dirname(__file__), "example.py")
example = imp.load_source("example", filepath)
self.assertEqual(example.VERSION, "20.0.0-dev.1")
def test_simple_config_bump(self):
old, new, updates = self.call(config_path="simple.toml", bump="minor")
self.assertEqual(new, "19.100.0-dev.1")
# do our own teardown...
self.call(config_path="simple.toml", set_to="19.99.0")
def test_custom_field_set(self):
old, new, updates = self.call(UNRELATED_STRING="apple")
self.assertEqual(updates["UNRELATED_STRING"], "apple")
|
class TestBumps(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
def tearDown(self):
pass
def test_bump_patch(self):
pass
def test_bump_major(self):
pass
def test_bump_news(self):
pass
def test_dev(self):
pass
def test_build(self):
pass
def test_non_release_bump(self):
pass
def test_invalid_bump(self):
pass
def test_increment_existing_prerelease(self):
pass
def test_end_to_end(self):
pass
def test_simple_config_bump(self):
pass
def test_custom_field_set(self):
pass
| 15 | 0 | 7 | 0 | 7 | 0 | 1 | 0.02 | 1 | 1 | 0 | 0 | 12 | 0 | 13 | 85 | 107 | 13 | 92 | 27 | 77 | 2 | 46 | 26 | 32 | 1 | 2 | 1 | 13 |
1,916 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.PythonRegexTest
|
class PythonRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".py"], flags=re.DOTALL)
lines = [
'custom_Key = "1.2.3.4+dev0"\r\n',
' custom_Key = "1.2.3.4+dev0"\r\n',
' custom_Key: "1.2.3.4+dev0",\r\n',
]
non_matching = ['# custom_Key = "1.2.3.4+dev0"\r\n']
|
class PythonRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 8 | 0 | 8 | 4 | 7 | 1 | 4 | 4 | 3 | 0 | 3 | 0 | 0 |
1,917 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.PropertiesRegexTest
|
class PropertiesRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".properties"])
lines = ["custom_Key=1.2.3.4+dev0\r\n", " custom_Key = 1.2.3.4+dev0\r\n"]
explicit_replacement = {"custom_Key=\r\n": "custom_Key=5.6.7.8+dev1\r\n"}
|
class PropertiesRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 3 | 0 | 0 |
1,918 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.JSONRegexTest
|
class JSONRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".json"])
lines = [
'"custom_Key": "1.2.3.4+dev0"\r\n',
' "custom_Key" : "1.2.3.4+dev0",\r\n',
]
|
class JSONRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 6 | 0 | 6 | 3 | 5 | 0 | 3 | 3 | 2 | 0 | 3 | 0 | 0 |
1,919 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.JSONBoolRegexTest
|
class JSONBoolRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".json"])
value = "false"
value_replaced = "true"
key = "is_production"
lines = []
explicit_replacement = {'"is_production": false,\r\n': '"is_production": true,\r\n'}
|
class JSONBoolRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 7 | 0 | 7 | 7 | 6 | 0 | 7 | 7 | 6 | 0 | 3 | 0 | 0 |
1,920 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.CSharpRegexTest
|
class CSharpRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".cs"])
lines = [' public const string custom_Key = "1.2.3.4+dev0"; // auto\r\n']
non_matching = [
'// <copyright file="Version.cs" company="Arm">\r\n',
'// public const string custom_Key = "1.2.3.4+dev0"; // auto\r\n',
]
explicit_replacement = {
# check for no-op on these comment strings that contain variable assignment
'// <copyright file="Version.cs" company="Arm">': '// <copyright file="Version.cs" company="Arm">',
'// <copyright file="Version.cs" company="Arm">\r\n': '// <copyright file="Version.cs" company="Arm">\r\n',
}
|
class CSharpRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 12 | 0 | 11 | 5 | 10 | 1 | 5 | 5 | 4 | 0 | 3 | 0 | 0 |
1,921 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.BaseReplaceCheck
|
class BaseReplaceCheck(unittest.TestCase):
key = "custom_Key"
value = "1.2.3.4+dev0"
value_replaced = "5.6.7.8+dev1"
regexer = None
lines = [] # simply specify the line if it's trivial to do ''.replace() with
explicit_replacement = {} # otherwise, specify the line, and the output
non_matching = [] # specify example lines that should not match
def test_match(self):
"""
Check that for each specified line, a match is triggered
n.b. a match must include the full length of the line, or nothing at all
if it includes the full length of the line, there must be two named groups
`KEY` and `VALUE` that contain only the key and value respectively
:return:
"""
for line in self.lines:
with self.subTest(line=line) if six.PY3 else Noop():
extracted = extract_keypairs([line], self.regexer)
self.assertEqual({self.key: self.value}, extracted)
def test_non_match(self):
"""
Check lines that shouldn't trigger any matches
:return:
"""
for line in self.non_matching:
with self.subTest(line=line) if six.PY3 else Noop():
extracted = extract_keypairs([line], self.regexer)
self.assertEqual({}, extracted)
def test_replace(self):
"""
takes all the 'lines' and generates an expected value with a simple replacement
(1.2.3.4+dev0 -> 5.6.7.8+dev1)
additionally, explicit replacements can be tested
they are all run through the ReplacementHandler to check
the expected value
"""
replacements = {}
replacements.update(self.explicit_replacement)
replacements.update(
{k: k.replace(self.value, self.value_replaced) for k in self.lines}
)
for line, replaced in replacements.items():
with self.subTest(line=line) if six.PY3 else Noop():
extracted = replace_lines(
self.regexer,
ReplacementHandler(**{self.key: self.value_replaced}),
[line],
)
self.assertEqual([replaced], extracted)
|
class BaseReplaceCheck(unittest.TestCase):
def test_match(self):
'''
Check that for each specified line, a match is triggered
n.b. a match must include the full length of the line, or nothing at all
if it includes the full length of the line, there must be two named groups
`KEY` and `VALUE` that contain only the key and value respectively
:return:
'''
pass
def test_non_match(self):
'''
Check lines that shouldn't trigger any matches
:return:
'''
pass
def test_replace(self):
'''
takes all the 'lines' and generates an expected value with a simple replacement
(1.2.3.4+dev0 -> 5.6.7.8+dev1)
additionally, explicit replacements can be tested
they are all run through the ReplacementHandler to check
the expected value
'''
pass
| 4 | 3 | 15 | 1 | 8 | 6 | 3 | 0.66 | 1 | 1 | 1 | 7 | 3 | 0 | 3 | 75 | 57 | 7 | 32 | 18 | 28 | 21 | 26 | 18 | 22 | 3 | 2 | 2 | 9 |
1,922 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/replacement_handler.py
|
auto_version.replacement_handler.ReplacementHandler
|
class ReplacementHandler(object):
"""Tool used by regex when performing substitutions
We store state so that we consume our parameters as we make each replacement
"""
def __init__(self, **params):
"""New handler instance
:param params: mapping of <key to match> <value to replace with>
"""
self.params = params
self.missing = set(params.keys())
def __call__(self, match):
"""Given a regex Match Object, return the entire replacement string
:raises KeyError:
"""
original = match.string
key = match.group(Constants.KEY_GROUP)
replacement = self.params[key] # if there's nothing in the lookup, raise KeyError
start, end = match.span(Constants.VALUE_GROUP)
if start < 0:
# when there's a match but zero-length for the value group, we insert it at the end
# of the line just after the last non-whitespace character
# e.g. blah=\n --> blah=text\n
start = end = len(original.rstrip())
if key in self.missing:
self.missing.remove(key)
return "".join([original[:start], str(replacement), original[end:]])
|
class ReplacementHandler(object):
'''Tool used by regex when performing substitutions
We store state so that we consume our parameters as we make each replacement
'''
def __init__(self, **params):
'''New handler instance
:param params: mapping of <key to match> <value to replace with>
'''
pass
def __call__(self, match):
'''Given a regex Match Object, return the entire replacement string
:raises KeyError:
'''
pass
| 3 | 3 | 12 | 1 | 7 | 5 | 2 | 0.93 | 1 | 3 | 1 | 0 | 2 | 2 | 2 | 2 | 31 | 5 | 14 | 9 | 11 | 13 | 14 | 9 | 11 | 3 | 1 | 1 | 4 |
1,923 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/config.py
|
auto_version.config.Constants
|
class Constants(object):
"""Internal - reused strings"""
# regex groups
KEY_GROUP = "KEY"
VALUE_GROUP = "VALUE"
# internal field keys
VERSION_FIELD = "VERSION_KEY"
VERSION_STRICT_FIELD = "VERSION_KEY_STRICT"
VERSION_LOCK_FIELD = "VERSION_LOCK"
RELEASE_FIELD = "RELEASE_FIELD"
COMMIT_COUNT_FIELD = "COMMIT_COUNT"
COMMIT_FIELD = "COMMIT"
# source and destination control
FROM_SOURCE = "source"
FROM_VCS_PREVIOUS_VERSION = "vcs-prev-version"
FROM_VCS_PREVIOUS_RELEASE = "vcs-prev-release"
FROM_VCS_LATEST_VERSION = "vcs-global-version"
FROM_VCS_LATEST_RELEASE = "vcs-global-release"
TO_SOURCE = "source"
TO_VCS = "vcs"
# as used in toml file
CONFIG_KEY = "AutoVersionConfig"
|
class Constants(object):
'''Internal - reused strings'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.29 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 4 | 17 | 17 | 16 | 5 | 17 | 17 | 16 | 0 | 1 | 0 | 0 |
1,924 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/config.py
|
auto_version.config.AutoVersionConfig
|
class AutoVersionConfig(object):
"""Configuration - can be overridden using a toml config file"""
CONFIG_NAME = "DEFAULT"
RELEASED_VALUE = True
VERSION_LOCK_VALUE = True
VERSION_UNLOCK_VALUE = False
key_aliases = {
"__version__": Constants.VERSION_FIELD,
"__strict_version__": Constants.VERSION_STRICT_FIELD,
"PRODUCTION": Constants.RELEASE_FIELD,
"MAJOR": SemVerSigFig.major,
"MINOR": SemVerSigFig.minor,
"PATCH": SemVerSigFig.patch,
"VERSION_LOCK": Constants.VERSION_LOCK_FIELD,
Constants.COMMIT_COUNT_FIELD: Constants.COMMIT_COUNT_FIELD,
Constants.COMMIT_FIELD: Constants.COMMIT_FIELD,
}
_forward_aliases = {} # autopopulated later - reverse mapping of the above
targets = [os.path.join("src", "_version.py")]
regexers = {
".json": r"""^\s*[\"]?(?P<KEY>[\w:]+)[\"]?\s*:[\t ]*[\"']?(?P<VALUE>((\\\")?[^\r\n\t\f\v\",](\\\")?)+)[\"']?,?""", # noqa
".yaml": r"""^\s*[\"']?(?P<KEY>[\w]+)[\"']?\s*:\s*[\"']?(?P<VALUE>[\w\-.+\\\/:]*[^'\",\[\]#\s]).*""", # noqa
".yml": r"""^\s*[\"']?(?P<KEY>[\w]+)[\"']?\s*:\s*[\"']?(?P<VALUE>[\w\-.+\\\/:]*[^'\",\[\]#\s]).*""", # noqa
".py": r"""^\s*['\"]?(?P<KEY>\w+)['\"]?\s*[=:]\s*['\"]?(?P<VALUE>[^\r\n\t\f\v\"']+)['\"]?,?""", # noqa
".cs": r"""^(\w*\s+)*(?P<KEY>\w+)\s?[=:]\s*['\"]?(?P<VALUE>[^\r\n\t\f\v\"']+)['\"].*""", # noqa
".csproj": r"""^<(?P<KEY>\w+)>(?P<VALUE>\S+)<\/\w+>""", # noqa
".properties": r"""^\s*(?P<KEY>\w+)\s*=[\t ]*(?P<VALUE>[^\r\n\t\f\v\"']+)?""", # noqa
}
trigger_patterns = {
os.path.join("docs", "news", "*.major"): SemVerSigFig.major,
os.path.join("docs", "news", "*.feature"): SemVerSigFig.minor,
os.path.join("docs", "news", "*.bugfix"): SemVerSigFig.patch,
}
PRERELEASE_TOKEN = "pre"
BUILD_TOKEN = "build"
TAG_TEMPLATE = "release/{version}"
MIN_NONE_RELEASE_SIGFIG = (
"prerelease"
) # the minimum significant figure to increment is this isn't a release
@classmethod
def _deflate(cls):
"""Prepare for serialisation - returns a dictionary"""
data = {k: v for k, v in vars(cls).items() if not k.startswith("_")}
return {Constants.CONFIG_KEY: data}
@classmethod
def _inflate(cls, data):
"""Update config by deserialising input dictionary"""
for k, v in data[Constants.CONFIG_KEY].items():
setattr(cls, k, v)
return cls._deflate()
|
class AutoVersionConfig(object):
'''Configuration - can be overridden using a toml config file'''
@classmethod
def _deflate(cls):
'''Prepare for serialisation - returns a dictionary'''
pass
@classmethod
def _inflate(cls, data):
'''Update config by deserialising input dictionary'''
pass
| 5 | 3 | 5 | 0 | 4 | 1 | 2 | 0.26 | 1 | 1 | 1 | 0 | 0 | 0 | 2 | 2 | 53 | 3 | 47 | 20 | 42 | 12 | 21 | 18 | 18 | 2 | 1 | 1 | 3 |
1,925 |
ARMmbed/autoversion
|
ARMmbed_autoversion/src/auto_version/tests/test_autoversion.py
|
auto_version.tests.test_autoversion.YamlRegexTest
|
class YamlRegexTest(BaseReplaceCheck):
regexer = re.compile(config.regexers[".yaml"])
lines = [
""" "custom_Key": '1.2.3.4+dev0'\r\n""",
""" custom_Key: 1.2.3.4+dev0""",
""" custom_Key: 1.2.3.4+dev0 # comment""",
]
explicit_replacement = {
" name: python:3.7.1\r\n": " name: python:3.7.1\r\n",
" custom_Key: 1.2.3.4+dev0 # yay": " custom_Key: 5.6.7.8+dev1 # yay",
" CTEST_ARGS: -L node_cpu\r\n": " CTEST_ARGS: -L node_cpu\r\n",
}
non_matching = ["""entrypoint: [""]\r\n"""]
|
class YamlRegexTest(BaseReplaceCheck):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 75 | 13 | 0 | 13 | 5 | 12 | 3 | 5 | 5 | 4 | 0 | 3 | 0 | 0 |
1,926 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestcaseFilter.py
|
icetea_lib.TestSuite.TestcaseFilter.FilterException
|
class FilterException(Exception):
"""
Exception the filter can throw when something really goes wrong.
"""
pass
|
class FilterException(Exception):
'''
Exception the filter can throw when something really goes wrong.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,927 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestcaseContainer.py
|
icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer
|
class TestcaseContainer(object):
"""
Container for a single test case.
"""
def __init__(self, logger=None):
self.logger = logger
self.status = None
self.tcname = None
self._instance = None
self._modulename = None
self._moduleroot = None
self._final_configuration = {}
self._meta_schema = None
self._result = None
self._filepath = None
self._suiteconfig = {}
self._infodict = {}
if not logger:
import logging
self.logger = logging.getLogger("TCContainer")
if not self.logger.handlers:
self.logger.addHandler(logging.StreamHandler())
self.logger.setLevel(logging.INFO)
@staticmethod
def find_testcases(modulename, moduleroot, tc_meta_schema, path=None, suiteconfig=None,
logger=None):
"""
Static method for generating a list of TestcaseContainer objects from a module.
:param modulename: Name of module to parse
:param moduleroot: module root
:param tc_meta_schema: Schema to use in validation
:param path: Path to module file
:param suiteconfig: Optional configuration dictionary from suite
:param logger: Logger
:return: list of TestcaseContainer instances
"""
if not isinstance(modulename, str):
raise TypeError("modulename should be a string")
if len(modulename) == 0: # pylint: disable=len-as-condition
raise ValueError("modulename shouldn't be empty")
# Try to import the module.
try:
module = import_module(modulename)
except Exception as error:
if logger:
logger.debug("Error while importing module {}: {}".format(modulename, error))
raise ImportError("Error importing module {}: {}".format(modulename, error))
tclist = []
for test_class_name, test_class in iteritems(module.__dict__):
if not isclass(test_class):
continue
# if a class as the constant flag IS_TEST set to true or is named Testcase,
# fulfill test description and add it to the list
if getattr(test_class, "IS_TEST", False) or test_class_name == "Testcase":
testcase = TestcaseContainer(logger=logger)
test_case = test_class()
testcase.generate_members(modulename, test_case, moduleroot, path,
tc_meta_schema, test_class_name, suiteconfig)
tclist.append(testcase)
return tclist
def __copy__(self):
cont = TestcaseContainer.find_testcases(self._modulename, self._moduleroot,
self._meta_schema, self._filepath,
self._suiteconfig, self.logger)
for testcase in cont:
if self.tcname == testcase.tcname:
return testcase
return None
def generate_members(self, modulename, tc_instance, moduleroot, path, meta_schema,
test_class_name, suiteconfig=None):
"""
Setter and generator for internal variables.
:param modulename: Name of the module
:param tc_instance: Bench instance
:param moduleroot: Root folder of the module
:param path: Path to module file
:param meta_schema: Schema used for Validation
:param test_class_name: Name of the class
:param suiteconfig: Optional configuration dictionary from suite
:return: Nothing, modifies objects content in place
"""
self._modulename = modulename
self.tcname = tc_instance.test_name
self.status = TestStatus.PENDING
self._instance = tc_instance
self._final_configuration = {}
self._moduleroot = moduleroot
self._meta_schema = meta_schema
self._result = None
self._filepath = path
self._suiteconfig = suiteconfig if suiteconfig else {}
if tc_instance.get_test_component():
comp = tc_instance.get_test_component()
else:
comp = ["None"]
if tc_instance.get_features_under_test():
feat = tc_instance.get_features_under_test()
else:
feat = ''
if tc_instance.get_allowed_platforms():
platforms = tc_instance.get_allowed_platforms()
else:
platforms = ''
self._infodict = {
'name': self.tcname,
'path': modulename + "." + test_class_name,
'status': tc_instance.status() if tc_instance.status() else '',
'type': tc_instance.type() if tc_instance.type() else '',
'subtype': tc_instance.subtype() if tc_instance.subtype() else '',
'group': moduleroot,
'file': self._filepath, # path to the file which hold this test
'comp': comp,
'feature': feat,
'allowed_platforms': platforms,
'fail': ''
}
def get_infodict(self):
"""
Getter for internal infodict variable.
:return: dict
"""
return self._infodict
def get(self, field):
"""
Gets value of property/configuration field.
:param field: Name of configuration property to get
:return: Value of configuration property field. None if not found.
"""
return self.get_instance().config.get(field)
def get_instance_config(self):
"""
Get configuration currently set into the test instance.
:return: dict
"""
config = self.get_instance().config
config["filepath"] = self._filepath
return config
def get_final_config(self):
"""
Getter for final configuration of the test case from the _final_configuration variable.
:return: dict
"""
return self._final_configuration
def get_suiteconfig(self):
"""
Getter for the internal _suiteconfig variable.
:return: dict
"""
return self._suiteconfig
def get_instance(self):
"""
Getter for testcase Bench instance. If instance does not exist, it will be created.
:return: Bench instance of this testcase.
"""
return self._instance
def get_result(self):
"""
Get the internal Result object.
:return: Result
"""
return self._result
def get_name(self):
"""
Get the test case name.
:return: str
"""
return self.tcname
def merge_tc_config(self, conf_to_merge):
"""
Merges testcase configuration with dictionary conf_to_merge.
:param conf_to_merge: Dictionary of configuration to
merge with testcase default configuration
:return: Nothing
"""
self._final_configuration = merge(self._final_configuration, conf_to_merge)
def set_suiteconfig(self, config):
"""
Setter for suite config.
:param config: dict
:return: Nothing
"""
self._suiteconfig = config
def set_result(self, result):
"""
Setter for result object.
:param result: Result
:return: Nothing
"""
self._result = result
def set_final_config(self):
"""
Sets configuration for testcase instance from self._final_configuration field.
"""
if self._instance:
self._instance.set_config(self._final_configuration)
def validate_tc_instance(self):
"""
Validates this testcase instance metadata and fetches the tc configuration.
:return Nothing
:raises SyntaxError
"""
if not self.validate_testcase_metadata(self.get_instance()):
raise SyntaxError("Invalid TC metadata")
self._final_configuration = self.get_instance().get_config()
def validate_testcase_metadata(self, testcase):
"""
Validate tc metadata. Returns True if validation succeeds or False if if fails.
:param testcase: Bench
:return: Boolean
"""
try:
validate(testcase.config, self._meta_schema)
except ValidationError as err:
self.logger.error("Metadata validation failed! Please fix your TC Metadata!")
self.logger.debug(testcase.config)
self.logger.error(err)
return False
except SchemaError as err:
self.logger.error("Schema error")
self.logger.error(err)
return False
return True
def run(self, forceflash=False):
"""
Runs the testcase associated with this container.
:param forceflash: boolean, True if forceflash should be used
:return: Result
"""
if self.status == TestStatus.FINISHED:
self.logger.debug("Creating new bench instance for repeat.")
self._instance = self._create_new_bench_instance(self._modulename)
self.set_final_config()
self.status = TestStatus.RUNNING
self.logger.debug("Starting test case %s", self.tcname)
tc_instance = self.get_instance()
result = self._check_skip(tc_instance)
if result:
self.logger.debug("Skipping test case %s", self.tcname)
self._result = result
self.status = TestStatus.FINISHED
return result
# Check if version checking is enabled in cli
# and if the bench has the compatible key in it's config.
result = self._check_version(tc_instance)
if result is not None:
self.logger.debug("Version check triggered, skipping test case %s", self.tcname)
self._result = result
self.status = TestStatus.FINISHED
return result
parser = get_tc_arguments(get_base_arguments(get_parser()))
args, unknown = parser.parse_known_args()
if unknown:
for para in unknown:
self.logger.warning("Icetea received unknown parameter %s", para)
if not args.ignore_invalid_params:
self.logger.error(
"Unknown parameters received, exiting. To ignore this add "
"--ignore_invalid_params flag.")
parser.print_help()
result = tc_instance.get_result()
result.set_verdict(verdict="inconclusive", retcode=-1, duration=0)
self.status = TestStatus.FINISHED
return result
args.forceflash = forceflash
self.status = TestStatus.RUNNING
tc_instance.args = args
self.logger.info("")
self.logger.info("START TEST CASE EXECUTION: '%s'", tc_instance.test_name)
self.logger.info("")
start_time = datetime.datetime.now()
try:
retcode = tc_instance.run()
self.logger.debug("Test bench returned return code %d", retcode)
except: # pylint: disable=bare-except
traceback.print_exc()
retcode = -9999
stop_time = datetime.datetime.now()
if tc_instance.results:
result = tc_instance.results
else:
result = tc_instance.get_result(tc_file=self._filepath)
# Force garbage collection
# cleanup Testcase
tc_instance = None
gc.collect()
LogManager.finish_testcase_logging()
self.status = TestStatus.FINISHED
if isinstance(result, ResultList):
self.logger.debug("Received a list of results from test bench.")
return result
if result.retcode == ReturnCodes.RETCODE_FAIL_ABORTED_BY_USER:
print("Press CTRL + C again if you want to abort test run")
try:
time.sleep(5)
except KeyboardInterrupt:
self.status = TestStatus.FINISHED
raise
total_duration = stop_time - start_time
duration = total_duration.total_seconds()
self.logger.debug("Duration: %d seconds", duration)
verdict = None
if retcode == 0:
verdict = "pass"
elif retcode in ReturnCodes.INCONCLUSIVE_RETCODES:
verdict = "inconclusive"
elif retcode == ReturnCodes.RETCODE_SKIP:
verdict = "skip"
else:
verdict = "fail"
result.set_verdict(
verdict=verdict,
retcode=retcode,
duration=duration)
self._result = result
return result
def _create_new_bench_instance(self, modulename):
"""
Create a new Bench instance of this test for repeat and retry purposes.
:param modulename: Name of the original module.
:return: Bench or None
"""
module = import_module(modulename)
for test_class_name, test_class in iteritems(module.__dict__):
if not isclass(test_class):
continue
if getattr(test_class, "IS_TEST", False) is True or test_class_name == "Testcase":
inst = test_class()
if inst.test_name == self.tcname:
return inst
else:
continue
return None
def _load_testcase(self, modulename, verbose=False): # pylint: disable=no-self-use
"""
:param modulename: testcase to be loaded
:param verbose: print exceptions when loading class
:return: testcase instance
:raises TypeError exception when modulename is not string
:raises ImportError exception when cannot load testcase
"""
if not isinstance(modulename, str):
raise TypeError("Error, runTest: modulename not a string.")
try:
module = load_class(modulename, verbose)
except ValueError as error:
raise ImportError("Error, load_testcase: loadClass raised ValueError: {}".format(error))
if module is None:
raise ImportError("Error, runTest: "
"loadClass returned NoneType for modulename: %s" % modulename)
return module()
def _check_skip(self, tc_instance):
"""
Check if this tc should be skipped according to the configuration.
:param tc_instance: Bench
:return: False if no skip is needed, Result with the skip set otherwise.
"""
# Skip the TC IF NOT defined on the command line
if tc_instance.skip():
info = tc_instance.skip_info()
if info.get('only_type') or info.get('platforms'):
# only_type cannot be properly checked here, se we proceed
# and check the final configuration in Bench.
return False
self.logger.info("TC '%s' will be skipped because of '%s'", tc_instance.get_test_name(),
tc_instance.skip_reason())
result = tc_instance.get_result()
result.set_verdict(verdict='skip', retcode=-1, duration=0)
del tc_instance
self._result = result
return result
return False
def _check_major_version(self, fw_version, version_string): # pylint: disable=no-self-use
"""
Check if major version matches.
:param fw_version: semver string
:param version_string: semver string
:return: Boolean
"""
if int(fw_version[0]) > 0 and version_string[0] == '0':
return False
elif int(fw_version[0]) > 0 and version_string[1] == '0':
return False
elif int(fw_version[0]) > 0 and version_string[2] == '0':
return False
return True
def _check_version(self, tc_instance):
"""
Check if version number is compatible with this version of Icetea.
:param tc_instance: Bench
:return: None or Result.
"""
if tc_instance.config.get(
"compatible") and tc_instance.config['compatible']['framework']['name']:
framework = tc_instance.config['compatible']['framework']
# Check if version requirement is available
# and that the testcase is meant for this framework
if framework['version'] and framework['name'] == "Icetea":
ver_str = framework['version']
fw_version = get_fw_version()
try:
if not self._check_major_version(fw_version, ver_str):
result = self._wrong_version(tc_instance, ver_str,
"Testcase not suitable for version >1.0.0. "
"Please install Icetea {}".format(ver_str))
return result
except ValueError:
# Unable to convert fw_version to integer, let's just proceed.
return None
if ver_str[0].isdigit():
return self._wrong_version(
tc_instance, ver_str) if fw_version != ver_str else None
# Handle case where the version is a version number without comparison operators
if not semver.match(fw_version, ver_str):
result = self._wrong_version(tc_instance, ver_str)
return result
return None
else:
return None
def _wrong_version(self, tc_instance, ver_str, msg=None):
"""
Helper for constructing a Result object for version checking.
:param tc_instance: Bench, del is called for this before returning.
:param ver_str: semver string.
:param msg: message.
:return: Result
"""
msg = msg if msg else "Version {} of Icetea required".format(ver_str)
self.logger.info("TC '%s' will be skipped because of '%s'", tc_instance.get_test_name(),
msg)
result = tc_instance.get_result()
result.skip_reason = msg
result.set_verdict(verdict='skip', retcode=-1, duration=0)
del tc_instance
return result
|
class TestcaseContainer(object):
'''
Container for a single test case.
'''
def __init__(self, logger=None):
pass
@staticmethod
def find_testcases(modulename, moduleroot, tc_meta_schema, path=None, suiteconfig=None,
logger=None):
'''
Static method for generating a list of TestcaseContainer objects from a module.
:param modulename: Name of module to parse
:param moduleroot: module root
:param tc_meta_schema: Schema to use in validation
:param path: Path to module file
:param suiteconfig: Optional configuration dictionary from suite
:param logger: Logger
:return: list of TestcaseContainer instances
'''
pass
def __copy__(self):
pass
def generate_members(self, modulename, tc_instance, moduleroot, path, meta_schema,
test_class_name, suiteconfig=None):
'''
Setter and generator for internal variables.
:param modulename: Name of the module
:param tc_instance: Bench instance
:param moduleroot: Root folder of the module
:param path: Path to module file
:param meta_schema: Schema used for Validation
:param test_class_name: Name of the class
:param suiteconfig: Optional configuration dictionary from suite
:return: Nothing, modifies objects content in place
'''
pass
def get_infodict(self):
'''
Getter for internal infodict variable.
:return: dict
'''
pass
def get_infodict(self):
'''
Gets value of property/configuration field.
:param field: Name of configuration property to get
:return: Value of configuration property field. None if not found.
'''
pass
def get_instance_config(self):
'''
Get configuration currently set into the test instance.
:return: dict
'''
pass
def get_final_config(self):
'''
Getter for final configuration of the test case from the _final_configuration variable.
:return: dict
'''
pass
def get_suiteconfig(self):
'''
Getter for the internal _suiteconfig variable.
:return: dict
'''
pass
def get_instance_config(self):
'''
Getter for testcase Bench instance. If instance does not exist, it will be created.
:return: Bench instance of this testcase.
'''
pass
def get_result(self):
'''
Get the internal Result object.
:return: Result
'''
pass
def get_name(self):
'''
Get the test case name.
:return: str
'''
pass
def merge_tc_config(self, conf_to_merge):
'''
Merges testcase configuration with dictionary conf_to_merge.
:param conf_to_merge: Dictionary of configuration to
merge with testcase default configuration
:return: Nothing
'''
pass
def set_suiteconfig(self, config):
'''
Setter for suite config.
:param config: dict
:return: Nothing
'''
pass
def set_result(self, result):
'''
Setter for result object.
:param result: Result
:return: Nothing
'''
pass
def set_final_config(self):
'''
Sets configuration for testcase instance from self._final_configuration field.
'''
pass
def validate_tc_instance(self):
'''
Validates this testcase instance metadata and fetches the tc configuration.
:return Nothing
:raises SyntaxError
'''
pass
def validate_testcase_metadata(self, testcase):
'''
Validate tc metadata. Returns True if validation succeeds or False if if fails.
:param testcase: Bench
:return: Boolean
'''
pass
def run(self, forceflash=False):
'''
Runs the testcase associated with this container.
:param forceflash: boolean, True if forceflash should be used
:return: Result
'''
pass
def _create_new_bench_instance(self, modulename):
'''
Create a new Bench instance of this test for repeat and retry purposes.
:param modulename: Name of the original module.
:return: Bench or None
'''
pass
def _load_testcase(self, modulename, verbose=False):
'''
:param modulename: testcase to be loaded
:param verbose: print exceptions when loading class
:return: testcase instance
:raises TypeError exception when modulename is not string
:raises ImportError exception when cannot load testcase
'''
pass
def _check_skip(self, tc_instance):
'''
Check if this tc should be skipped according to the configuration.
:param tc_instance: Bench
:return: False if no skip is needed, Result with the skip set otherwise.
'''
pass
def _check_major_version(self, fw_version, version_string):
'''
Check if major version matches.
:param fw_version: semver string
:param version_string: semver string
:return: Boolean
'''
pass
def _check_version(self, tc_instance):
'''
Check if version number is compatible with this version of Icetea.
:param tc_instance: Bench
:return: None or Result.
'''
pass
def _wrong_version(self, tc_instance, ver_str, msg=None):
'''
Helper for constructing a Result object for version checking.
:param tc_instance: Bench, del is called for this before returning.
:param ver_str: semver string.
:param msg: message.
:return: Result
'''
pass
| 27 | 24 | 19 | 2 | 12 | 6 | 3 | 0.49 | 1 | 15 | 3 | 1 | 24 | 12 | 25 | 25 | 501 | 67 | 294 | 78 | 264 | 145 | 253 | 72 | 226 | 15 | 1 | 4 | 81 |
1,928 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestcaseFilter.py
|
icetea_lib.TestSuite.TestcaseFilter.TestcaseFilter
|
class TestcaseFilter(object):
"""
TestcaseFilter class. Provides the handling for different filtering arguments.
Provides a match function to match testcases with filter.
"""
def __init__(self):
self._filter = {'list': False, 'name': False, 'status': False,
'group': False, 'type': False, 'subtype': False,
'comp': False, 'feature': False, 'platform': False}
def tc(self, value): # pylint: disable=invalid-name,too-many-branches
"""
Tc filter.
:param value: test case.
:return: TestcaseFilter (self)
"""
# tc can be:
# int, tuple, list or str(any of the above)
if isinstance(value, str):
# Wildcard check
if value == 'all':
self._filter['name'] = 'all'
return self
pfilter = []
try:
pfilter = le(value)
except (ValueError, SyntaxError):
# tc wasn't immediately parseable.
# This can mean that it was a list/tuple with a string, which gets
# interpreted as a variable, which causes a malformed string exception.
# Therefore, need to split and evaluate each part of the list separately
pass
if pfilter == []:
# we get bad names/indexes if we leave parentheses.
# A dictionary will not add anything to pfilter.
value = value.strip('([])')
for item in value.split(','):
try:
# Transforms string into other python type
# Troublesome if someone names a testcase as a valid python type...
le_item = le(item)
pfilter.append(le_item)
except (ValueError, SyntaxError):
# It's just a string, but it's also a name(maybe).
# Hopefully we're on a filesystem that allows files with identical paths
pfilter.append(item)
if len(pfilter) == 1:
# It was a single string.
self._filter['name'] = pfilter[0]
return self
elif not pfilter: # pylint: disable=len-as-condition
pass
value = pfilter
if isinstance(value, int) and (value is not False and value is not True):
if value < 1:
raise TypeError("Error, createFilter: non-positive integer " + str(value))
else:
self._filter['list'] = [value - 1]
elif isinstance(value, (list, tuple)):
if len(value) < 1:
raise IndexError("Error, createFilter: Index list empty.")
for i in value:
if not isinstance(i, int) and not isinstance(i, str):
raise TypeError("Error, createFilter: "
"Index list has invalid member: {}".format(str(value)))
self._filter['list'] = [x - 1 for x in value if isinstance(x, int)]
# pylint: disable=no-member
self._filter['list'].extend([x for x in value if isinstance(x, str)])
elif value is None:
raise TypeError("tc filter cannot be None")
else:
# In case someone calls with NoneType or anything else
raise TypeError("Unrecognised type for tc filter. tc must be int, str, list or tuple")
return self
def status(self, status):
"""
Add status filter.
:param status: filter value
:return: TestcaseFilter
"""
return self._add_filter_key("status", status)
def group(self, group):
"""
Add group filter.
:param group: Filter value
:return: TestcaseFilter
"""
return self._add_filter_key("group", group)
def testtype(self, testtype):
"""
Add type filter.
:param testtype: Filter value
:return: TestcaseFilter
"""
return self._add_filter_key("type", testtype)
def subtype(self, subtype):
"""
Add subtype filter.
:param subtype: Filter value
:return: TestcaseFilter
"""
return self._add_filter_key("subtype", subtype)
def component(self, component):
"""
Add component filter.
:param component: Filter value
:return: TestcaseFilter
"""
return self._add_filter_key("comp", component)
def feature(self, feature):
"""
Add feature filter.
:param feature: Filter value
:return: TestcaseFilter
"""
return self._add_filter_key("feature", feature)
def platform(self, platform):
"""
Add platform filter.
:param platform: Filter value
:return: TestcaseFilter
"""
return self._add_filter_key("platform", platform)
def get_filter(self):
"""
Get the filter dictionary.
:return: dict.
"""
return self._filter
@staticmethod
def _match_group(string_to_match, args):
"""
Matcher for group-filter.
:param string_to_match: Filter string.
:param args: tuple or list of arguments, args[0] must be the test case metadata.
args[1] must be the filter dictionary.
:return: Boolean
"""
testcase = args[0]
filters = args[1]
group_ok = True
if 'group' in filters and string_to_match:
group = string_to_match.split(os.sep) # pylint: disable=no-member
group = [x for x in group if len(x)] # Remove empty members
if len(group) == 1:
group = string_to_match.split(',') # pylint: disable=no-member
tcgroup = testcase['group'].split(os.sep)
for member in group:
if member not in tcgroup:
group_ok = False
break
return group_ok
def _match_list(self, testcase, tc_index):
"""
Matcher for test case list.
:param testcase: Testcase metadata
:param tc_index: index
:return: Boolean
"""
list_ok = False
if 'list' in self._filter.keys() and self._filter['list']:
for index in self._filter['list']: # pylint: disable=not-an-iterable
if isinstance(index, int):
if index < 0:
raise TypeError(
"Error, filterTestcases: "
"index list contained non-positive integer: %s" % self._filter['list'])
if index == tc_index:
list_ok = True
break
elif isinstance(index, str):
if testcase['name'] == index:
list_ok = True
break
else:
raise TypeError("Error, filterTestcases: "
"index list contained non-integer: '%s'" % self._filter['list'])
if not list_ok:
return False
else:
list_ok = True
return list_ok
@staticmethod
def _match_platform(string_to_match, args):
"""
Matcher for allowed platforms
:param string_to_match: Filter string
:param args: Tuple or list of arguments, args[0] must be test case metadata dictionary,
args[1] must be filter dictionary.
:return: Boolean
"""
testcase = args[0]
filters = args[1]
platform_ok = True
if 'platform' in filters and string_to_match:
platforms = string_to_match
platforms = platforms if isinstance(platforms, list) else [platforms]
tcplatforms = testcase['allowed_platforms']
if tcplatforms:
for member in platforms:
if member not in tcplatforms:
platform_ok = False
else:
platform_ok = True
break
return platform_ok
@staticmethod
def _match_rest(string_to_match, args):
"""
Matcher for generic metadata
:param string_to_match: Filter string to match against.
:param args: arguments as list or tuple, args[0] must be test case metadata as
dictionary, args[1] must be list of filter keys, args[2] must be key currently
being processed.
:return: Boolean
"""
testcase = args[0]
filter_keys = args[1]
filter_key = args[2]
rest_ok = True
if filter_key in filter_keys and string_to_match:
# Possible that string comparison can cause encoding comparison error.
# In the case where the caseFilter is 'all', the step is skipped
if filter_key == 'name' and string_to_match == 'all':
return True
if isinstance(testcase[filter_key], list):
if isinstance(
string_to_match, str) and string_to_match not in testcase[filter_key]:
return False
elif isinstance(testcase[filter_key], str):
# pylint: disable=unsupported-membership-test
if isinstance(string_to_match, str) and testcase[filter_key] != string_to_match:
return False
elif isinstance(
string_to_match, list) and testcase[filter_key] not in string_to_match:
return False
return rest_ok
    def match(self, testcase, tc_index):  # pylint: disable=too-many-branches,too-many-statements
        """
        Match function. Matches testcase information with this filter.

        :param testcase: TestcaseContainer instance
        :param tc_index: Index of testcase in list
        :return: True if all filter fields were successfully matched. False otherwise.
        :raises FilterException: when a filter expression fails to parse (SyntaxError
            from create_match_bool).
        """
        testcase = testcase.get_infodict()
        filter_keys = self._filter.keys()
        # Index/list filter is evaluated directly; the other filters are boolean
        # expressions evaluated through create_match_bool with a matcher callback.
        list_ok = self._match_list(testcase, tc_index)
        try:
            if self._filter["group"]:
                group_ok = create_match_bool(self._filter["group"], self._match_group, (testcase,
                                                                                        filter_keys)
                                             )
            else:
                group_ok = True
        except SyntaxError as error:
            raise FilterException("Error while handling group filter {}".format(
                self._filter["group"]), error)
        try:
            if self._filter["platform"]:
                platform_ok = create_match_bool(self._filter["platform"],
                                                self._match_platform, (testcase, filter_keys))
            else:
                platform_ok = True
        except SyntaxError as error:
            raise FilterException("Error while handling platform filter {}".format(
                self._filter["platform"]), error)
        # Remaining simple filters all share the generic matcher; the first one
        # that fails to match short-circuits the loop.
        keys = ['status', 'type', 'subtype', 'comp', 'name', 'feature']
        rest_ok = True
        for key in keys:
            try:
                if self._filter[key]:
                    key_ok = create_match_bool(self._filter[key], self._match_rest, (testcase,
                                                                                     filter_keys,
                                                                                     key))
                    if not key_ok:
                        rest_ok = False
                        break
            except SyntaxError as error:
                raise FilterException(
                    "Error while handling filter {}: {}".format(key, self._filter[key]), error)
        return list_ok and group_ok and rest_ok and platform_ok
def _add_filter_key(self, key, value):
"""
Helper for populating filter keys.
:param key: str
:param value: multiple types, value to set.
:return: TestcaseFilter (self).
"""
if not key or not value:
return self
if not isinstance(value, str):
raise TypeError("createFilter: filter argument {} not string.")
self._filter[key] = value
return self
|
class TestcaseFilter(object):
'''
TestcaseFilter class. Provides the handling for different filtering arguments.
Provides a match function to match testcases with filter.
'''
def __init__(self):
pass
def tc(self, value):
'''
Tc filter.
:param value: test case.
:return: TestcaseFilter (self)
'''
pass
def status(self, status):
'''
Add status filter.
:param status: filter value
:return: TestcaseFilter
'''
pass
def group(self, group):
'''
Add group filter.
:param group: Filter value
:return: TestcaseFilter
'''
pass
def testtype(self, testtype):
'''
Add type filter.
:param testtype: Filter value
:return: TestcaseFilter
'''
pass
def subtype(self, subtype):
'''
Add subtype filter.
:param subtype: Filter value
:return: TestcaseFilter
'''
pass
def component(self, component):
'''
Add component filter.
:param component: Filter value
:return: TestcaseFilter
'''
pass
def feature(self, feature):
'''
Add feature filter.
:param feature: Filter value
:return: TestcaseFilter
'''
pass
def platform(self, platform):
'''
Add platform filter.
:param platform: Filter value
:return: TestcaseFilter
'''
pass
def get_filter(self):
'''
Get the filter dictionary.
:return: dict.
'''
pass
@staticmethod
def _match_group(string_to_match, args):
'''
Matcher for group-filter.
:param string_to_match: Filter string.
:param args: tuple or list of arguments, args[0] must be the test case metadata.
args[1] must be the filter dictionary.
:return: Boolean
'''
pass
def _match_list(self, testcase, tc_index):
'''
Matcher for test case list.
:param testcase: Testcase metadata
:param tc_index: index
:return: Boolean
'''
pass
@staticmethod
def _match_platform(string_to_match, args):
'''
Matcher for allowed platforms
:param string_to_match: Filter string
:param args: Tuple or list of arguments, args[0] must be test case metadata dictionary,
args[1] must be filter dictionary.
:return: Boolean
'''
pass
@staticmethod
def _match_rest(string_to_match, args):
'''
Matcher for generic metadata
:param string_to_match: Filter string to match against.
:param args: arguments as list or tuple, args[0] must be test case metadata as
dictionary, args[1] must be list of filter keys, args[2] must be key currently
being processed.
:return: Boolean
'''
pass
def match(self, testcase, tc_index):
'''
Match function. Matches testcase information with this filter.
:param testcase: TestcaseContainer instance
:param tc_index: Index of testcase in list
:return: True if all filter fields were successfully matched. False otherwise.
'''
pass
def _add_filter_key(self, key, value):
'''
Helper for populating filter keys.
:param key: str
:param value: multiple types, value to set.
:return: TestcaseFilter (self).
'''
pass
| 20 | 16 | 19 | 2 | 11 | 7 | 4 | 0.61 | 1 | 9 | 1 | 0 | 13 | 1 | 16 | 16 | 333 | 40 | 186 | 52 | 166 | 114 | 154 | 48 | 137 | 16 | 1 | 4 | 65 |
1,929 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestStepError.py
|
icetea_lib.TestStepError.InconclusiveError
|
class InconclusiveError(Exception):
    """
    Raised for inconclusive test outcomes, such as broken environments or
    missing requirements, as opposed to genuine test failures.
    """
|
class InconclusiveError(Exception):
'''
Inconclusive error exception, used for showing errors such as
broken environments and missing requirements.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,930 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/RunnerSM.py
|
icetea_lib.TestBench.RunnerSM.RunnerSM
|
class RunnerSM(object):
    """
    State machine class.
    States defined in STATES constant and transitions in TRANSITIONS constant.
    The actual state handling is delegated to transitions.Machine, attached in run().
    """
    INITIAL_STATE = "initial"
    SETUP_STATE = "setup"
    SETUP_TEST_STATE = "setup_test"
    EXECUTING_STATE = "executing"
    TD_TEST_STATE = "teardown_test"
    TEARDOWN_STATE = "teardown"
    FINISHED_STATE = "finished"

    # Machine states; each on_enter callback runs the corresponding bench phase.
    STATES = [
        INITIAL_STATE,
        {"name": SETUP_STATE, "on_enter": "_setup_bench"},
        {"name": SETUP_TEST_STATE, "on_enter": "setup"},
        {"name": EXECUTING_STATE, "on_enter": "_run_cases"},
        {"name": TD_TEST_STATE, "on_enter": "teardown"},
        {"name": TEARDOWN_STATE, "on_enter": "_teardown_bench"},
        {"name": FINISHED_STATE, "on_enter": "_finish"}
    ]

    TRANSITIONS = [
        {"trigger": "start", "source": INITIAL_STATE, "dest": SETUP_STATE},
        {"trigger": "setup_to_setup_test", "source": SETUP_STATE, "dest": SETUP_TEST_STATE},
        {"trigger": "setup_test_to_executing", "source": SETUP_TEST_STATE, "dest": EXECUTING_STATE},
        {"trigger": "executing_to_teardown_test", "source": EXECUTING_STATE, "dest": TD_TEST_STATE},
        {"trigger": "teardown_test_to_teardown", "source": TD_TEST_STATE, "dest": TEARDOWN_STATE},
        {"trigger": "finish",
         "source": [SETUP_STATE, SETUP_TEST_STATE, TEARDOWN_STATE], "dest": FINISHED_STATE},
        {"trigger": "jump_to_teardown",
         "source": [SETUP_STATE, SETUP_TEST_STATE, EXECUTING_STATE, TD_TEST_STATE],
         "dest": TEARDOWN_STATE},
        {"trigger": "jump_to_teardown_test",
         "source": [SETUP_TEST_STATE, EXECUTING_STATE], "dest": TD_TEST_STATE}
    ]

    # Every exception type the runner is prepared to catch during a phase.
    EXCEPTIONS = (
        EnvironmentError,
        AllocationError,
        TestStepError,
        ResourceInitError,
        TestStepTimeout,
        TestStepFail,
        InconclusiveError,
        SkippedTestcaseException,
        KeyboardInterrupt,
        NameError,
        LookupError,
        ValueError,
        Exception
    )

    def __init__(self, benchapi, logger=None, **kwargs):
        super(RunnerSM, self).__init__(**kwargs)
        self.machine = None  # transitions.Machine, created in run()
        self.logger = logger if logger else LogManager.get_dummy_logger()
        self._benchapi = benchapi

    def setup(self):
        """
        Alias for test case setup method.
        Returns True when the bench has no setup or it is skipped via --skip_setup.
        """
        if hasattr(self._benchapi, "setup"):
            if self._benchapi.args.skip_setup:
                self.logger.info("Skipping setup")
                return True
            return self._benchapi.setup()
        return True

    def teardown(self):
        """
        Alias for test case teardown method.
        Returns True when the bench has no teardown or it is skipped via --skip_teardown.
        """
        if hasattr(self._benchapi, "teardown"):
            if self._benchapi.args.skip_teardown:
                self.logger.info("Skipping teardown")
                return True
            return self._benchapi.teardown()
        return True

    def _run_cases(self):
        """
        Run test case functions.
        :return: Nothing
        """
        if not self._benchapi.args.skip_case:
            tests = test_methods(self._benchapi)
            for test in tests:
                getattr(self._benchapi, test)()
        else:
            self.logger.info("Skipping case-functions.")

    def check_skip(self):
        """
        Check if we need to skip this tc.
        :return: ReturnCodes.RETCODE_SKIP or None
        """
        skip = self._benchapi.check_skip()
        if skip:
            self.logger.info("TC '%s' will be skipped because of '%s'", self._benchapi.test_name,
                             (self._benchapi.skip_reason()))
            result = self._benchapi.get_result()
            result.set_verdict(verdict='skip', retcode=-1, duration=0)
            self._benchapi.set_failure(ReturnCodes.RETCODE_SKIP, self._benchapi.skip_reason())
            return ReturnCodes.RETCODE_SKIP
        return None

    def _setup_bench(self):
        """
        Initialize test bench. Validate dut configurations, kill putty and kitty processes,
        load plugins, create empty Result object for this test case, initialize duts,
        collect metainformation from initialized duts, start sniffer, start test case timer,
        start external services and finally send pre-commands to duts.
        :return: Nothing
        """
        self._benchapi.load_plugins()
        if self._benchapi.args.kill_putty:
            self.logger.debug("Kill putty/kitty processes")
            self._benchapi.kill_process(['kitty.exe', 'putty.exe'])
        self._benchapi.init_duts()
        self._benchapi.start_external_services()
        self._benchapi.send_pre_commands(self._benchapi.args.pre_cmds)

    def _teardown_bench(self):
        """
        Tear down the Bench object.
        :return: Nothing
        """
        # Each cleanup step is attempted independently so one failure does not
        # prevent the remaining cleanup from running.
        # pylint: disable=broad-except
        try:
            self._benchapi.send_post_commands(self._benchapi.args.post_cmds)
        except Exception as error:
            self.logger.error(error)
        try:
            self._benchapi.duts_release()
        except Exception as error:
            self.logger.error(error)
        try:
            self._benchapi.clear_sniffer()
        except Exception as error:
            self.logger.error(error)
        try:
            self._benchapi.stop_external_services()
        except Exception as error:
            self.logger.error(error)

    def _call_exception(self, method, error, retcode, message=None):
        """
        Handle error situation. Makes sure that the test case proceeds to the correct step from
        the step where it failed.
        :param method: Method where fail happened
        :param error: Exception
        :param retcode: int
        :param message: Message to log
        :return: Nothing
        """
        if not self.retcode and retcode:
            # Bug fix: this used to assign 'self.__retcode', whose name-mangled
            # attribute (_RunnerSM__retcode) was never read anywhere; record the
            # first failure code where the rest of the class looks for it.
            self.retcode = retcode
        message = message if message else str(error)
        self._benchapi.set_failure(retcode, message)
        # Lazy %-args instead of eager string formatting.
        self.logger.error("%s failed: %s", method, message)
        if method in ["setup_bench", "teardown_test"]:
            self.logger.info("------TEST BENCH TEARDOWN STARTS---------")
            try:
                self.jump_to_teardown()
            except self.EXCEPTIONS:
                self.logger.exception("Exception in test bench teardown!")
            self.logger.info("------TEST BENCH TEARDOWN ENDS---------")
        elif method in ["setup", "case"]:
            # A TestStepFail in setup (or any failure in case) still gets its
            # test-case-level teardown before the bench teardown runs.
            if (error.__class__ == TestStepFail and method == "setup") or method == "case":
                self.logger.info("------TC TEARDOWN STARTS---------")
                try:
                    self.jump_to_teardown_test()
                except self.EXCEPTIONS:
                    self.logger.exception("Exception in test case teardown!")
                self.logger.info("------TC TEARDOWN ENDS---------")
            self.logger.info("------TEST BENCH TEARDOWN STARTS---------")
            try:
                self.jump_to_teardown()
            except self.EXCEPTIONS:
                self.logger.exception("Exception in test bench teardown!")
            self.logger.info("------TEST BENCH TEARDOWN ENDS---------")
        if self.state != "finished":
            self.finish()

    def _finish(self):
        """
        Finish step.
        :return: Nothing
        """
        self.logger.info("Test case finished.")

    def run(self):  # pylint: disable=too-many-return-statements
        """
        Run through the state machine states, triggering states in the correct order for correct
        operation. Fail states are triggered through method 'failure'.
        :return: int
        """
        self.machine = Machine(self, states=self.STATES, transitions=self.TRANSITIONS,
                               initial="initial")
        self.retcode = ReturnCodes.RETCODE_SUCCESS
        try:
            # Move from initial to setup
            self.logger.info("------TEST BENCH SETUP STARTS---------")
            self.start()
        except self.EXCEPTIONS as error:
            returncode, message = self._select_error_returncode(self.SETUP_STATE, error)
            self.logger.info("------TEST BENCH SETUP ENDS---------")
            self.failure(error, "setup_bench", returncode, message)
            if self.state == self.FINISHED_STATE:
                return self.retcode
        self.logger.info("------TEST BENCH SETUP ENDS---------")
        try:
            # Move from setup to setup_test
            self.logger.info("------TC SETUP STARTS---------")
            self.setup_to_setup_test()
        except self.EXCEPTIONS as error:
            returncode, message = self._select_error_returncode(self.SETUP_TEST_STATE, error)
            self.logger.info("------TC SETUP ENDS---------")
            self.failure(error, "setup", returncode, message)
            if self.state == self.FINISHED_STATE:
                return self.retcode
        self.logger.info("------TC SETUP ENDS---------")
        try:
            # Move from setup_test to executing
            self.logger.info("------TEST CASE STARTS---------")
            self.setup_test_to_executing()
        except self.EXCEPTIONS as error:
            self.logger.error(error)
            returncode, message = self._select_error_returncode(self.EXECUTING_STATE, error)
            self.logger.info("-----------TEST CASE ENDS-----------")
            self.failure(error, "case", returncode, message)
            if self.state == self.FINISHED_STATE:
                return self.retcode
        self.logger.info("-----------TEST CASE ENDS-----------")
        try:
            # Move from executing to teardown_test
            self.logger.info("------TC TEARDOWN STARTS---------")
            self.executing_to_teardown_test()
        except self.EXCEPTIONS as error:
            returncode, message = self._select_error_returncode(self.TD_TEST_STATE, error)
            self.logger.info("------TC TEARDOWN ENDS---------")
            self.failure(error, "teardown_test", returncode, message)
            if self.state == self.FINISHED_STATE:
                return self.retcode
        self.logger.info("------TC TEARDOWN ENDS---------")
        try:
            # Move from teardown_test to teardown
            self.logger.info("------TEST BENCH TEARDOWN STARTS---------")
            self.teardown_test_to_teardown()
        except self.EXCEPTIONS as error:
            returncode, message = self._select_error_returncode(self.TEARDOWN_STATE, error)
            self.logger.info("------TEST BENCH TEARDOWN ENDS---------")
            self.failure(error, "teardown_bench", returncode, message)
            if self.state == self.FINISHED_STATE:
                return self.retcode
        self.logger.info("------TEST BENCH TEARDOWN ENDS---------")
        try:
            # Move from teardown to finished
            self.finish()
        except self.EXCEPTIONS as error:
            returncode, message = self._select_error_returncode(self.FINISHED_STATE, error)
            self.failure(error, "finish", returncode, message)
            if self.state == self.FINISHED_STATE:
                return self.retcode
        return self.retcode

    def _select_error_returncode(self, state, error):
        """
        Matrix for selecting return codes and messages for different fail states.
        :param state: State where fail happened
        :param error: Exception that happened.
        :return: (return_code, message)
        """
        retcode_matrix = {
            "setup": {
                EnvironmentError: ReturnCodes.RETCODE_FAIL_SETUP_BENCH,
                ResourceInitError: ReturnCodes.RETCODE_FAIL_SETUP_BENCH,
                SkippedTestcaseException: ReturnCodes.RETCODE_SKIP,
                KeyboardInterrupt: {"retcode": ReturnCodes.RETCODE_FAIL_ABORTED_BY_USER,
                                    "message": "Aborted by user"},
                Exception: ReturnCodes.RETCODE_FAIL_SETUP_BENCH,
            },
            "setup_test": {
                TestStepTimeout: ReturnCodes.RETCODE_FAIL_SETUP_TC,
                TestStepFail: ReturnCodes.RETCODE_FAIL_SETUP_TC,
                TestStepError: ReturnCodes.RETCODE_FAIL_SETUP_TC,
                InconclusiveError: ReturnCodes.RETCODE_FAIL_INCONCLUSIVE,
                SkippedTestcaseException: ReturnCodes.RETCODE_SKIP,
                KeyboardInterrupt: {"retcode": ReturnCodes.RETCODE_FAIL_ABORTED_BY_USER,
                                    "message": "Aborted by user"},
                Exception: ReturnCodes.RETCODE_FAIL_SETUP_TC
            },
            "executing": {
                TestStepTimeout: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION,
                TestStepFail: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION,
                TestStepError: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION,
                InconclusiveError: ReturnCodes.RETCODE_FAIL_INCONCLUSIVE,
                SkippedTestcaseException: ReturnCodes.RETCODE_SKIP,
                NameError: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION,
                LookupError: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION,
                ValueError: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION,
                KeyboardInterrupt: {"retcode": ReturnCodes.RETCODE_FAIL_ABORTED_BY_USER,
                                    "message": "Aborted by user"},
                Exception: ReturnCodes.RETCODE_FAIL_TC_EXCEPTION
            },
            "teardown_test": {
                TestStepTimeout: ReturnCodes.RETCODE_FAIL_TEARDOWN_TC,
                TestStepFail: ReturnCodes.RETCODE_FAIL_TEARDOWN_TC,
                TestStepError: ReturnCodes.RETCODE_FAIL_TEARDOWN_TC,
                InconclusiveError: ReturnCodes.RETCODE_FAIL_INCONCLUSIVE,
                SkippedTestcaseException: ReturnCodes.RETCODE_SKIP,
                KeyboardInterrupt: {"retcode": ReturnCodes.RETCODE_FAIL_ABORTED_BY_USER,
                                    "message": "Aborted by user"},
                Exception: ReturnCodes.RETCODE_FAIL_TEARDOWN_TC
            },
            "teardown": {
                KeyboardInterrupt: ReturnCodes.RETCODE_FAIL_ABORTED_BY_USER,
            }
        }
        message = str(error)
        return_code = None
        if state in retcode_matrix:
            # 'state' is a known key here, so the lookup cannot return None.
            # (The original code carried a dead 'state_dict is None' branch
            # that would itself have raised AttributeError.)
            state_dict = retcode_matrix[state]
            retcode = state_dict.get(error.__class__)
            if retcode is None:
                # Exact exception type not in the matrix: fall back to the
                # state's generic Exception entry (may be None for "teardown").
                return_code = state_dict.get(Exception)
            elif isinstance(retcode, dict):
                return_code = retcode.get("retcode")
                message = retcode.get("message")
            else:
                return_code = retcode
            self.retcode = return_code
        return return_code, message

    def failure(self, exception, method, retcode, message=None):
        """
        Failure handling.
        :param exception: Exception that happened
        :param method: Method where error happened
        :param retcode: int
        :param message: str
        :return: Nothing
        """
        # Skips are expected; do not spam a traceback for them.
        exc_info = not isinstance(exception, SkippedTestcaseException)
        if method == "setup_bench":
            self.logger.error("Test bench initialization failed.", exc_info=exc_info)
        elif method == "setup":
            self.logger.error("Test setup failed.", exc_info=exc_info)
        elif method == "case":
            self.logger.error("Test case failed.", exc_info=exc_info)
        elif method == "teardown_test":
            self.logger.error("Test case teardown failed.", exc_info=exc_info)
        elif method == "teardown_bench":
            self.logger.error("Test bench teardown failed.", exc_info=exc_info)
        else:
            self.logger.error("Failed method unknown to handler: %s", method)
        self._call_exception(method, exception, retcode, message)
|
class RunnerSM(object):
'''
State machine class.
States defined in STATES constant and transitions in TRANSITIONS constant
'''
def __init__(self, benchapi, logger=None, **kwargs):
pass
def setup(self):
'''
Alias for test case setup method.
'''
pass
def teardown(self):
'''
Alias for test case teardown method.
'''
pass
def _run_cases(self):
'''
Run test case functions.
:return: Nothing
'''
pass
def check_skip(self):
'''
Check if we need to skip this tc.
:return: ReturnCodes.RETCODE_SKIP or None
'''
pass
def _setup_bench(self):
'''
Initialize test bench. Validate dut configurations, kill putty and kitty processes,
load plugins, create empty Result object for this test case, initialize duts,
collect metainformation from initialized duts, start sniffer, start test case timer,
start external services and finally send pre-commands to duts.
:return: Nothing
'''
pass
def _teardown_bench(self):
'''
Tear down the Bench object.
:return: Nothing
'''
pass
def _call_exception(self, method, error, retcode, message=None):
'''
Handle error situation. Makes sure that the test case proceeds to the correct step from
the step where it failed.
:param method: Method where fail happened
:param error: Exception
:param retcode: int
:param message: Message to log
:return: Nothing
'''
pass
def _finish(self):
'''
Finish step.
:return: Nothing
'''
pass
def run(self):
'''
Run through the state machine states, triggering states in the correct order for correct
operation. Fail states are triggered through method 'failure'.
:return: int
'''
pass
def _select_error_returncode(self, state, error):
'''
Matrix for selecting return codes and messages for different fail states.
:param state: State where fail happened
:param error: Exception that happened.
:return: (return_code, message)
'''
pass
def failure(self, exception, method, retcode, message=None):
'''
Failure handling.
:param exception: Exception that happened
:param method: Method where error happened
:param retcode: int
:param message: str
:return: Nothing
'''
pass
| 13 | 12 | 27 | 2 | 20 | 5 | 5 | 0.25 | 1 | 15 | 7 | 0 | 12 | 5 | 12 | 12 | 383 | 35 | 280 | 41 | 267 | 69 | 188 | 39 | 175 | 13 | 1 | 3 | 56 |
1,931 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Results.py
|
icetea_lib.TestBench.Results.Results
|
class Results(object):
    """
    ResultMixer manage test results and verdicts.
    It provide public API get_result() for TestManagement.
    """

    def __init__(self, logger, resources, configuration, args, **kwargs):
        # Cooperative mixin-style initializer; extra kwargs go up the MRO.
        super(Results, self).__init__(**kwargs)
        self._result_list = ResultList()
        # Default to success; set_failure() downgrades this on the first failure.
        self._retcode = ReturnCodes.RETCODE_SUCCESS
        self._failreason = ''
        self._logger = logger
        self._configuration = configuration
        self._args = args
        self._resources = resources

    def init(self, logger=None):
        """
        Initialize the internal ResultList.

        :param logger: optional replacement logger.
        :return: Nothing
        """
        if logger:
            self._logger = logger
        self._result_list = ResultList()

    @staticmethod
    def create_new_result(verdict, retcode, duration, input_data):
        """
        Create a new Result object with data in function arguments.
        :param verdict: Verdict as string
        :param retcode: Return code as int
        :param duration: Duration as time
        :param input_data: Input data as dictionary
        :return: Result
        """
        new_result = Result(input_data)
        new_result.set_verdict(verdict, retcode, duration)
        return new_result

    def add_new_result(self, verdict, retcode, duration, input_data):
        """
        Add a new Result to result object to the internal ResultList.
        :param verdict: Verdict as string
        :param retcode: Return code as int
        :param duration: Duration as time
        :param input_data: Input data as dict
        :return: Result
        """
        new_result = Results.create_new_result(verdict, retcode, duration, input_data)
        self._result_list.append(new_result)
        return new_result

    @property
    def retcode(self):
        """
        Getter for return code.
        :return: int
        """
        return self._retcode

    @retcode.setter
    def retcode(self, value):
        """
        Setter for retcode.
        :param value: int
        :return: Nothing
        """
        self._retcode = value

    def set_failure(self, retcode, reason):
        """
        Set internal state to reflect failure of test.
        :param retcode: return code
        :param reason: failure reason as string
        :return: Nothing
        """
        if self._resources.resource_provider:
            if hasattr(self._resources.resource_provider, "allocator"):
                # Check for backwards compatibility with older pyclient versions.
                if hasattr(self._resources.resource_provider.allocator, "get_status"):
                    pr_reason = self._resources.resource_provider.allocator.get_status()
                    if pr_reason:
                        # An allocator-reported problem takes precedence over
                        # the caller-supplied reason and return code.
                        reason = "{}. Other error: {}".format(pr_reason, reason)
                        retcode = ReturnCodes.RETCODE_FAIL_DUT_CONNECTION_FAIL
        # Only the first failure defines the verdict; later ones are only logged.
        if self.retcode is None or self.retcode == ReturnCodes.RETCODE_SUCCESS:
            self.retcode = retcode
            self._failreason = reason
            self._logger.error("Test Case fails because of: %s", reason)
        else:
            self._logger.info("another fail reasons: %s", reason)

    def get_results(self):
        """
        Getter for internal _results variable.

        :return: ResultList
        """
        return self._result_list

    def get_result(self, tc_file=None):
        """
        Generate a Result object from this test case.
        :param tc_file: Location of test case file
        :return: Result
        """
        # NOTE(review): every call appends a fresh Result to the internal list
        # but always returns the first entry — confirm repeated calls are intended.
        self.append_result(tc_file)
        return self._result_list.data[0]

    def set_results(self, value):
        """
        Setter for _result_list.
        :param value: ResultList
        :return: Nothing
        """
        self._result_list = value

    def append_result(self, tc_file=None):
        """
        Append a new fully constructed Result to the internal ResultList.
        :param tc_file: Test case file path
        :return: Nothing
        """
        result = Result()
        result.set_tc_metadata(self._configuration.config)
        tc_rev = get_git_info(self._configuration.get_tc_abspath(tc_file),
                              verbose=self._args.verbose)
        if self._logger:
            self._logger.debug(tc_rev)
        result.set_tc_git_info(tc_rev)
        result.component = self._configuration.get_test_component()
        result.feature = self._configuration.get_features_under_test()
        result.skip_reason = self._configuration.skip_reason() if self._configuration.skip() else ''
        result.fail_reason = self._failreason
        result.logpath = os.path.abspath(LogManager.get_base_dir())
        result.logfiles = LogManager.get_logfiles()
        result.retcode = self.retcode
        result.set_dutinformation(self._resources.dutinformations)
        # pylint: disable=unused-variable
        for platform, serialnumber in zip(self._resources.get_platforms(),
                                          self._resources.get_serialnumbers()):
            # Zipping done to keep platforms and serial numbers aligned in case some sn:s are
            # missing
            result.dut_vendor.append('')
            result.dut_resource_id.append(serialnumber)
        result.dut_count = self._resources.get_dut_count()
        result.duts = self._resources.resource_configuration.get_dut_configuration()
        if self._resources.resource_configuration.count_hardware() > 0:
            result.dut_type = 'hw'
        elif self._resources.resource_configuration.count_process() > 0:
            result.dut_type = 'process'
        else:
            result.dut_type = None
        self._result_list.append(result)
|
class Results(object):
'''
ResultMixer manage test results and verdicts.
It provide public API get_result() for TestManagement.
'''
def __init__(self, logger, resources, configuration, args, **kwargs):
pass
def init(self, logger=None):
'''
Initialize the internal ResultList.
:return: Nothing
'''
pass
@staticmethod
def create_new_result(verdict, retcode, duration, input_data):
'''
Create a new Result object with data in function arguments.
:param verdict: Verdict as string
:param retcode: Return code as int
:param duration: Duration as time
:param input_data: Input data as dictionary
:return: Result
'''
pass
def add_new_result(self, verdict, retcode, duration, input_data):
'''
Add a new Result to result object to the internal ResultList.
:param verdict: Verdict as string
:param retcode: Return code as int
:param duration: Duration as time
:param input_data: Input data as dict
:return: Result
'''
pass
@property
def retcode(self):
'''
Getter for return code.
:return: int
'''
pass
@retcode.setter
def retcode(self):
'''
Setter for retcode.
:param value: int
:return: Nothing
'''
pass
def set_failure(self, retcode, reason):
'''
Set internal state to reflect failure of test.
:param retcode: return code
:param reason: failure reason as string
:return: Nothing
'''
pass
def get_results(self):
'''
Getter for internal _results variable.
'''
pass
def get_results(self):
'''
Generate a Result object from this test case.
:param tc_file: Location of test case file
:return: Result
'''
pass
def set_results(self, value):
'''
Setter for _result_list.
:param value: ResultList
:return: Nothing
'''
pass
def append_result(self, tc_file=None):
'''
Append a new fully constructed Result to the internal ResultList.
:param tc_file: Test case file path
:return: Nothing
'''
pass
| 15 | 11 | 13 | 1 | 7 | 5 | 2 | 0.77 | 1 | 5 | 3 | 0 | 10 | 7 | 11 | 11 | 161 | 21 | 79 | 28 | 64 | 61 | 71 | 25 | 59 | 6 | 1 | 4 | 22 |
1,932 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Resources.py
|
icetea_lib.TestBench.Resources.ResourceFunctions
|
class ResourceFunctions(object):
"""
ResourceFunctions manage test required resources, like DUT's.
It provide public API's to get individual DUT's object or iterator over all DUT's.
"""
    def __init__(self, args, logger, configuration, **kwargs):
        # Cooperative mixin-style initializer; extra kwargs go up the MRO.
        super(ResourceFunctions, self).__init__(**kwargs)
        self._allocation_context = None  # set once duts have been allocated
        self._resource_configuration = ResourceConfig()
        self._resource_provider = None  # created lazily in init()
        self._starttime = None  # recorded at the end of init_duts()
        self._dutinformations = None
        self._commands = None  # Commands helper, injected via init()
        self._duts = []
        self._args = args
        self._logger = logger
        self._configuration = configuration
    def init(self, commands, logger=None):
        """
        Initialize ResourceConfig and ResourceProvider.

        :param commands: Commands helper, used later e.g. by reset_dut().
        :param logger: optional replacement logger.
        :return: Nothing
        """
        if logger:
            self._logger = logger
        # Create ResourceProvider object and resolve the resource requirements from configuration
        self._resource_provider = ResourceProvider(self._args)
        # @todo need better way to handle forceflash_once option
        # because provider is singleton instance.
        self._resource_provider.args.forceflash = self._args.forceflash
        self._resource_configuration = ResourceConfig(logger=self._logger)
        self._resource_provider.resolve_configuration(self._configuration.config,
                                                      self._resource_configuration)
        self._commands = commands
    def init_duts(self, benchapi):
        """
        Initialize Duts, and the network sniffer.

        :param benchapi: Bench instance, used for sniffer access.
        :return: Nothing
        """
        # Validate dut configurations
        self.validate_dut_configs(self.resource_configuration.get_dut_configuration(),
                                  self._logger)
        # Initialize duts
        if self.resource_configuration.count_duts() > 0:
            self._initialize_duts()
            # NOTE(review): reaches into a private member of benchapi — confirm
            # no public sniffer API exists.
            benchapi._nwsniffer.init_sniffer()
        else:
            self._logger.debug("This TC doesn't use DUT's")
        # Recorded even when no duts are used; get_start_time() reports this.
        self._starttime = time.time()
    def duts_release(self):
        """
        Release Duts.

        Closes each dut and its connection, releases the allocation (unless the
        allocator shares allocations), waits for dut threads to finish, and
        finally clears the internal dut list.

        :return: Nothing
        """
        try:
            # try to close node's by nicely by `exit` command
            # if it didn't work, kill it by OS process kill command
            # also close reading threads if any
            self._logger.debug("Close dut connections")
            # pylint: disable=unused-variable
            for i, dut in self.duts_iterator():
                try:
                    dut.close_dut()
                    dut.close_connection()
                except Exception:  # pylint: disable=broad-except
                    # We want to catch all uncaught Exceptions here.
                    self._logger.error("Exception while closing dut %s!",
                                       dut.dut_name,
                                       exc_info=True if not self._args.silent else False)
                finally:
                    # Release the allocation even if closing failed, unless the
                    # allocator shares allocations between test cases.
                    if hasattr(self._resource_provider.allocator, "share_allocations"):
                        if getattr(self._resource_provider.allocator, "share_allocations"):
                            pass
                        else:
                            self._logger.debug("Releasing dut {}".format(dut.index))
                            self.resource_provider.allocator.release(dut=dut)
                    else:
                        self._logger.debug("Releasing dut {}".format(dut.index))
                        self.resource_provider.allocator.release(dut=dut)
            self._logger.debug("Close dut threads")
            # finalize dut thread
            for ind, dut in self.duts_iterator():
                while not dut.finished():
                    time.sleep(0.5)
                    self._logger.debug("Dut #%i is not finished yet..", ind)
        except KeyboardInterrupt:
            # On Ctrl-C, ask every dut thread to terminate instead of waiting.
            self._logger.debug("key interrupt")
            for ind, dut in self.duts_iterator():
                dut.kill_received = True
        self._duts_delete()
    def get_start_time(self):
        """
        Get the dut-initialization start timestamp.

        :return: None if init_duts() has not run yet, otherwise the time.time()
            value recorded at the end of init_duts().
        """
        return self._starttime
    @property
    def resource_configuration(self):
        """
        Getter for _resource_configuration.

        :return: ResourceConfig
        """
        return self._resource_configuration
    @resource_configuration.setter
    def resource_configuration(self, value):
        """
        Setter for _resource_configuration.

        :param value: ResourceConfig
        :return: Nothing
        """
        self._resource_configuration = value
def dut_count(self):
"""
Getter for dut count from resource configuration.
:return: int
"""
if self.resource_configuration:
return self.resource_configuration.count_duts()
return 0
    def get_dut_count(self):
        """
        Get dut count (alias for dut_count()).

        :return: int
        """
        return self.dut_count()
    @property
    def resource_provider(self):
        """
        Getter for _resource_provider.

        :return: ResourceProvider, or None before init() has run.
        """
        return self._resource_provider
    @property
    def duts(self):
        """
        Get the internal dut list.

        :return: list
        """
        return self._duts
    @duts.setter
    def duts(self, value):
        """
        Replace the internal dut list.

        :param value: list
        :return: Nothing
        """
        self._duts = value
    @property
    def dut_indexes(self):
        """
        1-based indexes of the duts required by the resource configuration.

        :return: range
        """
        return range(1, self._resource_configuration.count_duts() + 1)
    def _duts_delete(self):
        """
        Reset internal _duts list to an empty list.

        :return: Nothing
        """
        self._logger.debug("delete duts")
        self._duts = []
def duts_iterator_all(self):
"""
Yield indexes and related duts.
"""
for ind, dut in enumerate(self.duts):
yield ind, dut
def duts_iterator(self):
"""
Yield indexes and related duts that are for this test case.
"""
for ind, dut in enumerate(self.duts):
if self.is_my_dut_index(ind):
yield ind, dut
    def is_allowed_dut_index(self, dut_index):
        """
        Check if dut_index is one of the duts for this test case.

        :param dut_index: int (1-based)
        :return: Boolean
        """
        return dut_index in self.dut_indexes
def get_dut(self, k):
"""
Get dut object.
:param k: index or nickname of dut.
:return: Dut
"""
dut_index = k
if isinstance(k, str):
dut_index = self.get_dut_index(k)
if dut_index > len(self.duts) or dut_index < 1:
self._logger.error("Invalid DUT number")
raise ValueError("Invalid DUT number when calling get_dut(%i)" % dut_index)
return self.duts[dut_index - 1]
    def get_node_endpoint(self, endpoint_id, bench):
        """
        Get NodeEndPoint object for dut endpoint_id.

        :param endpoint_id: nickname (str) or 1-based index of dut.
        :param bench: Bench instance handed to the NodeEndPoint.
        :return: NodeEndPoint
        """
        if isinstance(endpoint_id, string_types):
            # Nicknames are resolved to indexes first.
            endpoint_id = self.get_dut_index(endpoint_id)
        return NodeEndPoint(bench, endpoint_id)
def is_my_dut_index(self, dut_index):
"""
:return: Boolean
"""
if self._args.my_duts:
myduts = self._args.my_duts.split(',')
if str(dut_index) in myduts:
return True
return False
else:
return True
@property
def dutinformations(self):
"""
Getter for DutInformation list.
:return: list
"""
if self._allocation_context:
return self._allocation_context.get_dutinformations()
return list()
@dutinformations.setter
def dutinformations(self, value):
if self._allocation_context:
self._allocation_context.dutinformations = value
def reset_dut(self, dut_index='*'):
"""
Reset dut k.
:param dut_index: index of dut to reset. Default is *, which causes all duts to be reset.
:return: Nothing
"""
if dut_index == '*':
for ind in self.resource_configuration.get_dut_range():
if self.is_my_dut_index(ind):
self.reset_dut(ind)
return
method = None
if self._args.reset == "hard" or self._args.reset == "soft":
self._logger.debug("Sending reset %s to dut %d", self._args.reset, dut_index - 1)
method = self._args.reset
self.duts[dut_index - 1].init_wait_register()
self.duts[dut_index - 1].reset(method)
self._logger.debug("Waiting for dut %d to initialize", dut_index)
result = self.duts[dut_index - 1].wait_init()
if not result:
self._logger.warning("Cli initialization trigger not found. Maybe your application"
" started before we started reading? Try adding --reset"
" to your run command.")
raise DutConnectionError("Dut cli failed to initialize within set timeout!")
if self._args.sync_start:
self._commands.sync_cli(dut_index)
self._logger.debug("CLI initialized")
self.duts[dut_index - 1].init_cli()
def _open_dut_connections(self, allocations):
"""
Internal helper. Registers waiting for cli initialization and handles the wait
as well as opens connections.
"""
for dut in self._duts:
dut.init_wait_register()
try:
allocations.open_dut_connections()
except DutConnectionError:
self._logger.exception("Error while opening DUT connections!")
for dut in self._duts:
dut.close_dut()
dut.close_connection()
raise
for ind, dut in self.duts_iterator():
self._logger.info("Waiting for dut %d to initialize.", ind + 1)
res = dut.wait_init()
if not res:
self._logger.warning("Cli initialization trigger not found. Maybe your application"
" started before we started reading? Try adding --reset"
" to your run command.")
raise DutConnectionError("Dut cli failed to initialize within set timeout!")
if self._args.sync_start:
self._logger.info("Synchronizing the command line interface.")
try:
self._commands.sync_cli(dut.index)
except TestStepError:
raise DutConnectionError("Synchronized start for dut {} failed!".format(
dut.index))
def _alloc_error_helper(self):
"""
Helper for exception handling in the __init_duts method.
"""
d_info_list = []
for i, resource in enumerate(self.resource_configuration.get_dut_configuration()):
dutinfo = DutInformation(resource.get("platform_name"), None, i)
d_info_list.append(dutinfo)
self._dutinformations = d_info_list
def get_platforms(self):
"""
Get list of dut platforms.
:return: list
"""
plat_list = []
for info in self.dutinformations:
plat_list.append(info.platform)
return plat_list
def get_serialnumbers(self):
"""
Get list of dut serial numbers.
:return: list
"""
serial_number_list = []
for info in self.dutinformations:
serial_number_list.append(info.resource_id)
return serial_number_list
# Internal function to Initialize cli dut's
def _initialize_duts(self):
"""
Internal function to initialize duts
:return: Nothing
:raises: DutConnectionError if correct amount of duts were not initialized or if reset
failed or if cli initialization wait loop timed out.
"""
# Initialize command line interface
self._logger.info("Initialize DUT's connections")
try:
allocations = self.resource_provider.allocate_duts(self.resource_configuration)
except (AllocationError, ResourceInitError):
self._alloc_error_helper()
raise
self._allocation_context = allocations
allocations.set_logger(self._logger)
allocations.set_resconf(self.resource_configuration)
try:
self._duts = allocations.init_duts(args=self._args)
if len(self._duts) != self.resource_configuration.count_duts():
raise AllocationError("Unable to initialize required amount of duts.")
except AllocationError:
self._alloc_error_helper()
raise
self._open_dut_connections(allocations)
for ind, dut in self.duts_iterator():
dut.Testcase = self._configuration.name
dut.init_cli()
self._logger.debug("DUT[%i]: Cli initialized.", ind)
for ind, dut in self.duts_iterator():
self._logger.debug("DUT[%i]: %s", ind, dut.comport)
self._logger.debug("Initialized %d %s "
"for this testcase.", len(self._duts),
"dut" if len(self._duts) == 1 else "duts")
def validate_dut_configs(self, dut_configuration_list, logger):
"""
Validate dut configurations.
:param dut_configuration_list: dictionary with dut configurations
:param logger: logger to be used
:raises EnvironmentError if something is wrong
"""
# for now we validate only binaries - if it exists or not.
if not self._args.skip_flash:
for conf in dut_configuration_list:
try:
binar = conf.get("application").get("bin")
if binar:
build = Build.init(binar)
if not build.is_exists():
logger.warning("Binary '{}' not found".format(binar))
raise EnvironmentError("Binary not found")
except(KeyError, AttributeError):
pass
if logger is not None:
logger.debug("Configurations seems to be ok")
def get_dut_versions(self, commands):
"""
Get nname results and set them to duts.
:return: Nothing
"""
resps = commands.command('*', "nname")
for i, resp in enumerate(resps):
self.duts[i].version = resp.parsed
def get_dut_nick(self, dut_index):
"""
Get nick of dut index k.
:param dut_index: index of dut
:return: string
"""
nick = str(dut_index)
int_index_in_duts = dut_index in self._configuration.config["requirements"]["duts"]
str_index_in_duts = False
if not int_index_in_duts:
str_index_in_duts = nick in self._configuration.config["requirements"]["duts"]
if str_index_in_duts:
nick_in_indexed_reqs = "nick" in self._configuration.config[
"requirements"]["duts"][nick]
elif int_index_in_duts:
nick_in_indexed_reqs = "nick" in self._configuration.config[
"requirements"]["duts"][dut_index]
else:
nick_in_indexed_reqs = False
if int_index_in_duts and nick_in_indexed_reqs:
return self._configuration.config["requirements"]["duts"][dut_index]['nick']
elif str_index_in_duts and nick_in_indexed_reqs:
return self._configuration.config["requirements"]["duts"][nick]['nick']
return nick
def get_dut_index(self, nick):
"""
Get index of dut with nickname nick.
:param nick: string
:return: integer > 1
"""
for dut_index, dut in enumerate(self.resource_configuration.get_dut_configuration()):
nickname = dut.get("nick")
if nickname and nickname == nick:
return dut_index + 1
raise ValueError("Cannot find DUT by nick '%s'" % nick)
def is_my_dut(self, k):
"""
:return: Boolean
"""
if self._args.my_duts:
myduts = self._args.my_duts.split(',')
if str(k) in myduts:
return True
return False
else:
return True
|
class ResourceFunctions(object):
'''
ResourceFunctions manage test required resources, like DUT's.
It provide public API's to get individual DUT's object or iterator over all DUT's.
'''
def __init__(self, args, logger, configuration, **kwargs):
pass
def init(self, commands, logger=None):
'''
Initialize ResourceConfig and ResourceProvider
:return: Nothing
'''
pass
def init_duts(self, benchapi):
'''
Initialize Duts, and the network sniffer.
:return: Nothing
'''
pass
def duts_release(self):
'''
Release Duts.
:return: Nothing
'''
pass
def get_start_time(self):
'''
Get start time.
:return: None if test has not started, start time stamp fetched with time.time() otherwise.
'''
pass
@property
def resource_configuration(self):
'''
Getter for __resource_configuration.
:return: ResourceConfig
'''
pass
@resource_configuration.setter
def resource_configuration(self):
'''
Setter for __resource_configuration.
:param value: ResourceConfig
:return: Nothing
'''
pass
def dut_count(self):
'''
Getter for dut count from resource configuration.
:return: int
'''
pass
def get_dut_count(self):
'''
Get dut count.
:return: int
'''
pass
@property
def resource_provider(self):
'''
Getter for __resource_provider
:return: ResourceProvider
'''
pass
@property
def duts_release(self):
'''
Get _duts.
:return: list
'''
pass
@duts.setter
def duts_release(self):
'''
set a list as _duts.
:param value: list
:return: Nothing
'''
pass
@property
def dut_indexes(self):
'''
Get a list with dut indexes.
:return: list
'''
pass
def _duts_delete(self):
'''
Reset internal __duts list to empty list.
:return: Nothing
'''
pass
def duts_iterator_all(self):
'''
Yield indexes and related duts.
'''
pass
def duts_iterator_all(self):
'''
Yield indexes and related duts that are for this test case.
'''
pass
def is_allowed_dut_index(self, dut_index):
'''
Check if dut_index is one of the duts for this test case.
:param dut_index: int
:return: Boolean
'''
pass
def get_dut_count(self):
'''
Get dut object.
:param k: index or nickname of dut.
:return: Dut
'''
pass
def get_node_endpoint(self, endpoint_id, bench):
'''
get NodeEndPoint object for dut endpoint_id.
:param endpoint_id: nickname of dut
:return: NodeEndPoint
'''
pass
def is_my_dut_index(self, dut_index):
'''
:return: Boolean
'''
pass
@property
def dutinformations(self):
'''
Getter for DutInformation list.
:return: list
'''
pass
@dutinformations.setter
def dutinformations(self):
pass
def reset_dut(self, dut_index='*'):
'''
Reset dut k.
:param dut_index: index of dut to reset. Default is *, which causes all duts to be reset.
:return: Nothing
'''
pass
def _open_dut_connections(self, allocations):
'''
Internal helper. Registers waiting for cli initialization and handles the wait
as well as opens connections.
'''
pass
def _alloc_error_helper(self):
'''
Helper for exception handling in the __init_duts method.
'''
pass
def get_platforms(self):
'''
Get list of dut platforms.
:return: list
'''
pass
def get_serialnumbers(self):
'''
Get list of dut serial numbers.
:return: list
'''
pass
def _initialize_duts(self):
'''
Internal function to initialize duts
:return: Nothing
:raises: DutConnectionError if correct amount of duts were not initialized or if reset
failed or if cli initialization wait loop timed out.
'''
pass
def validate_dut_configs(self, dut_configuration_list, logger):
'''
Validate dut configurations.
:param dut_configuration_list: dictionary with dut configurations
:param logger: logger to be used
:raises EnvironmentError if something is wrong
'''
pass
def get_dut_versions(self, commands):
'''
Get nname results and set them to duts.
:return: Nothing
'''
pass
def get_dut_nick(self, dut_index):
'''
Get nick of dut index k.
:param dut_index: index of dut
:return: string
'''
pass
def get_dut_index(self, nick):
'''
Get index of dut with nickname nick.
:param nick: string
:return: integer > 1
'''
pass
def is_my_dut_index(self, dut_index):
'''
:return: Boolean
'''
pass
| 42 | 32 | 14 | 1 | 8 | 4 | 3 | 0.56 | 1 | 18 | 8 | 0 | 33 | 10 | 33 | 33 | 492 | 71 | 272 | 85 | 230 | 151 | 242 | 77 | 208 | 10 | 1 | 5 | 93 |
1,933 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestcaseContainer.py
|
icetea_lib.TestSuite.TestcaseContainer.TestStatus
|
class TestStatus(object): # pylint: disable=no-init,too-few-public-methods
"""
Enumeration for test statuses.
"""
PENDING = 4
PREPARED = 3
READY = 2
RUNNING = 1
FINISHED = 0
|
class TestStatus(object):
'''
Enumeration for test statuses.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.67 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0 | 6 | 6 | 5 | 4 | 6 | 6 | 5 | 0 | 1 | 0 | 0 |
1,934 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_commands.py
|
test.test_bench_commands.MockResponse
|
class MockResponse(object):
def __init__(self, lines=None, retcode=0):
self.lines = lines if lines else []
self.retcode = retcode
self.timeout = None
|
class MockResponse(object):
def __init__(self, lines=None, retcode=0):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 3 | 1 | 1 | 5 | 0 | 5 | 5 | 3 | 0 | 5 | 5 | 3 | 2 | 1 | 0 | 2 |
1,935 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestSuite.py
|
icetea_lib.TestSuite.TestSuite.TestSuite
|
class TestSuite(object):
"""
Test suite container object.
"""
def __init__(self, logger=None, cloud_module=None, args=None):
self.logger = logger
if logger is None:
import logging
self.logger = logging.getLogger("TestSuite")
if not self.logger.handlers:
self.logger.addHandler(logging.StreamHandler())
self.logger.setLevel(logging.INFO)
self.args = args
self.cloud_module = cloud_module
self._testcases = []
self._default_configs = {}
self.status = TestStatus.PENDING
self._results = ResultList()
self._create_tc_list()
def __len__(self):
return len(self._testcases)
def get_testcases(self):
"""
Return internal list of TestcaseContainers
"""
return self._testcases
def get_tcnames(self):
"""
Return list of names of all test cases in this Suite.
:return: list
"""
return [tc.get_name() for tc in self._testcases]
def run(self):
"""
Test runner
"""
self.status = TestStatus.RUNNING
self.logger.info("Starting suite.")
i = 0
repeats = int(self.args.repeat) if self.args.repeat and int(self.args.repeat) >= 2 else 1
repeat = 1
self.logger.debug("Test suite repeats: %i", repeats)
while repeat <= repeats:
self.logger.info("Starting repeat %i of %i", repeat, repeats)
repeat += 1
for test in self._testcases:
self.logger.debug("Starting next test case: %s", test.get_name())
iterations = self.get_default_configs().get('iteration', 1)
if iterations == 0:
continue
iteration = 0
while iteration < iterations:
self.logger.info("Iteration %i of %i", iteration + 1, iterations)
retries = self.get_default_configs().get("retryCount", 0)
self.logger.debug("Test case retries: %i", retries)
retryreason = self.get_default_configs().get("retryReason", "inconclusive")
iteration += 1
if self.args.forceflash_once:
self.args.forceflash = i == 0
self.logger.debug("Forceflash_once set: Forceflash is %s",
self.args.forceflash)
i += 1
try:
result, retries, repeat, iteration = self._run_testcase(test,
retries,
repeat,
repeats,
iteration,
iterations,
retryreason)
except KeyboardInterrupt:
self.logger.error("Test run aborted.")
self.status = TestStatus.FINISHED
return self._results
self._upload_results(result)
if result and result.get_verdict() not in ['pass',
'skip'] and self.args.stop_on_failure:
break
self.status = TestStatus.FINISHED
i += 1
return self._results
def _run_testcase(self, test, retries, repeat, repeats, iteration, iterations, retryreason):
"""
Internal runner for handling a single test case run in the suite.
Repeats and iterations are handled outside this function.
:param test: TestcaseContainer to be run
:param retries: Amount of retries desired
:param repeat: Current repeat index
:param repeats: Total amount of repeats
:param iteration: Current iteration index
:param iterations: Total number of iterations
:param retryreason: suite related parameter for which test verdicts to retry.
:return: (Result, retries(int), repeat(int), iteration(int))
"""
result = None
while True:
try:
self.logger.debug("Starting test case run.")
result = test.run(forceflash=self.args.forceflash)
result.retries_left = retries
if isinstance(result, ResultList):
for result_item in result:
if not result_item.success:
if retryreason == "includeFailures" or (retryreason == "inconclusive"
and result.inconclusive):
result_item.retries_left = retries
except KeyboardInterrupt:
self.logger.info("User aborted test run")
iteration = iterations
repeat = repeats + 1
raise
if result is not None:
# Test had required attributes and ran succesfully or was skipped.
# Note that a fail *during* a testcase run will still be reported.
if not isinstance(result, ResultList):
result.build_result_metadata(args=self.args)
self._results.append(result)
if isinstance(result, ResultList):
result = self._results.get(len(self._results) - 1)
if self.args.stop_on_failure and result.get_verdict() not in ['pass', 'skip']:
# Stopping run on failure,
self.logger.info("Test case %s failed or was inconclusive, "
"stopping run.\n", test.get_name())
repeat = repeats + 1
iteration = iterations + 1
break
if result.get_verdict() == 'pass':
self.logger.info("Test case %s passed.\n", test.get_name())
result.retries_left = 0
break
if result.get_verdict() == 'skip':
iteration = iterations
result.retries_left = 0
self.logger.info("Test case %s skipped.\n", test.get_name())
break
elif retries > 0:
if retryreason == "includeFailures" or (retryreason == "inconclusive"
and result.inconclusive):
if not isinstance(result, ResultList):
if result.retcode == ReturnCodes.RETCODE_FAIL_TC_NOT_FOUND:
result.retries_left = 0
break
self.logger.error("Testcase %s failed, %d "
"retries left.\n", test.get_name(), retries)
retries -= 1
self._upload_results(result)
continue
else:
result.retries_left = 0
break
else:
self.logger.error("Test case %s failed, No retries left.\n",
test.get_name())
break
if self._results:
# Generate or update reports.
self._results.save(heads={'Build': '', 'Branch': self.args.branch}, console=False)
return result, retries, repeat, iteration
def _upload_results(self, result):
"""
Upload result to cloud.
:param result: Result object
:return: Nothing
"""
if self.cloud_module:
self.logger.debug("Uploading results to DB.")
if isinstance(result, Result):
self._upload_result(result)
elif isinstance(result, ResultList):
for result_item in result:
self._upload_result(result_item)
def _upload_result(self, result_object):
"""
Upload a single result to the cloud.
:param result_object: Result
:return: Nothing
"""
if not result_object.uploaded:
response_data = self.cloud_module.send_result(result_object)
if response_data:
data = response_data
self.logger.info("Results sent to the server. ID: %s", data.get('_id'))
result_object.uploaded = True
def get_default_configs(self):
"""
Get suite default configs
"""
return self._default_configs
def get_results(self):
"""
Return results
"""
return self._results
def list_testcases(self):
"""
List all test cases in this Suite in a neat table.
:return: PrettyTable
"""
testcases = []
try:
if self.args.json:
self._create_json_objects(testcases)
if self.args.export:
self._create_suite_file(testcases, self.args.export)
return json.dumps(testcases)
else:
self._create_rows_for_table(testcases)
from prettytable import PrettyTable
table = PrettyTable(
["Index", "Name", "Status", "Type", "Subtype", "Group", "Component",
"Feature", "Allowed platforms"])
table.align["Index"] = "l"
for row in testcases:
table.add_row(row)
return table
except TypeError:
self.logger.error("Error, print_list_testcases: error during iteration.")
return None
def _create_suite_file(self, testcases, suite_name): # pylint: disable=no-self-use
"""
Create a suite file at suite_name.
:param testcases: Test case list.
:param suite_name: File path
:return: Nothing
"""
base_json = dict()
base_json["default"] = {}
base_json["testcases"] = []
for testcase in testcases:
base_json["testcases"].append({"name": testcase["name"]})
with open(suite_name, "w") as filehandle:
filehandle.write(json.dumps(base_json))
def _create_json_objects(self, testcases):
"""
Create test case config json object list.
:param testcases: Test case list.
:return: list
"""
for testcase in self._testcases:
info = testcase.get_instance_config()
testcases.append(info)
return testcases
def _create_rows_for_table(self, rows):
"""
Internal helper for creating rows for test case display table.
:param rows: list
:return: Nothing
"""
index = 0
for testcase in self._testcases:
info = testcase.get_infodict()
try:
index += 1
grp = info.get('group')
if grp:
group = os.sep.join(info.get('group').split(os.sep)[1:])
if not group:
group = "no group"
else:
group = "no group"
rows.append([index, info.get('name'), info.get('status'),
info.get('type'), info.get('subtype'),
group, info.get('comp'), info.get('feature'),
info.get("allowed_platforms")])
except KeyError:
self.logger.error("Error, printListTestcases: Testcase list item with "
"index %d missing attributes.", index)
def update_testcases(self):
"""
Update test cases of this Suite from cloud.
:return: Nothing
"""
if not self.cloud_module:
self.logger.error("Cloud module has not been initialized! "
"Skipping testcase update.")
return False
for testcase in self._testcases:
try:
tc_instance = testcase.get_instance()
self.cloud_module.update_testcase(tc_instance.config)
except Exception as err: # pylint: disable=broad-except
self.logger.error(err)
self.logger.debug("Invalid TC: %s", testcase.tcname)
return True
@staticmethod
def get_suite_files(path):
"""
Static method for finding all suite files in path.
:param path: Search path
:return: List of json files.
"""
return_list = []
if not isinstance(path, str):
return return_list
if not os.path.exists(path):
return return_list
for _, _, files in os.walk(path):
for fil in sorted(files):
_, extension = os.path.splitext(fil)
if extension != '.json':
continue
return_list.append(fil)
return return_list
def _create_tc_list(self):
"""
Parses testcase metadata from suite file or from testcase list in args.
Sets TestSuite status to 'parsed' to indicate that it has not yet been prepared.
:raises: SuiteException
"""
suite = None
if self.args.suite:
if os.path.exists(os.path.abspath(self.args.suite)):
# If suite can be found using just the suite argument, we use that.
suitedir, filename = os.path.split(os.path.abspath(self.args.suite))
elif os.path.exists(self.args.suitedir):
suitedir = self.args.suitedir
# We presume that this is just the filename, or a path relative to the suitedir.
filename = self.args.suite
else:
raise SuiteException("Suite creation from file failed. Unable to determine suite "
"directory. Check --suitedir and --suite.")
suite = self._load_suite_file(filename, suitedir)
if not suite:
raise SuiteException("Suite creation from file failed. "
"Check your suite file format, path and access rights.")
self._default_configs = suite.get("default", {})
tcnames = []
for i, testcase in enumerate(suite.get("testcases")):
tcnames.append(str(testcase.get("name")))
testcases = self._get_suite_tcs(self.args.tcdir, tcnames)
if not testcases:
raise SuiteException("Suite creation failed: Unable to find or filter testcases.")
self._testcases = testcases
self._print_search_errors()
if len(testcases) != len(suite.get("testcases")):
raise SuiteException("Suite creation from file failed: "
"Number of requested testcases does not match "
"amount of found testcases.")
for i, testcase in enumerate(suite.get("testcases")):
suiteconfig = testcase.get("config")
self._testcases.get(i).set_suiteconfig(suiteconfig)
else:
tclist = self._load_suite_list()
if tclist is False:
raise SuiteException("Suite creation failed.")
self._testcases = tclist
if self.args.tc and self.args.tc != "all":
self._print_search_errors()
elif self._testcases.search_errors:
self.logger.error("Failed import the following modules during test case search:")
for item in self._testcases.search_errors:
self.logger.error("%s: %s", item["module"], item["error"])
self.logger.info("Suite creation complete.")
self._prepare_suite()
def _print_search_errors(self):
"""
Logs errors that were collected during test case search.
:return: Nothing
"""
for testcase in self._testcases:
if isinstance(testcase, DummyContainer):
self.logger.error("Some test cases were not found.")
for item in self._testcases.search_errors:
self.logger.error("%s: %s", item["module"], item["error"])
def _prepare_suite(self):
"""
Prepares parsed testcases for running.
:raises: SyntaxError, SuiteException
"""
for i, testcase in enumerate(self._testcases):
try:
self._prepare_testcase(testcase)
except (TypeError, ImportError, ValueError) as err:
raise SuiteException("Test case preparation failed for "
"test case {}: {}".format(i, err))
except SyntaxError:
if self.args.list:
pass
else:
raise
testcase.status = TestStatus.READY
self.logger.info("Test cases prepared.")
self.status = TestStatus.READY
def _get_suite_tcs(self, tcdir, testcases):
"""
Generate a TestcaseList from a Suite.
:param tcdir: Test case directory
:param testcases: Names of testcases.
:return: TestcaseList or None
"""
if not os.path.isdir(tcdir):
self.logger.error("Test case directory does not exist!")
return None
self.logger.info("Importing testcases for filtering")
abs_tcpath = os.path.abspath(tcdir)
sys.path.append(abs_tcpath)
tclist = TestcaseList(logger=self.logger)
tclist.import_from_path(abs_tcpath)
if not tclist:
self.logger.error("Error, runSuite: "
"Could not find any python files in given testcase dirpath")
return None
try:
filt = TestcaseFilter().tc(testcases)
except (TypeError, IndexError):
self.logger.error("Error: Failed to create testcase filter for suite.")
return None
self.logger.info("Filtering testcases")
if testcases == "all":
testcases = None
try:
final_tclist = tclist.filter(filt, testcases)
except FilterException as error:
raise SuiteException(error)
if not final_tclist:
self.logger.error("Error, create_suite: "
"Specified testcases not found in %s.", abs_tcpath)
return None
return final_tclist
def _prepare_testcase(self, testcase):
"""
Run some preparatory commands on a test case to prep it for running.
:param testcase: TestcaseContainer
:return: Nothing
"""
testcase.validate_tc_instance()
testcase.merge_tc_config(self._default_configs)
if testcase.get_suiteconfig():
testcase.merge_tc_config(testcase.get_suiteconfig())
testcase.set_final_config()
testcase.validate_tc_instance()
def _load_suite_file(self, name, suitedir):
"""
Load a suite file from json to dict.
:param name: Name of suite
:param suitedir: Path to suite
:return: Dictionary or None
"""
self.logger.info("Loading suite from file")
if not isinstance(name, str):
self.logger.error("Error, load_suite: Suite name not a string")
return None
filename = name if name.split('.')[-1] == 'json' else name + '.json'
filepath = os.path.join(suitedir, filename)
suite = None
if not os.path.exists(filepath):
if self.cloud_module:
suite = self.cloud_module.get_suite(name)
else:
self.logger.error("Error, load_suite_file: "
"Suite file not found and cloud module not defined.")
return None
if not suite:
self.logger.error("Error, load_suite_file: "
"Suite file not found locally or in cloud.")
return suite
try:
with open(filepath) as fil:
suite = json.load(fil, object_pairs_hook=find_duplicate_keys)
return suite
except IOError:
self.logger.error("Error, load_suite_file: "
"Test suite %s cannot be read.", name)
except ValueError as error:
self.logger.error("Error, load_suite_file: "
"Could not load test suite. %s.", error)
return None
def _load_suite_list(self):
"""
Generate a TestcaseList from command line filters.
:return: TestcaseList or False
"""
self.logger.info("Generating suite from command line.")
args = self.args
filt = TestcaseFilter()
testcase = args.tc if args.tc else "all"
try:
filt = TestcaseFilter().tc(testcase)
filt.status(args.status).group(args.group).testtype(args.testtype)
filt.subtype(args.subtype).component(args.component).feature(args.feature)
filt.platform(args.platform_filter)
except (TypeError, IndexError):
self.logger.exception("Filter creation failed.")
return False
self.logger.info("Importing testcases for filtering")
if not os.path.isdir(args.tcdir):
self.logger.error("Test case directory does not exist!")
return False
abs_tcpath = os.path.abspath(args.tcdir)
sys.path.append(abs_tcpath)
tclist = TestcaseList(self.logger)
tclist.import_from_path(abs_tcpath)
if not tclist:
self.logger.error("Could not find any python files in given path")
return False
self.logger.info("Filtering testcases")
if filt.get_filter()["list"] is not False:
if isinstance(filt.get_filter()["list"], list):
testcases = filt.get_filter()["list"]
else:
testcases = None
else:
testcases = None
try:
final_tclist = tclist.filter(filt, testcases)
except FilterException as error:
raise SuiteException(error)
if not final_tclist:
self.logger.error("Error, create_suite: "
"Specified testcases not found in %s.", abs_tcpath)
return final_tclist
|
class TestSuite(object):
'''
Test suite container object.
'''
def __init__(self, logger=None, cloud_module=None, args=None):
pass
def __len__(self):
pass
def get_testcases(self):
'''
Return internal list of TestcaseContainers
'''
pass
def get_tcnames(self):
'''
Return list of names of all test cases in this Suite.
:return: list
'''
pass
def run(self):
'''
Test runner
'''
pass
def _run_testcase(self, test, retries, repeat, repeats, iteration, iterations, retryreason):
'''
Internal runner for handling a single test case run in the suite.
Repeats and iterations are handled outside this function.
:param test: TestcaseContainer to be run
:param retries: Amount of retries desired
:param repeat: Current repeat index
:param repeats: Total amount of repeats
:param iteration: Current iteration index
:param iterations: Total number of iterations
:param retryreason: suite related parameter for which test verdicts to retry.
:return: (Result, retries(int), repeat(int), iteration(int))
'''
pass
def _upload_results(self, result):
'''
Upload result to cloud.
:param result: Result object
:return: Nothing
'''
pass
def _upload_results(self, result):
'''
Upload a single result to the cloud.
:param result_object: Result
:return: Nothing
'''
pass
def get_default_configs(self):
'''
Get suite default configs
'''
pass
def get_results(self):
'''
Return results
'''
pass
def list_testcases(self):
'''
List all test cases in this Suite in a neat table.
:return: PrettyTable
'''
pass
def _create_suite_file(self, testcases, suite_name):
'''
Create a suite file at suite_name.
:param testcases: Test case list.
:param suite_name: File path
:return: Nothing
'''
pass
def _create_json_objects(self, testcases):
'''
Create test case config json object list.
:param testcases: Test case list.
:return: list
'''
pass
def _create_rows_for_table(self, rows):
'''
Internal helper for creating rows for test case display table.
:param rows: list
:return: Nothing
'''
pass
def update_testcases(self):
'''
Update test cases of this Suite from cloud.
:return: Nothing
'''
pass
@staticmethod
def get_suite_files(path):
'''
Static method for finding all suite files in path.
:param path: Search path
:return: List of json files.
'''
pass
def _create_tc_list(self):
'''
Parses testcase metadata from suite file or from testcase list in args.
Sets TestSuite status to 'parsed' to indicate that it has not yet been prepared.
:raises: SuiteException
'''
pass
def _print_search_errors(self):
'''
Logs errors that were collected during test case search.
:return: Nothing
'''
pass
def _prepare_suite(self):
'''
Prepares parsed testcases for running.
:raises: SyntaxError, SuiteException
'''
pass
def _get_suite_tcs(self, tcdir, testcases):
'''
Generate a TestcaseList from a Suite.
:param tcdir: Test case directory
:param testcases: Names of testcases.
:return: TestcaseList or None
'''
pass
def _prepare_testcase(self, testcase):
'''
Run some preparatory commands on a test case to prep it for running.
:param testcase: TestcaseContainer
:return: Nothing
'''
pass
def _load_suite_file(self, name, suitedir):
'''
Load a suite file from json to dict.
:param name: Name of suite
:param suitedir: Path to suite
:return: Dictionary or None
'''
pass
def _load_suite_list(self):
'''
Generate a TestcaseList from command line filters.
:return: TestcaseList or False
'''
pass
| 25 | 22 | 23 | 1 | 17 | 5 | 5 | 0.28 | 1 | 23 | 9 | 0 | 22 | 7 | 23 | 23 | 555 | 44 | 401 | 98 | 374 | 112 | 354 | 90 | 328 | 18 | 1 | 6 | 115 |
1,936 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Plugins.py
|
icetea_lib.TestBench.Plugins.Plugins
|
class Plugins(object):
"""
This Mixer manage Test used Plugins.
"""
def __init__(self, logger, env, args, config):
super(Plugins, self).__init__()
self._parser_manager = None
self._pluginmanager = None
self._logger = logger
self._env = env
self._args = args
self._config = config
def init(self, benchapi, logger=None):
"""
Initialize Parser and Plugin managers.
:return: Nothing
"""
self._env = benchapi.env
if logger:
self._logger = logger
self._parser_manager = ParserManager(self._logger)
self._pluginmanager = PluginManager(responseparser=self._parser_manager,
bench=benchapi,
logger=self._logger)
def load_plugins(self):
"""
Initialize PluginManager and Load bench related plugins.
:return: Nothing
"""
self._pluginmanager.load_default_tc_plugins()
self._pluginmanager.load_custom_tc_plugins(self._args.plugin_path)
@property
def pluginmanager(self):
"""
Getter for PluginManager.
:return: PluginManager
"""
return self._pluginmanager
@pluginmanager.setter
def pluginmanager(self, value):
"""
Setter for PluginManager.
"""
self._pluginmanager = value
# All required external services starting here
def start_external_services(self):
"""
Start ExtApps required by test case.
:return: Nothing
"""
apps = get(self._config, 'requirements.external.apps', [])
for app in apps:
# Check if we have an environment configuration for this app
conf = app
try:
conf = merge(conf, self._env["extApps"][app["name"]])
except KeyError:
self._logger.warning("Unable to merge configuration for app %s", app,
exc_info=True if not self._args.silent else False)
if 'name' in app:
try:
self.pluginmanager.start_external_service(app['name'], conf=conf)
except PluginException:
self._logger.error("Failed to start requested external services.")
raise EnvironmentError("Failed to start requested external services.")
self._logger.info("done")
else:
conf_path = None
conf_cmd = None
try:
conf_path = conf["path"]
except KeyError:
self._logger.warning("No path defined for app %s", app)
try:
conf_cmd = conf["cmd"]
except KeyError:
self._logger.warning("No command defined for app %s", app)
appname = 'generic'
newapp = GenericProcess(name=appname, path=conf_path, cmd=conf_cmd)
newapp.ignore_return_code = True
newapp.start_process()
def stop_external_services(self):
"""
Stop external services started via PluginManager
"""
self._logger.debug("Stop external services if any")
self.pluginmanager.stop_external_services()
def parse_response(self, cmd, response):
"""
Parse a response for command cmd.
:param cmd: Command
:param response: Response
:return: Parsed response (usually dict)
"""
return self._parser_manager.parse(cmd, response)
|
class Plugins(object):
'''
This Mixer manage Test used Plugins.
'''
def __init__(self, logger, env, args, config):
pass
def init(self, benchapi, logger=None):
'''
Initialize Parser and Plugin managers.
:return: Nothing
'''
pass
def load_plugins(self):
'''
Initialize PluginManager and Load bench related plugins.
:return: Nothing
'''
pass
@property
def pluginmanager(self):
'''
Getter for PluginManager.
:return: PluginManager
'''
pass
@pluginmanager.setter
def pluginmanager(self):
'''
Setter for PluginManager.
'''
pass
def start_external_services(self):
'''
Start ExtApps required by test case.
:return: Nothing
'''
pass
def stop_external_services(self):
'''
Stop external services started via PluginManager
'''
pass
def parse_response(self, cmd, response):
'''
Parse a response for command cmd.
:param cmd: Command
:param response: Response
:return: Parsed response (usually dict)
'''
pass
| 11 | 8 | 12 | 1 | 7 | 4 | 2 | 0.53 | 1 | 6 | 4 | 0 | 8 | 6 | 8 | 8 | 108 | 13 | 62 | 24 | 51 | 33 | 56 | 22 | 47 | 8 | 1 | 3 | 16 |
1,937 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_cliresponseparser.py
|
test.test_cliresponseparser.TestVerify
|
class TestVerify(unittest.TestCase):
def setUp(self):
plugin = DefaultParsers()
parsers = plugin.get_parsers()
self.parsermanager = ParserManager(logging.getLogger())
for parser in parsers:
self.parsermanager.add_parser(parser, parsers[parser])
def _create_line_response_parser(self, command, path):
output = open(path, 'r').read()
response = CliResponse()
for line in output.splitlines():
response.lines.append(line)
parser = self.parsermanager
resp = parser.parse(command, response)
return resp
|
class TestVerify(unittest.TestCase):
def setUp(self):
pass
def _create_line_response_parser(self, command, path):
pass
| 3 | 0 | 7 | 0 | 7 | 0 | 2 | 0 | 1 | 3 | 3 | 0 | 2 | 1 | 2 | 74 | 17 | 2 | 15 | 12 | 12 | 0 | 15 | 12 | 12 | 2 | 2 | 1 | 4 |
1,938 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestSuite.py
|
icetea_lib.TestSuite.TestSuite.SuiteException
|
class SuiteException(Exception):
"""
Raised when something goes wrong with suite creation
or other operations performed here.
"""
pass
|
class SuiteException(Exception):
'''
Raised when something goes wrong with suite creation
or other operations performed here.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,939 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestStepError.py
|
icetea_lib.TestStepError.TestStepTimeout
|
class TestStepTimeout(TestStepFail):
"""
TestStepTimeout, failure for showing timeouts.
"""
pass
|
class TestStepTimeout(TestStepFail):
'''
TestStepTimeout, failure for showing timeouts.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
1,940 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestStepError.py
|
icetea_lib.TestStepError.TestStepFail
|
class TestStepFail(Exception):
'''
TestStepFail exception is used when failure causes because of
device/software under test, and probably not related to test environment
'''
pass
|
class TestStepFail(Exception):
'''
TestStepFail exception is used when failure causes because of
device/software under test, and probably not related to test environment
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 10 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,941 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestStepError.py
|
icetea_lib.TestStepError.TestStepError
|
class TestStepError(Exception):
'''
TestStepError exception is used in case where something
very fatal unexpected happens in test environment or serial port connection dies.
'''
def __init__(self, message="TestStepError"):
super(TestStepError, self).__init__(message)
self.logger = None
# print detailed info
def detailed_info(self):
"""
Print detailed exception information from traceback
"""
_, _, trace = sys.exc_info()
filename, linenumber, functionname, line = traceback.extract_tb(trace)[-2]
self.logger = get_bench_logger()
self.logger.error("Exception details: ")
self.logger.error("TC Name: " + str(filename))
self.logger.error("Function name: " + str(functionname))
self.logger.error("Line number: " + str(linenumber))
self.logger.error("Line: " + str(line))
|
class TestStepError(Exception):
'''
TestStepError exception is used in case where something
very fatal unexpected happens in test environment or serial port connection dies.
'''
def __init__(self, message="TestStepError"):
pass
def detailed_info(self):
'''
Print detailed exception information from traceback
'''
pass
| 3 | 2 | 8 | 0 | 6 | 2 | 1 | 0.62 | 1 | 2 | 0 | 0 | 2 | 1 | 2 | 12 | 22 | 1 | 13 | 6 | 10 | 8 | 13 | 6 | 10 | 1 | 3 | 0 | 2 |
1,942 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestStepError.py
|
icetea_lib.TestStepError.SkippedTestcaseException
|
class SkippedTestcaseException(Exception):
"""
Error that indicates that this test case should be skipped for one reason or another.
"""
pass
|
class SkippedTestcaseException(Exception):
'''
Error that indicates that this test case should be skipped for one reason or another.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,943 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestcaseContainer.py
|
icetea_lib.TestSuite.TestcaseContainer.DummyContainer
|
class DummyContainer(TestcaseContainer):
"""
Class DummyContainer
subclasses TestcaseContainer, acts as a dummy object for listing test cases
that were not found when importing test cases.
"""
def __init__(self, logger=None):
"""
Just initialize the super class.
:param logger: logger to use.
"""
super(DummyContainer, self).__init__(logger)
@staticmethod
def find_testcases(modulename, moduleroot, tc_meta_schema, path=None, suiteconfig=None,
logger=None):
"""
Static method find_testcases. Returns a DummyContainer with attributes collected from
function params.
"""
dummycontainer = DummyContainer(logger)
dummycontainer.tcname = modulename
dummycontainer._modulename = modulename
dummycontainer.status = TestStatus.PENDING
dummycontainer._instance = None
dummycontainer._final_configuration = {}
dummycontainer._moduleroot = moduleroot
dummycontainer._meta_schema = tc_meta_schema
dummycontainer._result = None
dummycontainer._filepath = path
dummycontainer._suiteconfig = suiteconfig if suiteconfig else {}
return dummycontainer
def run(self, forceflash=False):
"""
Just returns the Result object for this Dummy.
"""
return self._result
def get(self, field):
return None
def set_final_config(self):
pass
def set_result_verdict(self, reason):
"""
Sets the inconclusive verdict for this DummyContainer with reason reason.
:param reason: String reason for why this dummy exists.
:return: Nothing
"""
if not self._result:
self._result = Result()
self._result.set_verdict(verdict="inconclusive",
retcode=ReturnCodes.RETCODE_FAIL_INCONCLUSIVE,
duration=0)
self._result.tc_metadata["name"] = self.tcname
self._result.fail_reason = reason
def validate_tc_instance(self):
return True
|
class DummyContainer(TestcaseContainer):
'''
Class DummyContainer
subclasses TestcaseContainer, acts as a dummy object for listing test cases
that were not found when importing test cases.
'''
def __init__(self, logger=None):
'''
Just initialize the super class.
:param logger: logger to use.
'''
pass
@staticmethod
def find_testcases(modulename, moduleroot, tc_meta_schema, path=None, suiteconfig=None,
logger=None):
'''
Static method find_testcases. Returns a DummyContainer with attributes collected from
function params.
'''
pass
def run(self, forceflash=False):
'''
Just returns the Result object for this Dummy.
'''
pass
def get(self, field):
pass
def set_final_config(self):
pass
def set_result_verdict(self, reason):
'''
Sets the inconclusive verdict for this DummyContainer with reason reason.
:param reason: String reason for why this dummy exists.
:return: Nothing
'''
pass
def validate_tc_instance(self):
pass
| 9 | 5 | 7 | 0 | 5 | 2 | 1 | 0.62 | 1 | 4 | 3 | 0 | 6 | 1 | 7 | 32 | 67 | 12 | 34 | 12 | 24 | 21 | 30 | 10 | 22 | 2 | 2 | 1 | 9 |
1,944 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/NetworkSniffer.py
|
icetea_lib.TestBench.NetworkSniffer.NetworkSniffer
|
class NetworkSniffer(object):
"""
This Mixer provide public wshark related API's for Bench.
"""
def __init__(self, resources, configurations, args, logger=None, **kwargs):
super(NetworkSniffer, self).__init__(**kwargs)
self.__sniffing = False
self.__wshark = None
self.__capture_file = None
self.__tshark_arguments = {}
self._logger = logger if logger else LogManager.get_dummy_logger()
self._resources = resources
self._configurations = configurations
self._args = args
def init(self, logger=None):
"""
Set logger.
"""
self._logger = logger if logger else self._logger
@property
def wshark(self):
"""
Return wireshark object.
:return: Wireshark
"""
return self.__wshark
@property
def tshark_arguments(self):
"""
Get tshark arguments.
:return: dict
"""
return self.__tshark_arguments
@property
def sniffer_required(self):
"""
Check if sniffer was requested for this run.
:return: Boolean
"""
return self._args.use_sniffer
def init_sniffer(self):
"""
Initialize and start sniffer if it is required.
:return: Nothing
"""
if self.sniffer_required:
self.__start_sniffer()
def clear_sniffer(self):
"""
Clear sniffer
:return: Nothing
"""
if self.__sniffing:
import psutil
self._logger.debug("Close wshark pipes")
# Note: the psutil has changed the API at around version 3.0 but user likely has
# the older version installed unless it has specifically installed via pip.
if parse_version(psutil.__version__) < parse_version('3.0.0'):
self._logger.warning("NOTE: your psutil version %s is likely too old,"
" please update!", psutil.__version__)
dumpcaps = []
for process in self.wshark.liveLoggingCapture.running_processes:
children = psutil.Process(process.pid).children(recursive=True)
for child in children:
if child.name() in ('dumpcap', 'tshark-bin', 'dumpcap-bin'):
dumpcaps.append(child)
self.__stop_sniffer()
for child in dumpcaps:
try:
child.kill()
child.wait(timeout=2)
except (OSError, psutil.NoSuchProcess, psutil.TimeoutExpired):
pass
def __create_wshark_object(self):
"""
Internal creator for Wireshark object.
:return: Nothing
"""
from icetea_lib.wireshark import Wireshark
if not self.wshark:
self.__wshark = Wireshark()
def __start_sniffer(self):
"""
Start network sniffer capturing pcap to a file.
:return: Nothing
"""
iface = self.__get_nw_interface()
if not iface:
raise TestStepError("Cannot capture wireshark log")
try:
self.__create_wshark_object()
except ImportError as error:
raise TestStepError(error)
self.__capture_file = LogManager.get_testcase_logfilename("network.nw.pcap")
self._logger.debug('Start wireshark capture: %s', self.capture_file)
# Add self.tshark_preferences to parameters
# when pyshark starts supporting the -o tshark argument
self.wshark.startCapture(iface,
self.__capture_file,
self.__tshark_arguments)
self.__sniffing = True
@property
def capture_file(self):
"""
Return capture file path.
:return: file path of capture file.
"""
return self.__capture_file
def __stop_sniffer(self):
"""
Stop the network sniffer.
:return: Nothing
"""
if self.__sniffing:
self._logger.debug('Stop wireshark capture: %s', self.capture_file)
packet_count = self.__wshark.stopCapture()
self._logger.debug("Got total %i NW packets", packet_count)
def __get_nw_interface(self):
"""
Get the capture pipe or sniffer interface.
:return:
"""
return get(self._configurations.env, 'sniffer.iface')
def get_nw_log_filename(self): # pylint: disable=no-self-use
"""
Get nw data log file name.
:return: string
"""
return LogManager.get_testcase_logfilename("network.nw.pcap")
|
class NetworkSniffer(object):
'''
This Mixer provide public wshark related API's for Bench.
'''
def __init__(self, resources, configurations, args, logger=None, **kwargs):
pass
def init(self, logger=None):
'''
Set logger.
'''
pass
@property
def wshark(self):
'''
Return wireshark object.
:return: Wireshark
'''
pass
@property
def tshark_arguments(self):
'''
Get tshark arguments.
:return: dict
'''
pass
@property
def sniffer_required(self):
'''
Check if sniffer was requested for this run.
:return: Boolean
'''
pass
def init_sniffer(self):
'''
Initialize and start sniffer if it is required.
:return: Nothing
'''
pass
def clear_sniffer(self):
'''
Clear sniffer
:return: Nothing
'''
pass
def __create_wshark_object(self):
'''
Internal creator for Wireshark object.
:return: Nothing
'''
pass
def __start_sniffer(self):
'''
Start network sniffer capturing pcap to a file.
:return: Nothing
'''
pass
@property
def capture_file(self):
'''
Return capture file path.
:return: file path of capture file.
'''
pass
def __stop_sniffer(self):
'''
Stop the network sniffer.
:return: Nothing
'''
pass
def __get_nw_interface(self):
'''
Get the capture pipe or sniffer interface.
:return:
'''
pass
def get_nw_log_filename(self):
'''
Get nw data log file name.
:return: string
'''
pass
| 18 | 13 | 10 | 1 | 5 | 4 | 2 | 0.73 | 1 | 8 | 2 | 0 | 13 | 8 | 13 | 13 | 153 | 24 | 75 | 35 | 55 | 55 | 68 | 30 | 52 | 8 | 1 | 4 | 27 |
1,945 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestSuite/TestcaseList.py
|
icetea_lib.TestSuite.TestcaseList.TestcaseList
|
class TestcaseList(object):
"""
TestcaseList object, a list-like structure with helpers to collect local test cases and
filter found tests for running.
"""
def __init__(self, logger=None, testcases=None):
self.logger = logger
self._testcases = testcases if testcases else []
self.search_errors = []
if self.logger is None:
import logging
self.logger = logging.getLogger("TestcaseList")
if not self.logger.handlers:
self.logger.addHandler(logging.StreamHandler())
self.logger.setLevel(logging.INFO)
def __iter__(self):
return iter(self._testcases)
def __len__(self):
return len(self._testcases)
def get_list(self):
"""
Returns the internal list of TestcaseContainers
"""
return self._testcases
def get_names(self):
"""
Gets names of test cases in this TestcaseList.
:return: list
"""
lst = []
for testcase in self._testcases:
lst.append(testcase.tcname)
return lst
def filter(self, filt, tc_names=None):
"""
Filter test cases from this list into a new TestcaseList object.
:param filt: TestcaseFilter
:param tc_names: List of test case names. Is used when running with tc or with a suite
:return: new TestcaseList with filtered test cases.
"""
templist = TestcaseList(self.logger)
if tc_names is not None:
self._filter_from_names(tc_names, templist, filt)
else:
for i, testcase in enumerate(self._testcases):
if filt.match(testcase, i):
templist.append(testcase)
# Check that all named testcases were found. Append dummies if some are missing
self._check_filtered_tcs(filt, templist)
templist.search_errors = self.search_errors
return templist
def import_from_path(self, path="./testcases"):
"""
Import test cases from path to this TestcaseList
:param path: Path to import from
:return: Nothing
"""
local_testcases = self._get_local_testcases(path)
self._testcases = self._parse_local_testcases(local_testcases, True)
def get(self, index):
"""
dict-like getter based on index.
:param index: Index of test case
:return: TestcaseContainer or None if index is outside len
"""
return self._testcases[index] if index < len(self) else None
def append(self, val):
"""
Append val to internal list of test cases.
:param val: test case to append
:return: Nothing
"""
self._testcases.append(val)
def _get_local_testcases(self, tcpath):
"""
Crawl given path for .py files
"""
i = 0
returnlist = []
if not isinstance(tcpath, str):
self.logger.error("Error: testcase path is not string")
sys.exit(0)
# path is absolute
tcpath = os.path.abspath(tcpath)
if len(tcpath.split(os.sep)) > 1:
strip_dir = os.sep.join(tcpath.split(os.sep)[:-1]) + os.sep
else:
strip_dir = ''
for root, _, files in os.walk(tcpath):
for file_handle in sorted(files):
basename, extension = os.path.splitext(file_handle)
if (basename == '__init__') or extension != '.py':
continue
moduleroot = ''
modulename = ''
moduleroot = root.replace(strip_dir, '', 1)
modulename = moduleroot.replace(os.sep, ".") + '.' + basename
returnlist.append((modulename, moduleroot, root + os.sep + file_handle))
i += 1
if i == 0:
self.logger.error("Error: No files found in given path: %s", tcpath)
return returnlist
def _parse_local_testcases(self, tc_list, verbose):
"""
Parse list produced by get_local_testcases()
"""
return_list = []
if not isinstance(tc_list, list):
self.logger.error("Error, parseLocalTestcases: Given argument not a list.")
return return_list
i = 0
from icetea_lib.IceteaManager import TCMetaSchema
schema = TCMetaSchema().get_meta_schema()
for testcase in tc_list:
i += 1
try:
parsedcases = TestcaseContainer.find_testcases(modulename=testcase[0],
moduleroot=testcase[1],
path=testcase[2],
tc_meta_schema=schema,
logger=self.logger)
return_list.extend(parsedcases)
except (IndexError, TypeError, ValueError):
self.logger.error("Error, parse_local_testcases: Malformed list item. "
"Skipping item %d", i)
self.logger.debug("tc: %s", str(testcase))
if verbose:
traceback.print_exc()
except ImportError as error:
error_item = {"module": testcase[0], "error": error}
self.search_errors.append(error_item)
continue
return return_list
def _filter_from_names(self, tc_names, tclist, filt):
"""
Fill out missing test cases with dummies if a names test case is missing.
:param tc_names: List of test case names requested.
:param tclist: List of found test cases.
:param filt: TestcaseFilter.
:return: Nothing, modifies tclist in place.
"""
for tcname in tc_names:
found = False
for i, testcase in enumerate(self._testcases):
if filt.match(testcase, i) and testcase.tcname == tcname:
if testcase in tclist:
tclist.append(copy(testcase))
else:
tclist.append(testcase)
found = True
if not found:
dummy = DummyContainer(self.logger)
dummy.tcname = tcname
dummy.set_result_verdict("Test case not found")
tclist.append(dummy)
def _check_filtered_tcs(self, filt, tclist):
"""
Check filtered test cases afor a few special circumstances.
:param filt: TestcaseFilter.
:param tclist: List of test cases.
:return: Nothing, modifies tclist in place.
"""
if filt.get_filter().get("name") is not False and filt.get_filter().get("name") != "all":
# If --tc filter is set, length can be max 1
if len(tclist) != 1:
dummy = DummyContainer(self.logger)
dummy.tcname = filt.get_filter().get("name")
dummy.set_result_verdict("Test case not found")
tclist.append(dummy)
if filt.get_filter().get("list") is not False:
if len(tclist) < len(filt.get_filter().get("list")):
needed_names = filt.get_filter().get("list")
found_names = tclist.get_names()
counter1 = Counter(needed_names)
counter2 = Counter(found_names)
diff = counter1 - counter2
lst = list(diff.elements())
for name in lst:
dummy = DummyContainer(self.logger)
dummy.tcname = name
dummy.set_result_verdict("Test case not found")
tclist.append(dummy)
|
class TestcaseList(object):
'''
TestcaseList object, a list-like structure with helpers to collect local test cases and
filter found tests for running.
'''
def __init__(self, logger=None, testcases=None):
pass
def __iter__(self):
pass
def __len__(self):
pass
def get_list(self):
'''
Returns the internal list of TestcaseContainers
'''
pass
def get_names(self):
'''
Gets names of test cases in this TestcaseList.
:return: list
'''
pass
def filter(self, filt, tc_names=None):
'''
Filter test cases from this list into a new TestcaseList object.
:param filt: TestcaseFilter
:param tc_names: List of test case names. Is used when running with tc or with a suite
:return: new TestcaseList with filtered test cases.
'''
pass
def import_from_path(self, path="./testcases"):
'''
Import test cases from path to this TestcaseList
:param path: Path to import from
:return: Nothing
'''
pass
def get_list(self):
'''
dict-like getter based on index.
:param index: Index of test case
:return: TestcaseContainer or None if index is outside len
'''
pass
def append(self, val):
'''
Append val to internal list of test cases.
:param val: test case to append
:return: Nothing
'''
pass
def _get_local_testcases(self, tcpath):
'''
Crawl given path for .py files
'''
pass
def _parse_local_testcases(self, tc_list, verbose):
'''
Parse list produced by get_local_testcases()
'''
pass
def _filter_from_names(self, tc_names, tclist, filt):
'''
Fill out missing test cases with dummies if a names test case is missing.
:param tc_names: List of test case names requested.
:param tclist: List of found test cases.
:param filt: TestcaseFilter.
:return: Nothing, modifies tclist in place.
'''
pass
def _check_filtered_tcs(self, filt, tclist):
'''
Check filtered test cases afor a few special circumstances.
:param filt: TestcaseFilter.
:param tclist: List of test cases.
:return: Nothing, modifies tclist in place.
'''
pass
| 14 | 11 | 14 | 1 | 10 | 4 | 3 | 0.42 | 1 | 12 | 3 | 0 | 13 | 3 | 13 | 13 | 205 | 23 | 128 | 51 | 112 | 54 | 120 | 50 | 104 | 7 | 1 | 4 | 42 |
1,946 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/arguments.py
|
icetea_lib.arguments.Abspathify
|
class Abspathify(argparse.Action): # pylint: disable=too-few-public-methods
"""
Action to convert paths to absolute paths.
"""
def __call__(self, parser, args, values, option_string=None):
setattr(args, self.dest, os.path.abspath(values))
|
class Abspathify(argparse.Action):
'''
Action to convert paths to absolute paths.
'''
def __call__(self, parser, args, values, option_string=None):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 1.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 8 | 6 | 0 | 3 | 2 | 1 | 4 | 3 | 2 | 1 | 1 | 3 | 0 | 1 |
1,947 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_allocationcontext.py
|
test.test_allocationcontext.AllocContextTestcase
|
class AllocContextTestcase(unittest.TestCase):
def test_set(self):
con1 = AllocationContext("id1", "al_id1", {"data": "data1"})
con1["test"] = "test"
self.assertEqual(con1.get_alloc_data()["test"], "test")
con1.set("test", "test2")
self.assertEqual(con1.get_alloc_data()["test"], "test2")
def test_get(self):
con1 = AllocationContext("id1", "al_id1", {"data": "data1"})
con1.set("test", "test3")
self.assertEqual(con1.get("test"), "test3")
self.assertEqual(con1["test"], "test3")
|
class AllocContextTestcase(unittest.TestCase):
def test_set(self):
pass
def test_get(self):
pass
| 3 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 2 | 0 | 2 | 74 | 14 | 2 | 12 | 5 | 9 | 0 | 12 | 5 | 9 | 1 | 2 | 0 | 2 |
1,948 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_arguments.py
|
test.test_arguments.MyTestCase
|
class MyTestCase(unittest.TestCase):
def test_args_from_file(self):
sys.argv = ["filename.py", "--tc", "test_case_1", "--tcdir", "test_directory",
"--cfg_file",
os.path.abspath(os.path.join(os.path.dirname(__file__),
"data/conf_file.txt")),
"--suitedir",
"shouldoverwrite"]
parser = get_parser() # pylint: disable=unused-variable
args, unknown = _parse_arguments() # pylint: disable=unused-variable
for arg in ["tc", "tcdir", "cfg_file", "baudrate", "suitedir"]:
self.assertTrue(hasattr(args, arg))
self.assertEqual(args.tc, "test_case_1")
self.assertEqual(args.tcdir, "shouldoverwrite")
self.assertEqual(args.baudrate, 9600)
self.assertEqual(args.suitedir, "shouldoverwrite")
def test_args_from_file_with_other_file(self): # pylint: disable=invalid-name
sys.argv = ["filename.py", "--cfg_file",
os.path.abspath(os.path.join(os.path.dirname(__file__),
"data/conf_file_2.txt"))]
parser = get_parser() # pylint: disable=unused-variable
args, unknown = _parse_arguments() # pylint: disable=unused-variable
for arg in ["tcdir", "baudrate", "suitedir"]:
self.assertTrue(hasattr(args, arg))
self.assertEqual(args.tcdir, "shouldoverwrite")
self.assertEqual(args.baudrate, 9600)
self.assertEqual(args.suitedir, "shouldoverwrite")
def test_args_from_file_with_file_infinite_recursion(self): # pylint: disable=invalid-name
sys.argv = ["filename.py", "--cfg_file",
os.path.abspath(os.path.join(os.path.dirname(__file__),
"data/conf_file_3.txt"))]
parser = get_parser() # pylint: disable=unused-variable
args, unknown = _parse_arguments() # pylint: disable=unused-variable
for arg in ["tcdir", "baudrate", "suitedir"]:
self.assertTrue(hasattr(args, arg))
self.assertEqual(args.tcdir, "shouldoverwrite")
def test_str2bool(self):
positives_list = ["y", "Y", "t", "T", "1", "yes", "YES", "True", "true"]
negatives_list = ["n", "N", "no", "No", "f", "F", "False", "false", "0"]
for bool_to_conv in positives_list:
self.assertTrue(str_arg_to_bool(bool_to_conv))
for bool_to_conv in negatives_list:
self.assertFalse(str_arg_to_bool(bool_to_conv))
with self.assertRaises(ArgumentTypeError):
str_arg_to_bool("2")
with self.assertRaises(ArgumentTypeError):
str_arg_to_bool("test")
|
class MyTestCase(unittest.TestCase):
def test_args_from_file(self):
pass
def test_args_from_file_with_other_file(self):
pass
def test_args_from_file_with_file_infinite_recursion(self):
pass
def test_str2bool(self):
pass
| 5 | 0 | 12 | 0 | 12 | 2 | 2 | 0.17 | 1 | 1 | 0 | 0 | 4 | 0 | 4 | 76 | 51 | 4 | 47 | 17 | 42 | 8 | 38 | 17 | 33 | 3 | 2 | 1 | 9 |
1,949 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_asserts.py
|
test.test_asserts.AssertTestcase
|
class AssertTestcase(unittest.TestCase):
def setUp(self):
self.asserts = asserts
def test_assert_booleans(self):
with self.assertRaises(TestStepFail):
self.asserts.assertTrue(False, "False was not True!")
with self.assertRaises(TestStepFail):
self.asserts.assertFalse(True)
try:
self.asserts.assertTrue(True, "True was somehow False?!")
self.asserts.assertTrue([1, 2])
self.assertTrue(True, "No fail was raised.")
except TestStepFail:
self.assertTrue(False, "TestStepFail was raised! ")
try:
self.asserts.assertFalse(False)
self.asserts.assertFalse([])
self.asserts.assertFalse({})
self.assertTrue(True, "No fail was raised.")
except TestStepFail:
self.assertTrue(False, "TestStepFail was raised! ")
def test_assert_nones(self):
with self.assertRaises(TestStepFail):
self.asserts.assertNone(1)
with self.assertRaises(TestStepFail):
self.asserts.assertNotNone(None)
try:
self.asserts.assertNone(None)
self.asserts.assertNotNone(1)
except TestStepFail:
self.assertTrue(False, "TestStepFail was raised!")
def test_assert_equals(self):
with self.assertRaises(TestStepFail):
self.asserts.assertEqual(1, 2)
with self.assertRaises(TestStepFail):
self.asserts.assertNotEqual(1, 1)
try:
self.asserts.assertEqual(1, 1)
self.asserts.assertNotEqual(1, 2)
except TestStepFail:
self.assertTrue(False, "TestStepFail was raised!")
def test_assert_json_contains(self):
with self.assertRaises(TestStepFail):
self.asserts.assertJsonContains('{"test": "key"}', "test2")
with self.assertRaises(TestStepFail):
self.asserts.assertJsonContains("{'test': 'key'}", "test")
with self.assertRaises(TestStepFail):
self.asserts.assertJsonContains(None, "test")
try:
self.asserts.assertJsonContains('{"test": "key"}', 'test')
except TestStepFail:
self.assertTrue(False, 'Key test was not contained in {"test": "key"}?')
def test_assert_dut_trace_contains(self):
mock_bench = mock.MagicMock()
mock_bench.verify_trace = mock.MagicMock(side_effect=[True, False])
self.asserts.assertDutTraceContains(1, "message_found", mock_bench)
with self.assertRaises(TestStepFail):
self.asserts.assertDutTraceContains(1, "message_not_found", mock_bench)
def test_assert_dut_trace_does_not_contain(self): # pylint: disable=invalid-name
mock_bench = mock.MagicMock()
mock_bench.verify_trace = mock.MagicMock(side_effect=[True, False])
with self.assertRaises(TestStepFail):
self.asserts.assertDutTraceDoesNotContain(1, "message_found", mock_bench)
self.asserts.assertDutTraceDoesNotContain(1, "message_not_found", mock_bench)
|
class AssertTestcase(unittest.TestCase):
def setUp(self):
pass
def test_assert_booleans(self):
pass
def test_assert_nones(self):
pass
def test_assert_equals(self):
pass
def test_assert_json_contains(self):
pass
def test_assert_dut_trace_contains(self):
pass
def test_assert_dut_trace_does_not_contain(self):
pass
| 8 | 0 | 10 | 1 | 9 | 0 | 2 | 0.02 | 1 | 1 | 1 | 0 | 7 | 1 | 7 | 79 | 77 | 13 | 64 | 11 | 56 | 1 | 64 | 11 | 56 | 3 | 2 | 1 | 12 |
1,950 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_asserts.py
|
test.test_asserts.MockBench
|
class MockBench(object):
def __init__(self):
pass
def command(self):
pass
def logger(self):
pass
|
class MockBench(object):
def __init__(self):
pass
def command(self):
pass
def logger(self):
pass
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 9 | 2 | 7 | 4 | 3 | 0 | 7 | 4 | 3 | 1 | 1 | 0 | 3 |
1,951 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench.py
|
test.test_bench.ApiTestcase
|
class ApiTestcase(Bench):
"""
Testcase class for testing all exception cases
"""
def __init__(self):
Bench.__init__(self,
name="ut_apis",
title="unittest apis in testcase",
status="development",
type="acceptance",
purpose="dummy",
component=["UT"],
feature=["public apis"],
requirements={
"duts": {
'*': {
"count": 0,
"allowed_platforms": ["TEST1"]
}
}
}
)
self.expected_config = {'status': 'development',
'component': ["UT"],
'feature': ["public apis"],
'requirements': {
'duts': {
'*': {
'count': 0,
'application': {
'bin': None
},
"allowed_platforms": ["TEST1"]
}
},
'external': {
'apps': []
}
},
'name': 'ut_apis',
'title': 'unittest apis in testcase',
'compatible': {
'framework': {
'version': '>=1.0.0',
'name': 'Icetea'
}, 'hw': {
'value': True
}, 'automation': {
'value': True}
}, 'purpose': 'dummy',
'type': 'acceptance',
'sub_type': None
}
def setup(self):
pass
def raise_exc(self, step): # pylint: disable=no-self-use
print(step)
raise TestStepFail(step)
def case(self):
if self.test_name != "ut_apis":
self.raise_exc("test_name broken.")
if self.config != self.expected_config:
self.raise_exc("config getter broken.")
self.expected_config["test"] = "test"
self.config = self.expected_config
if self.config != self.expected_config:
self.raise_exc("config setter broken.")
if self.env != {'sniffer': {'iface': 'Sniffer'}}:
self.raise_exc("env broken.")
if self.is_hardware_in_use():
self.raise_exc("is_hardware_in_use broken.")
if self.get_platforms() != list():
self.raise_exc("get_platforms broken.")
if self.get_serialnumbers():
self.raise_exc("get_serialnumbers broken")
if self.get_test_component() != self.expected_config["component"]:
self.raise_exc("get_test_component broken")
if len(self.get_allowed_platforms()) != 1 or self.get_allowed_platforms()[0] != "TEST1":
self.raise_exc("Allowed platforms broken.")
if self.status() != self.expected_config['status']:
self.raise_exc("status broken")
if self.type() != self.expected_config["type"]:
self.raise_exc("type broken")
if self.get_features_under_test() != self.expected_config["feature"]:
self.raise_exc("features broken")
if self.subtype() != self.expected_config["sub_type"]:
self.raise_exc("subtype broken")
if self.config != self.get_config():
self.raise_exc("config not the same as get_config()")
if self.skip() is not None:
self.raise_exc("skip is not None")
if self.skip_info() is not None:
self.raise_exc("skip_info is not None")
if self.skip_reason() != "":
self.raise_exc("skip_reason is not empty")
if self.check_skip() is not False:
self.raise_exc("check_skip was not False")
if self.get_tc_abspath(__file__) != __file__:
self.raise_exc("get_tc_abspath file name did not match")
self.set_config({"test": "test1"})
if self.config != {"test": "test1"}:
self.raise_exc("set_config broken.")
self.set_config(self.expected_config)
if self.dut_count() != 0:
self.raise_exc("dut_count broken.")
if self.get_dut_count() != 0:
self.raise_exc("get_dut_count broken.")
if not isinstance(self.resource_provider, ResourceProvider):
self.raise_exc("resource_provider broken.")
if not isinstance(self.resource_configuration, ResourceConfig):
self.raise_exc("resource_configuration broken.")
if self.duts != list():
self.raise_exc("duts broken.")
self.duts = ["D1"]
if self.duts != ["D1"]:
self.raise_exc("duts setter broken.")
self.duts = []
if self.dut_indexes:
self.raise_exc("dut_indexes broken.")
if not inspect.isgenerator(self.duts_iterator_all()):
self.raise_exc("duts_iterator_all broken.")
if not inspect.isgenerator(self.duts_iterator()):
self.raise_exc("duts_iterator is broken.")
if self.is_allowed_dut_index(1) is not False:
self.raise_exc("is_allowed_dut_index broken")
try:
self.get_dut(1)
except ValueError:
pass
except Exception:
self.raise_exc("get_dut is broken.")
try:
self.get_node_endpoint(1)
except ValueError:
pass
except Exception:
self.raise_exc("get_node_endpoint is broken.")
if not self.is_my_dut_index(1):
self.raise_exc("is_my_dut_index is broken.")
if self.dutinformations != list():
self.raise_exc("dutinformations is broken.")
if self.get_dut_nick(1) != "1":
self.raise_exc("get_dut_nick is broken.")
try:
self.get_dut_nick("does not exists")
except KeyError:
pass
except Exception:
self.raise_exc("get_dut_nick is broken.")
try:
self.get_dut_index("does not exist")
except ValueError:
pass
except Exception:
self.raise_exc("get_dut_index is broken.")
if not self.is_my_dut(1):
self.raise_exc("is_my_dut broken.")
if self.results is None:
self.raise_exc("results broken.")
if self.retcode != ReturnCodes.RETCODE_SUCCESS:
self.raise_exc("retcode broken.")
if self.wshark is not None:
self.raise_exc("wshark default is broken.")
if self.tshark_arguments != dict():
self.raise_exc("tshark_arguments default is broken.")
if self.sniffer_required is not False:
self.raise_exc("sniffer_required is broken.")
if not self.get_nw_log_filename().endswith("network.nw.pcap"):
self.raise_exc("get_nw_log_filename broken.")
if not isinstance(self.pluginmanager, PluginManager):
self.raise_exc("pluginmanager broken.")
logger = self.get_logger()
if not isinstance(logger, BenchLoggerAdapter):
self.raise_exc("get_logger broken.")
if not isinstance(self.unknown, list):
self.raise_exc("unknown broken.")
old_list = self.unknown
new_list = ["val1"]
self.unknown = new_list
if self.unknown is not new_list:
self.raise_exc("unknown setter broken.")
self.unknown = old_list
old_res_conf = self.resource_configuration
new_res_conf = ["val1"]
self.resource_configuration = new_res_conf
if self.resource_configuration is not new_res_conf:
self.raise_exc("resource_configuration setter broken.")
self.resource_configuration = old_res_conf
if self.retcode != 0:
self.raise_exc("retcode broken.")
self.retcode = 1
if self.retcode != 1:
self.raise_exc("retcode setter broken")
self.retcode = 0
try:
self.append_result()
except Exception:
self.raise_exc("append_result broken.")
self.delay(0.1)
if not isinstance(self.get_time(), float):
self.raise_exc("get_time broken.")
try:
self.verify_trace_skip_fail(1, "a")
except IndexError:
pass
except Exception:
self.raise_exc("verify_trace_skip_fail broken.")
retval = None
try:
retval = self.wait_for_async_response("a", "b")
except AttributeError:
pass
except Exception:
self.raise_exc("wait_for_async_response broken.")
if retval is not None:
self.raise_exc("wait_for_async_response broken.")
try:
self.execute_command(1, "a")
except ValueError:
pass
except Exception:
self.raise_exc("execute_command broken.")
old_pm = self.pluginmanager
new_pm = ["val1"]
self.pluginmanager = new_pm
if self.pluginmanager is not new_pm:
self.raise_exc("pluginmanager setter broken")
self.pluginmanager = old_pm
if self.parse_response("a", "b"):
self.raise_exc("parse_response broken.")
if self.get_test_name() != "ut_apis":
self.raise_exc("get_test_name broken.")
if self.dut_count() != 0:
self.raise_exc("dut_count broken.")
try:
self.init_duts()
except Exception:
self.raise_exc("init_duts broken")
if self.name != "ut_apis":
self.raise_exc("name broken.")
mock_dut = mock.MagicMock()
type(mock_dut).index = mock.PropertyMock(return_value=0)
mock_dut.close_dut = mock.MagicMock()
mock_dut.close_connection = mock.MagicMock()
try:
self.sync_cli("1", retries=0)
except ValueError:
pass
else:
self.raise_exc("sync_cli broken")
def teardown(self):
pass
|
class ApiTestcase(Bench):
'''
Testcase class for testing all exception cases
'''
def __init__(self):
pass
def setup(self):
pass
def raise_exc(self, step):
pass
def case(self):
pass
def teardown(self):
pass
| 6 | 1 | 51 | 1 | 50 | 0 | 15 | 0.02 | 1 | 15 | 6 | 0 | 5 | 7 | 5 | 111 | 264 | 12 | 249 | 22 | 243 | 4 | 203 | 22 | 197 | 72 | 3 | 1 | 76 |
1,952 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench.py
|
test.test_bench.MockDut
|
class MockDut(object):
def __init__(self):
self.traces = ["this is test line 1", "this is test line 2"]
|
class MockDut(object):
def __init__(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 3 | 0 | 3 | 3 | 1 | 0 | 3 | 3 | 1 | 1 | 1 | 0 | 1 |
1,953 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_cliresponse.py
|
test.test_cliresponse.TestVerify
|
class TestVerify(unittest.TestCase):
def test_resps(self):
response = CliResponse()
response.lines = [
'aapeli',
'beeveli',
'oopeli',
'huhheli']
self.assertTrue(response.verify_message(['oopeli'], False))
self.assertTrue(response.verify_message(['oopeli', 'huhheli'], False))
self.assertFalse(response.verify_message(['huhheli', 'oopeli'], False))
with self.assertRaises(LookupError):
self.assertTrue(response.verify_message(['oop eli']))
def test_traces(self):
response = CliResponse()
response.traces = [
'aapeli',
'beeveli',
'oopeli',
'huhheli']
self.assertTrue(response.verify_trace(['oopeli'], False))
self.assertTrue(response.verify_trace(['oopeli', 'huhheli'], False))
self.assertFalse(response.verify_trace(['huhheli', 'oopeli'], False))
with self.assertRaises(LookupError):
self.assertTrue(response.verify_trace(['oop eli']))
def test_traces_deprecated(self):
response = CliResponse()
response.traces = [
'aapeli',
'beeveli',
'oopeli',
'huhheli']
self.assertTrue(response.verify_trace(['oopeli'], False))
self.assertTrue(response.verify_trace(['oopeli', 'huhheli'], False))
self.assertFalse(response.verify_trace(['huhheli', 'oopeli'], False))
with self.assertRaises(LookupError):
self.assertTrue(response.verify_trace(['oop eli']))
|
class TestVerify(unittest.TestCase):
def test_resps(self):
pass
def test_traces(self):
pass
def test_traces_deprecated(self):
pass
| 4 | 0 | 13 | 1 | 12 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 3 | 0 | 3 | 75 | 43 | 6 | 37 | 7 | 33 | 0 | 25 | 7 | 21 | 1 | 2 | 1 | 3 |
1,954 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_runner.py
|
test.test_bench_runner.RunnerSMTestcase
|
class RunnerSMTestcase(unittest.TestCase):
def test_run(self):
mocked_bench = mock.MagicMock()
runner = RunnerSM(mocked_bench)
runner.args = MockArgs()
runner._load_plugins = template_function
runner._init_duts = template_function
runner._start_external_services = template_function
runner._send_pre_commands = template_function
runner._send_post_commands = template_function
runner._duts_release = template_function
runner._clear_sniffer = template_function
runner._stop_external_services = template_function
runner.kill_putty = template_function
runner.logger = MockLogger()
runner.run()
runner.args.kill_putty = True
runner.run()
def test_benchteardown_errors(self):
mocked_bench = mock.MagicMock()
runner = RunnerSM(mocked_bench)
mocked_bench.send_post_commands = mock.MagicMock(side_effect=[Exception, 0, 0, 0, 0])
mocked_bench.duts_release = mock.MagicMock(side_effect=[0, 0, Exception, 0, 0])
mocked_bench.clear_sniffer = mock.MagicMock(side_effect=[0, 0, 0, Exception, 0])
mocked_bench.stop_external_services = mock.MagicMock(side_effect=[0, 0, 0, 0, Exception])
runner._teardown_bench()
mocked_bench.duts_release.assert_called_once()
mocked_bench.clear_sniffer.assert_called_once()
mocked_bench.stop_external_services.assert_called_once()
runner._teardown_bench()
self.assertEqual(mocked_bench.send_post_commands.call_count, 2)
self.assertEqual(mocked_bench.duts_release.call_count, 2)
self.assertEqual(mocked_bench.clear_sniffer.call_count, 2)
self.assertEqual(mocked_bench.stop_external_services.call_count, 2)
runner._teardown_bench()
self.assertEqual(mocked_bench.send_post_commands.call_count, 3)
self.assertEqual(mocked_bench.duts_release.call_count, 3)
self.assertEqual(mocked_bench.clear_sniffer.call_count, 3)
self.assertEqual(mocked_bench.stop_external_services.call_count, 3)
runner._teardown_bench()
self.assertEqual(mocked_bench.send_post_commands.call_count, 4)
self.assertEqual(mocked_bench.duts_release.call_count, 4)
self.assertEqual(mocked_bench.clear_sniffer.call_count, 4)
self.assertEqual(mocked_bench.stop_external_services.call_count, 4)
runner._teardown_bench()
self.assertEqual(mocked_bench.send_post_commands.call_count, 5)
self.assertEqual(mocked_bench.duts_release.call_count, 5)
self.assertEqual(mocked_bench.clear_sniffer.call_count, 5)
self.assertEqual(mocked_bench.stop_external_services.call_count, 5)
|
class RunnerSMTestcase(unittest.TestCase):
def test_run(self):
pass
def test_benchteardown_errors(self):
pass
| 3 | 0 | 24 | 0 | 24 | 0 | 1 | 0 | 1 | 4 | 3 | 0 | 2 | 0 | 2 | 74 | 51 | 2 | 49 | 7 | 46 | 0 | 49 | 7 | 46 | 1 | 2 | 0 | 2 |
1,955 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_runner.py
|
test.test_bench_runner.MockLogger
|
class MockLogger(object):
def __init__(self):
pass
def info(self, *args, **kwargs):
pass
def debug(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
|
class MockLogger(object):
def __init__(self):
pass
def info(self, *args, **kwargs):
pass
def debug(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
| 5 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 12 | 3 | 9 | 5 | 4 | 0 | 9 | 5 | 4 | 1 | 1 | 0 | 4 |
1,956 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_runner.py
|
test.test_bench_runner.MockArgs
|
class MockArgs(object): # pylint: disable=too-few-public-methods
def __init__(self):
self.pre_cmds = []
self.post_cmds = []
self.kill_putty = False
|
class MockArgs(object):
def __init__(self):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 1 | 0.2 | 1 | 0 | 0 | 0 | 1 | 3 | 1 | 1 | 5 | 0 | 5 | 5 | 3 | 1 | 5 | 5 | 3 | 1 | 1 | 0 | 1 |
1,957 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_resources.py
|
test.test_bench_resources.MockLogger
|
class MockLogger(object):
def __init__(self):
pass
def debug(self, *args, **kwargs):
pass
def warning(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
|
class MockLogger(object):
def __init__(self):
pass
def debug(self, *args, **kwargs):
pass
def warning(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
| 5 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 12 | 3 | 9 | 5 | 4 | 0 | 9 | 5 | 4 | 1 | 1 | 0 | 4 |
1,958 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_resources.py
|
test.test_bench_resources.MockArgs
|
class MockArgs(object): # pylint: disable=too-few-public-methods
def __init__(self):
self.silent = True
|
class MockArgs(object):
def __init__(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 3 | 0 | 3 | 3 | 1 | 1 | 3 | 3 | 1 | 1 | 1 | 0 | 1 |
1,959 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_plugin.py
|
test.test_bench_plugin.MockLogger
|
class MockLogger(object):
def __init__(self):
pass
def warning(self, *args, **kwargs):
pass
def info(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
|
class MockLogger(object):
def __init__(self):
pass
def warning(self, *args, **kwargs):
pass
def info(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
| 5 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 12 | 3 | 9 | 5 | 4 | 0 | 9 | 5 | 4 | 1 | 1 | 0 | 4 |
1,960 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_functions.py
|
test.test_bench_functions.MockedDut
|
class MockedDut(object): # pylint: disable=too-few-public-methods
def __init__(self):
self.traces = ["test1", "test2"]
|
class MockedDut(object):
def __init__(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 3 | 0 | 3 | 3 | 1 | 1 | 3 | 3 | 1 | 1 | 1 | 0 | 1 |
1,961 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_commands.py
|
test.test_bench_commands.MockLogger
|
class MockLogger(object):
def __init__(self):
pass
def info(self, *args, **kwargs):
pass
def debug(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
def warning(self, *args, **kwargs):
pass
|
class MockLogger(object):
def __init__(self):
pass
def info(self, *args, **kwargs):
pass
def debug(self, *args, **kwargs):
pass
def error(self, *args, **kwargs):
pass
def warning(self, *args, **kwargs):
pass
| 6 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 5 | 0 | 5 | 5 | 15 | 4 | 11 | 6 | 5 | 0 | 11 | 6 | 5 | 1 | 1 | 0 | 5 |
1,962 |
ARMmbed/icetea
|
ARMmbed_icetea/setup.py
|
setup.VerifyVersionCommand
|
class VerifyVersionCommand(install):
"""
Custom command to verify that the git tag matches our version
"""
description = "verify that the git tag matches our version"
def run(self):
is_ci = os.getenv("CIRCLECI")
if is_ci:
tag = os.getenv("CIRCLE_TAG")
version = "v" + VERSION
if tag != version:
info = "Git tag: {0} does not match the"\
"version of this app: {1}".format(tag, version)
sys.exit(info)
|
class VerifyVersionCommand(install):
'''
Custom command to verify that the git tag matches our version
'''
def run(self):
pass
| 2 | 1 | 9 | 0 | 9 | 0 | 3 | 0.27 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 58 | 15 | 1 | 11 | 7 | 9 | 3 | 10 | 7 | 8 | 3 | 3 | 2 | 3 |
1,963 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/wireshark.py
|
icetea_lib.wireshark.Wireshark
|
class Wireshark(NwPacketManager):
__iface = 'Sniffer'
__captureThreadFile = None
fileLoggingCapture = None
liveCapture = None
def __init__(self):
NwPacketManager.__init__(self)
self.logger = LogManager.get_bench_logger("bench", "WS")
self.__captureThreadLive = None
self.__captureThreadFile = None
if not pyshark:
raise ImportError("Pyshark not installed.")
def setMark(self, mark):
self.setMarkForHead(mark)
# Start live network capturing to a file
def startCapture(self, iface, file, tshark_arguments=None):
self.__iface = iface
if file:
# first capture data to a file
self.logger.debug('Start wireshark capturing to a file: %s', file)
kwargs = dict(iface=self.__iface, file=file)
if isinstance(tshark_arguments, dict):
for key in tshark_arguments.keys():
kwargs[key] = tshark_arguments[key]
self.__captureThreadFile = Thread(target=self.__sniff_to_file, kwargs=kwargs)
self.__captureThreadFile.setName("NW file capture")
self.__captureThreadFile.start()
# capture live data to a python side..
kwargs = dict(iface=self.__iface)
if isinstance(tshark_arguments, dict):
for key in tshark_arguments.keys():
kwargs[key] = tshark_arguments[key]
self.__captureThreadLive = Thread(target=self.__live_capture, kwargs=kwargs)
self.__captureThreadLive.setName("NW live capture")
self.__captureThreadLive.start()
# Load captured packets from a file
def loadCapture(self, file):
self.logger.debug('Loading capture file: %s', file)
capture = pyshark.FileCapture(input_file=file)
if capture is None:
raise ValueError('Loading capture file FAILED.')
for packet in capture:
self.push(packet)
self.logger.debug('Done loading capture file.')
# Stop live network data logging
def stopCapture(self):
if self.fileLoggingCapture:
self.fileLoggingCapture.close()
self.__captureThreadFile.join(timeout=5)
self.liveLoggingCapture.close()
self.__captureThreadLive.join(timeout=5)
return self.count()
# Get captured packets
def getCaptures(self):
return self.getPackets()
# Once pyshark supports it, add preference overriding arguments
def __live_capture(self, iface, decode_as=None):
self.logger.debug("Sniffing if: '%s' -> live" % iface)
self.liveLoggingCapture = pyshark.LiveCapture(interface=iface, decode_as=decode_as)
for packet in self.liveLoggingCapture.sniff_continuously():
self.push(packet)
self.logger.debug("live sniffing ends")
# Once pyshark supports it, add preference overriding arguments
def __sniff_to_file(self, iface, file, decode_as=None):
self.logger.debug("Sniffing if: '%s' -> file: %s" % (iface, file))
self.fileLoggingCapture = pyshark.LiveCapture(interface=iface, output_file=file,
decode_as=decode_as)
ret = self.fileLoggingCapture.sniff() # pylint: disable=unused-variable
self.logger.debug("file sniffing ends")
|
class Wireshark(NwPacketManager):
def __init__(self):
pass
def setMark(self, mark):
pass
def startCapture(self, iface, file, tshark_arguments=None):
pass
def loadCapture(self, file):
pass
def stopCapture(self):
pass
def getCaptures(self):
pass
def __live_capture(self, iface, decode_as=None):
pass
def __sniff_to_file(self, iface, file, decode_as=None):
pass
| 9 | 0 | 7 | 0 | 7 | 0 | 2 | 0.15 | 1 | 4 | 0 | 0 | 8 | 3 | 8 | 23 | 80 | 11 | 61 | 22 | 52 | 9 | 60 | 22 | 51 | 6 | 2 | 3 | 18 |
1,964 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/wireshark.py
|
icetea_lib.wireshark.NwPacketManager
|
class NwPacketManager(object):
def __init__(self):
self.__packets = []
self.__marks = []
self.__lock = Lock()
self.setMarkForHead('start')
self.logger = LogManager.get_bench_logger("bench", "WS")
def setMarkForHead(self, mark):
if self.hasPackets():
# set mark
self.__packets[-1].appendMark(mark)
return
self.__lock.acquire()
self.__marks.append(mark)
self.__lock.release()
def push(self, packet, mark=None):
packet = NwPacket(packet)
self.__lock.acquire()
while len(self.__marks) > 0:
packet.appendMark(self.__marks.pop())
self.__lock.release()
self.__packets.append(packet)
# print("Got packet, count now: %i" % len(self))
def count(self):
return len(self.__packets)
def getPackets(self):
return self.__packets
def lastPacketIndex(self):
return len(self.__packets) - 1
def hasPackets(self):
return len(self.__packets) > 0
def __getitem__(self, index):
return self.__packets[index].packet
''' print packet to console between markers.
By default it prints all packets from beginning to end.
usage example:
self.wshark.setMark("S1")
self.delay(10)
self.wshark.setMark("S2")
self.wshark.printPackets("S1", "S2")
'''
def printPackets(self, startMarker='start', endMarker=None):
if not self.hasPackets():
raise ValueError("Not packets available")
start_index = self.findIndexByMark(startMarker)
end_index = self.lastPacketIndex() if endMarker is None else self.findIndexByMark(endMarker)
for n in range(start_index, end_index):
print(self[n])
# verify packet(s) that it contains valid contents
# Usage example:
# verifyPackets( [{"WPAN": {"Command Identifier": "Beacon Request"}}] )
# OR verifyPackets( [{"WPAN.Command Identifier": "Beacon Request"}] )
# raise TestStepError exception if packet(s) not contains expected content
def verifyPackets(self, expectedPackets, startMarker='start', endMarker=None):
if not self.hasPackets():
raise ValueError("Not packets available")
start_index = self.findIndexByMark(startMarker)
end_index = self.lastPacketIndex() if endMarker is None else self.findIndexByMark(endMarker)
is_ok, expected_that_not_found, packet = self.__verify_packets(expectedPackets,
start_index,
end_index)
if not is_ok:
# @todo print why packet didn't match to expected
raise TestStepError("Packet not found: " + str(expected_that_not_found))
self.logger.debug("verifyPackets success")
# Count number of packets that match the given specifications (packet fields)
# Usage example:
# countPackets( {"IPV6":{"Destination":"fe80::166e:a00:0:2"}} )
def countPackets(self, expectedPacket, startMarker='start', endMarker=None):
if not self.hasPackets():
raise ValueError("Not packets available")
start_index = self.findIndexByMark(startMarker)
end_index = self.lastPacketIndex() if endMarker is None else self.findIndexByMark(endMarker)
count = self.__count_packets(expectedPacket, start_index, end_index)
return count
def findIndexByMark(self, mark):
index = 0
for pck in self.__packets:
if pck.isMark(mark):
return index
index += 1
return None
def FindNext(self, expectedPacket, begIndex, toIndex):
for index in range(begIndex, toIndex+1):
is_ok = NwPacket.verify(self.__packets[index], expectedPacket)
if is_ok:
return True, index, self.__packets[index]
raise LookupError("Not found")
def __verify_packets(self, expectedPackets, startIndex, endIndex):
position = startIndex
for expectedContent in expectedPackets:
try:
is_ok, position, match = self.FindNext(expectedContent, position, endIndex)
if not is_ok:
return False, expectedContent, match
position = position + 1
except LookupError as msg:
# Not found
# print("Not Found: %s" % msg)
return False, expectedContent, None
return True, None, None
def __count_packets(self, expectedContent, startIndex, endIndex):
position = startIndex
count = 0
is_ok = True
try:
while is_ok:
is_ok, position, match = self.FindNext(expectedContent, position, endIndex)
if is_ok:
count = count + 1
position = position + 1
except LookupError:
return count
return count
|
class NwPacketManager(object):
def __init__(self):
pass
def setMarkForHead(self, mark):
pass
def push(self, packet, mark=None):
pass
def count(self):
pass
def getPackets(self):
pass
def lastPacketIndex(self):
pass
def hasPackets(self):
pass
def __getitem__(self, index):
pass
def printPackets(self, startMarker='start', endMarker=None):
pass
def verifyPackets(self, expectedPackets, startMarker='start', endMarker=None):
pass
def countPackets(self, expectedPacket, startMarker='start', endMarker=None):
pass
def findIndexByMark(self, mark):
pass
def FindNext(self, expectedPacket, begIndex, toIndex):
pass
def __verify_packets(self, expectedPackets, startIndex, endIndex):
pass
def __count_packets(self, expectedContent, startIndex, endIndex):
pass
| 16 | 0 | 7 | 1 | 6 | 0 | 2 | 0.23 | 1 | 6 | 2 | 1 | 15 | 4 | 15 | 15 | 140 | 26 | 93 | 41 | 77 | 21 | 91 | 40 | 75 | 4 | 1 | 3 | 35 |
1,965 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/wireshark.py
|
icetea_lib.wireshark.NwPacket
|
class NwPacket(object):
@staticmethod
def verify(packet, expected_values):
lines = packet.packetStr.splitlines()
field_values = dict() # field keys assumed to be unique accross layers
for layer_key, layer_value in expected_values.items():
# support for legacy JSON interface
if re.match(r"^.+\..+", layer_key):
layer_key, field_key = layer_key.split('.')
layer_value = {field_key: layer_value}
layer_found = False
for line in lines:
if not layer_found:
match = re.search(r"^Layer "+layer_key+":", line)
if not match:
continue
layer_found = True
for field_key, field_value in layer_value.items():
if field_key in field_values and field_values[field_key]:
continue
else:
field_values[field_key] = False
# Whole row match (key starts with '*')
if field_key.startswith("*"):
match = re.search(r"(\t)" + field_value, line)
if match:
field_values[field_key] = True
continue
match = re.search(r"(\t|(, )|= )" + field_key + r":[\s]{0,}([^,]*)[,]{0,1}",
line)
if not match:
continue
if re.search(field_value, match.group(3)):
field_values[field_key] = True
else:
continue
if not layer_found:
return False
for field_key, field_value in field_values.items():
if field_value is False:
return False
return True
def __init__(self, packet, mark=None):
self.__marks = []
self.timestamp = datetime.now()
self.packetStr = str(packet)
self.packet = packet
self.appendMark(mark)
def isMark(self, mark):
return mark in self.__marks
def appendMark(self, mark):
if mark:
self.__marks.append(mark)
def __str__(self):
return str(self.packet)
def getMarks(self):
return ','.join(self.__marks)
|
class NwPacket(object):
@staticmethod
def verify(packet, expected_values):
pass
def __init__(self, packet, mark=None):
pass
def isMark(self, mark):
pass
def appendMark(self, mark):
pass
def __str__(self):
pass
def getMarks(self):
pass
| 8 | 0 | 9 | 0 | 9 | 1 | 4 | 0.05 | 1 | 3 | 0 | 0 | 5 | 4 | 6 | 6 | 63 | 6 | 55 | 20 | 47 | 3 | 51 | 19 | 44 | 15 | 1 | 5 | 21 |
1,966 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/arguments.py
|
icetea_lib.arguments.LoadFromFile
|
class LoadFromFile(argparse.Action): # pylint: disable=too-few-public-methods
"""
Action to load more arguments into parser from a file.
"""
def __call__(self, parser, namespace, values, option_string=None):
with values as fil:
data = fil.read().split()
if "--cfg_file" in data:
index = data.index("--cfg_file")
if data[index+1] == fil.name:
del data[index+1]
del data[index]
parser.parse_args(data, namespace)
|
class LoadFromFile(argparse.Action):
'''
Action to load more arguments into parser from a file.
'''
def __call__(self, parser, namespace, values, option_string=None):
pass
| 2 | 1 | 9 | 0 | 9 | 0 | 3 | 0.4 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 8 | 13 | 0 | 10 | 5 | 8 | 4 | 10 | 4 | 8 | 3 | 3 | 3 | 3 |
1,967 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/build/build.py
|
icetea_lib.build.build.BuildFile
|
class BuildFile(Build):
"""
Build as a File
"""
def __init__(self, ref):
"""
Constructor
"""
super(BuildFile, self).__init__(ref=ref, type='file')
sha = sha1_of_file(ref)
self.sha1 = sha if sha else ""
def is_exists(self):
"""
Check if file exists.
:return: Boolean
"""
return os.path.isfile(self._ref)
def get_file(self):
return self._ref if self.is_exists() else None
def _load(self):
"""
Function load.
:return: file contents
:raises: NotFoundError if file not found
"""
if self.is_exists():
return open(self._ref, "rb").read()
raise NotFoundError("File %s not found" % self._ref)
|
class BuildFile(Build):
'''
Build as a File
'''
def __init__(self, ref):
'''
Constructor
'''
pass
def is_exists(self):
'''
Check if file exists.
:return: Boolean
'''
pass
def get_file(self):
pass
def _load(self):
'''
Function load.
:return: file contents
:raises: NotFoundError if file not found
'''
pass
| 5 | 4 | 7 | 1 | 3 | 3 | 2 | 1.15 | 1 | 2 | 1 | 0 | 4 | 1 | 4 | 13 | 34 | 6 | 13 | 7 | 8 | 15 | 13 | 7 | 8 | 2 | 2 | 1 | 7 |
1,968 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/build/build.py
|
icetea_lib.build.build.BuildHttp
|
class BuildHttp(Build):
"""
Build as a Http link
"""
def __init__(self, ref):
super(BuildHttp, self).__init__(ref=ref, type='http')
self.auth = ('user', 'pass')
self.http_verify = False
self.timeout = None
def _load(self):
"""
Function load.
:return: Response content
:raises: NotFoundError
"""
try:
get = requests.get(self._ref,
verify=self.http_verify,
auth=self.auth,
timeout=self.timeout)
except requests.exceptions.RequestException as err:
raise NotFoundError(err)
return get.content
def is_exists(self):
"""
Check if file exists
:return: Boolean
"""
try:
return self._load() is not None
except NotFoundError:
return False
def get_file(self):
"""
Load data into a file and return file path.
:return: path to file as string
"""
content = self._load()
if not content:
return None
filename = "temporary_file.bin"
with open(filename, "wb") as file_name:
file_name.write(content)
return filename
|
class BuildHttp(Build):
'''
Build as a Http link
'''
def __init__(self, ref):
pass
def _load(self):
'''
Function load.
:return: Response content
:raises: NotFoundError
'''
pass
def is_exists(self):
'''
Check if file exists
:return: Boolean
'''
pass
def get_file(self):
'''
Load data into a file and return file path.
:return: path to file as string
'''
pass
| 5 | 4 | 10 | 0 | 7 | 3 | 2 | 0.57 | 1 | 2 | 1 | 0 | 4 | 4 | 4 | 13 | 49 | 5 | 28 | 13 | 23 | 16 | 25 | 11 | 20 | 2 | 2 | 1 | 7 |
1,969 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/build/build.py
|
icetea_lib.build.build.NotFoundError
|
class NotFoundError(Exception):
"""
Not Found Error
"""
pass
|
class NotFoundError(Exception):
'''
Not Found Error
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,970 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/cloud.py
|
icetea_lib.cloud.Cloud
|
class Cloud(object):
"""
Cloud adapter. Originally designed to work with python client for Opentmi.
"""
__version = 0
__api = "/api/v"
@staticmethod
def _convert_to_db_tc_metadata(tc_metadata):
"""
Convert tc_metadata to match Opentmi metadata format
:param tc_metadata: metadata as dict
:return: converted metadata
"""
db_meta = copy.deepcopy(tc_metadata)
# tcid is a mandatory field, it should throw an error if it is missing
db_meta['tcid'] = db_meta['name']
del db_meta['name']
# Encapsulate current status inside dictionary
if 'status' in db_meta:
status = db_meta['status']
del db_meta['status']
db_meta['status'] = {'value': status}
# Node and dut information
if 'requirements' in db_meta:
db_meta['requirements']['node'] = {'count': 1}
try:
count = db_meta['requirements']['duts']['*']['count']
db_meta['requirements']['node']['count'] = count
except KeyError:
pass
# Collect and pack other info from meta
db_meta['other_info'] = {}
if 'title' in db_meta:
db_meta['other_info']['title'] = db_meta['title']
del db_meta['title']
if 'feature' in db_meta:
db_meta['other_info']['features'] = db_meta['feature']
del db_meta['feature']
else:
db_meta['other_info']['features'] = ['unknown']
if 'component' in db_meta:
db_meta['other_info']['components'] = db_meta["component"]
del db_meta['component']
return db_meta
# pylint: disable=too-many-arguments
def __init__(self, host=None, module=None, result_converter=None, tc_converter=None,
logger=None, args=None):
self.args = args
self.logger = logger
# Try to fetch cloud provider from ENV variables
if not module:
module = os.environ.get("ICETEA_CLOUD_PROVIDER", 'opentmi-client')
self.module = __import__(module, globals(), fromlist=[""])
version = get_pkg_version(module)
if self.logger and version is not None:
self.logger.info("using {} version {}".format(module, version))
else:
if self.logger:
self.logger.warning("Unable to parse cloud module version")
# Parse host and port from combined host
if host is None:
host = self._resolve_host()
host, port = self._find_port(host)
# Ensure result converter has an implementation
resconv = result_converter
if resconv is None:
if self.args and self.args.with_logs:
resconv = create_result_object_with_logs
else:
resconv = create_result_object
# Ensure testcase converter has an implementation
tc_conv = tc_converter if tc_converter else self._convert_to_db_tc_metadata
# Setup client
try:
self._client = self.module.create(host, port, resconv, tc_conv)
self._client.set_logger(logger)
except AttributeError:
raise ImportError("Cloud module was imported but it does not "
"contain a method to create a client.")
def _resolve_host(self): # pylint: disable=no-self-use
"""
Resolve cloud provider host name. Defaults to environment variables
OPENTMI_ADDRESS_PRIVATE or OPENTMI_ADDRESS_PUBLIC if environment variable NODE_NAME
starts with 'aws'. Otherwise gets ICETEA_CLOUD_HOST environment variable OR
localhost:3000 if that one does not exist.
:return: Cloud host information
"""
node_name = os.environ.get('NODE_NAME', '')
if node_name.startswith('aws'):
_host = os.environ.get('OPENTMI_ADDRESS_PRIVATE', None)
else:
_host = os.environ.get('OPENTMI_ADDRESS_PUBLIC', None)
if _host is None:
_host = os.environ.get("ICETEA_CLOUD_HOST", "localhost:3000")
return _host
def _find_port(self, host): # pylint: disable=no-self-use
"""
Finds port number from host. Defaults to 3000 if not found
:param host: host as string
:return: (host, port)
"""
ind = host.rfind(":")
if ind != -1:
try:
port = int(host[ind + 1:])
host = host[:ind]
except ValueError:
port = 3000
else:
port = 3000
return host, port
def get_suite(self, suite, options=''):
    """
    Delegate to the cloud client's get_suite.
    :param suite: passed to cloud client
    :param options: passed to cloud client
    :return: _client.get_suite(suite, options)
    """
    client = self._client
    return client.get_suite(suite, options)
def get_campaign_id(self, campaign_name):
    """
    Delegate to the cloud client's get_campaign_id.
    :param campaign_name: passed to cloud client
    :return: _client.get_campaign_id(campaign_name)
    """
    client = self._client
    return client.get_campaign_id(campaign_name)
def get_campaigns(self):
    """
    Delegate to the cloud client's get_campaigns.
    :return: _client.get_campaigns()
    """
    client = self._client
    return client.get_campaigns()
def get_campaign_names(self):
    """
    Delegate to the cloud client's get_campaign_names.
    :return: _client.get_campaign_names()
    """
    client = self._client
    return client.get_campaign_names()
def update_testcase(self, metadata):
    """
    Delegate test case metadata update to the cloud client.
    :param metadata: Test case metadata
    :return: _client.update_testcase(metadata)
    """
    client = self._client
    return client.update_testcase(metadata)
def send_result(self, result):
    """
    Send results to the cloud
    :param result: result dictionary
    :return: response from _client.upload_results(result) or None if something went wrong
    """
    response = self._client.upload_results(result)
    if not response:
        # Upload failed or client was never properly initialized.
        if self.logger is not None:
            self.logger.info("Server didn't respond or client initialization has failed.")
        return None
    if self.logger is not None:
        self.logger.info("Results sent to the server. ID: {}".format(response["_id"]))
    return response
|
class Cloud(object):
'''
Cloud adapter. Originally designed to work with python client for Opentmi.
'''
@staticmethod
def _convert_to_db_tc_metadata(tc_metadata):
'''
Convert tc_metadata to match Opentmi metadata format
:param tc_metadata: metadata as dict
:return: converted metadata
'''
pass
def __init__(self, host=None, module=None, result_converter=None, tc_converter=None,
logger=None, args=None):
pass
def _resolve_host(self):
'''
Resolve cloud provider host name. Defaults to environment variables
OPENTMI_ADDRESS_PRIVATE or OPENTMI_ADDRESS_PUBLIC if environment variable NODE_NAME
starts with 'aws'. Otherwise gets ICETEA_CLOUD_HOST environment variable OR
localhost:3000 if that one does not exist.
:return: Cloud host information
'''
pass
def _find_port(self, host):
'''
Finds port number from host. Defaults to 3000 if not found
:param host: host as string
:return: (host, port)
'''
pass
def get_suite(self, suite, options=''):
'''
Calls cloud client method get_suite
:param suite: passed to cloud client
:param options: passed to cloud client
:return: _client.get_suite(suite, options)
'''
pass
def get_campaign_id(self, campaign_name):
'''
Calls client method get_campaign_id
:param campaign_name: passed to cloud client
:return: _client.get_campaign_id(campaign_name)
'''
pass
def get_campaigns(self):
'''
Calls client method get_campaigns
:return: _client.get_campaigns()
'''
pass
def get_campaign_names(self):
'''
Calls client method get_campaign_names
:return: _client.get_campaign_names()
'''
pass
def update_testcase(self, metadata):
'''
Updates test case metadata with
:param metadata: Test case metadata
:return: _client.update_testcase(metadata)
'''
pass
def send_result(self, result):
'''
Send results to the cloud
:param result: result dictionary
:return: response from _client.upload_results(result) or None if something went wrong
'''
pass
| 12 | 10 | 17 | 2 | 10 | 6 | 3 | 0.6 | 1 | 5 | 0 | 0 | 9 | 4 | 10 | 10 | 187 | 27 | 101 | 31 | 88 | 61 | 92 | 29 | 81 | 9 | 1 | 2 | 31 |
1,971 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/enhancedserial.py
|
icetea_lib.enhancedserial.EnhancedSerial
|
class EnhancedSerial(Serial):  # pylint: disable=too-many-ancestors
    """
    EnhancedSerial class, based on the examples in pyserial project.

    Adds thread-safe, line-oriented reading with timeouts on top of
    serial.Serial, plus break handling that works with both the
    pyserial 2.x and 3.x APIs.
    """

    def __init__(self, *args, **kwargs):
        """
        Initialize the port, forcing a reasonable read timeout
        (minimum 0.01 s, defaulting to 0.1 s).
        """
        # ensure that a reasonable timeout is set
        timeout = kwargs.get('timeout', 0.1)
        if timeout < 0.01:
            timeout = 0.1
        kwargs['timeout'] = timeout
        Serial.__init__(self, *args, **kwargs)
        self.buffer_lock = Lock()  # protects self.buf across threads
        self.buf = ''  # holds the incomplete line remainder between reads
        self.re_float = re.compile(r"^\d+\.\d+")
        self.pyserial_version = self.get_pyserial_version()
        self.is_pyserial_v3 = self.pyserial_version >= 3.0

    def get_pyserial_version(self):
        """! Retrieve pyserial module version
        @return Returns float with pyserial module number
        """
        pyserial_version = pkg_resources.require("pyserial")[0].version
        version = 3.0
        match = self.re_float.search(pyserial_version)
        if match:
            try:
                version = float(match.group(0))
            except ValueError:
                version = 3.0  # We will assume you've got latest (3.0+)
        return version

    def safe_sendBreak(self):  # pylint: disable=invalid-name
        """
        ! Closure for pyserial version dependant API calls
        """
        if self.is_pyserial_v3:
            return self._safe_sendBreak_v3_0()
        return self._safe_sendBreak_v2_7()

    def _safe_sendBreak_v2_7(self):  # pylint: disable=invalid-name
        """! pyserial 2.7 API implementation of sendBreak/setBreak
        @details
        Below API is deprecated for pyserial 3.x versions!
        http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.sendBreak
        http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.setBreak
        """
        result = True
        try:
            self.sendBreak()
        except:  # pylint: disable=bare-except
            # In Linux a termios.error is raised in sendBreak and in setBreak.
            # The following setBreak() is needed to release the reset signal on the target mcu.
            try:
                self.setBreak(False)
            except:  # pylint: disable=bare-except
                result = False
        return result

    def _safe_sendBreak_v3_0(self):  # pylint: disable=invalid-name
        """! pyserial 3.x API implementation of send_brea / break_condition
        @details
        http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.send_break
        http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.break_condition
        """
        result = True
        try:
            self.send_break()
        except:  # pylint: disable=bare-except
            # In Linux a termios.error is raised in sendBreak and in setBreak.
            # The following break_condition = False is needed to
            # release the reset signal on the target mcu.
            self.break_condition = False
        return result

    def readline(self, timeout=1):
        """
        maxsize is ignored, timeout in seconds is the max time that is way for a complete line
        """
        tries = 0
        while 1:
            try:
                block = self.read(512)
                if isinstance(block, bytes):
                    block = block.decode()
                elif isinstance(block, str):
                    # Bugfix: already text, nothing to decode. The previous
                    # code called str.decode() here, which raises
                    # AttributeError on Python 3.
                    pass
                else:
                    raise ValueError("Unknown data")
            except SerialTimeoutException:
                # Exception that is raised on write timeouts.
                block = ''
            except SerialException:
                # In case the device can not be found or can not be configured.
                block = ''
            except ValueError:
                # Will be raised when parameter are out of range, e.g. baud rate, data bits.
                # UnicodeError-Raised when a Unicode-related encoding or
                # decoding error occurs. It is a subclass of ValueError.
                block = ''
            with self.buffer_lock:
                # Let's lock, just in case
                self.buf += block
                pos = self.buf.find('\n')
                if pos >= 0:
                    # A complete line is available: split it off the buffer.
                    line, self.buf = self.buf[:pos + 1], self.buf[pos + 1:]
                    return line
            tries += 1
            if tries * self.timeout > timeout:
                break
        return None

    def peek(self):
        """
        Peek into the buffer to see if there are unfinished lines of data available.
        :return: str
        """
        with self.buffer_lock:
            # Let's lock, just in case.
            return self.buf

    def readlines(self, timeout=1):
        """
        read all lines that are available. abort after timeout
        when no more data arrives.
        """
        lines = []
        while 1:
            line = self.readline(timeout=timeout)
            if line:
                lines.append(line)
            if not line or line[-1:] != '\n':
                break
        return lines
|
class EnhancedSerial(Serial):
'''
EnhancedSerial class, based on the examples in pyserial project.
'''
def __init__(self, *args, **kwargs):
pass
def get_pyserial_version(self):
'''! Retrieve pyserial module version
@return Returns float with pyserial module number
'''
pass
def safe_sendBreak(self):
'''
! Closure for pyserial version dependant API calls
'''
pass
def _safe_sendBreak_v2_7(self):
'''! pyserial 2.7 API implementation of sendBreak/setBreak
@details
Below API is deprecated for pyserial 3.x versions!
http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.sendBreak
http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.setBreak
'''
pass
def _safe_sendBreak_v3_0(self):
'''! pyserial 3.x API implementation of send_brea / break_condition
@details
http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.send_break
http://pyserial.readthedocs.org/en/latest/pyserial_api.html#serial.Serial.break_condition
'''
pass
def readline(self, timeout=1):
'''
maxsize is ignored, timeout in seconds is the max time that is way for a complete line
'''
pass
def peek(self):
'''
Peek into the buffer to see if there are unfinished lines of data available.
:return: str
'''
pass
def readlines(self, timeout=1):
'''
read all lines that are available. abort after timeout
when no more data arrives.
'''
pass
| 9 | 8 | 15 | 0 | 10 | 6 | 3 | 0.63 | 1 | 4 | 0 | 0 | 8 | 6 | 8 | 8 | 134 | 8 | 82 | 27 | 73 | 52 | 80 | 27 | 71 | 9 | 1 | 3 | 26 |
1,972 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/timer.py
|
icetea_lib.timer.Timeout.Timeout
|
class Timeout(object):  # pylint: disable=too-few-public-methods
    """
    Context manager that raises Timeout.Timeout after `sec` seconds,
    implemented with the POSIX alarm (SIGALRM) signal.
    """

    class Timeout(Exception):
        """
        Internal Timeout exception
        """
        pass

    def __init__(self, sec):
        # Timeout in whole seconds (signal.alarm() resolution).
        self.sec = sec

    def __enter__(self):
        # Bugfix: signal.alarm() delivers SIGALRM, not SIGABRT. The handler
        # must be installed for SIGALRM, otherwise the alarm's default
        # action would terminate the process instead of raising Timeout.
        signal.signal(signal.SIGALRM, self.raise_timeout)
        signal.alarm(self.sec)

    def __exit__(self, *args):
        signal.alarm(0)  # disable alarm

    def raise_timeout(self, *args):  # pylint: disable=unused-argument,no-self-use
        """
        Raise a Timeout exception
        :param args: Not used
        :return: Nothing
        :raises: Timeout
        """
        raise Timeout.Timeout()
|
class Timeout(object):
'''
Timeout class using ALARM signal.
'''
class Timeout(object):
'''
Internal Timeout exception
'''
def __init__(self, sec):
pass
def __enter__(self):
pass
def __exit__(self, *args):
pass
def raise_timeout(self, *args):
'''
Raise a Timeout exception
:param args: Not used
:return: Nothing
:raises: Timeout
'''
pass
| 6 | 3 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,973 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_commands.py
|
test.test_bench_commands.MockReq
|
class MockReq(object):
    """Minimal stand-in for a request object used by the bench command tests."""

    def __init__(self, response=None):
        # Falsy responses normalize to None, as in the original
        # `response if response else None` expression.
        self.response = response or None
        self.dut_index = 1
        self.cmd = "test_command"
|
class MockReq(object):
def __init__(self, response=None):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 3 | 1 | 1 | 5 | 0 | 5 | 5 | 3 | 0 | 5 | 5 | 3 | 2 | 1 | 0 | 2 |
1,974 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/timer.py
|
icetea_lib.timer.Timer
|
class Timer(Thread):
    """
    Simple timer thread: sleeps for a given number of seconds when started.
    """

    def __init__(self):
        super(Timer, self).__init__()
        # Sleep duration in seconds; assigned by wait() before start().
        self.timeout = None

    def wait(self, timeout):
        """
        Start the timer for `timeout` seconds, then give a 5 second grace
        period to join the thread.
        :param timeout: Duration for timer.
        :return: Nothing
        """
        self.timeout = timeout
        self.start()
        self.join(5)

    def run(self):
        """
        Sleep for self.timeout seconds.
        :return:
        """
        time.sleep(self.timeout)
|
class Timer(Thread):
'''
Timer class, simple timer that sleeps for timeout seconds.
'''
def __init__(self):
pass
def wait(self, timeout):
'''
Starts the Timer for timeout seconds, then gives 5 second grace period to join the thread.
:param timeout: Duration for timer.
:return: Nothing
'''
pass
def run(self):
'''
Just sleep for self.timeout seconds.
:return:
'''
pass
| 4 | 3 | 6 | 0 | 3 | 3 | 1 | 1.2 | 1 | 1 | 0 | 0 | 3 | 1 | 3 | 28 | 24 | 2 | 10 | 5 | 6 | 12 | 10 | 5 | 6 | 1 | 1 | 0 | 3 |
1,975 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/GenericProcess.py
|
icetea_lib.tools.GenericProcess.NonBlockingStreamReader
|
class NonBlockingStreamReader(object):
    """
    Implementation for a non-blocking stream reader.

    All instances share one daemon reader thread (class-level state) that
    polls the registered streams and splits incoming data into lines.
    """

    # Shared class-level state: one reader thread services every stream.
    _instance = None      # NOTE(review): never assigned in this class — appears unused
    _streams = None       # list of StreamDescriptor objects being serviced
    _stream_mtx = None    # Lock guarding mutation of _streams
    _rt = None            # the shared reader Thread
    _run_flag = False     # reader thread loops while this is True

    def __init__(self, stream, callback=None):
        # Global class variables: lazily start the shared reader thread
        # on first instantiation.
        if NonBlockingStreamReader._rt is None:
            NonBlockingStreamReader._streams = []
            NonBlockingStreamReader._stream_mtx = Lock()
            NonBlockingStreamReader._run_flag = True
            NonBlockingStreamReader._rt = Thread(target=NonBlockingStreamReader.run)
            NonBlockingStreamReader._rt.setDaemon(True)
            NonBlockingStreamReader._rt.start()
        # Instance variables
        self._descriptor = StreamDescriptor(stream, callback)

    def start(self):
        """
        Start the reader, acquires the global lock before appending the descriptor on the stream.
        Releases the lock afterwards.
        :return: Nothing
        """
        NonBlockingStreamReader._stream_mtx.acquire()
        NonBlockingStreamReader._streams.append(self._descriptor)
        NonBlockingStreamReader._stream_mtx.release()

    @staticmethod
    def _get_sd(file_descr):
        """
        Get streamdescriptor matching file_descr fileno.
        :param file_descr: file object
        :return: StreamDescriptor or None
        """
        for stream_descr in NonBlockingStreamReader._streams:
            if file_descr == stream_descr.stream.fileno():
                return stream_descr
        return None

    @staticmethod
    def _read_fd(file_descr):
        """
        Read incoming data from file handle.
        Then find the matching StreamDescriptor by file_descr value.
        :param file_descr: file object
        :return: Return number of bytes read
        """
        try:
            line = os.read(file_descr, 1024 * 1024)
        except OSError:
            # Read failed: flag the descriptor and notify via callback.
            stream_desc = NonBlockingStreamReader._get_sd(file_descr)
            if stream_desc is not None:
                stream_desc.has_error = True
                if stream_desc.callback is not None:
                    stream_desc.callback()
            return 0
        if line:
            stream_desc = NonBlockingStreamReader._get_sd(file_descr)
            if stream_desc is None:
                return 0  # Process closing
            if IS_PYTHON3:
                try:
                    # @TODO: further develop for not ascii/unicode binary content
                    line = line.decode("ascii")
                except UnicodeDecodeError:
                    line = repr(line)
            stream_desc.buf += line
            # Break lines
            split = stream_desc.buf.split(os.linesep)
            for line in split[:-1]:
                stream_desc.read_queue.appendleft(strip_escape(line.strip()))
                if stream_desc.callback is not None:
                    stream_desc.callback()
            # Store the remainded, its either '' if last char was '\n'
            # or remaining buffer before line end
            stream_desc.buf = split[-1]
            return len(line)
        return 0

    @staticmethod
    def _read_select_poll(poll):
        """
        Read PIPEs using select.poll() method
        Available on Linux and some Unixes
        """
        npipes = len(NonBlockingStreamReader._streams)
        for stream_descr in NonBlockingStreamReader._streams:
            if not stream_descr.has_error:
                poll.register(stream_descr.stream,
                              select.POLLIN | select.POLLERR | select.POLLHUP | select.POLLNVAL)

        while NonBlockingStreamReader._run_flag:
            for (file_descr, event) in poll.poll(500):
                if event == select.POLLIN:
                    NonBlockingStreamReader._read_fd(file_descr)
                else:
                    # Because event != select.POLLIN, the pipe is closed
                    # but we still want to read all bytes
                    while NonBlockingStreamReader._read_fd(file_descr) != 0:
                        pass
                    # Dut died, signal the processing thread so it notices that no lines coming in
                    stream_descr = NonBlockingStreamReader._get_sd(file_descr)
                    if stream_descr is None:
                        return  # PIPE closed but DUT already disappeared
                    stream_descr.has_error = True
                    if stream_descr.callback is not None:
                        stream_descr.callback()
                    return  # Force poll object to reregister only alive descriptors
            # Check if new pipes added, don't need mutext just for reading the size
            # If we will not get it right now, we will at next time
            if npipes != len(NonBlockingStreamReader._streams):
                return

    @staticmethod
    def _read_select_kqueue(k_queue):
        """
        Read PIPES using BSD Kqueue
        """
        npipes = len(NonBlockingStreamReader._streams)
        # Create list of kevent objects
        # pylint: disable=no-member
        kevents = [select.kevent(s.stream.fileno(),
                                 filter=select.KQ_FILTER_READ,
                                 flags=select.KQ_EV_ADD | select.KQ_EV_ENABLE)
                   for s in NonBlockingStreamReader._streams]
        while NonBlockingStreamReader._run_flag:
            events = k_queue.control(kevents, npipes, 0.5)  # Wake up twice in second
            for event in events:
                if event.filter == select.KQ_FILTER_READ:  # pylint: disable=no-member
                    NonBlockingStreamReader._read_fd(event.ident)
            # Check if new pipes added.
            if npipes != len(NonBlockingStreamReader._streams):
                return

    @staticmethod
    def run():
        """
        Run loop of the shared reader thread: pick a polling mechanism
        supported by this OS and service all registered streams.
        """
        while NonBlockingStreamReader._run_flag:
            # Wait for streams to appear
            if not NonBlockingStreamReader._streams:
                time.sleep(0.2)
                continue
            # Try to get correct select/poll method for this OS
            # Try if select.poll() is supported (Linux/UNIX)
            try:
                poll = select.poll()
            except AttributeError:
                pass
            else:
                NonBlockingStreamReader._read_select_poll(poll)
                del poll
                continue
            # Try is select.kqueue is supported (BSD/OS X)
            try:
                k_queue = select.kqueue()  # pylint: disable=no-member
            except AttributeError:
                pass
            else:
                NonBlockingStreamReader._read_select_kqueue(k_queue)
                k_queue.close()
                continue
            # Not workable polling method found
            raise RuntimeError('This OS is not supporting select.poll() or select.kqueue()')

    def stop(self):
        """
        Stop the reader; shuts the shared thread down when the last
        stream is removed.
        """
        # print('stopping NonBlockingStreamReader..')
        # print('acquire..')
        NonBlockingStreamReader._stream_mtx.acquire()
        # print('acquire..ok')
        NonBlockingStreamReader._streams.remove(self._descriptor)
        if not NonBlockingStreamReader._streams:
            NonBlockingStreamReader._run_flag = False
        # print('release..')
        NonBlockingStreamReader._stream_mtx.release()
        # print('release..ok')
        if NonBlockingStreamReader._run_flag is False:
            # print('join..')
            NonBlockingStreamReader._rt.join()
            # print('join..ok')
            del NonBlockingStreamReader._rt
            NonBlockingStreamReader._rt = None
        # print('stopping NonBlockingStreamReader..ok')

    def has_error(self):
        """
        :return: Boolean, True if _descriptor.has_error is True. False otherwise
        """
        return self._descriptor.has_error

    def readline(self):
        """
        Readline implementation.
        :return: popped line from descriptor queue. None if nothing found
        :raises: RuntimeError if errors happened while reading PIPE
        """
        try:
            return self._descriptor.read_queue.pop()
        except IndexError:
            # No lines in queue
            if self.has_error():
                raise RuntimeError("Errors reading PIPE")
            return None
|
class NonBlockingStreamReader(object):
'''
Implementation for a non-blocking stream reader.
'''
def __init__(self, stream, callback=None):
pass
def start(self):
'''
Start the reader, acquires the global lock before appending the descriptor on the stream.
Releases the lock afterwards.
:return: Nothing
'''
pass
@staticmethod
def _get_sd(file_descr):
'''
Get streamdescriptor matching file_descr fileno.
:param file_descr: file object
:return: StreamDescriptor or None
'''
pass
@staticmethod
def _read_fd(file_descr):
'''
Read incoming data from file handle.
Then find the matching StreamDescriptor by file_descr value.
:param file_descr: file object
:return: Return number of bytes read
'''
pass
@staticmethod
def _read_select_poll(poll):
'''
Read PIPEs using select.poll() method
Available on Linux and some Unixes
'''
pass
@staticmethod
def _read_select_kqueue(k_queue):
'''
Read PIPES using BSD Kqueue
'''
pass
@staticmethod
def run():
'''
Run loop
'''
pass
def stop(self):
'''
Stop the reader
'''
pass
def has_error(self):
'''
:return: Boolean, True if _descriptor.has_error is True. False otherwise
'''
pass
def readline(self):
'''
Readline implementation.
:return: popped line from descriptor queue. None if nothing found
:raises: RuntimeError if errors happened while reading PIPE
'''
pass
| 16 | 10 | 19 | 1 | 12 | 7 | 4 | 0.56 | 1 | 7 | 1 | 0 | 5 | 1 | 10 | 10 | 218 | 17 | 133 | 35 | 117 | 74 | 123 | 30 | 112 | 10 | 1 | 4 | 43 |
1,976 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/GenericProcess.py
|
icetea_lib.tools.GenericProcess.StreamDescriptor
|
class StreamDescriptor(object):  # pylint: disable=too-few-public-methods
    """
    Container bundling a stream with its read buffer, line queue,
    error flag and new-line callback.
    """

    def __init__(self, stream, callback):
        self.stream = stream                        # underlying file-like object
        self.buf = ""                               # partial-line accumulator
        self.read_queue = deque()                   # pylint: disable=invalid-name
        self.has_error = False                      # set True once reading fails
        self.callback = callback                    # invoked when new lines arrive
|
class StreamDescriptor(object):
'''
StreamDescriptor class, container for stream components.
'''
def __init__(self, stream, callback):
pass
| 2 | 1 | 6 | 0 | 6 | 1 | 1 | 0.71 | 1 | 0 | 0 | 0 | 1 | 5 | 1 | 1 | 10 | 0 | 7 | 7 | 5 | 5 | 7 | 7 | 5 | 1 | 1 | 0 | 1 |
1,977 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/HTTP/Api.py
|
icetea_lib.tools.HTTP.Api.HttpApi
|
class HttpApi(object):
    """
    Thin wrapper around the requests library that carries a base host,
    default headers and an optional client certificate, and logs every
    request and response.
    """

    # pylint: disable=invalid-name
    def __init__(self, host, defaultHeaders=None, cert=None, logger=None):
        """
        :param host: base address of the HTTP service
        :param defaultHeaders: dict of headers attached to every request
        :param cert: path to a .pem certificate file or ('cert', 'key') tuple
        :param logger: logger to use; a default one is created when None
        """
        self.logger = initLogger("HttpApi") if logger is None else logger
        self.defaultHeaders = {} if defaultHeaders is None else defaultHeaders
        self.host = host
        self.cert = cert
        self.logger.info("HttpApi initialized")

    def set_logger(self, logger):
        """
        Sets a custom logger that is to be used with the HttpApi class.
        :param logger: custom logger to use to log HttpApi log messages
        :return: Nothing
        """
        self.logger = logger

    def set_header(self, key, value):
        """
        Sets a new value for a header field in defaultHeader.
        Replaces old value if the key already exists.
        :param key: HTTP header key name
        :param value: HTTP header key value
        :return: Nothing, modifies defaultHeaders in place
        """
        self.defaultHeaders[key] = value

    def set_cert(self, cert):
        """
        Setter for certificate field. Valid values are either a string
        containing path to certificate .pem file or Tuple, ('cert', 'key') pair.
        :param cert: Valid values are either a string containing path to certificate .pem file
        or Tuple, ('cert', 'key') pair.
        :return: Nothing, modifies field in place
        """
        self.cert = cert

    def set_host(self, host):
        """
        Setter for host parameter
        :param host: address of HTTP service
        :return: Nothing, modifies field in place
        """
        self.host = host

    def _prepare_kwargs(self, headers, kwargs):
        """
        Merge default headers with request-specific ones and attach the
        client certificate. Shared by all request methods (deduplicates
        the former copy-pasted blocks in get/post/put/delete/patch).
        :param headers: request-specific headers dict or None
        :param kwargs: keyword-argument dict for requests; modified in place
        :return: kwargs, for call-chaining convenience
        """
        if headers is not None:
            # jsonmerge so nested header structures merge according to SCHEMA
            merger = jsonmerge.Merger(SCHEMA)
            kwargs["headers"] = merger.merge(self.defaultHeaders, headers)
        else:
            kwargs["headers"] = self.defaultHeaders
        if self.cert is not None:
            kwargs["cert"] = self.cert
        return kwargs

    def _send(self, method, url, *args, **kwargs):
        """
        Perform the actual HTTP call via `method`, logging the response
        or the raised RequestException (then re-raising it).
        :param method: requests module function (requests.get, requests.post, ...)
        :param url: full request url
        :return: requests.Response
        :raises: RequestException
        """
        try:
            resp = method(url, *args, **kwargs)
            self._log_response(resp)
        except requests.RequestException as error:
            self._log_exception(error)
            raise
        return resp

    def get(self, path, headers=None, params=None, **kwargs):
        """
        Sends a GET request to host/path.
        :param path: String, Resource path on server
        :param params: Dictionary of parameters to be added to URL
        :param headers: Dictionary of HTTP headers to be sent with the request,
        overwrites default headers if there is overlap
        :param kwargs: Other arguments used in the requests.request call
        valid parameters in kwargs are the optional parameters of Requests.Request
        http://docs.python-requests.org/en/master/api/
        :return: requests.Response
        :raises: RequestException
        """
        self._prepare_kwargs(headers, kwargs)
        if params is None:
            params = {}
        url = combine_urls(self.host, path)
        self.logger.debug(
            "Trying to send HTTP GET to {0}{1}".format(url,
                                                       "?" + urllib.urlencode(
                                                           params,
                                                           doseq=True) if params else ''))
        return self._send(requests.get, url, params, **kwargs)

    def post(self, path, data=None, json=None, headers=None, **kwargs):
        """
        Sends a POST request to host/path.
        :param path: String, resource path on server
        :param data: Dictionary, bytes or file-like object to send in the body of the request
        :param json: JSON formatted data to send in the body of the request
        :param headers: Dictionary of HTTP headers to be sent with the request,
        overwrites default headers if there is overlap
        :param kwargs: Other arguments used in the requests.request call
        valid parameters in kwargs are the optional parameters of Requests.Request
        http://docs.python-requests.org/en/master/api/
        :return: requests.Response
        :raises: RequestException
        """
        self._prepare_kwargs(headers, kwargs)
        url = combine_urls(self.host, path)
        self.logger.debug("Trying to send HTTP POST to {}".format(url))
        return self._send(requests.post, url, data, json, **kwargs)

    def put(self, path, data=None, headers=None, **kwargs):
        """
        Sends a PUT request to host/path.
        :param path: String, resource path on server
        :param data: Dictionary, bytes or file-like object to send in the body of the request
        :param headers: Dictionary of HTTP headers to be sent with the request,
        overwrites default headers if there is overlap
        :param kwargs: Other arguments used in the requests.request call
        valid parameters in kwargs are the optional parameters of Requests.
        Request http://docs.python-requests.org/en/master/api/
        :return: requests.Response
        :raises: RequestException
        """
        self._prepare_kwargs(headers, kwargs)
        url = combine_urls(self.host, path)
        self.logger.debug("Trying to send HTTP PUT to {}".format(url))
        return self._send(requests.put, url, data, **kwargs)

    def delete(self, path, headers=None, **kwargs):
        """
        Sends a DELETE request to host/path.
        :param path: String, resource path on server
        :param headers: Dictionary of HTTP headers to be sent with the request,
        overwrites default headers if there is overlap
        :param kwargs: Other arguments used in the requests.request call
        valid parameters in kwargs are the optional parameters of Requests.Request
        http://docs.python-requests.org/en/master/api/
        :return: requests.Response
        :raises: RequestException
        """
        self._prepare_kwargs(headers, kwargs)
        url = combine_urls(self.host, path)
        self.logger.debug("Trying to send HTTP DELETE to {}".format(url))
        return self._send(requests.delete, url, **kwargs)

    def patch(self, path, data=None, headers=None, **kwargs):
        """
        Sends a PATCH request to host/path.
        :param path: String, resource path on server
        :param data: Data as a dictionary, bytes, or file-like object to
        send in the body of the request.
        :param headers: Dictionary of HTTP headers to be sent with the request,
        overwrites default headers if there is overlap
        :param kwargs: Other arguments used in the requests.request call
        valid parameters in kwargs are the optional parameters of Requests.Request
        http://docs.python-requests.org/en/master/api/
        :return: requests.Response
        :raises: RequestException
        """
        self._prepare_kwargs(headers, kwargs)
        url = combine_urls(self.host, path)
        self.logger.debug("Trying to send HTTP PATCH to {}".format(url))
        return self._send(requests.patch, url, data=data, **kwargs)

    # pylint: disable=len-as-condition
    def _log_response(self, resp):
        """Debug-log url, headers, status and (best-effort) payload of a response."""
        self.logger.debug("Request url: %s", resp.request.url)
        self.logger.debug("Request headers: %s", resp.request.headers)
        self.logger.debug("Server responded with %d", resp.status_code)
        self.logger.debug("Response headers: %s", resp.headers)
        if hasattr(resp, "content") and len(resp.content) > 0:
            try:
                json_content = json.loads(resp.content)
                self.logger.debug("Response content: {}".format(json_content))
            except ValueError:
                # Not JSON: log raw bytes/text as best we can.
                if isinstance(resp.content, binary_type):
                    try:
                        self.logger.debug("Response payload: {}".format(resp.content))
                    except UnicodeDecodeError:
                        self.logger.debug("Response payload: {}".format(repr(resp.content)))
                elif isinstance(resp.content, string_types):
                    self.logger.debug("Response payload: {}".format(resp.content.decode("utf-8")))
                else:
                    self.logger.debug("Unable to parse response payload")

    def _log_exception(self, exception):
        """Error-log the request details attached to a RequestException."""
        if hasattr(exception, "request") and exception.request:
            self.logger.debug("Request url: {}".format(exception.request.url))
            self.logger.error("Request headers: {}".format(exception.request.headers))
            if hasattr(exception.request, "data"):
                if exception.request.data and len(exception.request.data) > 0:
                    self.logger.error("Request payload {}".format(exception.request.data))
        self.logger.error("Exception when performing request: {}".format(exception))
|
class HttpApi(object):
def __init__(self, host, defaultHeaders=None, cert=None, logger=None):
pass
def set_logger(self, logger):
'''
Sets a custom logger that is to be used with the HttpApi class.
:param logger: custom logger to use to log HttpApi log messages
:return: Nothing
'''
pass
def set_header(self, key, value):
'''
Sets a new value for a header field in defaultHeader.
Replaces old value if the key already exists.
:param key: HTTP header key name
:param value:HTTP header key value
:return: Nothing, modifies defaultHeaders in place
'''
pass
def set_cert(self, cert):
'''
Setter for certificate field. Valid values are either a string
containing path to certificate .pem file or Tuple, ('cert', 'key') pair.
:param cert: Valid values are either a string containing path to certificate .pem file
or Tuple, ('cert', 'key') pair.
:return: Nothing, modifies field in place
'''
pass
def set_host(self, host):
'''
Setter for host parameter
:param host: address of HTTP service
:return: Nothing, modifies field in place
'''
pass
def get(self, path, headers=None, params=None, **kwargs):
'''
Sends a GET request to host/path.
:param path: String, Resource path on server
:param params: Dictionary of parameters to be added to URL
:param headers: Dictionary of HTTP headers to be sent with the request,
overwrites default headers if there is overlap
:param kwargs: Other arguments used in the requests.request call
valid parameters in kwargs are the optional parameters of Requests.Request
http://docs.python-requests.org/en/master/api/
:return: requests.Response
:raises: RequestException
'''
pass
def post(self, path, data=None, json=None, headers=None, **kwargs):
'''
Sends a POST request to host/path.
:param path: String, resource path on server
:param data: Dictionary, bytes or file-like object to send in the body of the request
:param json: JSON formatted data to send in the body of the request
:param headers: Dictionary of HTTP headers to be sent with the request,
overwrites default headers if there is overlap
:param kwargs: Other arguments used in the requests.request call
valid parameters in kwargs are the optional parameters of Requests.Request
http://docs.python-requests.org/en/master/api/
:return: requests.Response
:raises: RequestException
'''
pass
def put(self, path, data=None, headers=None, **kwargs):
'''
Sends a PUT request to host/path.
:param path: String, resource path on server
:param data: Dictionary, bytes or file-like object to send in the body of the request
:param headers: Dictionary of HTTP headers to be sent with the request,
overwrites default headers if there is overlap
:param kwargs: Other arguments used in the requests.request call
valid parameters in kwargs are the optional parameters of Requests.
Request http://docs.python-requests.org/en/master/api/
:return: requests.Response
:raises: RequestException
'''
pass
def delete(self, path, headers=None, **kwargs):
'''
Sends a DELETE request to host/path.
:param path: String, resource path on server
:param headers: Dictionary of HTTP headers to be sent with the request,
overwrites default headers if there is overlap
:param kwargs: Other arguments used in the requests.request call
valid parameters in kwargs are the optional parameters of Requests.Request
http://docs.python-requests.org/en/master/api/
:return: requests.Response
:raises: RequestException
'''
pass
def patch(self, path, data=None, headers=None, **kwargs):
'''
Sends a PATCH request to host/path.
:param path: String, resource path on server
:param data: Data as a dictionary, bytes, or file-like object to
send in the body of the request.
:param headers: Dictionary of HTTP headers to be sent with the request,
overwrites default headers if there is overlap
:param kwargs: Other arguments used in the requests.request call
valid parameters in kwargs are the optional parameters of Requests.Request
http://docs.python-requests.org/en/master/api/
:return: requests.Response
:raises: RequestException
'''
pass
def _log_response(self, resp):
pass
def _log_exception(self, exception):
pass
| 13 | 9 | 21 | 2 | 11 | 7 | 3 | 0.65 | 1 | 3 | 0 | 1 | 12 | 4 | 12 | 12 | 260 | 40 | 133 | 38 | 120 | 87 | 122 | 33 | 109 | 6 | 1 | 4 | 39 |
1,978 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/NodeEndPoint.py
|
icetea_lib.tools.NodeEndPoint.NodeEndPoint
|
class NodeEndPoint(object): # pylint: disable=too-few-public-methods
"""
Wrapper for a dut, contains a shortcut to send commands to this dut specifically.
"""
def __init__(self, bench, endpoint_id):
self.bench = bench
self.endpoint_id = endpoint_id
def command(self, cmd, expected_retcode=0): # pylint: disable=invalid-name
# expected_retcode kwd argument is used in many test cases, we cannot rename it.
"""
Shortcut for sending a command to this node specifically.
:param cmd: Command to send
:param expected_retcode: Expected return code as int, default is 0
:return: CliResponse
"""
return self.bench.execute_command(self.endpoint_id, cmd, expected_retcode=expected_retcode)
|
class NodeEndPoint(object):
'''
Wrapper for a dut, contains a shortcut to send commands to this dut specifically.
'''
def __init__(self, bench, endpoint_id):
pass
def command(self, cmd, expected_retcode=0):
'''
Shortcut for sending a command to this node specifically.
:param cmd: Command to send
:param expected_retcode: Expected return code as int, default is 0
:return: CliResponse
'''
pass
| 3 | 2 | 6 | 0 | 3 | 4 | 1 | 2 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 2 | 18 | 2 | 6 | 5 | 3 | 12 | 6 | 5 | 3 | 1 | 1 | 0 | 2 |
1,979 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/file/FileUtils.py
|
icetea_lib.tools.file.FileUtils.LockFile
|
class LockFile(object):
"""
Simple lock class for locking a file.
"""
def __init__(self, filename):
self._lock_filename = filename + ".lock"
self._lock_fd = None
def acquire(self):
"""
Acquire the lock.
:return: Boolean.
"""
try:
self._lock_fd = os.open(self._lock_filename, os.O_CREAT | os.O_EXCL)
except OSError:
return False
return True
def release(self):
"""
Release the lock.
:return: Nothing
"""
if self._lock_fd is not None:
os.close(self._lock_fd)
try:
os.remove(self._lock_filename)
except OSError:
# Hmm, someone deleted our lock while we had it locked?
# Nothing we can do, so just continue
pass
|
class LockFile(object):
'''
Simple lock class for locking a file.
'''
def __init__(self, filename):
pass
def acquire(self):
'''
Acquire the lock.
:return: Boolean.
'''
pass
def release(self):
'''
Release the lock.
:return: Nothing
'''
pass
| 4 | 3 | 9 | 1 | 5 | 3 | 2 | 0.76 | 1 | 1 | 0 | 0 | 3 | 2 | 3 | 3 | 34 | 4 | 17 | 6 | 13 | 13 | 17 | 6 | 13 | 3 | 1 | 2 | 6 |
1,980 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/file/SessionFiles.py
|
icetea_lib.tools.file.SessionFiles.JsonFile
|
class JsonFile(object):
"""
JsonFile class, for generating and handling json files.
"""
def __init__(self, logger=None, filepath=None, filename=None):
self.logger = logger if logger else initLogger("JsonFile")
self.filepath = filepath if filepath else os.path.sep
self.filename = filename if filename else "default_file.json"
def write_file(self, content, filepath=None, filename=None, indent=None, keys_to_write=None):
'''
Write a Python dictionary as JSON to a file.
:param content: Dictionary of key-value pairs to save to a file
:param filepath: Path where the file is to be created
:param filename: Name of the file to be created
:param indent: You can use this to specify indent level for pretty printing the file
:param keys_to_write: array of keys that are to be picked from data and written to file.
Default is None, when all data is written to file.
:return: Path of file used
:raises OSError, EnvironmentError, ValueError
'''
path = filepath if filepath else self.filepath
name = filename if filename else self.filename
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError as error:
self.logger.error("Error while creating directory: {}".format(error))
raise
name = self._ends_with(name, ".json")
path = self._ends_with(path, os.path.sep)
if keys_to_write:
data_to_write = {}
for key in keys_to_write:
data_to_write[key] = content[key]
else:
data_to_write = content
try:
indent = indent if indent else 2
self._write_json(path, name, 'w', data_to_write, indent)
return os.path.join(path, name)
except EnvironmentError as error:
self.logger.error("Error while opening or writing to file: {}".format(error))
raise
except ValueError:
raise
def read_file(self, filepath=None, filename=None):
"""
Tries to read JSON content from filename and convert it to a dict.
:param filepath: Path where the file is
:param filename: File name
:return: Dictionary read from the file
:raises EnvironmentError, ValueError
"""
name = filename if filename else self.filename
path = filepath if filepath else self.filepath
name = self._ends_with(name, ".json")
path = self._ends_with(path, os.path.sep)
try:
return self._read_json(path, name)
except EnvironmentError as error:
self.logger.error("Error while opening or reading the file: {}".format(error))
raise
except ValueError as error:
self.logger.error("File contents cannot be decoded to JSON: {}".format(error))
raise
def read_value(self, key, filepath=None, filename=None):
"""
Tries to read the value of given key from JSON file filename.
:param filepath: Path to file
:param filename: Name of file
:param key: Key to search for
:return: Value corresponding to given key
:raises OSError, EnvironmentError, KeyError
"""
path = filepath if filepath else self.filepath
name = filename if filename else self.filename
name = self._ends_with(name, ".json")
path = self._ends_with(path, os.path.sep)
try:
output = self._read_json(path, name)
if key not in output:
raise KeyError("Key '{}' not found in file {}".format(key, filename))
else:
return output[key]
except EnvironmentError as error:
self.logger.error("Error while opening or reading the file: {}".format(error))
raise
def write_values(self, data, filepath=None, filename=None, indent=None, keys_to_write=None):
"""
Tries to write extra content to a JSON file.
Creates filename.temp with updated content, removes the old file and
finally renames the .temp to match the old file.
This is in effort to preserve the data in case of some weird errors cause problems.
:param filepath: Path to file
:param filename: Name of file
:param data: Data to write as a dictionary
:param indent: indent level for pretty printing the resulting file
:param keys_to_write: array of keys that are to be picked from data and written to file.
Default is None, when all data is written to file.
:return: Path to file used
:raises EnvironmentError ValueError
"""
name = filename if filename else self.filename
path = filepath if filepath else self.filepath
name = self._ends_with(name, ".json")
path = self._ends_with(path, os.path.sep)
if not os.path.isfile(path + name):
try:
return self.write_file(data, path, name, indent, keys_to_write)
except EnvironmentError as error:
self.logger.error("Error while opening or writing to file: {}".format(error))
raise
except ValueError:
raise
if keys_to_write:
data_to_write = {}
for key in keys_to_write:
data_to_write[key] = data[key]
else:
data_to_write = data
try:
with open(path + name, 'r') as fil:
output = json.load(fil)
self.logger.info("Read contents of {}".format(filename))
for key in data_to_write:
try:
output[key] = data_to_write[key]
except TypeError as error:
self.logger.error(
"File contents could not be serialized into a dict. {}".format(error))
raise
self._write_json(path, name + ".temp", "w", output, indent)
FileUtils.remove_file(name, path)
FileUtils.rename_file(name + '.temp', name, path)
return os.path.join(path, name)
except EnvironmentError as error:
self.logger.error(
"Error while writing to, opening or reading the file: {}".format(error))
raise
except ValueError as error:
self.logger.error(
"File could not be decoded to JSON. It might be empty? {}".format(error))
try:
self._write_json(path, name, "w", data_to_write, indent)
return os.path.join(path, name)
except EnvironmentError:
raise
def _write_json(self, filepath, filename, writemode, content, indent):
"""
Helper for writing content to a file.
:param filepath: path to file
:param filename: name of file
:param writemode: writemode used
:param content: content to write
:param indent: value for dump indent parameter.
:return: Norhing
"""
with open(os.path.join(filepath, filename), writemode) as fil:
json.dump(content, fil, indent=indent)
self.logger.info("Wrote content to file {}".format(filename))
def _read_json(self, path, name):
"""
Load a json into a dictionary from a file.
:param path: path to file
:param name: name of file
:return: dict
"""
with open(os.path.join(path, name), 'r') as fil:
output = json.load(fil)
self.logger.info("Read contents of {}".format(name))
return output
def _ends_with(self, string_to_edit, end): # pylint: disable=no-self-use
"""
Check if string ends with characters in end, if not merge end to string.
:param string_to_edit: string to check and edit.
:param end: str
:return: string_to_edit or string_to_edit + end
"""
if not string_to_edit.endswith(end):
return string_to_edit + end
return string_to_edit
|
class JsonFile(object):
'''
JsonFile class, for generating and handling json files.
'''
def __init__(self, logger=None, filepath=None, filename=None):
pass
def write_file(self, content, filepath=None, filename=None, indent=None, keys_to_write=None):
'''
Write a Python dictionary as JSON to a file.
:param content: Dictionary of key-value pairs to save to a file
:param filepath: Path where the file is to be created
:param filename: Name of the file to be created
:param indent: You can use this to specify indent level for pretty printing the file
:param keys_to_write: array of keys that are to be picked from data and written to file.
Default is None, when all data is written to file.
:return: Path of file used
:raises OSError, EnvironmentError, ValueError
'''
pass
def read_file(self, filepath=None, filename=None):
'''
Tries to read JSON content from filename and convert it to a dict.
:param filepath: Path where the file is
:param filename: File name
:return: Dictionary read from the file
:raises EnvironmentError, ValueError
'''
pass
def read_value(self, key, filepath=None, filename=None):
'''
Tries to read the value of given key from JSON file filename.
:param filepath: Path to file
:param filename: Name of file
:param key: Key to search for
:return: Value corresponding to given key
:raises OSError, EnvironmentError, KeyError
'''
pass
def write_values(self, data, filepath=None, filename=None, indent=None, keys_to_write=None):
'''
Tries to write extra content to a JSON file.
Creates filename.temp with updated content, removes the old file and
finally renames the .temp to match the old file.
This is in effort to preserve the data in case of some weird errors cause problems.
:param filepath: Path to file
:param filename: Name of file
:param data: Data to write as a dictionary
:param indent: indent level for pretty printing the resulting file
:param keys_to_write: array of keys that are to be picked from data and written to file.
Default is None, when all data is written to file.
:return: Path to file used
:raises EnvironmentError ValueError
'''
pass
def _write_json(self, filepath, filename, writemode, content, indent):
'''
Helper for writing content to a file.
:param filepath: path to file
:param filename: name of file
:param writemode: writemode used
:param content: content to write
:param indent: value for dump indent parameter.
:return: Norhing
'''
pass
def _read_json(self, path, name):
'''
Load a json into a dictionary from a file.
:param path: path to file
:param name: name of file
:return: dict
'''
pass
def _ends_with(self, string_to_edit, end):
'''
Check if string ends with characters in end, if not merge end to string.
:param string_to_edit: string to check and edit.
:param end: str
:return: string_to_edit or string_to_edit + end
'''
pass
| 9 | 8 | 24 | 2 | 15 | 8 | 5 | 0.56 | 1 | 4 | 0 | 0 | 8 | 3 | 8 | 8 | 205 | 24 | 117 | 34 | 108 | 65 | 111 | 27 | 102 | 13 | 1 | 4 | 41 |
1,981 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/tools.py
|
icetea_lib.tools.tools.Singleton
|
class Singleton(type):
"""
Singleton metaclass implementation:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
"""
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
|
class Singleton(type):
'''
Singleton metaclass implementation:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
'''
def __call__(cls, *args, **kwargs):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.67 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 14 | 12 | 2 | 6 | 3 | 4 | 4 | 6 | 3 | 4 | 2 | 2 | 1 | 2 |
1,982 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/tools/GenericProcess.py
|
icetea_lib.tools.GenericProcess.GenericProcess
|
class GenericProcess(object):
"""
Generic process implementation for use with Dut.
"""
# Contstruct GenericProcess instance
def __init__(self, name, cmd=None, path=None, logger=None):
self.name = name
self.proc = None
self.logger = logger
self.cmd = None
self.cmd_arr = None
self.path = None
self.gdb = False
self.gdbs = False
self.vgdb = False
self.gdbs_port = None
self.nobuf = False
self.valgrind = None
self.valgrind_xml = None
self.valgrind_console = None
self.valgrind_track_origins = None
self.valgrind_extra_params = None
self.__print_io = True
self.__valgrind_log_basename = None
self.read_thread = None
self.__ignore_return_code = False
self.default_retcode = 0
if not self.logger:
self.logger = LogManager.get_bench_logger(name, 'GP', False)
self.cmd = cmd
self.path = path
def enable_io_prints(self):
"""
Enable IO prints
"""
self.__print_io = True
def disable_io_prints(self):
"""
Disable IO prints
"""
self.__print_io = False
@property
def ignore_return_code(self):
"""
Return value of __ignoreReturnCode
"""
return self.__ignore_return_code
@ignore_return_code.setter
def ignore_return_code(self, value):
"""
Set __ignoreReturnCode
"""
self.__ignore_return_code = value
# use gdb for process
def use_gdb(self, gdb=True):
"""
Set gdb use for process.
:param gdb: Boolean, defaults to True.
"""
self.gdb = gdb
def use_gdbs(self, gdbs=True, port=2345):
"""
Set gdbs use for process.
:param gdbs: Boolean, default is True
:param port: Port number for gdbserver
"""
self.gdbs = gdbs
self.gdbs_port = port
# use vgdb for process
def use_vgdb(self, vgdb=True):
"""
Set vgdb for process.
:param vgdb: Boolean, defaults to True
"""
self.vgdb = vgdb
def no_std_buf(self, nobuf=True):
"""
Set buffering of stdio.
:param nobuf: Defaults to True (no buffering)
"""
self.nobuf = nobuf
# pylint: disable=too-many-arguments
def use_valgrind(self, tool, xml, console, track_origins, valgrind_extra_params):
"""
Use Valgrind.
:param tool: Tool name, must be memcheck, callgrind or massif
:param xml: Boolean output xml
:param console: Dump output to console, Boolean
:param track_origins: Boolean, set --track-origins=yes
:param valgrind_extra_params: Extra parameters
:return: Nothing
:raises: AttributeError if invalid tool set.
"""
self.valgrind = tool
self.valgrind_xml = xml
self.valgrind_console = console
self.valgrind_track_origins = track_origins
self.valgrind_extra_params = valgrind_extra_params
if not tool in ['memcheck', 'callgrind', 'massif']:
raise AttributeError("Invalid valgrind tool: %s" % tool)
def __get_valgrind_params(self):
"""
Get Valgrind command as list.
:return: list
"""
valgrind = []
if self.valgrind:
valgrind.extend(['valgrind'])
if self.valgrind == 'memcheck':
valgrind.extend(['--tool=memcheck', '--leak-check=full'])
if self.valgrind_track_origins:
valgrind.extend(['--track-origins=yes'])
if self.valgrind_console:
# just dump the default output, which is text dumped to console
valgrind.extend([])
elif self.valgrind_xml:
valgrind.extend([
'--xml=yes',
'--xml-file=' + LogManager.get_testcase_logfilename(
self.name + '_valgrind_mem.xml', prepend_tc_name=True)
])
else:
valgrind.extend([
'--log-file=' + LogManager.get_testcase_logfilename(
self.name + '_valgrind_mem.txt')
])
elif self.valgrind == 'callgrind':
valgrind.extend([
'--tool=callgrind',
'--dump-instr=yes',
'--simulate-cache=yes',
'--collect-jumps=yes'])
if self.valgrind_console:
# just dump the default output, which is text dumped to console
valgrind.extend([])
elif self.valgrind_xml:
valgrind.extend([
'--xml=yes',
'--xml-file=' + LogManager.get_testcase_logfilename(
self.name + '_valgrind_calls.xml', prepend_tc_name=True)
])
else:
valgrind.extend([
'--callgrind-out-file=' + LogManager.get_testcase_logfilename(
self.name + '_valgrind_calls.data')
])
elif self.valgrind == 'massif':
valgrind.extend(['--tool=massif'])
valgrind.extend([
'--massif-out-file=' + LogManager.get_testcase_logfilename(
self.name + '_valgrind_massif.data')
])
# this allows one to specify misc params to valgrind,
# eg. "--threshold=0.4" to get some more data from massif
if self.valgrind_extra_params != '':
valgrind.extend(self.valgrind_extra_params.split())
return valgrind
def start_process(self, cmd=None, path="", processing_callback=None):
"""
Start the process.
:param cmd: Command to run
:param path: cwd
:param processing_callback: Callback for processing lines
:return: Nothing
:raises: NameError if Connection fails
"""
self.cmd = self.cmd if not cmd else cmd
self.path = self.path if not path else path
if self.path:
self.path = os.path.abspath(self.path)
self.cmd_arr = []
# set stdbuf in/out/err to zero size = no buffers in use
if self.nobuf:
self.cmd_arr.extend(['stdbuf', '-i0', '-o0', '-e0'])
# check if user want to debug this process
if self.gdb:
# add gdb parameters, run program immediately
self.cmd_arr.extend(['gdb', '-ex=run', '--args'])
elif self.gdbs:
# add gdbserver parameters, run program immediately
self.cmd_arr.extend(['gdbserver', 'localhost:' + str(self.gdbs_port)])
elif self.vgdb:
# add valgrind vgdb parameters, run program but wait for remote gdb connection
self.cmd_arr.extend(['valgrind', '--vgdb=yes', '--vgdb-error=0'])
if self.valgrind:
self.cmd_arr.extend(self.__get_valgrind_params())
self.cmd_arr.extend(self.cmd)
prefn = None
if not platform.system() == "Windows":
prefn = os.setsid
self.logger.debug("Instantiating process "
"%s at %s with command %s"
% (self.name, self.path, " ".join(self.cmd_arr)),
extra={"type": " "})
self.proc = subprocess.Popen(self.cmd_arr, cwd=self.path, stdout=subprocess.PIPE,
stdin=subprocess.PIPE, preexec_fn=prefn)
if UNIXPLATFORM:
import fcntl
file_descr = self.proc.stdout.fileno()
fcntl_var = fcntl.fcntl(file_descr, fcntl.F_GETFL)
fcntl.fcntl(file_descr, fcntl.F_SETFL, fcntl_var | os.O_NONBLOCK)
if self.proc.pid:
# Start stream reader thread
self.read_thread = NonBlockingStreamReader(self.proc.stdout, processing_callback)
self.read_thread.start()
self.logger.info("Process '%s' running with pid: %i" % (' '.join(self.cmd_arr),
self.proc.pid),
extra={'type': '<->'})
else:
self.logger.warning("Process start fails", extra={'type': '<->'})
raise NameError('Connection Fails')
def stop_process(self):
"""
Stop the process.
:raises: EnvironmentError if stopping fails due to unknown environment
TestStepError if process stops with non-default returncode and return code is not ignored.
"""
if self.read_thread is not None:
self.logger.debug("stop_process::readThread.stop()-in")
self.read_thread.stop()
self.logger.debug("stop_process::readThread.stop()-out")
returncode = None
if self.proc:
self.logger.debug("os.killpg(%d)", self.proc.pid)
for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGKILL):
timeout = 5
try:
try:
self.logger.debug("Trying signal %s", sig)
os.killpg(self.proc.pid, sig)
except AttributeError:
self.logger.debug("os.killpg::AttributeError")
# Failed most likely because in windows,
# so use taskkill to kill whole process tree of proc
if platform.system() == "Windows":
subprocess.call(['taskkill', '/F', '/T', '/PID', str(self.proc.pid)])
else:
self.logger.debug("os.killpg::unknown env")
raise EnvironmentError("Unknown platform, "
"don't know how to terminate process")
while self.proc.poll() is None and timeout > 0:
time.sleep(1)
timeout -= 1
returncode = self.proc.poll()
if returncode is not None:
break
except OSError as error:
self.logger.info("os.killpg::OSError: %s", error)
self.proc = None
if returncode is not None:
self.logger.debug("Process stopped with returncode %s" % returncode)
if returncode != self.default_retcode and not self.__ignore_return_code:
raise TestStepError("Process stopped with returncode %d" % returncode)
self.logger.debug("stop_process-out")
def stop(self):
"""
Stop the process
See stop_process for more information
"""
self.stop_process()
def readline(self, timeout=1): # pylint: disable=unused-argument
"""
Readline implementation.
:param timeout: Timeout, not used
:return: Line read or None
"""
data = None
if self.read_thread:
# Ignore the timeout value, return imediately if no lines in queue
data = self.read_thread.readline()
if data and self.__print_io:
self.logger.info(data, extra={'type': '<--'})
return data
def writeline(self, data, crlf="\r\n"):
"""
Writeline implementation.
:param data: Data to write
:param crlf: Line end characters, defailt is \r\n
:return: Nothing
:raises: RuntimeError if errors happen while writing to PIPE or process stops.
"""
if self.read_thread:
if self.read_thread.has_error():
raise RuntimeError("Error writing PIPE")
# Check if process still alive
if self.proc.poll() is not None:
raise RuntimeError("Process stopped")
if self.__print_io:
self.logger.info(data, extra={'type': '-->'})
self.proc.stdin.write(bytearray(data + crlf, 'ascii'))
self.proc.stdin.flush()
def is_alive(self):
"""
Is process alive.
:return: Boolean, True is process is still running.
"""
return is_pid_running(self.proc.pid) if self.proc else False
|
class GenericProcess(object):
'''
Generic process implementation for use with Dut.
'''
def __init__(self, name, cmd=None, path=None, logger=None):
pass
def enable_io_prints(self):
'''
Enable IO prints
'''
pass
def disable_io_prints(self):
'''
Disable IO prints
'''
pass
@property
def ignore_return_code(self):
'''
Return value of __ignoreReturnCode
'''
pass
@ignore_return_code.setter
def ignore_return_code(self):
'''
Set __ignoreReturnCode
'''
pass
def use_gdb(self, gdb=True):
'''
Set gdb use for process.
:param gdb: Boolean, defaults to True.
'''
pass
def use_gdbs(self, gdbs=True, port=2345):
'''
Set gdbs use for process.
:param gdbs: Boolean, default is True
:param port: Port number for gdbserver
'''
pass
def use_vgdb(self, vgdb=True):
'''
Set vgdb for process.
:param vgdb: Boolean, defaults to True
'''
pass
def no_std_buf(self, nobuf=True):
'''
Set buffering of stdio.
:param nobuf: Defaults to True (no buffering)
'''
pass
def use_valgrind(self, tool, xml, console, track_origins, valgrind_extra_params):
'''
Use Valgrind.
:param tool: Tool name, must be memcheck, callgrind or massif
:param xml: Boolean output xml
:param console: Dump output to console, Boolean
:param track_origins: Boolean, set --track-origins=yes
:param valgrind_extra_params: Extra parameters
:return: Nothing
:raises: AttributeError if invalid tool set.
'''
pass
def __get_valgrind_params(self):
'''
Get Valgrind command as list.
:return: list
'''
pass
def start_process(self, cmd=None, path="", processing_callback=None):
'''
Start the process.
:param cmd: Command to run
:param path: cwd
:param processing_callback: Callback for processing lines
:return: Nothing
:raises: NameError if Connection fails
'''
pass
def stop_process(self):
'''
Stop the process.
:raises: EnvironmentError if stopping fails due to unknown environment
TestStepError if process stops with non-default returncode and return code is not ignored.
'''
pass
def stop_process(self):
'''
Stop the process
See stop_process for more information
'''
pass
def readline(self, timeout=1):
'''
Readline implementation.
:param timeout: Timeout, not used
:return: Line read or None
'''
pass
def writeline(self, data, crlf="\r\n"):
'''
Writeline implementation.
:param data: Data to write
:param crlf: Line end characters, defailt is
:return: Nothing
:raises: RuntimeError if errors happen while writing to PIPE or process stops.
'''
pass
def is_alive(self):
'''
Is process alive.
:return: Boolean, True is process is still running.
'''
pass
| 20 | 17 | 18 | 1 | 12 | 5 | 3 | 0.49 | 1 | 9 | 2 | 1 | 17 | 21 | 17 | 17 | 336 | 39 | 200 | 51 | 179 | 98 | 160 | 48 | 141 | 12 | 1 | 5 | 57 |
1,983 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench_config.py
|
test.test_bench_config.MockArgs
|
class MockArgs(object):
def __init__(self):
self.tc_cfg = None
self.channel = None
self.type = None
self.bin = None
self.platform_name = None
|
class MockArgs(object):
def __init__(self):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 5 | 1 | 1 | 7 | 0 | 7 | 7 | 5 | 0 | 7 | 7 | 5 | 1 | 1 | 0 | 1 |
1,984 |
ARMmbed/icetea
|
ARMmbed_icetea/examples/sample_serial.py
|
examples.sample_serial.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_serial",
title="Example test for using serial type duts.",
status="released",
purpose="Act as an example of defining a serial type dut.",
component=["cmdline"],
type="smoke",
requirements={
"duts": {
'*': {
"count": 1,
"type": "serial"
},
"1": {"serial_port": "/dev/ttyACM0"}
}
}
)
def setup(self):
pass
def case(self):
self.command(1, "echo 'Hello World'")
def teardown(self):
pass
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 110 | 28 | 3 | 25 | 5 | 20 | 0 | 9 | 5 | 4 | 1 | 3 | 0 | 4 |
1,985 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Configurations.py
|
icetea_lib.TestBench.Configurations.Configurations
|
class Configurations(object):
"""
Configurations manage test and environment configurations.
Provide public API's to read configuration values.
"""
def __init__(self, args=None, logger=None, **kwargs):
super(Configurations, self).__init__()
self._config, self._integer_keys_found = self._parse_config(**kwargs)
self._env_cfg = None
self._logger = logger if logger else LogManager.get_dummy_logger()
self._args = args
def init(self, logger):
"""
When test starts Bench will call _init() -function to initialize
configurations and read execution configuration file if exists.
:param args: arguments
"""
self._logger = logger
if self._integer_keys_found:
self._logger.warning("Integer keys found in configuration for DUT requirements. "
"Keys forced to strings for this run. "
"Please update your DUT requirements keys to strings.")
# Read cli given environment configuration file
self._env_cfg = self._read_env_configs(self._args.env_cfg, self._args.iface)
# Read cli given TC configuration file and merge it
self._read_exec_configs(self._args)
@property
def test_name(self):
"""
Get test bench name
:return: string
"""
# Return unknown also when name is set as None
return "unknown" if self._config.get("name",
"unknown") is None else self._config.get("name",
"unknown")
@property
def name(self):
"""
Get test bench name
:return: string
"""
return self.test_name
@property
def config(self):
"""
Getter for the internal config dict.
:return: dict
"""
return self._config
@config.setter
def config(self, value):
self.set_config(value)
@property
def env(self):
"""
Getter for env configuration
:return: dict
"""
return self._env_cfg
def is_hardware_in_use(self):
"""
:return: True if type is hardware
"""
return get(self.config, "requirements.duts.*.type") == "hardware"
def get_test_component(self):
"""
Get test component.
:return: string
"""
return self.config.get("component", [''])
def get_features_under_test(self):
"""
Get features tested by this test case.
:return: list
"""
fea = self.config.get("feature", [])
if isinstance(fea, str):
return [fea]
return fea
def get_allowed_platforms(self):
"""
Return list of allowed platfroms from requirements.
:return: list
"""
return get(self.config, "requirements.duts.*.allowed_platforms", list())
# @todo find better place for these
def status(self):
"""
Get TC implementation status.
:return: string or None
"""
return self.config.get('status')
def type(self):
"""
Get test case type.
:return: string or None
"""
return self.config.get('type')
def subtype(self):
"""
Get test case subtype.
:return: string or None
"""
return self.config.get('subtype')
def get_config(self):
"""
Get test case configuration.
:return: dict
"""
return self.config
def skip(self):
"""
Get skip value.
:return: Boolean or None
"""
return get(self.config, "execution.skip.value")
def skip_info(self):
"""
Get the entire skip dictionary.
:return: dictionary or None
"""
return get(self.config, "execution.skip")
# Get Skip Reason
def skip_reason(self):
"""
Get skip reason.
:return: string
"""
return get(self.config, "execution.skip.reason", "")
def check_skip(self):
"""
Check if tc should be skipped
:return: Boolean
"""
if not self.skip():
return False
info = self.skip_info()
only_type = info.get('only_type')
platforms = info.get('platforms', [])
allowed = get(self.config, "requirements.duts.*.allowed_platforms", list())
# validate platforms in allowed_platforms, otherwise no skip
if allowed and platforms and not set(platforms).issubset(allowed):
return False
# skip tests by either only_type or platforms
if only_type or platforms:
for keys in get(self.config, "requirements.duts", dict()):
# skip tests by only_type
type_expr = "type" in self.config["requirements"]["duts"][keys] and \
self.config['requirements']['duts'][keys]["type"] == only_type
if only_type and type_expr:
return True
# skip test by platforms condition 1:
plat_expr = "platform_name" in self.config['requirements']['duts'][keys] and \
self.config['requirements']['duts'][keys]["platform_name"] in platforms
if platforms and plat_expr:
return True
# no skip if neither only_type nor platforms is defined
return False
def get_tc_abspath(self, tc_file=None):
"""
Get path to test case.
:param tc_file: name of the file. If None, tcdir used instead.
:return: absolute path.
"""
if not tc_file:
return os.path.abspath(self._args.tcdir)
return os.path.abspath(tc_file)
def set_config(self, config):
"""
Set the configuration for this test case.
:param config: dictionary
:return: Nothing
"""
self._config = config
# Read Environment Configuration JSON file
def _read_env_configs(self, env_cfg, iface): # pylint: disable=no-self-use
"""
Read environment configuration json file.
:return: False if read fails, True otherwise.
"""
data = None
if env_cfg != '':
env_cfg_filename = env_cfg
else:
env_cfg_filename = os.path.abspath(os.path.join(__file__,
os.path.pardir,
os.path.pardir,
os.path.pardir,
"env_cfg_json"))
if os.path.exists(env_cfg_filename):
with open(env_cfg_filename) as data_file:
try:
data = json.load(data_file, object_pairs_hook=find_duplicate_keys)
except ValueError as error:
self._logger.error(error)
raise InconclusiveError("Environment file {} read failed: {}".format(
env_cfg_filename, error))
elif env_cfg != '':
raise InconclusiveError('Environment file {} does not exist'.format(env_cfg))
env = merge({}, data) if data else {}
if iface:
env = merge(env, {'sniffer': {'iface': iface}})
else:
env = merge(env, {'sniffer': {'iface': "Sniffer"}})
return env
# Read Execution Configuration file
def _read_exec_configs(self, args):  # pylint: disable=too-many-branches
    """
    Read the execution configuration file and apply CLI overrides on top of
    the current configuration.

    :param args: parsed argument namespace (uses tc_cfg, tc, type, bin and
    platform_name).
    :return: Nothing.
    :raises InconclusiveError: if the given configuration file does not exist.
    :raises TestStepError: if the file cannot be read or merged, or if
    platform_name is not in allowed_platforms.
    """
    cfg_path = None
    if args.tc_cfg:
        cfg_path = args.tc_cfg
    # TODO: this bit is not compatible with IceteaManagement's --tc argument.
    elif isinstance(args.tc, string_types) and os.path.exists(args.tc + '.json'):
        cfg_path = args.tc + '.json'
    if cfg_path:
        if not os.path.exists(cfg_path):
            self._logger.error("Execution configuration file {} does not exist.".format(cfg_path))
            raise InconclusiveError(
                "Execution configuration file {} does not exist.".format(cfg_path))
        with open(cfg_path) as cfg_file:
            try:
                loaded = json.load(cfg_file, object_pairs_hook=find_duplicate_keys)
                self._config = merge(self._config, loaded)
            except Exception as error:
                self._logger.error("Testcase configuration read from file (%s) failed!",
                                   cfg_path)
                self._logger.error(error)
                raise TestStepError("TC CFG read fail! {}".format(error))
    if args.type:
        self._config["requirements"]["duts"]["*"] = merge(
            self._config["requirements"]["duts"]["*"],
            {"type": args.type})
    if args.bin:
        self._config["requirements"]["duts"]["*"] = merge(
            self._config["requirements"]["duts"]["*"],
            {"application": {'bin': args.bin}})
    if args.platform_name:
        allowed = self._config["requirements"]["duts"]["*"].get("allowed_platforms")
        if allowed and args.platform_name not in allowed:
            raise TestStepError("Required platform_name not in allowed_platforms.")
        self._config["requirements"]["duts"]["*"]["platform_name"] = args.platform_name
@staticmethod
def _parse_config(**kwargs):
    """
    Build a full test case configuration from keyword arguments.

    Merges the given kwargs on top of the default configuration skeleton.
    Integer keys in requirements.duts are normalized to strings (only when
    more than one dut requirement is present, mirroring historical behavior).

    :param kwargs: configuration items.
    :return: tuple (configuration dict, True if integer dut keys were found)
    """
    base_config = {
        "compatible": {
            "framework": {
                "name": "Icetea",
                "version": ">=1.0.0"
            },
            "automation": {
                "value": True
            },
            "hw": {
                "value": True
            }
        },
        "name": None,
        "type": None,
        "sub_type": None,
        "requirements": {
            "duts": {"*": {
                "application": {
                    "bin": None
                }
            }},
            "external": {
                "apps": [
                ]
            }
        }
    }
    integer_keys_found = False
    try:
        dut_reqs = kwargs["requirements"]["duts"]
        if len(dut_reqs) > 1:
            integer_keys_found = False
            stringified = {}
            integer_keys = []
            for req_key in dut_reqs:
                if isinstance(req_key, int):
                    integer_keys_found = True
                    integer_keys.append(req_key)
                stringified[str(req_key)] = dut_reqs[req_key]
            for req_key in stringified:
                dut_reqs[req_key] = stringified[req_key]
            for req_key in integer_keys:
                dut_reqs.pop(req_key)
    except KeyError:
        pass
    for cfg_key in kwargs:
        if isinstance(kwargs[cfg_key], dict) and cfg_key in base_config:
            base_config[cfg_key] = merge(base_config[cfg_key], kwargs[cfg_key])
        else:
            base_config[cfg_key] = kwargs[cfg_key]
    return base_config, integer_keys_found
|
class Configurations(object):
'''
Configurations manage test and environment configurations.
Provide public API's to read configuration values.
'''
def __init__(self, args=None, logger=None, **kwargs):
pass
def init(self, logger):
'''
When test starts Bench will call _init() -function to initialize
configurations and read execution configuration file if exists.
:param args: arguments
'''
pass
@property
def test_name(self):
'''
Get test bench name
:return: string
'''
pass
@property
def name(self):
'''
Get test bench name
:return: string
'''
pass
@property
def config(self):
'''
Getter for the internal config dict.
:return: dict
'''
pass
@config.setter
def config(self):
pass
@property
def env(self):
'''
Getter for env configuration
:return: dict
'''
pass
def is_hardware_in_use(self):
'''
:return: True if type is hardware
'''
pass
def get_test_component(self):
'''
Get test component.
:return: string
'''
pass
def get_features_under_test(self):
'''
Get features tested by this test case.
:return: list
'''
pass
def get_allowed_platforms(self):
'''
Return list of allowed platfroms from requirements.
:return: list
'''
pass
def status(self):
'''
Get TC implementation status.
:return: string or None
'''
pass
def type(self):
'''
Get test case type.
:return: string or None
'''
pass
def subtype(self):
'''
Get test case subtype.
:return: string or None
'''
pass
def get_config(self):
'''
Get test case configuration.
:return: dict
'''
pass
def skip(self):
'''
Get skip value.
:return: Boolean or None
'''
pass
def skip_info(self):
'''
Get the entire skip dictionary.
:return: dictionary or None
'''
pass
def skip_reason(self):
'''
Get skip reason.
:return: string
'''
pass
def check_skip(self):
'''
Check if tc should be skipped
:return: Boolean
'''
pass
def get_tc_abspath(self, tc_file=None):
'''
Get path to test case.
:param tc_file: name of the file. If None, tcdir used instead.
:return: absolute path.
'''
pass
def set_config(self, config):
'''
Set the configuration for this test case.
:param config: dictionary
:return: Nothing
'''
pass
def _read_env_configs(self, env_cfg, iface):
'''
Read environment configuration json file.
:return: False if read fails, True otherwise.
'''
pass
def _read_exec_configs(self, args):
'''
Read execution configuration file.
:return: Nothing.
:raises TestStepError if file cannot be read or merged into config, or if platform_name
is not in allowed_platforms.
'''
pass
@staticmethod
def _parse_config(**kwargs):
'''
Internal helper for parsing configurations.
:param kwargs: dict
:return: dict
'''
pass
| 31 | 23 | 14 | 1 | 8 | 4 | 2 | 0.55 | 1 | 11 | 2 | 0 | 23 | 5 | 24 | 24 | 367 | 55 | 202 | 60 | 171 | 112 | 144 | 50 | 119 | 11 | 1 | 4 | 59 |
1,986 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/AllocationContext.py
|
icetea_lib.AllocationContext.AllocationContextList
|
class AllocationContextList(object):
    """
    Container for AllocationContext objects of allocated resources (duts).

    Behaves like a list (len(), iteration, indexing) over the allocation
    contexts and provides helpers to initialize the corresponding Dut
    objects, open their connections and decide whether flashing is needed.
    """

    def __init__(self, logger=None):
        """
        :param logger: logging.Logger instance; can also be set later with
        set_logger().
        """
        self._allocation_contexts = []
        self.logger = logger
        self.duts = []
        self.dutinformations = []
        self._resource_configuration = None
        self._dut_initialization_functions = {}

    def __len__(self):
        """
        Number of stored allocation contexts.

        :return: int
        """
        return len(self._allocation_contexts)

    def __iter__(self):
        """
        Iterate over the stored allocation contexts.

        :return: iterator over AllocationContext objects
        """
        return iter(self._allocation_contexts)

    def __getitem__(self, item):
        """
        Indexed read access (list[index]).

        :param item: index to return
        :return: self._allocation_contexts[item]
        :raises TypeError: if item is not an integer.
        :raises IndexError: if item is negative or out of bounds.
        """
        if not isinstance(item, int):
            raise TypeError("AllocationContextList get expects an integer index")
        if len(self._allocation_contexts) <= item:
            raise IndexError("list getitem out of bounds")
        if item < 0:
            raise IndexError("AllocationContextList get not implemented for negative values.")
        return self._allocation_contexts[item]

    def __setitem__(self, key, value):
        """
        Indexed write access (list[index] = value).

        :param key: index of the list item to replace
        :param value: value to replace the list item with
        :return: Nothing
        :raises TypeError: if key is not an integer.
        :raises IndexError: if key is negative or out of bounds.
        """
        if not isinstance(key, int):
            raise TypeError("AllocationContextList set expects an integer index")
        if len(self._allocation_contexts) <= key:
            raise IndexError("list setitem out of bounds")
        if key < 0:
            raise IndexError("AllocationContextList set not implemented for negative indexes.")
        self._allocation_contexts[key] = value

    def set_dut_init_function(self, dut_type, fnctn):
        """
        Set the initialization function for a dut type.

        :param dut_type: Dut type
        :param fnctn: callable
        :return: Nothing
        """
        self._dut_initialization_functions[dut_type] = fnctn

    def get_dut_init_function(self, dut_type):
        """
        Get the initialization function for a dut type.

        :param dut_type: Dut type.
        :return: callable, or None if none was registered for dut_type.
        """
        return self._dut_initialization_functions.get(dut_type)

    def set_resconf(self, resconf):
        """
        Set the resource configuration.

        :param resconf: ResourceConfig object
        :return: Nothing
        """
        self._resource_configuration = resconf

    def get_resconf(self):
        """
        Get the resource configuration for this AllocationContextList.

        :return: ResourceConfig
        """
        return self._resource_configuration

    def set_logger(self, logger):
        """
        Set the logger.

        :param logger: logging.Logger
        :return: Nothing
        """
        self.logger = logger

    def get_duts(self):
        """
        Get the list of duts.

        :return: list of Dut objects
        """
        return self.duts

    def get_dutinformations(self):
        """
        Get DutInformation objects of the current duts.

        :return: list of DutInformation objects
        """
        return self.dutinformations

    def append(self, alloc_context):
        """
        Append alloc_context to the stored contexts. No type-checking done here.

        :param alloc_context: AllocationContext object.
        :return: Nothing
        """
        self._allocation_contexts.append(alloc_context)

    # pylint: disable=too-many-statements
    def init_duts(self, args):  # pylint: disable=too-many-locals,too-many-branches
        """
        Initialize duts of different types based on the configuration provided
        by their AllocationContext. Initialization runs in parallel when
        --parallel_flash was given.

        :param args: Argument Namespace object
        :return: list of initialized duts.
        :raises AllocationError: if any dut initialization failed.
        """
        # TODO: Split into smaller chunks to reduce complexity.
        threads = []
        abort_queue = Queue()

        def thread_wrapper(*thread_args, **thread_kwargs):
            """
            Run an initialization function for a dut, routing failures into
            the abort queue instead of letting them escape the worker thread.

            :param thread_args: arguments to pass to the function
            :param thread_kwargs: keyword arguments (func: callable, abort_queue: Queue)
            :return: Result of func(*thread_args)
            """
            # pylint: disable=broad-except
            try:
                return thread_kwargs["func"](*thread_args)
            except Exception as error:
                # thread_args[2] is the 1-based dut index.
                thread_kwargs["abort_queue"].put((thread_args[2], error))

        for index, dut_conf in enumerate(self._allocation_contexts):
            dut_type = dut_conf.get("type")
            func = self.get_dut_init_function(dut_type)
            if func is None:
                continue
            threads.append(((self, dut_conf.get_alloc_data().get_requirements(), index + 1, args),
                            {"func": func, "abort_queue": abort_queue}))

        # BUGFIX: pool must be bound before the try block. Previously, if
        # ThreadPool() itself raised (e.g. ValueError for zero workers), the
        # finally block raised UnboundLocalError and masked the real error.
        pool = None
        try:
            thread_limit = len(threads) if args.parallel_flash else 1
            pool = ThreadPool(thread_limit)
            async_results = [pool.apply_async(func=thread_wrapper,
                                              args=t[0], kwds=t[1])
                             for t in threads]
            # Wait for resources to be ready; re-raises worker exceptions here.
            for res in async_results:
                res.get()
            pool.close()
            pool.join()
            if not abort_queue.empty():
                msg = "Dut Initialization failed, reason(s):"
                while not abort_queue.empty():
                    dut_index, error = abort_queue.get()
                    msg = "{}\nDUT index {} - {}".format(msg, dut_index, error)
                raise AllocationError(msg)
            # Sort duts to same order as in dut_conf_list
            self.duts.sort(key=lambda d: d.index)
            self.dutinformations.sort(key=lambda d: d.index)
        except KeyboardInterrupt:
            msg = "Received keyboard interrupt, waiting for flashing to finish"
            self.logger.info(msg)
            for dut in self.duts:
                dut.close_dut(False)
                dut.close_connection()
                if hasattr(dut, "release"):
                    dut.release()
                dut = None
            raise
        except RuntimeError:
            self.logger.exception("RuntimeError during flashing")
        # ValueError is raised if ThreadPool is tried to initiate with
        # zero threads.
        except ValueError:
            self.logger.exception("No devices allocated")
            raise AllocationError("Dut Initialization failed!")
        except (DutConnectionError, TypeError):
            for dut in self.duts:
                if hasattr(dut, "release"):
                    dut.release()
            raise AllocationError("Dut Initialization failed!")
        finally:
            if pool:
                pool.close()
                pool.join()
        self.logger.debug("Allocated following duts:")
        for dut in self.duts:
            dut.print_info()
        return self.duts

    def open_dut_connections(self):
        """
        Open connections to duts and start their read threads.

        :return: Nothing
        :raises DutConnectionError: if opening a dut connection fails.
        """
        for dut in self.duts:
            try:
                dut.start_dut_thread()
                if hasattr(dut, "command"):
                    dut.open_dut(dut.command)
                else:
                    dut.open_dut()
            except DutConnectionError:
                self.logger.exception("Failed when opening dut connection")
                dut.close_dut(False)
                dut.close_connection()
                dut = None
                raise

    def check_flashing_need(self, execution_type, build_id, force):
        """
        Check if flashing of a local device is required.

        :param execution_type: Should be 'hardware' for flashing to be needed.
        :param build_id: Build id, usually a file name.
        :param force: Forceflash flag; when True the extension check is skipped.
        :return: Boolean
        :raises ResourceInitError: if no file can be resolved for build_id.
        """
        binary_file_name = AllocationContextList.get_build(build_id)
        if not binary_file_name:
            raise ResourceInitError("Given binary %s does not exist" % build_id)
        if execution_type == 'hardware' and os.path.isfile(binary_file_name):
            if not force:
                # @todo: Make a better check for binary compatibility
                extension = os.path.splitext(binary_file_name)[-1].lower()
                if extension != '.bin' and extension != '.hex':
                    self.logger.debug("File ('%s') is not supported to flash, skip it" % (
                        build_id))
                    return False
            return True
        raise ResourceInitError("Given binary %s does not exist" % build_id)

    @staticmethod
    def get_build(build_id):
        """
        Get the file related to build_id.

        :param build_id: Build id.
        :return: file from Build, or None if Build cannot handle build_id.
        """
        try:
            build = Build.init(build_id)
        except NotImplementedError:
            return None
        return build.get_file()
|
class AllocationContextList(object):
'''
Class AllocationContextList
Is used for storing and handling Duts after they have been allocated.
Contains methods to iterate over AllocationContext objects and initialize duts from
AllocationContext objects.
'''
def __init__(self, logger=None):
pass
def __len__(self):
'''
len implementation for AllocationContextList
:return: len(self._allocation_contexts)
'''
pass
def __iter__(self):
'''
Implementation of __iter__ to allow for item in list loops
:return: iterator to self._allocation_contexts
'''
pass
def __getitem__(self, item):
'''
__getitem__ implementation for AllocationContextList
enables list[index]
:param item: index to return
:return: self._allocation_contexts[item]
:raises: TypeError if item is not integer.
:raises: IndexError if item < 0 or item > len(self._allocation_contexts)
'''
pass
def __setitem__(self, key, value):
'''
__setitem__ implementation for AllocationContextList.
Replaces item in list space key with value.
:param key: Index of list item to replace
:param value: Value to replace list item with
:return: Nothing
:raises: TypeError if key is not an integer.
:raises: IndexError if key < 0 or key > len(self._allocation_contexts)
'''
pass
def set_dut_init_function(self, dut_type, fnctn):
'''
Setter for dut initialization function
:param dut_type: Dut type
:param fnctn: callable
:return: Nothing
'''
pass
def get_dut_init_function(self, dut_type):
'''
Get dut initialization function
:param dut_type: Dut type.
:return: callable
'''
pass
def set_resconf(self, resconf):
'''
Set resource configuration.
:param resconf: ResourceConfig object
:return: Nothing
'''
pass
def get_resconf(self):
'''
Get resource configuration for this AllocationContextList.
:return: ResourceConfig
'''
pass
def set_logger(self, logger):
'''
Set logger.
:param logger: logging.logger
:return: Nothing
'''
pass
def get_duts(self):
'''
Get list of duts.
:return: list of Dut objects
'''
pass
def get_dutinformations(self):
'''
Get DutInformation objects of current duts.
:return: list of DutInformation objects
'''
pass
def append(self, alloc_context):
'''
Appends alloc_context to self._allocation_contexts. No type-checking done here.
:param alloc_context: AllocationContext object.
:return: Nothing
'''
pass
def init_duts(self, args):
'''
Initializes duts of different types based on configuration provided by AllocationContext.
Able to do the initialization of duts in parallel, if --parallel_flash was provided.
:param args: Argument Namespace object
:return: list of initialized duts.
'''
pass
def thread_wrapper(*thread_args, **thread_kwargs):
'''
Run initialization function for dut
:param thread_args: arguments to pass to the function
:param thread_kwargs: keyword arguments, (func: callable, abort_queue: Queue)
:return: Result of func(*thread_args)
'''
pass
def open_dut_connections(self):
'''
Opens connections to Duts. Starts Dut read threads.
:return: Nothing
:raises DutConnectionError: if problems were encountered while opening dut connection.
'''
pass
def check_flashing_need(self, execution_type, build_id, force):
'''
Check if flashing of local device is required.
:param execution_type: Should be 'hardware'
:param build_id: Build id, usually file name
:param force: Forceflash flag
:return: Boolean
'''
pass
@staticmethod
def get_build(build_id):
'''
Gets a file related to build_id.
'''
pass
| 20 | 18 | 15 | 2 | 8 | 6 | 3 | 0.75 | 1 | 15 | 4 | 0 | 16 | 6 | 17 | 17 | 290 | 44 | 142 | 43 | 122 | 106 | 131 | 41 | 112 | 16 | 1 | 4 | 48 |
1,987 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/CliAsyncResponse.py
|
icetea_lib.CliAsyncResponse.CliAsyncResponse
|
class CliAsyncResponse(object):  # pylint: disable=too-few-public-methods
    """
    Placeholder for a CliResponse that will arrive later (response to an
    asynchronous command). Any attribute access, string conversion or
    indexing on this object blocks until the actual response is available.
    """

    def __init__(self, dut):
        try:
            self.logger = LogManager.get_bench_logger()
        except KeyError:
            self.logger = None
        self.response = None
        self.dut = dut

    def set_response(self, response):
        """
        Store the received response. Should not be called directly; the DUT
        calls it when a response is available. Only the first call takes
        effect.
        """
        if self.response is None:
            self.response = response

    def __wait_for_response(self):
        """
        Explicitly wait and block until the DUT delivers the response.
        """
        if self.response is not None:
            # Already filled in by the DUT - nothing to wait for.
            return
        self.dut._wait_for_exec_ready()  # pylint: disable=protected-access

    def __getattr__(self, name):
        """
        Delegate attribute lookup to the inner response, waiting for it first.
        """
        self.__wait_for_response()
        return getattr(self.response, name)

    def __str__(self):
        """
        String representation of the (possibly awaited) response.
        """
        self.__wait_for_response()
        return self.response.__str__()

    def __getitem__(self, item):
        """
        Index operator forwarded to the (possibly awaited) response.
        """
        self.__wait_for_response()
        return self.response.__getitem__(item)
|
class CliAsyncResponse(object):
'''Proxy class to a future CliResponse, a response to an async comand.
If any function of Cliresponse is called on an instance of this class
the system will wait and block for the response to become ready.
'''
def __init__(self, dut):
pass
def set_response(self, response):
'''
Set the response, this function should not be called directly,
the DUT will do it when a response is available.
'''
pass
def __wait_for_response(self):
'''
Explicitelly wait and block for the response
'''
pass
def __getattr__(self, name):
'''
Forward calls and attribute lookup to the inner response once it is available
'''
pass
def __str__(self):
'''
Return the string representation of the response once it is available
'''
pass
def __getitem__(self, item):
'''
Index operator forwarded to the response once it is available
'''
pass
| 7 | 6 | 7 | 0 | 4 | 3 | 2 | 1.04 | 1 | 1 | 0 | 0 | 6 | 3 | 6 | 6 | 51 | 6 | 23 | 10 | 16 | 24 | 23 | 10 | 16 | 2 | 1 | 1 | 9 |
1,988 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/CliRequest.py
|
icetea_lib.CliRequest.CliRequest
|
class CliRequest(object):
    """
    A command request object: the command string sent to a dut plus metadata
    (creation timestamp, timeout, expected return code, ...).
    """

    def __init__(self, cmd="", timestamp=None, **kwargs):
        """
        Constructor for request.

        :param cmd: Command sent, as string.
        :param timestamp: Creation timestamp. Defaults to time.time()
        evaluated when this object is created. (BUGFIX: the previous default
        `timestamp=time.time()` was evaluated once at import time, so every
        request created without an explicit timestamp shared the module-load
        time, contradicting this documented behavior.)
        :param kwargs: Keyword arguments. Recognised keys: wait,
        expected_retcode, timeout, asynchronous, dut_index. Each populates
        the class member of the same name; unknown keys are ignored.
        """
        self.cmd = cmd
        self.wait = kwargs.get("wait", True)
        self.timeout = kwargs.get("timeout", 10)
        self.asynchronous = kwargs.get("asynchronous", False)
        self.timestamp = time.time() if timestamp is None else timestamp
        self.expected_retcode = kwargs.get("expected_retcode", 0)
        self.response = None
        self.dut_index = kwargs.get("dut_index", -1)

    def __str__(self):
        return self.cmd

    def get_timedelta(self, now):
        """
        Time difference from this request's timestamp to now.

        :param now: Timestamp to which the difference is calculated.
        :return: now - self.timestamp
        """
        return now - self.timestamp
|
class CliRequest(object):
'''
CliRequest class. This is a command request object, that contains the command,
creation timestamp and other values related to the command sent to a dut.
'''
def __init__(self, cmd="", timestamp=time.time(), **kwargs):
'''
Constuctor for request.
:param cmd: Command sent as string
:param timestamp: timestamp value, default is time.time() called when creating this object.
:param kwargs: Keyword arguments. Used arguments are: wait, expected_retcode, timeout,
These values will populate class members that share the same name.
'''
pass
def __str__(self):
pass
def get_timedelta(self, now):
'''
Return time difference to now from the start of this Request.
:param now: Timestamp to which time difference should be calculated to.
:return: Result of calculation as integer.
'''
pass
| 4 | 3 | 13 | 1 | 8 | 4 | 3 | 0.64 | 1 | 0 | 0 | 0 | 3 | 8 | 3 | 3 | 47 | 6 | 25 | 13 | 21 | 16 | 21 | 13 | 17 | 7 | 1 | 2 | 9 |
1,989 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/CliResponse.py
|
icetea_lib.CliResponse.CliResponse
|
class CliResponse(object):
    """
    Response object returned by the command API when a command has finished.
    Carries the return code, response lines, traces and timing information.
    """
    # pylint:disable=invalid-name

    # Constructor
    def __init__(self):
        try:
            self.logger = LogManager.get_bench_logger()
        except KeyError:
            self.logger = logging.getLogger("bench")
        self.timeout = False
        self.timedelta = 0
        self.retcode = None
        self.lines = []
        self.traces = []
        self.parsed = None

    def __str__(self):
        return str(self.retcode)

    def success(self):
        """
        Check whether this is a response to a successful command.

        :return: True or False
        """
        return self.retcode == 0

    def fail(self):
        """
        Check whether this is a response to a failed command.

        :return: True or False
        """
        return self.retcode != 0

    # verify that response message is expected
    def verify_message(self, expected_response, break_in_fail=True):
        """
        Verify that expected_response is found in self.lines.

        :param expected_response: response or responses to look for; list or str.
        :param break_in_fail: when True, lookup errors and a missing message
        are raised instead of returning False.
        :return: True or False
        :raises LookupError: if the message was not found and break_in_fail is
        True. searcher.verify_message may raise other exceptions as well.
        """
        try:
            found = verify_message(self.lines, expected_response)
        except (TypeError, LookupError):
            if break_in_fail:
                raise
            return False
        if found is False and break_in_fail:
            raise LookupError("Unexpected message found")
        return found

    def verify_trace(self, expected_traces, break_in_fail=True):
        """
        Verify that expected_traces is found in self.traces.

        :param expected_traces: response or responses to look for; list or str.
        :param break_in_fail: when True, lookup errors and a missing message
        are raised instead of returning False.
        :return: True or False
        :raises LookupError: if the message was not found and break_in_fail is
        True. searcher.verify_message may raise other exceptions as well.
        """
        try:
            found = verify_message(self.traces, expected_traces)
        except (TypeError, LookupError):
            if break_in_fail:
                raise
            return False
        if found is False and break_in_fail:
            raise LookupError("Unexpected message found")
        return found

    def set_response_time(self, seconds):
        """
        Store the response time.

        :param seconds: response time in seconds
        :return: Nothing
        """
        self.timedelta = seconds

    def verify_response_duration(self, expected=None, zero=0, threshold_percent=0,
                                 break_in_fail=True):
        """
        Verify that the response duration is within bounds.

        :param expected: expected duration in seconds
        :param zero: seconds subtracted from the measured duration before
        calculating the error rate
        :param threshold_percent: allowed error in percent
        :param break_in_fail: when True, raise TestStepFail if out of bounds
        :return: (duration, expected duration, error)
        """
        duration = self.timedelta - zero
        error = abs(duration / expected) * 100.0 - 100.0 if expected > 0 else 0
        self.logger.debug("should: %.3f, was: %.3f, error: %.3f %%" % (expected, duration, error))
        if abs(error) > threshold_percent:
            note = "Thread::wait error(%.2f %%) was out of bounds (%.2f %%)" \
                   % (error, threshold_percent)
            self.logger.debug(note)
            if break_in_fail:
                raise TestStepFail(note)
        return duration, expected, error

    def verify_response_time(self, expected_below):
        """
        Verify that the request-response time span is reasonable.

        :param expected_below: maximum acceptable timedelta
        :return: Nothing
        :raises ValueError: if timedelta exceeds expected_below
        """
        if self.timedelta > expected_below:
            raise ValueError("Response time is more (%f) than expected (%f)!"
                             % (self.timedelta, expected_below))
|
class CliResponse(object):
'''
CliResponse class. Object returned by the command api when a command has finished.
'''
def __init__(self):
pass
def __str__(self):
pass
def success(self):
'''
Check if this is a response to a successful command
:return: True or False
'''
pass
def fail(self):
'''
Check if this is a response to a failed command
:return: True or False
'''
pass
def verify_message(self, expected_response, break_in_fail=True):
'''
Verifies that expected_response is found in self.lines.
:param expected_response: response or responses to look for. Must be list or str.
:param break_in_fail: If set to True,
re-raises exceptions caught or if message was not found
:return: True or False
:raises: LookupError if message was not found and break_in_fail was True. Other exceptions
might also be raised through searcher.verify_message.
'''
pass
def verify_trace(self, expected_traces, break_in_fail=True):
'''
Verifies that expectedResponse is found in self.traces
:param expected_traces: response or responses to look for. Must be list or str.
:param break_in_fail: If set to True, re-raises exceptions caught or if message was
not found
:return: True or False
:raises: LookupError if message was not found and breakInFail was True. Other Exceptions
might also be raised through searcher.verify_message.
'''
pass
def set_response_time(self, seconds):
'''
Set response time in seconds.
:param seconds: integer
:return: Nothing
'''
pass
def verify_response_duration(self, expected=None, zero=0, threshold_percent=0,
break_in_fail=True):
'''
Verify that response duration is in bounds.
:param expected: seconds what is expected duration
:param zero: seconds if one to normalize duration before calculating error rate
:param threshold_percent: allowed error in percents
:param break_in_fail: boolean, True if raise TestStepFail when out of bounds
:return: (duration, expected duration, error)
'''
pass
def verify_response_time(self, expected_below):
'''
Verify that response time (time span between request-response) is reasonable.
:param expected_below: integer
:return: Nothing
:raises: ValueError if timedelta > expected time
'''
pass
| 10 | 8 | 12 | 1 | 6 | 5 | 2 | 0.86 | 1 | 6 | 1 | 0 | 9 | 7 | 9 | 9 | 124 | 14 | 59 | 25 | 48 | 51 | 56 | 22 | 46 | 4 | 1 | 2 | 20 |
1,990 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/CliResponseParser.py
|
icetea_lib.CliResponseParser.ParserManager
|
class ParserManager(object):
    """
    Manager for response parsers and the entry point for response parsing in
    test cases. PluginManager registers parsers here; tests call parse() to
    run the parser matching a command on a received response.
    """

    def __init__(self, logger=None):
        """
        :param logger: logging.Logger to use. When None, a "ParserManager"
        logger with a WARNING-level StreamHandler is created.
        """
        self.parsers = {}
        self.logger = logger
        if self.logger is None:
            self.logger = logging.getLogger("ParserManager")
            # BUGFIX: StreamHandler.setLevel() returns None, so the original
            # addHandler(logging.StreamHandler().setLevel(...)) registered
            # None as a handler and crashed on the first emitted record.
            handler = logging.StreamHandler()
            handler.setLevel(logging.WARNING)
            self.logger.addHandler(handler)
            self.logger.setLevel(logging.WARNING)

    def has_parser(self, parser):
        """
        Check whether a parser is registered for the given name.

        :param parser: Name of parser to look for
        :return: Boolean
        """
        return parser in self.parsers

    def add_parser(self, parser_name, parser_func):
        """
        Register a parser function for a name.

        :param parser_name: Name of parser to add
        :param parser_func: Callable invoked with the response to parse
        :return: Nothing
        """
        self.parsers[parser_name] = parser_func

    # parse response
    def parse(self, *args, **kwargs):  # pylint: disable=unused-argument
        """
        Parse a response.

        :param args: args[0] is the parser (command) name, args[1] the
        response to parse
        :param kwargs: dict, not used
        :return: the parser's return value, or {} when no parser is
        registered for the command or the parser raised.
        """
        cmd = args[0]
        resp = args[1]
        if cmd not in self.parsers:
            return {}
        # pylint: disable=broad-except
        try:
            return self.parsers[cmd](resp)
        except Exception as error:
            # Best-effort parsing: report the failure and fall back to {}.
            self.logger.error(error)
            return {}
|
class ParserManager(object):
'''
ParserManager class. This is a manager for all parsers and this is the entry point for test
cases to handle response parsing. PluginManager appends all parsers to this class and tests
call the parse-method to parse received responses.
'''
def __init__(self, logger=None):
pass
def has_parser(self, parser):
'''
Returns True if given parser found in managed parsers.
:param parser: Name of parser to look for
:return: Boolean
'''
pass
def add_parser(self, parser_name, parser_func):
'''
Add new parser function for specific name.
:param parser_name: Name of parser to add
:param parser_func: Callable function to call when parsing responses
:return: Nothing
'''
pass
def parse(self, *args, **kwargs):
'''
Parse response.
:param args: List. 2 first items used as parser name and response to parse
:param kwargs: dict, not used
:return: dictionary or return value of called callable from parser.
'''
pass
| 5 | 4 | 10 | 1 | 5 | 5 | 2 | 1.19 | 1 | 2 | 0 | 0 | 4 | 2 | 4 | 4 | 52 | 7 | 21 | 10 | 16 | 25 | 21 | 9 | 16 | 3 | 1 | 2 | 8 |
1,991 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/DeviceConnectors/Dut.py
|
icetea_lib.DeviceConnectors.Dut.DutConnectionError
|
class DutConnectionError(Exception):
    """Raised when connecting to a dut fails."""
|
class DutConnectionError(Exception):
'''
Exception for errors in connecting to dut.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
1,992 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/DeviceConnectors/Dut.py
|
icetea_lib.DeviceConnectors.Dut.Location
|
class Location(object):  # pylint: disable=too-few-public-methods
    """
    Cartesian (x, y) position of a dut in a network topology.
    """

    def __init__(self, x_coord=0, y_coord=0):
        """
        :param x_coord: x coordinate, defaults to 0.
        :param y_coord: y coordinate, defaults to 0.
        """
        self.x_coord = x_coord
        self.y_coord = y_coord
|
class Location(object):
'''
Location object for storing x and y coordinates of a dut in a network topology.
'''
def __init__(self, x_coord=0, y_coord=0):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 2 | 1 | 1 | 7 | 0 | 4 | 4 | 2 | 4 | 4 | 4 | 2 | 1 | 1 | 0 | 1 |
1,993 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/DeviceConnectors/DutInformation.py
|
icetea_lib.DeviceConnectors.DutInformation.DutInformation
|
class DutInformation(object):
    """
    Identifying information of a single dut: platform (string), index (int),
    resource id, vendor (string), build (Build) and provider.
    """

    def __init__(self, platform, resourceid, index="", vendor="", build=None, provider=None):
        self.platform = platform
        self.index = index
        self.resource_id = resourceid
        self.vendor = vendor
        self.build = build
        self.provider = provider
        if resourceid:
            # Register this dut's data in the shared resource cache.
            DutInformationList.push_resource_cache(resourceid, self.as_dict())

    def as_dict(self):
        """
        Dictionary representation of this object; only truthy fields are
        included.

        :return: dict
        """
        info = {}
        for dict_key, value in (("model", self.platform),
                                ("sn", self.resource_id),
                                ("vendor", self.vendor),
                                ("provider", self.provider)):
            if value:
                info[dict_key] = value
        return info

    @property
    def build_binary_sha1(self):
        """
        Sha1 of the flashed binary, or None if nothing was flashed yet.

        :return: value stored in the resource cache
        """
        cached = DutInformationList.get_resource_cache(self.resource_id)
        return cached.get("build_binary_sha1")

    @build_binary_sha1.setter
    def build_binary_sha1(self, value):
        """
        Store the flashed binary sha1 in the resource cache.

        :return: Nothing
        """
        DutInformationList.push_resource_cache(self.resource_id, {"build_binary_sha1": value})
|
class DutInformation(object):
'''
DutInformation object.
Contains fields platform (string), index (int), vendor (string), and build (Build)
'''
def __init__(self, platform, resourceid, index="", vendor="", build=None, provider=None):
pass
def as_dict(self):
'''
Generate a dictionary of the contents of this DutInformation object.
:return: dict
'''
pass
@property
def build_binary_sha1(self):
'''
Get flashed binary sha1 or None if not flashed yet.
:return: dict object
'''
pass
@build_binary_sha1.setter
def build_binary_sha1(self):
'''
Setter for flashed binary sha1.
:return: Nothing
'''
pass
| 7 | 4 | 10 | 1 | 6 | 3 | 2 | 0.57 | 1 | 1 | 1 | 0 | 4 | 6 | 4 | 4 | 50 | 6 | 28 | 15 | 21 | 16 | 26 | 13 | 21 | 5 | 1 | 1 | 9 |
1,994 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/AllocationContext.py
|
icetea_lib.AllocationContext.AllocationContext
|
class AllocationContext(object):
"""
Class AllocationContext
Contains allocation and resource_id:s used to define an allocated resource.
Also contains supplementary data in _alloc_data dictionary.
"""
def __init__(self, resource_id=None, alloc_id=None, alloc_data=None):
self._resource_id = resource_id
self._alloc_id = alloc_id
self._alloc_data = alloc_data if alloc_data else {}
def get(self, key):
"""
Getter for configuration dictionary items
:param key: Key to look for in configuration.
:param default: Default value to be returned if key is not found in configuration.
Default is None
:return: value of configuration[key] if key exists in configuration, else value of default.
"""
return self._alloc_data.get(key)
def set(self, key, value):
"""
Setter for configuration values
:param key:
:param value:
:return: Nothing
"""
self._alloc_data[key] = value
def get_alloc_data(self):
"""
Get allocation data dictionary
:return: Allocation data (dictionary)
"""
return self._alloc_data
@property
def alloc_id(self):
"""
Getter for alloc_id
:return: alloc_id
"""
return self._alloc_id
@property
def resource_id(self):
"""
Getter for resource_id
:return: resource_id
"""
return self._resource_id
def __getitem__(self, item):
"""
__getitem__ implementation for AllocationContext
enables context[item]
:param item: item to return
:return: self._alloc_data[item] or None if item does not exist in self._alloc_data
"""
return self._alloc_data.get(item, None)
def __setitem__(self, key, value):
"""
__setitem__ implementation for AllocationContext.
Replaces item in list space key with value.
:param key: Name of context data item to replace/add
:param value: Value to replace context data item with
:return: Nothing
"""
self._alloc_data[key] = value
|
class AllocationContext(object):
'''
Class AllocationContext
Contains allocation and resource_id:s used to define an allocated resource.
Also contains supplementary data in _alloc_data dictionary.
'''
def __init__(self, resource_id=None, alloc_id=None, alloc_data=None):
pass
def get(self, key):
'''
Getter for configuration dictionary items
:param key: Key to look for in configuration.
:param default: Default value to be returned if key is not found in configuration.
Default is None
:return: value of configuration[key] if key exists in configuration, else value of default.
'''
pass
def set(self, key, value):
'''
Setter for configuration values
:param key:
:param value:
:return: Nothing
'''
pass
def get_alloc_data(self):
'''
Get allocation data dictionary
:return: Allocation data (dictionary)
'''
pass
@property
def alloc_id(self):
'''
Getter for alloc_id
:return: alloc_id
'''
pass
@property
def resource_id(self):
'''
Getter for resource_id
:return: resource_id
'''
pass
def __getitem__(self, item):
'''
__getitem__ implementation for AllocationContext
enables context[item]
:param item: item to return
:return: self._alloc_data[item] or None if item does not exist in self._alloc_data
'''
pass
def __setitem__(self, key, value):
'''
__setitem__ implementation for AllocationContext.
Replaces item in list space key with value.
:param key: Name of context data item to replace/add
:param value: Value to replace context data item with
:return: Nothing
'''
pass
| 11 | 8 | 8 | 1 | 2 | 5 | 1 | 2.05 | 1 | 0 | 0 | 0 | 8 | 3 | 8 | 8 | 80 | 16 | 21 | 14 | 10 | 43 | 19 | 12 | 10 | 2 | 1 | 0 | 9 |
1,995 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/DeviceConnectors/DutInformation.py
|
icetea_lib.DeviceConnectors.DutInformation.DutInformationList
|
class DutInformationList(object):
"""
DutInformationList object. List of DutInformation objects in member dutinformations as a list.
helper methods for getting unique dut models in either string or list format and a list of
resource ID:s.
"""
_cache = dict()
def __init__(self, content=None):
self.dutinformations = content if content else []
def get(self, index):
"""
Get DutInformation at index index.
:param index: index as int
:return: DutInformation
"""
# pylint: disable=len-as-condition
if index > len(self):
return None
return self.dutinformations[index]
def get_uniq_string_dutmodels(self):
"""
Gets a string of dut models in this TC.
:return: String of dut models separated with commas.
String "unknown platform" if no dut information is available
"""
models = self.get_uniq_list_dutmodels()
if not models:
return ""
return ",".join(models)
def get_uniq_list_dutmodels(self):
"""
Gets a list of dut models in this TC
:return: List of dut models in this TC. Empty list if information is not available.
"""
models = []
if self.dutinformations:
for info in self.dutinformations:
models.append(info.platform)
seen = []
for item in models:
if item not in seen:
seen.append(item)
return seen
return models
def get_resource_ids(self):
"""
Get resource ids as a list.
:return: List of resource id:s or "unknown"
"""
resids = []
if self.dutinformations:
for info in self.dutinformations:
resids.append(info.resource_id)
return resids
return "unknown"
def append(self, dutinfo):
"""
Append a DutInformation object to the list.
:param dutinfo: object to append
:return: Nothing
"""
self.dutinformations.append(dutinfo)
def __len__(self):
"""
overrides len operation for DutInformationList.
:return: Length of internal dutinformation list as int
"""
return len(self.dutinformations)
@staticmethod
def push_resource_cache(resourceid, info):
"""
Cache resource specific information
:param resourceid: Resource id as string
:param info: Dict to push
:return: Nothing
"""
if not resourceid:
raise ResourceInitError("Resource id missing")
if not DutInformationList._cache.get(resourceid):
DutInformationList._cache[resourceid] = dict()
DutInformationList._cache[resourceid] = merge(DutInformationList._cache[resourceid], info)
@staticmethod
def get_resource_cache(resourceid):
"""
Get a cached dictionary related to an individual resourceid.
:param resourceid: String resource id.
:return: dict
"""
if not resourceid:
raise ResourceInitError("Resource id missing")
if not DutInformationList._cache.get(resourceid):
DutInformationList._cache[resourceid] = dict()
return DutInformationList._cache[resourceid]
|
class DutInformationList(object):
'''
DutInformationList object. List of DutInformation objects in member dutinformations as a list.
helper methods for getting unique dut models in either string or list format and a list of
resource ID:s.
'''
def __init__(self, content=None):
pass
def get(self, index):
'''
Get DutInformation at index index.
:param index: index as int
:return: DutInformation
'''
pass
def get_uniq_string_dutmodels(self):
'''
Gets a string of dut models in this TC.
:return: String of dut models separated with commas.
String "unknown platform" if no dut information is available
'''
pass
def get_uniq_list_dutmodels(self):
'''
Gets a list of dut models in this TC
:return: List of dut models in this TC. Empty list if information is not available.
'''
pass
def get_resource_ids(self):
'''
Get resource ids as a list.
:return: List of resource id:s or "unknown"
'''
pass
def append(self, dutinfo):
'''
Append a DutInformation object to the list.
:param dutinfo: object to append
:return: Nothing
'''
pass
def __len__(self):
'''
overrides len operation for DutInformationList.
:return: Length of internal dutinformation list as int
'''
pass
@staticmethod
def push_resource_cache(resourceid, info):
'''
Cache resource specific information
:param resourceid: Resource id as string
:param info: Dict to push
:return: Nothing
'''
pass
@staticmethod
def get_resource_cache(resourceid):
'''
Get a cached dictionary related to an individual resourceid.
:param resourceid: String resource id.
:return: dict
'''
pass
| 12 | 9 | 10 | 1 | 5 | 4 | 2 | 0.9 | 1 | 2 | 1 | 0 | 7 | 1 | 9 | 9 | 110 | 17 | 49 | 21 | 37 | 44 | 47 | 19 | 37 | 5 | 1 | 3 | 22 |
1,996 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Events/EventMatcher.py
|
icetea_lib.Events.EventMatcher.EventMatcher
|
class EventMatcher(Observer):
"""
EventMatcher class. This is an Observer that listens to certain events and tries to match
them to a preset match data.
"""
def __init__(self, event_type, match_data, caller=None, # pylint: disable=too-many-arguments
flag=None, callback=None, forget=True):
Observer.__init__(self)
self.caller = caller
self.event_type = event_type
self.flag_to_set = flag
self.callback = callback
self.match_data = match_data
self.__forget = forget
self.observe(event_type, self._event_received)
def _event_received(self, ref, data):
"""
Handle received event.
:param ref: ref is the object that generated the event.
:param data: event data.
:return: Nothing.
"""
match = self._resolve_match_data(ref, data)
if match:
if self.flag_to_set:
self.flag_to_set.set()
if self.callback:
self.callback(EventMatch(ref, data, match))
if self.__forget:
self.forget()
def _resolve_match_data(self, ref, event_data):
"""
If match_data is prefixed with regex: compile it to a regular expression pattern.
Match event data with match_data as either regex or string.
:param ref: Reference to object that generated this event.
:param event_data: Data from event, as string.
:return: return re.MatchObject if match found, False if ref is not caller
set for this Matcher or if no match was found.
"""
if self.caller is None:
pass
elif ref is not self.caller:
return False
try:
dat = event_data if IS_PYTHON3 else event_data.decode("utf-8")
if self.match_data.startswith("regex:"):
splt = self.match_data.split(":", 1)
pttrn = re.compile(splt[1])
match = re.search(pttrn, dat)
return match if match is not None else False
return event_data if self.match_data in dat else False
except UnicodeDecodeError:
dat = repr(event_data)
return self._resolve_match_data(ref, dat)
|
class EventMatcher(Observer):
'''
EventMatcher class. This is an Observer that listens to certain events and tries to match
them to a preset match data.
'''
def __init__(self, event_type, match_data, caller=None, # pylint: disable=too-many-arguments
flag=None, callback=None, forget=True):
pass
def _event_received(self, ref, data):
'''
Handle received event.
:param ref: ref is the object that generated the event.
:param data: event data.
:return: Nothing.
'''
pass
def _resolve_match_data(self, ref, event_data):
'''
If match_data is prefixed with regex: compile it to a regular expression pattern.
Match event data with match_data as either regex or string.
:param ref: Reference to object that generated this event.
:param event_data: Data from event, as string.
:return: return re.MatchObject if match found, False if ref is not caller
set for this Matcher or if no match was found.
'''
pass
| 4 | 3 | 17 | 1 | 12 | 5 | 5 | 0.53 | 1 | 2 | 1 | 0 | 3 | 6 | 3 | 7 | 58 | 4 | 36 | 16 | 31 | 19 | 34 | 15 | 30 | 8 | 2 | 2 | 14 |
1,997 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Events/Generics.py
|
icetea_lib.Events.Generics.Event
|
class Event(object):
"""
Event emitter
Usage:
1. Call from anywhere in your code
The apropriate observers will get notified once the event fires
"""
def __init__(self, event, *callback_args):
for observer in Observer._observers:
if event in observer._observed_events:
observer._observed_events[event](*callback_args)
|
class Event(object):
'''
Event emitter
Usage:
1. Call from anywhere in your code
The apropriate observers will get notified once the event fires
'''
def __init__(self, event, *callback_args):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 3 | 1.2 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 11 | 0 | 5 | 3 | 3 | 6 | 5 | 3 | 3 | 3 | 1 | 2 | 3 |
1,998 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Events/Generics.py
|
icetea_lib.Events.Generics.EventTypes
|
class EventTypes(object):
"""
Enum for event types
"""
DUT_LINE_RECEIVED = 1
|
class EventTypes(object):
'''
Enum for event types
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 2 | 2 | 1 | 3 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
1,999 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Events/Generics.py
|
icetea_lib.Events.Generics.Observer
|
class Observer(object):
"""
Usage:
1. Inherit in the class you want to listen for events
2. Make sure Observer's initializer gets called!
3. Use .observe(event_name, callback_function) to register
events that u want to observe. event_name is just a string-
identifier for a certain event
"""
_observers = []
def __init__(self):
self._observed_events = {}
self.reinit()
def reinit(self):
"""
Forget old subscriptions and restart observer.
:return: Nothing
"""
self.forget()
self._observers.append(self)
def observe(self, event, callback_fn):
"""
Observe function. Sets callback function for received event
:param event: Event type
:param callback_fn: Callable
:return: Nothing
"""
self._observed_events[event] = callback_fn
def forget(self):
"""
Reset _observed events. Remove self from observers.
:return: Nothing
"""
self._observed_events = {}
if self in self._observers:
self._observers.remove(self)
|
class Observer(object):
'''
Usage:
1. Inherit in the class you want to listen for events
2. Make sure Observer's initializer gets called!
3. Use .observe(event_name, callback_function) to register
events that u want to observe. event_name is just a string-
identifier for a certain event
'''
def __init__(self):
pass
def reinit(self):
'''
Forget old subscriptions and restart observer.
:return: Nothing
'''
pass
def observe(self, event, callback_fn):
'''
Observe function. Sets callback function for received event
:param event: Event type
:param callback_fn: Callable
:return: Nothing
'''
pass
def forget(self):
'''
Reset _observed events. Remove self from observers.
:return: Nothing
'''
pass
| 5 | 4 | 7 | 0 | 3 | 4 | 1 | 1.57 | 1 | 0 | 0 | 1 | 4 | 1 | 4 | 4 | 40 | 4 | 14 | 7 | 9 | 22 | 14 | 7 | 9 | 2 | 1 | 1 | 5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.