Dataset columns, with dtype and the minimum/maximum value (or string length) observed:

| Column | Type | Min | Max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string (length) | 7 | 55 |
| file_path | string (length) | 9 | 332 |
| class_name | string (length) | 3 | 290 |
| human_written_code | string (length) | 12 | 4.36M |
| class_skeleton | string (length) | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

For string columns the min and max refer to string length; for numeric columns they are value ranges. Each sample row below lists the fields in this column order: id, repository_name, file_path, class_name, the full human_written_code, the class_skeleton, and finally the 29 numeric metric values.
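If this preview comes from a dataset hosted on the Hugging Face Hub (the dataset id itself is not shown in this excerpt), a minimal sketch of loading it and inspecting the columns above with the `datasets` library could look like the following. The dataset id and the complexity threshold are placeholders, not values taken from this page.

```python
# Minimal sketch, assuming the preview is a Hugging Face dataset.
# "your-org/class-metrics-dataset" is a placeholder id, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("your-org/class-metrics-dataset", split="train")

# The schema should match the column table above.
print(ds.features)

# One sample: repository, file, fully qualified class name, and its skeleton.
sample = ds[0]
print(sample["repository_name"], sample["file_path"], sample["class_name"])
print(sample["class_skeleton"])

# Example: keep only classes whose most complex method exceeds a chosen
# cyclomatic-complexity threshold (threshold picked arbitrarily here).
complex_classes = ds.filter(lambda ex: ex["MaxCyclomatic"] > 10)
print(len(complex_classes), "classes with MaxCyclomatic > 10")
```

The column names used in the sketch are exactly those listed in the schema table; only the dataset id needs to be substituted.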
2,000 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/IceteaManager.py | icetea_lib.IceteaManager.ExitCodes |
class ExitCodes:
"""
Console exit codes
"""
EXIT_SUCCESS = 0
EXIT_ERROR = 1
EXIT_FAIL = 2
EXIT_INCONC = 3
|
class ExitCodes:
'''
Console exit codes
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 5 | 5 | 4 | 3 | 5 | 5 | 4 | 0 | 0 | 0 | 0 |
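The first sample (id 2,000) pairs the full source of a class (human_written_code) with a class_skeleton in which signatures and docstrings are kept and bodies are stubbed out with `pass`. Purely as an illustration of that relationship (not claimed to be the dataset's actual generation code), such a skeleton could be derived with Python's `ast` module:

```python
# Illustrative sketch only: derives a rough class skeleton (signatures +
# docstrings, bodies stubbed with `pass`) from Python source using ast.
# The dataset's own skeleton generator may differ in details (quoting style,
# dropped class attributes, etc.).
import ast


def make_skeleton(source: str) -> str:
    tree = ast.parse(source)
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            doc = ast.get_docstring(node)
            body = [ast.Expr(ast.Constant(doc))] if doc else []
            body.append(ast.Pass())
            node.body = body  # keep the signature, drop the implementation
    return ast.unparse(tree)  # requires Python 3.9+


example = (
    "class ExitCodes:\n"
    '    """Console exit codes"""\n'
    "    EXIT_SUCCESS = 0\n"
    "\n"
    "    def describe(self):\n"
    "        return 'ok'\n"
)
print(make_skeleton(example))
```

Run against the ExitCodes sample above, a sketch like this keeps the class docstring and constants and stubs any methods, so it only approximates the skeleton column shown here.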
2,001 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/IceteaManager.py | icetea_lib.IceteaManager.IceteaManager |
class IceteaManager(object):
"""
IceteaManager class. This is the master of the entire run. The primary entry point into
execution is the run method.
"""
def __init__(self):
"""
Constructor for IceteaManager. Appends libraries to sys.path, loads the test case
metadata schema, parses arguments and initializes logging.
"""
self.libpath = os.sep.join(os.path.abspath(__file__).split(os.sep)[:-1])
sys.path.append(self.libpath)
libpath2 = os.sep.join(self.libpath.split(os.sep)[:-1])
sys.path.append(libpath2)
# Initialize TCMetaSchema with correct libpath
TCMetaSchema(self.libpath)
self.args, self.unknown = IceteaManager._parse_arguments()
# If called with --clean, clean up logs.
if self.args.clean:
_cleanlogs(silent=self.args.silent, log_location=self.args.log)
LogManager.init_base_logging(self.args.log, verbose=self.args.verbose,
silent=self.args.silent, color=self.args.color,
no_file=(self.args.list or self.args.listsuites),
truncate=not self.args.disable_log_truncate)
self.logger = LogManager.get_logger("icetea")
self.pluginmanager = None
self.resourceprovider = ResourceProvider(self.args)
self._init_pluginmanager()
self.resourceprovider.set_pluginmanager(self.pluginmanager)
@staticmethod
def list_suites(suitedir="./testcases/suites", cloud=False):
"""
Static method for listing suites from both local source and cloud.
Uses PrettyTable to generate the table.
:param suitedir: Local directory for suites.
:param cloud: cloud module
:return: PrettyTable object or None if no test cases were found
"""
suites = []
suites.extend(TestSuite.get_suite_files(suitedir))
# no suitedir, or no suites -> append cloud.get_campaigns()
if cloud:
names = cloud.get_campaign_names()
if names:
suites.append("------------------------------------")
suites.append("FROM CLOUD:")
suites.extend(names)
if not suites:
return None
from prettytable import PrettyTable
table = PrettyTable(["Testcase suites"])
for suite in suites:
table.add_row([suite])
return table
@staticmethod
def _parse_arguments():
"""
Static method for paring arguments
"""
parser = get_base_arguments(get_parser())
parser = get_tc_arguments(parser)
args, unknown = parser.parse_known_args()
return args, unknown
def check_args(self):
"""
Validates that a valid number of arguments were received and that all arguments were
recognised.
:return: True or False.
"""
parser = get_base_arguments(get_parser())
parser = get_tc_arguments(parser)
# Disable "Do not use len(SEQ) as condition value"
# pylint: disable=C1801
if len(sys.argv) < 2:
self.logger.error("Icetea called with no arguments! ")
parser.print_help()
return False
elif not self.args.ignore_invalid_params and self.unknown:
self.logger.error("Unknown parameters received, exiting. "
"To ignore this add --ignore_invalid_params flag.")
self.logger.error("Following parameters were unknown: {}".format(self.unknown))
parser.print_help()
return False
return True
def _init_pluginmanager(self):
"""
Initialize PluginManager and load run wide plugins.
"""
self.pluginmanager = PluginManager(logger=self.logger)
self.logger.debug("Registering execution wide plugins:")
self.pluginmanager.load_default_run_plugins()
self.pluginmanager.load_custom_run_plugins(self.args.plugin_path)
self.logger.debug("Execution wide plugins loaded and registered.")
def run(self, args=None):
"""
Runs the set of tests within the given path.
"""
# Disable "Too many branches" and "Too many return statemets" warnings
# pylint: disable=R0912,R0911
retcodesummary = ExitCodes.EXIT_SUCCESS
self.args = args if args else self.args
if not self.check_args():
return retcodesummary
if self.args.clean:
if not self.args.tc and not self.args.suite:
return retcodesummary
# If called with --version print version and exit
version = get_fw_version()
if self.args.version and version:
print(version)
return retcodesummary
elif self.args.version and not version:
print("Unable to get version. Have you installed Icetea correctly?")
return retcodesummary
self.logger.info("Using Icetea version {}".format(version) if version
else "Unable to get Icetea version. Is Icetea installed?")
# If cloud set, import cloud, get parameters from environment, initialize cloud
cloud = self._init_cloud(self.args.cloud)
# Check if called with listsuites. If so, print out suites either from cloud or from local
if self.args.listsuites:
table = self.list_suites(self.args.suitedir, cloud)
if table is None:
self.logger.error("No suites found!")
retcodesummary = ExitCodes.EXIT_FAIL
else:
print(table)
return retcodesummary
try:
testsuite = TestSuite(logger=self.logger, cloud_module=cloud, args=self.args)
except SuiteException as error:
self.logger.error("Something went wrong in suite creation! {}".format(error))
retcodesummary = ExitCodes.EXIT_INCONC
return retcodesummary
if self.args.list:
if self.args.cloud:
testsuite.update_testcases()
testcases = testsuite.list_testcases()
print(testcases)
return retcodesummary
results = self.runtestsuite(testsuite=testsuite)
if not results:
retcodesummary = ExitCodes.EXIT_SUCCESS
elif results.failure_count() and self.args.failure_return_value is True:
retcodesummary = ExitCodes.EXIT_FAIL
elif results.inconclusive_count() and self.args.failure_return_value is True:
retcodesummary = ExitCodes.EXIT_INCONC
return retcodesummary
def runtestsuite(self, testsuite):
"""
Runs a single test suite
:param testsuite: TestSuite
:return: ResultList
"""
if testsuite.status == TestStatus.READY:
results = testsuite.run()
else:
results = ResultList()
# Disable "Expression is assigned to nothing" warning
# pylint: disable=W0106
[handler.flush() for handler in self.logger.handlers]
results.save(heads={'Build': '', 'Branch': self.args.branch})
sys.stdout.flush()
self._cleanup_resourceprovider()
return results
# Disable "String statement has no effect" warning
# pylint: disable=W0105
"""
PRIVATE FUNCTIONS HERE
"""
def _cleanup_resourceprovider(self):
"""
Calls cleanup for ResourceProvider of this run.
:return: Nothing
"""
# Disable too broad exception warning
# pylint: disable=W0703
self.resourceprovider = ResourceProvider(self.args)
try:
self.resourceprovider.cleanup()
self.logger.info("Cleanup done.")
except Exception as error:
self.logger.error("Cleanup failed! %s", error)
def _init_cloud(self, cloud_arg):
"""
Initializes Cloud module if cloud_arg is set.
:param cloud_arg: taken from args.cloud
:return: cloud module object instance
"""
# Disable too broad exception warning
# pylint: disable=W0703
cloud = None
if cloud_arg:
try:
if hasattr(self.args, "cm"):
cloud_module = self.args.cm if self.args.cm else None
self.logger.info("Creating cloud module {}.".format(cloud_module))
else:
cloud_module = None
cloud = Cloud(host=None, module=cloud_module, logger=self.logger, args=self.args)
except Exception as error:
self.logger.warning("Cloud module could not be initialized: {}".format(error))
cloud = None
return cloud
|
class IceteaManager(object):
'''
IceteaManager class. This is the master of the entire run. The primary entry point into
execution is the run method.
'''
def __init__(self):
'''
Constructor for IceteaManager. Appends libraries to sys.path, loads the test case
metadata schema, parses arguments and initializes logging.
'''
pass
@staticmethod
def list_suites(suitedir="./testcases/suites", cloud=False):
'''
Static method for listing suites from both local source and cloud.
Uses PrettyTable to generate the table.
:param suitedir: Local directory for suites.
:param cloud: cloud module
:return: PrettyTable object or None if no test cases were found
'''
pass
@staticmethod
def _parse_arguments():
'''
Static method for paring arguments
'''
pass
def check_args(self):
'''
Validates that a valid number of arguments were received and that all arguments were
recognised.
:return: True or False.
'''
pass
def _init_pluginmanager(self):
'''
Initialize PluginManager and load run wide plugins.
'''
pass
def run(self, args=None):
'''
Runs the set of tests within the given path.
'''
pass
def runtestsuite(self, testsuite):
'''
Runs a single test suite
:param testsuite: TestSuite
:return: ResultList
'''
pass
def _cleanup_resourceprovider(self):
'''
Calls cleanup for ResourceProvider of this run.
:return: Nothing
'''
pass
def _init_cloud(self, cloud_arg):
'''
Initializes Cloud module if cloud_arg is set.
:param cloud_arg: taken from args.cloud
:return: cloud module object instance
'''
pass
| 12 | 10 | 24 | 2 | 15 | 6 | 4 | 0.46 | 1 | 10 | 9 | 0 | 7 | 6 | 9 | 9 | 234 | 31 | 139 | 39 | 126 | 64 | 125 | 34 | 114 | 16 | 1 | 3 | 37 |
2,002 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/IceteaManager.py | icetea_lib.IceteaManager.TCMetaSchema |
class TCMetaSchema(object):
"""
Singleton metadata schema object.
"""
__metaclass__ = Singleton
def __init__(self, libpath="./icetea_lib"):
with open(os.path.join(libpath, 'tc_schema.json')) as data_file:
self._tc_meta_schema = json.load(data_file)
def get_meta_schema(self):
"""
Getter for tc meta schema.
:return: tc_meta_schema
"""
return self._tc_meta_schema
|
class TCMetaSchema(object):
'''
Singleton metadata schema object.
'''
def __init__(self, libpath="./icetea_lib"):
pass
def get_meta_schema(self):
'''
Getter for tc meta schema.
:return: tc_meta_schema
'''
pass
| 3 | 2 | 5 | 1 | 3 | 2 | 1 | 1 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 17 | 3 | 7 | 6 | 4 | 7 | 7 | 5 | 4 | 1 | 1 | 1 | 2 |
2,003 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/LogManager.py | icetea_lib.LogManager.BenchFormatter |
class BenchFormatter(logging.Formatter):
"""
Handle time zone conversion to UTC and append milliseconds on %F.
"""
converter = datetime.datetime.fromtimestamp
def formatTime(self, record, datefmt=None):
date_and_time = self.converter(record.created, tz=pytz.utc)
if "%F" in datefmt:
msec = "%03d" % record.msecs
datefmt = datefmt.replace("%F", msec)
str_time = date_and_time.strftime(datefmt)
return str_time
|
class BenchFormatter(logging.Formatter):
'''
Handle time zone conversion to UTC and append milliseconds on %F.
'''
def formatTime(self, record, datefmt=None):
pass
| 2 | 1 | 7 | 0 | 7 | 0 | 2 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 8 | 13 | 1 | 9 | 6 | 7 | 3 | 9 | 6 | 7 | 2 | 2 | 1 | 2 |
2,004 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/Events/EventMatch.py | icetea_lib.Events.EventMatch.EventMatch |
class EventMatch(object): # pylint: disable=too-few-public-methods
"""
EventMatcher callback object
"""
def __init__(self, ref, event_data, match):
"""
:param ref: reference object
:param event_data: original event data which matches
:param match: re.MatchObject or string depend on EventMatcher configuration
"""
self.ref = ref
self.event_data = event_data
self.match = match
|
class EventMatch(object):
'''
EventMatcher callback object
'''
def __init__(self, ref, event_data, match):
'''
:param ref: reference object
:param event_data: original event data which matches
:param match: re.MatchObject or string depend on EventMatcher configuration
'''
pass
| 2 | 2 | 9 | 0 | 4 | 5 | 1 | 1.8 | 1 | 0 | 0 | 0 | 1 | 3 | 1 | 1 | 13 | 0 | 5 | 5 | 3 | 9 | 5 | 5 | 3 | 1 | 1 | 0 | 1 |
2,005 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_testcase_func.py | examples.testcase_example_usage.sample_testcase_func.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_testcase_func",
title="test case functions example usage",
status="development",
type="smoke",
purpose="show an example usage of Icetea test case functions",
component=["Icetea"],
requirements={
"duts": {
'*': {
"count": 0, # devices number
}
}
}
)
def setup(self):
self.logger.info("Here is your test case customized setUp!")
def case(self):
self.logger.info("Here is your test case content!")
def teardown(self):
self.logger.info("Here is your test case customized tearDown!")
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 6 | 0 | 6 | 0 | 1 | 0.04 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 110 | 26 | 3 | 23 | 5 | 18 | 1 | 9 | 5 | 4 | 1 | 3 | 0 | 4 |
2,006 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_get_time.py | examples.testcase_example_usage.sample_get_time.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_get_time",
title="get_time() functions example usage",
status="development",
type="smoke",
purpose="show an example usage of Icetea get_time() functions",
component=["Icetea"],
requirements={
"duts": {
'*': {
"count": 1, # devices number
"type": "process", # "hardware" (by default) or "process"
"application": {
"bin": "build_path/build_full_name", # build binary path
}
}
}
}
)
def case(self):
# wait for 3 seconds
self.delay(3)
# get time
self.logger.info("time interval between current time and test case start time: %s",
self.get_time())
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 13 | 0 | 12 | 3 | 1 | 0.2 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 28 | 1 | 25 | 3 | 22 | 5 | 6 | 3 | 3 | 1 | 3 | 0 | 2 |
2,007 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_get_dut.py | examples.testcase_example_usage.sample_get_dut.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_get_dut",
title="get_dut(index) functions example usage",
status="development",
type="smoke",
purpose="show an example usage of Icetea get_dut(index) functions",
component=["Icetea"],
requirements={
"duts": {
'*': {
"count": 1, # devices number
"type": "hardware", # "hardware" (by default) or "process"
"application": {
"bin": "build_path/build_full_name", # build binary path
}
}
}
}
)
def case(self):
# Close connection
self.get_dut(1).close_connection()
# wait a second
self.delay(1)
# open connection
self.get_dut(1).open_connection()
# get port name or path
if self.get_dut(1).comport:
self.logger.info("DUT serial port is %s", self.get_dut(1).comport)
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 17 | 2 | 13 | 4 | 2 | 0.26 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 35 | 4 | 27 | 3 | 24 | 7 | 9 | 3 | 6 | 2 | 3 | 1 | 3 |
2,008 | ARMmbed/icetea | ARMmbed_icetea/test/test_dut.py | test.test_dut.DutTestcase |
class DutTestcase(unittest.TestCase):
def test_executionready_wait_skip(self, mock_log): # pylint: disable=unused-argument
dut = Dut("test_dut")
with mock.patch.object(dut, "response_received") as mock_r:
mock_r.wait = mock.MagicMock()
mock_r.wait.return_value = False
dut.query_timeout = 0
dut.response_coming_in = -1
with self.assertRaises(TestStepError):
dut._wait_for_exec_ready() # pylint: disable=protected-access
mock_r.wait.return_value = True
dut.query_timeout = 1
with self.assertRaises(TestStepError):
dut._wait_for_exec_ready() # pylint: disable=protected-access
def test_execready_wait_timeout(self, mock_log): # pylint: disable=unused-argument
dut = Dut("test_dut")
with mock.patch.object(dut, "response_received") as mock_r:
with mock.patch.object(dut, "get_time", return_value=2):
mock_r.wait = mock.MagicMock()
mock_r.wait.return_value = False
dut.response_coming_in = -1
dut.query_timeout = 1
with self.assertRaises(TestStepTimeout):
dut._wait_for_exec_ready() # pylint: disable=protected-access
def test_initclihuman(self, mock_log): # pylint: disable=unused-argument
dut = Dut("test_Dut")
with mock.patch.object(dut, "execute_command") as m_com:
dut.post_cli_cmds = None
dut.init_cli_human()
self.assertListEqual(dut.post_cli_cmds, dut.set_default_init_cli_human_cmds())
self.assertEqual(len(m_com.mock_calls), len(dut.post_cli_cmds))
m_com.reset_mock()
dut.post_cli_cmds = ["com1", "com2"]
dut.init_cli_human()
self.assertListEqual(dut.post_cli_cmds, ["com1", "com2"])
self.assertEqual(len(m_com.mock_calls), 2)
m_com.reset_mock()
dut.post_cli_cmds = [["com1", True, False]]
dut.init_cli_human()
self.assertListEqual(dut.post_cli_cmds, [["com1", True, False]])
self.assertEqual(len(m_com.mock_calls), 1)
m_com.assert_called_once_with("com1", wait=False, asynchronous=True)
def test_dut_run(self, mock_log):
dut = Dut("test_Dut")
type(dut)._logger = mock.MagicMock()
dut.logger = mock.MagicMock()
semaphore = mock.MagicMock()
semaphore.acquire = mock.MagicMock(side_effect=[True, Exception])
type(dut)._sem = semaphore
type(dut)._signalled_duts = [dut]
type(dut)._run = mock.MagicMock(side_effect=[True, False])
type(dut).waiting_for_response = mock.PropertyMock(return_value=None)
query = mock.MagicMock()
query.cmd = mock.MagicMock()
dut.query = query
dut.writeline = mock.MagicMock(side_effect=[Exception])
dut.readline = mock.MagicMock(return_value=False)
dut.response_received = mock.MagicMock()
with self.assertRaises(Exception):
dut.run()
def test_store_traces(self, mock_log):
dut = Dut("test")
self.assertTrue(dut.store_traces)
dut.store_traces = False
self.assertFalse(dut.store_traces)
dut.store_traces = True
self.assertTrue(dut.store_traces)
def test_read_reasponse(self, mock_log):
dut = Dut("test")
with mock.patch.object(dut, "readline") as mocked_readline:
mocked_readline.side_effect = ["retcode: 0", RuntimeError, "retcode: 0", "test"]
resp = dut._read_response()
self.assertTrue(isinstance(resp, CliResponse))
self.assertEqual(dut._read_response(), -1)
prior_len = len(dut.traces)
dut.store_traces = False
dut._read_response()
self.assertEqual(len(dut.traces), prior_len)
dut.store_traces = True
resp = dut._read_response()
self.assertEqual(len(dut.traces), prior_len + 1)
self.assertIsNone(resp)
def test_wait_for_exec_ready(self, mock_log):
dut_obj = Dut("test")
dut_obj.get_time = mock.MagicMock(return_value=10)
dut_obj.query_timeout = 9
mocked_response = mock.MagicMock()
type(dut_obj).response_received = mock.PropertyMock(return_value=mocked_response)
mocked_response.wait = mock.MagicMock(return_value=False)
with self.assertRaises(TestStepTimeout):
dut_obj._wait_for_exec_ready()
|
class DutTestcase(unittest.TestCase):
def test_executionready_wait_skip(self, mock_log):
pass
def test_execready_wait_timeout(self, mock_log):
pass
def test_initclihuman(self, mock_log):
pass
def test_dut_run(self, mock_log):
pass
def test_store_traces(self, mock_log):
pass
def test_read_reasponse(self, mock_log):
pass
def test_wait_for_exec_ready(self, mock_log):
pass
| 8 | 0 | 14 | 1 | 13 | 1 | 1 | 0.07 | 1 | 7 | 4 | 0 | 7 | 0 | 7 | 79 | 105 | 14 | 91 | 24 | 83 | 6 | 91 | 20 | 83 | 1 | 2 | 3 | 7 |
2,009 | ARMmbed/icetea | ARMmbed_icetea/examples/example_plugins/example_allocator.py | examples.example_plugins.example_allocator.ExampleAllocator |
class ExampleAllocator(BaseAllocator):
"""
The allocator needs to be a class that implements the api from BaseAllocator.
"""
def __init__(self, args=None, logger=None):
super(ExampleAllocator, self).__init__()
def can_allocate(self, dut_configuration):
return True
def allocate(self, dut_configuration_list, args=None):
return AllocationContextList()
def cleanup(self):
pass
def release(self, dut=None):
pass
def share_allocations(self):
return False
|
class ExampleAllocator(BaseAllocator):
'''
The allocator needs to be a class that implements the api from BaseAllocator.
'''
def __init__(self, args=None, logger=None):
pass
def can_allocate(self, dut_configuration):
pass
def allocate(self, dut_configuration_list, args=None):
pass
def cleanup(self):
pass
def release(self, dut=None):
pass
def share_allocations(self):
pass
| 7 | 1 | 2 | 0 | 2 | 0 | 1 | 0.23 | 1 | 2 | 1 | 0 | 6 | 0 | 6 | 11 | 21 | 5 | 13 | 7 | 6 | 3 | 13 | 7 | 6 | 1 | 2 | 0 | 6 |
2,010 | ARMmbed/icetea | ARMmbed_icetea/examples/example_plugins/example_allocator.py | examples.example_plugins.example_allocator.ExampleAllocatorPlugin |
class ExampleAllocatorPlugin(RunPluginBase):
def __init__(self):
super(ExampleAllocatorPlugin, self).__init__()
pass
def get_allocators(self):
"""
Return reference to allocator class.
"""
return {"ExampleAllocator": ExampleAllocator}
|
class ExampleAllocatorPlugin(RunPluginBase):
def __init__(self):
pass
def get_allocators(self):
'''
Return reference to allocator class.
'''
pass
| 3 | 1 | 4 | 0 | 3 | 2 | 1 | 0.5 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 4 | 10 | 1 | 6 | 3 | 3 | 3 | 6 | 3 | 3 | 1 | 2 | 0 | 2 |
2,011 | ARMmbed/icetea | ARMmbed_icetea/examples/example_plugins/example_bench_plugin.py | examples.example_plugins.example_bench_plugin.BenchPlugin |
class BenchPlugin(PluginBase):
def __init__(self):
super(BenchPlugin, self).__init__()
self.bench = None
def init(self, bench=None):
"""
This function provides access to the test bench object.
:param bench: Bench
:return: Nothing
"""
if bench is None:
raise PluginException("Bench not provided!")
self.bench = bench
def get_bench_api(self):
"""
This function should return a dictionary. The keys are added to Bench as attributes and
their values are set as the values of the attribute. In a test case, calling
self.plugin_func() should result in a call to example_plugin_function.
:return: dict
"""
return {"plugin_func": self.example_plugin_function,
"plugin_class": ExamplePluginClass}
def example_plugin_function(self):
pass
|
class BenchPlugin(PluginBase):
def __init__(self):
pass
def init(self, bench=None):
'''
This function provides access to the test bench object.
:param bench: Bench
:return: Nothing
'''
pass
def get_bench_api(self):
'''
This function should return a dictionary. The keys are added to Bench as attributes and
their values are set as the values of the attribute. In a test case, calling
self.plugin_func() should result in a call to example_plugin_function.
:return: dict
'''
pass
def example_plugin_function(self):
pass
| 5 | 2 | 6 | 0 | 3 | 3 | 1 | 0.85 | 1 | 3 | 2 | 0 | 4 | 1 | 4 | 11 | 27 | 3 | 13 | 6 | 8 | 11 | 12 | 6 | 7 | 2 | 2 | 1 | 5 |
2,012 | ARMmbed/icetea | ARMmbed_icetea/examples/example_plugins/example_bench_plugin.py | examples.example_plugins.example_bench_plugin.ExamplePluginClass |
class ExamplePluginClass(object):
def __init__(self, *args, **kwargs):
pass
|
class ExamplePluginClass(object):
def __init__(self, *args, **kwargs):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
2,013 | ARMmbed/icetea | ARMmbed_icetea/examples/example_plugins/example_multi_plugin.py | examples.example_plugins.example_multi_plugin.ExamplePlugin |
class ExamplePlugin(PluginBase):
def __init__(self):
super(ExamplePlugin, self).__init__()
def get_external_services(self):
"""
:return: dict
"""
return {"ExampleService": ExampleService}
def get_parsers(self):
"""
:return: dict
"""
return {"ExampleParserPlugin": self.example_parser}
def example_parser(self, data):
"""
A simple example parser that looks for one occurence of string 'one'.
:param data: line of data as string
:return: dict
"""
parsed = PluginBase.find_one(data, "one")
return {"one_found": parsed}
|
class ExamplePlugin(PluginBase):
def __init__(self):
pass
def get_external_services(self):
'''
:return: dict
'''
pass
def get_parsers(self):
'''
:return: dict
'''
pass
def example_parser(self, data):
'''
A simple example parser that looks for one occurence of string 'one'.
:param data: line of data as string
:return: dict
'''
pass
| 5 | 3 | 5 | 0 | 2 | 3 | 1 | 1.1 | 1 | 2 | 1 | 0 | 4 | 0 | 4 | 11 | 24 | 3 | 10 | 6 | 5 | 11 | 10 | 6 | 5 | 1 | 2 | 0 | 4 |
2,014 | ARMmbed/icetea | ARMmbed_icetea/examples/example_plugins/example_multi_plugin.py | examples.example_plugins.example_multi_plugin.ExampleService |
class ExampleService(object):
def __init__(self, name, **kwargs):
"""
:param name: Name of the service
:param kwargs: kwargs 'conf' and 'bench'
"""
self.name = name
self.conf = kwargs["conf"]
self.bench = kwargs["bench"]
def start(self):
pass
def stop(self):
pass
|
class ExampleService(object):
def __init__(self, name, **kwargs):
'''
:param name: Name of the service
:param kwargs: kwargs 'conf' and 'bench'
'''
pass
def start(self):
pass
def stop(self):
pass
| 4 | 1 | 4 | 0 | 3 | 1 | 1 | 0.44 | 1 | 0 | 0 | 0 | 3 | 3 | 3 | 3 | 15 | 2 | 9 | 7 | 5 | 4 | 9 | 7 | 5 | 1 | 1 | 0 | 3 |
2,015 | ARMmbed/icetea | ARMmbed_icetea/examples/multiple_process.py | examples.multiple_process.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_multiple_processes",
title="Example test for multiple processes",
status="released",
purpose="Demo process duts",
component=["cmdline"],
type="smoke",
requirements={
"duts": {
'*': {
"count": 10,
"type": "process",
"application": {
"name": "dummyDut",
"bin": "test/dut/dummyDut"
}
},
'1': {
"nick": "DUT1"
},
'2..10': {
"nick": "DUT#{i}"
}
}}
)
def setup(self):
pass
def case(self):
self.command("DUT1", "echo hello DUT1")
while True:
try:
self.command("*", "echo hello world")
except KeyboardInterrupt:
break
def teardown(self):
pass
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 9 | 0 | 9 | 0 | 2 | 0.03 | 1 | 1 | 0 | 0 | 4 | 0 | 4 | 110 | 41 | 3 | 38 | 5 | 33 | 1 | 14 | 5 | 9 | 3 | 3 | 2 | 6 |
2,016 | ARMmbed/icetea | ARMmbed_icetea/examples/multiple_test_cases_by_file_example/multiple_tests_cases.py | examples.multiple_test_cases_by_file_example.multiple_tests_cases.MultipleTestsCaseExampleTestEnv |
class MultipleTestsCaseExampleTestEnv(Bench):
def __init__(self, **kwargs):
testcase_args = {
'title': "dummy",
'status': "unknown",
'type': "functional",
'purpose': "dummy",
'requirements': {
"duts": {
'*': {
"count": 0,
}
}
}
}
testcase_args.update(kwargs)
Bench.__init__(self, **testcase_args)
def setup(self):
self.logger.info("MultipleTestsCaseExampleTestEnv.setup")
# setup code
pass
def teardown(self):
# teardown code
self.logger.info("MultipleTestsCaseExampleTestEnv.teardown")
pass
|
class MultipleTestsCaseExampleTestEnv(Bench):
def __init__(self, **kwargs):
pass
def setup(self):
pass
def teardown(self):
pass
| 4 | 0 | 8 | 0 | 7 | 1 | 1 | 0.09 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 109 | 27 | 2 | 23 | 5 | 19 | 2 | 11 | 5 | 7 | 1 | 3 | 0 | 3 |
2,017 | ARMmbed/icetea | ARMmbed_icetea/examples/sample.py | examples.sample.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample", # short name for testcase, have to be unique,
# e.g. "icetea-sample"
title="Smoke sample test", # short title
# Implementation Status
# "released" TC is verified and are ready to be execute
# "development" TC is in development and not yet ready for testing
# "maintenance" TC is ready, but for some reason (e.g. DUT interface
# changes) it cannot be executed for now
# "broken" Some why this TC is not working correctly,
# but not because of DUT. E.g. some major framework changes
# "unknown" Unknown status
status="unknown", # allowed values: released, development, maintenance,
# broken, unknown
# Test Case type
# "installation" (just in case)
# "compatibility" Verify compatibility,
# e.g. two different versions of DUTs
# "smoke" Verify just very basic situation
# "regression"
# "acceptance"
# "functional"
# "stability"
# "destructive"
# "performance"
# "reliability"
# "functional"
type="acceptance", # allowed values: installation, compatibility, smoke,
# regression, acceptance, alpha, beta, destructive, performance.
# Allowed type -specific sub-types.
# type acceptance - sub-type certification.
#
sub_type="certification",
purpose="Demostrate FW TC API", # test case purpose.
specification_href="http://.....",
component=["thread"], # components under test as a list.
feature=[], # list of features under test.
# Compatibility related information
# This section can be normally ignored,
# because all cases should be automation compatible.
compatible={
"automation": { # by default all cases are automation compatible
"value": False,
# Reason why this is not automation compatible.
"reason": "This is just dummy sample"
},
"hw": {
"value": True,
# there will be more information later.
},
"framework": {
"name": "icetea",
# Version of framework required.
# This will be checked if you run icetea with --check_version.
"version": ">0.3.2" # Allowed values in semantic version format.
}
},
# This part can exists when execution should be skipped every time,
# of course there should be reason for skipping a test case.
# execution={
# "skip": {
# "value": False,
# "reason": "This is just dummy sample"}},
# Test case requirements
requirements={ # Device Under Test related configurations
"duts": { # default requirements for all nodes
'*': {
"count": 10, # Test required 10 DUTs
"type": "hardware", # allowed values: hardware, process
"allowed_platforms": [
"K64F"
# "NRF51_DK",... any mbed enabled platforms
# which support required test application
],
"application": {
# Application name and version requirements
# @todo implementation missing!
"name": "generalTestApplication",
"version": "1.0",
# You can also specify HTTP url for binary file,
# which will be downloaded and flashed
# e.g. https://www.some.thing.com/mybin.hex
"bin": "./tools/sample.exe", # this is relative path
"init_cli_cmds": [], # overwrite default dut init commands,
# list of commands to run
"post_cli_cmds": [], # overwrite default dut post commands,
# list of commands to run
},
# Specify location: x = 0.0, y = 10.0 (units).
"location": [0.0, 10.0],
},
# specific values for node 1
"1": {
"nick": 'leader' # Specify nick name for DUT 1
},
# specific values for node 2
"2": {
# variables to use here:
# {n} = duts total count
# {i} = dut index
"nick": 'router'
},
# Specify requirements for multiple dut's. e.g. 3-10
# variables to use here
# {n} = duts total count
# {i} = dut index
# {pi} = math.pi
# {xy} = x-axis: -> 0, y-axis -> 1
# you can also set math inside location,
# e.g. "location: ["cos(%n/7*$n*2*$pi)*50",
# "sin($n/7*$n*2*$pi)*50"]
# -> put 7 nodes to a circle, which distance is 50 meter
"3..10": {"nick": "Router{i}", "location": ["{n}", "{n}*{i}*{pi}"]}
}
# External applications which should be started before TC rampUp and
# will be killed in end of test.
# Some applications have their own wrapper classes,
# which implement methods
# and properties for use in test cases. These
# applications can be run along with
# the test case by just giving the name field, possible
# values can be found in ExtApps folder.
# These applications might have additional configuration
# parameters dependent
# on the environment, these have to be defined in
# env_cfg.json.
# "external": {
# apps": [
# {
# "name": "ExtApp"
# },
# {
# Other applications require a configuration field as well,
# containing the application command and executable path
# "name": "lighting",
# "config": {
# "cmd": "runLighting.bat",
# "path": "../../lighting/bin"
# }
# }
# ]
# }
}
)
def setup(self):
# All 'preconditions' for test case execution should be here
self.logger.info("Test Case RAMP-UP phase started")
def case(self):
# Send command 'ifup' for all duts
responses = self.execute_command('*', "ifup") # pylint: disable=unused-variable
def teardown(self):
# shut down interfaces from nodes
self.execute_command('*', 'ifdown')
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 40 | 1 | 14 | 29 | 1 | 1.98 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 110 | 162 | 7 | 58 | 6 | 53 | 115 | 9 | 6 | 4 | 1 | 3 | 0 | 4 |
2,018 | ARMmbed/icetea | ARMmbed_icetea/examples/sample_cloud.py | examples.sample_cloud.SampleClient |
class SampleClient(object):
"""
Example of a cloud client class.
"""
def __init__(self, host='localhost', port=3000, result_converter=None,
testcase_converter=None, args=None):
# Optional converter for result data from format provided by test framework to
# format supported by server
self.result_converter = result_converter
# Optional converter for testcase metadata from format provided by test framework to
# one supported by server
self.tc_converter = testcase_converter
self.host = host
self.port = port
def set_logger(self, logger):
'''
Set Logger
:param logger: Logger -oject
'''
pass
def get_suite(self, suite, options):
'''
Get suite from server.
returns suite information as a dictionary object.
'''
pass
def get_campaign_id(self, campaign_name):
"""
Get ID of campaign that has name campaign_name
"""
pass
def get_campaigns(self):
"""
Get campaigns from server
"""
pass
def get_campaign_names(self):
"""
Get names of campaigns from server
returns list of campaign/suite names.
"""
pass
def update_testcase(self, metadata):
"""
Update TC data to server or create a new testcase on server.
If testcase_converter has been provided,
use it to convert TC metadata to format accepted by the server.
"""
pass
def send_results(self, result):
"""
Upload a result object to server.
If resultConverter has been provided, use it to convert result object to format accepted
by the server.
If needed, use testcase_converter to convert tc metadata in result to suitable format.
returns new result entry as a dictionary or None.
"""
if self.result_converter:
print(self.result_converter(result))
else:
print(result)
|
class SampleClient(object):
'''
Example of a cloud client class.
'''
def __init__(self, host='localhost', port=3000, result_converter=None,
testcase_converter=None, args=None):
pass
def set_logger(self, logger):
'''
Set Logger
:param logger: Logger -oject
'''
pass
def get_suite(self, suite, options):
'''
Get suite from server.
returns suite information as a dictionary object.
'''
pass
def get_campaign_id(self, campaign_name):
'''
Get ID of campaign that has name campaign_name
'''
pass
def get_campaigns(self):
'''
Get campaigns from server
'''
pass
def get_campaign_names(self):
'''
Get names of campaigns from server
returns list of campaign/suite names.
'''
pass
def update_testcase(self, metadata):
'''
Update TC data to server or create a new testcase on server.
If testcase_converter has been provided,
use it to convert TC metadata to format accepted by the server.
'''
pass
def send_results(self, result):
'''
Upload a result object to server.
If resultConverter has been provided, use it to convert result object to format accepted
by the server.
If needed, use testcase_converter to convert tc metadata in result to suitable format.
returns new result entry as a dictionary or None.
'''
pass
| 9 | 8 | 8 | 1 | 3 | 4 | 1 | 1.54 | 1 | 0 | 0 | 0 | 8 | 4 | 8 | 8 | 73 | 12 | 24 | 14 | 14 | 37 | 22 | 13 | 13 | 2 | 1 | 1 | 9 |
2,019 | ARMmbed/icetea | ARMmbed_icetea/examples/sample_http.py | examples.sample_http.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_http",
title="Test http methods",
status="development",
type="acceptance",
purpose="dummy",
requirements={}
)
def setup(self):
pass
def case(self):
# Initialize the HttpApi with the path to the server you want to contact
http = self.HttpApi("https://www.mbed.com/en/") # pylint: disable=no-member
# Send get request to "https://mbed.com/", should respond with 200
resp = http.get("/")
if resp.status_code == 200:
self.logger.info("mbed.com responded with status code 200!")
# This should fail
# resp = http.get("/", expected=201)
def teardown(self):
pass
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 5 | 0 | 5 | 1 | 1 | 0.26 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 110 | 26 | 3 | 19 | 7 | 14 | 5 | 12 | 7 | 7 | 2 | 3 | 1 | 5 |
2,020 | ARMmbed/icetea | ARMmbed_icetea/examples/sample_multi_dut.py | examples.sample_multi_dut.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_process_multidut_testcase",
title="unittest exception in testcase",
status="development",
type="acceptance",
purpose="dummy",
requirements={
"duts": {
'*': {
"count": 200,
"type": "process",
"application": {
"name": "sample",
"version": "1.0",
"bin": "tools/sample"
}
}
}
}
)
def case(self):
self.command("*", "Hello")
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 12 | 0 | 12 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 25 | 1 | 24 | 3 | 21 | 0 | 5 | 3 | 2 | 1 | 3 | 0 | 2 |
2,021 | ARMmbed/icetea | ARMmbed_icetea/examples/test_close_open.py | examples.test_close_open.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_close_open",
title="Smoke test for testing dut connection "
"opening and closing in testcase",
status="released",
purpose="Verify Command Line Interface",
component=["cmdline"],
type="smoke",
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"allowed_platforms": ['K64F', "SAM4E", 'NRF51_DK'],
"application": {"name": "generalTestApplication",
"version": "1.0"}
}
}}
)
def case(self):
# Test command line works before closing
self.command(1, "echo helloworld")
# Close connection, wait a second and reopen connection
self.get_dut(1).close_connection()
# We could use the serial port at this point for
# communicating with the DUT in another manner (eg. terminal)
print("DUT serial port is %s" % self.get_dut(1).comport)
self.delay(1)
self.get_dut(1).open_connection()
# Again test command line works correctly after reopening
self.command(1, "echo helloworld")
# Check that exception is raised if we try to reopen connection
try:
self.get_dut(1).open_connection()
# We should never get here,
# since previous line should raise DutConnectionError exception
raise TestStepError("Calling open_connection twice didn't raise error as expected!")
except DutConnectionError:
pass
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 23 | 3 | 16 | 4 | 2 | 0.24 | 1 | 2 | 2 | 0 | 2 | 0 | 2 | 108 | 47 | 6 | 33 | 3 | 30 | 8 | 15 | 3 | 12 | 2 | 3 | 1 | 3 |
2,022 | ARMmbed/icetea | ARMmbed_icetea/examples/test_cmdline.py | examples.test_cmdline.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_cmdline",
title="Smoke test for command line interface",
status="released",
purpose="Verify Command Line Interface",
component=["cmdline"],
type="smoke",
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"allowed_platforms": ['K64F']
}
}
}
)
def setup(self):
# nothing for now
self.device = self.get_node_endpoint(1) # pylint: disable=attribute-defined-outside-init
def case(self):
self.command(1, "echo hello world", timeout=5)
self.device.command("help")
def teardown(self):
# nothing for now
pass
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 7 | 0 | 6 | 1 | 1 | 0.12 | 1 | 0 | 0 | 0 | 4 | 1 | 4 | 110 | 31 | 3 | 26 | 6 | 21 | 3 | 10 | 6 | 5 | 1 | 3 | 0 | 4 |
2,023 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_async.py | examples.testcase_example_usage.sample_async.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_async",
title="async command example usage",
status="development",
type="smoke",
purpose="show an example usage of async command",
component=["Icetea"],
requirements={
"duts": {
'*': {
"count": 1, # devices number
"type": "hardware", # "hardware" (by default) or "process"
"application": {
"bin": "build_path/build_full_name", # build binary path
}
}
}
}
)
def case(self):
# launch an async command
async_cmd = self.command(1, "echo hello!", asynchronous=True)
# Wait_for_async_response:
# Wait for the given asynchronous response to be ready and then parse it
resp = self.wait_for_async_response("echo", async_cmd)
# Verifies that expected response messages found
resp.verify_message("hello!")
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 15 | 1 | 12 | 4 | 1 | 0.28 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 32 | 3 | 25 | 5 | 22 | 7 | 7 | 5 | 4 | 1 | 3 | 0 | 2 |
2,024 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_cli_init.py | examples.testcase_example_usage.sample_cli_init.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_cli_trigger",
title="cli_ready_trigger example usage",
status="development",
type="smoke",
purpose="show an example usage of cli_ready_trigger",
component=["Icetea"],
requirements={
"duts": {
'*': {
"count": 1, # devices number
"type": "hardware", # "hardware" (by default) or "process"
"application": {
"bin": "build_path/build_full_name", # build binary path
"cli_ready_trigger": "/>"
}
}
}
}
)
def case(self):
self.logger.info("cli_ready_trigger will help Icetea wait until application is ready "
"for communication.")
# following examples shows how to create triggers from received data
EventMatcher(EventTypes.DUT_LINE_RECEIVED, # event id
"ping", # match string or regex (see documentation)
self.get_dut(1), # dut which data want to follow
callback=self.ping_cb) # callback which is called when regex matches
# this will trig above callback
self.command(1, "echo ping")
def ping_cb(self, dut, line):
self.logger.info("pong (because of received %s)", line)
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
def ping_cb(self, dut, line):
pass
| 4 | 0 | 11 | 0 | 10 | 3 | 1 | 0.28 | 1 | 2 | 2 | 0 | 3 | 0 | 3 | 109 | 36 | 2 | 32 | 4 | 28 | 9 | 9 | 4 | 5 | 1 | 3 | 0 | 3 |
2,025 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_command_response.py | examples.testcase_example_usage.sample_command_response.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_command_response",
title="Icetea command and response APIs example usage",
status="development",
type="smoke",
purpose="show an example usage of Icetea command and response APIs",
component=["Icetea"],
requirements={
"duts": {
'*': {
"count": 2, # devices number
"type": "hardware", # "hardware" (by default) or "process"
"application": {
"bin": "build_path/build_full_name", # build binary path
}
},
"1": {
"nick": "dut1" # give dut a nick
},
"2": {
"nick": "dut2"
}
}
}
)
def case(self):
# send command "echo hello" to 1st dut by index
self.command(1, "echo hello")
# send command "echo hello" to 2nd dut by nick
self.command("dut2", "echo hello")
# send command "echo hello" to all duts by '*'
self.command("*", "echo hello")
# send know command "echo hello" and retcode expected to be 0 --> success() is True
response = self.command(1, "echo hello", expected_retcode=0)
self.assertTrue(response.success())
# send unknown command "hello" and the retcode for unknown command is -5 --> fail() is True
response = self.command(1, "hello", expected_retcode=-5)
self.assertTrue(response.fail())
# get response and verify traces
response = self.command(2, "echo world")
response.verify_trace("world")
# send command to all duts by '*'
responses = self.command('*', "echo hello world! ")
# the 'responses' will be a list of all the returned response
for response in responses:
response.verify_message("hello world!")
response.verify_response_time(1)
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 27 | 3 | 20 | 6 | 2 | 0.29 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 56 | 7 | 41 | 5 | 38 | 12 | 17 | 5 | 14 | 2 | 3 | 1 | 3 |
2,026 | ARMmbed/icetea | ARMmbed_icetea/examples/testcase_example_usage/sample_eventmatcher.py | examples.testcase_example_usage.sample_eventmatcher.Testcase |
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="sample_event_matcher",
title="Icetea built-in event matcher example",
status="development",
type="smoke",
purpose="show an example usage of Icetea event matcher",
component=["icetea"],
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"application": {
"bin": "build_path/build_full_name",
}
},
"1": {
"nick": "dut1" # give dut a nick
}
}
}
)
def setup(self):
# Start an EventMatcher to follow line received events, looking for the echoed hello.
self.matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED,
"hello",
callback=self.callback_function)
def case(self):
# send command "echo hello" to 1st dut by index
# "hello" matcher calls callback_function when echo coming back
self.command(1, "echo hello")
# Alternative example: Wait data from DUT with 10s timeout
# create event matcher which is trigged when "ping" is received from DUT
event = Event()
EventMatcher(EventTypes.DUT_LINE_RECEIVED, # event id
"ping", # match string or regex (see documentation)
self.get_dut(1), # dut which data want to follow
event)
# simulate "ping" by sending echo command
self.command(1, "echo ping")
# waits until event is set - "ping" is received
if not event.wait(10):
# if wait timeouts raise Error
raise TestStepError("ping did not arrive")
self.logger.info("Pong!")
def callback_function(self, match_obj):
match = match_obj.match # re.MatchObject
event_source = match_obj.ref # Dut object that generated this event
# Dut object is defined in icetea_lib/DeviceConnectors/Dut.py
self.logger.info("Oh hello!")
self.logger.info("Event data: %s", match_obj.event_data)
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def callback_function(self, match_obj):
pass
| 5 | 0 | 13 | 0 | 11 | 4 | 1 | 0.34 | 1 | 4 | 3 | 0 | 4 | 1 | 4 | 110 | 57 | 4 | 44 | 9 | 39 | 15 | 18 | 9 | 13 | 2 | 3 | 1 | 5 |
2,027 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/LogManager.py | icetea_lib.LogManager.BenchFormatterWithType |
class BenchFormatterWithType(object): # pylint: disable=too-few-public-methods
"""
Bench logger formatter.
"""
def __init__(self, color=False, loggername="Bench"):
if not color:
config = LOGGING_CONFIG.get(loggername, {})
self._formatter = BenchFormatter(
config.get("format", DEFAULT_LOGGING_CONFIG.get("format")),
config.get("dateformat", DEFAULT_LOGGING_CONFIG.get("dateformat")))
else:
class ColoredBenchFormatter(coloredlogs.ColoredFormatter):
"""
This is defined as an internal class here because coloredlogs is and optional
dependency.
"""
converter = datetime.datetime.fromtimestamp
def formatTime(self, record, datefmt=None):
date_and_time = self.converter(record.created, tz=pytz.utc)
if "%F" in datefmt:
msec = "%03d" % record.msecs
datefmt = datefmt.replace("%F", msec)
str_time = date_and_time.strftime(datefmt)
return str_time
self._formatter = ColoredBenchFormatter(
LOGGING_CONFIG.get(loggername, {}).get(
"format", DEFAULT_LOGGING_CONFIG.get("format")),
LOGGING_CONFIG.get(loggername, {}).get(
"dateformat", DEFAULT_LOGGING_CONFIG.get("dateformat")),
LEVEL_FORMATS, FIELD_STYLES)
def format(self, record):
"""
Format record with formatter.
:param record: Record to format
:return: Formatted record
"""
if not hasattr(record, "type"):
record.type = " "
return self._formatter.format(record)
|
class BenchFormatterWithType(object):
'''
Bench logger formatter.
'''
def __init__(self, color=False, loggername="Bench"):
pass
class ColoredBenchFormatter(coloredlogs.ColoredFormatter):
'''
This is defined as an internal class here because coloredlogs is and optional
dependency.
'''
def formatTime(self, record, datefmt=None):
pass
def formatTime(self, record, datefmt=None):
'''
Format record with formatter.
:param record: Record to format
:return: Formatted record
'''
pass
| 5 | 3 | 15 | 1 | 11 | 3 | 2 | 0.48 | 1 | 2 | 2 | 0 | 2 | 1 | 2 | 2 | 44 | 5 | 27 | 11 | 22 | 13 | 19 | 11 | 14 | 2 | 1 | 1 | 6 |
2,028 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/LogManager.py | icetea_lib.LogManager.BenchLoggerAdapter |
class BenchLoggerAdapter(logging.LoggerAdapter):
"""
Adapter to add field 'extra' to logger.
"""
def process(self, msg, kwargs):
if "extra" not in kwargs:
kwargs["extra"] = {}
kwargs["extra"]["source"] = self.extra["source"]
return msg, kwargs
|
class BenchLoggerAdapter(logging.LoggerAdapter):
'''
Adapter to add field 'extra' to logger.
'''
def process(self, msg, kwargs):
pass
| 2 | 1 | 5 | 0 | 5 | 0 | 2 | 0.5 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 20 | 9 | 0 | 6 | 2 | 4 | 3 | 6 | 2 | 4 | 2 | 2 | 1 | 2 |
2,029 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/LogManager.py | icetea_lib.LogManager.ContextFilter |
class ContextFilter(logging.Filter): # pylint: disable=too-few-public-methods
"""
Filter for filtering logging messages and truncating them after MAXIMUM_LENGTH has been reached.
"""
MAXIMUM_LENGTH = 10000
REVEAL_LENGTH = 50
def filter(self, record):
"""
Filter record
:param record: Record to filter
:return:
"""
def modify(value):
"""
Modify logged record, truncating it to max length and logging remaining length
:param value: Record to modify
:return:
"""
if isinstance(value, six.string_types):
if len(value) < ContextFilter.MAXIMUM_LENGTH:
return value
try:
return "{}...[{} more bytes]".format(
value[:ContextFilter.REVEAL_LENGTH],
len(value) - ContextFilter.REVEAL_LENGTH)
except UnicodeError:
return "{}...[{} more bytes]".format(
repr(value[:ContextFilter.REVEAL_LENGTH]),
len(value) - ContextFilter.REVEAL_LENGTH)
elif isinstance(value, six.binary_type):
return "{}...[{} more bytes]".format(
repr(value[:ContextFilter.REVEAL_LENGTH]),
len(value) - ContextFilter.REVEAL_LENGTH)
else:
return value
record.msg = traverse_json_obj(record.msg, callback=modify)
return True
|
class ContextFilter(logging.Filter):
'''
Filter for filtering logging messages and truncating them after MAXIMUM_LENGTH has been reached.
'''
def filter(self, record):
'''
Filter record
:param record: Record to filter
:return:
'''
pass
def modify(value):
'''
Modify logged record, truncating it to max length and logging remaining length
:param value: Record to modify
:return:
'''
pass
| 3 | 3 | 30 | 3 | 20 | 8 | 3 | 0.58 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 3 | 42 | 5 | 24 | 5 | 21 | 14 | 16 | 5 | 13 | 5 | 2 | 2 | 6 |
2,030 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/Plugin/PluginBase.py | icetea_lib.Plugin.PluginBase.PluginBase |
class PluginBase(object):
"""
Base class for test case level plugins. A plugin should implement at least one of the getters
in this class.
"""
def __init__(self):
pass
def get_bench_api(self):
"""
Return dictionary with attribute names as strings as keys and attribute values (classes,
functions, values) as values.
:return: Dictionary
"""
return None
def get_parsers(self):
"""
Return dictionary with parser names as keys and parser functions as values.
:return: Dictonary
"""
return None
def get_external_services(self):
"""
Get dictionary with external service names as keys and classes as values.
:return: Dictionary
"""
return None
def init(self, bench=None):
"""
Initialization function that test case plugins can implement.
:param bench: test bench object (Bench).
:return: Nothing
"""
return None
@staticmethod
def find_one(line, lookup):
"""
regexp search with one value to return.
:param line: Line
:param lookup: regexp
:return: Match group or False
"""
match = re.search(lookup, line)
if match:
if match.group(1):
return match.group(1)
return False
# regex search with multiple values to return
@staticmethod
def find_multiple(line, lookup):
"""
regexp search with one value to return.
:param line: Line
:param lookup: regexp
:return: List of match groups or False
"""
match = re.search(lookup, line)
if match:
ret = []
for i in range(1, len(match.groups()) + 1):
ret.append(match.group(i))
if ret:
return ret
return False
|
class PluginBase(object):
'''
Base class for test case level plugins. A plugin should implement at least one of the getters
in this class.
'''
def __init__(self):
pass
def get_bench_api(self):
'''
Return dictionary with attribute names as strings as keys and attribute values (classes,
functions, values) as values.
:return: Dictionary
'''
pass
def get_parsers(self):
'''
Return dictionary with parser names as keys and parser functions as values.
:return: Dictonary
'''
pass
def get_external_services(self):
'''
Get dictionary with external service names as keys and classes as values.
:return: Dictionary
'''
pass
def init(self, bench=None):
'''
Initialization function that test case plugins can implement.
:param bench: test bench object (Bench).
:return: Nothing
'''
pass
@staticmethod
def find_one(line, lookup):
'''
regexp search with one value to return.
:param line: Line
:param lookup: regexp
:return: Match group or False
'''
pass
@staticmethod
def find_multiple(line, lookup):
'''
regexp search with one value to return.
:param line: Line
:param lookup: regexp
:return: List of match groups or False
'''
pass
| 10 | 7 | 9 | 1 | 4 | 4 | 2 | 1.25 | 1 | 1 | 0 | 8 | 5 | 0 | 7 | 7 | 75 | 12 | 28 | 14 | 18 | 35 | 26 | 12 | 18 | 4 | 1 | 2 | 12 |
2,031 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/Randomize/seed.py | icetea_lib.Randomize.seed.SeedInteger |
class SeedInteger(Seed):
"""
Integer seed implementation.
"""
def __iadd__(self, other):
return SeedInteger(self.value + other)
@staticmethod
def load(filename):
"""
Load seed from a file.
:param filename: Source file name
:return: SeedInteger
"""
json_obj = Seed.load(filename)
return SeedInteger(json_obj["seed_value"], json_obj["seed_id"], json_obj["date"])
|
class SeedInteger(Seed):
'''
Integer seed implementation.
'''
def __iadd__(self, other):
pass
@staticmethod
def load(filename):
'''
Load seed from a file.
:param filename: Source file name
:return: SeedInteger
'''
pass
| 4 | 2 | 6 | 1 | 3 | 3 | 1 | 1.14 | 1 | 0 | 0 | 0 | 1 | 0 | 2 | 12 | 17 | 2 | 7 | 5 | 3 | 8 | 6 | 4 | 3 | 1 | 2 | 0 | 2 |
2,032 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/Randomize/seed.py | icetea_lib.Randomize.seed.SeedString |
class SeedString(Seed):
"""
String seed implementation
"""
def __getitem__(self, index):
"""
Return character at index.
:param index: index of character.
:return: str
"""
return self.value[index]
def __len__(self):
return len(self.value)
def __str__(self):
return str(self.value)
def __iter__(self):
for elem in self.value:
yield elem
@staticmethod
def load(filename):
json_obj = Seed.load(filename)
return SeedString(json_obj["seed_value"], json_obj["seed_id"], json_obj["date"])
|
class SeedString(Seed):
'''
String seed implementation
'''
def __getitem__(self, index):
'''
Return character at index.
:param index: index of character.
:return: str
'''
pass
def __len__(self):
pass
def __str__(self):
pass
def __iter__(self):
pass
@staticmethod
def load(filename):
pass
| 7 | 2 | 4 | 0 | 2 | 1 | 1 | 0.57 | 1 | 1 | 0 | 0 | 4 | 0 | 5 | 15 | 27 | 5 | 14 | 9 | 7 | 8 | 13 | 8 | 7 | 2 | 2 | 1 | 6 |
2,033 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/Randomize/seed.py | icetea_lib.Randomize.seed.SeedStringArray |
class SeedStringArray(Seed):
"""
SeedStringArray implementation.
"""
def __getitem__(self, index):
return self.value[index]
def __len__(self):
return len(self.value)
def __iter__(self):
for elem in self.value:
yield elem
def store(self, filename):
with open(filename, 'w') as file_handle:
seed_dict = {"seed_id": self.seed_id, "seed_value": self.value, "date": self.date}
json.dump(seed_dict, file_handle, default=lambda array_elem: array_elem.value)
@staticmethod
def load(filename):
json_obj = Seed.load(filename)
return SeedStringArray(json_obj["seed_value"], json_obj["seed_id"], json_obj["date"])
|
class SeedStringArray(Seed):
'''
SeedStringArray implementation.
'''
def __getitem__(self, index):
pass
def __len__(self):
pass
def __iter__(self):
pass
def store(self, filename):
pass
@staticmethod
def load(filename):
pass
| 7 | 1 | 3 | 0 | 3 | 0 | 1 | 0.19 | 1 | 0 | 0 | 0 | 4 | 0 | 5 | 15 | 23 | 4 | 16 | 11 | 9 | 3 | 15 | 9 | 9 | 2 | 2 | 1 | 6 |
2,034 | ARMmbed/icetea | ARMmbed_icetea/icetea_lib/Reports/ReportConsole.py | icetea_lib.Reports.ReportConsole.ReportConsole |
class ReportConsole(ReportBase):
"""
ReportConsole class, implements generating and printing reports to the console using print
method and PrettyTable module.
"""
def __init__(self, results):
ReportBase.__init__(self, results)
def generate(self, *args, **kwargs):
"""
Generates and prints the console report, which consists of a table of test cases run as
well as a summary table with passrate, number of test cases and statistics on
passed/failed/inconclusive/skipped cases.
:param args: arguments, not used
:param kwargs: keyword arguments, not used
:return: Nothing
"""
# Generate TC result table
table = PrettyTable(
["Testcase", "Verdict", "Fail Reason", "Skip Reason", "platforms", "duration",
"Retried"])
for result in self.results:
table.add_row([
result.get_tc_name(),
result.get_verdict(),
hex_escape_str(result.fail_reason)[:60],
str(result.skip_reason) if result.skipped() else "",
result.get_dut_models(),
str(result.duration),
"Yes" if result.retries_left != 0 else "No"
])
# Print to console
print(table) # pylint: disable=superfluous-parens
# Generate Summary table
table = PrettyTable(['Summary', ''])
final_verdict = "FAIL"
if self.summary["fail"] == 0 and self.summary["inconclusive"] == 0:
final_verdict = "PASS"
elif self.results.clean_inconcs() and not self.results.clean_fails():
final_verdict = "INCONCLUSIVE"
elif self.summary["fail"] + self.summary["inconclusive"] == self.summary["retries"]:
final_verdict = "PASS"
table.add_row(["Final Verdict", final_verdict])
table.add_row(["count", str(self.summary["count"])])
table.add_row(["passrate", self.results.pass_rate()])
table.add_row(["passrate excluding retries", self.results.pass_rate(include_retries=False)])
if self.summary["pass"] > 0:
table.add_row(["pass", str(self.summary["pass"])])
if self.summary["fail"] > 0:
table.add_row(["fail", str(self.summary["fail"])])
if self.summary["skip"] > 0:
table.add_row(["skip", str(self.summary["skip"])])
if self.summary["inconclusive"] > 0:
table.add_row(["inconclusive", str(self.summary["inconclusive"])])
table.add_row(["Duration", self.duration_to_string(self.summary["duration"])])
# Print to console
print(table)
|
class ReportConsole(ReportBase):
'''
ReportConsole class, implements generating and printing reports to the console using print
method and PrettyTable module.
'''
def __init__(self, results):
pass
def generate(self, *args, **kwargs):
'''
Generates and prints the console report, which consists of a table of test cases run as
well as a summary table with passrate, number of test cases and statistics on
passed/failed/inconclusive/skipped cases.
:param args: arguments, not used
:param kwargs: keyword arguments, not used
:return: Nothing
'''
pass
| 3 | 2 | 27 | 2 | 20 | 7 | 6 | 0.45 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 7 | 60 | 4 | 40 | 6 | 37 | 18 | 28 | 6 | 25 | 11 | 2 | 1 | 12 |
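The final-verdict chain in ReportConsole.generate above is the part most worth reading twice. Below is a standalone restatement of the same decision, with a plain dict standing in for ReportBase.summary and booleans standing in for ResultList.clean_inconcs()/clean_fails(); it is an illustrative sketch, not icetea code.

def final_verdict(summary, has_clean_inconcs, has_clean_fails):
    # summary is assumed to hold "fail", "inconclusive" and "retries" counts.
    verdict = "FAIL"
    if summary["fail"] == 0 and summary["inconclusive"] == 0:
        verdict = "PASS"
    elif has_clean_inconcs and not has_clean_fails:
        # unretried inconclusive results remain, but no unretried failures
        verdict = "INCONCLUSIVE"
    elif summary["fail"] + summary["inconclusive"] == summary["retries"]:
        # every failed or inconclusive run was retried
        verdict = "PASS"
    return verdict

print(final_verdict({"fail": 1, "inconclusive": 0, "retries": 1}, False, False))  # PASS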
2,035 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Reports/ReportHtml.py
|
icetea_lib.Reports.ReportHtml.ReportHtml
|
class ReportHtml(ReportBase):
"""
ReportHtml class. Uses yattag to generate html reports of results.
"""
def __init__(self, results):
ReportBase.__init__(self, results)
def generate(self, *args, **kwargs):
"""
Implementation for the generate method defined in ReportBase.
Generates a html report and saves it.
:param args: 1 argument, which is the filename
:param kwargs: 3 keyword arguments with keys 'title', 'heads' and 'refresh'
:return: Nothing.
"""
title = kwargs.get("title")
heads = kwargs.get("heads")
refresh = kwargs.get("refresh")
filename = args[0]
report = self._create(title, heads, refresh, path_start=os.path.dirname(filename))
ReportHtml.save(report, filename)
# pylint: disable=too-many-statements
def _create(self, title, heads, refresh=None, path_start=None):
"""
Internal create method, uses yattag to generate a html document with result data.
:param title: Title of report
:param heads: Headers for report
:param refresh: If set to True, adds a HTTP-EQUIV="refresh" to the report
:param path_start: path to the file where this report is to be stored.
:return: yattag document.
"""
# TODO: Refactor to make less complex
doc, tag, text = Doc().tagtext()
doc.asis('<!DOCTYPE html>')
heads["Date"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
heads["Pass rate"] = self.results.pass_rate()
heads["Pass rate excluding retries"] = self.results.pass_rate(include_retries=False)
with tag('html'):
with tag('head'):
doc.asis(self.head)
if refresh:
doc.asis('<META HTTP-EQUIV="refresh" CONTENT="' + str(refresh) + '">')
with tag('body', id='body'):
with tag('h1'):
text(title)
with tag('table'):
for head in heads:
with tag('tr'):
with tag('th', width="100px"):
text(head)
with tag('td'):
text(heads[head])
with tag('tr'):
with tag('th'):
text('Executed')
with tag('td'):
text(str(self.summary["count"]))
with tag('tr'):
with tag('th'):
text('Pass:')
with tag('td'):
text(str(self.summary["pass"]))
with tag('tr'):
with tag('th'):
text('Fails:')
with tag('td'):
text(str(self.summary["fail"]))
with tag('tr'):
with tag('th'):
text('inconclusive:')
with tag('td'):
text(str(self.summary["inconclusive"]))
with tag('tr'):
with tag('th'):
text('Skip:')
with tag('td'):
text(str(self.summary["skip"]))
with tag('tr'):
with tag('th'):
text('Duration:')
with tag('td'):
text(self.duration_to_string(self.summary["duration"]))
with tag('tr'):
with tag('th'):
text('{} version:'.format(get_fw_name()))
with tag('td'):
text(get_fw_version())
with tag('table', style='border-collapse: collapse;'):
with tag('tr'):
with tag('th'):
text("Test Case")
with tag('th'):
text("Verdict")
with tag('th'):
text("Fail Reason")
with tag('th'):
text("Skip Reason")
with tag('th'):
text("Retried")
with tag('th'):
text("Duration")
for result in self.results:
if result.success:
klass = 'item_pass'
elif result.inconclusive:
klass = 'item_inconc'
else:
klass = 'item_fail'
with tag('tr', klass='item %s' % klass, onclick='showhide(this)'):
with tag('td', width="200px"):
text(result.get_tc_name())
with tag('td', width="100px"):
if result.success:
color = 'green'
elif result.failure:
color = 'red'
else:
color = 'black'
with tag('font', color=color):
text(result.get_verdict())
with tag('td', width="350px"):
text(hex_escape_str(result.fail_reason))
with tag('td', width="300px"):
text(result.skip_reason if result.skipped() else "")
with tag('td', width="50px"):
text("Yes" if result.retries_left != 0 else "No")
with tag('td', width="100px"):
text(str(result.duration))
with tag('tr', klass='info hidden'):
with tag('td', colspan="5"):
if hasattr(result, 'tc_git_info') and \
result.tc_git_info and \
"scm_link" in result.tc_git_info:
# add tc git info only when available
link = result.tc_git_info['scm_link']
with tag('a', href=link):
text(link)
doc.stag('br')
for fil in result.logfiles:
filepath = os.path.relpath(fil, path_start)
with tag('a', href=filepath):
text(filepath)
doc.stag('br')
return doc.getvalue()
@staticmethod
def save(htmldoc, filename):
"""
Static method which saves htmldoc with filename filename.
:param htmldoc: yattag Document
:param filename: file name/path
:return: Nothing
"""
with open(filename, 'w') as fil:
fil.write(htmldoc)
head = """
<style type="text/css">
.item {
border-top: 2px solid #505050;
border-bottom: 1px solid #A0A0A0;
border-left: 1px solid #A0A0A0;
border-right: 1px solid #A0A0A0;
}
.item:hover {
cursor: pointer;
}
.item_fail {
background-color: #FFC0C0;
}
.item_inconc {
background-color: #FFD733;
}
.item_inconc:hover {
background-color: #FCFF58;
}
.item_fail:hover {
background-color: #FFCFCF;
}
.item_pass {
background-color: #C0FFC0;
}
.item_pass:hover {
background-color: #CFFFCF;
}
.info {
display: ;
background: transparent;
border-left: 1px solid #A0A0A0;
border-right: 1px solid #A0A0A0;
border-bottom: 2px solid #505050;
}
.hidden {
display: None;
}
.visible_info td {
padding-left: 1em;
}
</style>
<script type="text/javascript">
function showhide(which) {
n = which.parentNode.rows[which.rowIndex + 1];
if (n != null) {
if (n.classList.contains("hidden")) {
n.classList.remove("hidden")
}
else {
n.classList.add("hidden")
}
}
}
</script>"""
|
class ReportHtml(ReportBase):
'''
ReportHtml class. Uses yattag to generate html reports of results.
'''
def __init__(self, results):
pass
def generate(self, *args, **kwargs):
'''
Implementation for the generate method defined in ReportBase.
Generates a html report and saves it.
:param args: 1 argument, which is the filename
:param kwargs: 3 keyword arguments with keys 'title', 'heads' and 'refresh'
:return: Nothing.
'''
pass
def _create(self, title, heads, refresh=None, path_start=None):
'''
Internal create method, uses yattag to generate a html document with result data.
:param title: Title of report
:param heads: Headers for report
:param refresh: If set to True, adds a HTTP-EQUIV="refresh" to the report
:param path_start: path to the file where this report is to be stored.
:return: yattag document.
'''
pass
@staticmethod
def save(htmldoc, filename):
'''
Static method which saves htmldoc with filename filename.
:param htmldoc: yattag Document
:param filename: file name/path
:return: Nothing
'''
pass
| 6 | 4 | 38 | 2 | 31 | 6 | 4 | 0.22 | 1 | 2 | 0 | 0 | 3 | 0 | 4 | 9 | 230 | 21 | 182 | 21 | 176 | 40 | 120 | 19 | 115 | 12 | 2 | 8 | 15 |
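ReportHtml._create above leans entirely on yattag's Doc().tagtext() trio. A much smaller example of the same pattern, independent of icetea (requires only the yattag package):

from yattag import Doc

doc, tag, text = Doc().tagtext()
with tag('table'):
    with tag('tr'):
        with tag('th'):
            text('Final Verdict')
        with tag('td'):
            text('PASS')
# yattag emits compact markup: <table><tr><th>Final Verdict</th><td>PASS</td></tr></table>
print(doc.getvalue())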
2,036 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Reports/ReportJunit.py
|
icetea_lib.Reports.ReportJunit.ReportJunit
|
class ReportJunit(ReportBase):
"""
ReportJunit class. Generates the Junit xml report from results using yattag.
"""
def __init__(self, results):
ReportBase.__init__(self, results)
def generate(self, *args, **kwargs):
"""
Implementation for generate method from ReportBase. Generates the xml and saves the
report in Junit xml format.
:param args: 1 argument, filename is used.
:param kwargs: Not used
:return: Nothing
"""
xmlstr = str(self)
filename = args[0]
with open(filename, 'w') as fil:
fil.write(xmlstr)
with open(self.get_latest_filename('junit.xml'), "w") as latest_report:
latest_report.write(xmlstr)
def __str__(self):
"""
Generates the xml string for Junit report.
:return: Report as xml string.
"""
return ReportJunit.__generate(self.results)
def to_string(self):
"""
Generates the xml string for Junit report.
:return: Report as xml string.
"""
return str(self)
# pylint: disable=too-many-branches
@staticmethod
def __generate(results):
"""
Static method which generates the Junit xml string from results
:param results: Results as ResultList object.
:return: Junit xml format string.
"""
doc, tag, text = Doc().tagtext()
# Counters for testsuite tag info
count = 0
fails = 0
errors = 0
skips = 0
for result in results:
# Loop through all results and count the ones that were not later retried.
if result.passed() is False:
if result.retries_left > 0:
# This will appear in the list again, move on
continue
count += 1
if result.passed():
# Passed, no need to increment anything else
continue
elif result.skipped():
skips += 1
elif result.was_inconclusive():
errors += 1
else:
fails += 1
with tag('testsuite',
tests=str(count),
failures=str(fails),
errors=str(errors),
skipped=str(skips)):
for result in results:
if result.passed() is False and result.retries_left > 0:
continue
class_name = result.get_tc_name()
models = result.get_dut_models()
if models:
class_name = class_name + "." + models
name = result.get_toolchain()
with tag('testcase', classname=class_name, name=name,
time=result.get_duration(seconds=True)):
if result.stdout:
with tag('system-out'):
text(result.stdout)
if result.passed():
continue
elif result.skipped():
with tag('skipped'):
text(result.skip_reason)
elif result.was_inconclusive():
with tag('error', message=hex_escape_str(result.fail_reason)):
text(result.stderr)
else:
with tag('failure', message=hex_escape_str(result.fail_reason)):
text(result.stderr)
return indent(
doc.getvalue(),
indentation=' '*4
)
|
class ReportJunit(ReportBase):
'''
ReportJunit class. Generates the Junit xml report from results using yattag.
'''
def __init__(self, results):
pass
def generate(self, *args, **kwargs):
'''
Implementation for generate method from ReportBase. Generates the xml and saves the
report in Junit xml format.
:param args: 1 argument, filename is used.
:param kwargs: Not used
:return: Nothing
'''
pass
def __str__(self):
'''
Generates the xml string for Junit report.
:return: Report as xml string.
'''
pass
def to_string(self):
'''
Generates the xml string for Junit report.
:return: Report as xml string.
'''
pass
@staticmethod
def __generate(results):
'''
Static method which generates the Junit xml string from results
:param results: Results as ResultList object.
:return: Junit xml format string.
'''
pass
| 7 | 5 | 19 | 2 | 13 | 5 | 4 | 0.42 | 1 | 1 | 0 | 0 | 4 | 0 | 5 | 10 | 107 | 12 | 67 | 20 | 60 | 28 | 52 | 17 | 46 | 14 | 2 | 5 | 18 |
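A minimal sketch of the counting rule used in ReportJunit above: results that failed but still had retries left are excluded, because the retried run appears later in the list. The FakeResult class here is a stand-in for the real Result object with only the fields the counter needs; it is not part of icetea.

class FakeResult(object):
    # Stand-in for icetea_lib.Result.Result, illustration only.
    def __init__(self, verdict, retries_left=0):
        self.verdict = verdict
        self.retries_left = retries_left
    def passed(self):
        return self.verdict == "pass"
    def skipped(self):
        return self.verdict == "skip"
    def was_inconclusive(self):
        return self.verdict == "inconclusive"

def count_for_junit(results):
    count = fails = errors = skips = 0
    for result in results:
        if result.passed() is False and result.retries_left > 0:
            continue  # a later retry represents this test case
        count += 1
        if result.passed():
            continue
        elif result.skipped():
            skips += 1
        elif result.was_inconclusive():
            errors += 1
        else:
            fails += 1
    return count, fails, errors, skips

print(count_for_junit([FakeResult("fail", 1), FakeResult("pass"), FakeResult("skip")]))  # (2, 0, 0, 1)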
2,037 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ResourceProvider/Allocators/exceptions.py
|
icetea_lib.ResourceProvider.Allocators.exceptions.AllocationError
|
class AllocationError(Exception):
"""
Exception for allocation errors.
"""
def __init__(self, message=""): # pylint: disable=useless-super-delegation
super(AllocationError, self).__init__(message)
|
class AllocationError(Exception):
'''
Exception for allocation errors.
'''
def __init__(self, message=""):
pass
| 2 | 1 | 2 | 0 | 2 | 1 | 1 | 1.33 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 11 | 6 | 0 | 3 | 2 | 1 | 4 | 3 | 2 | 1 | 1 | 3 | 0 | 1 |
2,038 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ResourceProvider/ResourceConfig.py
|
icetea_lib.ResourceProvider.ResourceConfig.ResourceConfig
|
class ResourceConfig(object): # pylint: disable=too-many-instance-attributes
"""
Object used to describe and manipulate the resource configuration used by a testcase.
"""
def __init__(self, json_configuration=None, logger=None):
self.json_config = json_configuration
self._dut_requirements = []
self.logger = logger
self._sim_config = None
if self.logger is None:
self.logger = LogManager.get_dummy_logger()
self._counts = {"total": 0, "hardware": 0, "process": 0, "serial": 0, "mbed": 0}
@property
def _hardware_count(self):
"""
Amount of hardware resources.
:return: integer
"""
return self._counts.get("hardware") + self._counts.get("serial") + self._counts.get("mbed")
@_hardware_count.setter
def _hardware_count(self, value):
self._counts["hardware"] = value
@property
def _process_count(self):
"""
Amount of process resources.
:return: integer
"""
return self._counts.get("process")
@_process_count.setter
def _process_count(self, value):
self._counts["process"] = value
@property
def _dut_count(self):
"""
Total amount of resources.
:return: integer
"""
return self._counts.get("total")
@_dut_count.setter
def _dut_count(self, value):
self._counts["total"] = value
def resolve_configuration(self, configuration):
"""
Resolve requirements from given JSON encoded data.
The JSON should follow the testcase meta-data requirements field format. This function
will resolve requirements for each individual DUT and create a DUT requirements list
that contains the configuration for each DUT, eg:
{
"duts": [
{ "*": {"count": 2, "type": "process" } }
]
}
would result in the following configuration:
[
{ "1": {"type": "process", "allowed_platforms": [], "nick": None }
{ "2": {"type": "process", "allowed_platforms": [], "nick": None }
]
:param configuration: optional argument if requirements come from external source,
should be similar to the following format:
{
"duts": [
{ "*": {"count": 2, "type": "process" } }
]
}
"""
configuration = configuration if configuration else self.json_config
self._resolve_requirements(configuration["requirements"])
self._resolve_dut_count()
def _resolve_requirements(self, requirements):
"""
Internal method for resolving requirements into resource configurations.
:param requirements: Resource requirements from test case configuration as dictionary.
:return: Empty list if dut_count cannot be resolved, or nothing
"""
try:
dut_count = requirements["duts"]["*"]["count"]
except KeyError:
return []
default_values = {
"type": "hardware",
"allowed_platforms": [],
"nick": None,
}
default_values.update(requirements["duts"]["*"])
del default_values["count"]
dut_keys = list(default_values.keys())
dut_keys.extend(["application", "location", "subtype"])
dut_requirements = self.__generate_indexed_requirements(dut_count,
default_values,
requirements)
# Match groups of duts defined with 1..40 notation.
for key in requirements["duts"].keys():
if not isinstance(key, string_types):
continue
match = re.search(r'([\d]{1,})\.\.([\d]{1,})', key)
if match:
first_dut_idx = int(match.group(1))
last_dut_idx = int(match.group(2))
for i in range(first_dut_idx, last_dut_idx+1):
for k in dut_keys:
if k in requirements["duts"][key]:
dut_requirements[i-1].set(k, copy.copy(requirements["duts"][key][k]))
for idx, req in enumerate(dut_requirements):
if isinstance(req.get("nick"), string_types):
nick = req.get("nick")
req.set("nick", ResourceConfig.__replace_base_variables(nick,
len(dut_requirements),
idx))
self._solve_location(req, len(dut_requirements), idx)
self._dut_requirements = dut_requirements
return None
def _solve_location(self, req, dut_req_len, idx):
"""
Helper function for resolving the location for a resource.
:param req: Requirements dictionary
:param dut_req_len: Amount of required resources
:param idx: index, integer
:return: Nothing, modifies req object
"""
if not req.get("location"):
return
if len(req.get("location")) == 2:
for x_and_y, coord in enumerate(req.get("location")):
if isinstance(coord, string_types):
coord = ResourceConfig.__replace_coord_variables(coord,
x_and_y,
dut_req_len,
idx)
try:
loc = req.get("location")
loc[x_and_y] = eval(coord) # pylint: disable=eval-used
req.set("location", loc)
except SyntaxError as error:
self.logger.error(error)
loc = req.get("location")
loc[x_and_y] = 0.0
req.set("location", loc)
else:
self.logger.error("invalid location field!")
req.set("location", [0.0, 0.0])
@staticmethod
def __replace_base_variables(text, req_len, idx):
"""
Replace i and n in text with index+1 and req_len.
:param text: base text to modify
:param req_len: amount of required resources
:param idx: index of resource we are working on
:return: modified string
"""
return text \
.replace("{i}", str(idx + 1)) \
.replace("{n}", str(req_len))
@staticmethod
def __replace_coord_variables(text, x_and_y, req_len, idx):
"""
Replace x and y with their coordinates and replace pi with value of pi.
:param text: text: base text to modify
:param x_and_y: location x and y
:param req_len: amount of required resources
:param idx: index of resource we are working on
:return: str
"""
return ResourceConfig.__replace_base_variables(text, req_len, idx) \
.replace("{xy}", str(x_and_y)) \
.replace("{pi}", str(math.pi))
@staticmethod
def __generate_indexed_requirements(dut_count, basekeys, requirements):
"""
Generate indexed requirements from general requirements.
:param dut_count: Amount of duts
:param basekeys: base keys as dict
:param requirements: requirements
:return: Indexed requirements as dict.
"""
dut_requirements = []
for i in range(1, dut_count + 1):
dut_requirement = ResourceRequirements(basekeys.copy())
if i in requirements["duts"]:
for k in requirements["duts"][i]:
dut_requirement.set(k, requirements["duts"][i][k])
elif str(i) in requirements["duts"]:
i = str(i)
for k in requirements["duts"][i]:
dut_requirement.set(k, requirements["duts"][i][k])
dut_requirements.append(dut_requirement)
return dut_requirements
def count_hardware(self):
"""
:return: Hardware resource count
"""
return self._hardware_count
def get_dut_range(self, i=0):
"""
get range of length dut_count with offset i.
:param i: Offset
:return: range
"""
return range(1 + i, self.count_duts() + i + 1)
def _resolve_hardware_count(self):
"""
Calculate amount of hardware resources.
:return: Nothing, adds results to self._hardware_count
"""
length = len([d for d in self._dut_requirements if d.get("type") in ["hardware",
"serial", "mbed"]])
self._hardware_count = length
def count_process(self):
"""
:return: Process resource count
"""
return self._process_count
def _resolve_process_count(self):
"""
Calculate amount of process resources.
:return: Nothing, adds results to self._process_count
"""
length = len([d for d in self._dut_requirements if d.get("type") == "process"])
self._process_count = length
def count_duts(self):
"""
:return: Total amount of resources
"""
return self._dut_count
def _resolve_dut_count(self):
"""
Calculates total amount of resources required and their types.
:return: Nothing, modifies _dut_count, _hardware_count and
_process_count
:raises: ValueError if total count does not match counts of types separately.
"""
self._dut_count = len(self._dut_requirements)
self._resolve_process_count()
self._resolve_hardware_count()
if self._dut_count != self._hardware_count + self._process_count:
raise ValueError("Missing or invalid type fields in dut configuration!")
def get_dut_configuration(self, ident=None):
"""
Getter for dut configuration for dut ident.
:param ident: Identification for dut. If set to None, all requirements are returned.
:return: Requirements dictionary for dut ident if ident is not None,
else dictionary with all requirements.
"""
return self._dut_requirements if ident is None else self._dut_requirements[ident]
def set_dut_configuration(self, ident, config):
"""
Set requirements for dut ident.
:param ident: Identity of dut.
:param config: If ResourceRequirements object, add object as requirements for resource
ident. If dictionary, create new ResourceRequirements object from dictionary.
:return: Nothing
"""
if hasattr(config, "get_requirements"):
self._dut_requirements[ident] = config
elif isinstance(config, dict):
self._dut_requirements[ident] = ResourceRequirements(config)
|
class ResourceConfig(object):
'''
Object used to describe and manipulate the resource configuration used by a testcase.
'''
def __init__(self, json_configuration=None, logger=None):
pass
@property
def _hardware_count(self):
'''
Amount of hardware resources.
:return: integer
'''
pass
@_hardware_count.setter
def _hardware_count(self):
pass
@property
def _process_count(self):
'''
Amount of process resources.
:return: integer
'''
pass
@_process_count.setter
def _process_count(self):
pass
@property
def _dut_count(self):
'''
Total amount of resources.
:return: integer
'''
pass
@_dut_count.setter
def _dut_count(self):
pass
def resolve_configuration(self, configuration):
'''
Resolve requirements from given JSON encoded data.
The JSON should follow the testcase meta-data requirements field format. This function
will resolve requirements for each individual DUT and create a DUT requirements list
that contains the configuration for each DUT, eg:
{
"duts": [
{ "*": {"count": 2, "type": "process" } }
]
}
would result in the following configuration:
[
{ "1": {"type": "process", "allowed_platforms": [], "nick": None }
{ "2": {"type": "process", "allowed_platforms": [], "nick": None }
]
:param configuration: optional argument if requirements come from external source,
should be similar to the following format:
{
"duts": [
{ "*": {"count": 2, "type": "process" } }
]
}
'''
pass
def _resolve_requirements(self, requirements):
'''
Internal method for resolving requirements into resource configurations.
:param requirements: Resource requirements from test case configuration as dictionary.
:return: Empty list if dut_count cannot be resolved, or nothing
'''
pass
def _solve_location(self, req, dut_req_len, idx):
'''
Helper function for resolving the location for a resource.
:param req: Requirements dictionary
:param dut_req_len: Amount of required resources
:param idx: index, integer
:return: Nothing, modifies req object
'''
pass
@staticmethod
def __replace_base_variables(text, req_len, idx):
'''
Replace i and n in text with index+1 and req_len.
:param text: base text to modify
:param req_len: amount of required resources
:param idx: index of resource we are working on
:return: modified string
'''
pass
@staticmethod
def __replace_coord_variables(text, x_and_y, req_len, idx):
'''
Replace x and y with their coordinates and replace pi with value of pi.
:param text: text: base text to modify
:param x_and_y: location x and y
:param req_len: amount of required resources
:param idx: index of resource we are working on
:return: str
'''
pass
@staticmethod
def __generate_indexed_requirements(dut_count, basekeys, requirements):
'''
Generate indexed requirements from general requirements.
:param dut_count: Amount of duts
:param basekeys: base keys as dict
:param requirements: requirements
:return: Indexed requirements as dict.
'''
pass
def count_hardware(self):
'''
:return: Hardware resource count
'''
pass
def get_dut_range(self, i=0):
'''
get range of length dut_count with offset i.
:param i: Offset
:return: range
'''
pass
def _resolve_hardware_count(self):
'''
Calculate amount of hardware resources.
:return: Nothing, adds results to self._hardware_count
'''
pass
def count_process(self):
'''
:return: Process resource count
'''
pass
def _resolve_process_count(self):
'''
Calculate amount of process resources.
:return: Nothing, adds results to self._process_count
'''
pass
def count_duts(self):
'''
:return: Total amount of resources
'''
pass
def _resolve_dut_count(self):
'''
Calculates total amount of resources required and their types.
:return: Nothing, modifies _dut_count, _hardware_count and
_process_count
:raises: ValueError if total count does not match counts of types separately.
'''
pass
def get_dut_configuration(self, ident=None):
'''
Getter for dut configuration for dut ident.
:param ident: Identification for dut. If set to None, all requirements are returned.
:return: Requirements dictionary for dut ident if ident is not None,
else dictionary with all requirements.
'''
pass
def set_dut_configuration(self, ident, config):
'''
Set requirements for dut ident.
:param ident: Identity of dut.
:param config: If ResourceRequirements object, add object as requirements for resource
ident. If dictionary, create new ResourceRequirements object from dictionary.
:return: Nothing
'''
pass
| 32 | 19 | 12 | 1 | 6 | 5 | 2 | 0.82 | 1 | 10 | 1 | 0 | 19 | 5 | 22 | 22 | 299 | 43 | 142 | 58 | 110 | 116 | 114 | 48 | 91 | 10 | 1 | 5 | 47 |
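Two details of ResourceConfig._resolve_requirements above are worth restating in isolation: dut group keys such as "1..3" are expanded with a regular expression, and "{i}"/"{n}" placeholders in nicks are replaced per dut. The sketch below reproduces only those substitutions with plain values; it is illustrative and not the icetea API.

import re

def expand_group_key(key):
    # Expands a "first..last" dut group key into concrete 1-based indexes,
    # using the same regular expression as ResourceConfig._resolve_requirements.
    match = re.search(r'([\d]{1,})\.\.([\d]{1,})', key)
    if not match:
        return []
    first, last = int(match.group(1)), int(match.group(2))
    return list(range(first, last + 1))

def expand_nick(nick, dut_count, idx):
    # Same substitution as __replace_base_variables: {i} -> index + 1, {n} -> total count.
    return nick.replace("{i}", str(idx + 1)).replace("{n}", str(dut_count))

print(expand_group_key("2..4"))           # [2, 3, 4]
print(expand_nick("dut{i}of{n}", 3, 0))   # dut1of3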
2,039 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ResourceProvider/ResourceProvider.py
|
icetea_lib.ResourceProvider.ResourceProvider.ResourceProvider
|
class ResourceProvider(object):
"""
Singleton ResourceProvider class. ResourceProvider is common for the entire run and it
handles allocation of resources based on requested resource configurations. It also
determines which allocator is used for allocating these resources.
"""
__metaclass__ = Singleton
def __init__(self, args):
# @TODO: Refactor args into some separate configuration class maybe?
self.args = args
self.allocator = None
self.jsonconf = None
no_file = False if self.args.list or self.args.listsuites else True
self.logger = LogManager.get_resourceprovider_logger("ResourceProvider", "RSP", no_file)
self._pluginmanager = None
def set_pluginmanager(self, pluginmanager):
"""
Setter for pluginmanager
:param pluginmanager: PluginManager
:return: Nothing
"""
self._pluginmanager = pluginmanager
def __del__(self):
if self.allocator:
self.allocator.cleanup()
self.allocator = None
def resolve_configuration(self, conf, resource_configuration):
"""
Resolve the configuration from given JSON encoded configuration data.
:param conf: JSON encoded configuration
:param resource_configuration: ResourceConfig object
"""
if not self.logger:
self.logger = LogManager.get_resourceprovider_logger("ResourceProvider", "RSP")
self.jsonconf = conf
resource_configuration.resolve_configuration(conf)
def allocate_duts(self, resource_configuration):
"""
Initialize DUT's.
:param resource_configuration: ResourceConfig
:return: List of DUT objects
:raises: ResourceInitError
"""
if not self.allocator:
self.allocator = self.__get_allocator()
try:
# Try to allocate resources and instantiate DUT's
dut_conf_list = resource_configuration.get_dut_configuration()
if dut_conf_list:
self.logger.debug("Allocating duts with the following configurations:")
for conf in dut_conf_list:
self.logger.debug(conf.get_requirements())
self.logger.info("Allocating {} duts.".format(resource_configuration.count_duts()))
allocations = self.allocator.allocate(resource_configuration, args=self.args)
self.logger.info("Allocation successful")
except AllocationError as error:
raise ResourceInitError(error)
return allocations
def cleanup(self):
"""
Clean up allocator at the end of the run.
:return: Nothing
"""
self.logger.debug("Cleaning up ResourceProvider.")
if self.allocator:
self.logger.debug("Cleaning up allocator.")
self.allocator.cleanup()
def __get_allocator(self):
"""
Internal method for determining which allocator is needed for this run.
:return: BaseAllocator
:raises: ResourceInitError
"""
allocator_name = self.args.allocator
allocator_cfg_file = self.args.allocator_cfg
allocator_cfg = dict()
if allocator_cfg_file:
allocator_cfg = self._read_allocator_config(allocator_name, allocator_cfg_file)
allocator = self._pluginmanager.get_allocator(allocator_name)
if allocator is None:
raise ResourceInitError("Unable to load allocator {}".format(allocator_name))
self.logger.debug("Using allocator %s", allocator_name)
return allocator(self.args, None, allocator_cfg)
def get_my_duts(self):
"""
Get my duts.
:return: list of duts
"""
# TODO: Is this function still used somewhere? There are no usages in this project at least.
if self.args.my_duts:
myduts = self.args.my_duts.split(',')
return myduts
return None
def _read_allocator_config(self, allocator_name, allocator_cfg_file):
"""
Read configuration for allocator from a json file.
Json file needs to have an object that contains key allocator_name (if you want to use
the same config for each allocator for example)
:param allocator_name: Name of the allocator.
:param allocator_cfg_file: absolute path to the json config file to use.
:return: dict
:raises: ResourceInitError if config file not found.
"""
allocator_config = dict()
self.logger.debug("Reading allocator configuration from {}".format(allocator_cfg_file))
try:
with open(allocator_cfg_file, "r") as cfg_file:
try:
data = json.load(cfg_file)
except ValueError as error:
self.logger.error(error)
raise ResourceInitError("Failed to decode json "
"from allocator config file {}".format(cfg_file))
if allocator_name in data:
allocator_config = data.get(allocator_name)
else:
self.logger.error(
"Allocator configuration not found in {}".format(allocator_cfg_file))
raise ResourceInitError(
"Allocator configuration not found in {}".format(allocator_cfg_file))
except IOError as error:
self.logger.error(error)
raise ResourceInitError("Unable to read allocator config: {}".format(error))
self.logger.debug("Read allocator configuration from {}: {}".format(allocator_cfg_file,
allocator_config))
return allocator_config
|
class ResourceProvider(object):
'''
Singleton ResourceProvider class. ResourceProvider is common for the entire run and it
handles allocation of resources based on requested resource configurations. It also
determines which allocator is used for allocating these resources.
'''
def __init__(self, args):
pass
def set_pluginmanager(self, pluginmanager):
'''
Setter for pluginmanager
:param pluginmanager: PluginManager
:return: Nothing
'''
pass
def __del__(self):
pass
def resolve_configuration(self, conf, resource_configuration):
'''
Resolve the configuration from given JSON encoded configuration data.
:param conf: JSON encoded configuration
:param resource_configuration: ResourceConfig object
'''
pass
def allocate_duts(self, resource_configuration):
'''
Initialize DUT's.
:param resource_configuration: ResourceConfig
:return: List of DUT objects
:raises: ResourceInitError
'''
pass
def cleanup(self):
'''
Clean up allocator at the end of the run.
:return: Nothing
'''
pass
def __get_allocator(self):
'''
Internal method for determining which allocator is needed for this run.
:return: BaseAllocator
:raises: ResourceInitError
'''
pass
def get_my_duts(self):
'''
Get my duts.
:return: list of duts
'''
pass
def _read_allocator_config(self, allocator_name, allocator_cfg_file):
'''
Read configuration for allocator from a json file.
Json file needs to have an object that contains key allocator_name (if you want to use
the same config for each allocator for example)
:param allocator_name: Name of the allocator.
:param allocator_cfg_file: absolute path to the json config file to use.
:return: dict
:raises: ResourceInitError if config file not found.
'''
pass
| 10 | 8 | 14 | 1 | 9 | 5 | 3 | 0.57 | 1 | 4 | 2 | 0 | 9 | 5 | 9 | 9 | 145 | 19 | 80 | 30 | 70 | 46 | 75 | 27 | 65 | 5 | 1 | 3 | 23 |
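ResourceProvider._read_allocator_config above expects the configuration file to contain a JSON object keyed by allocator name. A simplified standalone reader with the same shape; the file name and the "LocalAllocator" key in the comment are only illustrative.

import json

# allocator_cfg.json might look like: {"LocalAllocator": {"some_option": true}}
def read_allocator_config(allocator_name, allocator_cfg_file):
    # Trimmed-down version of ResourceProvider._read_allocator_config without logging.
    with open(allocator_cfg_file, "r") as cfg_file:
        data = json.load(cfg_file)
    if allocator_name not in data:
        raise ValueError("Allocator configuration not found in {}".format(allocator_cfg_file))
    return data[allocator_name]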
2,040 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ResourceProvider/ResourceRequirements.py
|
icetea_lib.ResourceProvider.ResourceRequirements.ResourceRequirements
|
class ResourceRequirements(object):
"""
ResourceRequirements class. Contains methods for getting and setting requirement values as
well as processing requirements into formats supported by allocators.
"""
def __init__(self, requirement_dict=None):
self._requirements = requirement_dict if requirement_dict else {}
def set(self, key, value):
"""
Sets the value for a specific requirement.
:param key: Name of requirement to be set
:param value: Value to set for requirement key
:return: Nothing, modifies requirement
"""
if key == "tags":
self._set_tag(tags=value)
else:
if isinstance(value, dict) and key in self._requirements and isinstance(
self._requirements[key], dict):
self._requirements[key] = merge(self._requirements[key], value)
else:
self._requirements[key] = value
def get(self, key):
"""
Gets contents of requirement key.
Switches to recursive search if dots ('.') are found in the key.
:param key: key or dot separated string of keys to look for.
:return: contents of requirement key/results of search or None.
"""
# Catch the case where the key is "."
if "." in key and len(key) > 1:
return self._recursive_get(key)
return self._requirements.get(key, None)
def __getitem__(self, item):
return self._requirements[item]
def _recursive_get(self, key, dic=None):
"""
Gets contents of requirement key recursively so users can search for
specific keys inside nested requirement dicts.
:param key: key or dot separated string of keys to look for.
:param dic: Optional dictionary to use in the search.
If not provided, self._requirements is used.
:return: results of search or None
"""
return recursive_search(key, dic) if dic else recursive_search(key, self._requirements)
def get_requirements(self):
"""
Return requirements as dict.
:return: Dictionary
"""
return self._requirements
def _set_tag(self, tag=None, tags=None, value=True):
"""
Sets the value of a specific tag or merges existing tags with a dict of new tags.
Either tag or tags must be None.
:param tag: Tag which needs to be set.
:param tags: Set of tags which needs to be merged with existing tags.
:param value: Value to set for the new tag named by :param tag.
:return: Nothing
"""
existing_tags = self._requirements.get("tags")
if tags and not tag:
existing_tags = merge(existing_tags, tags)
self._requirements["tags"] = existing_tags
elif tag and not tags:
existing_tags[tag] = value
self._requirements["tags"] = existing_tags
def remove_empty_tags(self, tags=None):
"""
Tags whose value is set to None shall be removed from tags.
:param tags: Tags which are to be processed.
If None, tags found in self._requirements are used.
:return: If tags is not None, returns dict with processed tags. Else returns None.
"""
new_tags = {}
old_tags = tags if tags else self.get("tags")
for tag in old_tags.keys():
if old_tags[tag] is not None:
new_tags[tag] = old_tags[tag]
if not tags:
self._requirements["tags"] = new_tags
return None
return new_tags
|
class ResourceRequirements(object):
'''
ResourceRequirements class. Contains methods for getting and setting requirement values as
well as processing requirements into formats supported by allocators.
'''
def __init__(self, requirement_dict=None):
pass
def set(self, key, value):
'''
Sets the value for a specific requirement.
:param key: Name of requirement to be set
:param value: Value to set for requirement key
:return: Nothing, modifies requirement
'''
pass
def get(self, key):
'''
Gets contents of requirement key.
Switches to recursive search if dots ('.') are found in the key.
:param key: key or dot separated string of keys to look for.
:return: contents of requirement key/results of search or None.
'''
pass
def __getitem__(self, item):
pass
def _recursive_get(self, key, dic=None):
'''
Gets contents of requirement key recursively so users can search for
specific keys inside nested requirement dicts.
:param key: key or dot separated string of keys to look for.
:param dic: Optional dictionary to use in the search.
If not provided, self._requirements is used.
:return: results of search or None
'''
pass
def get_requirements(self):
'''
Return requirements as dict.
:return: Dictionary
'''
pass
def _set_tag(self, tag=None, tags=None, value=True):
'''
Sets the value of a specific tag or merges existing tags with a dict of new tags.
Either tag or tags must be None.
:param tag: Tag which needs to be set.
:param tags: Set of tags which needs to be merged with existing tags.
:param value: Value to set for the new tag named by :param tag.
:return: Nothing
'''
pass
def remove_empty_tags(self, tags=None):
'''
Tags whose value is set to None shall be removed from tags.
:param tags: Tags which are to be processed.
If None, tags found in self._requirements are used.
:return: If tags is not None, returns dict with processed tags. Else returns None.
'''
pass
| 9 | 7 | 10 | 1 | 5 | 5 | 2 | 1.07 | 1 | 1 | 0 | 0 | 8 | 1 | 8 | 8 | 95 | 12 | 40 | 14 | 31 | 43 | 36 | 14 | 27 | 5 | 1 | 2 | 19 |
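ResourceRequirements.remove_empty_tags above filters out tags whose value is None. The same rule, restated as a dict comprehension over a plain dict (illustrative only):

def drop_empty_tags(tags):
    # Keep only tags whose value is not None, as ResourceRequirements.remove_empty_tags does.
    return {tag: value for tag, value in tags.items() if value is not None}

print(drop_empty_tags({"board": "K64F", "wifi": None}))  # {'board': 'K64F'}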
2,041 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ResourceProvider/exceptions.py
|
icetea_lib.ResourceProvider.exceptions.ResourceInitError
|
class ResourceInitError(Exception):
"""
ResourceInitError exception.
"""
pass
|
class ResourceInitError(Exception):
'''
ResourceInitError exception.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
2,042 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Result.py
|
icetea_lib.Result.Result
|
class Result(object): # pylint: disable=too-many-instance-attributes,too-many-public-methods
"""
Result object, used for storing all relevant information from the test case
for reporting and storage purposes.
"""
# Constructor for Result
def __init__(self, kwargs=None):
kwargs = {} if kwargs is None else kwargs
self.__verdict = 'unknown'
self.duration = kwargs.get("duration", 0)
self.framework_info = {
"name": kwargs.get("fw_name", get_fw_name()),
"version": kwargs.get("fw_version", get_fw_version())
}
self.tc_git_info = {
"url": kwargs.get("gitUrl", ''),
"branch": kwargs.get("branch", ''),
"commitId": kwargs.get("commitId", '')
}
self.job_id = kwargs.get("jobId", '')
self.campaign = kwargs.get("campaign", '')
self.retries_left = 0
self.duration = kwargs.get("duration", 0)
self.fail_reason = kwargs.get("reason", '')
self.skip_reason = kwargs.get("skip_reason", '')
self.dutinformation = DutInformationList()
self.dut_type = ''
self.dut_count = 0
self.dut_vendor = []
self.toolchain = 'unknown'
self.logpath = None
self.retcode = kwargs.get("retcode", -1)
self.tester = Result.__get_username()
self.component = [] # CUT - Component Under Test
self.feature = [] # FUT - Feature Under Test
self.logfiles = []
self.tc_metadata = kwargs.get("tc_metadata", {'name': '', 'purpose': ''})
self.tc_metadata['name'] = kwargs.get("testcase", '')
self.stdout = kwargs.get("stdout", '')
self.stderr = kwargs.get("stderr", '')
if "verdict" in kwargs:
self.set_verdict(kwargs.get("verdict"))
if 'retcode' in kwargs:
self.retcode = kwargs.get("retcode")
if self.retcode == 0:
self.set_verdict("pass", self.retcode)
else:
self.set_verdict("fail", self.retcode)
self.uploaded = False
def set_tc_git_info(self, git_info):
"""
Set test case git information.
:param git_info: git information as dictionary. Keys: url, branch and commitId
:return: Nothing
"""
self.tc_git_info.update(git_info)
# TC properties
# TC git branch
@property
def tcbranch(self):
"""
get test case git branch.
:return: branch
"""
return self.tc_git_info.get('branch')
@tcbranch.setter
def tcbranch(self, value):
set_or_delete(self.tc_git_info, 'branch', value)
# TC GIT commit id
@property
def tc_commit_id(self):
"""
get test case commit id.
:return: commit id
"""
return self.tc_git_info.get('commitId')
@tc_commit_id.setter
def tc_commit_id(self, value):
set_or_delete(self.tc_git_info, 'commitId', value)
# TC GIT URL
@property
def tc_git_url(self):
"""
get test case git url.
:return: git url
"""
return self.tc_git_info.get('url', self.tc_git_info.get('scm_link'))
@tc_git_url.setter
def tc_git_url(self, value):
self.tc_git_info['gitUrl'] = value
# BUILD
# Build name
@property
def build(self):
"""
get build name.
:return: build name. None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.name
return None
@build.setter
def build(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.name = value
# Build name
@property
def build_date(self):
"""
get build date.
:return: build date. None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.date
return None
@build_date.setter
def build_date(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.date = value
# Build file sha1
@property
def build_sha1(self):
"""
get sha1 hash of build.
:return: build sha1 or None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.sha1
return None
@build_sha1.setter
def build_sha1(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.sha1 = value
@property
def build_url(self):
"""
get build url.
:return: build url or None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.build_url
return None
@build_url.setter
def build_url(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.build_url = value
@property
def build_git_url(self):
"""
get build git url.
:return: build git url or None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.giturl
return None
@build_git_url.setter
def build_git_url(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.giturl = value
@property
def build_data(self):
"""
get build data.
:return: build data or None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.get_data()
return None
@property
def build_branch(self):
"""
get build branch.
:return: build branch or None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.branch
return None
@build_branch.setter
def build_branch(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.branch = value
@property
def buildcommit(self):
"""
get build commit id.
:return: build commit id or None if not found
"""
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.commit_id
return None
@buildcommit.setter
def buildcommit(self, value):
# pylint: disable=len-as-condition
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
self.dutinformation.get(0).build.commit_id = value
# FRAMEWORK
@property
def fw_name(self):
"""
get test framework name.
:return: framework name or None if not found
"""
return self.framework_info.get("name")
@fw_name.setter
def fw_name(self, value):
self.framework_info["name"] = value
@property
def fw_version(self):
"""
get framework version.
:return: framework version or None if not found
"""
return self.framework_info.get("version")
@fw_version.setter
def fw_version(self, value):
self.framework_info["version"] = value
@property
def skip(self):
"""
Determine if test was skipped.
:return: True if test was skipped, else False
"""
return self.skipped()
@property
def success(self):
"""
Determine if test was passed.
:return: True if test was passed, else False
"""
return self.passed()
@property
def failure(self):
"""
Determine if test failed.
:return: True if test failed, else False
"""
return self.failed()
@property
def inconclusive(self):
"""
Determine if test was inconclusive.
:return: True if test was inconclusive, else False
"""
return self.was_inconclusive()
def get_verdict(self):
"""
Get test verdict.
:return: verdict
"""
return self.__verdict
# Set final verdict
def set_verdict(self, verdict, retcode=-1, duration=-1):
"""
Set the final verdict for this Result.
:param verdict: Verdict, must be from ['pass', 'fail', 'unknown', 'skip', 'inconclusive']
:param retcode: integer return code
:param duration: test duration
:return: Nothing
:raises: ValueError if verdict was unknown.
"""
verdict = verdict.lower()
if not verdict in ['pass', 'fail', 'unknown', 'skip', 'inconclusive']:
raise ValueError("Unknown verdict {}".format(verdict))
if retcode == -1 and verdict == 'pass':
retcode = 0
self.__verdict = verdict
self.retcode = retcode
if duration >= 0:
self.duration = duration
def build_result_metadata(self, data=None, args=None):
"""
collect metadata into this object
:param data: dict
:param args: build from args instead of data
"""
data = data if data else self._build_result_metainfo(args)
if data.get("build_branch"):
self.build_branch = data.get("build_branch")
if data.get("buildcommit"):
self.buildcommit = data.get("buildcommit")
if data.get("build_git_url"):
self.build_git_url = data.get("build_git_url")
if data.get("build_url"):
self.build_url = data.get("build_url")
if data.get("campaign"):
self.campaign = data.get("campaign")
if data.get("job_id"):
self.job_id = data.get("job_id")
if data.get("toolchain"):
self.toolchain = data.get("toolchain")
if data.get("build_date"):
self.build_date = data.get("build_date")
@staticmethod
def _build_result_metainfo(args):
"""
Internal helper for collecting metadata from args to results
"""
data = dict()
if hasattr(args, "branch") and args.branch:
data["build_branch"] = args.branch
if hasattr(args, "commitId") and args.commitId:
data["buildcommit"] = args.commitId
if hasattr(args, "gitUrl") and args.gitUrl:
data["build_git_url"] = args.gitUrl
if hasattr(args, "buildUrl") and args.buildUrl:
data["build_url"] = args.buildUrl
if hasattr(args, "campaign") and args.campaign:
data["campaign"] = args.campaign
if hasattr(args, "jobId") and args.jobId:
data["job_id"] = args.jobId
if hasattr(args, "toolchain") and args.toolchain:
data["toolchain"] = args.toolchain
if hasattr(args, "buildDate") and args.buildDate:
data["build_date"] = args.buildDate
return data
def set_tc_metadata(self, tc_metadata):
"""
Set test case metadata.
:param tc_metadata: dictionary
:return: Nothing
"""
self.tc_metadata = tc_metadata
def get_tc_name(self):
"""
Get name from tc metadata.
:return: Name from tc metadata
"""
return self.tc_metadata['name']
def get_toolchain(self):
"""
get toolchain.
:return: toolchain
"""
return self.toolchain
def passed(self):
"""
Determine if test passed.
:return: True if test was passed, else False
"""
return self.get_verdict() == 'pass'
def skipped(self):
"""
Determine if test was skipped.
:return: True if test was skipped, else False
"""
return self.get_verdict() == 'skip'
def was_inconclusive(self):
"""
Determine if test was inconclusive.
:return: True if test was inconclusive, else False
"""
return self.get_verdict() == 'inconclusive'
def failed(self):
"""
Determine if test failed.
:return: True if test failed, else False
"""
return self.get_verdict() == 'fail'
def get_fail_reason(self):
"""
Get fail reason.
:return: failure reason
"""
return self.fail_reason
def set_dutinformation(self, info):
"""
Create a new DutInformationList with initial data info.
:param info: list of DutInformation objects
:return: Nothing
"""
self.dutinformation = DutInformationList(info)
def add_dutinformation(self, info):
"""
Append the information of a new dut to the dutinformation list.
:param info: DutInformation object
:return: Nothing
"""
self.dutinformation.append(info)
@property
def dut_resource_id(self):
"""
Get dut resource ids.
:return: list of resource ids or unknown if none were found.
"""
return self.dutinformation.get_resource_ids()
def get_dut_models(self):
"""
Gets a string of dut models in this TC.
:return: String of dut models separated with commas.
unknown platform if no dut information is available
"""
return self.dutinformation.get_uniq_string_dutmodels()
@property
def dut_models(self):
"""
Gets a list of dut models in this TC.
:return: List of dut models in this TC. Empty list if information is not available.
"""
return self.dutinformation.get_uniq_list_dutmodels()
def get_duration(self, seconds=False):
"""
Get test case duration.
:param seconds: if set to True, return tc duration in seconds, otherwise as str(
datetime.timedelta)
:return: str(datetime.timedelta) or duration as string in seconds
"""
if seconds:
return str(self.duration)
delta = datetime.timedelta(seconds=self.duration)
return str(delta)
# get testcase metadata
def get_tc_object(self):
"""
get tc metadata.
:return: tc metadata dictionary
"""
return self.tc_metadata
def has_logs(self):
"""
Check if log files are available and return file names if they exist.
:return: list
"""
found_files = []
if self.logpath is None:
return found_files
if os.path.exists(self.logpath):
for root, _, files in os.walk(os.path.abspath(self.logpath)):
for fil in files:
found_files.append(os.path.join(root, fil))
return found_files
# get current os username
@staticmethod
def __get_username():
"""
Get current os username.
:return: os username.
"""
if pwd:
return pwd.getpwuid(os.geteuid()).pw_name
return getpass.getuser()
|
class Result(object):
'''
Result object, used for storing all relevant information from the test case
for reporting and storage purposes.
'''
def __init__(self, kwargs=None):
pass
def set_tc_git_info(self, git_info):
'''
Set test case git information.
:param git_info: git information as dictionary. Keys: url, branch and commitId
:return: Nothing
'''
pass
@property
def tcbranch(self):
'''
get test case git branch.
:return: branch
'''
pass
@tcbranch.setter
def tcbranch(self):
pass
@property
def tc_commit_id(self):
'''
get test case commit id.
:return: commit id
'''
pass
@tc_commit_id.setter
def tc_commit_id(self):
pass
@property
def tc_git_url(self):
'''
get test case git url.
:return: git url
'''
pass
@tc_git_url.setter
def tc_git_url(self):
pass
@property
def build(self):
'''
get build name.
:return: build name. None if not found
'''
pass
@build.setter
def build(self):
pass
@property
def build_date(self):
'''
get build date.
:return: build date. None if not found
'''
pass
@build_date.setter
def build_date(self):
pass
@property
def build_sha1(self):
'''
get sha1 hash of build.
:return: build sha1 or None if not found
'''
pass
@build_sha1.setter
def build_sha1(self):
pass
@property
def build_url(self):
'''
get build url.
:return: build url or None if not found
'''
pass
@build_url.setter
def build_url(self):
pass
@property
def build_git_url(self):
'''
get build git url.
:return: build git url or None if not found
'''
pass
@build_git_url.setter
def build_git_url(self):
pass
@property
def build_data(self):
'''
get build data.
:return: build data or None if not found
'''
pass
@property
def build_branch(self):
'''
get build branch.
:return: build branch or None if not found
'''
pass
@build_branch.setter
def build_branch(self):
pass
@property
def buildcommit(self):
'''
get build commit id.
:return: build commit id or None if not found
'''
pass
@buildcommit.setter
def buildcommit(self):
pass
@property
def fw_name(self):
'''
get test framework name.
:return: framework name or None if not found
'''
pass
@fw_name.setter
def fw_name(self):
pass
@property
def fw_version(self):
'''
get framework version.
:return: framework version or None if not found
'''
pass
@fw_version.setter
def fw_version(self):
pass
@property
def skip(self):
'''
Determine if test was skipped.
:return: True if test was skipped, else False
'''
pass
@property
def success(self):
'''
Determine if test was passed.
:return: True if test was passed, else False
'''
pass
@property
def failure(self):
'''
Determine if test failed.
:return: True if test failed, else False
'''
pass
@property
def inconclusive(self):
'''
Determine if test was inconclusive.
:return: True if test was inconclusive, else False
'''
pass
def get_verdict(self):
'''
Get test verdict.
:return: verdict
'''
pass
def set_verdict(self, verdict, retcode=-1, duration=-1):
'''
Set the final verdict for this Result.
:param verdict: Verdict, must be from ['pass', 'fail', 'unknown', 'skip', 'inconclusive']
:param retcode: integer return code
:param duration: test duration
:return: Nothing
:raises: ValueError if verdict was unknown.
'''
pass
def build_result_metadata(self, data=None, args=None):
'''
collect metadata into this object
:param data: dict
:param args: build from args instead of data
'''
pass
@staticmethod
def _build_result_metainfo(args):
'''
Internal helper for collecting metadata from args to results
'''
pass
def set_tc_metadata(self, tc_metadata):
'''
Set test case metadata.
:param tc_metadata: dictionary
:return: Nothing
'''
pass
def get_tc_name(self):
'''
Get name from tc metadata.
:return: Name from tc metadata
'''
pass
def get_toolchain(self):
'''
get toolchain.
:return: toolchain
'''
pass
def passed(self):
'''
Determine if test passed.
:return: True if test was passed, else False
'''
pass
def skipped(self):
'''
Determine if test was skipped.
:return: True if test was skipped, else False
'''
pass
def was_inconclusive(self):
'''
Determine if test was inconclusive.
:return: True if test was inconclusive, else False
'''
pass
def failed(self):
'''
Determine if test failed.
:return: True if test failed, else False
'''
pass
def get_fail_reason(self):
'''
Get fail reason.
:return: failure reason
'''
pass
def set_dutinformation(self, info):
'''
Create a new DutInformationList with initial data info.
:param info: list of DutInformation objects
:return: Nothing
'''
pass
def add_dutinformation(self, info):
'''
Append the information of a new dut to the dutinformation list.
:param info: DutInformation object
:return: Nothing
'''
pass
@property
def dut_resource_id(self):
'''
Get dut resource ids.
:return: list of resource ids or unknown if none were found.
'''
pass
def get_dut_models(self):
'''
Gets a string of dut models in this TC.
:return: String of dut models separated with commas.
unknown platform if no dut information is available
'''
pass
@property
def dut_models(self):
'''
Gets a list of dut models in this TC.
:return: List of dut models in this TC. Empty list if information is not available.
'''
pass
def get_duration(self, seconds=False):
'''
Get test case duration.
:param seconds: if set to True, return tc duration in seconds, otherwise as str(
datetime.timedelta)
:return: str(datetime.timedelta) or duration as string in seconds
'''
pass
def get_tc_object(self):
'''
get tc metadata.
:return: tc metadata dictionary
'''
pass
def has_logs(self):
'''
Check if log files are available and return file names if they exist.
:return: list
'''
pass
@staticmethod
def __get_username():
'''
Get current os username.
:return: os username.
'''
pass
| 86 | 40 | 9 | 1 | 4 | 4 | 2 | 0.79 | 1 | 5 | 1 | 0 | 50 | 24 | 52 | 52 | 548 | 94 | 255 | 115 | 169 | 202 | 214 | 82 | 161 | 10 | 1 | 3 | 97 |
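Result.set_verdict above applies a few small rules: verdicts are lower-cased, only five values are accepted, and a pass with the default return code gets retcode 0. A standalone restatement of just those rules, for illustration:

VERDICTS = ('pass', 'fail', 'unknown', 'skip', 'inconclusive')

def normalize_verdict(verdict, retcode=-1):
    # Mirrors the checks in Result.set_verdict: lower-case, validate, default retcode for a pass.
    verdict = verdict.lower()
    if verdict not in VERDICTS:
        raise ValueError("Unknown verdict {}".format(verdict))
    if retcode == -1 and verdict == 'pass':
        retcode = 0
    return verdict, retcode

print(normalize_verdict("PASS"))  # ('pass', 0)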
2,043 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ResultList.py
|
icetea_lib.ResultList.ResultList
|
class ResultList(Iterator):
"""
List of Result objects. Implements parts of Iterator interface to allow some ease of use.
"""
def __init__(self): # pylint: disable=super-init-not-called
"""
Constructor for ResultList
"""
self.data = []
self.index = 0
self.retries_left = 0
def get(self, index=0):
"""
Get object with index index.
:param index: int
:return: Result
"""
return self.data[index]
def append(self, result):
"""
Append a new Result to the list.
:param result: Result to append
:return: Nothing
:raises: TypeError if result is not Result or ResultList
"""
if isinstance(result, Result):
self.data.append(result)
elif isinstance(result, ResultList):
self.data += result.data
else:
raise TypeError('unknown result type')
# @todo this could be used to generate html table after each test..
# self._save_html_report({"NOTE": "TESTS EXECUTION IS ONGOING.."}, reload=5)
def save(self, heads, console=True):
"""
Create reports in different formats.
:param heads: html table extra values in title rows
:param console: Boolean, default is True. If set, also print out the console log.
"""
# Junit
self._save_junit()
# HTML
self._save_html_report(heads)
if console:
# Console print
self._print_console_summary()
def _save_junit(self):
"""
Save Junit report.
:return: Nothing
"""
report = ReportJunit(self)
file_name = report.get_latest_filename("result.junit.xml", "")
report.generate(file_name)
file_name = report.get_latest_filename("junit.xml", "../")
report.generate(file_name)
def _save_html_report(self, heads=None, refresh=None):
"""
Save html report.
:param heads: headers as dict
:param refresh: Boolean, if True will add a reload-tag to the report
:return: Nothing
"""
report = ReportHtml(self)
heads = heads if heads else {}
test_report_filename = report.get_current_filename("html")
report.generate(test_report_filename, title='Test Results', heads=heads, refresh=refresh)
# Update latest.html in the log root directory
latest_report_filename = report.get_latest_filename("html")
report.generate(latest_report_filename, title='Test Results', heads=heads, refresh=refresh)
def _print_console_summary(self):
"""
Print the console report.
:return: Nothing
"""
ReportConsole(self).generate()
def success_count(self):
"""
Amount of passed test cases in this list.
:return: integer
"""
return len([i for i, result in enumerate(self.data) if result.success])
def failure_count(self):
"""
Amount of failed test cases in this list.
:return: integer
"""
return len([i for i, result in enumerate(self.data) if result.failure])
def inconclusive_count(self):
"""
Amount of inconclusive test cases in this list.
:return: integer
"""
inconc_count = len([i for i, result in enumerate(self.data) if result.inconclusive])
unknown_count = len([i for i, result in enumerate(self.data) if result.get_verdict() ==
"unknown"])
return inconc_count + unknown_count
def retry_count(self):
"""
Amount of retried test cases in this list.
:return: integer
"""
retries = len([i for i, result in enumerate(self.data) if result.retries_left > 0])
return retries
def skip_count(self):
"""
Amount of skipped test cases in this list.
:return: integer
"""
return len([i for i, result in enumerate(self.data) if result.skip])
def clean_fails(self):
"""
Check if there are any fails that were not subsequently retried.
:return: Boolean
"""
for item in self.data:
if item.failure and not item.retries_left > 0:
return True
return False
def clean_inconcs(self):
"""
Check if there are any inconclusive or unknown results that were not subsequently retried.
:return: Boolean
"""
for item in self.data:
if (item.inconclusive or item.get_verdict() == "unknown") and not item.retries_left > 0:
return True
return False
@property
def skipped(self):
"""
True if all results were skipped. Else False.
:return: Boolean
"""
return True if self.skip_count() == len(self) else False
@property
def inconclusive(self):
"""
True if all failures were inconclusive. False if there are any fails or all tests passed.
:return: Boolean
"""
return True if self.inconclusive_count() and not self.failure else False
@property
def success(self):
"""
If all tests passed, return True, else False.
:return: Boolean
"""
return True if not self.inconclusive and not self.failure else False
@property
def failure(self):
"""
If any tests failed, return True, else False.
:return: Boolean
"""
for item in self.data:
if item.failure:
return True
return False
def get_verdict(self):
"""
Get success state as a string from pass, fail, skip and inconclusive.
:return: str
"""
if self.success:
return "pass"
elif self.failure:
return "fail"
elif self.skipped:
return "skip"
return "inconclusive"
def total_duration(self):
"""
Sum of the durations of the tests in this list.
:return: integer
"""
durations = [result.duration for result in self.data]
return sum(durations)
def pass_rate(self, include_skips=False, include_inconclusive=False, include_retries=True):
"""
Calculate pass rate for tests in this list.
:param include_skips: Boolean, if True skipped tc:s will be included. Default is False
:param include_inconclusive: Boolean, if True inconclusive tc:s will be included.
Default is False.
:param include_retries: Boolean, if True retried tc:s will be included in percentages.
:return: Percentage in format .2f %
"""
total = self.count()
success = self.success_count()
retries = self.retry_count()
try:
if include_inconclusive and include_skips and include_retries:
val = 100.0*success/total
elif include_inconclusive and include_skips and not include_retries:
val = 100.0 * success / (total - retries)
elif include_skips and include_retries and not include_inconclusive:
inconcs = self.inconclusive_count()
val = 100.0 * success / (total - inconcs)
elif include_skips and not include_retries and not include_inconclusive:
inconcs = self.inconclusive_count()
val = 100.0 * success / (total - inconcs - retries)
elif include_inconclusive and include_retries and not include_skips:
skipped = self.skip_count()
val = 100.0 * success / (total - skipped)
elif include_inconclusive and not include_retries and not include_skips:
skipped = self.skip_count()
val = 100.0 * success / (total - skipped - retries)
elif not include_inconclusive and not include_skips and include_retries:
failures = self.failure_count()
val = 100.0 * success / (failures + success)
else:
failures = self.clean_fails()
val = 100.0 * success / (failures + success)
except ZeroDivisionError:
val = 0
return format(val, '.2f') + " %"
def get_summary(self):
"""
Get a summary of this ResultLists contents as dictionary.
:return: dictionary
"""
return {
"count": self.count(),
"pass": self.success_count(),
"fail": self.failure_count(),
"skip": self.skip_count(),
"inconclusive": self.inconclusive_count(),
"retries": self.retry_count(),
"duration": self.total_duration()
}
def count(self):
"""
Return length of this list (amount of Results).
:return: integer
"""
return len(self)
def __len__(self):
"""
Return length of this list (amount of Results).
:return: integer
"""
return len(self.data)
def __next__(self):
"""
Get next Result from this list.
:return: Result
"""
return self.next()
def next(self):
"""
Implementation of next method from Iterator.
:return: Result
:raises: StopIteration if IndexError occurs.
"""
try:
result = self.data[self.index]
except IndexError:
self.index = 0
raise StopIteration
self.index += 1
return result
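A minimal usage sketch for ResultList based on the methods above. The Result import path (icetea_lib.Result), its no-argument constructor, and the verdict-derived success/failure attributes that ResultList reads from each Result are assumptions not confirmed by this record.
from icetea_lib.Result import Result          # assumed module path
from icetea_lib.ResultList import ResultList

results = ResultList()
for verdict in ("pass", "pass", "fail"):
    result = Result()                         # assumed no-argument constructor
    result.set_verdict(verdict)               # retcode and duration keep their defaults
    results.append(result)

print(results.count())                        # 3
print(results.get_verdict())                  # "fail", because one result failed
print(results.get_summary())                  # dict: count/pass/fail/skip/inconclusive/retries/duration
print(results.pass_rate())                    # e.g. "66.67 %" with the default arguments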
|
class ResultList(Iterator):
'''
List of Result objects. Implements parts of Iterator interface to allow some ease of use.
'''
def __init__(self):
'''
Constructor for ResultList
'''
pass
def get(self, index=0):
'''
Get object with index index.
:param index: int
:return: Result
'''
pass
def append(self, result):
'''
Append a new Result to the list.
:param result: Result to append
:return: Nothing
:raises: TypeError if result is not Result or ResultList
'''
pass
def save(self, heads, console=True):
'''
Create reports in different formats.
:param heads: html table extra values in title rows
:param console: Boolean, default is True. If set, also print out the console log.
'''
pass
def _save_junit(self):
'''
Save Junit report.
:return: Nothing
'''
pass
def _save_html_report(self, heads=None, refresh=None):
'''
Save html report.
:param heads: headers as dict
:param refresh: Boolean, if True will add a reload-tag to the report
:return: Nothing
'''
pass
def _print_console_summary(self):
'''
Print the console report.
:return: Nothing
'''
pass
def success_count(self):
'''
Amount of passed test cases in this list.
:return: integer
'''
pass
def failure_count(self):
'''
Amount of failed test cases in this list.
:return: integer
'''
pass
def inconclusive_count(self):
'''
Amount of inconclusive test cases in this list.
:return: integer
'''
pass
def retry_count(self):
'''
Amount of retried test cases in this list.
:return: integer
'''
pass
def skip_count(self):
'''
Amount of skipped test cases in this list.
:return: integer
'''
pass
def clean_fails(self):
'''
Check if there are any fails that were not subsequently retried.
:return: Boolean
'''
pass
def clean_inconcs(self):
'''
Check if there are any inconclusive or unknown results that were not subsequently retried.
:return: Boolean
'''
pass
@property
def skipped(self):
'''
True if all results were skipped. Else False.
:return: Boolean
'''
pass
@property
def inconclusive(self):
'''
True if all failures were inconclusive. False if there are any fails or all tests passed.
:return: Boolean
'''
pass
@property
def success(self):
'''
If all tests passed, return True, else False.
:return: Boolean
'''
pass
@property
def failure(self):
'''
If any tests failed, return True, else False.
:return: Boolean
'''
pass
def get_verdict(self):
'''
Get success state as a string from pass, fail, skip and inconclusive.
:return: str
'''
pass
def total_duration(self):
'''
Sum of the durations of the tests in this list.
:return: integer
'''
pass
def pass_rate(self, include_skips=False, include_inconclusive=False, include_retries=True):
'''
Calculate pass rate for tests in this list.
:param include_skips: Boolean, if True skipped tc:s will be included. Default is False
:param include_inconclusive: Boolean, if True inconclusive tc:s will be included.
Default is False.
:param include_retries: Boolean, if True retried tc:s will be included in percentages.
:return: Percentage in format .2f %
'''
pass
def get_summary(self):
'''
Get a summary of this ResultLists contents as dictionary.
:return: dictionary
'''
pass
def count(self):
'''
Return length of this list (amount of Results).
:return: integer
'''
pass
def __len__(self):
'''
Return length of this list (amount of Results).
:return: integer
'''
pass
def __next__(self):
'''
Get next Result from this list.
:return: Result
'''
pass
def next(self):
'''
Implementation of next method from Iterator.
:return: Result
:raises: StopIteration if IndexError occurs.
'''
pass
| 31 | 27 | 11 | 1 | 5 | 5 | 2 | 0.9 | 1 | 9 | 4 | 0 | 26 | 3 | 26 | 26 | 314 | 53 | 138 | 57 | 107 | 124 | 114 | 50 | 87 | 9 | 1 | 2 | 51 |
2,044 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/ReturnCodes.py
|
icetea_lib.ReturnCodes.ReturnCodes
|
class ReturnCodes(object): #pylint: disable=no-init,too-few-public-methods
#pylint: disable=invalid-name
"""
Enum for Bench return codes.
"""
RETCODE_SKIP = -1
RETCODE_SUCCESS = 0
RETCODE_FAIL_SETUP_BENCH = 1000
RETCODE_FAIL_SETUP_TC = 1001
RETCODE_FAIL_MISSING_DUTS = 1002
RETCODE_FAIL_UNDEFINED_REQUIRED_DUTS_COUNT = 1003
RETCODE_FAIL_DUT_CONNECTION_FAIL = 1004
RETCODE_FAIL_TC_EXCEPTION = 1005
RETCODE_FAIL_TEARDOWN_TC = 1006
RETCODE_FAIL_INITIALIZE_BENCH = 1007
RETCODE_FAIL_NO_PRELIMINARY_VERDICT = 1010
RETCODE_FAIL_TEARDOWN_BENCH = 1011
RETCODE_FAIL_ABORTED_BY_USER = 1012
RETCODE_FAIL_UNKNOWN = 1013
RETCODE_FAIL_INCONCLUSIVE = 1014
RETCODE_FAIL_TC_NOT_FOUND = 1015
INCONCLUSIVE_RETCODES = [RETCODE_FAIL_ABORTED_BY_USER,
RETCODE_FAIL_INITIALIZE_BENCH,
RETCODE_FAIL_TEARDOWN_BENCH,
RETCODE_FAIL_SETUP_BENCH,
RETCODE_FAIL_DUT_CONNECTION_FAIL,
RETCODE_FAIL_INCONCLUSIVE,
RETCODE_FAIL_TC_NOT_FOUND]
ALL_RETCODES = [RETCODE_SKIP,
RETCODE_SUCCESS,
RETCODE_FAIL_SETUP_BENCH,
RETCODE_FAIL_SETUP_TC,
RETCODE_FAIL_MISSING_DUTS,
RETCODE_FAIL_UNDEFINED_REQUIRED_DUTS_COUNT,
RETCODE_FAIL_DUT_CONNECTION_FAIL,
RETCODE_FAIL_TC_EXCEPTION,
RETCODE_FAIL_TEARDOWN_BENCH,
RETCODE_FAIL_TEARDOWN_TC,
RETCODE_FAIL_INITIALIZE_BENCH,
RETCODE_FAIL_NO_PRELIMINARY_VERDICT,
RETCODE_FAIL_ABORTED_BY_USER,
RETCODE_FAIL_UNKNOWN,
RETCODE_FAIL_INCONCLUSIVE,
RETCODE_FAIL_TC_NOT_FOUND]
|
class ReturnCodes(object):
'''
Enum for Bench return codes.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 45 | 1 | 40 | 19 | 39 | 5 | 19 | 19 | 18 | 0 | 1 | 0 | 0 |
2,045 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Searcher.py
|
icetea_lib.Searcher.Invert
|
class Invert(object): # pylint: disable=too-few-public-methods
"""
Class Invert
"""
def __init__(self, string_obj):
self.str = string_obj
def __str__(self):
return self.str
|
class Invert(object):
'''
Class Invert
'''
def __init__(self, string_obj):
pass
def __str__(self):
pass
| 3 | 1 | 2 | 0 | 2 | 0 | 1 | 0.8 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 9 | 1 | 5 | 4 | 2 | 4 | 5 | 4 | 2 | 1 | 1 | 0 | 2 |
2,046 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/ArgsHandler.py
|
icetea_lib.TestBench.ArgsHandler.ArgsHandler
|
class ArgsHandler(object):
"""
This mixer gets and parses the CLI arguments.
It provides two APIs:
an args getter and setter
"""
def __init__(self, **kwargs):
super(ArgsHandler, self).__init__(**kwargs)
parser = get_tc_arguments(get_base_arguments(get_parser()))
self.__args, self.__unknown = parser.parse_known_args()
@property
def args(self):
"""
Property for arguments.
:return: arguments
"""
return self.__args
@args.setter
def args(self, value):
"""
Setter for arguments.
:param value: arguments
:return: Nothing
"""
self.__args = value
@property
def unknown(self):
"""
Getter for unknown arguments.
"""
return self.__unknown
@unknown.setter
def unknown(self, value):
"""
Setter for the unknown variable.
"""
self.__unknown = value
|
class ArgsHandler(object):
'''
This mixer gets and parses the CLI arguments.
It provides two APIs:
an args getter and setter
'''
def __init__(self, **kwargs):
pass
@property
def args(self):
'''
Property for arguments.
:return: arguments
'''
pass
@args.setter
def args(self, value):
'''
Setter for arguments.
:param value: arguments
:return: Nothing
'''
pass
@property
def unknown(self):
'''
Getter for unknown arguments.
'''
pass
@unknown.setter
def unknown(self, value):
'''
Setter for the unknown variable.
'''
pass
| 10 | 5 | 6 | 0 | 2 | 3 | 1 | 1.18 | 1 | 1 | 0 | 0 | 5 | 2 | 5 | 5 | 43 | 6 | 17 | 12 | 7 | 20 | 13 | 8 | 7 | 1 | 1 | 0 | 5 |
2,047 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Bench.py
|
icetea_lib.TestBench.Bench.Bench
|
class Bench(BenchApi):
"""
This is the base Bench class, which merges all of the subclasses together into one logical unit.
Each subclass (called a Mixer) brings some functionality to Bench.
The Logger mixer provides the self.logger instance, which is the most widely used.
The Runner mixer manages test execution in the right order, e.g. calling setup_bench and teardown_bench.
See the rest of the Mixer functionality in their class descriptions.
This class provides the very top-level APIs, e.g. the constructor
for the whole system and the run method, which is called by TestManager when a test begins.
"""
def __init__(self, **kwargs):
super(Bench, self).__init__(**kwargs)
self.runner = None
def run(self):
"""
Run the test bench.
:return: int (return code)
"""
try:
self._init()
self.runner = RunnerSM(self, self.logger)
except (TestStepError, InconclusiveError) as error:
self.set_failure(ReturnCodes.RETCODE_FAIL_INCONCLUSIVE, str(error))
self.logger.error(error)
self.logger.info("Test case verdict: INCONCLUSIVE")
return ReturnCodes.RETCODE_FAIL_INCONCLUSIVE
skip = self.runner.check_skip()
if skip is not None:
return skip
retval = self.runner.run()
if retval in ReturnCodes.INCONCLUSIVE_RETCODES:
verdict = "INCONCLUSIVE"
elif retval == ReturnCodes.RETCODE_SKIP:
verdict = "SKIP"
elif retval == ReturnCodes.RETCODE_SUCCESS:
verdict = "PASS"
else:
verdict = "FAIL"
self.logger.info("Test case verdict: %s", verdict)
return retval
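A structural sketch of how a test case might subclass Bench. The metadata keyword arguments and the setup/case/teardown hook names follow common Icetea conventions and are assumptions here; execution is normally driven by the Icetea test runner rather than by calling run() directly.
# Structural sketch only: metadata keys and hook names are assumed conventions.
from icetea_lib.TestBench.Bench import Bench   # module path taken from this record

class Testcase(Bench):
    def __init__(self):
        # The metadata keys below are assumed Icetea conventions, not confirmed here.
        Bench.__init__(self,
                       name="sample_smoke_test",
                       type="smoke",
                       purpose="Demonstrate the Bench life cycle",
                       requirements={"duts": {"*": {"count": 0}}})

    def setup(self):
        self.logger.info("runs after setup_bench has prepared resources")

    def case(self):
        self.logger.info("actual test steps go here")

    def teardown(self):
        self.logger.info("runs before teardown_bench releases resources")

# run() above drives these hooks through RunnerSM and logs the final
# PASS/FAIL/SKIP/INCONCLUSIVE verdict derived from the return code.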
|
class Bench(BenchApi):
'''
This is the base Bench class, which merges all of the subclasses together into one logical unit.
Each subclass (called a Mixer) brings some functionality to Bench.
The Logger mixer provides the self.logger instance, which is the most widely used.
The Runner mixer manages test execution in the right order, e.g. calling setup_bench and teardown_bench.
See the rest of the Mixer functionality in their class descriptions.
This class provides the very top-level APIs, e.g. the constructor
for the whole system and the run method, which is called by TestManager when a test begins.
'''
def __init__(self, **kwargs):
pass
def run(self):
'''
Run the test bench.
:return: int (return code)
'''
pass
| 3 | 2 | 16 | 1 | 13 | 2 | 4 | 0.48 | 1 | 6 | 4 | 46 | 2 | 1 | 2 | 106 | 43 | 3 | 27 | 8 | 24 | 13 | 24 | 7 | 21 | 6 | 2 | 1 | 7 |
2,048 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/BenchFunctions.py
|
icetea_lib.TestBench.BenchFunctions.BenchFunctions
|
class BenchFunctions(object):
"""
A collection of functions that add different functionalities to the test bench.
"""
def __init__(self, resource_configuration, resources, configurations, **kwargs):
super(BenchFunctions, self).__init__(**kwargs)
self._resource_configuration = resource_configuration
self._resources = resources
self._configurations = configurations
self._logger = LogManager.get_dummy_logger()
def init(self, logger=None):
"""
Set logger.
"""
if logger:
self._logger = logger
# input data from user
def input_from_user(self, title=None): # pylint: disable=no-self-use
"""
Input data from user.
:param title: Title as string
:return: stripped data from stdin.
"""
if title:
print(title)
print("Press [ENTER] to continue")
resp = sys.stdin.readline().strip()
if resp != '':
return resp.strip()
return ""
def open_node_terminal(self, k='*', wait=True):
"""
Open Putty (or kitty, if it exists)
:param k: number 1..<max duts> or '*' to open putty for all devices
:param wait: wait until putty is closed before continuing testing
:return: Nothing
"""
if k == '*':
for ind in self._resource_configuration.get_dut_range():
self.open_node_terminal(ind, wait)
return
if not self._resources.is_my_dut_index(k):
return
params = '-serial ' + self._resources.duts[k - 1].comport + ' -sercfg ' + str(
self._resources.duts[k - 1].serialBaudrate)
putty_exe = self._configurations.env['extApps']['puttyExe']
if os.path.exists(self._configurations.env['extApps']['kittyExe']):
putty_exe = self._configurations.env['extApps']['kittyExe']
if "kitty.exe" in putty_exe:
params = params + ' -title "' + self._resources.duts[k - 1].comport
params += ' - ' + self._configurations.test_name
params += ' | DUT' + str(k) + ' ' + self._resources.get_dut_nick(k) + '"'
params += ' -log "' + LogManager.get_testcase_logfilename('DUT%d.manual' % k) + '"'
if os.path.exists(putty_exe):
command = putty_exe + ' ' + params
self._logger.info(command)
if wait:
if self._resources.is_my_dut_index(k):
self._resources.duts[k - 1].close_dut()
self._resources.duts[k - 1].close_connection()
self._resources.resource_provider.allocator.release(
dut=self._resources.duts[k - 1])
process = subprocess.Popen(command)
time.sleep(2)
process.wait()
self._resources.duts[k - 1].open_dut()
else:
subprocess.Popen(command, close_fds=True)
else:
self._logger.warning('putty not exists in path: %s', putty_exe)
def delay(self, seconds):
"""
Sleep command.
:param seconds: Amount of seconds to sleep.
:return: Nothing
"""
self._logger.debug("Waiting for %i seconds", seconds)
if seconds < 30:
time.sleep(seconds)
else:
while seconds > 10:
self._logger.debug("Still waiting... %i seconds remain", seconds)
time.sleep(10)
seconds = seconds - 10
time.sleep(seconds)
def verify_trace_skip_fail(self, k, expected_traces):
"""
Shortcut to set break_in_fail to False in verify_trace.
:param k: nick or index of dut.
:param expected_traces: Expected traces as a list or string
:return: boolean
"""
return self.verify_trace(k, expected_traces, False)
def verify_trace(self, k, expected_traces, break_in_fail=True):
"""
Verify that traces expected_traces are found in dut traces.
:param k: index or nick of dut whose traces are to be used.
:param expected_traces: list of expected traces or string
:param break_in_fail: Boolean, if True raise LookupError if search fails
:return: boolean.
:raises: LookupError if search fails.
"""
if isinstance(k, str):
dut_index = self._resources.get_dut_index(k)
return self.verify_trace(dut_index, expected_traces, break_in_fail)
# If expectedTraces given as a String (expecting only a certain trace), wrap it in a list.
if isinstance(expected_traces, str):
expected_traces = [expected_traces]
status = True
try:
status = verify_message(self._resources.duts[k - 1].traces, expected_traces)
except TypeError as inst:
status = False
if break_in_fail:
raise inst
if status is False and break_in_fail:
raise LookupError("{} not found in traces.".format(expected_traces))
return status
def get_time(self): # pylint: disable=no-self-use
"""
Get timestamp using time.time().
:return: timestamp
"""
return time.time()
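A hypothetical test-case snippet using the helpers above, assuming the mixer methods are exposed on the Bench instance (self), as the mixer design suggests.
def case(self):
    self.delay(5)                                    # sleeps, logging progress for long waits
    # Raises LookupError if the trace is missing (break_in_fail defaults to True)
    self.verify_trace(1, ["connection established"])
    # Returns False instead of raising when the trace is absent
    seen = self.verify_trace_skip_fail(1, "optional banner")
    self.logger.info("optional banner seen: %s", seen)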
|
class BenchFunctions(object):
'''
A collection of functions that add different functionalities to the test bench.
'''
def __init__(self, resource_configuration, resources, configurations, **kwargs):
pass
def init(self, logger=None):
'''
Set logger.
'''
pass
def input_from_user(self, title=None):
'''
Input data from user.
:param title: Title as string
:return: stripped data from stdin.
'''
pass
def open_node_terminal(self, k='*', wait=True):
'''
Open Putty (or kitty, if it exists)
:param k: number 1..<max duts> or '*' to open putty for all devices
:param wait: wait until putty is closed before continuing testing
:return: Nothing
'''
pass
def delay(self, seconds):
'''
Sleep command.
:param seconds: Amount of seconds to sleep.
:return: Nothing
'''
pass
def verify_trace_skip_fail(self, k, expected_traces):
'''
Shortcut to set break_in_fail to False in verify_trace.
:param k: nick or index of dut.
:param expected_traces: Expected traces as a list or string
:return: boolean
'''
pass
def verify_trace(self, k, expected_traces, break_in_fail=True):
'''
Verify that traces expected_traces are found in dut traces.
:param k: index or nick of dut whose traces are to be used.
:param expected_traces: list of expected traces or string
:param break_in_fail: Boolean, if True raise LookupError if search fails
:return: boolean.
:raises: LookupError if search fails.
'''
pass
def get_time(self):
'''
Get timestamp using time.time().
:return: timestamp
'''
pass
| 9 | 8 | 17 | 2 | 10 | 5 | 3 | 0.54 | 1 | 5 | 0 | 0 | 8 | 4 | 8 | 8 | 146 | 22 | 82 | 22 | 73 | 44 | 77 | 21 | 68 | 9 | 1 | 3 | 26 |
2,049 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Commands.py
|
icetea_lib.TestBench.Commands.CommandResponseCodes
|
class CommandResponseCodes(object): # pylint: disable=too-few-public-methods
"""
Enum for cliapp command invalid return codes.
"""
INVALID_PARAMS = -2
NOT_IMPLEMENTED = -3
CALLBACK_MISSING = -4
UNKNOWN_COMMAND = -5
|
class CommandResponseCodes(object):
'''
Enum for cliapp command invalid return codes.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 5 | 5 | 4 | 4 | 5 | 5 | 4 | 0 | 1 | 0 | 0 |
2,050 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Randomize/seed.py
|
icetea_lib.Randomize.seed.Seed
|
class Seed(object):
"""
Base Seed implementation.
"""
def __init__(self, value, seed_id=None, date=None):
self.__seed_value = value
self.__seed_id = seed_id if seed_id else str(uuid.uuid4())
self.__date = date if date else datetime.utcnow().isoformat()
def __repr__(self):
return str(self.__seed_value)
def __add__(self, other):
return self.value + other
def __radd__(self, other):
return other + self.value
def __cmp__(self, other):
return cmp(self.value, other)
@property
def value(self):
"""
get __seed_value.
:return: __seed_value
"""
return self.__seed_value
@property
def seed_id(self):
"""
get __seed_id.
:return: __seed_id
"""
return self.__seed_id
@property
def date(self):
"""
Return date.
:return: __date
"""
return self.__date
def store(self, filename):
"""
Store seed in json format into a file
:param filename: File name to save
:return: Nothing
"""
with open(filename, 'w') as file_handle:
seed_dict = {"seed_id": self.seed_id, "seed_value": self.value, "date": self.date}
json.dump(seed_dict, file_handle)
@staticmethod
def load(filename):
"""
Load seed from a file.
:param filename: Source file name
:return: dict
"""
with open(filename, 'r') as file_handle:
return json.load(file_handle)
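A round-trip sketch of the Seed API shown above; note that load() returns a plain dict rather than a Seed instance.
from icetea_lib.Randomize.seed import Seed

seed = Seed(42)                           # seed_id and date are generated automatically
print(seed.value, seed.seed_id, seed.date)
print(seed + 1, 1 + seed)                 # __add__/__radd__ delegate to the value

seed.store("seed.json")                   # writes {"seed_id": ..., "seed_value": 42, "date": ...}
data = Seed.load("seed.json")             # plain dict, not a Seed
restored = Seed(data["seed_value"], seed_id=data["seed_id"], date=data["date"])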
|
class Seed(object):
'''
Base Seed implementation.
'''
def __init__(self, value, seed_id=None, date=None):
pass
def __repr__(self):
pass
def __add__(self, other):
pass
def __radd__(self, other):
pass
def __cmp__(self, other):
pass
@property
def value(self):
'''
get __seed_value.
:return: __seed_value
'''
pass
@property
def seed_id(self):
'''
get __seed_id.
:return: __seed_id
'''
pass
@property
def date(self):
'''
Return date.
:return: __date
'''
pass
def store(self, filename):
'''
Store seed in json format into a file
:param filename: File name to save
:return: Nothing
'''
pass
@staticmethod
def load(filename):
'''
Load seed from a file.
:param filename: Source file name
:return: dict
'''
pass
| 15 | 6 | 5 | 1 | 3 | 2 | 1 | 0.83 | 1 | 2 | 0 | 3 | 9 | 3 | 10 | 10 | 69 | 14 | 30 | 21 | 15 | 25 | 26 | 15 | 15 | 3 | 1 | 1 | 12 |
2,051 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/TestBench/Logger.py
|
icetea_lib.TestBench.Logger.Logger
|
class Logger(object):
"""
This Mixer provides a public logger property for general use.
"""
def __init__(self, **kwargs):
super(Logger, self).__init__(**kwargs)
self.__logger = LogManager.get_dummy_logger()
def get_logger(self):
"""
Getter for the logger.
:return: Logger
"""
return self.__logger
def init_logger(self, test_name, verbose, silent, color, disable_log_truncate):
"""
This function is called from Bench right after test is started.
"""
LogManager.init_testcase_logging(test_name, verbose,
silent, color,
not disable_log_truncate)
self.__logger = LogManager.get_bench_logger()
def set_logger(self, value):
"""
Setter for logger.
"""
self.__logger = value
|
class Logger(object):
'''
This Mixer provides a public logger property for general use.
'''
def __init__(self, **kwargs):
pass
def get_logger(self):
'''
Getter for the logger.
:return: Logger
'''
pass
def init_logger(self, test_name, verbose, silent, color, disable_log_truncate):
'''
This function is called from Bench right after test is started.
'''
pass
def set_logger(self, value):
'''
Setter for logger.
'''
pass
| 5 | 4 | 6 | 0 | 3 | 3 | 1 | 1 | 1 | 1 | 0 | 0 | 4 | 1 | 4 | 4 | 30 | 4 | 13 | 6 | 8 | 13 | 11 | 6 | 6 | 1 | 1 | 0 | 4 |
2,052 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Randomize/randomize.py
|
icetea_lib.Randomize.randomize.Randomize
|
class Randomize(object):
"""
Randomize class, collection of static methods for generating randomized content.
"""
@staticmethod
def random_integer(max_value, min_value=0):
"""
:param max_value: Maximum value, int
:param min_value: Minimum value, int, default is 0
:return: SeedInteger
"""
return SeedInteger(random.randint(min_value, max_value))
@staticmethod
def random_list_elem(str_list):
"""
:param str_list: a pre-defined string list
:return: SeedString()
"""
if isinstance(str_list, list):
for elem in str_list:
if not isinstance(elem, str):
raise TypeError("list element can only be string")
return SeedString(random.choice(str_list))
return None
@staticmethod
def random_string(max_len=1, min_len=1, chars=string.ascii_letters, **kwargs):
"""
:param max_len: max value of len(string)
:param min_len: min value of len(string)
:param chars: can be a string, a list of strings or a function pointer.
Randomly choose one if given a list of strings
:param kwargs: keyword arguments for chars if it's function pointer
:return: SeedString()
"""
if isinstance(chars, list):
# assume each element is a str
chars = ''.join(chars)
if isinstance(chars, str):
return SeedString(
''.join(random.choice(chars) for _ in range(random.randint(min_len, max_len))))
elif isfunction(chars):
# this function is assumed to return/generate one character each time it is called
return SeedString(
''.join(chars(**kwargs) for _ in range(random.randint(min_len, max_len))))
else:
raise ValueError("chars should be string, list, or function pointer")
@staticmethod
def random_array_elem(str_array):
"""
:param str_array:a pre-defined string array
:return: SeedStringArray()
"""
return SeedStringArray([str(Randomize.random_list_elem(str_array))])
@staticmethod
def random_string_array(max_len=1, min_len=1,
elem_max_len=1, elem_min_len=1,
strings=string.ascii_letters, **kwargs):
"""
:param max_len: max value of len(array)
:param min_len: min value of len(array)
:param elem_max_len: max value of len(array[index])
:param elem_min_len: min value of len(array[index])
:param strings: allowed string characters in each element of array,
or predefined list of strings, or function pointer
:param **kwargs: keyworded arguments for strings if it's a function pointer
:return: SeedStringArray
"""
string_array = list()
for _ in range(random.randint(min_len, max_len)):
string_array.append(Randomize.random_string(max_len=elem_max_len, min_len=elem_min_len,
chars=strings, **kwargs).value)
return SeedStringArray(string_array)
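A usage sketch of the static methods above, including the function-pointer form of the chars/strings argument; the hex_digit helper is illustrative only.
import string
from icetea_lib.Randomize.randomize import Randomize

number = Randomize.random_integer(10, min_value=1)            # SeedInteger between 1 and 10
word = Randomize.random_string(max_len=8, min_len=4,
                               chars=string.ascii_lowercase)  # SeedString
colour = Randomize.random_list_elem(["red", "green", "blue"])

def hex_digit():
    # chars/strings may also be a callable returning one character per call
    return Randomize.random_list_elem(list(string.hexdigits)).value

tokens = Randomize.random_string_array(max_len=3, min_len=1,
                                       elem_max_len=4, elem_min_len=2,
                                       strings=hex_digit)     # SeedStringArray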
|
class Randomize(object):
'''
Randomize class, collection of static methods for generating randomized content.
'''
@staticmethod
def random_integer(max_value, min_value=0):
'''
:param max_value: Maximum value, int
:param min_value: Minimum value, int, default is 0
:return: SeedInteger
'''
pass
@staticmethod
def random_list_elem(str_list):
'''
:param str_list: a pre-defined string list
:return: SeedString()
'''
pass
@staticmethod
def random_string(max_len=1, min_len=1, chars=string.ascii_letters, **kwargs):
'''
:param max_len: max value of len(string)
:param min_len: min value of len(string)
:param chars: can be a string, a list of strings or a function pointer.
Randomly choose one if given a list of strings
:param kwargs: keyword arguments for chars if it's function pointer
:return: SeedString()
'''
pass
@staticmethod
def random_array_elem(str_array):
'''
:param str_array:a pre-defined string array
:return: SeedStringArray()
'''
pass
@staticmethod
def random_string_array(max_len=1, min_len=1,
elem_max_len=1, elem_min_len=1,
strings=string.ascii_letters, **kwargs):
'''
:param max_len: max value of len(array)
:param min_len: min value of len(array)
:param elem_max_len: max value of len(array[index])
:param elem_min_len: min value of len(array[index])
:param strings: allowed string characters in each element of array,
or predefined list of strings, or function pointer
:param **kwargs: keyworded arguments for strings if it's a function pointer
:return: SeedStringArray
'''
pass
| 11 | 6 | 13 | 0 | 6 | 7 | 2 | 1 | 1 | 8 | 3 | 0 | 0 | 0 | 5 | 5 | 78 | 6 | 36 | 17 | 23 | 36 | 24 | 9 | 18 | 4 | 1 | 3 | 12 |
2,053 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/plugin_tests/test_dutmbed.py
|
icetea_lib.Plugin.plugins.plugin_tests.test_dutmbed.MockArgspec
|
class MockArgspec(object):
def __init__(self, lst):
self.args = lst
|
class MockArgspec(object):
def __init__(self, lst):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 3 | 0 | 3 | 3 | 1 | 0 | 3 | 3 | 1 | 1 | 1 | 0 | 1 |
2,054 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/PluginBase.py
|
icetea_lib.Plugin.PluginBase.PluginTypes
|
class PluginTypes(object):
"""
Just a small enum for types.
"""
BENCH = 0
PARSER = 1
EXTSERVICE = 2
ALLOCATOR = 3
|
class PluginTypes(object):
'''
Just a small enum for types.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 5 | 5 | 4 | 3 | 5 | 5 | 4 | 0 | 1 | 0 | 0 |
2,055 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/PluginBase.py
|
icetea_lib.Plugin.PluginBase.RunPluginBase
|
class RunPluginBase(object):
"""
Base class for run-level plugins.
"""
def __init__(self):
pass
def get_allocators(self):
"""
Get a dictionary with names and class references to BaseAllocator objects.
:return: Dictionary
"""
return None
|
class RunPluginBase(object):
'''
Base class for run-level plugins.
'''
def __init__(self):
pass
def get_allocators(self):
'''
Get a dictionary with names and class references to BaseAllocator objects.
:return: Dictionary
'''
pass
| 3 | 2 | 5 | 1 | 2 | 2 | 1 | 1.4 | 1 | 0 | 0 | 2 | 2 | 0 | 2 | 2 | 14 | 2 | 5 | 3 | 2 | 7 | 5 | 3 | 2 | 1 | 1 | 0 | 2 |
2,056 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/PluginManager.py
|
icetea_lib.Plugin.PluginManager.PluginException
|
class PluginException(Exception):
"""
Plugin Exception
"""
pass
|
class PluginException(Exception):
'''
Plugin Exception
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
2,057 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/PluginManager.py
|
icetea_lib.Plugin.PluginManager.PluginManager
|
class PluginManager(object): # pylint: disable=too-many-instance-attributes
"""
class PluginManager. The job of this class is to load and register plugins
for Icetea.
Plugin modules are loaded the same way test cases are loaded. The plugin instance is then used to
get the plugin contents as a dict.
The contents of this dict are registered to relevant parts of Icetea
according to the type of plugin we are dealing with.
"""
def __init__(self, responseparser=None, bench=None, logger=None):
self.logger = logger
if self.logger is None:
import logging
self.logger = logging.getLogger("PluginManager")
if not self.logger.handlers:
self.logger.addHandler(logging.StreamHandler())
self.logger.setLevel(logging.INFO)
self.responseparser = responseparser
self.bench = bench
self._external_services = {}
self._started_services = []
self.registered_plugins = []
self._allocators = {}
self.plugin_types = {PluginTypes.BENCH: self._register_bench_extension,
PluginTypes.PARSER: self._register_dataparser,
PluginTypes.EXTSERVICE: self._register_external_service,
PluginTypes.ALLOCATOR: self._register_allocator}
def register_tc_plugins(self, plugin_name, plugin_class):
"""
Loads a plugin as a dictionary and attaches the needed parts to the correct areas of the
test bench.
:param plugin_name: Name of the plugin
:param plugin_class: PluginBase
:return: Nothing
"""
if plugin_name in self.registered_plugins:
raise PluginException("Plugin {} already registered! Duplicate "
"plugins?".format(plugin_name))
self.logger.debug("Registering plugin %s", plugin_name)
plugin_class.init(bench=self.bench)
if plugin_class.get_bench_api() is not None:
register_func = self.plugin_types[PluginTypes.BENCH]
register_func(plugin_name, plugin_class)
if plugin_class.get_parsers() is not None:
register_func = self.plugin_types[PluginTypes.PARSER]
register_func(plugin_name, plugin_class)
if plugin_class.get_external_services() is not None:
register_func = self.plugin_types[PluginTypes.EXTSERVICE]
register_func(plugin_name, plugin_class)
self.registered_plugins.append(plugin_name)
def register_run_plugins(self, plugin_name, plugin_class):
"""
Loads a plugin as a dictionary and attaches the needed parts to the correct Icetea
run-level components.
:param plugin_name: Name of the plugin
:param plugin_class: PluginBase
:return: Nothing
"""
if plugin_name in self.registered_plugins:
raise PluginException("Plugin {} already registered! "
"Duplicate plugins?".format(plugin_name))
self.logger.debug("Registering plugin %s", plugin_name)
if plugin_class.get_allocators():
register_func = self.plugin_types[PluginTypes.ALLOCATOR]
register_func(plugin_name, plugin_class)
self.registered_plugins.append(plugin_name)
def get_allocator(self, allocator_name):
"""
Get a registered allocator based on allocator_name.
:param allocator_name: Name of allocator to get
:return: BaseAllocator
"""
if allocator_name in self._allocators:
return self._allocators[allocator_name]
return None
def load_default_tc_plugins(self):
"""
Load default test case level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
"""
for plugin_name, plugin_class in default_plugins.items():
if issubclass(plugin_class, PluginBase):
try:
self.register_tc_plugins(plugin_name, plugin_class())
except PluginException as error:
self.logger.debug(error)
continue
def load_custom_tc_plugins(self, plugin_path=None):
"""
Load custom test case level plugins from plugin_path.
:param plugin_path: Path to file, which contains the imports and mapping for plugins.
:return: None if plugin_path is None or False or something equivalent to those.
"""
if not plugin_path:
return
directory = os.path.dirname(plugin_path)
sys.path.append(directory)
modulename = os.path.split(plugin_path)[1]
# Strip off the file extension.
if "." in modulename:
modulename = modulename[:modulename.rindex(".")]
try:
module = importlib.import_module(modulename)
except ImportError:
raise PluginException("Unable to import custom plugin information from {}.".format(
plugin_path))
for plugin_name, plugin_class in module.plugins_to_load.items():
if issubclass(plugin_class, PluginBase):
try:
self.register_tc_plugins(plugin_name, plugin_class())
except PluginException as error:
self.logger.debug(error)
continue
def load_default_run_plugins(self):
"""
Load default run level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
"""
for plugin_name, plugin_class in default_plugins.items():
if issubclass(plugin_class, RunPluginBase):
try:
self.register_run_plugins(plugin_name, plugin_class())
except PluginException as error:
self.logger.debug(error)
continue
def load_custom_run_plugins(self, plugin_path=None):
"""
Load custom run level plugins from plugin_path.
:param plugin_path: Path to file, which contains the imports and mapping for plugins.
:return: None if plugin_path is None or False or something equivalent to those.
"""
if not plugin_path:
return
directory = os.path.dirname(plugin_path)
sys.path.append(directory)
modulename = os.path.split(plugin_path)[1]
# Strip off the file extension.
if "." in modulename:
modulename = modulename[:modulename.rindex(".")]
try:
module = importlib.import_module(modulename)
except ImportError:
raise PluginException("Unable to import custom plugin information from {}.".format(
plugin_path))
for plugin_name, plugin_class in module.plugins_to_load.items():
if issubclass(plugin_class, RunPluginBase):
try:
self.register_run_plugins(plugin_name, plugin_class())
except PluginException as error:
self.logger.debug(error)
continue
def start_external_service(self, service_name, conf=None):
"""
Start external service service_name with configuration conf.
:param service_name: Name of service to start
:param conf:
:return: nothing
"""
if service_name in self._external_services:
ser = self._external_services[service_name]
service = ser(service_name, conf=conf, bench=self.bench)
try:
service.start()
except PluginException:
self.logger.exception("Starting service %s caused an exception!", service_name)
raise PluginException("Failed to start external service {}".format(service_name))
self._started_services.append(service)
setattr(self.bench, service_name, service)
else:
self.logger.warning("Service %s not found. Check your plugins.", service_name)
def stop_external_services(self):
"""
Stop all external services.
:return: Nothing
"""
for service in self._started_services:
self.logger.debug("Stopping application %s", service.name)
try:
service.stop()
except PluginException:
self.logger.exception("Stopping external service %s caused and exception!",
service.name)
self._started_services = []
def _register_bench_extension(self, plugin_name, plugin_instance):
"""
Register a bench extension.
:param plugin_name: Plugin name
:param plugin_instance: PluginBase
:return: Nothing
"""
for attr in plugin_instance.get_bench_api().keys():
if hasattr(self.bench, attr):
raise PluginException("Attribute {} already exists in bench! Unable to add "
"plugin {}.".format(attr, plugin_name))
setattr(self.bench, attr, plugin_instance.get_bench_api().get(attr))
def _register_dataparser(self, plugin_name, plugin_instance):
"""
Register a parser.
:param plugin_name: Parser name
:param plugin_instance: PluginBase
:return: Nothing
"""
for parser in plugin_instance.get_parsers().keys():
if self.responseparser.has_parser(parser):
raise PluginException("Parser {} already registered to parsers! Unable to "
"add parsers from {}.".format(parser, plugin_name))
self.responseparser.add_parser(parser, plugin_instance.get_parsers().get(parser))
def _register_external_service(self, plugin_name, plugin_instance):
"""
Register an external service.
:param plugin_name: Service name
:param plugin_instance: PluginBase
:return:
"""
for attr in plugin_instance.get_external_services().keys():
if attr in self._external_services:
raise PluginException("External service with name {} already exists! Unable to add "
"services from plugin {}.".format(attr, plugin_name))
self._external_services[attr] = plugin_instance.get_external_services().get(attr)
def _register_allocator(self, plugin_name, plugin_instance):
"""
Register an allocator.
:param plugin_name: Allocator name
:param plugin_instance: RunPluginBase
:return:
"""
for allocator in plugin_instance.get_allocators().keys():
if allocator in self._allocators:
raise PluginException("Allocator with name {} already exists! unable to add "
"allocators from plugin {}".format(allocator, plugin_name))
self._allocators[allocator] = plugin_instance.get_allocators().get(allocator)
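A sketch of a custom plugin mapping module consumed by load_custom_tc_plugins()/load_custom_run_plugins(): the loader imports the file and iterates its plugins_to_load dict. MyBenchPlugin and the PluginBase import path are illustrative assumptions.
# Contents of a hypothetical my_plugins.py passed via plugin_path
from icetea_lib.Plugin.PluginBase import PluginBase   # assumed module path

class MyBenchPlugin(PluginBase):
    def get_bench_api(self):
        # Each key becomes an attribute on the bench via _register_bench_extension()
        return {"hello_plugin": lambda: "hello from plugin"}

plugins_to_load = {"my_bench_plugin": MyBenchPlugin}

# Loading it somewhere in the run setup:
#   manager = PluginManager(bench=my_bench)
#   manager.load_custom_tc_plugins("/path/to/my_plugins.py")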
|
class PluginManager(object):
'''
class PluginManager. The job of this class is to load and register plugins
for Icetea.
Plugin modules are loaded the same way test cases are loaded. The plugin instance is then used to
get the plugin contents as a dict.
The contents of this dict are registered to relevant parts of Icetea
according to the type of plugin we are dealing with.
'''
def __init__(self, responseparser=None, bench=None, logger=None):
pass
def register_tc_plugins(self, plugin_name, plugin_class):
'''
Loads a plugin as a dictionary and attaches the needed parts to the correct areas of the
test bench.
:param plugin_name: Name of the plugin
:param plugin_class: PluginBase
:return: Nothing
'''
pass
def register_run_plugins(self, plugin_name, plugin_class):
'''
Loads a plugin as a dictionary and attaches the needed parts to the correct Icetea
run-level components.
:param plugin_name: Name of the plugin
:param plugin_class: PluginBase
:return: Nothing
'''
pass
def get_allocator(self, allocator_name):
'''
Get a registered allocator based on allocator_name.
:param allocator_name: Name of allocator to get
:return: BaseAllocator
'''
pass
def load_default_tc_plugins(self):
'''
Load default test case level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
'''
pass
def load_custom_tc_plugins(self, plugin_path=None):
'''
Load custom test case level plugins from plugin_path.
:param plugin_path: Path to file, which contains the imports and mapping for plugins.
:return: None if plugin_path is None or False or something equivalent to those.
'''
pass
def load_default_run_plugins(self):
'''
Load default run level plugins from icetea_lib.Plugin.plugins.default_plugins.
:return: Nothing
'''
pass
def load_custom_run_plugins(self, plugin_path=None):
'''
Load custom run level plugins from plugin_path.
:param plugin_path: Path to file, which contains the imports and mapping for plugins.
:return: None if plugin_path is None or False or something equivalent to those.
'''
pass
def start_external_service(self, service_name, conf=None):
'''
Start external service service_name with configuration conf.
:param service_name: Name of service to start
:param conf:
:return: nothing
'''
pass
def stop_external_services(self):
'''
Stop all external services.
:return: Nothing
'''
pass
def _register_bench_extension(self, plugin_name, plugin_instance):
'''
Register a bench extension.
:param plugin_name: Plugin name
:param plugin_instance: PluginBase
:return: Nothing
'''
pass
def _register_dataparser(self, plugin_name, plugin_instance):
'''
Register a parser.
:param plugin_name: Parser name
:param plugin_instance: PluginBase
:return: Nothing
'''
pass
def _register_external_service(self, plugin_name, plugin_instance):
'''
Register an external service.
:param plugin_name: Service name
:param plugin_instance: PluginBase
:return:
'''
pass
def _register_allocator(self, plugin_name, plugin_instance):
'''
Register an allocator.
:param plugin_name: Allocator name
:param plugin_instance: RunPluginBase
:return:
'''
pass
| 15 | 14 | 17 | 1 | 11 | 5 | 4 | 0.55 | 1 | 6 | 4 | 0 | 14 | 8 | 14 | 14 | 261 | 30 | 150 | 47 | 134 | 82 | 137 | 43 | 121 | 7 | 1 | 3 | 53 |
2,058 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/Asserts.py
|
icetea_lib.Plugin.plugins.Asserts.AssertPlugin
|
class AssertPlugin(PluginBase):
"""
Plugin for Asserts
"""
def __init__(self): # pylint: disable=useless-super-delegation
super(AssertPlugin, self).__init__()
self.bench = None
def init(self, bench=None):
"""
Store bench instance
:param bench: Bench
:return: Nothing
:raises: AttributeError if bench is None
"""
self.bench = bench
if self.bench is None:
raise AttributeError("Bench instance not present!")
def get_bench_api(self):
"""
Extend bench functionality with these new commands
:return: Dictionary
"""
# Extend bench functionality with these new commands
ret_dict = dict()
ret_dict["assertTraceDoesNotContain"] = asserts.assertTraceDoesNotContain
ret_dict["assertTraceContains"] = asserts.assertTraceContains
ret_dict["assertDutTraceDoesNotContain"] = self.assert_dut_trace_not_contains
ret_dict["assertDutTraceContains"] = self.assert_dut_trace_contains
ret_dict["assertTrue"] = asserts.assertTrue
ret_dict["assertFalse"] = asserts.assertFalse
ret_dict["assertNone"] = asserts.assertNone
ret_dict["assertNotNone"] = asserts.assertNotNone
ret_dict["assertEqual"] = asserts.assertEqual
ret_dict["assertNotEqual"] = asserts.assertNotEqual
ret_dict["assertJsonContains"] = asserts.assertJsonContains
return ret_dict
def assert_dut_trace_contains(self, k, message):
"""
Wrapper to provide access to the bench for assertDutTraceContains.
:param k: index of dut
:param message: Message that should be in traces
:return: Nothing
"""
asserts.assertDutTraceContains(k, message, bench=self.bench)
def assert_dut_trace_not_contains(self, k, message):
"""
Wrapper to provide access to the bench for assertDutTraceDoesNotContain.
:param k: index of dut
:param message: Message that should not appear in traces
:return: Nothing
"""
asserts.assertDutTraceDoesNotContain(k, message, bench=self.bench)
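A hypothetical test-case snippet once AssertPlugin has been registered: each key of get_bench_api() becomes an attribute of the bench, so the asserts are called through self. The exact signatures of the generic asserts (assertTrue, assertEqual, ...) are assumed.
def case(self):
    self.assertTrue(1 + 1 == 2)                     # assumed signature: assertTrue(expression)
    self.assertEqual("abc", "abc")
    # The DUT trace asserts go through the wrappers above, which carry the bench reference
    self.assertDutTraceContains(1, "boot ok")
    self.assertDutTraceDoesNotContain(1, "hard fault")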
|
class AssertPlugin(PluginBase):
'''
Plugin for Asserts
'''
def __init__(self):
pass
def init(self, bench=None):
'''
Store bench instance
:param bench: Bench
:return: Nothing
:raises: AttributeError if bench is None
'''
pass
def get_bench_api(self):
'''
Extend bench functionality with these new commands
:return: Dictionary
'''
pass
def assert_dut_trace_contains(self, k, message):
'''
Wrapper to provide access to the bench for assertDutTraceContains.
:param k: index of dut
:param message: Message that should be in traces
:return: Nothing
'''
pass
def assert_dut_trace_not_contains(self, k, message):
'''
Wrapper to provide access to the bench for assertDutTraceDoesNotContain.
:param k: index of dut
:param message: Message that should not appear in traces
:return: Nothing
'''
pass
| 6 | 5 | 10 | 0 | 5 | 5 | 1 | 1.04 | 1 | 3 | 0 | 0 | 5 | 1 | 5 | 12 | 56 | 4 | 26 | 8 | 20 | 27 | 26 | 8 | 20 | 2 | 2 | 1 | 6 |
2,059 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/FileApi.py
|
icetea_lib.Plugin.plugins.FileApi.FileApiPlugin
|
class FileApiPlugin(PluginBase):
"""
Plugin interface for JsonFile plugin.
"""
def __init__(self):
super(FileApiPlugin, self).__init__() # pylint: disable=useless-super-delegation
self.bench = None
def init(self, bench=None):
"""
Init function to store the Bench object reference.
:param bench: Bench
:return: Nothing
:raises AttributeError if bench is None.
"""
self.bench = bench
if self.bench is None:
raise AttributeError("Bench instance not present!")
def get_bench_api(self):
"""
Get the descriptor for the plugin interface.
:return: dict
"""
return {"JsonFile": self._jsonfileconstructor}
def _jsonfileconstructor(self, filename=None, filepath=None, logger=None):
"""
Constructor method for the JsonFile object.
:param filename: Name of the file
:param filepath: Path to the file
:param logger: Optional logger.
:return: JsonFile
"""
if filepath:
path = filepath
else:
tc_path = os.path.abspath(os.path.join(inspect.getfile(self.bench.__class__),
os.pardir))
path = os.path.abspath(os.path.join(tc_path, os.pardir, "session_data"))
name = "default_file.json" if not filename else filename
log = self.bench.logger if not logger else logger
self.bench.logger.info("Setting json file location to: {}".format(path))
return files.JsonFile(log, path, name)
|
class FileApiPlugin(PluginBase):
'''
Plugin interface for JsonFile plugin.
'''
def __init__(self):
pass
def init(self, bench=None):
'''
Init function to store the Bench object reference.
:param bench: Bench
:return: Nothing
:raises AttributeError if bench is None.
'''
pass
def get_bench_api(self):
'''
Get the descriptor for the plugin interface.
:return: dict
'''
pass
def _jsonfileconstructor(self, filename=None, filepath=None, logger=None):
'''
Constructor method for the JsonFile object.
:param filename: Name of the file
:param filepath: Path to the file
:param logger: Optional logger.
:return: JsonFile
'''
pass
| 5 | 4 | 10 | 1 | 5 | 5 | 2 | 1 | 1 | 3 | 1 | 0 | 4 | 1 | 4 | 11 | 47 | 6 | 21 | 10 | 16 | 21 | 19 | 10 | 14 | 4 | 2 | 1 | 8 |
2,060 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/HttpApi.py
|
icetea_lib.Plugin.plugins.HttpApi.Api
|
class Api(HttpApi.HttpApi):
"""
Wrapper for HttpApi.
"""
def __init__(self, host, headers, cert, logger): # pylint: disable=useless-super-delegation
super(Api, self).__init__(host, headers, cert, logger)
def _raise_fail(self, response, expected):
"""
Raise a TestStepFail with neatly formatted error message
"""
try:
if self.logger:
self.logger.error("Status code "
"{} != {}. \n\n "
"Payload: {}".format(response.status_code,
expected,
response.content))
raise TestStepFail("Status code {} != {}.".format(response.status_code, expected))
except TestStepFail:
raise
except: # pylint: disable=bare-except
if self.logger:
self.logger.error("Status code "
"{} != {}. \n\n "
"Payload: {}".format(response.status_code,
expected,
"Unable to parse payload"))
raise TestStepFail("Status code {} != {}.".format(response.status_code, expected))
# pylint: disable=arguments-differ,too-many-arguments
def get(self, path, headers=None, params=None, expected=200, raiseException=True, **kwargs):
response = super(Api, self).get(path, headers, params, **kwargs)
if expected is not None and response.status_code != expected:
if raiseException:
self._raise_fail(response, expected)
return response
def post(self, path, data=None, json=None, headers=None, expected=200,
raiseException=True, **kwargs):
response = super(Api, self).post(path, data, json, headers, **kwargs)
if expected is not None and response.status_code != expected:
if raiseException:
self._raise_fail(response, expected)
return response
def put(self, path, data=None, headers=None, expected=200, raiseException=True, **kwargs):
response = super(Api, self).put(path, data, headers, **kwargs)
if expected is not None and response.status_code != expected:
if raiseException:
self._raise_fail(response, expected)
return response
def delete(self, path, headers=None, expected=200, raiseException=True, **kwargs):
response = super(Api, self).delete(path, headers, **kwargs)
if expected is not None and response.status_code != expected:
if raiseException:
self._raise_fail(response, expected)
return response
def patch(self, path, data=None, headers=None, expected=200, raiseException=True, **kwargs):
response = super(Api, self).patch(path, data, headers, **kwargs)
if expected is not None and response.status_code != expected:
if raiseException:
self._raise_fail(response, expected)
return response
|
class Api(HttpApi.HttpApi):
'''
Wrapper for HttpApi.
'''
def __init__(self, host, headers, cert, logger):
pass
def _raise_fail(self, response, expected):
'''
Raise a TestStepFail with neatly formatted error message
'''
pass
def get(self, path, headers=None, params=None, expected=200, raiseException=True, **kwargs):
pass
def post(self, path, data=None, json=None, headers=None, expected=200,
raiseException=True, **kwargs):
pass
def put(self, path, data=None, headers=None, expected=200, raiseException=True, **kwargs):
pass
def delete(self, path, headers=None, expected=200, raiseException=True, **kwargs):
pass
def patch(self, path, data=None, headers=None, expected=200, raiseException=True, **kwargs):
pass
| 8 | 2 | 8 | 0 | 7 | 1 | 3 | 0.17 | 1 | 2 | 1 | 0 | 7 | 0 | 7 | 19 | 66 | 6 | 53 | 14 | 44 | 9 | 44 | 13 | 36 | 5 | 2 | 2 | 21 |
2,061 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/HttpApi.py
|
icetea_lib.Plugin.plugins.HttpApi.HttpApiPlugin
|
class HttpApiPlugin(PluginBase):
"""
Plugin class implementation
"""
def __init__(self):
super(HttpApiPlugin, self).__init__()
self.logger = None
self.bench = None
def init(self, bench=None):
self.bench = bench
if self.bench is None:
raise AttributeError("Bench instance not present!")
if hasattr(bench, "logger"):
self.logger = bench.logger
def get_bench_api(self):
return {"HttpApi": self.get_tc_api}
def get_tc_api(self, host, headers=None, cert=None, logger=None):
'''
Gets HttpApi wrapped into a neat little package that raises TestStepFail
if expected status code is not returned by the server.
Default setting for expected status code is 200. Set expected to None when calling methods
to ignore the expected status code parameter or
set raiseException = False to disable raising the exception.
'''
if logger is None and self.logger:
logger = self.logger
return Api(host, headers, cert, logger)
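A hypothetical test-case snippet once HttpApiPlugin is registered: self.HttpApi is the constructor exposed through get_bench_api(), and the host value is a placeholder.
def case(self):
    api = self.HttpApi("http://localhost:8080")     # placeholder host
    response = api.get("/status")                   # raises TestStepFail unless the server answers 200
    api.get("/optional", expected=404)              # expect a 404 instead of 200
    api.get("/raw", expected=None)                  # ignore the status code entirely
    api.get("/flaky", raiseException=False)         # check but never raise
    self.logger.info("status: %s", response.status_code)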
|
class HttpApiPlugin(PluginBase):
'''
Plugin class implementation
'''
def __init__(self):
pass
def init(self, bench=None):
pass
def get_bench_api(self):
pass
def get_tc_api(self, host, headers=None, cert=None, logger=None):
'''
Gets HttpApi wrapped into a neat little package that raises TestStepFail
if expected status code is not returned by the server.
Default setting for expected status code is 200. Set expected to None when calling methods
to ignore the expected status code parameter or
set raiseException = False to disable raising the exception.
'''
pass
| 5 | 2 | 6 | 0 | 4 | 2 | 2 | 0.59 | 1 | 3 | 1 | 0 | 4 | 2 | 4 | 11 | 30 | 3 | 17 | 7 | 12 | 10 | 17 | 7 | 12 | 3 | 2 | 1 | 7 |
2,062 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutConsole.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutConsole.DutConsole
|
class DutConsole(DutProcess):
"""
Configuration for DutConsole. The supported configuration parameters are:
"type": The type of console connection; possible values: "SSH" (default)
"username": The username that should be used for login; default is 'arm'
"hostname": The hostname or IP address of the remote host; default is 'localhost'
"port": Port for the console connection; default is 22
"app": The name of the executable that creates the connection;
default is '/usr/bin/ssh'
"cwd": The directory where the executable is run;
default is None (current directory)
"args": Extra arguments for the executable; default is '-tt'
"shell": The type of shell (if any) used at the remote host; default is 'bash'
"""
conf = {
"type": "SSH",
"username": "arm",
"hostname": "localhost",
"app": "/usr/bin/ssh",
"port": "22",
"cwd": None,
"args": "-tt",
"shell": "bash"
}
def __init__(self, name, conf=None, params=None):
DutProcess.__init__(self, name=name, params=None)
self.config.update(self.conf)
if conf is not None:
self.config.update(conf)
# Set up SSH connection
if self.config["type"] == "SSH":
self.cmd = self.config["app"] + " " + \
self.config["args"] + " " + \
self.config["username"] + "@" + \
self.config["hostname"] + " -p " + \
self.config["port"]
self.path = self.config["cwd"]
self.ignore_return_code = True
self.comport = self.cmd
def init_cli(self):
# Set up the Bash Unix shell
if self.config["shell"] == "bash":
self.execute_command("export PROMPT_COMMAND='RC=$?;echo \"retcode: $RC\";'")
def prepareConnectionClose(self): #pylint: disable=C0103
"""
Deprecated version of prepare_connection_close. Still present for backwards compatibility
:return: Nothing
"""
self.logger.warning("prepareConnectionClose deprecated, use prepare_connection_close")
self.prepare_connection_close()
def prepare_connection_close(self):
# No actions...
return
def reset(self, method=None):
# No actions...
return
def writeline(self, data, crlf="\n"):
DutProcess.writeline(self, data, crlf)
def print_info(self):
info_string = "DutConsole {} \n".format(self.name)
if self.config:
info_string = info_string + "Configuration for this DUT:\n {} \n".format(self.config)
if self.comport:
info_string = info_string + "COM port: {} \n".format(self.comport)
if self.location:
info_string = info_string + "Location: x = {}, y = {} \n".format(self.location.x_coord,
self.location.y_coord)
self.logger.info(info_string)
def get_config(self):
return self.config
def _flash_needed(self, **kwargs):
pass
|
class DutConsole(DutProcess):
'''
Configuration for DutConsole. The supported configuration parameters are:
"type": The type of console connection; possible values: "SSH" (default)
"username": The username that should be used for login; default is 'arm'
"hostname": The hostname or IP address of the remote host; default is 'localhost'
"port": Port for the console connection; default is 22
"app": The name of the executable that creates the connection;
default is '/usr/bin/ssh'
"cwd": The directory where the executable is run;
default is None (current directory)
"args": Extra arguments for the executable; default is '-tt'
"shell": The type of shell (if any) used at the remote host; default is 'bash'
'''
def __init__(self, name, conf=None, params=None):
pass
def init_cli(self):
pass
def prepareConnectionClose(self):
'''
Deprecated version of prepare_connection_close. Still present for backwards compatibility
:return: Nothing
'''
pass
def prepare_connection_close(self):
pass
def reset(self, method=None):
pass
def writeline(self, data, crlf="\n"):
pass
def print_info(self):
pass
def get_config(self):
pass
def _flash_needed(self, **kwargs):
pass
| 10 | 2 | 6 | 1 | 4 | 1 | 2 | 0.43 | 1 | 0 | 0 | 0 | 9 | 4 | 9 | 83 | 87 | 15 | 51 | 16 | 41 | 22 | 37 | 16 | 27 | 4 | 3 | 1 | 15 |
2,063 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutDetection.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutDetection.DutDetection
|
class DutDetection(object):
"""
DutDetection class. Contains methods to detect usable ports and available devices using
mbedls and serial module.
"""
def __init__(self):
self.mbeds = None
try:
from mbed_lstools.main import create
self.mbeds = create()
except ImportError:
raise AllocationError("mbedls is missing")
@staticmethod
def is_port_usable(port_name):
"""
Static is_port_usable method. Tries to create an instance of a Serial object to confirm
that the port is usable.
:param port_name: Name of port
:return: True or False
"""
try:
#Disable "unused variable" warning
#pylint: disable=W0612
ser = Serial(port=port_name)
return True
except SerialException:
return False
def get_available_devices(self):
"""
Gets available devices using mbedls and self.available_edbg_ports.
:return: List of connected devices as dictionaries.
"""
connected_devices = self.mbeds.list_mbeds() if self.mbeds else []
# Check for devices that are not mbed OS supported (EDBG/SAM4E boards).
# Kept only for backward compatibility - obsolete.
edbg_ports = self.available_edbg_ports()
for port in edbg_ports:
connected_devices.append({
"platform_name": "SAM4E",
"serial_port": port,
"mount_point": None,
"target_id": None,
"baud_rate": 460800
})
for dev in connected_devices:
dev['state'] = "unknown"
return connected_devices
def available_edbg_ports(self):
"""
Finds available EDBG COM ports.
:return: list of available ports
"""
ports_available = sorted(list(list_ports.comports()))
edbg_ports = []
for iport in ports_available:
port = iport[0]
desc = iport[1]
hwid = iport[2]
if str(desc).startswith("EDBG Virtual COM Port") or \
"VID:PID=03EB:2111" in str(hwid).upper():
# print("%-10s: %s (%s)\n" % (port, desc, hwid))
try:
edbg_ports.index(port, 0)
print("There is multiple %s ports with same number!" % port)
except ValueError:
edbg_ports.append(port)
# print("Detected %i DUT's" % len(edbg_ports))
return edbg_ports
|
class DutDetection(object):
'''
DutDetection class. Contains methods to detect usable ports and available devices using
mbedls and serial module.
'''
def __init__(self):
pass
@staticmethod
def is_port_usable(port_name):
'''
Static is_port_usable method. Tries to create an instance of a Serial object to confirm
that the port is usable.
:param port_name: Name of port
:return: True or False
'''
pass
def get_available_devices(self):
'''
Gets available devices using mbedls and self.available_edbg_ports.
:return: List of connected devices as dictionaries.
'''
pass
def available_edbg_ports(self):
'''
Finds available EDBG COM ports.
:return: list of available ports
'''
pass
| 6 | 4 | 17 | 1 | 11 | 5 | 3 | 0.55 | 1 | 5 | 1 | 0 | 3 | 1 | 4 | 4 | 76 | 8 | 44 | 19 | 37 | 24 | 36 | 18 | 30 | 4 | 1 | 3 | 12 |
2,064 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutMbed.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutMbed.DutMbed
|
class DutMbed(DutSerial):
"""
DutMbed, child of DutSerial. Mbed device over serial connection.
"""
def __init__(self, name='mbed', port=None, baudrate=115200, config=None,
ch_mode_config=None, serial_config=None, params=None):
"""
Mbed device over serial connection.
:param name: Dut name
:param port: Serial port name
:param baudrate: Baudrate, int
:param config: configuration dict
:param ch_mode_config: dict
:param serial_config: dict
:param params: dict
"""
super(DutMbed, self).__init__(name, port, baudrate, config, ch_mode_config,
serial_config, params)
def flash(self, binary_location=None, forceflash=None):
"""
Flash a binary to the target device using mbed-flasher.
:param binary_location: Binary to flash to device.
:param forceflash: Boolean, force flashing even if the binary sha1 already matches.
:return: False if an unknown error was encountered during flashing.
True if flasher retcode == 0
:raises: ImportError if mbed-flasher not installed.
:raises: DutConnectionError if flashing fails.
"""
if not Flash:
self.logger.error("Mbed-flasher not installed!")
raise ImportError("Mbed-flasher not installed!")
try:
# create build object
self.build = Build.init(binary_location)
except NotImplementedError as error:
self.logger.error("Build initialization failed. "
"Check your build location.")
self.logger.debug(error)
raise DutConnectionError(error)
# check if need to flash - depend on forceflash -option
if not self._flash_needed(forceflash=forceflash):
self.logger.info("Skipping flash, not needed.")
return True
# initialize mbed-flasher with proper logger
logger = get_external_logger("mbed-flasher", "FLS")
flasher = Flash(logger=logger)
if not self.device:
self.logger.error("Trying to flash device but device is not there?")
return False
try:
buildfile = self.build.get_file()
if not buildfile:
raise DutConnectionError("Binary {} not found".format(buildfile))
self.logger.info('Flashing dev: %s', self.device['target_id'])
target_id = self.device.get("target_id")
retcode = flasher.flash(build=buildfile, target_id=target_id,
device_mapping_table=[self.device])
except FLASHER_ERRORS as error:
if error.__class__ == NotImplementedError:
self.logger.error("Flashing not supported for this platform!")
elif error.__class__ == SyntaxError:
self.logger.error("target_id required by mbed-flasher!")
if FlashError is not None:
if error.__class__ == FlashError:
self.logger.error("Flasher raised the following error: %s Error code: %i",
error.message, error.return_code)
raise DutConnectionError(error)
if retcode == 0:
self.dutinformation.build_binary_sha1 = self.build.sha1
return True
self.dutinformation.build_binary_sha1 = None
return False
def _flash_needed(self, **kwargs):
"""
Check if flashing is needed. Flashing can be skipped if resource binary_sha1 attribute
matches build sha1 and forceflash is not True.
:param kwargs: Keyword arguments (forceflash: Boolean)
:return: Boolean
"""
forceflash = kwargs.get("forceflash", False)
cur_binary_sha1 = self.dutinformation.build_binary_sha1
if not forceflash and self.build.sha1 == cur_binary_sha1:
return False
return True
def print_info(self):
"""
Prints Dut information nicely formatted into a table.
:return: Nothing
"""
table = PrettyTable()
start_string = "DutMbed {} \n".format(self.name)
row = []
info_string = ""
if self.config:
info_string = info_string + "Configuration for this DUT:\n\n {} \n".format(self.config)
if self.comport:
table.add_column("COM port", [])
row.append(self.comport)
if self.port:
if hasattr(self.port, "baudrate"):
table.add_column("Baudrate", [])
row.append(self.port.baudrate)
if hasattr(self.port, "xonxoff"):
table.add_column("XON/XOFF", [])
row.append(self.port.xonxoff)
if hasattr(self.port, "timeout"):
table.add_column("Timeout", [])
row.append(self.port.timeout)
if hasattr(self.port, "rtscts"):
table.add_column("RTSCTS", [])
row.append(self.port.rtscts)
if self.location:
table.add_column("Location", [])
row.append("X = {}, Y = {}".format(self.location.x_coord, self.location.y_coord))
self.logger.info(start_string)
self.logger.debug(info_string)
table.add_row(row)
print(table)
|
class DutMbed(DutSerial):
'''
DutMbed, child of DutSerial. Mbed device over serial connection.
'''
def __init__(self, name='mbed', port=None, baudrate=115200, config=None,
ch_mode_config=None, serial_config=None, params=None):
'''
Mbed device over serial connection.
:param name: Dut name
:param port: Serial port name
:param baudrate: Baudrate, int
:param config: configuration dict
:param ch_mode_config: dict
:param serial_config: dict
:param params: dict
'''
pass
def flash(self, binary_location=None, forceflash=None):
'''
Flash a binary to the target device using mbed-flasher.
:param binary_location: Binary to flash to device.
:param forceflash: Boolean, force flashing even if the binary sha1 already matches.
:return: False if an unknown error was encountered during flashing.
True if flasher retcode == 0
:raises: ImportError if mbed-flasher not installed.
:raises: DutConnectionError if flashing fails.
'''
pass
def _flash_needed(self, **kwargs):
'''
Check if flashing is needed. Flashing can be skipped if resource binary_sha1 attribute
matches build sha1 and forceflash is not True.
:param kwargs: Keyword arguments (forceflash: Boolean)
:return: Boolean
'''
pass
def print_info(self):
'''
Prints Dut information nicely formatted into a table.
:return: Nothing
'''
pass
| 5 | 5 | 31 | 2 | 21 | 8 | 6 | 0.42 | 1 | 5 | 1 | 0 | 4 | 1 | 4 | 85 | 129 | 11 | 83 | 19 | 77 | 35 | 77 | 17 | 72 | 12 | 3 | 3 | 24 |
2,065 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutProcess.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutProcess.DutProcess
|
class DutProcess(Dut, GenericProcess): # pylint: disable=too-many-instance-attributes
"""
DutProcess class, subclasses both Dut and GenericProcess. Implements an interface for
communicating with a process as if it were a device under test.
"""
def __init__(self, type='process', name='process', config=None, params=None):
Dut.__init__(self, name=name, params=params)
GenericProcess.__init__(self, self.name, logger=self.logger)
self.disable_io_prints() # because those are printed in Dut object
self.proc = False
self.type = type
self.config = config if config else {}
self.dutinformation = DutInformation(self.type,
self.resource_id if self.resource_id else "",
index=None, build=None)
self.command = None
def open_connection(self):
"""
Open connection by starting the process.
:raises: DutConnectionError
"""
self.logger.debug("Open CLI Process '%s'",
(self.comport), extra={'type': '<->'})
self.cmd = self.comport if isinstance(self.comport, list) else [self.comport]
if not self.comport:
raise DutConnectionError("Process not defined!")
try:
self.build = Build.init(self.cmd[0])
except NotImplementedError as error:
self.logger.error("Build initialization failed. Check your build location.")
self.logger.debug(error)
raise DutConnectionError(error)
# Start process&reader thread. Call Dut.process_dut() when new data is coming
app = self.config.get("application")
if app and app.get("bin_args"):
self.cmd = self.cmd + app.get("bin_args")
try:
self.start_process(self.cmd, processing_callback=lambda: Dut.process_dut(self))
except KeyboardInterrupt:
raise
except Exception as error:
raise DutConnectionError("Couldn't start DUT target process {}".format(error))
def prepareConnectionClose(self): # pylint: disable=C0103
"""
Deprecated version of prepare_connection_close. Still present for backwards compatibility.
:return: Nothing
"""
self.logger.warning("prepareConnectionClose deprecated, use prepare_connection_close")
self.prepare_connection_close()
def prepare_connection_close(self):
"""
exit the process if it is alive.
:return: Nothing
"""
pass
def close_connection(self):
"""
Stop the process.
:return: Nothing
"""
self.logger.debug("Close CLI Process '%s'" % self.cmd, extra={'type': '<->'})
self.stop_process()
def writeline(self, data, crlf="\n"): # pylint: disable=arguments-differ
"""
Write data to process.
:param data: data to write
:param crlf: line end character
:return: Nothing
"""
GenericProcess.writeline(self, data, crlf=crlf)
def readline(self, timeout=1):
"""
Read a line from the process.
:param timeout: Timeout
:return: read line
"""
return GenericProcess.readline(self, timeout=timeout)
def reset(self, method=None):
"""
Not implemented
"""
self.logger.info("Reset not implemented for process DUT")
def print_info(self):
"""
Print information of this dut.
:return: Nothing.
"""
info_string = "DutProcess {}, \n".format(self.name)
if self.comport:
info_string = info_string + "CMD {} \n".format(self.comport)
if self.location:
info_string = info_string + "Location: x = {}, y = {} \n".format(self.location.x_coord,
self.location.y_coord)
if self.config:
info_string = info_string + "Configuration for this DUT:\n {} \n".format(self.config)
self.logger.info(info_string)
def get_info(self):
"""
Get DutInformation object of this dut.
:return: DutInformation
"""
return self.dutinformation
def get_config(self):
"""
Get configuration of this dut.
:return: dictionary
"""
return self.config
def _flash_needed(self, **kwargs):
pass
|
class DutProcess(Dut, GenericProcess):
'''
DutProcess class, subclasses both Dut and GenericProcess. Implements an interface for
communicating with a process as if it were a device under test.
'''
def __init__(self, type='process', name='process', config=None, params=None):
pass
def open_connection(self):
'''
Open connection by starting the process.
:raises: DutConnectionError
'''
pass
def prepareConnectionClose(self):
'''
Deprecated version of prepare_connection_close. Still present for backwards compatibility.
:return: Nothing
'''
pass
def prepare_connection_close(self):
'''
exit the process if it is alive.
:return: Nothing
'''
pass
def close_connection(self):
'''
Stop the process.
:return: Nothing
'''
pass
def writeline(self, data, crlf="\n"):
'''
Write data to process.
:param data: data to write
:param crlf: line end character
:return: Nothing
'''
pass
def readline(self, timeout=1):
'''
Read a line from the process.
:param timeout: Timeout
:return: read line
'''
pass
def reset(self, method=None):
'''
Not implemented
'''
pass
def print_info(self):
'''
Print information of this dut.
:return: Nothing.
'''
pass
def get_info(self):
'''
Get DutInformation object of this dut.
:return: DutInformation
'''
pass
def get_config(self):
'''
Get configuration of this dut.
:return: dictionary
'''
pass
def _flash_needed(self, **kwargs):
pass
| 13 | 11 | 10 | 1 | 5 | 4 | 2 | 0.81 | 2 | 7 | 3 | 1 | 12 | 9 | 12 | 74 | 130 | 20 | 63 | 25 | 50 | 51 | 59 | 22 | 46 | 7 | 2 | 1 | 23 |
2,066 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutSerial.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutSerial.ChunkModeParams
|
class ChunkModeParams(object):
"""
ChunkModeParams object for storing chunk mode parameters
"""
def __init__(self, on=False, size=1, chunk_delay=0.01, start_delay=0):
self.enabled = on
self.size = size
self.chunk_delay = chunk_delay
self.start_delay = start_delay
def get_params(self):
"""
Get parameters.
:return: enabled, size, chunk_delay, start_delay
"""
return self.enabled, self.size, self.chunk_delay, self.start_delay
|
class ChunkModeParams(object):
'''
ChunkModeParams object for storing chunk mode parameters
'''
def __init__(self, on=False, size=1, chunk_delay=0.01, start_delay=0):
pass
def get_params(self):
'''
Get parameters.
:return: enabled, size, chunk_delay, start_delay
'''
pass
| 3 | 2 | 6 | 1 | 4 | 2 | 1 | 0.88 | 1 | 0 | 0 | 0 | 2 | 4 | 2 | 2 | 17 | 2 | 8 | 7 | 5 | 7 | 8 | 7 | 5 | 1 | 1 | 0 | 2 |
2,067 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutSerial.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutSerial.DutSerial
|
class DutSerial(Dut):
"""
DutSerial Object. Inherits from Dut object. Represents a local hardware device connected to USB
"""
def __init__(self, name='serial', port=None, baudrate=460800, config=None,
ch_mode_config=None, serial_config=None, params=None):
Dut.__init__(self, name=name, params=params)
ch_mode_config = ch_mode_config if ch_mode_config is not None else {}
serial_config = serial_config if serial_config is not None else {}
self.readthread = None
self.port = False
self.comport = port
self.type = 'serial'
self.name = port
self.platform = ''
self.serialparams = SerialParams(timeout=serial_config.get("serial_timeout", 0.01),
xonxoff=serial_config.get("serial_xonxoff", False),
rtscts=serial_config.get("serial_rtscts", False),
baudrate=baudrate)
self.chunkmodeparams = ChunkModeParams(on=ch_mode_config.get("ch_mode", False),
size=ch_mode_config.get("ch_mode_chunk_size", 1),
chunk_delay=ch_mode_config.get("ch_mode_ch_delay",
0.01),
start_delay=ch_mode_config.get("ch_mode_start_delay",
0))
self.input_queue = deque() # Input queue
self.daemon = True # Allow Python to stop us
self.keep_reading = False
if config:
self.config.update(config)
self.device = config.get("allocated", None)
init_cli_cmds = None
if "init_cli_cmds" in config["application"]:
init_cli_cmds = config["application"]["init_cli_cmds"]
if init_cli_cmds is not None:
self.set_init_cli_cmds(init_cli_cmds)
post_cli_cmds = None
if "post_cli_cmds" in config["application"]:
post_cli_cmds = config["application"]["post_cli_cmds"]
if post_cli_cmds is not None:
self.set_post_cli_cmds(post_cli_cmds)
tid = self.config.get('allocated', {}).get('target_id', "unknown")
self.dutinformation = DutInformation("serial",
tid,
index=self.index, build=self.build)
"""Properties"""
@property
def ch_mode(self):
"""
:return: True if chunk mode enabled, False otherwise
"""
return self.chunkmodeparams.enabled
@ch_mode.setter
def ch_mode(self, value):
self.chunkmodeparams.enabled = value
@property
def ch_mode_chunk_size(self):
"""
:return: Chunk size
"""
return self.chunkmodeparams.size
@ch_mode_chunk_size.setter
def ch_mode_chunk_size(self, value):
self.chunkmodeparams.size = value
@property
def ch_mode_ch_delay(self):
"""
:return: Chunk delay
"""
return self.chunkmodeparams.chunk_delay
@ch_mode_ch_delay.setter
def ch_mode_ch_delay(self, value):
self.chunkmodeparams.chunk_delay = value
@property
def ch_mode_start_delay(self):
"""
:return: Chunk start delay
"""
return self.chunkmodeparams.start_delay
@ch_mode_start_delay.setter
def ch_mode_start_delay(self, value):
self.chunkmodeparams.start_delay = value
@property
def serial_baudrate(self):
"""
Getter for serial baudrate.
:return: int
"""
return self.serialparams.baudrate
@serial_baudrate.setter
def serial_baudrate(self, value):
self.serialparams.baudrate = value
@property
def serial_timeout(self):
"""
:return: Serial timeout
"""
return self.serialparams.timeout
@serial_timeout.setter
def serial_timeout(self, value):
"""
Setter for serial connection timeout.
:param value: Value to set
:return: Nothing
"""
self.serialparams.timeout = value
@property
def serial_xonxoff(self):
"""
:return: xonxoff value as a Boolean
"""
return self.serialparams.xonxoff
@serial_xonxoff.setter
def serial_xonxoff(self, value):
self.serialparams.xonxoff = value
@property
def serial_rtscts(self):
"""
:return: Rtscts as boolean
"""
return self.serialparams.rtscts
@serial_rtscts.setter
def serial_rtscts(self, value):
self.serialparams.rtscts = value
"""Methods"""
def get_resource_id(self):
"""
Get resource id (target id) from config dictionary.
:return: target_id or None if not found
"""
return self.config.get('allocated').get('target_id')
def flash(self, binary_location=None, forceflash=None): # pylint: disable=too-many-branches
"""
Nothing, not implemented.
"""
self.logger.warning("Flashing is not supported for this dut type.")
return True
def get_info(self):
"""
Get DutInformation object from this Dut.
:return: DutInformation object
"""
return self.dutinformation
# open serial port connection
def open_connection(self):
"""
Open serial port connection.
:return: Nothing
:raises: DutConnectionError if serial port was already open or a SerialException occurs.
ValueError if EnhancedSerial __init__ or value setters raise ValueError
"""
if self.readthread is not None:
raise DutConnectionError("Trying to open serial port which was already open")
self.logger.info("Open Connection "
"for '%s' using '%s' baudrate: %d" % (self.dut_name,
self.comport,
self.serial_baudrate),
extra={'type': '<->'})
if self.serial_xonxoff:
self.logger.debug("Use software flow control for dut: %s" % self.dut_name)
if self.serial_rtscts:
self.logger.debug("Use hardware flow control for dut: %s" % self.dut_name)
try:
self.port = EnhancedSerial(self.comport)
self.port.baudrate = self.serial_baudrate
self.port.timeout = self.serial_timeout
self.port.xonxoff = self.serial_xonxoff
self.port.rtscts = self.serial_rtscts
self.port.flushInput()
self.port.flushOutput()
except SerialException as err:
self.logger.warning(err)
raise DutConnectionError(str(err))
except ValueError as err:
self.logger.warning(err)
raise ValueError(str(err))
if self.ch_mode:
self.logger.info("Use chunk-mode with size %d, delay: %.3f when write data" %
(self.ch_mode_chunk_size, self.ch_mode_ch_delay),
extra={'type': '<->'})
time.sleep(self.ch_mode_start_delay)
else:
self.logger.info("Use normal serial write mode", extra={'type': '<->'})
if self.params.reset:
self.reset()
# Start the serial reading thread
self.readthread = Thread(name=self.name, target=self.run)
self.readthread.start()
def prepareConnectionClose(self): # pylint: disable=C0103
"""
Deprecated version of prepare_connection_close. Still present for backwards compatibility.
:return: Nothing
"""
self.logger.warning("prepareConnectionClose deprecated, use prepare_connection_close")
self.prepare_connection_close()
def prepare_connection_close(self):
"""
Sends post-cli-cmds and stops the read thread.
:return: Nothing
"""
try:
self.init_cli_human()
except KeyboardInterrupt:
pass
self.stop()
# close serial port connection
def close_connection(self): # pylint: disable=C0103
"""
Closes serial port connection.
:return: Nothing
"""
if self.port:
self.stop()
self.logger.debug("Close port '%s'" % self.comport,
extra={'type': '<->'})
self.port.close()
self.port = False
def reset(self, method=None):
"""
Resets the serial device. Internally calls __send_break().
:param method: Not used for DutSerial
:return: Nothing
"""
self.logger.info('Reset serial device %s' % self.name)
self.__send_break()
def __sendBreak(self): # pylint: disable=C0103
"""
Deprecated, present for backwards compatibility.
:return: result of EnhancedSerial safe_sendBreak()
"""
self.logger.warning("__send_Break deprecated, use __send_break")
return self.__send_break()
def __send_break(self):
"""
Sends break to device.
:return: result of EnhancedSerial safe_sendBreak()
"""
if self.port:
self.logger.debug("sendBreak to device to reboot", extra={'type': '<->'})
result = self.port.safe_sendBreak()
time.sleep(1)
if result:
self.logger.debug("reset completed", extra={'type': '<->'})
else:
self.logger.warning("reset failed", extra={'type': '<->'})
return result
return None
# transfer data to the serial port
def writeline(self, data):
"""
Writes data to serial port.
:param data: Data to write
:return: Nothing
:raises: RuntimeError if a SerialException occurs.
"""
try:
if self.ch_mode:
data += "\n"
parts = split_by_n(data, self.ch_mode_chunk_size)
for split_str in parts:
self.port.write(split_str.encode())
time.sleep(self.ch_mode_ch_delay)
else:
self.port.write((data + "\n").encode())
except SerialException as err:
self.logger.exception("SerialError occured while trying to write data {}.".format(data))
raise RuntimeError(str(err))
# read line from serial port
def _readline(self, timeout=1):
"""
Read line from serial port.
:param timeout: timeout, default is 1
:return: stripped line or None
"""
line = self.port.readline(timeout=timeout)
return strip_escape(line.strip()) if line is not None else line
def peek(self):
"""
Peek into the port line buffer to see if there are incomplete lines.
:return: str
"""
if self.port:
return self.port.peek()
return ""
def run(self):
"""
Read lines while keep_reading is True. Calls process_dut for each received line.
:return: Nothing
"""
self.keep_reading = True
while self.keep_reading:
line = self._readline()
if line:
self.input_queue.appendleft(line)
Dut.process_dut(self)
def stop(self):
"""
Stops and joins readthread.
:return: Nothing
"""
self.keep_reading = False
if self.readthread is not None:
self.readthread.join()
self.readthread = None
def readline(self, timeout=1):
"""
Pops from input_queue.
:param timeout: Not used
:return: first item in input_queue or None
"""
try:
return self.input_queue.pop()
except IndexError:
pass
return None
def print_info(self):
"""
Prints Dut information nicely formatted into a table.
"""
table = PrettyTable()
start_string = "DutSerial {} \n".format(self.name)
row = []
info_string = ""
if self.config:
info_string = info_string + "Configuration for this DUT:\n\n {} \n".format(self.config)
if self.comport:
table.add_column("COM port", [])
row.append(self.comport)
if self.port:
if hasattr(self.port, "baudrate"):
table.add_column("Baudrate", [])
row.append(self.port.baudrate)
if hasattr(self.port, "xonxoff"):
table.add_column("XON/XOFF", [])
row.append(self.port.xonxoff)
if hasattr(self.port, "timeout"):
table.add_column("Timeout", [])
row.append(self.port.timeout)
if hasattr(self.port, "rtscts"):
table.add_column("RTSCTS", [])
row.append(self.port.rtscts)
if self.location:
table.add_column("Location", [])
row.append("X = {}, Y = {}".format(self.location.x_coord, self.location.y_coord))
self.logger.info(start_string)
self.logger.debug(info_string)
table.add_row(row)
print(table)
def get_config(self):
"""
Gets configuration dictionary.
:return: configuration as a dictionary
"""
return self.config
def _flash_needed(self, **kwargs):
"""
Check if flashing is needed. Flashing can be skipped if resource binary_sha1 attribute
matches build sha1 and forceflash is not True.
:param kwargs: Keyword arguments (forceflash: Boolean)
:return: Boolean
"""
return False
|
class DutSerial(Dut):
'''
DutSerial Object. Inherits from Dut object. Represents a local hardware device connected to USB
'''
def __init__(self, name='serial', port=None, baudrate=460800, config=None,
ch_mode_config=None, serial_config=None, params=None):
pass
@property
def ch_mode(self):
'''
:return: True if chunk mode enabled, False otherwise
'''
pass
@ch_mode.setter
def ch_mode(self):
pass
@property
def ch_mode_chunk_size(self):
'''
:return: Chunk size
'''
pass
@ch_mode_chunk_size.setter
def ch_mode_chunk_size(self):
pass
@property
def ch_mode_ch_delay(self):
'''
:return: Chunk delay
'''
pass
@ch_mode_ch_delay.setter
def ch_mode_ch_delay(self):
pass
@property
def ch_mode_start_delay(self):
'''
:return: Chunk start delay
'''
pass
@ch_mode_start_delay.setter
def ch_mode_start_delay(self):
pass
@property
def serial_baudrate(self):
'''
Getter for serial baudrate.
:return: int
'''
pass
@serial_baudrate.setter
def serial_baudrate(self):
pass
@property
def serial_timeout(self):
'''
:return: Serial timeout
'''
pass
@serial_timeout.setter
def serial_timeout(self):
'''
Setter for serial connection timeout.
:param value: Value to set
:return: Nothing
'''
pass
@property
def serial_xonxoff(self):
'''
:return: xonxoff value as a Boolean
'''
pass
@serial_xonxoff.setter
def serial_xonxoff(self):
pass
@property
def serial_rtscts(self):
'''
:return: Rtscts as boolean
'''
pass
@serial_rtscts.setter
def serial_rtscts(self):
pass
def get_resource_id(self):
'''
Get resource id (target id) from config dictionary.
:return: target_id or None if not found
'''
pass
def flash(self, binary_location=None, forceflash=None):
'''
Nothing, not implemented.
'''
pass
def get_info(self):
'''
Get DutInformation object from this Dut.
:return: DutInformation object
'''
pass
def open_connection(self):
'''
Open serial port connection.
:return: Nothing
:raises: DutConnectionError if serial port was already open or a SerialException occurs.
ValueError if EnhancedSerial __init__ or value setters raise ValueError
'''
pass
def prepareConnectionClose(self):
'''
Deprecated version of prepare_connection_close. Still present for backwards compatibility.
:return: Nothing
'''
pass
def prepare_connection_close(self):
'''
Sends post-cli-cmds and stops the read thread.
:return: Nothing
'''
pass
def close_connection(self):
'''
Closes serial port connection.
:return: Nothing
'''
pass
def reset(self, method=None):
'''
Resets the serial device. Internally calls __send_break().
:param method: Not used for DutSerial
:return: Nothing
'''
pass
def __sendBreak(self):
'''
Deprecated, present for backwards compatibility.
:return: result of EnhancedSerial safe_sendBreak()
'''
pass
def __send_break(self):
'''
Sends break to device.
:return: result of EnhancedSerial safe_sendBreak()
'''
pass
def writeline(self, data):
'''
Writes data to serial port.
:param data: Data to write
:return: Nothing
:raises: RuntimeError if a SerialException occurs.
'''
pass
def _readline(self, timeout=1):
'''
Read line from serial port.
:param timeout: timeout, default is 1
:return: stripped line or None
'''
pass
def peek(self):
'''
Peek into the port line buffer to see if there are incomplete lines.
:return: str
'''
pass
def run(self):
'''
Read lines while keep_reading is True. Calls process_dut for each received line.
:return: Nothing
'''
pass
def stop(self):
'''
Stops and joins readthread.
:return: Nothing
'''
pass
def readline(self, timeout=1):
'''
Pops from input_queue.
:param timeout: Not used
:return: first item in input_queue or None
'''
pass
def print_info(self):
'''
Prints Dut information nicely formatted into a table.
'''
pass
def get_config(self):
'''
Gets configuration dictionary.
:return: configuration as a dictionary
'''
pass
def _flash_needed(self, **kwargs):
'''
Check if flashing is needed. Flashing can be skipped if resource binary_sha1 attribute
matches build sha1 and forceflash is not True.
:param kwargs: Keyword arguments (forceflash: Boolean)
:return: Boolean
'''
pass
| 53 | 29 | 10 | 1 | 6 | 3 | 2 | 0.54 | 1 | 11 | 5 | 1 | 36 | 15 | 36 | 81 | 422 | 60 | 239 | 83 | 185 | 129 | 202 | 62 | 165 | 9 | 2 | 3 | 71 |
2,068 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutSerial.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutSerial.SerialParams
|
class SerialParams(object):
"""
SerialParams object for storing serial connection parameters.
"""
def __init__(self, timeout=0.01, xonxoff=False, rtscts=False, baudrate=460800):
self.timeout = timeout
self.xonxoff = xonxoff
self.rtscts = rtscts
self.baudrate = baudrate
def get_params(self):
"""
Get parameters as a tuple.
:return: timeout, xonxoff, rtscts, baudrate
"""
return self.timeout, self.xonxoff, self.rtscts, self.baudrate
|
class SerialParams(object):
'''
SerialParams object for storing serial connection parameters.
'''
def __init__(self, timeout=0.01, xonxoff=False, rtscts=False, baudrate=460800):
pass
def get_params(self):
'''
Get parameters as a tuple.
:return: timeout, xonxoff, rtscts, baudrate
'''
pass
| 3 | 2 | 6 | 1 | 4 | 2 | 1 | 0.88 | 1 | 0 | 0 | 0 | 2 | 4 | 2 | 2 | 17 | 2 | 8 | 7 | 5 | 7 | 8 | 7 | 5 | 1 | 1 | 0 | 2 |
2,069 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/DutTcp.py
|
icetea_lib.Plugin.plugins.LocalAllocator.DutTcp.DutTcp
|
class DutTcp(Dut):
'''
Draft version of the TCP dut type
'''
def __init__(self, name='tcp'):
Dut.__init__(self, name=name)
self.port = None
self.type = 'socket'
def open_connection(self):
"""
Open connection over TCP socket.
:return: Nothing
"""
self.logger.debug("Open COM %s", self.comport)
self.port = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
(ip_addr, port) = self.comport.split(':')
self.port.connect(ip_addr, num(port))
def close_connection(self):
"""
Close TCP port
:return: Nothing
"""
if self.port:
self.port.close()
self.logger.debug("Close TCP port")
def writeline(self, data):
"""
Write data to port
:param data: data to write
:return: Nothing
"""
self.port.send(data)
def readline(self, timeout=None): # timeout is not in use
"""
Read data from port and strip escape characters
:param timeout:
:return: Stripped line.
"""
fil = self.port.makefile()
line = fil.readline()
return strip_escape(line.strip())
def print_info(self):
pass
def _flash_needed(self, **kwargs):
pass
def get_info(self):
pass
def reset(self, method=None):
pass
|
class DutTcp(Dut):
'''
Draft version of the TCP dut type
'''
def __init__(self, name='tcp'):
pass
def open_connection(self):
'''
Open connection over TCP socket.
:return: Nothing
'''
pass
def close_connection(self):
'''
Close TCP port
:return: Nothing
'''
pass
def writeline(self, data):
'''
Write data to port
:param data: data to write
:return: Nothing
'''
pass
def readline(self, timeout=None):
'''
Read data from port and strip escape characters
:param timeout:
:return: Stripped line.
'''
pass
def print_info(self):
pass
def _flash_needed(self, **kwargs):
pass
def get_info(self):
pass
def reset(self, method=None):
pass
| 10 | 5 | 5 | 0 | 3 | 2 | 1 | 0.79 | 1 | 1 | 0 | 0 | 9 | 2 | 9 | 54 | 57 | 8 | 28 | 15 | 18 | 22 | 28 | 15 | 18 | 2 | 2 | 1 | 10 |
2,070 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/LocalAllocator/LocalAllocator.py
|
icetea_lib.Plugin.plugins.LocalAllocator.LocalAllocator.LocalAllocator
|
class LocalAllocator(BaseAllocator):
"""
LocalAllocator class, subclasses BaseAllocator. Implements allocation of local resources for
use in test cases. Uses mbedls to detect mbed devices.
"""
def __init__(self, args=None, logger=None, allocator_cfg=None):
super(LocalAllocator, self).__init__()
self.logger = logger
if self.logger is None:
self.logger = get_resourceprovider_logger("LocalAllocator", "LAL")
set_level("LAL", logging.DEBUG)
self._available_devices = []
@property
def share_allocations(self):
"""
Just return False, allocation sharing not implemented for this allocator.
:return: False
"""
return False
def can_allocate(self, dut_configuration):
"""
Checks if resource type is supported.
:param dut_configuration: ResourceRequirements object
:return: True if type is supported, False otherwise
"""
try:
return dut_configuration["type"] in ["hardware", "process", "serial", "mbed"]
except KeyError:
return False
def allocate(self, dut_configuration_list, args=None):
"""
Allocates resources from available local devices.
:param dut_configuration_list: List of ResourceRequirements objects
:param args: Not used
:return: AllocationContextList with allocated resources
"""
dut_config_list = dut_configuration_list.get_dut_configuration()
# if we need one or more local hardware duts let's search attached
# devices using DutDetection
if not isinstance(dut_config_list, list):
raise AllocationError("Invalid dut configuration format!")
if next((item for item in dut_config_list if item.get("type") == "hardware"), False):
self._available_devices = DutDetection().get_available_devices()
if len(self._available_devices) < len(dut_config_list):
raise AllocationError("Required amount of devices not available.")
# Enumerate all required DUT's
try:
for dut_config in dut_config_list:
if not self.can_allocate(dut_config.get_requirements()):
raise AllocationError("Resource type is not supported")
self._allocate(dut_config)
except AllocationError:
# Locally allocated don't need to be released any way for
# now, so just re-raise the error
raise
alloc_list = AllocationContextList()
res_id = None
for conf in dut_config_list:
if conf.get("type") == "mbed":
res_id = conf.get("allocated").get("target_id")
context = AllocationContext(resource_id=res_id, alloc_data=conf)
alloc_list.append(context)
alloc_list.set_dut_init_function("serial", init_generic_serial_dut)
alloc_list.set_dut_init_function("process", init_process_dut)
alloc_list.set_dut_init_function("mbed", init_mbed_dut)
return alloc_list
def release(self, dut=None):
"""
Resource releasing is not necessary. Not implemented.
:param dut: Not used
:return: Nothing
"""
pass
def _allocate(self, dut_configuration): # pylint: disable=too-many-branches
"""
Internal allocation function. Allocates a single resource based on dut_configuration.
:param dut_configuration: ResourceRequirements object which describes a required resource
:return: True
:raises: AllocationError if suitable resource was not found or if the platform was not
allowed to be used.
"""
if dut_configuration["type"] == "hardware":
dut_configuration.set("type", "mbed")
if dut_configuration["type"] == "mbed":
if not self._available_devices:
raise AllocationError("No available devices to allocate from")
dut_reqs = dut_configuration.get_requirements()
platforms = None if 'allowed_platforms' not in dut_reqs else dut_reqs[
'allowed_platforms']
platform_name = None if 'platform_name' not in dut_reqs else dut_reqs[
"platform_name"]
if platform_name is None and platforms:
platform_name = platforms[0]
if platform_name and platforms:
if platform_name not in platforms:
raise AllocationError("Platform name not in allowed platforms.")
# Enumerate through all available devices
for dev in self._available_devices:
if platform_name and dev["platform_name"] != platform_name:
self.logger.debug("Skipping device %s because of mismatching platform. "
"Required %s but device was %s", dev['target_id'],
platform_name, dev['platform_name'])
continue
if dev['state'] == 'allocated':
self.logger.debug("Skipping device %s because it was "
"already allocated", dev['target_id'])
continue
if DutDetection.is_port_usable(dev['serial_port']):
dev['state'] = "allocated"
dut_reqs['allocated'] = dev
self.logger.info("Allocated device %s", dev['target_id'])
return True
else:
self.logger.info("Could not open serial port (%s) of "
"allocated device %s", dev['serial_port'], dev['target_id'])
# Didn't find a matching device to allocate so allocation failed
raise AllocationError("No suitable local device available")
elif dut_configuration["type"] == "serial":
dut_reqs = dut_configuration.get_requirements()
if not dut_reqs.get("serial_port"):
raise AllocationError("Serial port not defined for requirement {}".format(dut_reqs))
if not DutDetection.is_port_usable(dut_reqs['serial_port']):
raise AllocationError("Serial port {} not usable".format(dut_reqs['serial_port']))
# Successful allocation, return True
return True
|
class LocalAllocator(BaseAllocator):
'''
LocalAllocator class, subclasses BaseAllocator. Implements allocation of local resources for
use in test cases. Uses mbedls to detect mbed devices.
'''
def __init__(self, args=None, logger=None, allocator_cfg=None):
pass
@property
def share_allocations(self):
'''
Just return False, allocation sharing not implemented for this allocator.
:return: False
'''
pass
def can_allocate(self, dut_configuration):
'''
Checks if resource type is supported.
:param dut_configuration: ResourceRequirements object
:return: True if type is supported, False otherwise
'''
pass
def allocate(self, dut_configuration_list, args=None):
'''
Allocates resources from available local devices.
:param dut_configuration_list: List of ResourceRequirements objects
:param args: Not used
:return: AllocationContextList with allocated resources
'''
pass
def release(self, dut=None):
'''
Resource releasing is not necessary. Not implemented.
:param dut: Not used
:return: Nothing
'''
pass
def _allocate(self, dut_configuration):
'''
Internal allocation function. Allocates a single resource based on dut_configuration.
:param dut_configuration: ResourceRequirements object which describes a required resource
:return: True
:raises: AllocationError if suitable resource was not found or if the platform was not
allowed to be used.
'''
pass
| 8 | 6 | 22 | 2 | 14 | 6 | 5 | 0.47 | 1 | 7 | 4 | 0 | 6 | 2 | 6 | 11 | 140 | 15 | 86 | 20 | 78 | 40 | 77 | 19 | 70 | 16 | 2 | 3 | 31 |
2,071 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/default_parsers.py
|
icetea_lib.Plugin.plugins.default_parsers.DefaultParsers
|
class DefaultParsers(PluginBase):
"""
Default parsers as a plugin.
"""
def __init__(self):
super(DefaultParsers, self).__init__()
def get_parsers(self):
return {}
|
class DefaultParsers(PluginBase):
'''
Default parsers as a plugin.
'''
def __init__(self):
pass
def get_parsers(self):
pass
| 3 | 1 | 2 | 0 | 2 | 0 | 1 | 0.6 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 9 | 9 | 1 | 5 | 3 | 2 | 3 | 5 | 3 | 2 | 1 | 2 | 0 | 2 |
2,072 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/plugin_localallocator.py
|
icetea_lib.Plugin.plugins.plugin_localallocator.LocalAllocatorPlugin
|
class LocalAllocatorPlugin(RunPluginBase):
"""
Plugin interface for the LocalAllocator allocator.
"""
def __init__(self):
super(LocalAllocatorPlugin, self).__init__()
def get_allocators(self):
"""
Get plugin descriptor for the LocalAllocator.
:return: dict
"""
return {"LocalAllocator": LocalAllocator}
|
class LocalAllocatorPlugin(RunPluginBase):
'''
Plugin interface for the LocalAllocator allocator.
'''
def __init__(self):
pass
def get_allocators(self):
'''
Get plugin descriptor for the LocalAllocator.
:return: dict
'''
pass
| 3 | 2 | 5 | 1 | 2 | 2 | 1 | 1.4 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 4 | 14 | 2 | 5 | 3 | 2 | 7 | 5 | 3 | 2 | 1 | 2 | 0 | 2 |
2,073 |
ARMmbed/icetea
|
ARMmbed_icetea/icetea_lib/Plugin/plugins/plugin_tests/test_httpapi.py
|
icetea_lib.Plugin.plugins.plugin_tests.test_httpapi.MockedRequestsResponse
|
class MockedRequestsResponse(object):
"""
Mocked Response object.
"""
def __init__(self, status_code=200, json_data=None):
self.json_data = json_data if json_data else {"key1": "value1"}
self.status_code = status_code
self.url = ''
self.headers = {"head": "ers"}
self.text = "This is test text"
self.request = self
def json(self):
return self.json_data
|
class MockedRequestsResponse(object):
'''
Mocked Response object.
'''
def __init__(self, status_code=200, json_data=None):
pass
def json(self):
pass
| 3 | 1 | 5 | 0 | 5 | 0 | 2 | 0.3 | 1 | 0 | 0 | 0 | 2 | 6 | 2 | 2 | 14 | 1 | 10 | 9 | 7 | 3 | 10 | 9 | 7 | 2 | 1 | 0 | 3 |
2,074 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_dutinformation.py
|
test.test_dutinformation.DutInfoTestcase
|
class DutInfoTestcase(unittest.TestCase):
def setUp(self):
self.dut1 = DutInformation("plat1", "12345", "1", "vendor")
self.dut2 = DutInformation("plat1", "23456", "2", "vendor")
self.dut3 = DutInformation("plat2", "34567", "3", "vendor")
array_of_duts = [self.dut1, self.dut2, self.dut3]
self.testlist = DutInformationList(array_of_duts)
self.emptylist = DutInformationList()
def test_constuction(self):
lst = DutInformationList()
lst.append(self.dut1)
self.assertEqual(len(lst), 1)
lst.append(self.dut2)
lst.append(self.dut3)
self.assertEqual(len(lst), 3)
def test_dutmodel_gets(self):
lst = self.testlist.get_uniq_list_dutmodels()
self.assertEqual(len(lst), 2)
self.assertListEqual(lst, ["plat1", "plat2"])
self.assertEqual(self.testlist.get_uniq_string_dutmodels(), "plat1,plat2")
self.assertEqual(self.emptylist.get_uniq_string_dutmodels(), "", "Empty list does not "
"return correct message.")
def test_get_resourceids(self):
self.assertListEqual(self.testlist.get_resource_ids(), ['12345', '23456', '34567'])
def test_cache(self):
# pylint: disable=W0212
DutInformationList._cache = dict()
self.assertDictEqual(DutInformationList._cache, dict())
DutInformationList.push_resource_cache("test", {"a": "1"})
self.assertDictEqual(DutInformationList._cache, {"test": {"a": "1"}})
DutInformationList.get_resource_cache("test")["b"] = "2"
self.assertDictEqual(DutInformationList._cache, {"test": {"a": "1", "b": "2"}})
DutInformationList.push_resource_cache("test", {"a": "2"})
self.assertDictEqual(DutInformationList._cache, {"test": {"a": "2", "b": "2"}})
DutInformationList._cache = dict()
self.assertDictEqual(DutInformationList._cache, dict())
def test_build_sha(self):
# pylint: disable=W0212
DutInformationList._cache = dict()
info = DutInformation("plat1", "12345", "1", "vendor")
self.assertEqual(info.build_binary_sha1, None)
info.build_binary_sha1 = "123"
self.assertEqual(info.build_binary_sha1, "123")
DutInformationList._cache = dict()
|
class DutInfoTestcase(unittest.TestCase):
def setUp(self):
pass
def test_constuction(self):
pass
def test_dutmodel_gets(self):
pass
def test_get_resourceids(self):
pass
def test_cache(self):
pass
def test_build_sha(self):
pass
| 7 | 0 | 8 | 1 | 7 | 0 | 1 | 0.05 | 1 | 3 | 2 | 0 | 6 | 5 | 6 | 78 | 55 | 11 | 42 | 16 | 35 | 2 | 41 | 16 | 34 | 1 | 2 | 0 | 6 |
2,075 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_bench.py
|
test.test_bench.TestingTestcase
|
class TestingTestcase(Bench):
"""
Testcase class for testing all exception cases
"""
def __init__(self, teststep_fail=False, teststep_error=False,
name_error=False, value_error=False, kbinterrupt=False,
exception=False, inconclusive_error=False, in_setup=False,
in_case=False, in_teardown=False, test_step_timeout=False):
self.teststep_error = teststep_error
self.teststep_fail = teststep_fail
self.name_error = name_error
self.value_error = value_error
self.kbinterrupt = kbinterrupt
self.exception = exception
self.in_setup = in_setup
self.in_case = in_case
self.in_teardown = in_teardown
self.inconclusive = inconclusive_error
self.test_step_timeout = test_step_timeout
Bench.__init__(self,
name="ut_exception",
title="unittest exception in testcase",
status="development",
type="acceptance",
purpose="dummy",
requirements={
"duts": {
'*': { # requirements for all nodes
"count": 0,
}
}}
)
def raise_exc(self):
if self.teststep_fail:
raise TestStepFail("This is a TestStepFail")
if self.teststep_error:
raise TestStepError("This is a TestStepError")
elif self.name_error:
raise NameError("This is a NameError")
elif self.value_error:
raise ValueError("This is a ValueError")
elif self.exception:
raise Exception("This is a generic exception")
elif self.kbinterrupt:
raise KeyboardInterrupt()
elif self.inconclusive:
raise InconclusiveError("This will result in an inconclusive retcode.")
elif self.test_step_timeout:
raise TestStepTimeout("This is TestStepTimeout")
def setup(self):
self.args.silent = True
if self.in_setup:
self.raise_exc()
def case(self):
if self.in_case:
self.raise_exc()
def teardown(self):
if self.in_teardown:
self.raise_exc()
|
class TestingTestcase(Bench):
'''
Testcase class for testing all exception cases
'''
def __init__(self, teststep_fail=False, teststep_error=False,
name_error=False, value_error=False, kbinterrupt=False,
exception=False, inconclusive_error=False, in_setup=False,
in_case=False, in_teardown=False, test_step_timeout=False):
pass
def raise_exc(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 6 | 1 | 11 | 0 | 11 | 0 | 3 | 0.07 | 1 | 8 | 4 | 0 | 5 | 11 | 5 | 111 | 63 | 4 | 56 | 20 | 47 | 4 | 35 | 17 | 29 | 9 | 3 | 1 | 16 |
2,076 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_events.py
|
test.test_events.EventTestcase
|
class EventTestcase(unittest.TestCase):
def test_resolve_match_data(self):
event_object = mock.MagicMock()
callback = mock.MagicMock()
event_flag = EventFlag()
event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED, "test", event_object,
flag=event_flag, callback=callback)
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object, "test")
callback.assert_called_once()
obj, = callback.call_args[0]
self.assertEqual(obj.ref, event_object)
self.assertEqual(obj.event_data, "test")
self.assertEqual(obj.match, "test")
self.assertTrue(event_flag.isSet())
event_flag.clear()
callback.reset_mock()
# Recreate matcher because it forgets itself once it has matched once.
event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED, "regex:test*", event_object,
flag=event_flag, callback=callback)
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object, "nothing")
self.assertFalse(event_flag.isSet())
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object, "test1")
callback.assert_called_once()
obj, = callback.call_args[0]
self.assertEqual(obj.ref, event_object)
self.assertEqual(obj.event_data, "test1")
self.assertIsInstance(obj.match, MATCH_TYPE)
self.assertTrue(event_flag.isSet())
event_flag.clear()
callback.reset_mock()
event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED, "regex:test:[0-9]",
event_object, flag=event_flag, callback=callback)
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object, "test")
self.assertFalse(event_flag.isSet())
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object, "test:1")
callback.assert_called_once()
obj, = callback.call_args[0]
self.assertEqual(obj.ref, event_object)
self.assertEqual(obj.event_data, "test:1")
self.assertIsInstance(obj.match, MATCH_TYPE)
self.assertTrue(event_flag.isSet())
def test_resolve_data_no_caller(self):
event_object = mock.MagicMock()
event_callback = mock.MagicMock()
event_flag = EventFlag()
event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED, "test",
caller=None, flag=event_flag, callback=event_callback)
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object, "test")
event_callback.assert_called_once()
obj, = event_callback.call_args[0]
self.assertEqual(obj.ref, event_object)
self.assertEqual(obj.event_data, "test")
self.assertEqual(obj.match, "test")
self.assertTrue(event_flag.isSet())
def test_resolve_data_decodefail(self):
event_object = mock.MagicMock()
event_callback = mock.MagicMock()
event_flag = EventFlag()
if IS_PYTHON3:
event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED,
"\x00\x00\x00\x00\x00\x00\x01\xc8", event_object,
flag=event_flag, callback=event_callback)
else:
event_matcher = EventMatcher(EventTypes.DUT_LINE_RECEIVED,
repr("\x00\x00\x00\x00\x00\x00\x01\xc8"), event_object,
flag=event_flag, callback=event_callback)
event = Event(EventTypes.DUT_LINE_RECEIVED, event_object,
"\x00\x00\x00\x00\x00\x00\x01\xc8")
event_callback.assert_called_once()
obj, = event_callback.call_args[0]
self.assertEqual(obj.ref, event_object)
self.assertEqual(obj.event_data, "\x00\x00\x00\x00\x00\x00\x01\xc8")
if IS_PYTHON3:
match_equal = "\x00\x00\x00\x00\x00\x00\x01\xc8"
else:
match_equal = repr("\x00\x00\x00\x00\x00\x00\x01\xc8")
self.assertEqual(obj.match, match_equal)
self.assertTrue(event_flag.isSet())
event_flag.clear()
def test_observer(self): # pylint: disable=no-self-use
obs = Observer()
callback = mock.MagicMock()
obs.observe(EventTypes.DUT_LINE_RECEIVED, callback)
callback.assert_not_called()
event = Event(2, "data")
callback.assert_not_called()
event = Event(EventTypes.DUT_LINE_RECEIVED, "data")
callback.assert_called_once_with("data")
|
class EventTestcase(unittest.TestCase):
def test_resolve_match_data(self):
pass
def test_resolve_data_no_caller(self):
pass
def test_resolve_data_decodefail(self):
pass
def test_observer(self):
pass
| 5 | 0 | 22 | 0 | 22 | 1 | 2 | 0.02 | 1 | 4 | 4 | 0 | 4 | 0 | 4 | 76 | 92 | 4 | 87 | 27 | 82 | 2 | 76 | 27 | 71 | 3 | 2 | 1 | 6 |
2,077 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_pluginmanager.py
|
test.test_pluginmanager.PMTestcase
|
class PMTestcase(unittest.TestCase):
def test_load_defaults(self):
bench = mock.MagicMock(spec=[])
bench.logger = mock.MagicMock(return_value=mock.MagicMock())
resp_parser = mock.MagicMock()
resp_parser.append = mock.MagicMock()
resp_parser.has_parser = mock.MagicMock(return_value=False)
pluginmanager = PluginManager(bench=bench, responseparser=resp_parser)
pluginmanager.load_default_tc_plugins()
pluginmanager.load_default_run_plugins()
length = len(default_plugins)
self.assertEqual(len(pluginmanager.registered_plugins), length)
def test_register_all_tc_types(self):
# Set up mocks
plugin_class = mock.MagicMock()
plugin_class.init = mock.MagicMock()
plugin_class.get_bench_api = mock.MagicMock()
plugin_class.get_parsers = mock.MagicMock()
plugin_class.get_external_services = mock.MagicMock()
mock_bench = mock.MagicMock(spec=[])
mock_bench.logger = mock.MagicMock(return_value=mock.MagicMock())
mock_bench_function = mock.MagicMock()
mock_parser = mock.MagicMock()
plugin_class.get_bench_api.return_value = {
"mock_func": mock_bench_function}
plugin_class.get_external_services.return_value = {
"mock_class": mock.MagicMock}
plugin_class.get_parsers.return_value = {"mock_parser": mock_parser}
mock_parsermanager = mock.MagicMock()
mock_parsermanager.add_parser = mock.MagicMock()
mock_parsermanager.has_parser = mock.MagicMock(return_value=False)
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
pluginmanager.register_tc_plugins("test_plugin", plugin_class)
# Asserts
self.assertEqual(len(pluginmanager.registered_plugins), 1)
self.assertEqual(pluginmanager.registered_plugins[0], "test_plugin")
self.assertEqual(len(pluginmanager._external_services), 1)
mock_parsermanager.has_parser.assert_called_once_with("mock_parser")
mock_parsermanager.add_parser.assert_called_once_with(
"mock_parser", mock_parser)
def test_register_and_start_service(self):
# Set up mocks
plugin_class = mock.MagicMock()
plugin_class.init = mock.MagicMock()
plugin_class.get_bench_api = mock.MagicMock()
plugin_class.get_parsers = mock.MagicMock()
plugin_class.get_external_services = mock.MagicMock()
mock_bench = mock.MagicMock(spec=[])
mock_bench.logger = mock.MagicMock(return_value=mock.MagicMock())
plugin_class.get_bench_api.return_value = None
mock_class = mock.MagicMock()
plugin_class.get_external_services.return_value = {
"mock_class": mock_class}
plugin_class.get_parsers.return_value = None
mock_parsermanager = mock.MagicMock()
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
pluginmanager.register_tc_plugins("test_plugin", plugin_class)
pluginmanager.start_external_service("mock_class")
self.assertEqual(len(pluginmanager._started_services), 1)
pluginmanager.stop_external_services()
self.assertEqual(len(pluginmanager._started_services), 0)
self.assertEqual(len(pluginmanager._external_services), 1)
mock_class.assert_called_once()
def test_start_service_raises_exception(self): # pylint: disable=invalid-name
# Set up mocks
plugin_class = mock.MagicMock()
plugin_class.init = mock.MagicMock()
plugin_class.get_bench_api = mock.MagicMock()
plugin_class.get_parsers = mock.MagicMock()
plugin_class.get_external_services = mock.MagicMock()
mock_bench = mock.MagicMock(spec=[])
mock_bench.logger = mock.MagicMock(return_value=mock.MagicMock())
plugin_class.get_bench_api.return_value = None
mocked_service = mock.MagicMock()
mock_class = mock.MagicMock(return_value=mocked_service)
mocked_service.start = mock.MagicMock()
mocked_service.start.side_effect = [PluginException]
plugin_class.get_external_services.return_value = {
"mock_class": mock_class}
plugin_class.get_parsers.return_value = None
mock_parsermanager = mock.MagicMock()
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
pluginmanager.register_tc_plugins("test_plugin", plugin_class)
with self.assertRaises(PluginException):
pluginmanager.start_external_service("mock_class")
mocked_service.start.assert_called_once()
def test_register_start_stop_service(self): # pylint: disable=invalid-name
plugin_class = mock.MagicMock()
plugin_class.init = mock.MagicMock()
plugin_class.get_bench_api = mock.MagicMock()
plugin_class.get_parsers = mock.MagicMock()
plugin_class.get_external_services = mock.MagicMock()
mock_bench = mock.MagicMock(spec=[])
mock_bench.logger = mock.MagicMock(return_value=mock.MagicMock())
plugin_class.get_bench_api.return_value = None
mocked_service = mock.MagicMock()
mocked_service.start = mock.MagicMock()
mocked_service.stop = mock.MagicMock(side_effect=[PluginException])
mock_class = mock.MagicMock(return_value=mocked_service)
plugin_class.get_external_services.return_value = {
"mock_class": mock_class}
plugin_class.get_parsers.return_value = None
mock_parsermanager = mock.MagicMock()
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
pluginmanager.register_tc_plugins("test_plugin", plugin_class)
pluginmanager.start_external_service("mock_class")
self.assertEqual(len(pluginmanager._started_services), 1)
pluginmanager.stop_external_services()
self.assertEqual(len(pluginmanager._started_services), 0)
self.assertEqual(len(pluginmanager._external_services), 1)
mock_class.assert_called_once()
def test_register_raises_pluginexception(self): # pylint: disable=invalid-name
plugin_class = mock.MagicMock()
plugin_class.init = mock.MagicMock()
plugin_class.get_bench_api = mock.MagicMock()
mock_bench = mock.MagicMock(spec=[])
mock_bench.logger = mock.MagicMock(return_value=mock.MagicMock())
mock_bench_function = mock.MagicMock()
plugin_class.get_bench_api.return_value = {
"mock_func": mock_bench_function}
mock_parsermanager = mock.MagicMock()
mock_parsermanager.add_parser = mock.MagicMock()
mock_parsermanager.has_parser = mock.MagicMock(return_value=False)
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
pluginmanager.registered_plugins = ["test_plugin"]
with self.assertRaises(PluginException):
pluginmanager.register_tc_plugins("test_plugin", plugin_class)
def test_load_custom_plugins(self): # pylint: disable=no-self-use
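        # Loading custom test case plugins from a file should register them exactly once.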
modules = sys.modules
mock_bench = mock.MagicMock(spec=[])
mock_parsermanager = mock.MagicMock()
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
pluginmanager.register_tc_plugins = mock.MagicMock()
pluginmanager.load_custom_tc_plugins(os.path.join(os.path.dirname(os.path.abspath(
__file__)), "test_plugin/load_test_plugins.py"))
sys.modules = modules
pluginmanager.register_tc_plugins.assert_called_once()
@mock.patch("icetea_lib.Plugin.PluginManager.importlib")
def test_load_custom_plugin_exception(self, mock_importer): # pylint: disable=invalid-name
mock_bench = mock.MagicMock(spec=[])
mock_parsermanager = mock.MagicMock()
mock_importer.import_module = mock.MagicMock(side_effect=[ImportError])
pluginmanager = PluginManager(
bench=mock_bench, responseparser=mock_parsermanager)
with self.assertRaises(PluginException):
pluginmanager.load_custom_tc_plugins(os.path.join(os.path.dirname(os.path.abspath(
__file__)), "test_plugin/load_test_plugins.py"))
|
class PMTestcase(unittest.TestCase):
def test_load_defaults(self):
pass
def test_register_all_tc_types(self):
pass
def test_register_and_start_service(self):
pass
def test_start_service_raises_exception(self):
pass
def test_register_start_stop_service(self):
pass
def test_register_raises_pluginexception(self):
pass
def test_load_custom_plugins(self):
pass
@mock.patch("icetea_lib.Plugin.PluginManager.importlib")
def test_load_custom_plugin_exception(self, mock_importer):
pass
| 10 | 0 | 19 | 2 | 17 | 1 | 1 | 0.07 | 1 | 3 | 2 | 0 | 8 | 0 | 8 | 80 | 165 | 26 | 135 | 49 | 125 | 9 | 132 | 48 | 123 | 1 | 2 | 1 | 8 |
2,078 |
ARMmbed/icetea
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_icetea/test/test_resourceprovider.py
|
test.test_resourceprovider.RPTestcase
|
class RPTestcase(unittest.TestCase):
def test_init(self, mock_rplogger_get, mock_logman):
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
self.res_pro = ResourceProvider(MockArgs())
mock_logman.get_resourceprovider_logger.assert_called_once_with("ResourceProvider",
"RSP", True)
def test_init_with_no_file_logging(self, mock_rplogger_get, mock_logman):
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
mock_arguments = MockArgs()
mock_arguments.list = True
self.res_pro = ResourceProvider(mock_arguments)
mock_logman.get_resourceprovider_logger.assert_called_once_with("ResourceProvider",
"RSP", False)
def test_init_with_list(self, mock_rplogger_get, mock_logman):
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
args = MockArgs()
args.list = True
self.res_pro = ResourceProvider(args)
mock_logman.get_resourceprovider_logger.assert_called_once_with("ResourceProvider",
"RSP", False)
def test_allocate_duts_errors(self, mock_rplogger_get, mock_logman):
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
self.res_pro = ResourceProvider(MockArgs())
mock_resconf = mock.MagicMock()
mock_pluginmanager = mock.MagicMock()
mock_pluginmanager.get_allocator = mock.MagicMock(
return_value=MockAllocator)
self.res_pro.set_pluginmanager(mock_pluginmanager)
mock_resconf.count_hardware = mock.MagicMock(return_value=0)
mock_resconf.get_dut_configuration = mock.MagicMock(return_value=[])
mock_resconf.count_duts = mock.MagicMock(return_value=0)
self.res_pro._duts = [MockDut()]
self.res_pro._resource_configuration = mock_resconf
# Test raise when allocation fails
with self.assertRaises(ResourceInitError):
self.res_pro.allocate_duts(mock_resconf)
def test_allocate_duts_success(self, mock_rplogger_get, mock_logman):
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
self.res_pro = ResourceProvider(MockArgs())
mock_resconf = mock.MagicMock()
mock_resconf.count_hardware = mock.MagicMock(return_value=1)
mock_resconf.get_dut_configuration = mock.MagicMock(
return_value=[mock.MagicMock()])
mock_resconf.count_duts = mock.MagicMock(return_value=1)
self.res_pro._duts = [MockDut()]
self.res_pro._resource_configuration = mock_resconf
self.res_pro.allocator = mock.MagicMock()
self.res_pro.allocator.allocate = mock.MagicMock()
self.res_pro.allocate_duts(mock_resconf)
self.res_pro.allocator.allocate.assert_called_once_with(mock_resconf,
args=self.res_pro.args)
def test_allocator_get(self, mock_rplogger_get, mock_logman):
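        # The allocator is fetched from the plugin manager; if none is available, allocate_duts raises ResourceInitError.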
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
m_args = MockArgs()
mock_resconf = mock.MagicMock()
mock_resconf.count_hardware = mock.MagicMock(return_value=1)
mock_resconf.get_dut_configuration = mock.MagicMock(
return_value=[mock.MagicMock()])
mock_resconf.count_duts = mock.MagicMock(return_value=1)
self.res_pro = ResourceProvider(m_args)
self.res_pro._resource_configuration = mock_resconf
mock_pluginmanager = mock.MagicMock()
self.res_pro.set_pluginmanager(mock_pluginmanager)
mock_allocator = mock.MagicMock()
mock_pluginmanager.get_allocator = mock.MagicMock(
side_effect=[mock_allocator, None])
self.res_pro.allocate_duts(mock_resconf)
mock_allocator.assert_called_once_with(m_args, None, dict())
self.res_pro.allocator = None
with self.assertRaises(ResourceInitError):
self.res_pro.allocate_duts(mock_resconf)
def test_config_file_reading(self, mock_rplogger_get, mock_logman):
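        # _read_allocator_config should return the allocator-specific section of the configuration file.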
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
filepath = os.path.abspath(os.path.join(__file__, os.path.pardir, "tests",
"allocator_config.json"))
self.res_pro = ResourceProvider(MockArgs())
with open(filepath, "r") as cfg_file:
test_data = json.load(cfg_file)
self.res_pro = ResourceProvider(MockArgs())
retval = self.res_pro._read_allocator_config("testallocator", filepath)
        self.assertEqual(retval, test_data.get("testallocator"))
@mock.patch("icetea_lib.ResourceProvider.ResourceProvider.json")
def test_config_file_errors(self, mock_rplogger_get, mock_logman, mocked_json):
mock_logman.get_resourceprovider_logger = mock.MagicMock(
return_value=MockLogger())
self.res_pro = ResourceProvider(MockArgs())
with self.assertRaises(ResourceInitError):
self.res_pro._read_allocator_config("generic", "does_not_exist")
with self.assertRaises(ResourceInitError):
not_a_file = os.path.abspath(
os.path.join(__file__, os.path.pardir, "tests"))
self.res_pro._read_allocator_config("generic", not_a_file)
with self.assertRaises(ResourceInitError):
no_config_here = os.path.abspath(os.path.join(__file__, os.path.pardir, "suites",
"dummy_suite.json"))
self.res_pro._read_allocator_config("generic", no_config_here)
with self.assertRaises(ResourceInitError):
mocked_json.load = mock.MagicMock()
mocked_json.load.side_effect = [ValueError]
filepath = os.path.abspath(os.path.join(__file__, os.path.pardir, "tests",
"allocator_config.json"))
self.res_pro._read_allocator_config("testallocator", filepath)
def tearDown(self):
self.res_pro.cleanup()
self.res_pro.__metaclass__._instances.clear()
|
class RPTestcase(unittest.TestCase):
def test_init(self, mock_rplogger_get, mock_logman):
pass
def test_init_with_no_file_logging(self, mock_rplogger_get, mock_logman):
pass
def test_init_with_list(self, mock_rplogger_get, mock_logman):
pass
def test_allocate_duts_errors(self, mock_rplogger_get, mock_logman):
pass
def test_allocate_duts_success(self, mock_rplogger_get, mock_logman):
pass
def test_allocator_get(self, mock_rplogger_get, mock_logman):
pass
def test_config_file_reading(self, mock_rplogger_get, mock_logman):
pass
@mock.patch("icetea_lib.ResourceProvider.ResourceProvider.json")
def test_config_file_errors(self, mock_rplogger_get, mock_logman, mocked_json):
pass
def tearDown(self):
pass
| 11 | 0 | 12 | 1 | 11 | 0 | 1 | 0.01 | 1 | 8 | 6 | 0 | 9 | 1 | 9 | 81 | 116 | 17 | 98 | 28 | 87 | 1 | 90 | 26 | 80 | 1 | 2 | 1 | 9 |
2,079 |
ARMmbed/icetea
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_icetea/test/test_testcasecontainer.py
|
test.test_testcasecontainer.TCContainerTestcase
|
class TCContainerTestcase(unittest.TestCase):
def setUp(self):
with open(os.path.join("./icetea_lib", 'tc_schema.json')) as data_file:
self.tc_meta_schema = json.load(data_file)
self.args_tc = argparse.Namespace(
available=False, version=False, bin=None, binary=False, channel=None,
clean=False, cloud=False, component=False, device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False, iface=None, kill_putty=False, list=False,
listsuites=False, log='./log', my_duts=None, nobuf=None,
pause_when_external_dut=False, putty=False, reset=False, silent=True,
skip_case=False, skip_rampdown=False, skip_rampup=False,
status=False, suite=None, tc="test_cmdline", tc_cfg=None, tcdir="examples",
testtype=False, type="process", subtype=None, use_sniffer=False,
valgrind=False, valgrind_tool=None, verbose=False, repeat=0, feature=None,
suitedir="./test/suites", forceflash_once=True, forceflash=True,
stop_on_failure=False, ignore_invalid_params=False)
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.load_class")
def test_load_testcase_fails(self, mock_loadclass):
testcase = TestcaseContainer.find_testcases("examples.test_cmdline", "./examples",
self.tc_meta_schema)[0]
with self.assertRaises(TypeError):
testcase._load_testcase(1)
mock_loadclass.side_effect = [ValueError, None]
with self.assertRaises(ImportError):
testcase._load_testcase("test_case")
with self.assertRaises(ImportError):
testcase._load_testcase("test_case")
def test_check_major_version(self):
testcase = TestcaseContainer.find_testcases("examples.test_cmdline", "examples",
self.tc_meta_schema)[0]
self.assertFalse(testcase._check_major_version("1.0.0", "0.9.1"))
self.assertFalse(testcase._check_major_version("1.0.0", ">0.0.2"))
self.assertFalse(testcase._check_major_version("1.0.0", ">=0.0.3"))
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.get_fw_version")
def test_version_checker(self, mock_fwver):
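        # _check_version returns None for compatible framework versions and a skip result on mismatch.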
mock_fwver.return_value = "0.9.0"
testcase = TestcaseContainer.find_testcases("examples.test_cmdline", "examples",
self.tc_meta_schema)[0]
self.assertIsNone(testcase._check_version(
MockInstance("Icetea", "0.9.0")))
res = testcase._check_version(MockInstance("Icetea", "0.2.2"))
self.assertEqual(res.get_verdict(), "skip")
mock_fwver.return_value = "0.2.2"
self.assertIsNone(testcase._check_version(
MockInstance("Icetea", "<0.9.0")))
res = testcase._check_version(MockInstance("Icetea", ">0.9.0"))
self.assertEqual(res.get_verdict(), "skip")
mock_fwver.return_value = "0.9.0"
self.assertIsNone(testcase._check_version(
MockInstance("Icetea", ">=0.9.0")))
mock_fwver.return_value = "0.9.1"
self.assertIsNone(testcase._check_version(
MockInstance("Icetea", ">=0.9.0")))
def test_check_skip(self):
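        # _check_skip returns a falsy value when the case is not skipped and a skip result when skip information is set.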
testcase = TestcaseContainer.find_testcases("examples.test_cmdline", "./examples",
self.tc_meta_schema)[0]
res = testcase._check_skip(MockInstance("test", "0.9.0", "process"))
self.assertFalse(res)
self.assertFalse(testcase._check_skip(MockInstance("test", "0.9.0", "hardware",
skip_val=False)))
res = testcase._check_skip(MockInstance(
"test", "0.9.0", "process", True, {"test": "test"}))
self.assertEqual(res.get_verdict(), "skip")
def test_find_testcases(self):
lst = TestcaseContainer.find_testcases("test.testbase.dummy_multiples", "./test/testbase",
self.tc_meta_schema)
self.assertEqual(len(lst), 2)
lst = TestcaseContainer.find_testcases("test.testbase.dummy", "./test/testbase",
self.tc_meta_schema)
self.assertEqual(len(lst), 1)
with self.assertRaises(TypeError):
TestcaseContainer.find_testcases(
1, "./test/testbase", self.tc_meta_schema)
with self.assertRaises(ValueError):
TestcaseContainer.find_testcases(
"", "./test/testbase", self.tc_meta_schema)
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.import_module")
def test_find_testcases_error(self, mocked_import):
mocked_import.side_effect = [ImportError]
with self.assertRaises(ImportError):
lst = TestcaseContainer.find_testcases("test.testbase.dummy_multiples",
"./test/testbase",
self.tc_meta_schema)
def test_create_new_bench_instance(self):
lst = TestcaseContainer.find_testcases("test.testbase.dummy", "./test/testbase",
self.tc_meta_schema)
inst = lst[0]._create_new_bench_instance("test.testbase.dummy")
self.assertTrue(isinstance(inst, Bench))
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer.get_instance")
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer._check_version")
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer._check_skip")
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.get_tc_arguments")
def test_run(self, mock_parser, mock_skip, mock_version, mock_instance):
testcase = TestcaseContainer.find_testcases("examples.test_cmdline", "./examples",
self.tc_meta_schema)[0]
# Initialize mocks
parser = mock.MagicMock()
instance = mock.MagicMock()
instance.run = mock.MagicMock()
instance.run.return_value = 0
instance.get_result = mock.MagicMock()
instance.get_result.return_value = Result()
mock_instance.return_value = instance
mock_skip.return_value = None
mock_version.return_value = None
parser.parse_known_args = mock.MagicMock()
parser.parse_known_args.return_value = (mock.MagicMock(), [])
mock_parser.return_value = parser
        # Mocked a successful run
testcase.run()
# Skip returns 1, tc should be skipped
mock_skip.return_value = 1
mock_version.return_value = None
self.assertEqual(testcase.status, TestStatus.FINISHED)
testcase.run()
# Version mismatch
mock_skip.return_value = None
mock_version.return_value = 1
self.assertEqual(testcase.status, TestStatus.FINISHED)
testcase.run()
# Unknown arguments
mock_version.return_value = None
parser.parse_known_args.return_value = (self.args_tc, [1])
res = testcase.run()
self.assertEqual(testcase.status, TestStatus.FINISHED)
self.assertEqual(res.get_verdict(), 'inconclusive')
result = Result()
result.retcode = 1012
instance.get_result.return_value = result
instance.run.return_value = 1012
parser.parse_known_args.return_value = (mock.MagicMock(), [])
testcase.run()
|
class TCContainerTestcase(unittest.TestCase):
def setUp(self):
pass
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.load_class")
def test_load_testcase_fails(self, mock_loadclass):
pass
def test_check_major_version(self):
pass
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.get_fw_version")
def test_version_checker(self, mock_fwver):
pass
def test_check_skip(self):
pass
def test_find_testcases(self):
pass
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.import_module")
def test_find_testcases_error(self, mocked_import):
pass
def test_create_new_bench_instance(self):
pass
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer.get_instance")
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer._check_version")
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.TestcaseContainer._check_skip")
@mock.patch("icetea_lib.TestSuite.TestcaseContainer.get_tc_arguments")
def test_run(self, mock_parser, mock_skip, mock_version, mock_instance):
pass
| 17 | 0 | 14 | 1 | 12 | 1 | 1 | 0.04 | 1 | 9 | 5 | 0 | 9 | 2 | 9 | 81 | 143 | 18 | 120 | 32 | 103 | 5 | 91 | 27 | 81 | 1 | 2 | 1 | 9 |
2,080 |
ARMmbed/icetea
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_icetea/test/test_testcaselist.py
|
test.test_testcaselist.TCListTestcase
|
class TCListTestcase(unittest.TestCase):
def setUp(self):
with open(os.path.join("./icetea_lib", 'tc_schema.json')) as data_file:
self.tc_meta_schema = json.load(data_file)
def test_append_and_len(self):
testcase = TestcaseContainer.find_testcases("examples.test_cmdline",
"examples", self.tc_meta_schema)
tlist = TestcaseList()
tlist.append(testcase[0])
self.assertEqual(len(tlist), 1)
tlist.append(testcase[0])
self.assertEqual(len(tlist), 2)
@mock.patch("icetea_lib.TestSuite.TestcaseList.TestcaseContainer.find_testcases")
def test_parse_local_testcases_exceptions(self, mock_finder): # pylint: disable=invalid-name
mock_finder.side_effect = [
IndexError, TypeError, ValueError, ImportError, [1], [2]]
lst = TestcaseList()
self.assertEqual(len(lst._parse_local_testcases("", False)), 0)
lst._parse_local_testcases(
["1234", "1234", "1234", "1234", "1234"], False)
def test_filtering_adds_dummycontainers(self): # pylint: disable=invalid-name
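        # Names that do not match a found test case are replaced with DummyContainer placeholders, keeping list length intact.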
filt = mock.MagicMock()
filt.match = mock.MagicMock()
filt.match.side_effect = [True, False, True]
filt.get_filter = mock.MagicMock(
return_value={"list": [0, 0, 0], "name": False})
tclist = TestcaseList()
mock_tc = mock.MagicMock()
tcname = mock.PropertyMock(return_value="test_case")
type(mock_tc).tcname = tcname
tclist.append(mock_tc)
new_list = tclist.filter(
filt, ["test_case", "test_case_2", "test_case"])
self.assertEqual(len(new_list), 3)
self.assertTrue(isinstance(new_list.get_list()[1], DummyContainer))
self.assertFalse(isinstance(new_list.get_list()[0], DummyContainer))
self.assertFalse(isinstance(new_list.get_list()[2], DummyContainer))
@mock.patch("icetea_lib.TestSuite.TestcaseList.TestcaseContainer.find_testcases")
def test_import_error_store(self, mock_finder):
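        # ImportErrors raised during local test case parsing are stored in search_errors instead of propagating.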
mock_finder.side_effect = [ImportError]
tclist = TestcaseList()
self.assertEqual(len(tclist.search_errors), 0)
tclist._parse_local_testcases([["examples.test_cmdline", "examples",
"examples/test_cmdline.py"]], False)
self.assertEqual(len(tclist.search_errors), 1)
|
class TCListTestcase(unittest.TestCase):
def setUp(self):
pass
def test_append_and_len(self):
pass
@mock.patch("icetea_lib.TestSuite.TestcaseList.TestcaseContainer.find_testcases")
def test_parse_local_testcases_exceptions(self, mock_finder):
pass
def test_filtering_adds_dummycontainers(self):
pass
@mock.patch("icetea_lib.TestSuite.TestcaseList.TestcaseContainer.find_testcases")
def test_import_error_store(self, mock_finder):
pass
| 8 | 0 | 8 | 0 | 8 | 0 | 1 | 0.05 | 1 | 8 | 3 | 0 | 5 | 1 | 5 | 77 | 46 | 5 | 41 | 19 | 33 | 2 | 37 | 16 | 31 | 1 | 2 | 1 | 5 |
2,081 |
ARMmbed/icetea
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_icetea/test/test_testsuite.py
|
test.test_testsuite.TestSuiteTestcase
|
class TestSuiteTestcase(unittest.TestCase):
def setUp(self):
with open(os.path.join("./icetea_lib", 'tc_schema.json')) as data_file:
self.tc_meta_schema = json.load(data_file)
testpath = os.path.dirname(os.path.abspath(__file__))
self.testdir = os.path.join(testpath, 'testbase')
self.args_noprint = argparse.Namespace(
available=False, version=False, bin=None, binary=False, channel=None,
clean=False, cloud=False, component=False, device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False, iface=None, kill_putty=False, list=True,
listsuites=False, log='./log', my_duts=None, nobuf=None,
pause_when_external_dut=False,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False,
status=False, suite=False, tc='all', tc_cfg=None, tcdir=self.testdir,
testtype=False, type=None, platform_filter=None, branch="",
subtype=None, use_sniffer=False, valgrind=False, valgrind_tool=None,
verbose=False, repeat=0, feature=None, json=False)
self.args_tc_no_exist = argparse.Namespace(
available=False, version=False, bin=None, binary=False, channel=None,
clean=False, cloud=False, component=False, device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False, iface=None, kill_putty=False, list=True,
listsuites=False, log='./log', my_duts=None, nobuf=None,
pause_when_external_dut=False, platform_filter=None,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False, branch="",
status=False, suite=False, tc='does_not_exist', tc_cfg=None, tcdir=self.testdir,
testtype=False, type=None, subtype=None, use_sniffer=False,
valgrind=False, valgrind_tool=None, verbose=False, repeat=0, feature=None,
json=False)
self.args_suite = argparse.Namespace(
available=False, version=False, bin=None, binary=False, channel=None,
clean=False, cloud=False, component=False, device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False, iface=None,
kill_putty=False, list=False, branch="",
listsuites=False, log='./log', my_duts=None, nobuf=None,
pause_when_external_dut=False, platform_filter=None,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False,
status=False, suite="dummy_suite.json", tc=None, tc_cfg=None,
tcdir="examples", testtype=False, type=None,
subtype=None, use_sniffer=False, valgrind=False, valgrind_tool=None,
verbose=False, repeat=2, feature=None, suitedir="./test/suites", json=False)
self.args_tc = argparse.Namespace(
available=False, version=False, bin=None, binary=False, channel=None,
clean=False, cloud=False, component=False, device='*', gdb=None, branch="",
gdbs=None, gdbs_port=2345, group=False, iface=None, kill_putty=False, list=False,
listsuites=False, log='./log', my_duts=None, nobuf=None, pause_when_external_dut=False,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False, platform_filter=None,
status=False, suite=None, tc="test_cmdline", tc_cfg=None,
tcdir="examples", testtype=False, type="process",
subtype=None, use_sniffer=False, valgrind=False,
valgrind_tool=None, verbose=False, repeat=0, feature=None,
suitedir="./test/suites", forceflash_once=True, forceflash=False,
stop_on_failure=False, json=False)
def test_create_suite_suitefile_some_not_found(self): # pylint: disable=invalid-name
suit = TestSuite(args=self.args_suite)
self.assertEqual(len(suit), 5)
def test_create_suite_list_success(self):
suit = TestSuite(args=self.args_noprint)
self.assertEqual(len(suit), 7,
"Suite length ({}) did not match expected length of {}!".format(
len(suit), 7))
self.assertEqual(suit.status, TestStatus.READY)
def test_create_suite_suitefile_fail(self): # pylint: disable=invalid-name
self.args_suite.suite = "malformed_suite.json"
with self.assertRaises(SuiteException):
suite = TestSuite(args=self.args_suite)
def test_create_suite_list_empty(self):
suit = TestSuite(args=self.args_tc_no_exist)
self.assertEqual(len(suit), 1)
def test_prepare_suite_success(self):
self.args_suite.suite = "working_suite.json"
suit = TestSuite(args=self.args_suite)
self.assertEqual(suit.status, TestStatus.READY)
testcase = suit.get_testcases().get(0)
self.assertEqual(testcase.status, TestStatus.READY)
def test_prepare_suite_merge_configs(self): # pylint: disable=invalid-name
self.args_suite.suite = "working_suite.json"
suit = TestSuite(args=self.args_suite)
tcs = suit.get_testcases()
self.assertEqual(len(tcs), 2)
testcase = tcs.get(1)
sconf = testcase.get_suiteconfig()
with open("test/suites/working_suite.json") as file_handle:
suite = json.load(file_handle)
cases = suite.get("testcases")
case2 = cases[1]
self.assertDictEqual(case2.get("config"), sconf)
conf = testcase.get_final_config()
self.assertDictEqual(
merge(testcase.get_instance().get_config(), sconf), conf)
def test_prepare_suite_merge_configs_missing_tc(self): # pylint: disable=invalid-name
self.args_suite.suite = "suite_missing_one.json"
suit = TestSuite(args=self.args_suite)
tcs = suit.get_testcases()
self.assertEqual(len(tcs), 3)
testcase = tcs.get(2)
sconf = testcase.get_suiteconfig()
with open("test/suites/suite_missing_one.json") as file_handle:
suite = json.load(file_handle)
cases = suite.get("testcases")
case2 = cases[2]
self.assertDictEqual(case2.get("config"), sconf)
conf = testcase.get_final_config()
self.assertDictEqual(
merge(testcase.get_instance().get_config(), sconf), conf)
self.assertTrue(isinstance(tcs.get(1), DummyContainer))
all_duts = conf.get("requirements").get("duts").get("*")
self.assertIsNotNone(all_duts)
self.assertIsNone(all_duts.get("should_not"))
self.assertTrue(all_duts.get("should_be") == "here")
@mock.patch("icetea_lib.TestSuite.TestSuite.TestSuite._prepare_testcase")
def test_prepare_suite_fail(self, mock_prep):
self.args_suite.suite = "working_suite.json"
mock_prep.side_effect = [TypeError, SyntaxError]
with self.assertRaises(SuiteException):
suit = TestSuite(args=self.args_suite)
with self.assertRaises(SyntaxError):
suit = TestSuite(args=self.args_suite)
def test_get_suite_tcs_success(self):
self.args_suite.suite = "working_suite.json"
suit = TestSuite(args=self.args_suite)
self.assertIsNone(suit._get_suite_tcs("dir", []))
tcs = suit._get_suite_tcs("./examples", 'all')
self.assertNotEqual(len(tcs), 0)
# Make sure that the same testcase is found twice and
# that they both have different instances.
tcs = suit._get_suite_tcs("./examples",
["sample_process_multidut_testcase",
"sample_process_multidut_testcase"])
self.assertEqual(len(tcs), 2)
@mock.patch("icetea_lib.TestSuite.TestSuite.TestcaseFilter")
def test_get_suite_tcs_errors(self, mock_filter):
self.args_suite.suite = "working_suite.json"
mock_filter.side_effect = [TypeError, {}]
with self.assertRaises(SuiteException):
suit = TestSuite(args=self.args_suite)
def test_print_list_testcases(self):
suit = TestSuite(args=self.args_noprint)
table = suit.list_testcases()
def test_get_suite_files(self):
lst = TestSuite.get_suite_files("./test/suites")
self.assertEqual(len(lst), 6)
def test_load_suite_list(self):
self.args_tc.tc = "tc_no_exist"
suit = TestSuite(args=self.args_tc)
suit._load_suite_list()
self.assertEqual(len(suit), 1)
suit.args.tcdir = "./test/suites"
suit._load_suite_list()
self.assertEqual(len(suit), 1)
suit.args.status = 2
self.assertFalse(suit._load_suite_list())
suit.args.tc = False
suit.args.status = "released"
suit.args.tcdir = "examples"
self.assertIsNot(suit._load_suite_list(), False)
@mock.patch("icetea_lib.TestSuite.TestSuite.TestSuite._create_tc_list")
def test_run(self, mock_tclist): # pylint: disable=too-many-statements
testsuite = TestSuite(args=self.args_tc)
cont1 = mock.MagicMock()
pass_result = Result()
pass_result.set_verdict('pass', 0, 10)
fail_result = Result()
fail_result.set_verdict('fail', 1000, 10)
skipped_result = Result()
skipped_result.set_verdict('skip', 0, 1)
resultlist = ResultList()
resultlist.append(pass_result)
resultlist.save = mock.MagicMock()
testsuite._default_configs["retryCount"] = 1
cont1.run.side_effect = [pass_result,
fail_result,
skipped_result,
KeyboardInterrupt,
fail_result,
pass_result]
cont_reslist = mock.MagicMock()
cont_reslist.run = mock.MagicMock()
cont_reslist.run.return_value = resultlist
# Passing result
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 1)
self.assertEqual(testsuite._results.get(0).get_verdict(), "pass")
self.assertTrue(self.args_tc.forceflash) # pylint: disable=no-member
        self.assertEqual(testsuite._results.save.call_count, 1)
# ResultList as result
testsuite._testcases = []
testsuite._testcases.append(cont_reslist)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 1)
self.assertEqual(testsuite._results.get(0).get_verdict(), "pass")
# Failing result, no retry
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 1)
self.assertEqual(testsuite._results.get(0).get_verdict(), "fail")
# skipped result
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 1)
self.assertEqual(testsuite._results.get(0).get_verdict(), "skip")
# Interrupt
cont2 = mock.MagicMock()
cont2.run = mock.MagicMock()
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._testcases.append(cont2)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 0)
cont2.run.assert_not_called()
# Failing result, retried
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite._default_configs["retryReason"] = "includeFailures"
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 2)
self.assertEqual(testsuite._results.get(0).get_verdict(), "fail")
self.assertEqual(testsuite._results.get(1).get_verdict(), "pass")
        # TC-does-not-exist result (retcode 1015), not retried
testsuite._testcases = []
contx = mock.MagicMock()
inconc_res = Result()
inconc_res.set_verdict("inconclusive", 1015, 0)
contx.run = mock.MagicMock(return_value=inconc_res)
testsuite._testcases.append(contx)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite._default_configs["retryReason"] = "includeFailures"
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 1)
self.assertEqual(testsuite._results.get(
0).get_verdict(), "inconclusive")
# Failing result, retried, from a result list.
testsuite._testcases = []
fail_result_2 = Result()
fail_result_2.set_verdict('fail', 1000, 10)
reslist = ResultList()
reslist.append(fail_result_2)
cont_retry = mock.MagicMock()
cont_retry.run.side_effect = [reslist, resultlist]
testsuite._testcases.append(cont_retry)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite._default_configs["retryReason"] = "includeFailures"
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 2)
self.assertEqual(testsuite._results.get(0).get_verdict(), "fail")
self.assertEqual(testsuite._results.get(1).get_verdict(), "pass")
self.assertEqual(testsuite._results.get(0).retries_left, 1)
self.assertEqual(testsuite._results.get(1).retries_left, 0)
# Inconclusive result, retried, from a result list.
testsuite._testcases = []
fail_result_3 = Result()
fail_result_3.set_verdict('inconclusive', 1000, 10)
reslist = ResultList()
reslist.append(fail_result_3)
cont_retry = mock.MagicMock()
cont_retry.run.side_effect = [reslist, resultlist]
testsuite._testcases.append(cont_retry)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
testsuite._default_configs["retryReason"] = "includeFailures"
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 2)
self.assertEqual(testsuite._results.get(
0).get_verdict(), "inconclusive")
self.assertEqual(testsuite._results.get(1).get_verdict(), "pass")
self.assertEqual(testsuite._results.get(0).retries_left, 1)
self.assertEqual(testsuite._results.get(1).retries_left, 0)
self.args_tc.repeat = 2
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
cont1.run.side_effect = [pass_result,
pass_result, pass_result, pass_result]
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 2)
self.assertEqual(testsuite._results.get(0).get_verdict(), "pass")
self.assertFalse(self.args_tc.forceflash) # pylint: disable=no-member
# Failing result, stop_on_failure
self.args_tc.stop_on_failure = True
self.args_tc.repeat = 1
testsuite._default_configs["retryCount"] = 0
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
cont1.run.side_effect = [pass_result]
cont2 = mock.MagicMock()
cont2.run = mock.MagicMock()
cont2.run.side_effect = [fail_result]
cont3 = mock.MagicMock()
cont3.run = mock.MagicMock()
cont3.run.side_effect = [pass_result]
testsuite._testcases.append(cont2)
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 2)
self.assertEqual(testsuite._results.get(0).get_verdict(), "pass")
self.assertEqual(testsuite._results.get(1).get_verdict(), "fail")
        self.assertEqual(testsuite._results.save.call_count, 2)
# Skipped result, stop_on_failure
self.args_tc.stop_on_failure = True
self.args_tc.repeat = 0
testsuite._testcases = []
testsuite._testcases.append(cont1)
testsuite._results = ResultList()
testsuite._results.save = mock.MagicMock()
cont1.run.side_effect = [skipped_result]
cont2 = mock.MagicMock()
cont2.run = mock.MagicMock()
cont2.run.side_effect = [pass_result]
testsuite._testcases.append(cont2)
testsuite.run()
self.assertEqual(testsuite.status, TestStatus.FINISHED)
self.assertEqual(len(testsuite._results), 2)
self.assertEqual(testsuite._results.get(0).get_verdict(), "skip")
self.assertEqual(testsuite._results.get(1).get_verdict(), "pass")
        self.assertEqual(testsuite._results.save.call_count, 2)
@mock.patch("icetea_lib.TestSuite.TestSuite.TestSuite._create_tc_list")
@mock.patch("icetea_lib.TestSuite.TestSuite.os.path")
def test_load_suite_file(self, mock_path, mock_tc):
mock_path.join = mock.MagicMock()
mock_path.join.return_value = "path"
mock_path.exists = mock.MagicMock()
mock_path.exists.return_value = False
suit = TestSuite(args=self.args_suite)
self.assertIsNone(suit._load_suite_file(1, "dir"))
self.assertIsNone(suit._load_suite_file("name", "dir"))
with mock.patch.object(suit, "cloud_module") as mock_cm:
mock_cm.get_suite = mock.MagicMock()
mock_cm.get_suite.return_value = None
self.assertIsNone(suit._load_suite_file("name", "dir"))
def test_load_suite_with_duplicate_configs(self): # pylint: disable=invalid-name
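        # A suite file containing duplicate test case configurations must raise SuiteException.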
self.args_suite.suite = "duplicates_suite.json"
with self.assertRaises(SuiteException):
suit = TestSuite(
args=self.args_suite)
|
class TestSuiteTestcase(unittest.TestCase):
def setUp(self):
pass
def test_create_suite_suitefile_some_not_found(self):
pass
def test_create_suite_list_success(self):
pass
def test_create_suite_suitefile_fail(self):
pass
def test_create_suite_list_empty(self):
pass
def test_prepare_suite_success(self):
pass
def test_prepare_suite_merge_configs(self):
pass
def test_prepare_suite_merge_configs_missing_tc(self):
pass
@mock.patch("icetea_lib.TestSuite.TestSuite.TestSuite._prepare_testcase")
def test_prepare_suite_fail(self, mock_prep):
pass
def test_get_suite_tcs_success(self):
pass
@mock.patch("icetea_lib.TestSuite.TestSuite.TestcaseFilter")
def test_get_suite_tcs_errors(self, mock_filter):
pass
def test_print_list_testcases(self):
pass
def test_get_suite_files(self):
pass
def test_load_suite_list(self):
pass
@mock.patch("icetea_lib.TestSuite.TestSuite.TestSuite._create_tc_list")
def test_run(self, mock_tclist):
pass
@mock.patch("icetea_lib.TestSuite.TestSuite.TestSuite._create_tc_list")
@mock.patch("icetea_lib.TestSuite.TestSuite.os.path")
def test_load_suite_file(self, mock_path, mock_tc):
pass
def test_load_suite_with_duplicate_configs(self):
pass
| 23 | 0 | 22 | 1 | 20 | 1 | 1 | 0.06 | 1 | 10 | 6 | 0 | 17 | 6 | 17 | 89 | 390 | 28 | 349 | 81 | 326 | 22 | 289 | 73 | 271 | 1 | 2 | 1 | 17 |
2,082 |
ARMmbed/icetea
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_icetea/test/test_wireshark.py
|
test.test_wireshark.TestVerify
|
class TestVerify(unittest.TestCase):
def test_packetparser(self):
string = open('test/data/BeaconRequest.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(NwPacket.verify( # Successful
packet,
{"WPAN": {
"Command Identifier": "Beacon Request",
"Destination Addressing Mode": "Short/16-bit",
"Destination PAN": "0xffff",
"Frame Version": "0"
}}
))
self.assertFalse(NwPacket.verify( # Unsuccessful
packet,
{"WPAN": {
"Command Identifier": "Bacon Request",
"Destination Addressing Moda": "Short/16-bit",
"Destination PAN": "0xffff"
}}
))
def test_packetparser6lowpan(self):
string = open('test/data/6lowpanping.txt', 'r').read()
p = NwPacket(string)
self.assertTrue(NwPacket.verify( # Successful
p,
{"6LOWPAN": {
"Destination": "fe80::166e:a00:0:2",
"Source address compression": "Stateless",
"Context identifier extension": "False"
}, "ICMPV6": {
"Response To": "17"
}}
))
def test_packetparser6lowpan2(self):
string = open('test/data/6lowpanping2.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(NwPacket.verify( # Successful
packet,
{"6LOWPAN": {
"Destination": "fe80::166e:a00:0:2",
"Source address compression": "Stateless",
"Context identifier extension": "False"
}, "UDP": {
"Source Port": "65534"
}, "IPV6": {
"Source": "fe80::ff:fe00:0"
}}
))
def test_packetparser6lowpan3(self):
string = open('test/data/6lowpanudp.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(
NwPacket.verify(
packet,
{
"6LOWPAN": {
"Destination": "fd00:db8::ff:fe00:face"
},
"IPV6": {
"RPLInstanceID": "0x82"
},
"UDP": {
"Destination Port": "7"
}
}
)
)
def test_packetparserfullrow(self):
string = open('test/data/rpl_dio.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(NwPacket.verify(
packet,
{
"IPV6": {
"Source": "fe80::ff:fe00:face",
"Destination": "ff02::1a"
},
"ICMPV6": {
"Code": "1",
"*1": "ICMPv6 RPL Option \(Routing Information fd00:db8::/64\)",
"*2": "ICMPv6 RPL Option \(Prefix Information fd00:db8::ff:fe00:face/64\)",
"*3": "ICMPv6 RPL Option \(Routing Information ::/0\)"
}
}
))
def test_packetparserlegacy(self):
string = open('test/data/BeaconRequest.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(NwPacket.verify(
packet,
{
"WPAN.Command Identifier": "Beacon Request",
"WPAN.Destination Addressing Mode": "Short/16-bit",
"WPAN.Destination PAN": "0xffff",
"WPAN.Frame Version": "0"
}
))
self.assertFalse(NwPacket.verify( # Unsuccessful
packet,
{
"WPAN.Command Identifier": "Bacon Request",
"WPAN.Destination Addressing Moda": "Short/16-bit",
"WPAN.Destination PAN": "0xffff"
}
))
def test_packetparsermultilayer(self):
string = open('test/data/BeaconRequest.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(NwPacket.verify( # Successful
packet,
{"WPAN": {
"Destination PAN": "0xffff"
}, "IP": {
"Protocol": "UDP"
}}
))
self.assertFalse(NwPacket.verify( # Unsuccessful
packet,
{"WPAN": {
"Destination PAN": "0xffff"
}, "IP": {
"Protocol": "XXX"
}}
))
def test_packetparserregexvalue(self):
string = open('test/data/BeaconRequest.txt', 'r').read()
packet = NwPacket(string)
self.assertTrue(NwPacket.verify(
packet,
{"WPAN": {
"Destination PAN": "0xfff[abcdef]"
}, "IP": {
"Protocol": "KGB|UDP|VH1"
}}
))
@mock.patch('icetea_lib.LogManager.get_bench_logger')
def test_packetmangager_happyday(self, loggerpatch):
loggerpatch.return_value = mock.MagicMock()
p1 = open('test/data/BeaconRequest.txt', 'r').read()
p2 = open('test/data/BeaconRequest2.txt', 'r').read()
manager = NwPacketManager()
manager.push(p1)
manager.push(p2)
self.assertTrue(manager.verifyPackets([{
"WPAN": {
"Command Identifier": "Beacon Request",
"Destination PAN": "0xffff",
"Sequence Number": "79"
}
}]) is None)
@mock.patch('icetea_lib.LogManager.get_bench_logger')
def test_packetmangager_sadday(self, loggerpatch):
loggerpatch.return_value = mock.MagicMock()
p1 = open('test/data/BeaconRequest.txt', 'r').read()
p2 = open('test/data/BeaconRequest2.txt', 'r').read()
manager = NwPacketManager()
manager.push(p1)
manager.push(p2)
with self.assertRaises(TestStepError):
self.assertTrue(manager.verifyPackets([{
"WPAN": {
"Command Identifiers": "Bacon Request",
"Destination PAN": "0xffff"
}
}]) is None)
with self.assertRaises(TestStepError):
self.assertTrue(manager.verifyPackets([{
"WPAN": {
"Command Identifier": "Beacon Request",
"Destination PAN": "0xffff",
"Sequence Number": "80"
}
}]) is None)
@mock.patch('icetea_lib.LogManager.get_bench_logger')
def test_packetcounter6lowpan(self, loggerpatch):
loggerpatch.return_value = mock.MagicMock()
p1 = open('test/data/BeaconRequest2.txt', 'r').read()
p2 = open('test/data/6lowpanping.txt', 'r').read()
p3 = open('test/data/BeaconRequest.txt', 'r').read()
p4 = open('test/data/6lowpanping2.txt', 'r').read()
manager = NwPacketManager()
manager.push(p1)
manager.push(p2)
manager.push(p3)
manager.push(p4)
self.assertTrue(manager.countPackets({
"WPAN": {
"Extended Source": "14:6e:0a:00:00:00:00:01",
},
"IPV6": {
"Destination": "fe80::166e:a00:0:2"
}
}) == 2)
'''
    # These tests need sniffer HW and a valid configuration.
def test_liveCapture(self):
wshark = Wireshark()
wshark.startCapture('wireshark.pcap')
time.sleep(5)
wshark.setMark('marker#1')
time.sleep(1)
wshark.setMark('marker#2')
self.assertIsInstance( wshark.stopCapture(), int )
self.assertEqual( wshark.findIndexByMark('start'), 0 )
self.assertEqual( wshark.findIndexByMark('unknown'), None )
self.assertGreaterEqual( wshark.findIndexByMark('marker#1'), wshark.findIndexByMark('marker#2') )
def test_wireshark(self):
capture = pyshark.LiveCapture(interface='Sniffer')
print('Start Live capturing')
capture.sniff(timeout=10)
for packet in capture.sniff_continuously(packet_count=50):
            print('Just arrived:', packet)
print("Stop Live Capturing")
print( capture )
for packet in capture:
print(packet)
import netifaces
        print(netifaces.interfaces())
print( capture.interfaces )
self.assertTrue(True)
with self.assertRaises(LookupError):
            raise LookupError()
'''
|
class TestVerify(unittest.TestCase):
def test_packetparser(self):
pass
def test_packetparser6lowpan(self):
pass
def test_packetparser6lowpan2(self):
pass
def test_packetparser6lowpan3(self):
pass
def test_packetparserfullrow(self):
pass
def test_packetparserlegacy(self):
pass
def test_packetparsermultilayer(self):
pass
def test_packetparserregexvalue(self):
pass
@mock.patch('icetea_lib.LogManager.get_bench_logger')
def test_packetmangager_happyday(self, loggerpatch):
pass
@mock.patch('icetea_lib.LogManager.get_bench_logger')
def test_packetmangager_sadday(self, loggerpatch):
pass
@mock.patch('icetea_lib.LogManager.get_bench_logger')
def test_packetcounter6lowpan(self, loggerpatch):
pass
| 15 | 0 | 17 | 0 | 17 | 1 | 1 | 0.19 | 1 | 3 | 3 | 0 | 11 | 0 | 11 | 83 | 241 | 17 | 194 | 42 | 179 | 37 | 67 | 39 | 55 | 1 | 2 | 1 | 11 |
2,083 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_serial_port.py
|
test_regression.test_serial_port.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_serial_port",
title="regression test for dut serial data communication",
status="development",
purpose="Verify dut serial",
component=["cmdline"],
type="regression",
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"allowed_platforms": ['K64F'],
"application": {"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin"}
}
}
}
)
def case(self):
# get dut
dut = self.get_dut(1)
# print dut information
dut.print_info()
# get dut serial port
self.logger.info("DUT serial port is %s" % dut.comport)
# write data to dut
dut.writeline(data="echo 'This is a testing line write to dut'")
# wait 1 second
self.delay(1)
# verify message from trace
verify_message(dut.traces, 'This is a testing line write to dut')
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 19 | 3 | 13 | 3 | 1 | 0.22 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 39 | 6 | 27 | 4 | 24 | 6 | 10 | 4 | 7 | 1 | 3 | 0 | 2 |
2,084 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_regression.py
|
test_regression.test_regression.TestRegression
|
class TestRegression(unittest.TestCase):
def test_regression_tests(self):
icetea_verbose = '-vv'
icetea_bin = "icetea"
this_file_path = os.path.dirname(os.path.realpath(__file__))
tc_name_list = ["test_async", "test_cli_init", "test_close_open", "test_cmdline", "test_multi_dut",
"test_cmd_resp", "test_serial_port"]
test_result = []
        # Spawn each test case as a separate icetea process
for tc in tc_name_list:
parameters = [icetea_bin, "--tcdir", this_file_path, "--tc", tc, "--failure_return_value", icetea_verbose]
if tc == "test_cli_init":
parameters.append("--reset")
proc = Popen(parameters, stdout=PIPE)
proc.communicate()
test_result.append((tc, proc.returncode))
raise_exception = False
for tc, result in test_result:
if result != 0:
raise_exception = True
print(tc + " failed with retCode: " + str(result))
if raise_exception:
raise Exception("Regression tests have failure")
|
class TestRegression(unittest.TestCase):
def test_regression_tests(self):
pass
| 2 | 0 | 25 | 3 | 21 | 1 | 6 | 0.05 | 1 | 3 | 0 | 0 | 1 | 0 | 1 | 73 | 26 | 3 | 22 | 12 | 20 | 1 | 21 | 12 | 19 | 6 | 2 | 2 | 6 |
2,085 |
ARMmbed/icetea
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_icetea/test/test_logmanager.py
|
test.test_logmanager.FunctionTests
|
class FunctionTests(unittest.TestCase):
def setUp(self):
pass
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_warn(self, mock_os):
LogManager.VERBOSE_LEVEL = 0
LogManager.SILENT_ON = False
logger = get_external_logger("test_logger", "TST", False)
logger = logging.getLogger("test_logger")
self.assertTrue(logger.handlers[0].level == logging.WARNING)
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_info(self, mock_os):
LogManager.VERBOSE_LEVEL = 1
LogManager.SILENT_ON = False
logger = get_external_logger("test_logger2", "TST", False)
logger = logging.getLogger("test_logger2")
self.assertTrue(logger.handlers[0].level == logging.INFO)
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_debug(self, mock_os):
LogManager.VERBOSE_LEVEL = 2
LogManager.SILENT_ON = False
logger = get_external_logger("test_logger3", "TST", False)
logger = logging.getLogger("test_logger3")
self.assertTrue(logger.handlers[0].level == logging.DEBUG)
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_silent(self, mock_os):
LogManager.VERBOSE_LEVEL = 2
LogManager.SILENT_ON = True
logger = get_external_logger("test_logger4", "TST", False)
logger = logging.getLogger("test_logger4")
self.assertTrue(logger.handlers[0].level == logging.ERROR)
|
class FunctionTests(unittest.TestCase):
def setUp(self):
pass
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_warn(self, mock_os):
pass
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_info(self, mock_os):
pass
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_debug(self, mock_os):
pass
@mock.patch("icetea_lib.LogManager.os.path.join")
def test_verbosity_set_to_silent(self, mock_os):
pass
| 10 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 5 | 0 | 5 | 77 | 35 | 4 | 31 | 14 | 21 | 0 | 27 | 10 | 21 | 1 | 2 | 0 | 5 |
2,086 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_multi_dut.py
|
test_regression.test_multi_dut.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_multi_dut",
title="",
status="development",
type="regression",
purpose="",
component=["icetea"],
requirements={
"duts": {
'*': {
"count": 2,
"type": "hardware",
"allowed_platforms": ['K64F'],
"application": {
"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin"
}
},
"1": {
"nick": "dut1"
},
"2": {
"nick": "dut2"
}
}
}
)
def case(self):
# send command to all duts by '*'
responses = self.command('*', "echo hello world! ")
        # 'responses' is a list of the responses returned by each dut
for response in responses:
response.verify_message("hello world!")
response.verify_response_time(1)
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 17 | 0 | 16 | 1 | 2 | 0.06 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 36 | 1 | 33 | 5 | 30 | 2 | 8 | 5 | 5 | 2 | 3 | 1 | 3 |
2,087 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_cmd_resp.py
|
test_regression.test_cmd_resp.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_cmd_resp",
title="icetea command and response APIs example usage",
status="development",
type="smoke",
purpose="show an example usage of icetea command and response APIs",
component=["icetea"],
requirements={
"duts": {
'*': {
"count": 2,
"type": "hardware",
"allowed_platforms": ['K64F'],
"application": {"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin"}
},
"1": {
"nick": "dut1"
},
"2": {
"nick": "dut2"
}
}
}
)
def case(self):
# send known command "echo hello" and retcode expected to be 0 --> success() is True
response = self.command("dut1", "echo hello", expected_retcode=0)
self.assertTrue(response.success())
response.verify_message("hello")
# send unknown command "hello" and the retcode for unknown command is -5 --> fail() is True
response = self.command("dut2", "hello", expected_retcode=-5)
self.assertTrue(response.fail())
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 17 | 1 | 16 | 1 | 1 | 0.06 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 36 | 2 | 32 | 4 | 29 | 2 | 9 | 4 | 6 | 1 | 3 | 0 | 2 |
2,088 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_close_open.py
|
test_regression.test_close_open.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_close_open",
title="regression test for open and close dut connection",
status="development",
purpose="Verify dut connection",
component=["cmdline"],
type="regression", # allowed values: installation, compatibility, smoke,
# regression, acceptance, alpha, beta, destructive, performance
requirements={
"duts": {
'*': { # requirements for all nodes
"count": 1,
"type": "hardware",
"allowed_platforms": ['K64F'],
"application": {"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin"}
}
}
}
)
def case(self):
# get dut
dut = self.get_dut(1)
# Close connection, since by default dut connection has opened already
dut.close_connection()
# wait a second and reopen connection
self.delay(1)
dut.open_connection()
# verify connection opened successfully
resp = self.command(1, "echo helloworld")
resp.verify_message("helloworld", break_in_fail=True)
# Check that exception is raised if we try to reopen connection
try:
dut.open_connection()
# We should never get here, since previous line
# should raise DutConnectionError exception
raise TestStepError("Calling open_Connection twice didn't raise error as expected!")
except DutConnectionError:
pass
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 22 | 2 | 16 | 5 | 2 | 0.31 | 1 | 2 | 2 | 0 | 2 | 0 | 2 | 108 | 45 | 5 | 32 | 5 | 29 | 10 | 15 | 5 | 12 | 2 | 3 | 1 | 3 |
2,089 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_cli_init.py
|
test_regression.test_cli_init.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_cli_init",
title="test cli init with EventMatcher",
status="development",
type="regression",
purpose="test cli init with EventMatcher ",
component=["icetea"],
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"allowed_platforms": ["K64F"],
"application": {
"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin",
"cli_ready_trigger": "/>"
}
}
}
}
)
def case(self):
self.logger.info("cli_ready_trigger will help icetea wait until application is ready for communication.")
# create triggers from received data
EventMatcher(EventTypes.DUT_LINE_RECEIVED,
"ping",
self.get_dut(1),
callback=self.ping_cb)
# this will trig above callback
self.command(1, "echo ping")
def ping_cb(self, event_match):
self.logger.info("pong (because of received %s)", event_match.event_data)
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
def ping_cb(self, event_match):
pass
| 4 | 0 | 11 | 0 | 10 | 1 | 1 | 0.06 | 1 | 2 | 2 | 0 | 3 | 0 | 3 | 109 | 37 | 3 | 32 | 4 | 28 | 2 | 9 | 4 | 5 | 1 | 3 | 0 | 3 |
2,090 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_async.py
|
test_regression.test_async.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_async",
title="async command and response test",
status="development",
type="regression",
purpose="test async command and response parse works",
component=["icetea"],
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"allowed_platforms": ["K64F"],
"application": {
"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin"
}
}
}
}
)
def case(self):
# launch an async command
asyncCmd = self.command(1, "echo HelloWorld!", asynchronous=True)
# wait_for_async_response: Wait for the given asynchronous response to be ready and then parse it
resp = self.wait_for_async_response("echo", asyncCmd)
        # Verify that the expected response message is found
resp.verify_message("HelloWorld!")
self.assertTrue(resp.success())
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 16 | 1 | 13 | 2 | 1 | 0.11 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 33 | 3 | 27 | 5 | 24 | 3 | 8 | 5 | 5 | 1 | 3 | 0 | 2 |
2,091 |
ARMmbed/icetea
|
ARMmbed_icetea/test/tests/test_tcTearDown.py
|
test.tests.test_tcTearDown.Testcase
|
class Testcase(Bench):
def __init__(self, teststepfail=None, teststeperror=None, teststeptimeout=None,
exception=None, name_error=None, value_error=None, teststeptimeout_in_case=None):
self.teststepfail = teststepfail
self.teststeperror = teststeperror
self.teststeptimeout = teststeptimeout
self.exception = exception
self.name_error = name_error
self.value_error = value_error
self.timeout_in_case = teststeptimeout_in_case
Bench.__init__(self,
name="test_tcTearDown",
title="Test Testcase teardown with invalid command",
status="development",
type="acceptance",
purpose="dummy",
component=["Icetea_ut"],
requirements={
"duts": {
'*': { # requirements for all nodes
"count": 0,
}
}}
)
def setup(self):
        # Raise the configured exception to verify that teardown is still launched.
if self.teststepfail:
raise TestStepFail("Failed!")
elif self.teststeperror:
raise TestStepError("Error!")
elif self.teststeptimeout:
raise TestStepTimeout("Timeout!")
elif self.name_error:
raise NameError("This is a NameError")
elif self.value_error:
raise ValueError("This is a ValueError")
elif self.exception:
raise Exception("This is a generic exception")
        # If no exception is raised in setup, the case should be run
def case(self):
if self.timeout_in_case:
raise TestStepTimeout("Timeout in case!")
def teardown(self):
pass
|
class Testcase(Bench):
def __init__(self, teststepfail=None, teststeperror=None, teststeptimeout=None,
exception=None, name_error=None, value_error=None, teststeptimeout_in_case=None):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 11 | 0 | 10 | 1 | 3 | 0.07 | 1 | 6 | 3 | 0 | 4 | 7 | 4 | 110 | 47 | 3 | 42 | 13 | 36 | 3 | 23 | 12 | 18 | 7 | 3 | 1 | 11 |
2,092 |
ARMmbed/icetea
|
ARMmbed_icetea/test/tests/test_reportcmdfail.py
|
test.tests.test_reportcmdfail.Testcase
|
class Testcase(Bench):
"""
    Test case for the report_cmd_fail=False option of Bench.command.
"""
def __init__(self):
Bench.__init__(self,
name="cmdfailtestcase",
title="Bench test file",
status="development",
purpose="test",
component=["None"],
type="regression",
requirements={
"duts": {
'*': {
"count": 0
}
}
}
)
def case(self): # pylint: disable=missing-docstring
mocked_dut = MagicMock()
mocked_dut.execute_command = MagicMock(side_effect=[TestStepTimeout])
self.duts.append(mocked_dut)
self.resource_configuration._dut_count = 1 # pylint: disable=protected-access
self.command(1, "test", report_cmd_fail=False)
|
class Testcase(Bench):
'''
Test case for
'''
def __init__(self):
pass
def case(self):
pass
| 3 | 1 | 11 | 0 | 11 | 1 | 1 | 0.22 | 1 | 1 | 1 | 0 | 2 | 0 | 2 | 108 | 27 | 1 | 23 | 4 | 20 | 5 | 9 | 4 | 6 | 1 | 3 | 0 | 2 |
2,093 |
ARMmbed/icetea
|
ARMmbed_icetea/test/tests/test_genericprocess_testcase.py
|
test.tests.test_genericprocess_testcase.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(
self,
name="test_quick_process",
type="smoke",
requirements={
"duts": {
"*": {
"count": 1,
"type": "process"
},
1: {
"application": {
"bin": "/bin/echo",
"bin_args": ["If this is found, the test passed"],
"init_cli_cmds": [],
"post_cli_cmds": []
}
}
}
}
)
def setup(self): # pylint: disable=method-hidden
pass
def case(self):
time_start = time.time()
result = False
while (time.time() - time_start) < 10.0 and result is False:
result = self.verify_trace(
0, "If this is found, the test passed", False)
time.sleep(1.0)
if result is False:
raise TestStepFail("Didn't get trace")
def teardown(self): # pylint: disable=method-hidden
pass
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 9 | 0 | 9 | 1 | 2 | 0.06 | 1 | 1 | 1 | 0 | 4 | 0 | 4 | 110 | 39 | 3 | 36 | 7 | 31 | 2 | 15 | 7 | 10 | 3 | 3 | 1 | 6 |
2,094 |
ARMmbed/icetea
|
ARMmbed_icetea/test/tests/test_config_parse_corner_case.py
|
test.tests.test_config_parse_corner_case.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_config_parse_corner_case",
title="Regression test for a corner case in config parsing",
status="released",
purpose="Regression test",
component=["configuration"],
type="regression",
requirements={
"duts": {
'*': {
"count": 33,
"type": "process",
"application": {"bin": os.path.abspath(
os.path.join(__file__,
os.path.pardir,
os.path.pardir,
"dut", "dummyDut"))}
},
1: {"nick": "nick1", "location": [0.0, 0.0]},
2: {"nick": "nick2", "location": [10.0, 0.0]},
"3..32": {"nick": "nick_{i}", "location": [20.0, 0.0]},
33: {"nick": "nick33", "location": [50.0, 50.0]},
}
}
)
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
|
class Testcase(Bench):
def __init__(self):
pass
def setup(self):
pass
def case(self):
pass
def teardown(self):
pass
| 5 | 0 | 8 | 0 | 8 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 0 | 4 | 110 | 37 | 4 | 33 | 5 | 28 | 0 | 9 | 5 | 4 | 1 | 3 | 0 | 4 |
2,095 |
ARMmbed/icetea
|
ARMmbed_icetea/test_regression/test_cmdline.py
|
test_regression.test_cmdline.Testcase
|
class Testcase(Bench):
def __init__(self):
Bench.__init__(self,
name="test_cmdline",
title="regression test for command line interface",
status="development",
purpose="Verify Command Line Interface",
component=["cmdline"],
type="regression",
requirements={
"duts": {
'*': {
"count": 1,
"type": "hardware",
"allowed_platforms": ['K64F'],
"application": {"bin": "examples/cliapp/mbed-os5/bin/mbed_cliapp_K64F.bin"}
}
}
}
)
def case(self):
# send cli command
resp = self.command(1, "echo helloworld", timeout=5)
resp.verify_message("helloworld", break_in_fail=True)
|
class Testcase(Bench):
def __init__(self):
pass
def case(self):
pass
| 3 | 0 | 12 | 0 | 11 | 1 | 1 | 0.04 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 108 | 25 | 1 | 23 | 4 | 20 | 1 | 6 | 4 | 3 | 1 | 3 | 0 | 2 |
2,096 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_iceteamanager.py
|
test.test_iceteamanager.IceteaManagerTestcase
|
class IceteaManagerTestcase(unittest.TestCase):
def setUp(self):
self.args_noprint = argparse.Namespace(
available=False, version=False, bin=None,
binary=False, channel=None,
clean=True, cloud=False, component=False,
device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False,
iface=None, kill_putty=False, list=True,
listsuites=False, log='./log', my_duts=None,
nobuf=None, pause_when_external_dut=False,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False,
status=False, suite=False, tc='all', tc_cfg=None,
tcdir="test", testtype=False, type=None,
subtype=None, use_sniffer=False, valgrind=False,
valgrind_tool=None, verbose=False,
repeat=0, platform_name=None, json=False)
self.args_tc_no_exist = argparse.Namespace(
available=False, version=False, bin=None,
binary=False, channel=None,
clean=True, cloud=False, component=False,
device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False,
iface=None, kill_putty=False, list=True,
listsuites=False, log='./log', my_duts=None,
nobuf=None, pause_when_external_dut=False,
putty=False, reset=False, silent=True,
skip_case=False, skip_rampdown=False, skip_rampup=False,
status=False, suite=False, tc='does_not_exist',
tc_cfg=None, tcdir="test", testtype=False, type=None,
subtype=None, use_sniffer=False, valgrind=False,
valgrind_tool=None, verbose=False,
repeat=0, platform_name=None, json=False)
self.args_suite = argparse.Namespace(
available=False, version=False, bin=None,
binary=False, channel=None,
clean=True, cloud=False, component=False,
device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False,
iface=None, kill_putty=False, list=False,
listsuites=False, log='./log', my_duts=None,
nobuf=None, pause_when_external_dut=False,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False,
status=False, suite="dummy_suite.json", tc=None,
tc_cfg=None, tcdir="examples", testtype=False, type=None,
subtype=None, use_sniffer=False, valgrind=False,
valgrind_tool=None, verbose=False, repeat=2,
suitedir="./test/suites", platform_name=None, json=False)
self.args_tc = argparse.Namespace(
available=False, version=False, bin=None,
binary=False, channel=None,
clean=True, cloud=False, component=False,
device='*', gdb=None,
gdbs=None, gdbs_port=2345, group=False,
iface=None, kill_putty=False, list=False,
listsuites=False, log='./log', my_duts=None,
nobuf=None, pause_when_external_dut=False,
putty=False, reset=False, silent=True, skip_case=False,
skip_rampdown=False, skip_rampup=False,
status=False, suite=None, tc="test_cmdline",
tc_cfg=None, tcdir="examples", testtype=False, type="process",
subtype=None, use_sniffer=False, valgrind=False,
valgrind_tool=None, verbose=False, repeat=2,
suitedir="./test/suites", forceflash_once=False, forceflash=False,
ignore_invalid_params=True, failure_return_value=False, stop_on_failure=False,
branch="", platform_name=None, json=False)
self.maxdiff = None
def tearDown(self):
if os.path.exists("test_suite.json"):
os.remove("test_suite.json")
def test_list_suites(self):
table = IceteaManager.list_suites(suitedir="./test/suites")
tab = u'+------------------------+\n' \
u'| Testcase suites |\n' \
u'+------------------------+\n' \
u'| dummy_suite.json |\n' \
u'| duplicates_suite.json |\n' \
u'| failing_suite.json |\n' \
u'| malformed_suite.json |\n' \
u'| suite_missing_one.json |\n' \
u'| working_suite.json |\n' \
u'+------------------------+'
self.assertEqual(table.get_string(), tab)
@mock.patch("icetea_lib.IceteaManager._cleanlogs")
@mock.patch("icetea_lib.IceteaManager.TestSuite")
def test_run(self, mock_suite, mock_clean): # pylint: disable=unused-argument
ctm = IceteaManager()
# Testing different return codes
with mock.patch.object(ctm, "runtestsuite") as mock_method:
mock_method.return_value = MockResults()
retval = ctm.run(args=self.args_tc)
self.assertEqual(retval, 0)
self.args_tc.failure_return_value = True
retval = ctm.run(args=self.args_tc)
self.assertEqual(retval, 0)
mock_method.return_value = MockResults(fails=1)
retval = ctm.run(args=self.args_tc)
self.assertEqual(retval, 2)
mock_method.return_value = MockResults(inconcs=1)
retval = ctm.run(args=self.args_tc)
self.assertEqual(retval, 3)
self.args_tc.list = True
self.args_tc.cloud = False
mock_suite.list_testcases = mock.MagicMock()
mock_suite.list_testcases.return_value = "test_list"
# Test list branch
retval = ctm.run(args=self.args_tc)
self.assertEqual(retval, 0)
self.args_tc.list = False
self.args_tc.listsuites = True
ctm.list_suites = mock.MagicMock()
ctm.list_suites.return_value = "Test-list-item"
retval = ctm.run(args=self.args_tc)
self.assertEqual(retval, 0)
@mock.patch("icetea_lib.IceteaManager.TestSuite")
def test_run_exceptions(self, mock_suite):
ctm = IceteaManager()
mock_suite.side_effect = [SuiteException]
self.assertEqual(ctm.run(args=self.args_tc), 3)
with mock.patch.object(ctm, "list_suites") as mock_method:
mock_method.return_value = None
self.args_tc.listsuites = True
self.assertEqual(ctm.run(args=self.args_tc), 2)
def test_run_returncodes(self):
retcode = subprocess.call("python icetea.py --clean -s "
"--tc test_run_retcodes_fail "
"--tcdir test --type process",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS)
retcode = subprocess.call("python icetea.py --clean -s "
"--tc test_run_retcodes_success "
"--tcdir test --type process",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS)
retcode = subprocess.call("python icetea.py --clean -s "
"--tc test_run_retcodes_fail "
"--failure_return_value --tcdir test "
"--type process",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_FAIL)
retcode = subprocess.call("python icetea.py --clean -s "
"--suite working_suite --suitedir test/suites "
"--tcdir examples",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS)
retcode = subprocess.call("python icetea.py --tc test_run_retcodes_notfound -s "
"--tcdir test --type process",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS)
# Run with --clean to clean up
retcode = subprocess.call( # pylint: disable=unused-variable
"python icetea.py --clean ",
shell=True)
def test_run_many_cases_one_file(self):
retcode = subprocess.call("python icetea.py --clean -s "
"--tc all "
"--tcdir test/tests/multiple_in_one_file",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS)
retcode = subprocess.call(
"python icetea.py --clean -s "
"--tc passing_case --failure_return_value "
"--tcdir test/tests/multiple_in_one_file",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS)
retcode = subprocess.call(
"python icetea.py --clean -s "
"--tc all --failure_return_value "
"--tcdir test/tests/multiple_in_one_file",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_FAIL)
retcode = subprocess.call(
"python icetea.py --clean -s "
"--tc fail_case --failure_return_value "
"--tcdir test/tests/multiple_in_one_file",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_FAIL)
# Run with --clean to clean up
retcode = subprocess.call(
"python icetea.py --clean -s",
shell=True)
@mock.patch("icetea_lib.IceteaManager.shutil")
def test_clean(self, mock_shutil): # pylint: disable=unused-argument
ctm = IceteaManager()
self.args_tc.tc = None
self.args_tc.clean = True
self.assertEqual(ctm.run(self.args_tc), ExitCodes.EXIT_SUCCESS)
@mock.patch("icetea_lib.IceteaManager.get_fw_version")
def test_version_print(self, mock_fw):
mock_fw.return_value = "1.0.0"
ctm = IceteaManager()
self.args_tc.version = True
self.assertEqual(ctm.run(self.args_tc), ExitCodes.EXIT_SUCCESS)
def test_platform_name_inconc(self):
retcode = subprocess.call("python icetea.py --clean -s "
"--tc test_run_retcodes_success "
"--tcdir test --type process --platform_name TEST_PLAT2 "
"--failure_return_value",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_INCONC, "Non-inconclusive returncode returned. "
"Allowed_platforms and platform_name "
"broken.")
# Run with --clean to clean up
retcode = subprocess.call(
"python icetea.py --clean ",
shell=True)
def test_platform_name_success(self):
retcode = subprocess.call("python icetea.py --clean -s "
"--tc test_run_retcodes_success "
"--tcdir test --type process --platform_name TEST_PLAT "
"--failure_return_value",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS, "Non-success returncode returned. "
"Allowed_platforms and platform_name "
"broken.")
# Run with --clean to clean up
retcode = subprocess.call(
"python icetea.py --clean ",
shell=True)
def test_reportcmdfail(self):
retcode = subprocess.call("python icetea.py --clean -s "
"--tc cmdfailtestcase "
"--tcdir test "
"--failure_return_value",
shell=True)
self.assertEquals(retcode, ExitCodes.EXIT_SUCCESS, "Non-success returncode returned. "
"ReportCmdFail functionality broken?")
# Run with --clean to clean up
retcode = subprocess.call(
"python icetea.py --clean ",
shell=True)
def test_fail_suite_out_regression(self):
proc = subprocess.Popen(["python", "icetea.py", "--clean", "--suite",
"failing_suite.json", "--suitedir", "test/suites", "--tcdir",
"test/tests", "-s"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, _ = proc.communicate()
self.assertTrue(re.search(b"This is a failing test case", output))
def test_list_json_output(self):
self.maxdiff = None
expected_test_path = os.path.abspath(os.path.join(__file__, "..", "tests",
"json_output_test",
"json_output_test_case.py"))
expected_output = [
{u"status": u"development",
u"requirements": {
u"duts": {
u"*": {
u"application": {u"bin": None}}
},
u"external": {
u"apps": []}
},
u"filepath": expected_test_path,
u"name": u"json_output_test",
u"title": u"Test list output as json",
u"component": [u"Icetea_ut"],
u"compatible": {
u"framework": {
u"version": u">=1.0.0",
u"name": u"Icetea"},
u"hw": {u"value": True},
u"automation": {u"value": True}
},
u"purpose": u"dummy",
u"type": u"acceptance",
u"sub_type": None}]
proc = subprocess.Popen(["python", "icetea.py", "--list", "--tcdir",
"test{}tests{}json_output_test".format(
os.path.sep, os.path.sep),
"-s", "--json"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, _ = proc.communicate()
output = output.rstrip(b"\n")
if IS_PYTHON3:
output = output.decode("utf-8")
self.assertDictEqual(expected_output[0], json.loads(output)[0])
def test_list_export_to_suite(self):
expected_call = json.dumps(
{"default": {}, "testcases": [{"name": "json_output_test"}]})
proc = subprocess.Popen(["python", "icetea.py", "--list", "--tcdir",
"test{}tests{}json_output_test".format(
os.path.sep, os.path.sep),
"-s", "--json", "--export", "test_suite.json"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, _ = proc.communicate() # pylint: disable=unused-variable
with open("test_suite.json", "r") as file_handle:
read_data = file_handle.read()
self.assertDictEqual(json.loads(expected_call), json.loads(read_data))
|
class IceteaManagerTestcase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_list_suites(self):
pass
@mock.patch("icetea_lib.IceteaManager._cleanlogs")
@mock.patch("icetea_lib.IceteaManager.TestSuite")
def test_run(self, mock_suite, mock_clean):
pass
@mock.patch("icetea_lib.IceteaManager.TestSuite")
def test_run_exceptions(self, mock_suite):
pass
def test_run_returncodes(self):
pass
def test_run_many_cases_one_file(self):
pass
@mock.patch("icetea_lib.IceteaManager.shutil")
def test_clean(self, mock_shutil):
pass
@mock.patch("icetea_lib.IceteaManager.get_fw_version")
def test_version_print(self, mock_fw):
pass
def test_platform_name_inconc(self):
pass
def test_platform_name_success(self):
pass
def test_reportcmdfail(self):
pass
def test_fail_suite_out_regression(self):
pass
def test_list_json_output(self):
pass
def test_list_export_to_suite(self):
pass
| 21 | 0 | 20 | 1 | 19 | 1 | 1 | 0.04 | 1 | 6 | 4 | 0 | 15 | 5 | 15 | 87 | 321 | 29 | 285 | 50 | 264 | 11 | 114 | 43 | 98 | 2 | 2 | 1 | 17 |
2,097 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_httpapi.py
|
test.test_httpapi.APITestCase
|
class APITestCase(unittest.TestCase):
def setUp(self):
self.headers = {"accept-charset": "utf-8",
"accept": "application/json"}
self.host = "http://somesite.com"
self.host2 = "http://somesite.com/api/"
self.cert = "/path/to/cert.pem"
self.http = None
def test_init(self):
self.http = HttpApi(self.host)
self.assertEquals(self.http.host, self.host,
"HTTPApi not set up correctly, host names don't match")
        self.http = HttpApi(self.host, self.headers, self.cert)
self.assertDictEqual(self.headers, self.http.defaultHeaders,
"HttpApi not set up correctly, headers not ok")
self.assertEquals(self.cert, self.http.cert)
# Test both new header field and overwrite
def test_header_set(self):
self.http = HttpApi(self.host, self.headers)
# Test overwriting headers
self.http.set_header("accept", "text/plain")
self.assertEquals(self.http.defaultHeaders["accept"], "text/plain")
# Test setting new headers
self.http.set_header("accept-language", "en-US")
self.assertEquals(self.http.defaultHeaders["accept-language"], "en-US")
def test_header_merge(self):
self.http = HttpApi(self.host, self.headers)
headers = {"accept": "text/plain", "content-length": 348}
ref = {"accept-charset": "utf-8",
"accept": "text/plain", "content-length": 348}
merger = jsonmerge.Merger(SCHEMA)
heads = merger.merge(self.headers, headers)
# Assert that new headers are the same as reference
self.assertDictEqual(
heads, ref, msg="Header merging does not work correctly")
@mock.patch("icetea_lib.tools.HTTP.Api.requests.get")
def test_url_combine(self, mock_get):
self.http = HttpApi(self.host2)
self.http.get("/test_path")
mock_get.assert_called_with(self.host2 + "test_path", {}, headers={})
mock_get.reset_mock()
self.http.get("test_path")
mock_get.assert_called_with(self.host2 + "test_path", {}, headers={})
mock_get.reset_mock()
self.http = HttpApi(self.host)
self.http.get("test_path")
mock_get.assert_called_with(self.host + "/test_path", {}, headers={})
mock_get.reset_mock()
self.http = HttpApi(self.host)
self.http.get("/test_path")
mock_get.assert_called_with(self.host + "/test_path", {}, headers={})
@mock.patch("icetea_lib.tools.HTTP.Api.requests.get")
def test_get(self, mock_requests_get):
# First test successfull get request. Assert if get was called
self.http = HttpApi(self.host)
path = "/test"
mock_requests_get.side_effect = iter([MockedRequestsResponse(),
RequestException(
"Exception raised correctly"),
MockedRequestsResponse(404),
MockedRequestsResponse(200),
MockedRequestsResponse(201)])
resp = self.http.get(path) # pylint: disable=unused-variable
self.assertTrue(mock_requests_get.called,
"Failed to call requests.get")
path = "/exception"
# Assert that RequestException is raised
with self.assertRaises(RequestException, msg="request.get exception not raised properly"):
self.http.get(path)
mock_requests_get.reset_mock()
path2 = "v2/"
resp = self.http.get(path2) # pylint: disable=unused-variable
self.assertTrue(mock_requests_get.called,
"Failed to call requests.get")
mock_requests_get.reset_mock()
path3 = "/v3/"
self.http = HttpApi(self.host2)
resp = self.http.get(path3) # pylint: disable=unused-variable
self.assertTrue(mock_requests_get.called,
"Failed to call requests.get")
@mock.patch("icetea_lib.tools.HTTP.Api.requests.post")
def test_post(self, mock_requests_post):
# Successfull post
self.http = HttpApi(self.host)
path = "/test"
json = {"testkey1": "testvalue1"}
mock_requests_post.side_effect = iter([MockedRequestsResponse(),
RequestException(
"Exception raised correctly"),
MockedRequestsResponse(404),
MockedRequestsResponse(200),
MockedRequestsResponse(201)])
resp = self.http.post(
path, json=json) # pylint: disable=unused-variable
self.assertTrue(mock_requests_post.called,
"Failed to call requests.post")
path = "/exception"
# Assert that RequestException is raised
with self.assertRaises(RequestException, msg="request.post exception not raised properly"):
self.http.post(path, json=json)
mock_requests_post.reset_mock()
path2 = "v2/"
resp = self.http.post(
path2, json=json) # pylint: disable=unused-variable
self.assertTrue(mock_requests_post.called,
"Failed to call requests.post")
mock_requests_post.reset_mock()
path3 = "/v3/"
self.http = HttpApi(self.host2)
resp = self.http.post(
path3, json=json) # pylint: disable=unused-variable
self.assertTrue(mock_requests_post.called,
"Failed to call requests.post")
@mock.patch("icetea_lib.tools.HTTP.Api.requests.put")
def test_put(self, mock_requests_put):
# Successfull put
self.http = HttpApi(self.host)
path = "/test"
data = {"testkey1": "testvalue1"}
mock_requests_put.side_effect = iter([MockedRequestsResponse(),
RequestException(
"Exception raised correctly"),
MockedRequestsResponse(404),
MockedRequestsResponse(200),
MockedRequestsResponse(201)])
resp = self.http.put(
path, data=data) # pylint: disable=unused-variable
self.assertTrue(mock_requests_put.called,
"Failed to call requests.put")
path = "/exception"
# Assert that RequestException is raised
with self.assertRaises(RequestException, msg="request.put exception not raised properly"):
self.http.put(path, data=data)
mock_requests_put.reset_mock()
path2 = "v2/"
resp = self.http.put(
path2, data=data) # pylint: disable=unused-variable
self.assertTrue(mock_requests_put.called,
"Failed to call requests.put")
mock_requests_put.reset_mock()
path3 = "/v3/"
self.http = HttpApi(self.host2)
resp = self.http.put(
path3, data=data) # pylint: disable=unused-variable
self.assertTrue(mock_requests_put.called,
"Failed to call requests.put")
@mock.patch("icetea_lib.tools.HTTP.Api.requests.patch")
def test_patch(self, mock_requests_patch):
# Successfull patch
self.http = HttpApi(self.host)
path = "/test"
data = {"testkey1": "testvalue1"}
mock_requests_patch.side_effect = iter([MockedRequestsResponse(),
RequestException(
"Exception raised correctly"),
MockedRequestsResponse(404),
MockedRequestsResponse(200),
MockedRequestsResponse(201)])
resp = self.http.patch(
path, data=data) # pylint: disable=unused-variable
self.assertTrue(mock_requests_patch.called,
"Failed to call requests.patch")
path = "/exception"
# Assert that RequestException is raised
with self.assertRaises(RequestException, msg="request.patch exception not raised properly"):
self.http.patch(path, data=data)
mock_requests_patch.reset_mock()
path2 = "v2/"
resp = self.http.patch(
path2, data=data) # pylint: disable=unused-variable
self.assertTrue(mock_requests_patch.called,
"Failed to call requests.patch")
mock_requests_patch.reset_mock()
path3 = "/v3/"
self.http = HttpApi(self.host2)
resp = self.http.patch(
path3, data=data) # pylint: disable=unused-variable
self.assertTrue(mock_requests_patch.called,
"Failed to call requests.patch")
@mock.patch("icetea_lib.tools.HTTP.Api.requests.delete")
def test_delete(self, mock_requests_delete):
# Successfull delete
self.http = HttpApi(self.host)
path = "/test"
mock_requests_delete.side_effect = iter([MockedRequestsResponse(),
RequestException(
"Exception raised correctly"),
MockedRequestsResponse(404),
MockedRequestsResponse(200),
MockedRequestsResponse(200)])
resp = self.http.delete(path) # pylint: disable=unused-variable
self.assertTrue(mock_requests_delete.called,
"Failed to call requests.delete")
path = "/exception"
# Assert that RequestException is raised
with self.assertRaises(RequestException,
msg="request.delete exception not raised properly"):
self.http.delete(path)
mock_requests_delete.reset_mock()
path2 = "v2/"
resp = self.http.delete(path2) # pylint: disable=unused-variable
self.assertTrue(mock_requests_delete.called,
"Failed to call requests.delete")
mock_requests_delete.reset_mock()
path3 = "/v3/"
self.http = HttpApi(self.host2)
resp = self.http.delete(path3) # pylint: disable=unused-variable
self.assertTrue(mock_requests_delete.called,
"Failed to call requests.delete")
@mock.patch("icetea_lib.tools.HTTP.Api.HttpApi.get")
def test_huge_binary_content(self, mocked_get):
var = os.urandom(10000000)
for _ in range(6):
var = var + os.urandom(10000000)
response = Response()
response._content = var # pylint: disable=protected-access
response.encoding = "utf-8"
response.status_code = 200
mocked_get.return_value = response
self.http = HttpApi(self.host)
resp = self.http.get("/")
self.assertEqual(resp, response)
|
class APITestCase(unittest.TestCase):
def setUp(self):
pass
def test_init(self):
pass
def test_header_set(self):
pass
def test_header_merge(self):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.requests.get")
def test_url_combine(self, mock_get):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.requests.get")
def test_get(self, mock_requests_get):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.requests.post")
def test_post(self, mock_requests_post):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.requests.put")
def test_put(self, mock_requests_put):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.requests.patch")
def test_patch(self, mock_requests_patch):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.requests.delete")
def test_delete(self, mock_requests_delete):
pass
@mock.patch("icetea_lib.tools.HTTP.Api.HttpApi.get")
def test_huge_binary_content(self, mocked_get):
pass
| 19 | 0 | 18 | 1 | 15 | 3 | 1 | 0.17 | 1 | 5 | 2 | 0 | 11 | 5 | 11 | 83 | 214 | 24 | 176 | 55 | 157 | 30 | 146 | 48 | 134 | 2 | 2 | 1 | 12 |
2,098 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_genericprocess.py
|
test.test_genericprocess.GenericProcessUnittests
|
class GenericProcessUnittests(unittest.TestCase):
@mock.patch("icetea_lib.tools.GenericProcess.os")
@mock.patch("icetea_lib.tools.GenericProcess.time")
def test_stop_process(self, mocked_time, mocked_os):
mocked_os.killpg = mock.MagicMock()
mocked_time.sleep = mock.MagicMock(return_value=1)
my_process = GenericProcess("test", logger=MockLogger())
my_process.read_thread = mock.MagicMock()
my_process.read_thread.stop = mock.MagicMock()
my_process.stop_process()
my_process.read_thread.stop.assert_called_once()
mocked_proc = mock.MagicMock()
pid = 11111111
type(mocked_proc).pid = mock.PropertyMock(return_value=pid)
mocked_proc.poll = mock.MagicMock()
mocked_proc.poll.side_effect = [None, None, None, None, None,
None, None, None, None, None,
None, None, None, None, 0, 0, 0, 0, 0]
type(my_process).proc = mock.PropertyMock(return_value=mocked_proc)
my_process.stop_process()
self.assertEqual(mocked_os.killpg.call_count, 3)
mocked_os.killpg.assert_has_calls([mock.call(pid, signal.SIGINT),
mock.call(pid, signal.SIGTERM),
mock.call(pid, signal.SIGKILL)])
my_process = GenericProcess("test", logger=MockLogger())
my_process.read_thread = mock.MagicMock()
my_process.read_thread.stop = mock.MagicMock()
my_process.stop_process()
my_process.read_thread.stop.assert_called_once()
mocked_proc = mock.MagicMock()
pid = 11111111
type(mocked_proc).pid = mock.PropertyMock(return_value=pid)
mocked_proc.poll = mock.MagicMock()
mocked_proc.poll.side_effect = [None, None, None, None, None, None,
None, None, None, 0, 0, 0]
type(my_process).proc = mock.PropertyMock(return_value=mocked_proc)
mocked_os.killpg.reset_mock()
my_process.stop_process()
self.assertEqual(mocked_os.killpg.call_count, 2)
mocked_os.killpg.assert_has_calls([mock.call(pid, signal.SIGINT),
mock.call(pid, signal.SIGTERM)])
@mock.patch("icetea_lib.tools.GenericProcess.os", create=True)
def test_stop_process_errors(self, mocked_os):
mocked_os.killpg = mock.MagicMock(side_effect=OSError)
my_process = GenericProcess("test", logger=MockLogger())
mocked_proc = mock.MagicMock()
pid = 11111111
type(mocked_proc).pid = mock.PropertyMock(return_value=pid)
type(my_process).proc = mock.PropertyMock(return_value=mocked_proc)
my_process.stop_process()
self.assertEqual(mocked_os.killpg.call_count, 3)
mocked_os.killpg.reset_mock()
mocked_os.killpg.side_effect = None
mocked_os.killpg.return_value = 1
mocked_proc.poll = mock.MagicMock()
mocked_proc.poll.side_effect = [1, 1]
with self.assertRaises(TestStepError):
my_process.stop_process()
def test_getters_and_setters(self):
my_process = GenericProcess("test", logger=MockLogger())
self.assertFalse(my_process.gdb)
my_process.use_gdb()
self.assertTrue(my_process.gdb)
my_process.use_gdb(False)
self.assertFalse(my_process.gdb)
self.assertFalse(my_process.gdbs)
my_process.use_gdbs()
self.assertTrue(my_process.gdbs)
self.assertEqual(my_process.gdbs_port, 2345)
my_process.use_gdbs(False)
self.assertFalse(my_process.gdbs)
my_process.gdbs_port = 1234
self.assertEqual(my_process.gdbs_port, 1234)
self.assertFalse(my_process.vgdb)
my_process.use_vgdb()
self.assertTrue(my_process.vgdb)
my_process.use_vgdb(False)
self.assertFalse(my_process.vgdb)
self.assertFalse(my_process.nobuf)
my_process.no_std_buf()
self.assertTrue(my_process.nobuf)
my_process.no_std_buf(False)
self.assertFalse(my_process.nobuf)
self.assertIsNone(my_process.valgrind_xml)
my_process.valgrind_xml = True
self.assertTrue(my_process.valgrind_xml)
self.assertFalse(my_process.valgrind_console)
my_process.valgrind_console = True
self.assertTrue(my_process.valgrind_console)
self.assertFalse(my_process.valgrind_track_origins)
my_process.valgrind_track_origins = True
self.assertTrue(my_process.valgrind_track_origins)
self.assertFalse(my_process.valgrind_extra_params)
my_process.valgrind_extra_params = True
self.assertTrue(my_process.valgrind_extra_params)
self.assertFalse(my_process.ignore_return_code)
my_process.ignore_return_code = True
self.assertTrue(my_process.ignore_return_code)
my_process.ignore_return_code = False
self.assertFalse(my_process.ignore_return_code)
def test_usevalgrind(self):
my_process = GenericProcess("test", logger=MockLogger())
with self.assertRaises(AttributeError):
my_process.use_valgrind("test", True, True, True, True)
my_process.use_valgrind("memcheck", 1, 2, 3, 4)
self.assertEqual(my_process.valgrind, "memcheck")
self.assertEqual(my_process.valgrind_xml, 1)
self.assertEqual(my_process.valgrind_console, 2)
self.assertEqual(my_process.valgrind_track_origins, 3)
self.assertEqual(my_process.valgrind_extra_params, 4)
|
class GenericProcessUnittests(unittest.TestCase):
@mock.patch("icetea_lib.tools.GenericProcess.os")
@mock.patch("icetea_lib.tools.GenericProcess.time")
def test_stop_process(self, mocked_time, mocked_os):
pass
@mock.patch("icetea_lib.tools.GenericProcess.os", create=True)
def test_stop_process_errors(self, mocked_os):
pass
def test_getters_and_setters(self):
pass
def test_usevalgrind(self):
pass
| 8 | 0 | 30 | 4 | 27 | 0 | 1 | 0 | 1 | 6 | 3 | 0 | 4 | 0 | 4 | 76 | 128 | 18 | 110 | 15 | 102 | 0 | 101 | 13 | 96 | 1 | 2 | 1 | 4 |
2,099 |
ARMmbed/icetea
|
ARMmbed_icetea/test/test_enhancedserial.py
|
test.test_enhancedserial.EnhancedSerialTests
|
class EnhancedSerialTests(unittest.TestCase):
def test_peek(self):
ens = EnhancedSerial()
self.assertEqual(ens.peek(), "")
ens.buf = "test"
self.assertEqual(ens.peek(), "test")
def test_readline(self):
with mock.patch.object(EnhancedSerial, "read") as mock_read:
ens = EnhancedSerial()
mock_read.side_effect = ["test\n".encode("utf-8"), "test".encode("utf-8"),
SerialTimeoutException, SerialException, ValueError]
self.assertEqual(ens.readline(), "test\n")
self.assertIsNone(ens.readline(timeout=0))
self.assertIsNone(ens.readline(timeout=0))
self.assertIsNone(ens.readline(timeout=0))
self.assertIsNone(ens.readline(timeout=0))
|
class EnhancedSerialTests(unittest.TestCase):
def test_peek(self):
pass
def test_readline(self):
pass
| 3 | 0 | 8 | 0 | 8 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 74 | 18 | 2 | 16 | 6 | 13 | 0 | 15 | 5 | 12 | 1 | 2 | 1 | 2 |