scripts: tests: Removal of straggling folders
Current blackbox tests leave two folders, OUT_DIR and TEST_DIR, behind after they finish. Unit tests create two more folders, mock_testsuite and demo_board_2. This change deletes them appropriately. Additionally, the twister-out* folders created by blackbox tests are now placed in a temporary directory and removed after every test.

Signed-off-by: Lukasz Mrugala <lukaszx.mrugala@intel.com>
parent b8188e54a4
commit 23b3e5741e
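For orientation, the pattern the diff below establishes looks like this. A minimal sketch, assuming the autouse `out_path` fixture this commit adds to conftest.py; the test name, test path, and run_twister() helper are hypothetical stand-ins:

import sys

import mock
import pytest


def run_twister():
    # Hypothetical stand-in for exec'ing the real twister module, which
    # exits via sys.exit() just as the real blackbox tests expect.
    sys.exit(0)


def test_example_blackbox(out_path):
    # out_path is injected by the autouse conftest.py fixture; passing it
    # as --outdir keeps twister-out* folders inside pytest's temp directory
    # instead of the source tree.
    args = ['--outdir', out_path, '-T', 'tests/dummy/agnostic']
    with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
            pytest.raises(SystemExit) as sys_exit:
        run_twister()

    assert str(sys_exit.value) == '0'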
@@ -35,7 +35,7 @@ def process_logs(harness, logs):
 
 
 @pytest.fixture
-def gtest():
+def gtest(tmp_path):
     mock_platform = mock.Mock()
     mock_platform.name = "mock_platform"
     mock_testsuite = mock.Mock()
@@ -44,8 +44,10 @@ def gtest():
     mock_testsuite.id = "id"
     mock_testsuite.testcases = []
     mock_testsuite.harness_config = {}
+    outdir = tmp_path / 'gtest_out'
+    outdir.mkdir()
 
-    instance = TestInstance(testsuite=mock_testsuite, platform=mock_platform, outdir="")
+    instance = TestInstance(testsuite=mock_testsuite, platform=mock_platform, outdir=outdir)
 
     harness = Gtest()
     harness.configure(instance)
@@ -244,14 +244,14 @@ def test_apply_filters_part3(class_testplan, all_testsuites_dict, platforms_list
     filtered_instances = list(filter(lambda item: item.status == "filtered", class_testplan.instances.values()))
     assert not filtered_instances
 
-def test_add_instances_short(test_data, class_env, all_testsuites_dict, platforms_list):
+def test_add_instances_short(tmp_path, class_env, all_testsuites_dict, platforms_list):
     """ Testing add_instances() function of TestPlan class in Twister
     Test 1: instances dictionary keys have expected values (Platform Name + Testcase Name)
     Test 2: Values of 'instances' dictionary in Testsuite class are an
             instance of 'TestInstance' class
     Test 3: Values of 'instances' dictionary have expected values.
     """
-    class_env.outdir = test_data
+    class_env.outdir = tmp_path
     plan = TestPlan(class_env)
     plan.platforms = platforms_list
     platform = plan.get_platform("demo_board_2")
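Both hunks above lean on pytest's built-in tmp_path fixture: it yields a unique per-test pathlib.Path under the system temp directory, and pytest itself prunes old runs, so nothing straggles in the repository. A minimal sketch with a hypothetical test name:

import pathlib


def test_tmp_path_isolation(tmp_path: pathlib.Path):
    # tmp_path is unique to this test invocation, so directories created
    # here cannot collide with other tests or pollute ZEPHYR_BASE.
    outdir = tmp_path / 'gtest_out'
    outdir.mkdir()
    assert outdir.is_dir()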
@@ -6,6 +6,7 @@
 '''Common fixtures for use in testing the twister tool.'''
 
 import logging
+import shutil
 import mock
 import os
 import pytest
@@ -43,3 +44,28 @@ def clear_log():
     handlers = getattr(logger, 'handlers', [])
     for handler in handlers:
         logger.removeHandler(handler)
+
+# This fixture provides blackbox tests with an `out_path` parameter.
+# It should be used as the `-O` (`--outdir`) parameter in blackbox tests.
+# APPRECIATED: method of using this out_path wholly outside of test code
+@pytest.fixture(name='out_path', autouse=True)
+def provide_out(tmp_path, request):
+    # As this fixture is autouse, one can use the pytest.mark.noclearout decorator
+    # in order to be sure that this fixture's code will not fire.
+    # Most of the time, just omitting the `out_path` parameter is sufficient.
+    if 'noclearout' in request.keywords:
+        yield
+        return
+
+    # Before
+    out_container_path = tmp_path / 'blackbox-out-container'
+    out_container_path.mkdir()
+    out_path = os.path.join(out_container_path, "blackbox-out")
+
+    # Test
+    yield out_path
+
+    # After
+    # We're operating in temp, so this is not strictly necessary,
+    # but the files can get large quickly and we do not need them after the test.
+    shutil.rmtree(out_container_path)
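Because provide_out is autouse, tests that must manage their own output directory (such as the --report-dir cases further down) opt out via the marker it checks for. A sketch with a hypothetical test; the noclearout marker would also need to be registered in the project's pytest configuration to avoid unknown-marker warnings:

import pytest


@pytest.mark.noclearout
def test_manages_own_outdir():
    # provide_out sees 'noclearout' in request.keywords and yields
    # immediately: no temp out-container is created or removed here.
    ...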
@@ -105,15 +105,14 @@ class TestHardwaremap:
     def teardown_class(cls):
         pass
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         ('manufacturer', 'product', 'serial', 'runner'),
         TESTDATA_1,
     )
-    def test_generate(self, capfd, manufacturer, product, serial, runner):
+    def test_generate(self, capfd, out_path, manufacturer, product, serial, runner):
         file_name = "test-map.yaml"
         path = os.path.join(ZEPHYR_BASE, file_name)
-        args = ['--generate-hardware-map', file_name]
+        args = ['--outdir', out_path, '--generate-hardware-map', file_name]
 
         if os.path.exists(path):
             os.remove(path)
@@ -165,15 +164,14 @@ class TestHardwaremap:
         for handler in handlers:
             logger.removeHandler(handler)
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         ('manufacturer', 'product', 'serial', 'runner'),
         TESTDATA_2,
     )
-    def test_few_generate(self, capfd, manufacturer, product, serial, runner):
+    def test_few_generate(self, capfd, out_path, manufacturer, product, serial, runner):
         file_name = "test-map.yaml"
         path = os.path.join(ZEPHYR_BASE, file_name)
-        args = ['--generate-hardware-map', file_name]
+        args = ['--outdir', out_path, '--generate-hardware-map', file_name]
 
         if os.path.exists(path):
             os.remove(path)
@@ -247,15 +245,14 @@ class TestHardwaremap:
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         ('manufacturer', 'product', 'serial', 'location'),
         TESTDATA_3,
     )
-    def test_texas_exeption(self, capfd, manufacturer, product, serial, location):
+    def test_texas_exeption(self, capfd, out_path, manufacturer, product, serial, location):
         file_name = "test-map.yaml"
         path = os.path.join(ZEPHYR_BASE, file_name)
-        args = ['--generate-hardware-map', file_name]
+        args = ['--outdir', out_path, '--generate-hardware-map', file_name]
 
         if os.path.exists(path):
             os.remove(path)
@@ -92,7 +92,6 @@ class TestPrintOuts:
     def teardown_class(cls):
         pass
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, expected',
         TESTDATA_1,
@@ -101,8 +100,8 @@ class TestPrintOuts:
             'tests/dummy/device',
         ]
     )
-    def test_list_tags(self, capfd, test_path, expected):
-        args = ['-T', test_path, '--list-tags']
+    def test_list_tags(self, capfd, out_path, test_path, expected):
+        args = ['--outdir', out_path, '-T', test_path, '--list-tags']
 
         with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
             pytest.raises(SystemExit) as sys_exit:
@@ -119,7 +118,6 @@ class TestPrintOuts:
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, expected',
         TESTDATA_2,
@@ -128,8 +126,8 @@ class TestPrintOuts:
             'tests/dummy/device',
         ]
     )
-    def test_list_tests(self, capfd, test_path, expected):
-        args = ['-T', test_path, '--list-tests']
+    def test_list_tests(self, capfd, out_path, test_path, expected):
+        args = ['--outdir', out_path, '-T', test_path, '--list-tests']
 
         with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
             pytest.raises(SystemExit) as sys_exit:
@@ -147,7 +145,6 @@ class TestPrintOuts:
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
    @pytest.mark.parametrize(
         'test_path, expected',
         TESTDATA_3,
@@ -156,8 +153,8 @@ class TestPrintOuts:
             'tests/dummy/device',
         ]
     )
-    def test_tree(self, capfd, test_path, expected):
-        args = ['-T', test_path, '--test-tree']
+    def test_tree(self, capfd, out_path, test_path, expected):
+        args = ['--outdir', out_path, '-T', test_path, '--test-tree']
 
         with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
             pytest.raises(SystemExit) as sys_exit:
@@ -176,9 +173,9 @@ class TestPrintOuts:
         TESTDATA_4,
         ids=['tests']
     )
-    def test_timestamps(self, capfd, test_path, test_platforms):
+    def test_timestamps(self, capfd, out_path, test_path, test_platforms):
 
-        args = ['-i', '-T', test_path, '--timestamps', '-v'] + \
+        args = ['-i', '--outdir', out_path, '-T', test_path, '--timestamps', '-v'] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair]
@@ -247,15 +244,14 @@ class TestPrintOuts:
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, test_platforms',
         TESTDATA_4,
         ids=['tests']
     )
-    def test_force_color(self, capfd, test_path, test_platforms):
+    def test_force_color(self, capfd, out_path, test_path, test_platforms):
 
-        args = ['-i', '-T', test_path, '--force-color'] + \
+        args = ['-i', '--outdir', out_path, '-T', test_path, '--force-color'] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair]
@@ -74,8 +74,6 @@ class TestQEMU:
     def teardown_class(cls):
         pass
-
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, test_platforms, expected',
         TESTDATA_1,
@@ -84,8 +82,8 @@ class TestQEMU:
             'tests/dummy/device',
         ]
     )
-    def test_emulation_only(self, capfd, test_path, test_platforms, expected):
-        args = ['-i', '-T', test_path, '--emulation-only'] + \
+    def test_emulation_only(self, capfd, out_path, test_path, test_platforms, expected):
+        args = ['-i', '--outdir', out_path, '-T', test_path, '--emulation-only'] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair]
@@ -110,7 +110,6 @@ class TestReport:
     def teardown_class(cls):
         pass
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, test_platforms, file_name',
         TESTDATA_1,
@@ -118,14 +117,11 @@ class TestReport:
             'platform_reports'
         ]
     )
-    def test_platform_reports(self, capfd, test_path, test_platforms, file_name):
-        args = ['-i', '-T', test_path, '--platform-reports'] + \
+    def test_platform_reports(self, capfd, out_path, test_path, test_platforms, file_name):
+        args = ['-i', '--outdir', out_path, '-T', test_path, '--platform-reports'] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair]
-        twister_path = os.path.join(ZEPHYR_BASE, "twister-out")
-        if os.path.exists(twister_path):
-            shutil.rmtree(twister_path)
 
         with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
             pytest.raises(SystemExit) as sys_exit:
@@ -136,7 +132,7 @@ class TestReport:
         sys.stderr.write(err)
 
         for f_name in file_name:
-            path = os.path.join(twister_path, f_name)
+            path = os.path.join(out_path, f_name)
             assert os.path.exists(path), 'file not found'
 
             if path.endswith(".json"):
@@ -158,12 +154,11 @@ class TestReport:
                 pytest.fail(f"Unsupported file type: '{path}'")
 
         for f_platform in test_platforms:
-            platform_path = os.path.join(twister_path, f_platform)
+            platform_path = os.path.join(out_path, f_platform)
             assert os.path.exists(platform_path), f'file not found {f_platform}'
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, test_platforms, file_name',
         TESTDATA_2,
@@ -171,16 +166,12 @@ class TestReport:
             'report_suffix',
         ]
     )
-    def test_report_suffix(self, capfd, test_path, test_platforms, file_name):
-        args = ['-i', '-T', test_path, '--platform-reports', '--report-suffix=TEST'] + \
+    def test_report_suffix(self, capfd, out_path, test_path, test_platforms, file_name):
+        args = ['-i', '--outdir', out_path, '-T', test_path, '--platform-reports', '--report-suffix=TEST'] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair]
 
-        twister_path = os.path.join(ZEPHYR_BASE, "twister-out")
-        if os.path.exists(twister_path):
-            shutil.rmtree(twister_path)
-
         with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
             pytest.raises(SystemExit) as sys_exit:
             self.loader.exec_module(self.twister_module)
@@ -190,12 +181,11 @@ class TestReport:
         sys.stderr.write(err)
 
         for f_name in file_name:
-            path = os.path.join(twister_path, f_name)
+            path = os.path.join(out_path, f_name)
             assert os.path.exists(path), f'file not found {f_name}'
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, test_platforms, report_arg, file_name',
         TESTDATA_3,
@@ -205,8 +195,8 @@ class TestReport:
             'report-name + platform-reports + report-suffix'
         ]
     )
-    def test_report_name(self, capfd, test_path, test_platforms, report_arg, file_name):
-        args = ['-i', '-T', test_path] + \
+    def test_report_name(self, capfd, out_path, test_path, test_platforms, report_arg, file_name):
+        args = ['-i', '--outdir', out_path, '-T', test_path] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair] + \
@@ -214,10 +204,6 @@ class TestReport:
             report_arg
         ) for val in pair]
 
-        twister_path = os.path.join(ZEPHYR_BASE, "twister-out")
-        if os.path.exists(twister_path):
-            shutil.rmtree(twister_path)
-
         with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
             pytest.raises(SystemExit) as sys_exit:
             self.loader.exec_module(self.twister_module)
@@ -227,12 +213,11 @@ class TestReport:
         sys.stderr.write(err)
 
         for f_name in file_name:
-            path = os.path.join(twister_path, f_name)
+            path = os.path.join(out_path, f_name)
             assert os.path.exists(path), f'file not found {f_name}'
 
         assert str(sys_exit.value) == '0'
-
     @pytest.mark.usefixtures("clear_log")
     @pytest.mark.parametrize(
         'test_path, test_platforms, file_name, dir_name',
         TESTDATA_4,
@@ -240,8 +225,8 @@ class TestReport:
             'report_dir',
         ]
     )
-    def test_report_dir(self, capfd, test_path, test_platforms, file_name, dir_name):
-        args = ['-i', '-T', test_path, "--report-dir", dir_name] + \
+    def test_report_dir(self, capfd, out_path, test_path, test_platforms, file_name, dir_name):
+        args = ['-i', '--outdir', out_path, '-T', test_path, "--report-dir", dir_name] + \
             [val for pair in zip(
                 ['-p'] * len(test_platforms), test_platforms
             ) for val in pair]
@@ -250,21 +235,26 @@ class TestReport:
         if os.path.exists(twister_path):
             shutil.rmtree(twister_path)
 
-        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
-            pytest.raises(SystemExit) as sys_exit:
-            self.loader.exec_module(self.twister_module)
+        try:
+            with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
+                pytest.raises(SystemExit) as sys_exit:
+                self.loader.exec_module(self.twister_module)
 
-        out, err = capfd.readouterr()
-        sys.stdout.write(out)
-        sys.stderr.write(err)
+            out, err = capfd.readouterr()
+            sys.stdout.write(out)
+            sys.stderr.write(err)
 
-        for f_name in file_name:
-            path = os.path.join(twister_path, f_name)
-            assert os.path.exists(path), f'file not found {f_name}'
+            for f_name in file_name:
+                path = os.path.join(twister_path, f_name)
+                assert os.path.exists(path), f'file not found {f_name}'
 
-        assert str(sys_exit.value) == '0'
-
+            assert str(sys_exit.value) == '0'
+        finally:
+            twister_path = os.path.join(ZEPHYR_BASE, dir_name)
+            if os.path.exists(twister_path):
+                shutil.rmtree(twister_path)
     @pytest.mark.usefixtures("clear_log")
+    @pytest.mark.noclearout
     @pytest.mark.parametrize(
         'test_path, test_platforms, file_name, dir_name',
         TESTDATA_5,
@@ -290,12 +280,17 @@ class TestReport:
         sys.stdout.write(out)
         sys.stderr.write(err)
 
-        for f_name in file_name:
-            path = os.path.join(twister_path, f_name)
-            assert os.path.exists(path), 'file not found {f_name}'
+        try:
+            for f_name in file_name:
+                path = os.path.join(twister_path, f_name)
+                assert os.path.exists(path), 'file not found {f_name}'
 
-        for f_platform in test_platforms:
-            platform_path = os.path.join(twister_path, f_platform)
-            assert os.path.exists(platform_path), f'file not found {f_platform}'
+            for f_platform in test_platforms:
+                platform_path = os.path.join(twister_path, f_platform)
+                assert os.path.exists(platform_path), f'file not found {f_platform}'
 
-        assert str(sys_exit.value) == '0'
+            assert str(sys_exit.value) == '0'
+        finally:
+            twister_path = os.path.join(ZEPHYR_BASE, dir_name)
+            if os.path.exists(twister_path):
+                shutil.rmtree(twister_path)