#!/usr/bin/env python3
# Copyright (c) 2024 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
"""
Blackbox tests for twister's command line functions - those requiring testplan.json
"""

import importlib
import json
import mock
import os
import pytest
import sys

# pylint: disable=no-name-in-module
from conftest import ZEPHYR_BASE, TEST_DATA, testsuite_filename_mock
from twisterlib.testplan import TestPlan
from twisterlib.error import TwisterRuntimeError


class TestTestPlan:
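    # Each entry: (--sub-test argument, expected exception from the twister
    # run, expected number of selected test cases in testplan.json).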
    TESTDATA_1 = [
        ('dummy.agnostic.group2.a2_tests.assert1', SystemExit, 4),
        (
            os.path.join('scripts', 'tests', 'twister_blackbox', 'test_data', 'tests',
                         'dummy', 'agnostic', 'group1', 'subgroup1',
                         'dummy.agnostic.group2.assert1'),
            TwisterRuntimeError,
            None
        ),
    ]
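    # Each entry: (--filter argument, expected number of selected test cases
    # in testplan.json).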
    TESTDATA_2 = [
        ('buildable', 7),
        ('runnable', 5),
    ]
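    # Each entry: (whether --integration is passed, expected number of
    # selected test cases in testplan.json).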
    TESTDATA_3 = [
        (True, 1),
        (False, 7),
    ]

    @classmethod
    def setup_class(cls):
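        # Load the twister entry script under the name '__main__' so that
        # exec_module() in the tests below behaves like a command-line
        # invocation of twister.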
        apath = os.path.join(ZEPHYR_BASE, 'scripts', 'twister')
        cls.loader = importlib.machinery.SourceFileLoader('__main__', apath)
        cls.spec = importlib.util.spec_from_loader(cls.loader.name, cls.loader)
        cls.twister_module = importlib.util.module_from_spec(cls.spec)

    @classmethod
    def teardown_class(cls):
        pass

    @pytest.mark.parametrize(
        'test, expected_exception, expected_subtest_count',
        TESTDATA_1,
        ids=['valid', 'not found']
    )
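    # testsuite_filename_mock (from conftest) redirects TestPlan's suite
    # discovery to the blackbox test data files so that the dummy suites
    # under TEST_DATA are collected.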
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_subtest(self, out_path, test, expected_exception, expected_subtest_count):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy')
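        # Build twister's argv; the trailing comprehension interleaves a '-p'
        # flag with each requested platform, e.g.
        # ['-p', 'qemu_x86', '-p', 'intel_adl_crb'].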
        args = ['-i', '--outdir', out_path, '-T', path, '--sub-test', test, '-y'] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(expected_exception) as exc:
            self.loader.exec_module(self.twister_module)

        if expected_exception != SystemExit:
            # pytest.raises above already verified the expected error; a run
            # that fails this way is not expected to produce a test plan to
            # inspect, so there is nothing further to check.
            return

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
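        # Keep (platform, suite name, testcase identifier) for every test case
        # without a 'reason' field, i.e. one that was not filtered out of the
        # generated test plan.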
        filtered_j = [
            (ts['platform'], ts['name'], tc['identifier']) \
                for ts in j['testsuites'] \
                for tc in ts['testcases'] if 'reason' not in tc
        ]

        assert str(exc.value) == '0'
        assert len(filtered_j) == expected_subtest_count

    @pytest.mark.parametrize(
        'filter, expected_count',
        TESTDATA_2,
        ids=['buildable', 'runnable']
    )
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    def test_filter(self, out_path, filter, expected_count):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy')
        args = ['-i', '--outdir', out_path, '-T', path, '--filter', filter, '-y'] + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as exc:
            self.loader.exec_module(self.twister_module)

        assert str(exc.value) == '0'
        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
        filtered_j = [
            (ts['platform'], ts['name'], tc['identifier']) \
                for ts in j['testsuites'] \
                for tc in ts['testcases'] if 'reason' not in tc
        ]

        assert expected_count == len(filtered_j)

    @pytest.mark.parametrize(
        'integration, expected_count',
        TESTDATA_3,
        ids=['integration', 'no integration']
    )
    @mock.patch.object(TestPlan, 'TESTSUITE_FILENAME', testsuite_filename_mock)
    @mock.patch.object(TestPlan, 'SAMPLE_FILENAME', '')
    def test_integration(self, out_path, integration, expected_count):
        test_platforms = ['qemu_x86', 'intel_adl_crb']
        path = os.path.join(TEST_DATA, 'tests', 'dummy')
        args = ['-i', '--outdir', out_path, '-T', path, '-y'] + \
               (['--integration'] if integration else []) + \
               [val for pair in zip(
                   ['-p'] * len(test_platforms), test_platforms
               ) for val in pair]

        with mock.patch.object(sys, 'argv', [sys.argv[0]] + args), \
                pytest.raises(SystemExit) as exc:
            self.loader.exec_module(self.twister_module)

        assert str(exc.value) == '0'

        with open(os.path.join(out_path, 'testplan.json')) as f:
            j = json.load(f)
        filtered_j = [
            (ts['platform'], ts['name'], tc['identifier']) \
                for ts in j['testsuites'] \
                for tc in ts['testcases'] if 'reason' not in tc
        ]

        assert expected_count == len(filtered_j)
