Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

scripts: twister: Add CTest harness #83297

Merged
merged 4 commits into from
Jan 10, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/west_cmds.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ jobs:
${{ runner.os }}-pip-${{ matrix.python-version }}
- name: install pytest
run: |
pip install pytest west pyelftools canopen natsort progress mypy intelhex psutil ply pyserial anytree
pip install pytest west pyelftools canopen natsort progress mypy intelhex psutil ply pyserial anytree junitparser
- name: run pytest-win
if: runner.os == 'Windows'
run: |
Expand Down
7 changes: 7 additions & 0 deletions doc/develop/test/twister.rst
Original file line number Diff line number Diff line change
Expand Up @@ -526,6 +526,7 @@ harness: <string>
- pytest
- gtest
- robot
- ctest

Harnesses ``ztest``, ``gtest`` and ``console`` are based on parsing of the
output and matching certain phrases. ``ztest`` and ``gtest`` harnesses look
Expand Down Expand Up @@ -691,6 +692,12 @@ harness_config: <harness configuration options>
If the scope is set to ``function``, DUT is launched for every test case
in python script. For ``session`` scope, DUT is launched only once.

ctest_args: <list of arguments> (default empty)
Specify a list of additional arguments to pass to ``ctest`` e.g.:
``ctest_args: ['--repeat until-pass:5']``. Note that
``--ctest-args`` can be passed multiple times to pass several arguments
to ctest.

robot_testsuite: <robot file path> (default empty)
Specify one or more paths to a file containing a Robot Framework test suite to be run.

Expand Down
6 changes: 6 additions & 0 deletions scripts/pylib/twister/twisterlib/environment.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,12 @@ def add_parse_arguments(parser = None) -> argparse.ArgumentParser:
will extend the pytest_args from the harness_config in YAML file.
""")

parser.add_argument(
"--ctest-args", action="append",
help="""Pass additional arguments to the ctest subprocess. This parameter
will extend the ctest_args from the harness_config in YAML file.
""")

valgrind_asan_group.add_argument(
"--enable-valgrind", action="store_true",
help="""Run binary through valgrind and check for several memory access
Expand Down
137 changes: 137 additions & 0 deletions scripts/pylib/twister/twisterlib/harness.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from collections import OrderedDict
from enum import Enum

import junitparser.junitparser as junit
from pytest import ExitCode
from twisterlib.constants import SUPPORTED_SIMS_IN_PYTEST
from twisterlib.environment import PYTEST_PLUGIN_INSTALLED, ZEPHYR_BASE
Expand Down Expand Up @@ -947,6 +948,142 @@ def build(self):
logger.debug(f'Copying executable from {original_exe_path} to {new_exe_path}')
shutil.copy(original_exe_path, new_exe_path)

class Ctest(Harness):
    """Harness that delegates test execution to CMake's ``ctest`` runner.

    Instead of parsing device console output, this harness runs ``ctest``
    in the instance build directory, has it emit a JUnit XML report, and
    maps that report back onto Twister test-case statuses.
    """

    def configure(self, instance: TestInstance):
        """Bind the harness to *instance* and derive per-run file paths.

        Sets up the build directory as the ctest working dir, plus the
        JUnit report and log file paths inside it.
        """
        super().configure(instance)
        self.running_dir = instance.build_dir
        # JUnit XML produced by `ctest --output-junit`, parsed afterwards.
        self.report_file = os.path.join(self.running_dir, 'report.xml')
        # Log written by `ctest --output-log` (and overwritten below on
        # ctest usage/internal errors with the command + captured output).
        self.ctest_log_file_path = os.path.join(self.running_dir, 'twister_harness.log')
        # Lines captured from the ctest stdout/stderr stream.
        self._output = []

    def ctest_run(self, timeout):
        """Run ctest for this instance and record the outcome.

        Any exception from command generation or execution marks the
        instance FAIL; status propagation to the instance always happens
        in the ``finally`` branch via ``_update_test_status``.

        :param timeout: overall timeout in seconds for the ctest run.
        """
        assert self.instance is not None
        try:
            cmd = self.generate_command()
            self.run_command(cmd, timeout)
        except Exception as err:
            logger.error(str(err))
            self.status = TwisterStatus.FAIL
            self.instance.reason = str(err)
        finally:
            # Persist any recorded regex captures, then translate the
            # harness status / JUnit report into instance test cases.
            self.instance.record(self.recording)
            self._update_test_status()

    def generate_command(self):
        """Build the ctest argument vector.

        Argument precedence (later wins for repeated options): base
        options, per-suite ``harness_config.ctest_args`` from YAML, then
        command-line ``--ctest-args``.

        :return: list of strings suitable for ``subprocess.Popen``.
        """
        config = self.instance.testsuite.harness_config
        handler: Handler = self.instance.handler
        ctest_args_yaml = config.get('ctest_args', []) if config else []
        command = [
            'ctest',
            # The project is already built by Twister; skip re-running CMake.
            '--build-nocmake',
            '--test-dir',
            self.running_dir,
            '--output-junit',
            self.report_file,
            '--output-log',
            self.ctest_log_file_path,
            '--output-on-failure',
        ]
        # Per-test timeout enforced by ctest itself, distinct from the
        # whole-run timeout applied in run_command().
        base_timeout = handler.get_test_timeout()
        command.extend(['--timeout', str(base_timeout)])
        command.extend(ctest_args_yaml)

        if handler.options.ctest_args:
            command.extend(handler.options.ctest_args)

        return command

    def run_command(self, cmd, timeout):
        """Execute *cmd*, streaming output through a reader thread.

        A daemon thread consumes stdout (stderr is merged into it) so the
        pipe cannot fill up and block ctest. If the thread is still alive
        after *timeout* seconds the process tree is terminated and the
        instance is marked FAIL.

        :param cmd: ctest argument vector from ``generate_command``.
        :param timeout: seconds to wait for the reader thread / process.
        """
        with subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        ) as proc:
            try:
                reader_t = threading.Thread(target=self._output_reader, args=(proc,), daemon=True)
                reader_t.start()
                reader_t.join(timeout)
                if reader_t.is_alive():
                    # Reader did not finish in time: ctest is hung or slow.
                    terminate_process(proc)
                    logger.warning('Timeout has occurred. Can be extended in testspec file. '
                                   f'Currently set to {timeout} seconds.')
                    self.instance.reason = 'Ctest timeout'
                    self.status = TwisterStatus.FAIL
                proc.wait(timeout)
            except subprocess.TimeoutExpired:
                self.status = TwisterStatus.FAIL
                proc.kill()

        # NOTE(review): these are pytest ExitCode values (2, 3, 4) being
        # compared against the ctest return code — they appear to be used
        # here as generic "tool error" codes; confirm against ctest's
        # actual exit-code contract.
        if proc.returncode in (ExitCode.INTERRUPTED, ExitCode.USAGE_ERROR, ExitCode.INTERNAL_ERROR):
            self.status = TwisterStatus.ERROR
            self.instance.reason = f'Ctest error - return code {proc.returncode}'
            # NOTE(review): this overwrites the log file that ctest itself
            # wrote via --output-log, replacing it with the command line
            # and the captured output — presumably intentional so the log
            # reflects the failed invocation; verify.
            with open(self.ctest_log_file_path, 'w') as log_file:
                log_file.write(shlex.join(cmd) + '\n\n')
                log_file.write('\n'.join(self._output))

    def _output_reader(self, proc):
        """Drain *proc* stdout line by line until the process exits.

        Each non-empty line is stored in ``self._output``, logged, and fed
        to ``parse_record`` for regex-based record extraction.
        """
        self._output = []
        # NOTE(review): the loop stops once poll() reports the process has
        # exited; any lines still buffered at that point are drained (but
        # discarded) by communicate() below.
        while proc.stdout.readable() and proc.poll() is None:
            line = proc.stdout.readline().decode().strip()
            if not line:
                continue
            self._output.append(line)
            logger.debug(f'CTEST: {line}')
            self.parse_record(line)
        # Flush remaining pipe content and reap the process.
        proc.communicate()

    def _update_test_status(self):
        """Finalize harness and instance status after the run.

        If no status was set during execution, parse the JUnit report to
        derive one. Any instance left non-PASS gets a reason and its
        missing test cases marked BLOCK.
        """
        if self.status == TwisterStatus.NONE:
            # No verdict yet (no error/timeout during the run): rebuild the
            # case list from the JUnit report.
            self.instance.testcases = []
            try:
                self._parse_report_file(self.report_file)
            except Exception as e:
                logger.error(f'Error when parsing file {self.report_file}: {e}')
                self.status = TwisterStatus.FAIL
            finally:
                # Fall back to the YAML-declared cases if the report
                # yielded none.
                if not self.instance.testcases:
                    self.instance.init_cases()

        # An unresolved status at this point counts as a failure.
        self.instance.status = self.status if self.status != TwisterStatus.NONE else \
            TwisterStatus.FAIL
        if self.instance.status in [TwisterStatus.ERROR, TwisterStatus.FAIL]:
            self.instance.reason = self.instance.reason or 'Ctest failed'
            self.instance.add_missing_case_status(TwisterStatus.BLOCK, self.instance.reason)

    def _parse_report_file(self, report):
        """Translate the ctest JUnit XML *report* into Twister statuses.

        Sets the overall harness status from the suite-level counters and
        creates one Twister test case per JUnit case, mapping
        Failure/Error/Skipped results onto the corresponding statuses.

        :param report: path to the JUnit XML file written by ctest.
        """
        suite = junit.JUnitXml.fromfile(report)
        if suite is None:
            self.status = TwisterStatus.SKIP
            self.instance.reason = 'No tests collected'
            return

        # ctest emits a single top-level <testsuite>, not a <testsuites>
        # wrapper, so fromfile() returns a TestSuite here.
        assert isinstance(suite, junit.TestSuite)

        if suite.failures and suite.failures > 0:
            self.status = TwisterStatus.FAIL
            self.instance.reason = f"{suite.failures}/{suite.tests} ctest scenario(s) failed"
        elif suite.errors and suite.errors > 0:
            self.status = TwisterStatus.ERROR
            self.instance.reason = 'Error during ctest execution'
        elif suite.skipped and suite.skipped > 0:
            self.status = TwisterStatus.SKIP
        else:
            self.status = TwisterStatus.PASS
        self.instance.execution_time = suite.time

        for case in suite:
            tc = self.instance.add_testcase(f"{self.id}.{case.name}")
            tc.duration = case.time
            # A JUnit case may carry multiple result elements; the first
            # matching kind (failure > error > skipped) wins.
            if any(isinstance(r, junit.Failure) for r in case.result):
                tc.status = TwisterStatus.FAIL
                tc.output = case.system_out
            elif any(isinstance(r, junit.Error) for r in case.result):
                tc.status = TwisterStatus.ERROR
                tc.output = case.system_out
            elif any(isinstance(r, junit.Skipped) for r in case.result):
                tc.status = TwisterStatus.SKIP
            else:
                tc.status = TwisterStatus.PASS

class HarnessImporter:

Expand Down
4 changes: 3 additions & 1 deletion scripts/pylib/twister/twisterlib/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
sys.path.insert(0, os.path.join(ZEPHYR_BASE, "scripts/pylib/build_helpers"))
from domains import Domains
from twisterlib.environment import TwisterEnv
from twisterlib.harness import HarnessImporter, Pytest
from twisterlib.harness import Ctest, HarnessImporter, Pytest
from twisterlib.log_helper import log_command
from twisterlib.platform import Platform
from twisterlib.testinstance import TestInstance
Expand Down Expand Up @@ -1717,6 +1717,8 @@ def run(self):
#
if isinstance(harness, Pytest):
harness.pytest_run(instance.handler.get_test_timeout())
elif isinstance(harness, Ctest):
harness.ctest_run(instance.handler.get_test_timeout())
else:
instance.handler.handle(harness)

Expand Down
5 changes: 4 additions & 1 deletion scripts/pylib/twister/twisterlib/testinstance.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ def get_case_or_create(self, name):
def testsuite_runnable(testsuite, fixtures):
can_run = False
# console harness allows us to run the test and capture data.
if testsuite.harness in [ 'console', 'ztest', 'pytest', 'test', 'gtest', 'robot']:
if testsuite.harness in ['console', 'ztest', 'pytest', 'test', 'gtest', 'robot', 'ctest']:
can_run = True
# if we have a fixture that is also being supplied on the
# command-line, then we need to run the test, not just build it.
Expand Down Expand Up @@ -252,6 +252,8 @@ def setup_handler(self, env: TwisterEnv):
handler.ready = True
else:
handler = Handler(self, "", *common_args)
if self.testsuite.harness == "ctest":
handler.ready = True

self.handler = handler

Expand Down Expand Up @@ -287,6 +289,7 @@ def check_runnable(self,

target_ready = bool(self.testsuite.type == "unit" or \
self.platform.type == "native" or \
self.testsuite.harness == "ctest" or \
(simulator and simulator.name in SUPPORTED_SIMS and \
simulator.name not in self.testsuite.simulation_exclude) or \
device_testing)
Expand Down
3 changes: 3 additions & 0 deletions scripts/requirements-build-test.txt
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,6 @@ mypy

# used for mocking functions in pytest
mock>=4.0.1

# used for JUnit XML parsing in CTest harness
junitparser
5 changes: 5 additions & 0 deletions scripts/schemas/twister/testsuite-schema.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,11 @@ schema;scenario-schema:
type: str
enum: ["function", "class", "module", "package", "session"]
required: false
"ctest_args":
type: seq
required: false
sequence:
- type: str
"regex":
type: seq
required: false
Expand Down
26 changes: 26 additions & 0 deletions tests/ctest/base/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Copyright (c) 2024 Basalte bv
# SPDX-License-Identifier: Apache-2.0

# Minimal Zephyr application used to exercise Twister's CTest harness:
# builds an empty native_sim image and registers a few ctest cases
# against the resulting executable.

cmake_minimum_required(VERSION 3.20.0)
find_package(Zephyr REQUIRED HINTS $ENV{ZEPHYR_BASE})

project(ctest_base)

# The app needs at least one source file; use Zephyr's empty stub.
target_sources(app PRIVATE ${ZEPHYR_BASE}/misc/empty_file.c)

enable_testing()
include(CTest)

# Run the built native_sim executable directly as ctest cases.
add_test(NAME exe_help COMMAND ${CMAKE_BINARY_DIR}/zephyr/zephyr.exe --help)
add_test(NAME exe_run COMMAND ${CMAKE_BINARY_DIR}/zephyr/zephyr.exe -stop_at=3 -no-rt)
# exe_run passes only if the simulator reports where it stopped.
set_property(
  TEST exe_run
  PROPERTY PASS_REGULAR_EXPRESSION "Stopped at [0-9.]+s"
)

# A test that always succeeds
add_test(NAME success COMMAND ${CMAKE_COMMAND} -E true)

# A test that is expected to fail
add_test(NAME failure COMMAND ${CMAKE_COMMAND} -E false)
set_property(TEST failure PROPERTY WILL_FAIL true)
1 change: 1 addition & 0 deletions tests/ctest/base/prj.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Intentionally empty
10 changes: 10 additions & 0 deletions tests/ctest/base/testcase.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Twister scenario exercising the CTest harness: the actual test cases
# are defined in the accompanying CMakeLists.txt via add_test().
common:
  tags:
    - test_framework
  # Runs the built executable on the host, so restrict to native_sim.
  platform_allow:
    - native_sim
  integration_platforms:
    - native_sim
  harness: ctest
tests:
  testing.ctest.base: {}
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we add a ctest_args example as well

Loading