Adding upstream version 1.1.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann <daniel@debian.org>, 2025-02-05 11:54:23 +01:00
parent f13b7abbd8
commit 77504588ab
Signed by: daniel (GPG key ID: FBB4F0E80A80222F)
196 changed files with 10121 additions and 3780 deletions


@@ -0,0 +1,85 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""See https://docs.pytest.org/en/stable/reference/fixtures.html#conftest-py-sharing-fixtures-across-multiple-files."""
import json
from pathlib import Path
from typing import Callable
import pytest
from anta.device import AntaDevice
from anta.result_manager import ResultManager
from anta.result_manager.models import TestResult
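# Pre-computed results for devices DC1-SPINE1 and DC1-LEAF1A, consumed by the result_manager fixture below.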
TEST_RESULTS: Path = Path(__file__).parent.resolve() / "test_files" / "test_md_report_results.json"
@pytest.fixture
def result_manager_factory(list_result_factory: Callable[[int], list[TestResult]]) -> Callable[[int], ResultManager]:
"""Return a ResultManager factory that takes as input a number of tests."""
# pylint: disable=redefined-outer-name
def _factory(number: int = 0) -> ResultManager:
"""Create a factory for list[TestResult] entry of size entries."""
result_manager = ResultManager()
result_manager.results = list_result_factory(number)
return result_manager
return _factory
@pytest.fixture
def result_manager() -> ResultManager:
"""Return a ResultManager with 30 random tests loaded from a JSON file.
Devices: DC1-SPINE1, DC1-LEAF1A
- Total tests: 30
- Success: 7
- Skipped: 2
- Failure: 19
- Error: 2
See `tests/units/result_manager/test_md_report_results.json` for details.
"""
manager = ResultManager()
with TEST_RESULTS.open("r", encoding="utf-8") as f:
results = json.load(f)
for result in results:
manager.add(TestResult(**result))
return manager
@pytest.fixture
def test_result_factory(device: AntaDevice) -> Callable[[int], TestResult]:
"""Return a anta.result_manager.models.TestResult object."""
# pylint: disable=redefined-outer-name
def _create(index: int = 0) -> TestResult:
"""Actual Factory."""
return TestResult(
name=device.name,
test=f"VerifyTest{index}",
categories=["test"],
description=f"Verifies Test {index}",
custom_field=None,
)
return _create
@pytest.fixture
def list_result_factory(test_result_factory: Callable[[int], TestResult]) -> Callable[[int], list[TestResult]]:
"""Return a list[TestResult] with 'size' TestResult instantiated using the test_result_factory fixture."""
# pylint: disable=redefined-outer-name
def _factory(size: int = 0) -> list[TestResult]:
"""Create a factory for list[TestResult] entry of size entries."""
return [test_result_factory(i) for i in range(size)]
return _factory
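
A minimal usage sketch of how these fixtures compose in a test module (illustrative only, not part of this commit; it relies solely on ResultManager.add and ResultManager.get_total_results, both exercised in the test suite below):

def test_factories_sketch(result_manager_factory, test_result_factory) -> None:
    """Hypothetical test combining the factories above."""
    manager = result_manager_factory(3)  # ResultManager pre-loaded with 3 generated TestResult entries
    assert manager.get_total_results() == 3
    manager.add(test_result_factory())  # append one more freshly built TestResult
    assert manager.get_total_results() == 4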


@@ -6,15 +6,16 @@
from __future__ import annotations
import json
import re
from contextlib import AbstractContextManager, nullcontext
from typing import TYPE_CHECKING, Callable
import pytest
from anta.result_manager import ResultManager, models
from anta.result_manager.models import AntaTestStatus
if TYPE_CHECKING:
from anta.custom_types import TestStatus
from anta.result_manager.models import TestResult
@@ -55,7 +56,7 @@ class TestResultManager:
success_list = list_result_factory(3)
for test in success_list:
test.result = "success"
test.result = AntaTestStatus.SUCCESS
result_manager.results = success_list
json_res = result_manager.json
@@ -71,6 +72,27 @@ class TestResultManager:
assert test.get("custom_field") is None
assert test.get("result") == "success"
def test_sorted_category_stats(self, list_result_factory: Callable[[int], list[TestResult]]) -> None:
"""Test ResultManager.sorted_category_stats."""
result_manager = ResultManager()
results = list_result_factory(4)
# Modify the categories to have a mix of different acronym categories
results[0].categories = ["ospf"]
results[1].categories = ["bgp"]
results[2].categories = ["vxlan"]
results[3].categories = ["system"]
result_manager.results = results
# Check the current categories order
expected_order = ["ospf", "bgp", "vxlan", "system"]
assert list(result_manager.category_stats.keys()) == expected_order
# Check the sorted categories order
expected_order = ["bgp", "ospf", "system", "vxlan"]
assert list(result_manager.sorted_category_stats.keys()) == expected_order
@pytest.mark.parametrize(
("starting_status", "test_status", "expected_status", "expected_raise"),
[
@@ -119,29 +141,26 @@ class TestResultManager:
nullcontext(),
id="failure, add success",
),
pytest.param(
"unset", "unknown", None, pytest.raises(ValueError, match="Input should be 'unset', 'success', 'failure', 'error' or 'skipped'"), id="wrong status"
),
pytest.param("unset", "unknown", None, pytest.raises(ValueError, match="'unknown' is not a valid AntaTestStatus"), id="wrong status"),
],
)
def test_add(
self,
test_result_factory: Callable[[], TestResult],
starting_status: TestStatus,
test_status: TestStatus,
starting_status: str,
test_status: str,
expected_status: str,
expected_raise: AbstractContextManager[Exception],
) -> None:
# pylint: disable=too-many-arguments
"""Test ResultManager_update_status."""
result_manager = ResultManager()
result_manager.status = starting_status
result_manager.status = AntaTestStatus(starting_status)
assert result_manager.error_status is False
assert len(result_manager) == 0
test = test_result_factory()
test.result = test_status
with expected_raise:
test.result = AntaTestStatus(test_status)
result_manager.add(test)
if test_status == "error":
assert result_manager.error_status is True
@@ -149,6 +168,91 @@ class TestResultManager:
assert result_manager.status == expected_status
assert len(result_manager) == 1
def test_add_clear_cache(self, result_manager: ResultManager, test_result_factory: Callable[[], TestResult]) -> None:
"""Test ResultManager.add and make sure the cache is reset after adding a new test."""
# Check the cache is empty
assert "results_by_status" not in result_manager.__dict__
# Access the cache
assert result_manager.get_total_results() == 30
# Check the cache is filled with the correct results count
assert "results_by_status" in result_manager.__dict__
assert sum(len(v) for v in result_manager.__dict__["results_by_status"].values()) == 30
# Add a new test
result_manager.add(result=test_result_factory())
# Check the cache has been reset
assert "results_by_status" not in result_manager.__dict__
# Access the cache again
assert result_manager.get_total_results() == 31
# Check the cache is filled again with the correct results count
assert "results_by_status" in result_manager.__dict__
assert sum(len(v) for v in result_manager.__dict__["results_by_status"].values()) == 31
def test_get_results(self, result_manager: ResultManager) -> None:
"""Test ResultManager.get_results."""
# Check for single status
success_results = result_manager.get_results(status={AntaTestStatus.SUCCESS})
assert len(success_results) == 7
assert all(r.result == "success" for r in success_results)
# Check for multiple statuses
failure_results = result_manager.get_results(status={AntaTestStatus.FAILURE, AntaTestStatus.ERROR})
assert len(failure_results) == 21
assert all(r.result in {"failure", "error"} for r in failure_results)
# Check all results
all_results = result_manager.get_results()
assert len(all_results) == 30
def test_get_results_sort_by(self, result_manager: ResultManager) -> None:
"""Test ResultManager.get_results with sort_by."""
# Check all results with sort_by result
all_results = result_manager.get_results(sort_by=["result"])
assert len(all_results) == 30
assert [r.result for r in all_results] == ["error"] * 2 + ["failure"] * 19 + ["skipped"] * 2 + ["success"] * 7
# Check all results with sort_by device (name)
all_results = result_manager.get_results(sort_by=["name"])
assert len(all_results) == 30
assert all_results[0].name == "DC1-LEAF1A"
assert all_results[-1].name == "DC1-SPINE1"
# Check multiple statuses with sort_by categories
success_skipped_results = result_manager.get_results(status={AntaTestStatus.SUCCESS, AntaTestStatus.SKIPPED}, sort_by=["categories"])
assert len(success_skipped_results) == 9
assert success_skipped_results[0].categories == ["Interfaces"]
assert success_skipped_results[-1].categories == ["VXLAN"]
# Check all results with bad sort_by
with pytest.raises(
ValueError,
match=re.escape(
"Invalid sort_by fields: ['bad_field']. Accepted fields are: ['name', 'test', 'categories', 'description', 'result', 'messages', 'custom_field']",
),
):
all_results = result_manager.get_results(sort_by=["bad_field"])
def test_get_total_results(self, result_manager: ResultManager) -> None:
"""Test ResultManager.get_total_results."""
# Test all results
assert result_manager.get_total_results() == 30
# Test single status
assert result_manager.get_total_results(status={AntaTestStatus.SUCCESS}) == 7
assert result_manager.get_total_results(status={AntaTestStatus.FAILURE}) == 19
assert result_manager.get_total_results(status={AntaTestStatus.ERROR}) == 2
assert result_manager.get_total_results(status={AntaTestStatus.SKIPPED}) == 2
# Test multiple statuses
assert result_manager.get_total_results(status={AntaTestStatus.SUCCESS, AntaTestStatus.FAILURE}) == 26
assert result_manager.get_total_results(status={AntaTestStatus.SUCCESS, AntaTestStatus.FAILURE, AntaTestStatus.ERROR}) == 28
assert result_manager.get_total_results(status={AntaTestStatus.SUCCESS, AntaTestStatus.FAILURE, AntaTestStatus.ERROR, AntaTestStatus.SKIPPED}) == 30
@pytest.mark.parametrize(
("status", "error_status", "ignore_error", "expected_status"),
[
@@ -159,7 +263,7 @@ class TestResultManager:
)
def test_get_status(
self,
status: TestStatus,
status: AntaTestStatus,
error_status: bool,
ignore_error: bool,
expected_status: str,
@@ -177,28 +281,28 @@ class TestResultManager:
success_list = list_result_factory(3)
for test in success_list:
test.result = "success"
test.result = AntaTestStatus.SUCCESS
result_manager.results = success_list
test = test_result_factory()
test.result = "failure"
test.result = AntaTestStatus.FAILURE
result_manager.add(test)
test = test_result_factory()
test.result = "error"
test.result = AntaTestStatus.ERROR
result_manager.add(test)
test = test_result_factory()
test.result = "skipped"
test.result = AntaTestStatus.SKIPPED
result_manager.add(test)
assert len(result_manager) == 6
assert len(result_manager.filter({"failure"})) == 5
assert len(result_manager.filter({"error"})) == 5
assert len(result_manager.filter({"skipped"})) == 5
assert len(result_manager.filter({"failure", "error"})) == 4
assert len(result_manager.filter({"failure", "error", "skipped"})) == 3
assert len(result_manager.filter({"success", "failure", "error", "skipped"})) == 0
assert len(result_manager.filter({AntaTestStatus.FAILURE})) == 5
assert len(result_manager.filter({AntaTestStatus.ERROR})) == 5
assert len(result_manager.filter({AntaTestStatus.SKIPPED})) == 5
assert len(result_manager.filter({AntaTestStatus.FAILURE, AntaTestStatus.ERROR})) == 4
assert len(result_manager.filter({AntaTestStatus.FAILURE, AntaTestStatus.ERROR, AntaTestStatus.SKIPPED})) == 3
assert len(result_manager.filter({AntaTestStatus.SUCCESS, AntaTestStatus.FAILURE, AntaTestStatus.ERROR, AntaTestStatus.SKIPPED})) == 0
def test_get_by_tests(self, test_result_factory: Callable[[], TestResult], result_manager_factory: Callable[[int], ResultManager]) -> None:
"""Test ResultManager.get_by_tests."""


@@ -0,0 +1,378 @@
[
{
"name": "DC1-SPINE1",
"test": "VerifyTacacsSourceIntf",
"categories": [
"AAA"
],
"description": "Verifies TACACS source-interface for a specified VRF.",
"result": "failure",
"messages": [
"Source-interface Management0 is not configured in VRF default"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyLLDPNeighbors",
"categories": [
"Connectivity"
],
"description": "Verifies that the provided LLDP neighbors are connected properly.",
"result": "failure",
"messages": [
"Wrong LLDP neighbor(s) on port(s):\n Ethernet1\n DC1-LEAF1A_Ethernet1\n Ethernet2\n DC1-LEAF1B_Ethernet1\nPort(s) not configured:\n Ethernet7"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyBGPPeerCount",
"categories": [
"BGP"
],
"description": "Verifies the count of BGP peers.",
"result": "failure",
"messages": [
"Failures: [{'afi': 'ipv4', 'safi': 'unicast', 'vrfs': {'PROD': 'Not Configured', 'default': 'Expected: 3, Actual: 4'}}, {'afi': 'ipv4', 'safi': 'multicast', 'vrfs': {'DEV': 'Not Configured'}}, {'afi': 'evpn', 'vrfs': {'default': 'Expected: 2, Actual: 4'}}]"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifySTPMode",
"categories": [
"STP"
],
"description": "Verifies the configured STP mode for a provided list of VLAN(s).",
"result": "failure",
"messages": [
"STP mode 'rapidPvst' not configured for the following VLAN(s): [10, 20]"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifySnmpStatus",
"categories": [
"SNMP"
],
"description": "Verifies if the SNMP agent is enabled.",
"result": "failure",
"messages": [
"SNMP agent disabled in vrf default"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyRoutingTableEntry",
"categories": [
"Routing"
],
"description": "Verifies that the provided routes are present in the routing table of a specified VRF.",
"result": "failure",
"messages": [
"The following route(s) are missing from the routing table of VRF default: ['10.1.0.2']"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyInterfaceUtilization",
"categories": [
"Interfaces"
],
"description": "Verifies that the utilization of interfaces is below a certain threshold.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyMlagStatus",
"categories": [
"MLAG"
],
"description": "Verifies the health status of the MLAG configuration.",
"result": "skipped",
"messages": [
"MLAG is disabled"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyVxlan1Interface",
"categories": [
"VXLAN"
],
"description": "Verifies the Vxlan1 interface status.",
"result": "skipped",
"messages": [
"Vxlan1 interface is not configured"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyBFDSpecificPeers",
"categories": [
"BFD"
],
"description": "Verifies the IPv4 BFD peer's sessions and remote disc in the specified VRF.",
"result": "failure",
"messages": [
"Following BFD peers are not configured, status is not up or remote disc is zero:\n{'192.0.255.8': {'default': 'Not Configured'}, '192.0.255.7': {'default': 'Not Configured'}}"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyNTP",
"categories": [
"System"
],
"description": "Verifies if NTP is synchronised.",
"result": "failure",
"messages": [
"The device is not synchronized with the configured NTP server(s): 'NTP is disabled.'"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyReachability",
"categories": [
"Connectivity"
],
"description": "Test the network reachability to one or many destination IP(s).",
"result": "error",
"messages": [
"ping vrf MGMT 1.1.1.1 source Management1 repeat 2 has failed: No source interface Management1"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyTelnetStatus",
"categories": [
"Security"
],
"description": "Verifies if Telnet is disabled in the default VRF.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyEOSVersion",
"categories": [
"Software"
],
"description": "Verifies the EOS version of the device.",
"result": "failure",
"messages": [
"device is running version \"4.31.1F-34554157.4311F (engineering build)\" not in expected versions: ['4.25.4M', '4.26.1F']"
],
"custom_field": null
},
{
"name": "DC1-SPINE1",
"test": "VerifyHostname",
"categories": [
"Services"
],
"description": "Verifies the hostname of a device.",
"result": "failure",
"messages": [
"Expected `s1-spine1` as the hostname, but found `DC1-SPINE1` instead."
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyTacacsSourceIntf",
"categories": [
"AAA"
],
"description": "Verifies TACACS source-interface for a specified VRF.",
"result": "failure",
"messages": [
"Source-interface Management0 is not configured in VRF default"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyLLDPNeighbors",
"categories": [
"Connectivity"
],
"description": "Verifies that the provided LLDP neighbors are connected properly.",
"result": "failure",
"messages": [
"Wrong LLDP neighbor(s) on port(s):\n Ethernet1\n DC1-SPINE1_Ethernet1\n Ethernet2\n DC1-SPINE2_Ethernet1\nPort(s) not configured:\n Ethernet7"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyBGPPeerCount",
"categories": [
"BGP"
],
"description": "Verifies the count of BGP peers.",
"result": "failure",
"messages": [
"Failures: [{'afi': 'ipv4', 'safi': 'unicast', 'vrfs': {'PROD': 'Expected: 2, Actual: 1'}}, {'afi': 'ipv4', 'safi': 'multicast', 'vrfs': {'DEV': 'Expected: 3, Actual: 0'}}]"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifySTPMode",
"categories": [
"STP"
],
"description": "Verifies the configured STP mode for a provided list of VLAN(s).",
"result": "failure",
"messages": [
"Wrong STP mode configured for the following VLAN(s): [10, 20]"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifySnmpStatus",
"categories": [
"SNMP"
],
"description": "Verifies if the SNMP agent is enabled.",
"result": "failure",
"messages": [
"SNMP agent disabled in vrf default"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyRoutingTableEntry",
"categories": [
"Routing"
],
"description": "Verifies that the provided routes are present in the routing table of a specified VRF.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyInterfaceUtilization",
"categories": [
"Interfaces"
],
"description": "Verifies that the utilization of interfaces is below a certain threshold.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyMlagStatus",
"categories": [
"MLAG"
],
"description": "Verifies the health status of the MLAG configuration.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyVxlan1Interface",
"categories": [
"VXLAN"
],
"description": "Verifies the Vxlan1 interface status.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyBFDSpecificPeers",
"categories": [
"BFD"
],
"description": "Verifies the IPv4 BFD peer's sessions and remote disc in the specified VRF.",
"result": "failure",
"messages": [
"Following BFD peers are not configured, status is not up or remote disc is zero:\n{'192.0.255.8': {'default': 'Not Configured'}, '192.0.255.7': {'default': 'Not Configured'}}"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyNTP",
"categories": [
"System"
],
"description": "Verifies if NTP is synchronised.",
"result": "failure",
"messages": [
"The device is not synchronized with the configured NTP server(s): 'NTP is disabled.'"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyReachability",
"categories": [
"Connectivity"
],
"description": "Test the network reachability to one or many destination IP(s).",
"result": "error",
"messages": [
"ping vrf MGMT 1.1.1.1 source Management1 repeat 2 has failed: No source interface Management1"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyTelnetStatus",
"categories": [
"Security"
],
"description": "Verifies if Telnet is disabled in the default VRF.",
"result": "success",
"messages": [],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyEOSVersion",
"categories": [
"Software"
],
"description": "Verifies the EOS version of the device.",
"result": "failure",
"messages": [
"device is running version \"4.31.1F-34554157.4311F (engineering build)\" not in expected versions: ['4.25.4M', '4.26.1F']"
],
"custom_field": null
},
{
"name": "DC1-LEAF1A",
"test": "VerifyHostname",
"categories": [
"Services"
],
"description": "Verifies the hostname of a device.",
"result": "failure",
"messages": [
"Expected `s1-spine1` as the hostname, but found `DC1-LEAF1A` instead."
],
"custom_field": null
}
]


@@ -5,56 +5,65 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Callable
from typing import TYPE_CHECKING, Callable
import pytest
# Import as Result to avoid pytest collection
from tests.data.json_data import TEST_RESULT_SET_STATUS
from tests.lib.fixture import DEVICE_NAME
from tests.lib.utils import generate_test_ids_dict
from anta.result_manager.models import AntaTestStatus
from tests.units.conftest import DEVICE_NAME
if TYPE_CHECKING:
from _pytest.mark.structures import ParameterSet
# Import as Result to avoid pytest collection
from anta.result_manager.models import TestResult as Result
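# Each ParameterSet pairs a target AntaTestStatus with the message expected to be recorded on the TestResult.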
TEST_RESULT_SET_STATUS: list[ParameterSet] = [
pytest.param(AntaTestStatus.SUCCESS, "test success message", id="set_success"),
pytest.param(AntaTestStatus.ERROR, "test error message", id="set_error"),
pytest.param(AntaTestStatus.FAILURE, "test failure message", id="set_failure"),
pytest.param(AntaTestStatus.SKIPPED, "test skipped message", id="set_skipped"),
pytest.param(AntaTestStatus.UNSET, "test unset message", id="set_unset"),
]
class TestTestResultModels:
"""Test components of anta.result_manager.models."""
@pytest.mark.parametrize("data", TEST_RESULT_SET_STATUS, ids=generate_test_ids_dict)
def test__is_status_foo(self, test_result_factory: Callable[[int], Result], data: dict[str, Any]) -> None:
@pytest.mark.parametrize(("target", "message"), TEST_RESULT_SET_STATUS)
def test__is_status_foo(self, test_result_factory: Callable[[int], Result], target: AntaTestStatus, message: str) -> None:
"""Test TestResult.is_foo methods."""
testresult = test_result_factory(1)
assert testresult.result == "unset"
assert testresult.result == AntaTestStatus.UNSET
assert len(testresult.messages) == 0
if data["target"] == "success":
testresult.is_success(data["message"])
assert testresult.result == data["target"]
assert data["message"] in testresult.messages
if data["target"] == "failure":
testresult.is_failure(data["message"])
assert testresult.result == data["target"]
assert data["message"] in testresult.messages
if data["target"] == "error":
testresult.is_error(data["message"])
assert testresult.result == data["target"]
assert data["message"] in testresult.messages
if data["target"] == "skipped":
testresult.is_skipped(data["message"])
assert testresult.result == data["target"]
assert data["message"] in testresult.messages
# no helper for unset, testing _set_status
if data["target"] == "unset":
testresult._set_status("unset", data["message"]) # pylint: disable=W0212
assert testresult.result == data["target"]
assert data["message"] in testresult.messages
if target == AntaTestStatus.SUCCESS:
testresult.is_success(message)
assert testresult.result == "success"
assert message in testresult.messages
if target == AntaTestStatus.FAILURE:
testresult.is_failure(message)
assert testresult.result == "failure"
assert message in testresult.messages
if target == AntaTestStatus.ERROR:
testresult.is_error(message)
assert testresult.result == "error"
assert message in testresult.messages
if target == AntaTestStatus.SKIPPED:
testresult.is_skipped(message)
assert testresult.result == "skipped"
assert message in testresult.messages
if target == AntaTestStatus.UNSET:
# no helper for unset, testing _set_status
testresult._set_status(AntaTestStatus.UNSET, message)
assert testresult.result == "unset"
assert message in testresult.messages
@pytest.mark.parametrize("data", TEST_RESULT_SET_STATUS, ids=generate_test_ids_dict)
def test____str__(self, test_result_factory: Callable[[int], Result], data: dict[str, Any]) -> None:
@pytest.mark.parametrize(("target", "message"), TEST_RESULT_SET_STATUS)
def test____str__(self, test_result_factory: Callable[[int], Result], target: AntaTestStatus, message: str) -> None:
"""Test TestResult.__str__."""
testresult = test_result_factory(1)
assert testresult.result == "unset"
assert testresult.result == AntaTestStatus.UNSET
assert len(testresult.messages) == 0
testresult._set_status(data["target"], data["message"]) # pylint: disable=W0212
assert testresult.result == data["target"]
assert str(testresult) == f"Test 'VerifyTest1' (on '{DEVICE_NAME}'): Result '{data['target']}'\nMessages: {[data['message']]}"
testresult._set_status(target, message)
assert testresult.result == target
assert str(testresult) == f"Test 'VerifyTest1' (on '{DEVICE_NAME}'): Result '{target}'\nMessages: {[message]}"