Adding upstream version 1.2.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>

parent 77504588ab
commit 6fd6eb426a

154 changed files with 7346 additions and 5000 deletions
@ -4,12 +4,14 @@
"""Fixtures for benchmarking ANTA."""

import logging

from collections import defaultdict

import pytest
import respx
from _pytest.terminal import TerminalReporter

from anta.catalog import AntaCatalog
from anta.result_manager import ResultManager

from .utils import AntaMockEnvironment

@ -17,6 +19,12 @@ logger = logging.getLogger(__name__)

TEST_CASE_COUNT = None

# Used to globally configure the benchmarks by specifying parameters for inventories
BENCHMARK_PARAMETERS = [
    pytest.param({"count": 1, "disable_cache": True, "reachable": True}, id="1-device"),
    pytest.param({"count": 2, "disable_cache": True, "reachable": True}, id="2-devices"),
]


@pytest.fixture(name="anta_mock_env", scope="session")  # We want this fixture to have a scope set to session to avoid reparsing all the unit tests data.
def anta_mock_env_fixture() -> AntaMockEnvironment:

@ -35,6 +43,22 @@ def catalog(anta_mock_env: AntaMockEnvironment) -> AntaCatalog:
    return anta_mock_env.catalog


@pytest.fixture(name="session_results", scope="session")  # We want this fixture to be reused across test modules within tests/benchmark
def session_results_fixture() -> defaultdict[str, ResultManager]:
    """Return a dictionary of ResultManager objects for the benchmarks.

    The key is the test id as defined in the pytest_generate_tests in this module.
    Used to pass a populated ResultManager from one benchmark to another.
    """
    return defaultdict(lambda: ResultManager())


@pytest.fixture
def results(request: pytest.FixtureRequest, session_results: defaultdict[str, ResultManager]) -> ResultManager:
    """Return the unique ResultManager object for the current benchmark parameter."""
    return session_results[request.node.callspec.id]


def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None:
    """Display the total number of ANTA unit test cases used to benchmark."""
    terminalreporter.write_sep("=", f"{TEST_CASE_COUNT} ANTA test cases")

@ -49,9 +73,12 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
        return
    metafunc.parametrize(
        "inventory",
        [
            pytest.param({"count": 1, "disable_cache": True, "reachable": True}, id="1-device"),
            pytest.param({"count": 2, "disable_cache": True, "reachable": True}, id="2-devices"),
        ],
        BENCHMARK_PARAMETERS,
        indirect=True,
    )
    elif "results" in metafunc.fixturenames:
        metafunc.parametrize(
            "results",
            BENCHMARK_PARAMETERS,
            indirect=True,
        )

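Note: with `indirect=True`, each `pytest.param` payload above is delivered to the target fixture through `request.param` instead of being passed straight to the test function. A minimal, self-contained sketch of that mechanism (fixture and test names here are illustrative, not part of this commit):

    # indirect_parametrize_sketch.py -- illustrative only
    import pytest

    PARAMS = [
        pytest.param({"count": 1, "reachable": True}, id="1-device"),
        pytest.param({"count": 2, "reachable": True}, id="2-devices"),
    ]


    @pytest.fixture
    def inventory(request: pytest.FixtureRequest) -> dict:
        # With indirect=True, the pytest.param payload arrives here as request.param.
        return dict(request.param)


    def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
        if "inventory" in metafunc.fixturenames:
            metafunc.parametrize("inventory", PARAMS, indirect=True)


    def test_inventory_size(inventory: dict) -> None:
        assert inventory["count"] in (1, 2)
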
@ -5,6 +5,7 @@

import asyncio
import logging
from collections import defaultdict
from unittest.mock import patch

import pytest

@ -22,45 +23,61 @@ from .utils import collect, collect_commands
logger = logging.getLogger(__name__)


def test_anta_dry_run(benchmark: BenchmarkFixture, event_loop: asyncio.AbstractEventLoop, catalog: AntaCatalog, inventory: AntaInventory) -> None:
def test_anta_dry_run(
    benchmark: BenchmarkFixture,
    event_loop: asyncio.AbstractEventLoop,
    catalog: AntaCatalog,
    inventory: AntaInventory,
    request: pytest.FixtureRequest,
    session_results: defaultdict[str, ResultManager],
) -> None:
    """Benchmark ANTA in Dry-Run Mode."""
    # Disable logging during ANTA execution to avoid having these functions' time counted in benchmarks
    logging.disable()

    def _() -> ResultManager:
        manager = ResultManager()
        catalog.clear_indexes()
        event_loop.run_until_complete(main(manager, inventory, catalog, dry_run=True))
        return manager
    results = session_results[request.node.callspec.id]

    manager = benchmark(_)
    @benchmark
    def _() -> None:
        results.reset()
        catalog.clear_indexes()
        event_loop.run_until_complete(main(results, inventory, catalog, dry_run=True))

    logging.disable(logging.NOTSET)
    if len(manager.results) != len(inventory) * len(catalog.tests):
        pytest.fail(f"Expected {len(inventory) * len(catalog.tests)} tests but got {len(manager.results)}", pytrace=False)
    bench_info = "\n--- ANTA NRFU Dry-Run Benchmark Information ---\n" f"Test count: {len(manager.results)}\n" "-----------------------------------------------"

    if len(results.results) != len(inventory) * len(catalog.tests):
        pytest.fail(f"Expected {len(inventory) * len(catalog.tests)} tests but got {len(results.results)}", pytrace=False)
    bench_info = "\n--- ANTA NRFU Dry-Run Benchmark Information ---\n" f"Test count: {len(results.results)}\n" "-----------------------------------------------"
    logger.info(bench_info)


@patch("anta.models.AntaTest.collect", collect)
@patch("anta.device.AntaDevice.collect_commands", collect_commands)
@pytest.mark.dependency(name="anta_benchmark", scope="package")
@respx.mock  # Mock eAPI responses
def test_anta(benchmark: BenchmarkFixture, event_loop: asyncio.AbstractEventLoop, catalog: AntaCatalog, inventory: AntaInventory) -> None:
def test_anta(
    benchmark: BenchmarkFixture,
    event_loop: asyncio.AbstractEventLoop,
    catalog: AntaCatalog,
    inventory: AntaInventory,
    request: pytest.FixtureRequest,
    session_results: defaultdict[str, ResultManager],
) -> None:
    """Benchmark ANTA."""
    # Disable logging during ANTA execution to avoid having these functions' time counted in benchmarks
    logging.disable()

    def _() -> ResultManager:
        manager = ResultManager()
        catalog.clear_indexes()
        event_loop.run_until_complete(main(manager, inventory, catalog))
        return manager
    results = session_results[request.node.callspec.id]

    manager = benchmark(_)
    @benchmark
    def _() -> None:
        results.reset()
        catalog.clear_indexes()
        event_loop.run_until_complete(main(results, inventory, catalog))

    logging.disable(logging.NOTSET)

    if len(catalog.tests) * len(inventory) != len(manager.results):
    if len(catalog.tests) * len(inventory) != len(results.results):
        # This could mean duplicates exist.
        # TODO: consider removing this code and refactor unit test data as a dictionary with tuple keys instead of a list
        seen = set()

@ -74,17 +91,17 @@ def test_anta(benchmark: BenchmarkFixture, event_loop: asyncio.AbstractEventLoop
        for test in dupes:
            msg = f"Found duplicate in test catalog: {test}"
            logger.error(msg)
        pytest.fail(f"Expected {len(catalog.tests) * len(inventory)} tests but got {len(manager.results)}", pytrace=False)
        pytest.fail(f"Expected {len(catalog.tests) * len(inventory)} tests but got {len(results.results)}", pytrace=False)
    bench_info = (
        "\n--- ANTA NRFU Benchmark Information ---\n"
        f"Test results: {len(manager.results)}\n"
        f"Success: {manager.get_total_results({AntaTestStatus.SUCCESS})}\n"
        f"Failure: {manager.get_total_results({AntaTestStatus.FAILURE})}\n"
        f"Skipped: {manager.get_total_results({AntaTestStatus.SKIPPED})}\n"
        f"Error: {manager.get_total_results({AntaTestStatus.ERROR})}\n"
        f"Unset: {manager.get_total_results({AntaTestStatus.UNSET})}\n"
        f"Test results: {len(results.results)}\n"
        f"Success: {results.get_total_results({AntaTestStatus.SUCCESS})}\n"
        f"Failure: {results.get_total_results({AntaTestStatus.FAILURE})}\n"
        f"Skipped: {results.get_total_results({AntaTestStatus.SKIPPED})}\n"
        f"Error: {results.get_total_results({AntaTestStatus.ERROR})}\n"
        f"Unset: {results.get_total_results({AntaTestStatus.UNSET})}\n"
        "---------------------------------------"
    )
    logger.info(bench_info)
    assert manager.get_total_results({AntaTestStatus.ERROR}) == 0
    assert manager.get_total_results({AntaTestStatus.UNSET}) == 0
    assert results.get_total_results({AntaTestStatus.ERROR}) == 0
    assert results.get_total_results({AntaTestStatus.UNSET}) == 0

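Note: the change above switches from calling the fixture (`manager = benchmark(_)`) to applying it as a decorator (`@benchmark`); in both forms the wrapped callable is what gets timed. A minimal sketch, assuming the `benchmark` fixture is provided by pytest-codspeed or pytest-benchmark (names here are illustrative, not ANTA code):

    # benchmark_decorator_sketch.py -- illustrative only
    def test_sort_benchmark(benchmark) -> None:
        data = list(range(1_000, 0, -1))

        @benchmark
        def _() -> None:
            sorted(data)  # benchmarked body; its return value is discarded
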
tests/benchmark/test_reporter.py (new file, 71 lines)

@ -0,0 +1,71 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Benchmark tests for anta.reporter."""

import json
import logging
from pathlib import Path

import pytest

from anta.reporter import ReportJinja, ReportTable
from anta.reporter.csv_reporter import ReportCsv
from anta.reporter.md_reporter import MDReportGenerator
from anta.result_manager import ResultManager

logger = logging.getLogger(__name__)

DATA_DIR: Path = Path(__file__).parents[1].resolve() / "data"


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_table_all(results: ResultManager) -> None:
    """Benchmark ReportTable.report_all()."""
    reporter = ReportTable()
    reporter.report_all(results)


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_table_devices(results: ResultManager) -> None:
    """Benchmark ReportTable.report_summary_devices()."""
    reporter = ReportTable()
    reporter.report_summary_devices(results)


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_table_tests(results: ResultManager) -> None:
    """Benchmark ReportTable.report_summary_tests()."""
    reporter = ReportTable()
    reporter.report_summary_tests(results)


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_json(results: ResultManager) -> None:
    """Benchmark JSON report."""
    assert isinstance(results.json, str)


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_jinja(results: ResultManager) -> None:
    """Benchmark ReportJinja."""
    assert isinstance(ReportJinja(template_path=DATA_DIR / "template.j2").render(json.loads(results.json)), str)


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_csv(results: ResultManager, tmp_path: Path) -> None:
    """Benchmark ReportCsv.generate()."""
    ReportCsv.generate(results=results, csv_filename=tmp_path / "report.csv")


@pytest.mark.benchmark
@pytest.mark.dependency(depends=["anta_benchmark"], scope="package")
def test_markdown(results: ResultManager, tmp_path: Path) -> None:
    """Benchmark MDReportGenerator.generate()."""
    MDReportGenerator.generate(results=results, md_filename=tmp_path / "report.md")

@ -5,19 +5,21 @@

from __future__ import annotations

from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Any

from anta.result_manager import ResultManager
from anta.runner import get_coroutines, prepare_tests

if TYPE_CHECKING:
    from collections import defaultdict
    from collections.abc import Coroutine

    from pytest_codspeed import BenchmarkFixture

    from anta.catalog import AntaCatalog, AntaTestDefinition
    from anta.device import AntaDevice
    from anta.inventory import AntaInventory
    from anta.result_manager.models import TestResult


def test_prepare_tests(benchmark: BenchmarkFixture, catalog: AntaCatalog, inventory: AntaInventory) -> None:

@ -40,9 +42,13 @@ def test_get_coroutines(benchmark: BenchmarkFixture, catalog: AntaCatalog, inven

    assert selected_tests is not None

    coroutines = benchmark(lambda: get_coroutines(selected_tests=selected_tests, manager=ResultManager()))
    for coros in coroutines:
        coros.close()
    def bench() -> list[Coroutine[Any, Any, TestResult]]:
        coros = get_coroutines(selected_tests=selected_tests, manager=ResultManager())
        for c in coros:
            c.close()
        return coros

    coroutines = benchmark(bench)

    count = sum(len(tests) for tests in selected_tests.values())
    assert count == len(coroutines)

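Note: the reworked `bench()` helper closes each coroutine it creates because this benchmark only measures coroutine construction; closing unawaited coroutines releases them cleanly and avoids "coroutine ... was never awaited" warnings. A generic, self-contained sketch of that pattern (illustrative names, not ANTA code):

    # close_coroutines_sketch.py -- illustrative only
    import asyncio


    async def do_work(n: int) -> int:
        await asyncio.sleep(0)
        return n


    def build_and_discard() -> None:
        coros = [do_work(i) for i in range(3)]  # created but never scheduled
        for coro in coros:
            coro.close()  # dispose of them instead of awaiting


    build_and_discard()
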
tests/data/test_catalog_double_failure.yml (new file, 13 lines)

@ -0,0 +1,13 @@
---
anta.tests.interfaces:
  - VerifyInterfacesSpeed:
      interfaces:
        - name: Ethernet2
          auto: False
          speed: 10
        - name: Ethernet3
          auto: True
          speed: 100
        - name: Ethernet4
          auto: False
          speed: 2.5

@ -3,7 +3,7 @@ anta_inventory:
  hosts:
    - name: leaf1
      host: leaf1.anta.arista.com
      tags: ["leaf"]
      tags: ["leaf", "dc1"]
    - name: leaf2
      host: leaf2.anta.arista.com
      tags: ["leaf"]

@ -21,7 +21,7 @@ def build_test_id(val: dict[str, Any]) -> str:


def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
    """Generate ANTA testts unit tests dynamically during test collection.
    """Generate ANTA tests unit tests dynamically during test collection.

    It will parametrize test cases based on the `DATA` data structure defined in `tests.units.anta_tests` modules.
    See `tests/units/anta_tests/README.md` for more information on how to use it.

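Note: the `DATA` lists in the unit-test files below are consumed by this hook: each entry bundles a test class, mocked `eos_data`, `inputs`, and an `expected` outcome, and `pytest_generate_tests` expands the list into one parametrized case per entry. A minimal sketch of that list-driven pattern (illustrative only, not the ANTA implementation):

    # data_driven_parametrize_sketch.py -- illustrative only
    from typing import Any

    import pytest

    DATA: list[dict[str, Any]] = [
        {"name": "success", "inputs": {"x": 1}, "expected": {"result": "success"}},
        {"name": "failure", "inputs": {"x": 0}, "expected": {"result": "failure"}},
    ]


    def build_test_id(val: dict[str, Any]) -> str:
        return val["name"]


    def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
        if "data" in metafunc.fixturenames:
            metafunc.parametrize("data", DATA, ids=build_test_id)


    def test_case(data: dict[str, Any]) -> None:
        assert data["expected"]["result"] in {"success", "failure"}
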
(File diff suppressed because it is too large.)

@ -11,7 +11,7 @@ from typing import Any
import pytest
from pydantic import ValidationError

from anta.tests.routing.generic import VerifyRoutingProtocolModel, VerifyRoutingTableEntry, VerifyRoutingTableSize
from anta.tests.routing.generic import VerifyIPv4RouteType, VerifyRoutingProtocolModel, VerifyRoutingTableEntry, VerifyRoutingTableSize
from tests.units.anta_tests import test

DATA: list[dict[str, Any]] = [

@ -304,6 +304,50 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"vrf": "default", "routes": ["10.1.0.1", "10.1.0.2"], "collect": "all"},
|
||||
"expected": {"result": "failure", "messages": ["The following route(s) are missing from the routing table of VRF default: ['10.1.0.2']"]},
|
||||
},
|
||||
{
|
||||
"name": "success-valid-route-type",
|
||||
"test": VerifyIPv4RouteType,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {"routes": {"10.10.0.1/32": {"routeType": "eBGP"}, "10.100.0.12/31": {"routeType": "connected"}}},
|
||||
"MGMT": {"routes": {"10.100.1.5/32": {"routeType": "iBGP"}}},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"routes_entries": [
|
||||
{"vrf": "default", "prefix": "10.10.0.1/32", "route_type": "eBGP"},
|
||||
{"vrf": "default", "prefix": "10.100.0.12/31", "route_type": "connected"},
|
||||
{"vrf": "MGMT", "prefix": "10.100.1.5/32", "route_type": "iBGP"},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-route-not-found",
|
||||
"test": VerifyIPv4RouteType,
|
||||
"eos_data": [{"vrfs": {"default": {"routes": {}}}}],
|
||||
"inputs": {"routes_entries": [{"vrf": "default", "prefix": "10.10.0.1/32", "route_type": "eBGP"}]},
|
||||
"expected": {"result": "failure", "messages": ["Prefix: 10.10.0.1/32 VRF: default - Route not found"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-invalid-route-type",
|
||||
"test": VerifyIPv4RouteType,
|
||||
"eos_data": [{"vrfs": {"default": {"routes": {"10.10.0.1/32": {"routeType": "eBGP"}}}}}],
|
||||
"inputs": {"routes_entries": [{"vrf": "default", "prefix": "10.10.0.1/32", "route_type": "iBGP"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Prefix: 10.10.0.1/32 VRF: default - Incorrect route type - Expected: iBGP Actual: eBGP"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-vrf-not-configured",
|
||||
"test": VerifyIPv4RouteType,
|
||||
"eos_data": [{"vrfs": {}}],
|
||||
"inputs": {"routes_entries": [{"vrf": "default", "prefix": "10.10.0.1/32", "route_type": "eBGP"}]},
|
||||
"expected": {"result": "failure", "messages": ["Prefix: 10.10.0.1/32 VRF: default - VRF not configured"]},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
|
|
|
@ -361,48 +361,63 @@ DATA: list[dict[str, Any]] = [
|
|||
"avts": {
|
||||
"DEFAULT-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
"multihop:1": {
|
||||
"flags": {"directPath": False, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
"multihop:3": {
|
||||
"flags": {"directPath": False, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
},
|
||||
},
|
||||
"data": {
|
||||
"avts": {
|
||||
"DATA-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:8": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"direct:8": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.2",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"multihop:1": {
|
||||
"flags": {"directPath": False, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.2",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"multihop:3": {
|
||||
"flags": {"directPath": False, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.2",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
"data": {
|
||||
"avts": {
|
||||
"DATA-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:8": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
],
|
||||
|
@ -420,22 +435,6 @@ DATA: list[dict[str, Any]] = [
|
|||
"test": VerifyAVTSpecificPath,
|
||||
"eos_data": [
|
||||
{"vrfs": {}},
|
||||
{
|
||||
"vrfs": {
|
||||
"data": {
|
||||
"avts": {
|
||||
"DATA-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"avt_paths": [
|
||||
|
@ -445,11 +444,11 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["AVT configuration for peer '10.101.255.2' under topology 'MGMT-AVT-POLICY-DEFAULT' in VRF 'default' is not found."],
|
||||
"messages": ["AVT MGMT-AVT-POLICY-DEFAULT VRF: default (Destination: 10.101.255.2, Next-hop: 10.101.255.1) - No AVT path configured"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-path-with-correct-next-hop",
|
||||
"name": "failure-path_type_check_true",
|
||||
"test": VerifyAVTSpecificPath,
|
||||
"eos_data": [
|
||||
{
|
||||
|
@ -458,30 +457,38 @@ DATA: list[dict[str, Any]] = [
|
|||
"avts": {
|
||||
"DEFAULT-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
},
|
||||
},
|
||||
"data": {
|
||||
"avts": {
|
||||
"DATA-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.3",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.3",
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
],
|
||||
|
@ -500,10 +507,72 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"No 'multihop' path found with next-hop address '10.101.255.11' for AVT peer '10.101.255.2' under "
|
||||
"topology 'DEFAULT-AVT-POLICY-CONTROL-PLANE' in VRF 'default'.",
|
||||
"No 'direct' path found with next-hop address '10.101.255.21' for AVT peer '10.101.255.1' under "
|
||||
"topology 'DATA-AVT-POLICY-CONTROL-PLANE' in VRF 'data'.",
|
||||
"AVT DEFAULT-AVT-POLICY-CONTROL-PLANE VRF: default (Destination: 10.101.255.2, Next-hop: 10.101.255.11) Path Type: multihop - Path not found",
|
||||
"AVT DATA-AVT-POLICY-CONTROL-PLANE VRF: data (Destination: 10.101.255.1, Next-hop: 10.101.255.21) Path Type: direct - Path not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-path_type_check_false",
|
||||
"test": VerifyAVTSpecificPath,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"avts": {
|
||||
"DEFAULT-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"data": {
|
||||
"avts": {
|
||||
"DATA-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.3",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.3",
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"avt_paths": [
|
||||
{
|
||||
"avt_name": "DEFAULT-AVT-POLICY-CONTROL-PLANE",
|
||||
"vrf": "default",
|
||||
"destination": "10.101.255.2",
|
||||
"next_hop": "10.101.255.11",
|
||||
},
|
||||
{"avt_name": "DATA-AVT-POLICY-CONTROL-PLANE", "vrf": "data", "destination": "10.101.255.1", "next_hop": "10.101.255.21"},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"AVT DEFAULT-AVT-POLICY-CONTROL-PLANE VRF: default (Destination: 10.101.255.2, Next-hop: 10.101.255.11) - Path not found",
|
||||
"AVT DATA-AVT-POLICY-CONTROL-PLANE VRF: data (Destination: 10.101.255.1, Next-hop: 10.101.255.21) - Path not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -517,30 +586,48 @@ DATA: list[dict[str, Any]] = [
|
|||
"avts": {
|
||||
"DEFAULT-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:1": {"flags": {"directPath": True, "valid": False, "active": False}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": False}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:3": {
|
||||
"flags": {"directPath": False, "valid": False, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.2",
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
},
|
||||
},
|
||||
"data": {
|
||||
"avts": {
|
||||
"DATA-AVT-POLICY-CONTROL-PLANE": {
|
||||
"avtPaths": {
|
||||
"direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"direct:9": {"flags": {"directPath": True, "valid": False, "active": True}, "nexthopAddr": "10.101.255.1"},
|
||||
"multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"},
|
||||
"direct:10": {
|
||||
"flags": {"directPath": True, "valid": False, "active": True},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"direct:9": {
|
||||
"flags": {"directPath": True, "valid": True, "active": False},
|
||||
"nexthopAddr": "10.101.255.1",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"direct:8": {
|
||||
"flags": {"directPath": True, "valid": False, "active": False},
|
||||
"nexthopAddr": "10.101.255.2",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"multihop:1": {
|
||||
"flags": {"directPath": False, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.2",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
"multihop:3": {
|
||||
"flags": {"directPath": False, "valid": True, "active": True},
|
||||
"nexthopAddr": "10.101.255.2",
|
||||
"destination": "10.101.255.1",
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
],
|
||||
|
@ -559,8 +646,12 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"AVT path 'multihop:3' for topology 'DEFAULT-AVT-POLICY-CONTROL-PLANE' in VRF 'default' is inactive.",
|
||||
"AVT path 'direct:9' for topology 'DATA-AVT-POLICY-CONTROL-PLANE' in VRF 'data' is invalid.",
|
||||
"AVT DEFAULT-AVT-POLICY-CONTROL-PLANE VRF: default (Destination: 10.101.255.2, Next-hop: 10.101.255.1) - "
|
||||
"Incorrect path multihop:3 - Valid: False, Active: True",
|
||||
"AVT DATA-AVT-POLICY-CONTROL-PLANE VRF: data (Destination: 10.101.255.1, Next-hop: 10.101.255.1) - "
|
||||
"Incorrect path direct:10 - Valid: False, Active: True",
|
||||
"AVT DATA-AVT-POLICY-CONTROL-PLANE VRF: data (Destination: 10.101.255.1, Next-hop: 10.101.255.1) - "
|
||||
"Incorrect path direct:9 - Valid: True, Active: False",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -107,8 +107,8 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured or timers are not correct:\n"
|
||||
"{'192.0.255.7': {'CS': 'Not Configured'}, '192.0.255.70': {'MGMT': 'Not Configured'}}"
|
||||
"Peer: 192.0.255.7 VRF: CS - Not found",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -160,9 +160,11 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured or timers are not correct:\n"
|
||||
"{'192.0.255.7': {'default': {'tx_interval': 1300, 'rx_interval': 1200, 'multiplier': 4}}, "
|
||||
"'192.0.255.70': {'MGMT': {'tx_interval': 120, 'rx_interval': 120, 'multiplier': 5}}}"
|
||||
"Peer: 192.0.255.7 VRF: default - Incorrect Transmit interval - Expected: 1200 Actual: 1300",
|
||||
"Peer: 192.0.255.7 VRF: default - Incorrect Multiplier - Expected: 3 Actual: 4",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Incorrect Transmit interval - Expected: 1200 Actual: 120",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Incorrect Receive interval - Expected: 1200 Actual: 120",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Incorrect Multiplier - Expected: 3 Actual: 5",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -239,8 +241,8 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured, status is not up or remote disc is zero:\n"
|
||||
"{'192.0.255.7': {'CS': 'Not Configured'}, '192.0.255.70': {'MGMT': 'Not Configured'}}"
|
||||
"Peer: 192.0.255.7 VRF: CS - Not found",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -255,7 +257,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "Down",
|
||||
"status": "down",
|
||||
"remoteDisc": 108328132,
|
||||
}
|
||||
}
|
||||
|
@ -267,7 +269,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"192.0.255.70": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "Down",
|
||||
"status": "down",
|
||||
"remoteDisc": 0,
|
||||
}
|
||||
}
|
||||
|
@ -281,9 +283,8 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured, status is not up or remote disc is zero:\n"
|
||||
"{'192.0.255.7': {'default': {'status': 'Down', 'remote_disc': 108328132}}, "
|
||||
"'192.0.255.70': {'MGMT': {'status': 'Down', 'remote_disc': 0}}}"
|
||||
"Peer: 192.0.255.7 VRF: default - Session not properly established - State: down Remote Discriminator: 108328132",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Session not properly established - State: down Remote Discriminator: 0",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -414,7 +415,8 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not up:\n192.0.255.7 is down in default VRF with remote disc 0.\n192.0.255.71 is down in MGMT VRF with remote disc 0."
|
||||
"Peer: 192.0.255.7 VRF: default - Session not properly established - State: down Remote Discriminator: 0",
|
||||
"Peer: 192.0.255.71 VRF: MGMT - Session not properly established - State: down Remote Discriminator: 0",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -458,7 +460,10 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Following BFD peers were down:\n192.0.255.7 in default VRF has remote disc 0.\n192.0.255.71 in default VRF has remote disc 0."],
|
||||
"messages": [
|
||||
"Peer: 192.0.255.7 VRF: default - Session not properly established - State: up Remote Discriminator: 0",
|
||||
"Peer: 192.0.255.71 VRF: default - Session not properly established - State: up Remote Discriminator: 0",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -512,8 +517,9 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers were down:\n192.0.255.7 in default VRF was down 3 hours ago.\n"
|
||||
"192.0.255.71 in default VRF was down 3 hours ago.\n192.0.255.17 in default VRF was down 3 hours ago."
|
||||
"Peer: 192.0.255.7 VRF: default - Session failure detected within the expected uptime threshold (3 hours ago)",
|
||||
"Peer: 192.0.255.71 VRF: default - Session failure detected within the expected uptime threshold (3 hours ago)",
|
||||
"Peer: 192.0.255.17 VRF: default - Session failure detected within the expected uptime threshold (3 hours ago)",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -609,15 +615,14 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {
|
||||
"bfd_peers": [
|
||||
{"peer_address": "192.0.255.7", "vrf": "default", "protocols": ["isis"]},
|
||||
{"peer_address": "192.0.255.70", "vrf": "MGMT", "protocols": ["isis"]},
|
||||
{"peer_address": "192.0.255.70", "vrf": "MGMT", "protocols": ["isis", "ospf"]},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The following BFD peers are not configured or have non-registered protocol(s):\n"
|
||||
"{'192.0.255.7': {'default': ['isis']}, "
|
||||
"'192.0.255.70': {'MGMT': ['isis']}}"
|
||||
"Peer: 192.0.255.7 VRF: default - `isis` routing protocol(s) not configured",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - `isis` `ospf` routing protocol(s) not configured",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -641,8 +646,8 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The following BFD peers are not configured or have non-registered protocol(s):\n"
|
||||
"{'192.0.255.7': {'default': 'Not Configured'}, '192.0.255.70': {'MGMT': 'Not Configured'}}"
|
||||
"Peer: 192.0.255.7 VRF: default - Not found",
|
||||
"Peer: 192.0.255.70 VRF: MGMT - Not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -153,7 +153,7 @@ DATA: list[dict[str, Any]] = [
|
|||
],
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Connectivity test failed for the following source-destination pairs: [('10.0.0.5', '10.0.0.11')]"]},
|
||||
"expected": {"result": "failure", "messages": ["Host 10.0.0.11 (src: 10.0.0.5, vrf: default, size: 100B, repeat: 2) - Unreachable"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-interface",
|
||||
|
@ -187,7 +187,7 @@ DATA: list[dict[str, Any]] = [
|
|||
],
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Connectivity test failed for the following source-destination pairs: [('Management0', '10.0.0.11')]"]},
|
||||
"expected": {"result": "failure", "messages": ["Host 10.0.0.11 (src: Management0, vrf: default, size: 100B, repeat: 2) - Unreachable"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-size",
|
||||
|
@ -209,17 +209,11 @@ DATA: list[dict[str, Any]] = [
|
|||
],
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Connectivity test failed for the following source-destination pairs: [('Management0', '10.0.0.1')]"]},
|
||||
"expected": {"result": "failure", "messages": ["Host 10.0.0.1 (src: Management0, vrf: default, size: 1501B, repeat: 5, df-bit: enabled) - Unreachable"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
|
@ -256,16 +250,17 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-multiple-neighbors",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
|
@ -298,17 +293,16 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-port-not-configured",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
|
@ -330,17 +324,17 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Port(s) not configured:\n Ethernet2"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-neighbor",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["Port Ethernet2 (Neighbor: DC1-SPINE2, Neighbor Port: Ethernet1) - Port not found"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-neighbor",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
|
@ -363,17 +357,17 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["No LLDP neighbor(s) on port(s):\n Ethernet2"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-neighbor",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["Port Ethernet2 (Neighbor: DC1-SPINE2, Neighbor Port: Ethernet1) - No LLDP neighbors"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-neighbor",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
|
@ -410,11 +404,42 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Wrong LLDP neighbor(s) on port(s):\n Ethernet2\n DC1-SPINE2_Ethernet2"]},
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Port Ethernet2 (Neighbor: DC1-SPINE2, Neighbor Port: Ethernet1) - Wrong LLDP neighbors: DC1-SPINE2/Ethernet2"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-multiple",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet2"',
|
||||
"interfaceId_v2": "Ethernet2",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
"Ethernet2": {"lldpNeighborInfo": []},
|
||||
},
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
|
@ -422,45 +447,18 @@ DATA: list[dict[str, Any]] = [
|
|||
{"port": "Ethernet3", "neighbor_device": "DC1-SPINE3", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet2"',
|
||||
"interfaceId_v2": "Ethernet2",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
"Ethernet2": {"lldpNeighborInfo": []},
|
||||
},
|
||||
},
|
||||
],
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Wrong LLDP neighbor(s) on port(s):\n Ethernet1\n DC1-SPINE1_Ethernet2\n"
|
||||
"No LLDP neighbor(s) on port(s):\n Ethernet2\n"
|
||||
"Port(s) not configured:\n Ethernet3"
|
||||
"Port Ethernet1 (Neighbor: DC1-SPINE1, Neighbor Port: Ethernet1) - Wrong LLDP neighbors: DC1-SPINE1/Ethernet2",
|
||||
"Port Ethernet2 (Neighbor: DC1-SPINE2, Neighbor Port: Ethernet1) - No LLDP neighbors",
|
||||
"Port Ethernet3 (Neighbor: DC1-SPINE3, Neighbor Port: Ethernet1) - Port not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-multiple-neighbors",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE3", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
|
@ -493,6 +491,14 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Wrong LLDP neighbor(s) on port(s):\n Ethernet1\n DC1-SPINE1_Ethernet1\n DC1-SPINE2_Ethernet1"]},
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE3", "neighbor_port": "Ethernet1"},
|
||||
],
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Port Ethernet1 (Neighbor: DC1-SPINE3, Neighbor Port: Ethernet1) - Wrong LLDP neighbors: DC1-SPINE1/Ethernet1, DC1-SPINE2/Ethernet1"],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
|
tests/units/anta_tests/test_cvx.py (new file, 525 lines)

@ -0,0 +1,525 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Data for testing anta.tests.cvx."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.cvx import VerifyActiveCVXConnections, VerifyCVXClusterStatus, VerifyManagementCVX, VerifyMcsClientMounts, VerifyMcsServerMounts
|
||||
from tests.units.anta_tests import test
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [{"mountStates": [{"path": "mcs/v1/toSwitch/28-99-3a-8f-93-7b", "type": "Mcs::DeviceConfigV1", "state": "mountStateMountComplete"}]}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-haclient",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"mountStates": [
|
||||
{"path": "mcs/v1/apiCfgRedState", "type": "Mcs::ApiConfigRedundancyState", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/v1/toSwitch/00-1c-73-74-c0-8b", "type": "Mcs::DeviceConfigV1", "state": "mountStateMountComplete"},
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-partial-non-mcs",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"mountStates": [
|
||||
{"path": "blah/blah/blah", "type": "blah::blah", "state": "mountStatePreservedUnmounted"},
|
||||
{"path": "mcs/v1/toSwitch/00-1c-73-74-c0-8b", "type": "Mcs::DeviceConfigV1", "state": "mountStateMountComplete"},
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-nomounts",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{"mountStates": []},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["MCS Client mount states are not present"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-mountStatePreservedUnmounted",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [{"mountStates": [{"path": "mcs/v1/toSwitch/28-99-3a-8f-93-7b", "type": "Mcs::DeviceConfigV1", "state": "mountStatePreservedUnmounted"}]}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["MCS Client mount states are not valid: mountStatePreservedUnmounted"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-partial-haclient",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"mountStates": [
|
||||
{"path": "mcs/v1/apiCfgRedState", "type": "Mcs::ApiConfigRedundancyState", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/v1/toSwitch/00-1c-73-74-c0-8b", "type": "Mcs::DeviceConfigV1", "state": "mountStatePreservedUnmounted"},
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["MCS Client mount states are not valid: mountStatePreservedUnmounted"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-full-haclient",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"mountStates": [
|
||||
{"path": "blah/blah/blah", "type": "blah::blahState", "state": "mountStatePreservedUnmounted"},
|
||||
{"path": "mcs/v1/toSwitch/00-1c-73-74-c0-8b", "type": "Mcs::DeviceConfigV1", "state": "mountStatePreservedUnmounted"},
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["MCS Client mount states are not valid: mountStatePreservedUnmounted"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-non-mcs-client",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{"mountStates": [{"path": "blah/blah/blah", "type": "blah::blahState", "state": "mountStatePreservedUnmounted"}]},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["MCS Client mount states are not present"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-partial-mcs-client",
|
||||
"test": VerifyMcsClientMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"mountStates": [
|
||||
{"path": "blah/blah/blah", "type": "blah::blahState", "state": "mountStatePreservedUnmounted"},
|
||||
{"path": "blah/blah/blah", "type": "Mcs::DeviceConfigV1", "state": "mountStatePreservedUnmounted"},
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["MCS Client mount states are not valid: mountStatePreservedUnmounted"]},
|
||||
},
|
||||
{
|
||||
"name": "success-enabled",
|
||||
"test": VerifyManagementCVX,
|
||||
"eos_data": [
|
||||
{
|
||||
"clusterStatus": {
|
||||
"enabled": True,
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"enabled": True},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-disabled",
|
||||
"test": VerifyManagementCVX,
|
||||
"eos_data": [
|
||||
{
|
||||
"clusterStatus": {
|
||||
"enabled": False,
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"enabled": False},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure - no enabled state",
|
||||
"test": VerifyManagementCVX,
|
||||
"eos_data": [{"clusterStatus": {}}],
|
||||
"inputs": {"enabled": False},
|
||||
"expected": {"result": "failure", "messages": ["Management CVX status is not valid: None"]},
|
||||
},
|
||||
{
|
||||
"name": "failure - no clusterStatus",
|
||||
"test": VerifyManagementCVX,
|
||||
"eos_data": [{}],
|
||||
"inputs": {"enabled": False},
|
||||
"expected": {"result": "failure", "messages": ["Management CVX status is not valid: None"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"hostname": "media-leaf-1",
|
||||
"mounts": [
|
||||
{
|
||||
"service": "Mcs",
|
||||
"mountStates": [
|
||||
{
|
||||
"pathStates": [
|
||||
{"path": "mcs/v1/apiCfgRedStatus", "type": "Mcs::ApiConfigRedundancyStatus", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/v1/activeflows", "type": "Mcs::ActiveFlows", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/switch/status", "type": "Mcs::Client::Status", "state": "mountStateMountComplete"},
|
||||
]
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-mounts",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [{"connections": [{"hostname": "media-leaf-1", "mounts": []}]}],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["No mount status for media-leaf-1", "Incorrect CVX successful connections count. Expected: 1, Actual : 0"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-unexpected-number-paths",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"hostname": "media-leaf-1",
|
||||
"mounts": [
|
||||
{
|
||||
"service": "Mcs",
|
||||
"mountStates": [
|
||||
{
|
||||
"pathStates": [
|
||||
{"path": "mcs/v1/apiCfgRedStatus", "type": "Mcs::ApiStatus", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/v1/activeflows", "type": "Mcs::ActiveFlows", "state": "mountStateMountComplete"},
|
||||
]
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Incorrect number of mount path states for media-leaf-1 - Expected: 3, Actual: 2",
|
||||
"Unexpected MCS path type for media-leaf-1: 'Mcs::ApiStatus'.",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-unexpected-path-type",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"hostname": "media-leaf-1",
|
||||
"mounts": [
|
||||
{
|
||||
"service": "Mcs",
|
||||
"mountStates": [
|
||||
{
|
||||
"pathStates": [
|
||||
{"path": "mcs/v1/apiCfgRedStatus", "type": "Mcs::ApiStatus", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/v1/activeflows", "type": "Mcs::ActiveFlows", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/switch/status", "type": "Mcs::Client::Status", "state": "mountStateMountComplete"},
|
||||
]
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {"result": "failure", "messages": ["Unexpected MCS path type for media-leaf-1: 'Mcs::ApiStatus'"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-invalid-mount-state",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"hostname": "media-leaf-1",
|
||||
"mounts": [
|
||||
{
|
||||
"service": "Mcs",
|
||||
"mountStates": [
|
||||
{
|
||||
"pathStates": [
|
||||
{"path": "mcs/v1/apiCfgRedStatus", "type": "Mcs::ApiConfigRedundancyStatus", "state": "mountStateMountFailed"},
|
||||
{"path": "mcs/v1/activeflows", "type": "Mcs::ActiveFlows", "state": "mountStateMountComplete"},
|
||||
{"path": "mcs/switch/status", "type": "Mcs::Client::Status", "state": "mountStateMountComplete"},
|
||||
]
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["MCS server mount state for path 'Mcs::ApiConfigRedundancyStatus' is not valid is for media-leaf-1: 'mountStateMountFailed'"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-mcs-mount",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"hostname": "media-leaf-1",
|
||||
"mounts": [
|
||||
{
|
||||
"service": "blah-blah",
|
||||
"mountStates": [{"pathStates": [{"path": "blah-blah-path", "type": "blah-blah-type", "state": "blah-blah-state"}]}],
|
||||
}
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {"result": "failure", "messages": ["MCS mount state not detected", "Incorrect CVX successful connections count. Expected: 1, Actual : 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-connections",
|
||||
"test": VerifyMcsServerMounts,
|
||||
"eos_data": [{}],
|
||||
"inputs": {"connections_count": 1},
|
||||
"expected": {"result": "failure", "messages": ["CVX connections are not available."]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyActiveCVXConnections,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"switchId": "fc:bd:67:c3:16:55",
|
||||
"hostname": "lyv563",
|
||||
"oobConnectionActive": True,
|
||||
},
|
||||
{
|
||||
"switchId": "00:1c:73:3c:e3:9e",
|
||||
"hostname": "tg264",
|
||||
"oobConnectionActive": True,
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 2},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyActiveCVXConnections,
|
||||
"eos_data": [
|
||||
{
|
||||
"connections": [
|
||||
{
|
||||
"switchId": "fc:bd:67:c3:16:55",
|
||||
"hostname": "lyv563",
|
||||
"oobConnectionActive": False,
|
||||
},
|
||||
{
|
||||
"switchId": "00:1c:73:3c:e3:9e",
|
||||
"hostname": "tg264",
|
||||
"oobConnectionActive": True,
|
||||
},
|
||||
]
|
||||
}
|
||||
],
|
||||
"inputs": {"connections_count": 2},
|
||||
"expected": {"result": "failure", "messages": ["CVX active connections count. Expected: 2, Actual : 1"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-connections",
|
||||
"test": VerifyActiveCVXConnections,
|
||||
"eos_data": [{}],
|
||||
"inputs": {"connections_count": 2},
|
||||
"expected": {"result": "failure", "messages": ["CVX connections are not available"]},
|
||||
},
|
||||
{
|
||||
"name": "success-all",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"clusterMode": True,
|
||||
"clusterStatus": {
|
||||
"role": "Master",
|
||||
"peerStatus": {
|
||||
"cvx-red-2": {"peerName": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
"cvx-red-3": {"peerName": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [
|
||||
{"peer_name": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
{"peer_name": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-invalid-role",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"clusterMode": True,
|
||||
"clusterStatus": {
|
||||
"role": "Standby",
|
||||
"peerStatus": {
|
||||
"cvx-red-2": {"peerName": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
"cvx-red-3": {"peerName": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [
|
||||
{"peer_name": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
{"peer_name": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["CVX Role is not valid: Standby"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-cvx-enabled",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": False,
|
||||
"clusterMode": True,
|
||||
"clusterStatus": {
|
||||
"role": "Master",
|
||||
"peerStatus": {},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["CVX Server status is not enabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-cluster-enabled",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"clusterMode": False,
|
||||
"clusterStatus": {},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["CVX Server is not a cluster"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-missing-peers",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"clusterMode": True,
|
||||
"clusterStatus": {
|
||||
"role": "Master",
|
||||
"peerStatus": {
|
||||
"cvx-red-2": {"peerName": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [
|
||||
{"peer_name": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
{"peer_name": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["Unexpected number of peers 1 vs 2", "cvx-red-3 is not present"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-invalid-peers",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"clusterMode": True,
|
||||
"clusterStatus": {
|
||||
"role": "Master",
|
||||
"peerStatus": {},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [
|
||||
{"peer_name": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
{"peer_name": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["Unexpected number of peers 0 vs 2", "cvx-red-2 is not present", "cvx-red-3 is not present"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-registration-error",
|
||||
"test": VerifyCVXClusterStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"clusterMode": True,
|
||||
"clusterStatus": {
|
||||
"role": "Master",
|
||||
"peerStatus": {
|
||||
"cvx-red-2": {"peerName": "cvx-red-2", "registrationState": "Registration error"},
|
||||
"cvx-red-3": {"peerName": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"role": "Master",
|
||||
"peer_status": [
|
||||
{"peer_name": "cvx-red-2", "registrationState": "Registration complete"},
|
||||
{"peer_name": "cvx-red-3", "registrationState": "Registration complete"},
|
||||
],
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["cvx-red-2 registration state is not complete: Registration error"]},
|
||||
},
|
||||
]
|
|
@ -1108,7 +1108,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"interfaces": [{"name": "Ethernet2", "status": "up"}, {"name": "Ethernet8", "status": "up"}, {"name": "Ethernet3", "status": "up"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following interface(s) are not configured: ['Ethernet8']"],
|
||||
"messages": ["Ethernet8 - Not configured"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -1126,7 +1126,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"interfaces": [{"name": "Ethernet2", "status": "up"}, {"name": "Ethernet8", "status": "up"}, {"name": "Ethernet3", "status": "up"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following interface(s) are not in the expected state: ['Ethernet8 is down/down'"],
|
||||
"messages": ["Ethernet8 - Expected: up/up, Actual: down/down"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -1150,7 +1150,7 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following interface(s) are not in the expected state: ['Ethernet8 is up/down'"],
|
||||
"messages": ["Ethernet8 - Expected: up/up, Actual: up/down"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -1166,7 +1166,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"interfaces": [{"name": "PortChannel100", "status": "up"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following interface(s) are not in the expected state: ['Port-Channel100 is down/lowerLayerDown'"],
|
||||
"messages": ["Port-Channel100 - Expected: up/up, Actual: down/lowerLayerDown"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -1190,7 +1190,38 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following interface(s) are not in the expected state: ['Ethernet2 is up/unknown'"],
|
||||
"messages": [
|
||||
"Ethernet2 - Expected: up/down, Actual: up/unknown",
|
||||
"Ethernet8 - Expected: up/up, Actual: up/down",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-interface-status-down",
|
||||
"test": VerifyInterfacesStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"interfaceDescriptions": {
|
||||
"Ethernet8": {"interfaceStatus": "up", "description": "", "lineProtocolStatus": "down"},
|
||||
"Ethernet2": {"interfaceStatus": "up", "description": "", "lineProtocolStatus": "unknown"},
|
||||
"Ethernet3": {"interfaceStatus": "up", "description": "", "lineProtocolStatus": "up"},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"interfaces": [
|
||||
{"name": "Ethernet2", "status": "down"},
|
||||
{"name": "Ethernet8", "status": "down"},
|
||||
{"name": "Ethernet3", "status": "down"},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Ethernet2 - Expected: down, Actual: up",
|
||||
"Ethernet8 - Expected: down, Actual: up",
|
||||
"Ethernet3 - Expected: down, Actual: up",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -1938,8 +1969,8 @@ DATA: list[dict[str, Any]] = [
|
|||
"interfaces": {
|
||||
"Ethernet2": {
|
||||
"interfaceAddress": {
|
||||
"primaryIp": {"address": "172.30.11.0", "maskLen": 31},
|
||||
"secondaryIpsOrderedList": [{"address": "10.10.10.0", "maskLen": 31}, {"address": "10.10.10.10", "maskLen": 31}],
|
||||
"primaryIp": {"address": "172.30.11.1", "maskLen": 31},
|
||||
"secondaryIpsOrderedList": [{"address": "10.10.10.1", "maskLen": 31}, {"address": "10.10.10.10", "maskLen": 31}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1957,7 +1988,7 @@ DATA: list[dict[str, Any]] = [
|
|||
],
|
||||
"inputs": {
|
||||
"interfaces": [
|
||||
{"name": "Ethernet2", "primary_ip": "172.30.11.0/31", "secondary_ips": ["10.10.10.0/31", "10.10.10.10/31"]},
|
||||
{"name": "Ethernet2", "primary_ip": "172.30.11.1/31", "secondary_ips": ["10.10.10.1/31", "10.10.10.10/31"]},
|
||||
{"name": "Ethernet12", "primary_ip": "172.30.11.10/31", "secondary_ips": ["10.10.10.10/31", "10.10.10.20/31"]},
|
||||
]
|
||||
},
|
||||
|
@ -2479,6 +2510,43 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"interfaces": [{"name": "Ethernet5", "portchannel": "Port-Channel5"}]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-short-timeout",
|
||||
"test": VerifyLACPInterfacesStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"portChannels": {
|
||||
"Port-Channel5": {
|
||||
"interfaces": {
|
||||
"Ethernet5": {
|
||||
"actorPortStatus": "bundled",
|
||||
"partnerPortState": {
|
||||
"activity": True,
|
||||
"timeout": True,
|
||||
"aggregation": True,
|
||||
"synchronization": True,
|
||||
"collecting": True,
|
||||
"distributing": True,
|
||||
},
|
||||
"actorPortState": {
|
||||
"activity": True,
|
||||
"timeout": True,
|
||||
"aggregation": True,
|
||||
"synchronization": True,
|
||||
"collecting": True,
|
||||
"distributing": True,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"interface": "Ethernet5",
|
||||
"orphanPorts": {},
|
||||
}
|
||||
],
|
||||
"inputs": {"interfaces": [{"name": "Ethernet5", "portchannel": "Port-Channel5", "lacp_rate_fast": True}]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-bundled",
|
||||
"test": VerifyLACPInterfacesStatus,
|
||||
|
@ -2500,7 +2568,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"interfaces": [{"name": "Ethernet5", "portchannel": "Po5"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["For Interface Ethernet5:\nExpected `bundled` as the local port status, but found `No Aggregate` instead.\n"],
|
||||
"messages": ["Interface: Ethernet5 Port-Channel: Port-Channel5 - Not bundled - Port Status: No Aggregate"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -2514,7 +2582,7 @@ DATA: list[dict[str, Any]] = [
|
|||
"inputs": {"interfaces": [{"name": "Ethernet5", "portchannel": "Po 5"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Interface 'Ethernet5' is not configured to be a member of LACP 'Port-Channel5'."],
|
||||
"messages": ["Interface: Ethernet5 Port-Channel: Port-Channel5 - Not configured"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -2555,13 +2623,55 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"For Interface Ethernet5:\n"
|
||||
"Actor port details:\nExpected `True` as the activity, but found `False` instead."
|
||||
"\nExpected `True` as the aggregation, but found `False` instead."
|
||||
"\nExpected `True` as the synchronization, but found `False` instead."
|
||||
"\nPartner port details:\nExpected `True` as the activity, but found `False` instead.\n"
|
||||
"Expected `True` as the aggregation, but found `False` instead.\n"
|
||||
"Expected `True` as the synchronization, but found `False` instead.\n"
|
||||
"Interface: Ethernet5 Port-Channel: Port-Channel5 - Actor port details mismatch - Activity: False, Aggregation: False, "
|
||||
"Synchronization: False, Collecting: True, Distributing: True, Timeout: False",
|
||||
"Interface: Ethernet5 Port-Channel: Port-Channel5 - Partner port details mismatch - Activity: False, Aggregation: False, "
|
||||
"Synchronization: False, Collecting: True, Distributing: True, Timeout: False",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-short-timeout",
|
||||
"test": VerifyLACPInterfacesStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"portChannels": {
|
||||
"Port-Channel5": {
|
||||
"interfaces": {
|
||||
"Ethernet5": {
|
||||
"actorPortStatus": "bundled",
|
||||
"partnerPortState": {
|
||||
"activity": True,
|
||||
"timeout": False,
|
||||
"aggregation": True,
|
||||
"synchronization": True,
|
||||
"collecting": True,
|
||||
"distributing": True,
|
||||
},
|
||||
"actorPortState": {
|
||||
"activity": True,
|
||||
"timeout": False,
|
||||
"aggregation": True,
|
||||
"synchronization": True,
|
||||
"collecting": True,
|
||||
"distributing": True,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"interface": "Ethernet5",
|
||||
"orphanPorts": {},
|
||||
}
|
||||
],
|
||||
"inputs": {"interfaces": [{"name": "Ethernet5", "portchannel": "port-channel 5", "lacp_rate_fast": True}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Interface: Ethernet5 Port-Channel: Port-Channel5 - Actor port details mismatch - Activity: True, Aggregation: True, "
|
||||
"Synchronization: True, Collecting: True, Distributing: True, Timeout: False",
|
||||
"Interface: Ethernet5 Port-Channel: Port-Channel5 - Partner port details mismatch - Activity: True, Aggregation: True, "
|
||||
"Synchronization: True, Collecting: True, Distributing: True, Timeout: False",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1079,7 +1079,7 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["No IPv4 security connection configured for peer `10.255.0.1`."]},
|
||||
"expected": {"result": "failure", "messages": ["Peer: 10.255.0.1 VRF: default - Not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-established",
|
||||
|
@ -1127,14 +1127,10 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Expected state of IPv4 security connection `source:172.18.3.2 destination:172.18.2.2 vrf:default` for peer `10.255.0.1` is `Established` "
|
||||
"but found `Idle` instead.",
|
||||
"Expected state of IPv4 security connection `source:100.64.2.2 destination:100.64.1.2 vrf:default` for peer `10.255.0.1` is `Established` "
|
||||
"but found `Idle` instead.",
|
||||
"Expected state of IPv4 security connection `source:100.64.2.2 destination:100.64.1.2 vrf:MGMT` for peer `10.255.0.2` is `Established` "
|
||||
"but found `Idle` instead.",
|
||||
"Expected state of IPv4 security connection `source:172.18.2.2 destination:172.18.1.2 vrf:MGMT` for peer `10.255.0.2` is `Established` "
|
||||
"but found `Idle` instead.",
|
||||
"Peer: 10.255.0.1 VRF: default Source: 172.18.3.2 Destination: 172.18.2.2 - Connection down - Expected: Established, Actual: Idle",
|
||||
"Peer: 10.255.0.1 VRF: default Source: 100.64.2.2 Destination: 100.64.1.2 - Connection down - Expected: Established, Actual: Idle",
|
||||
"Peer: 10.255.0.2 VRF: MGMT Source: 100.64.2.2 Destination: 100.64.1.2 - Connection down - Expected: Established, Actual: Idle",
|
||||
"Peer: 10.255.0.2 VRF: MGMT Source: 172.18.2.2 Destination: 172.18.1.2 - Connection down - Expected: Established, Actual: Idle",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -1194,12 +1190,10 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Expected state of IPv4 security connection `source:172.18.3.2 destination:172.18.2.2 vrf:default` for peer `10.255.0.1` is `Established` "
|
||||
"but found `Idle` instead.",
|
||||
"Expected state of IPv4 security connection `source:100.64.3.2 destination:100.64.2.2 vrf:default` for peer `10.255.0.1` is `Established` "
|
||||
"but found `Idle` instead.",
|
||||
"IPv4 security connection `source:100.64.4.2 destination:100.64.1.2 vrf:default` for peer `10.255.0.2` is not found.",
|
||||
"IPv4 security connection `source:172.18.4.2 destination:172.18.1.2 vrf:default` for peer `10.255.0.2` is not found.",
|
||||
"Peer: 10.255.0.1 VRF: default Source: 172.18.3.2 Destination: 172.18.2.2 - Connection down - Expected: Established, Actual: Idle",
|
||||
"Peer: 10.255.0.1 VRF: default Source: 100.64.3.2 Destination: 100.64.2.2 - Connection down - Expected: Established, Actual: Idle",
|
||||
"Peer: 10.255.0.2 VRF: default Source: 100.64.4.2 Destination: 100.64.1.2 - Connection not found.",
|
||||
"Peer: 10.255.0.2 VRF: default Source: 172.18.4.2 Destination: 172.18.1.2 - Connection not found.",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -59,30 +59,22 @@ DATA: list[dict[str, Any]] = [
|
|||
"test": VerifyDNSServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"nameServerConfigs": [{"ipAddr": "10.14.0.1", "vrf": "default", "priority": 0}, {"ipAddr": "10.14.0.11", "vrf": "MGMT", "priority": 1}],
|
||||
"nameServerConfigs": [
|
||||
{"ipAddr": "10.14.0.1", "vrf": "default", "priority": 0},
|
||||
{"ipAddr": "10.14.0.11", "vrf": "MGMT", "priority": 1},
|
||||
{"ipAddr": "fd12:3456:789a::1", "vrf": "default", "priority": 0},
|
||||
],
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"dns_servers": [{"server_address": "10.14.0.1", "vrf": "default", "priority": 0}, {"server_address": "10.14.0.11", "vrf": "MGMT", "priority": 1}]
|
||||
"dns_servers": [
|
||||
{"server_address": "10.14.0.1", "vrf": "default", "priority": 0},
|
||||
{"server_address": "10.14.0.11", "vrf": "MGMT", "priority": 1},
|
||||
{"server_address": "fd12:3456:789a::1", "vrf": "default", "priority": 0},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-dns-missing",
|
||||
"test": VerifyDNSServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"nameServerConfigs": [{"ipAddr": "10.14.0.1", "vrf": "default", "priority": 0}, {"ipAddr": "10.14.0.11", "vrf": "MGMT", "priority": 1}],
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"dns_servers": [{"server_address": "10.14.0.10", "vrf": "default", "priority": 0}, {"server_address": "10.14.0.21", "vrf": "MGMT", "priority": 1}]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["DNS server `10.14.0.10` is not configured with any VRF.", "DNS server `10.14.0.21` is not configured with any VRF."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-dns-found",
|
||||
"test": VerifyDNSServers,
|
||||
|
@ -96,7 +88,7 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["DNS server `10.14.0.10` is not configured with any VRF.", "DNS server `10.14.0.21` is not configured with any VRF."],
|
||||
"messages": ["Server 10.14.0.10 (VRF: default, Priority: 0) - Not configured", "Server 10.14.0.21 (VRF: MGMT, Priority: 1) - Not configured"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -117,9 +109,9 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"For DNS server `10.14.0.1`, the expected priority is `0`, but `1` was found instead.",
|
||||
"DNS server `10.14.0.11` is not configured with VRF `default`.",
|
||||
"DNS server `10.14.0.110` is not configured with any VRF.",
|
||||
"Server 10.14.0.1 (VRF: CS, Priority: 0) - Incorrect priority - Priority: 1",
|
||||
"Server 10.14.0.11 (VRF: default, Priority: 0) - Not configured",
|
||||
"Server 10.14.0.110 (VRF: MGMT, Priority: 0) - Not configured",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -7,13 +7,13 @@ from __future__ import annotations
|
|||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.stun import VerifyStunClient, VerifyStunServer
|
||||
from anta.tests.stun import VerifyStunClientTranslation, VerifyStunServer
|
||||
from tests.units.anta_tests import test
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyStunClient,
|
||||
"test": VerifyStunClientTranslation,
|
||||
"eos_data": [
|
||||
{
|
||||
"bindings": {
|
||||
|
@ -60,7 +60,7 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
{
|
||||
"name": "failure-incorrect-public-ip",
|
||||
"test": VerifyStunClient,
|
||||
"test": VerifyStunClientTranslation,
|
||||
"eos_data": [
|
||||
{
|
||||
"bindings": {
|
||||
|
@ -88,14 +88,14 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"For STUN source `100.64.3.2:4500`:\nExpected `192.164.3.2` as the public ip, but found `192.64.3.2` instead.",
|
||||
"For STUN source `172.18.3.2:4500`:\nExpected `192.118.3.2` as the public ip, but found `192.18.3.2` instead.",
|
||||
"Client 100.64.3.2 Port: 4500 - Incorrect public-facing address - Expected: 192.164.3.2 Actual: 192.64.3.2",
|
||||
"Client 172.18.3.2 Port: 4500 - Incorrect public-facing address - Expected: 192.118.3.2 Actual: 192.18.3.2",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-client",
|
||||
"test": VerifyStunClient,
|
||||
"test": VerifyStunClientTranslation,
|
||||
"eos_data": [
|
||||
{"bindings": {}},
|
||||
{"bindings": {}},
|
||||
|
@ -108,12 +108,12 @@ DATA: list[dict[str, Any]] = [
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["STUN client transaction for source `100.64.3.2:4500` is not found.", "STUN client transaction for source `172.18.3.2:4500` is not found."],
|
||||
"messages": ["Client 100.64.3.2 Port: 4500 - STUN client translation not found.", "Client 172.18.3.2 Port: 4500 - STUN client translation not found."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-public-port",
|
||||
"test": VerifyStunClient,
|
||||
"test": VerifyStunClientTranslation,
|
||||
"eos_data": [
|
||||
{"bindings": {}},
|
||||
{
|
||||
|
@ -134,16 +134,15 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"STUN client transaction for source `100.64.3.2:4500` is not found.",
|
||||
"For STUN source `172.18.3.2:4500`:\n"
|
||||
"Expected `192.118.3.2` as the public ip, but found `192.18.3.2` instead.\n"
|
||||
"Expected `6006` as the public port, but found `4800` instead.",
|
||||
"Client 100.64.3.2 Port: 4500 - STUN client translation not found.",
|
||||
"Client 172.18.3.2 Port: 4500 - Incorrect public-facing address - Expected: 192.118.3.2 Actual: 192.18.3.2",
|
||||
"Client 172.18.3.2 Port: 4500 - Incorrect public-facing port - Expected: 6006 Actual: 4800",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-all-type",
|
||||
"test": VerifyStunClient,
|
||||
"test": VerifyStunClientTranslation,
|
||||
"eos_data": [
|
||||
{"bindings": {}},
|
||||
{
|
||||
|
@ -164,12 +163,9 @@ DATA: list[dict[str, Any]] = [
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"STUN client transaction for source `100.64.3.2:4500` is not found.",
|
||||
"For STUN source `172.18.4.2:4800`:\n"
|
||||
"Expected `172.18.4.2` as the source ip, but found `172.18.3.2` instead.\n"
|
||||
"Expected `4800` as the source port, but found `4500` instead.\n"
|
||||
"Expected `192.118.3.2` as the public ip, but found `192.18.3.2` instead.\n"
|
||||
"Expected `6006` as the public port, but found `4800` instead.",
|
||||
"Client 100.64.3.2 Port: 4500 - STUN client translation not found.",
|
||||
"Client 172.18.4.2 Port: 4800 - Incorrect public-facing address - Expected: 192.118.3.2 Actual: 192.18.3.2",
|
||||
"Client 172.18.4.2 Port: 4800 - Incorrect public-facing port - Expected: 6006 Actual: 4800",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -346,6 +346,39 @@ poll interval unknown
|
|||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-ip-dns",
|
||||
"test": VerifyNTPAssociations,
|
||||
"eos_data": [
|
||||
{
|
||||
"peers": {
|
||||
"1.1.1.1 (1.ntp.networks.com)": {
|
||||
"condition": "sys.peer",
|
||||
"peerIpAddr": "1.1.1.1",
|
||||
"stratumLevel": 1,
|
||||
},
|
||||
"2.2.2.2 (2.ntp.networks.com)": {
|
||||
"condition": "candidate",
|
||||
"peerIpAddr": "2.2.2.2",
|
||||
"stratumLevel": 2,
|
||||
},
|
||||
"3.3.3.3 (3.ntp.networks.com)": {
|
||||
"condition": "candidate",
|
||||
"peerIpAddr": "3.3.3.3",
|
||||
"stratumLevel": 2,
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"ntp_servers": [
|
||||
{"server_address": "1.1.1.1", "preferred": True, "stratum": 1},
|
||||
{"server_address": "2.2.2.2", "stratum": 2},
|
||||
{"server_address": "3.3.3.3", "stratum": 2},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyNTPAssociations,
|
||||
|
@ -380,9 +413,9 @@ poll interval unknown
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"For NTP peer 1.1.1.1:\nExpected `sys.peer` as the condition, but found `candidate` instead.\nExpected `1` as the stratum, but found `2` instead.\n"
|
||||
"For NTP peer 2.2.2.2:\nExpected `candidate` as the condition, but found `sys.peer` instead.\n"
|
||||
"For NTP peer 3.3.3.3:\nExpected `candidate` as the condition, but found `sys.peer` instead.\nExpected `2` as the stratum, but found `3` instead."
|
||||
"1.1.1.1 (Preferred: True, Stratum: 1) - Bad association - Condition: candidate, Stratum: 2",
|
||||
"2.2.2.2 (Preferred: False, Stratum: 2) - Bad association - Condition: sys.peer, Stratum: 2",
|
||||
"3.3.3.3 (Preferred: False, Stratum: 2) - Bad association - Condition: sys.peer, Stratum: 3",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
@ -399,7 +432,7 @@ poll interval unknown
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["None of NTP peers are not configured."],
|
||||
"messages": ["No NTP peers configured"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -430,7 +463,7 @@ poll interval unknown
|
|||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["NTP peer 3.3.3.3 is not configured."],
|
||||
"messages": ["3.3.3.3 (Preferred: False, Stratum: 1) - Not configured"],
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -457,8 +490,9 @@ poll interval unknown
|
|||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"For NTP peer 1.1.1.1:\nExpected `sys.peer` as the condition, but found `candidate` instead.\n"
|
||||
"NTP peer 2.2.2.2 is not configured.\nNTP peer 3.3.3.3 is not configured."
|
||||
"1.1.1.1 (Preferred: True, Stratum: 1) - Bad association - Condition: candidate, Stratum: 1",
|
||||
"2.2.2.2 (Preferred: False, Stratum: 1) - Not configured",
|
||||
"3.3.3.3 (Preferred: False, Stratum: 1) - Not configured",
|
||||
],
|
||||
},
|
||||
},
|
||||
|
|
|
@ -39,6 +39,7 @@ MOCK_CLI_JSON: dict[str, asynceapi.EapiCommandError | dict[str, Any]] = {
|
|||
errmsg="Invalid command",
|
||||
not_exec=[],
|
||||
),
|
||||
"show interfaces": {},
|
||||
}
|
||||
|
||||
MOCK_CLI_TEXT: dict[str, asynceapi.EapiCommandError | str] = {
|
||||
|
|
4
tests/units/cli/get/local_module/__init__.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Copyright (c) 2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Module used for test purposes."""
|
|
@ -114,6 +114,27 @@ def test_from_cvp(
|
|||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
|
||||
def test_from_cvp_os_error(tmp_path: Path, click_runner: CliRunner, caplog: pytest.LogCaptureFixture) -> None:
|
||||
"""Test from_cvp when an OSError occurs."""
|
||||
output: Path = tmp_path / "output.yml"
|
||||
cli_args = ["get", "from-cvp", "--output", str(output), "--host", "42.42.42.42", "--username", "anta", "--password", "anta"]
|
||||
|
||||
with (
|
||||
patch("anta.cli.get.commands.get_cv_token", autospec=True, side_effect=None),
|
||||
patch("cvprac.cvp_client.CvpClient.connect", autospec=True, side_effect=None) as mocked_cvp_connect,
|
||||
patch("cvprac.cvp_client.CvpApi.get_inventory", autospec=True, return_value=[]) as mocked_get_inventory,
|
||||
patch("cvprac.cvp_client.CvpApi.get_devices_in_container", autospec=True, return_value=[]),
|
||||
patch("anta.cli.get.utils.Path.open", side_effect=OSError("Permission denied")),
|
||||
):
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
|
||||
mocked_cvp_connect.assert_called_once()
|
||||
mocked_get_inventory.assert_called_once()
|
||||
assert not output.exists()
|
||||
assert "Could not write inventory to path" in caplog.text
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("ansible_inventory", "ansible_group", "expected_exit", "expected_log"),
|
||||
[
|
||||
|
@ -257,8 +278,7 @@ def test_from_ansible_overwrite(
|
|||
else:
|
||||
temp_env["ANTA_INVENTORY"] = None
|
||||
tmp_inv = tmp_output
|
||||
cli_args.extend(["--output", str(tmp_output)])
|
||||
|
||||
cli_args.extend(["--output", str(tmp_inv)])
|
||||
if overwrite:
|
||||
cli_args.append("--overwrite")
|
||||
|
||||
|
@ -275,3 +295,162 @@ def test_from_ansible_overwrite(
|
|||
elif expected_exit == ExitCode.INTERNAL_ERROR:
|
||||
assert expected_log
|
||||
assert expected_log in result.output
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("module", "test_name", "short", "count", "expected_output", "expected_exit_code"),
|
||||
[
|
||||
pytest.param(
|
||||
None,
|
||||
None,
|
||||
False,
|
||||
False,
|
||||
"VerifyAcctConsoleMethods",
|
||||
ExitCode.OK,
|
||||
id="Get all tests",
|
||||
),
|
||||
pytest.param(
|
||||
"anta.tests.aaa",
|
||||
None,
|
||||
False,
|
||||
False,
|
||||
"VerifyAcctConsoleMethods",
|
||||
ExitCode.OK,
|
||||
id="Get tests, filter on module",
|
||||
),
|
||||
pytest.param(
|
||||
None,
|
||||
"VerifyNTPAssociations",
|
||||
False,
|
||||
False,
|
||||
"VerifyNTPAssociations",
|
||||
ExitCode.OK,
|
||||
id="Get tests, filter on exact test name",
|
||||
),
|
||||
pytest.param(
|
||||
None,
|
||||
"VerifyNTP",
|
||||
False,
|
||||
False,
|
||||
"anta.tests.system",
|
||||
ExitCode.OK,
|
||||
id="Get tests, filter on included test name",
|
||||
),
|
||||
pytest.param(
|
||||
None,
|
||||
"VerifyNTP",
|
||||
True,
|
||||
False,
|
||||
"VerifyNTPAssociations",
|
||||
ExitCode.OK,
|
||||
id="Get tests --short",
|
||||
),
|
||||
pytest.param(
|
||||
"unknown_module",
|
||||
None,
|
||||
True,
|
||||
False,
|
||||
"Module `unknown_module` was not found!",
|
||||
ExitCode.USAGE_ERROR,
|
||||
id="Get tests wrong module",
|
||||
),
|
||||
pytest.param(
|
||||
"unknown_module.unknown",
|
||||
None,
|
||||
True,
|
||||
False,
|
||||
"Module `unknown_module.unknown` was not found!",
|
||||
ExitCode.USAGE_ERROR,
|
||||
id="Get tests wrong submodule",
|
||||
),
|
||||
pytest.param(
|
||||
".unknown_module",
|
||||
None,
|
||||
True,
|
||||
False,
|
||||
"`anta get tests --module <module>` does not support relative imports",
|
||||
ExitCode.USAGE_ERROR,
|
||||
id="Use relative module name",
|
||||
),
|
||||
pytest.param(
|
||||
None,
|
||||
"VerifySomething",
|
||||
True,
|
||||
False,
|
||||
"No test 'VerifySomething' found in 'anta.tests'",
|
||||
ExitCode.OK,
|
||||
id="Get tests wrong test name",
|
||||
),
|
||||
pytest.param(
|
||||
"anta.tests.aaa",
|
||||
"VerifyNTP",
|
||||
True,
|
||||
False,
|
||||
"No test 'VerifyNTP' found in 'anta.tests.aaa'",
|
||||
ExitCode.OK,
|
||||
id="Get tests test exists but not in module",
|
||||
),
|
||||
pytest.param(
|
||||
"anta.tests.system",
|
||||
"VerifyNTPAssociations",
|
||||
False,
|
||||
True,
|
||||
"There is 1 test available in 'anta.tests.system'.",
|
||||
ExitCode.OK,
|
||||
id="Get single test count",
|
||||
),
|
||||
pytest.param(
|
||||
"anta.tests.stun",
|
||||
None,
|
||||
False,
|
||||
True,
|
||||
"There are 3 tests available in 'anta.tests.stun'",
|
||||
ExitCode.OK,
|
||||
id="Get multiple test count",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_get_tests(
|
||||
click_runner: CliRunner, module: str | None, test_name: str | None, *, short: bool, count: bool, expected_output: str, expected_exit_code: str
|
||||
) -> None:
|
||||
"""Test `anta get tests`."""
|
||||
cli_args = [
|
||||
"get",
|
||||
"tests",
|
||||
]
|
||||
if module is not None:
|
||||
cli_args.extend(["--module", module])
|
||||
|
||||
if test_name is not None:
|
||||
cli_args.extend(["--test", test_name])
|
||||
|
||||
if short:
|
||||
cli_args.append("--short")
|
||||
|
||||
if count:
|
||||
cli_args.append("--count")
|
||||
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
|
||||
assert result.exit_code == expected_exit_code
|
||||
assert expected_output in result.output
|
||||
|
||||
|
||||
def test_get_tests_local_module(click_runner: CliRunner) -> None:
|
||||
"""Test injecting CWD in sys.
|
||||
|
||||
The test overwrites CWD to return this file's parent directory, where local_module is located.
|
||||
"""
|
||||
cli_args = ["get", "tests", "--module", "local_module"]
|
||||
|
||||
cwd = Path.cwd()
|
||||
local_module_parent_path = Path(__file__).parent
|
||||
with patch("anta.cli.get.utils.Path.cwd", return_value=local_module_parent_path):
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
# In the rare case where people would be running `pytest .` in this directory
|
||||
if cwd != local_module_parent_path:
|
||||
assert "injecting CWD in PYTHONPATH and retrying..." in result.output
|
||||
assert "No test found in 'local_module'" in result.output
|
||||
|
|
|
@ -7,14 +7,15 @@ from __future__ import annotations
|
|||
|
||||
from contextlib import AbstractContextManager, nullcontext
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from typing import Any, ClassVar
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from anta.cli.get.utils import create_inventory_from_ansible, create_inventory_from_cvp, get_cv_token
|
||||
from anta.cli.get.utils import create_inventory_from_ansible, create_inventory_from_cvp, extract_examples, find_tests_examples, get_cv_token, print_test
|
||||
from anta.inventory import AntaInventory
|
||||
from anta.models import AntaCommand, AntaTemplate, AntaTest
|
||||
|
||||
DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
|
||||
|
||||
|
@ -160,3 +161,91 @@ def test_create_inventory_from_ansible(
|
|||
assert not target_file.exists()
|
||||
if expected_log:
|
||||
assert expected_log in caplog.text
|
||||
|
||||
|
||||
class MissingExampleTest(AntaTest):
|
||||
"""ANTA test that always succeed but has no Examples section."""
|
||||
|
||||
categories: ClassVar[list[str]] = []
|
||||
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
"""Test function."""
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class EmptyExampleTest(AntaTest):
|
||||
"""ANTA test that always succeed but has an empty Examples section.
|
||||
|
||||
Examples
|
||||
--------
|
||||
"""
|
||||
|
||||
# For the test purpose we want an empty section as custom tests might not be using ruff.
|
||||
# ruff: noqa: D414
|
||||
|
||||
categories: ClassVar[list[str]] = []
|
||||
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
"""Test function."""
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class TypoExampleTest(AntaTest):
|
||||
"""ANTA test that always succeed but has a Typo in the test name in the example.
|
||||
|
||||
Notice capital P in TyPo below.
|
||||
|
||||
Examples
|
||||
--------
|
||||
```yaml
|
||||
tests.units.cli.get.test_utils:
|
||||
- TyPoExampleTest:
|
||||
```
|
||||
"""
|
||||
|
||||
# For the test purpose we want an empty section as custom tests might not be using ruff.
|
||||
# ruff: noqa: D414
|
||||
|
||||
categories: ClassVar[list[str]] = []
|
||||
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
"""Test function."""
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
def test_find_tests_examples() -> None:
|
||||
"""Test find_tests_examples.
|
||||
|
||||
Only testing the failure scenarios not tested through test_commands.
|
||||
TODO: expand
|
||||
"""
|
||||
with pytest.raises(ValueError, match="Error when importing"):
|
||||
find_tests_examples("blah", "UnusedTestName")
|
||||
|
||||
|
||||
def test_print_test() -> None:
|
||||
"""Test print_test."""
|
||||
with pytest.raises(ValueError, match="Could not find the name of the test"):
|
||||
print_test(TypoExampleTest)
|
||||
with pytest.raises(LookupError, match="is missing an Example"):
|
||||
print_test(MissingExampleTest)
|
||||
with pytest.raises(LookupError, match="is missing an Example"):
|
||||
print_test(EmptyExampleTest)
|
||||
|
||||
|
||||
def test_extract_examples() -> None:
|
||||
"""Test extract_examples.
|
||||
|
||||
Only testing the case where the 'Examples' section is missing, as everything else
|
||||
is covered already in test_commands.py.
|
||||
"""
|
||||
assert MissingExampleTest.__doc__ is not None
|
||||
assert EmptyExampleTest.__doc__ is not None
|
||||
assert extract_examples(MissingExampleTest.__doc__) is None
|
||||
assert extract_examples(EmptyExampleTest.__doc__) is None
|
||||
|
|
|
@ -17,7 +17,7 @@ from anta.cli.utils import ExitCode
|
|||
if TYPE_CHECKING:
|
||||
from click.testing import CliRunner
|
||||
|
||||
DATA_DIR: Path = Path(__file__).parent.parent.parent.parent.resolve() / "data"
|
||||
DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
|
||||
|
||||
|
||||
def test_anta_nrfu_table_help(click_runner: CliRunner) -> None:
|
||||
|
@ -76,6 +76,19 @@ def test_anta_nrfu_text(click_runner: CliRunner) -> None:
|
|||
assert "leaf1 :: VerifyEOSVersion :: SUCCESS" in result.output
|
||||
|
||||
|
||||
def test_anta_nrfu_text_multiple_failures(click_runner: CliRunner) -> None:
|
||||
"""Test anta nrfu text with multiple failures, catalog is given via env."""
|
||||
result = click_runner.invoke(anta, ["nrfu", "text"], env={"ANTA_CATALOG": str(DATA_DIR / "test_catalog_double_failure.yml")})
|
||||
assert result.exit_code == ExitCode.TESTS_FAILED
|
||||
assert (
|
||||
"""spine1 :: VerifyInterfacesSpeed :: FAILURE
|
||||
Interface `Ethernet2` is not found.
|
||||
Interface `Ethernet3` is not found.
|
||||
Interface `Ethernet4` is not found."""
|
||||
in result.output
|
||||
)
|
||||
|
||||
|
||||
def test_anta_nrfu_json(click_runner: CliRunner) -> None:
|
||||
"""Test anta nrfu, catalog is given via env."""
|
||||
result = click_runner.invoke(anta, ["nrfu", "json"])
|
||||
|
|
4
tests/units/input_models/__init__.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Tests for anta.input_models module."""
|
4
tests/units/input_models/routing/__init__.py
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Test for anta.input_models.routing submodule."""
|
238
tests/units/input_models/routing/test_bgp.py
Normal file
|
@ -0,0 +1,238 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Tests for anta.input_models.routing.bgp.py."""
|
||||
|
||||
# pylint: disable=C0302
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from anta.input_models.routing.bgp import BgpAddressFamily, BgpPeer
|
||||
from anta.tests.routing.bgp import (
|
||||
VerifyBGPExchangedRoutes,
|
||||
VerifyBGPPeerCount,
|
||||
VerifyBGPPeerMPCaps,
|
||||
VerifyBGPPeerRouteLimit,
|
||||
VerifyBgpRouteMaps,
|
||||
VerifyBGPSpecificPeers,
|
||||
VerifyBGPTimers,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from anta.custom_types import Afi, Safi
|
||||
|
||||
|
||||
class TestBgpAddressFamily:
|
||||
"""Test anta.input_models.routing.bgp.BgpAddressFamily."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("afi", "safi", "vrf"),
|
||||
[
|
||||
pytest.param("ipv4", "unicast", "MGMT", id="afi"),
|
||||
pytest.param("evpn", None, "default", id="safi"),
|
||||
pytest.param("ipv4", "unicast", "default", id="vrf"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, afi: Afi, safi: Safi, vrf: str) -> None:
|
||||
"""Test BgpAddressFamily valid inputs."""
|
||||
BgpAddressFamily(afi=afi, safi=safi, vrf=vrf)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("afi", "safi", "vrf"),
|
||||
[
|
||||
pytest.param("ipv4", None, "default", id="afi"),
|
||||
pytest.param("evpn", "multicast", "default", id="safi"),
|
||||
pytest.param("evpn", None, "MGMT", id="vrf"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, afi: Afi, safi: Safi, vrf: str) -> None:
|
||||
"""Test BgpAddressFamily invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
BgpAddressFamily(afi=afi, safi=safi, vrf=vrf)
|
||||
|
||||
|
||||
class TestVerifyBGPPeerCountInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBGPPeerCount.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("address_families"),
|
||||
[
|
||||
pytest.param([{"afi": "evpn", "num_peers": 2}], id="valid"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, address_families: list[BgpAddressFamily]) -> None:
|
||||
"""Test VerifyBGPPeerCount.Input valid inputs."""
|
||||
VerifyBGPPeerCount.Input(address_families=address_families)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("address_families"),
|
||||
[
|
||||
pytest.param([{"afi": "evpn", "num_peers": 0}], id="zero-peer"),
|
||||
pytest.param([{"afi": "evpn"}], id="None"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, address_families: list[BgpAddressFamily]) -> None:
|
||||
"""Test VerifyBGPPeerCount.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBGPPeerCount.Input(address_families=address_families)
|
||||
|
||||
|
||||
class TestVerifyBGPSpecificPeersInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBGPSpecificPeers.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("address_families"),
|
||||
[
|
||||
pytest.param([{"afi": "evpn", "peers": ["10.1.0.1", "10.1.0.2"]}], id="valid"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, address_families: list[BgpAddressFamily]) -> None:
|
||||
"""Test VerifyBGPSpecificPeers.Input valid inputs."""
|
||||
VerifyBGPSpecificPeers.Input(address_families=address_families)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("address_families"),
|
||||
[
|
||||
pytest.param([{"afi": "evpn"}], id="None"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, address_families: list[BgpAddressFamily]) -> None:
|
||||
"""Test VerifyBGPSpecificPeers.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBGPSpecificPeers.Input(address_families=address_families)
|
||||
|
||||
|
||||
class TestVerifyBGPExchangedRoutesInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBGPExchangedRoutes.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param(
|
||||
[{"peer_address": "172.30.255.5", "vrf": "default", "advertised_routes": ["192.0.254.5/32"], "received_routes": ["192.0.255.4/32"]}],
|
||||
id="valid_both_received_advertised",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_valid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPExchangedRoutes.Input valid inputs."""
|
||||
VerifyBGPExchangedRoutes.Input(bgp_peers=bgp_peers)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default"}], id="invalid"),
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "advertised_routes": ["192.0.254.5/32"]}], id="invalid_received_route"),
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "received_routes": ["192.0.254.5/32"]}], id="invalid_advertised_route"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPExchangedRoutes.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBGPExchangedRoutes.Input(bgp_peers=bgp_peers)
|
||||
|
||||
|
||||
class TestVerifyBGPPeerMPCapsInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBGPPeerMPCaps.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "capabilities": ["ipv4Unicast"]}], id="valid"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPPeerMPCaps.Input valid inputs."""
|
||||
VerifyBGPPeerMPCaps.Input(bgp_peers=bgp_peers)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default"}], id="invalid"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPPeerMPCaps.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBGPPeerMPCaps.Input(bgp_peers=bgp_peers)
|
||||
|
||||
|
||||
class TestVerifyBGPTimersInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBGPTimers.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "hold_time": 180, "keep_alive_time": 60}], id="valid"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPTimers.Input valid inputs."""
|
||||
VerifyBGPTimers.Input(bgp_peers=bgp_peers)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default"}], id="invalid"),
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "hold_time": 180}], id="invalid_keep_alive"),
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "keep_alive_time": 180}], id="invalid_hold_time"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPTimers.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBGPTimers.Input(bgp_peers=bgp_peers)
|
||||
|
||||
|
||||
class TestVerifyBgpRouteMapsInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBgpRouteMaps.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "inbound_route_map": "Test", "outbound_route_map": "Test"}], id="valid"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBgpRouteMaps.Input valid inputs."""
|
||||
VerifyBgpRouteMaps.Input(bgp_peers=bgp_peers)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default"}], id="invalid"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBgpRouteMaps.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBgpRouteMaps.Input(bgp_peers=bgp_peers)
|
||||
|
||||
|
||||
class TestVerifyBGPPeerRouteLimitInput:
|
||||
"""Test anta.tests.routing.bgp.VerifyBGPPeerRouteLimit.Input."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default", "maximum_routes": 10000}], id="valid"),
|
||||
],
|
||||
)
|
||||
def test_valid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPPeerRouteLimit.Input valid inputs."""
|
||||
VerifyBGPPeerRouteLimit.Input(bgp_peers=bgp_peers)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("bgp_peers"),
|
||||
[
|
||||
pytest.param([{"peer_address": "172.30.255.5", "vrf": "default"}], id="invalid"),
|
||||
],
|
||||
)
|
||||
def test_invalid(self, bgp_peers: list[BgpPeer]) -> None:
|
||||
"""Test VerifyBGPPeerRouteLimit.Input invalid inputs."""
|
||||
with pytest.raises(ValidationError):
|
||||
VerifyBGPPeerRouteLimit.Input(bgp_peers=bgp_peers)
|
33
tests/units/input_models/test_interfaces.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Tests for anta.input_models.interfaces.py."""
|
||||
|
||||
# pylint: disable=C0302
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.input_models.interfaces import InterfaceState
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from anta.custom_types import Interface, PortChannelInterface
|
||||
|
||||
|
||||
class TestInterfaceState:
|
||||
"""Test anta.input_models.interfaces.InterfaceState."""
|
||||
|
||||
# pylint: disable=too-few-public-methods
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("name", "portchannel", "expected"),
|
||||
[
|
||||
pytest.param("Ethernet1", "Port-Channel42", "Interface: Ethernet1 Port-Channel: Port-Channel42", id="with port-channel"),
|
||||
pytest.param("Ethernet1", None, "Interface: Ethernet1", id="no port-channel"),
|
||||
],
|
||||
)
|
||||
def test_valid__str__(self, name: Interface, portchannel: PortChannelInterface | None, expected: str) -> None:
|
||||
"""Test InterfaceState __str__."""
|
||||
assert str(InterfaceState(name=name, portchannel=portchannel)) == expected
|
|
@ -5,4 +5,4 @@
|
|||
|
||||
from tests.units.result_manager.conftest import list_result_factory, result_manager, result_manager_factory, test_result_factory
|
||||
|
||||
__all__ = ["result_manager", "result_manager_factory", "list_result_factory", "test_result_factory"]
|
||||
__all__ = ["list_result_factory", "result_manager", "result_manager_factory", "test_result_factory"]
|
||||
|
|
|
@ -188,5 +188,5 @@ class TestReportJinja:
|
|||
|
||||
def test_fail__init__file_not_found(self) -> None:
|
||||
"""Test __init__ failure if file is not found."""
|
||||
with pytest.raises(FileNotFoundError, match="template file is not found: /gnu/terry/pratchett"):
|
||||
with pytest.raises(FileNotFoundError, match=r"template file is not found: [/|\\]gnu[/|\\]terry[/|\\]pratchett"):
|
||||
ReportJinja(Path("/gnu/terry/pratchett"))
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import csv
|
||||
import pathlib
|
||||
from typing import Any, Callable
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
|
@ -49,8 +50,8 @@ class TestReportCsv:
|
|||
# Generate the CSV report
|
||||
ReportCsv.generate(result_manager, csv_filename)
|
||||
|
||||
# Read the generated CSV file
|
||||
with pathlib.Path.open(csv_filename, encoding="utf-8") as csvfile:
|
||||
# Read the generated CSV file - newline required on Windows..
|
||||
with pathlib.Path.open(csv_filename, encoding="utf-8", newline="") as csvfile:
|
||||
reader = csv.reader(csvfile, delimiter=",")
|
||||
rows = list(reader)
|
||||
|
||||
|
@ -82,11 +83,9 @@ class TestReportCsv:
|
|||
max_test_entries = 10
|
||||
result_manager = result_manager_factory(max_test_entries)
|
||||
|
||||
# Create a temporary CSV file path and make tmp_path read_only
|
||||
tmp_path.chmod(0o400)
|
||||
csv_filename = tmp_path / "read_only.csv"
|
||||
|
||||
with pytest.raises(OSError, match="Permission denied"):
|
||||
with patch("pathlib.Path.open", side_effect=OSError("Any OSError")), pytest.raises(OSError, match="Any OSError"):
|
||||
# Generate the CSV report
|
||||
ReportCsv.generate(result_manager, csv_filename)
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from io import StringIO
|
||||
from io import BytesIO, TextIOWrapper
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
@ -46,7 +46,7 @@ def test_md_report_base() -> None:
|
|||
|
||||
results = ResultManager()
|
||||
|
||||
with StringIO() as mock_file:
|
||||
with TextIOWrapper(BytesIO(b"1 2 3")) as mock_file:
|
||||
report = FakeMDReportBase(mock_file, results)
|
||||
assert report.generate_heading_name() == "Fake MD Report Base"
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from contextlib import AbstractContextManager, nullcontext
|
||||
from typing import TYPE_CHECKING, Callable
|
||||
|
@ -379,3 +380,103 @@ class TestResultManager:
|
|||
|
||||
assert len(result_manager.get_devices()) == 2
|
||||
assert all(t in result_manager.get_devices() for t in ["Device1", "Device2"])
|
||||
|
||||
def test_stats_computation_methods(self, test_result_factory: Callable[[], TestResult], caplog: pytest.LogCaptureFixture) -> None:
|
||||
"""Test ResultManager internal stats computation methods."""
|
||||
result_manager = ResultManager()
|
||||
|
||||
# Initially stats should be unsynced
|
||||
assert result_manager._stats_in_sync is False
|
||||
|
||||
# Test _reset_stats
|
||||
result_manager._reset_stats()
|
||||
assert result_manager._stats_in_sync is False
|
||||
assert len(result_manager._device_stats) == 0
|
||||
assert len(result_manager._category_stats) == 0
|
||||
assert len(result_manager._test_stats) == 0
|
||||
|
||||
# Add some test results
|
||||
test1 = test_result_factory()
|
||||
test1.name = "device1"
|
||||
test1.result = AntaTestStatus.SUCCESS
|
||||
test1.categories = ["system"]
|
||||
test1.test = "test1"
|
||||
|
||||
test2 = test_result_factory()
|
||||
test2.name = "device2"
|
||||
test2.result = AntaTestStatus.FAILURE
|
||||
test2.categories = ["interfaces"]
|
||||
test2.test = "test2"
|
||||
|
||||
result_manager.add(test1)
|
||||
result_manager.add(test2)
|
||||
|
||||
# Stats should still be unsynced after adding results
|
||||
assert result_manager._stats_in_sync is False
|
||||
|
||||
# Test _compute_stats directly
|
||||
with caplog.at_level(logging.INFO):
|
||||
result_manager._compute_stats()
|
||||
assert "Computing statistics for all results" in caplog.text
|
||||
assert result_manager._stats_in_sync is True
|
||||
|
||||
# Verify stats content
|
||||
assert len(result_manager._device_stats) == 2
|
||||
assert len(result_manager._category_stats) == 2
|
||||
assert len(result_manager._test_stats) == 2
|
||||
assert result_manager._device_stats["device1"].tests_success_count == 1
|
||||
assert result_manager._device_stats["device2"].tests_failure_count == 1
|
||||
assert result_manager._category_stats["system"].tests_success_count == 1
|
||||
assert result_manager._category_stats["interfaces"].tests_failure_count == 1
|
||||
assert result_manager._test_stats["test1"].devices_success_count == 1
|
||||
assert result_manager._test_stats["test2"].devices_failure_count == 1
|
||||
|
||||
def test_stats_property_computation(self, test_result_factory: Callable[[], TestResult], caplog: pytest.LogCaptureFixture) -> None:
|
||||
"""Test that stats are computed only once when accessed via properties."""
|
||||
result_manager = ResultManager()
|
||||
|
||||
# Add some test results
|
||||
test1 = test_result_factory()
|
||||
test1.name = "device1"
|
||||
test1.result = AntaTestStatus.SUCCESS
|
||||
test1.categories = ["system"]
|
||||
result_manager.add(test1)
|
||||
|
||||
test2 = test_result_factory()
|
||||
test2.name = "device2"
|
||||
test2.result = AntaTestStatus.FAILURE
|
||||
test2.categories = ["interfaces"]
|
||||
result_manager.add(test2)
|
||||
|
||||
# Stats should be unsynced after adding results
|
||||
assert result_manager._stats_in_sync is False
|
||||
assert "Computing statistics" not in caplog.text
|
||||
|
||||
# Access device_stats property - should trigger computation
|
||||
with caplog.at_level(logging.INFO):
|
||||
_ = result_manager.device_stats
|
||||
assert "Computing statistics for all results" in caplog.text
|
||||
assert result_manager._stats_in_sync is True
|
||||
|
||||
# Clear the log
|
||||
caplog.clear()
|
||||
|
||||
# Access other stats properties - should not trigger computation again
|
||||
with caplog.at_level(logging.INFO):
|
||||
_ = result_manager.category_stats
|
||||
_ = result_manager.test_stats
|
||||
_ = result_manager.sorted_category_stats
|
||||
assert "Computing statistics" not in caplog.text
|
||||
|
||||
# Add another result - should mark stats as unsynced
|
||||
test3 = test_result_factory()
|
||||
test3.name = "device3"
|
||||
test3.result = "error"
|
||||
result_manager.add(test3)
|
||||
assert result_manager._stats_in_sync is False
|
||||
|
||||
# Access stats again - should trigger recomputation
|
||||
with caplog.at_level(logging.INFO):
|
||||
_ = result_manager.device_stats
|
||||
assert "Computing statistics for all results" in caplog.text
|
||||
assert result_manager._stats_in_sync is True
|
||||
|
|
|
@@ -192,8 +192,7 @@ def test_regexp_eos_blacklist_cmds(test_string: str, expected: bool) -> None:
"""Test REGEXP_EOS_BLACKLIST_CMDS."""

def matches_any_regex(string: str, regex_list: list[str]) -> bool:
"""
Check if a string matches at least one regular expression in a list.
"""Check if a string matches at least one regular expression in a list.

:param string: The string to check.
:param regex_list: A list of regular expressions.
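The hunk above is cut off before the helper's body. Purely as an assumed illustration (not the repository's code), such a helper usually reduces to a single any() over re:

```python
import re

# Assumed completion of a "match any regex" helper; the body is not shown in the hunk above.
def matches_any_regex(string: str, regex_list: list[str]) -> bool:
    """Check if a string matches at least one regular expression in a list."""
    return any(re.match(regex, string) is not None for regex in regex_list)

assert matches_any_regex("reload now", [r"^reload.*", r"^write.*"]) is True
assert matches_any_regex("show version", [r"^reload.*", r"^write.*"]) is False
```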
77  tests/units/test_decorators.py  Normal file
@@ -0,0 +1,77 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""test anta.decorators.py."""

from __future__ import annotations

import logging
from typing import TYPE_CHECKING, ClassVar

import pytest

from anta.decorators import deprecated_test_class, skip_on_platforms
from anta.models import AntaCommand, AntaTemplate, AntaTest

if TYPE_CHECKING:
from anta.device import AntaDevice


class ExampleTest(AntaTest):
"""ANTA test that always succeed."""

categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@AntaTest.anta_test
def test(self) -> None:
"""Test function."""
self.result.is_success()


@pytest.mark.parametrize(
"new_tests",
[
pytest.param(None, id="No new_tests"),
pytest.param(["NewExampleTest"], id="one new_tests"),
pytest.param(["NewExampleTest1", "NewExampleTest2"], id="multiple new_tests"),
],
)
def test_deprecated_test_class(caplog: pytest.LogCaptureFixture, device: AntaDevice, new_tests: list[str] | None) -> None:
"""Test deprecated_test_class decorator."""
caplog.set_level(logging.INFO)

decorated_test_class = deprecated_test_class(new_tests=new_tests)(ExampleTest)

# Initialize the decorated test
decorated_test_class(device)

if new_tests is None:
assert "ExampleTest test is deprecated." in caplog.messages
else:
assert f"ExampleTest test is deprecated. Consider using the following new tests: {', '.join(new_tests)}." in caplog.messages


@pytest.mark.parametrize(
("platforms", "device_platform", "expected_result"),
[
pytest.param([], "cEOS-lab", "success", id="empty platforms"),
pytest.param(["cEOS-lab"], "cEOS-lab", "skipped", id="skip on one platform - match"),
pytest.param(["cEOS-lab"], "vEOS", "success", id="skip on one platform - no match"),
pytest.param(["cEOS-lab", "vEOS"], "cEOS-lab", "skipped", id="skip on multiple platforms - match"),
],
)
async def test_skip_on_platforms(device: AntaDevice, platforms: list[str], device_platform: str, expected_result: str) -> None:
"""Test skip_on_platforms decorator.

Leverage the ExampleTest defined at the top of the module.
"""
# Apply the decorator - ignoring mypy warning - this is for testing
ExampleTest.test = skip_on_platforms(platforms)(ExampleTest.test)  # type: ignore[method-assign]

device.hw_model = device_platform

test_instance = ExampleTest(device)
await test_instance.test()
assert test_instance.result.result == expected_result
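For readers unfamiliar with the decorator exercised here, a self-contained sketch of the skip-on-platform idea follows; it is a simplified, synchronous stand-in and not the actual anta.decorators implementation:

```python
from functools import wraps
from typing import Any, Callable

# Simplified, assumed illustration of a platform-skip decorator.
def skip_on_platforms_sketch(platforms: list[str]) -> Callable[[Callable[..., str]], Callable[..., str]]:
    def decorator(func: Callable[..., str]) -> Callable[..., str]:
        @wraps(func)
        def wrapper(hw_model: str, *args: Any, **kwargs: Any) -> str:
            if hw_model in platforms:
                return "skipped"  # short-circuit instead of running the check
            return func(hw_model, *args, **kwargs)
        return wrapper
    return decorator

@skip_on_platforms_sketch(["cEOS-lab"])
def run_check(hw_model: str) -> str:
    return "success"

assert run_check("cEOS-lab") == "skipped"
assert run_check("vEOS") == "success"
```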
@@ -6,13 +6,15 @@
from __future__ import annotations

import asyncio
from contextlib import AbstractContextManager
from contextlib import nullcontext as does_not_raise
from pathlib import Path
from typing import TYPE_CHECKING, Any
from unittest.mock import patch

import pytest
from asyncssh import SSHClientConnection, SSHClientConnectionOptions
from httpx import ConnectError, HTTPError
from httpx import ConnectError, HTTPError, TimeoutException
from rich import print as rprint

from anta.device import AntaDevice, AsyncEOSDevice

@@ -24,13 +26,37 @@ if TYPE_CHECKING:
from _pytest.mark.structures import ParameterSet

INIT_PARAMS: list[ParameterSet] = [
pytest.param({"host": "42.42.42.42", "username": "anta", "password": "anta"}, {"name": "42.42.42.42"}, id="no name, no port"),
pytest.param({"host": "42.42.42.42", "username": "anta", "password": "anta", "port": 666}, {"name": "42.42.42.42:666"}, id="no name, port"),
pytest.param({"host": "42.42.42.42", "username": "anta", "password": "anta"}, {"name": "42.42.42.42"}, does_not_raise(), id="no name, no port"),
pytest.param({"host": "42.42.42.42", "username": "anta", "password": "anta", "port": 666}, {"name": "42.42.42.42:666"}, does_not_raise(), id="no name, port"),
pytest.param(
{"host": "42.42.42.42", "username": "anta", "password": "anta", "name": "test.anta.ninja", "disable_cache": True}, {"name": "test.anta.ninja"}, id="name"
{"host": "42.42.42.42", "username": "anta", "password": "anta", "name": "test.anta.ninja", "disable_cache": True},
{"name": "test.anta.ninja"},
does_not_raise(),
id="name",
),
pytest.param(
{"host": "42.42.42.42", "username": "anta", "password": "anta", "name": "test.anta.ninja", "insecure": True}, {"name": "test.anta.ninja"}, id="insecure"
{"host": "42.42.42.42", "username": "anta", "password": "anta", "name": "test.anta.ninja", "insecure": True},
{"name": "test.anta.ninja"},
does_not_raise(),
id="insecure",
),
pytest.param(
{"host": None, "username": "anta", "password": "anta", "name": "test.anta.ninja"},
None,
pytest.raises(ValueError, match="'host' is required to create an AsyncEOSDevice"),
id="host is None",
),
pytest.param(
{"host": "42.42.42.42", "username": None, "password": "anta", "name": "test.anta.ninja"},
None,
pytest.raises(ValueError, match="'username' is required to instantiate device 'test.anta.ninja'"),
id="username is None",
),
pytest.param(
{"host": "42.42.42.42", "username": "anta", "password": None, "name": "test.anta.ninja"},
None,
pytest.raises(ValueError, match="'password' is required to instantiate device 'test.anta.ninja'"),
id="password is None",
),
]
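Each parameter set above now carries its own expectation context (does_not_raise() or pytest.raises(...)), so one test body covers both valid and invalid constructor inputs. A generic, self-contained sketch of that parametrization pattern, using a toy function rather than ANTA code:

```python
from __future__ import annotations

from contextlib import nullcontext as does_not_raise

import pytest

def parse_port(value: str | None) -> int:
    """Toy helper used only for this illustration."""
    if value is None:
        raise ValueError("'port' is required")
    return int(value)

@pytest.mark.parametrize(
    ("value", "expectation"),
    [
        pytest.param("443", does_not_raise(), id="valid"),
        pytest.param(None, pytest.raises(ValueError, match="'port' is required"), id="missing"),
    ],
)
def test_parse_port(value: str | None, expectation) -> None:
    # The context manager either expects no exception or asserts the expected one.
    with expectation:
        assert parse_port(value) == 443
```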
EQUALITY_PARAMS: list[ParameterSet] = [
@@ -48,7 +74,10 @@ EQUALITY_PARAMS: list[ParameterSet] = [
id="not-equal-port",
),
pytest.param(
{"host": "42.42.42.41", "username": "anta", "password": "anta"}, {"host": "42.42.42.42", "username": "anta", "password": "anta"}, False, id="not-equal-host"
{"host": "42.42.42.41", "username": "anta", "password": "anta"},
{"host": "42.42.42.42", "username": "anta", "password": "anta"},
False,
id="not-equal-host",
),
]
ASYNCEAPI_COLLECT_PARAMS: list[ParameterSet] = [
@@ -287,7 +316,58 @@ ASYNCEAPI_COLLECT_PARAMS: list[ParameterSet] = [
},
},
{"output": None, "errors": ["Authorization denied for command 'show version'"]},
id="asynceapi.EapiCommandError",
id="asynceapi.EapiCommandError - Authorization denied",
),
pytest.param(
{},
{
"command": "show version",
"patch_kwargs": {
"side_effect": EapiCommandError(
passed=[],
failed="show version",
errors=["not supported on this hardware platform"],
errmsg="Invalid command",
not_exec=[],
)
},
},
{"output": None, "errors": ["not supported on this hardware platform"]},
id="asynceapi.EapiCommandError - not supported",
),
pytest.param(
{},
{
"command": "show version",
"patch_kwargs": {
"side_effect": EapiCommandError(
passed=[],
failed="show version",
errors=["BGP inactive"],
errmsg="Invalid command",
not_exec=[],
)
},
},
{"output": None, "errors": ["BGP inactive"]},
id="asynceapi.EapiCommandError - known EOS error",
),
pytest.param(
{},
{
"command": "show version",
"patch_kwargs": {
"side_effect": EapiCommandError(
passed=[],
failed="show version",
errors=["Invalid input (privileged mode required)"],
errmsg="Invalid command",
not_exec=[],
)
},
},
{"output": None, "errors": ["Invalid input (privileged mode required)"]},
id="asynceapi.EapiCommandError - requires privileges",
),
pytest.param(
{},
@@ -301,6 +381,12 @@ ASYNCEAPI_COLLECT_PARAMS: list[ParameterSet] = [
{"output": None, "errors": ["ConnectError: Cannot open port"]},
id="httpx.ConnectError",
),
pytest.param(
{},
{"command": "show version", "patch_kwargs": {"side_effect": TimeoutException("Test")}},
{"output": None, "errors": ["TimeoutException: Test"]},
id="httpx.TimeoutException",
),
]
ASYNCEAPI_COPY_PARAMS: list[ParameterSet] = [
pytest.param({}, {"sources": [Path("/mnt/flash"), Path("/var/log/agents")], "destination": Path(), "direction": "from"}, id="from"),
@@ -531,22 +617,24 @@ class TestAntaDevice:
class TestAsyncEOSDevice:
"""Test for anta.device.AsyncEOSDevice."""

@pytest.mark.parametrize(("device", "expected"), INIT_PARAMS)
def test__init__(self, device: dict[str, Any], expected: dict[str, Any]) -> None:
@pytest.mark.parametrize(("device", "expected", "expected_raise"), INIT_PARAMS)
def test__init__(self, device: dict[str, Any], expected: dict[str, Any] | None, expected_raise: AbstractContextManager[Exception]) -> None:
"""Test the AsyncEOSDevice constructor."""
dev = AsyncEOSDevice(**device)
with expected_raise:
dev = AsyncEOSDevice(**device)

assert dev.name == expected["name"]
if device.get("disable_cache") is True:
assert dev.cache is None
assert dev.cache_locks is None
else: # False or None
assert dev.cache is not None
assert dev.cache_locks is not None
hash(dev)
assert expected is not None
assert dev.name == expected["name"]
if device.get("disable_cache") is True:
assert dev.cache is None
assert dev.cache_locks is None
else: # False or None
assert dev.cache is not None
assert dev.cache_locks is not None
hash(dev)

with patch("anta.device.__DEBUG__", new=True):
rprint(dev)
with patch("anta.device.__DEBUG__", new=True):
rprint(dev)

@pytest.mark.parametrize(("device1", "device2", "expected"), EQUALITY_PARAMS)
def test__eq(self, device1: dict[str, Any], device2: dict[str, Any], expected: bool) -> None:
@@ -26,8 +26,6 @@ if TYPE_CHECKING:
class FakeTest(AntaTest):
"""ANTA test that always succeed."""

name = "FakeTest"
description = "ANTA test that always succeed"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -40,8 +38,6 @@ class FakeTest(AntaTest):
class FakeTestWithFailedCommand(AntaTest):
"""ANTA test with a command that failed."""

name = "FakeTestWithFailedCommand"
description = "ANTA test with a command that failed"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show version", errors=["failed command"])]

@@ -54,8 +50,6 @@ class FakeTestWithFailedCommand(AntaTest):
class FakeTestWithUnsupportedCommand(AntaTest):
"""ANTA test with an unsupported command."""

name = "FakeTestWithUnsupportedCommand"
description = "ANTA test with an unsupported command"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [
AntaCommand(

@@ -70,11 +64,26 @@ class FakeTestWithUnsupportedCommand(AntaTest):
self.result.is_success()


class FakeTestWithKnownEOSError(AntaTest):
"""ANTA test triggering a known EOS Error that should translate to failure of the test."""

categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [
AntaCommand(
command="show bgp evpn route-type mac-ip aa:c1:ab:de:50:ad vni 10010",
errors=["BGP inactive"],
)
]

@AntaTest.anta_test
def test(self) -> None:
"""Test function."""
self.result.is_success()


class FakeTestWithInput(AntaTest):
"""ANTA test with inputs that always succeed."""

name = "FakeTestWithInput"
description = "ANTA test with inputs that always succeed"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -92,8 +101,6 @@ class FakeTestWithInput(AntaTest):
class FakeTestWithTemplate(AntaTest):
"""ANTA test with template that always succeed."""

name = "FakeTestWithTemplate"
description = "ANTA test with template that always succeed"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")]

@@ -115,8 +122,6 @@ class FakeTestWithTemplate(AntaTest):
class FakeTestWithTemplateNoRender(AntaTest):
"""ANTA test with template that miss the render() method."""

name = "FakeTestWithTemplateNoRender"
description = "ANTA test with template that miss the render() method"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")]

@@ -134,8 +139,6 @@ class FakeTestWithTemplateNoRender(AntaTest):
class FakeTestWithTemplateBadRender1(AntaTest):
"""ANTA test with template that raises a AntaTemplateRenderError exception."""

name = "FakeTestWithTemplateBadRender"
description = "ANTA test with template that raises a AntaTemplateRenderError exception"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")]

@@ -157,8 +160,6 @@ class FakeTestWithTemplateBadRender1(AntaTest):
class FakeTestWithTemplateBadRender2(AntaTest):
"""ANTA test with template that raises an arbitrary exception in render()."""

name = "FakeTestWithTemplateBadRender2"
description = "ANTA test with template that raises an arbitrary exception in render()"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")]

@@ -180,8 +181,6 @@ class FakeTestWithTemplateBadRender2(AntaTest):
class FakeTestWithTemplateBadRender3(AntaTest):
"""ANTA test with template that gives extra template parameters in render()."""

name = "FakeTestWithTemplateBadRender3"
description = "ANTA test with template that gives extra template parameters in render()"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")]

@@ -203,8 +202,6 @@ class FakeTestWithTemplateBadRender3(AntaTest):
class FakeTestWithTemplateBadTest(AntaTest):
"""ANTA test with template that tries to access an undefined template parameter in test()."""

name = "FakeTestWithTemplateBadTest"
description = "ANTA test with template that tries to access an undefined template parameter in test()"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")]

@@ -227,8 +224,6 @@ class FakeTestWithTemplateBadTest(AntaTest):
class SkipOnPlatformTest(AntaTest):
"""ANTA test that is skipped."""

name = "SkipOnPlatformTest"
description = "ANTA test that is skipped on a specific platform"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -242,8 +237,6 @@ class SkipOnPlatformTest(AntaTest):
class UnSkipOnPlatformTest(AntaTest):
"""ANTA test that is skipped."""

name = "UnSkipOnPlatformTest"
description = "ANTA test that is skipped on a specific platform"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -257,8 +250,6 @@ class UnSkipOnPlatformTest(AntaTest):
class SkipOnPlatformTestWithInput(AntaTest):
"""ANTA test skipped on platforms but with Input."""

name = "SkipOnPlatformTestWithInput"
description = "ANTA test skipped on platforms but with Input"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -277,8 +268,6 @@ class SkipOnPlatformTestWithInput(AntaTest):
class DeprecatedTestWithoutNewTest(AntaTest):
"""ANTA test that is deprecated without new test."""

name = "DeprecatedTestWitouthNewTest"
description = "ANTA test that is deprecated without new test"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -292,8 +281,6 @@ class DeprecatedTestWithoutNewTest(AntaTest):
class DeprecatedTestWithNewTest(AntaTest):
"""ANTA test that is deprecated with new test."""

name = "DeprecatedTestWithNewTest"
description = "ANTA deprecated test with New Test"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -307,8 +294,6 @@ class DeprecatedTestWithNewTest(AntaTest):
class FakeTestWithMissingTest(AntaTest):
"""ANTA test with missing test() method implementation."""

name = "FakeTestWithMissingTest"
description = "ANTA test with missing test() method implementation"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@@ -516,6 +501,18 @@ ANTATEST_DATA: list[dict[str, Any]] = [
},
},
},
{
"name": "known EOS error command",
"test": FakeTestWithKnownEOSError,
"inputs": None,
"expected": {
"__init__": {"result": "unset"},
"test": {
"result": "failure",
"messages": ["BGP inactive"],
},
},
},
]

BLACKLIST_COMMANDS_PARAMS = ["reload", "reload --force", "write", "wr mem"]

@@ -526,65 +523,61 @@ class TestAntaTest:

def test__init_subclass__(self) -> None:
"""Test __init_subclass__."""
with pytest.raises(NotImplementedError) as exec_info:

class _WrongTestNoName(AntaTest):
"""ANTA test that is missing a name."""

description = "ANTA test that is missing a name"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@AntaTest.anta_test
def test(self) -> None:
self.result.is_success()

assert exec_info.value.args[0] == "Class tests.units.test_models._WrongTestNoName is missing required class attribute name"

with pytest.raises(NotImplementedError) as exec_info:

class _WrongTestNoDescription(AntaTest):
"""ANTA test that is missing a description."""

name = "WrongTestNoDescription"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@AntaTest.anta_test
def test(self) -> None:
self.result.is_success()

assert exec_info.value.args[0] == "Class tests.units.test_models._WrongTestNoDescription is missing required class attribute description"

with pytest.raises(NotImplementedError) as exec_info:
with pytest.raises(AttributeError) as exec_info:

class _WrongTestNoCategories(AntaTest):
"""ANTA test that is missing categories."""

name = "WrongTestNoCategories"
description = "ANTA test that is missing categories"
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []

@AntaTest.anta_test
def test(self) -> None:
self.result.is_success()

assert exec_info.value.args[0] == "Class tests.units.test_models._WrongTestNoCategories is missing required class attribute categories"
assert exec_info.value.args[0] == "Class tests.units.test_models._WrongTestNoCategories is missing required class attribute(s): categories"

with pytest.raises(NotImplementedError) as exec_info:
with pytest.raises(AttributeError) as exec_info:

class _WrongTestNoCommands(AntaTest):
"""ANTA test that is missing commands."""

name = "WrongTestNoCommands"
description = "ANTA test that is missing commands"
categories: ClassVar[list[str]] = []

@AntaTest.anta_test
def test(self) -> None:
self.result.is_success()

assert exec_info.value.args[0] == "Class tests.units.test_models._WrongTestNoCommands is missing required class attribute commands"
assert exec_info.value.args[0] == "Class tests.units.test_models._WrongTestNoCommands is missing required class attribute(s): commands"

with pytest.raises(
AttributeError,
match="Cannot set the description for class _WrongTestNoDescription, either set it in the class definition or add a docstring to the class.",
):

class _WrongTestNoDescription(AntaTest):
# ANTA test that is missing a description and does not have a doctstring.

commands: ClassVar[list[AntaCommand | AntaTemplate]] = []
categories: ClassVar[list[str]] = []

@AntaTest.anta_test
def test(self) -> None:
self.result.is_success()

class _TestOverwriteNameAndDescription(AntaTest):
"""ANTA test where both the test name and description are overwritten in the class definition."""

name: ClassVar[str] = "CustomName"
description: ClassVar[str] = "Custom description"
commands: ClassVar[list[AntaCommand | AntaTemplate]] = []
categories: ClassVar[list[str]] = []

@AntaTest.anta_test
def test(self) -> None:
self.result.is_success()

assert _TestOverwriteNameAndDescription.name == "CustomName"
assert _TestOverwriteNameAndDescription.description == "Custom description"

def test_abc(self) -> None:
"""Test that an error is raised if AntaTest is not implemented."""

@@ -626,8 +619,6 @@ class TestAntaTest:
class FakeTestWithBlacklist(AntaTest):
"""Fake Test for blacklist."""

name = "FakeTestWithBlacklist"
description = "ANTA test that has blacklisted command"
categories: ClassVar[list[str]] = []
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command=command)]

@@ -651,7 +642,7 @@ class TestAntaTest:
assert test.result.custom_field == "a custom field"


class TestAntaComamnd:
class TestAntaCommand:
"""Test for anta.models.AntaCommand."""

# ruff: noqa: B018

@@ -710,6 +701,32 @@ class TestAntaComamnd:
)
assert command.requires_privileges is False
command = AntaCommand(command="show aaa methods accounting")
with pytest.raises(RuntimeError) as exec_info:
with pytest.raises(
RuntimeError, match="Command 'show aaa methods accounting' has not been collected and has not returned an error. Call AntaDevice.collect()."
):
command.requires_privileges
assert exec_info.value.args[0] == "Command 'show aaa methods accounting' has not been collected and has not returned an error. Call AntaDevice.collect()."

@pytest.mark.parametrize(
("command_str", "error", "is_known"),
[
("show ip interface Ethernet1", "Ethernet1 does not support IP", True),
("ping vrf MGMT 1.1.1.1 source Management0 size 100 df-bit repeat 2", "VRF 'MGMT' is not active", True),
("ping vrf MGMT 1.1.1.1 source Management1 size 100 df-bit repeat 2", "No source interface Management1", True),
("show bgp evpn route-type mac-ip aa:c1:ab:de:50:ad vni 10010", "BGP inactive", True),
("show isis BLAH neighbors", "IS-IS (BLAH) is disabled because: IS-IS Network Entity Title (NET) configuration is not present", True),
("show ip interface Ethernet1", None, False),
],
)
def test_returned_known_eos_error(self, command_str: str, error: str | None, is_known: bool) -> None:
"""Test the returned_known_eos_error property."""
# Adding fake output when no error is present to mimic that the command has been collected
command = AntaCommand(command=command_str, errors=[error] if error else [], output=None if error else "{}")
assert command.returned_known_eos_error is is_known

def test_returned_known_eos_error_failure(self) -> None:
"""Test the returned_known_eos_error property unset."""
command = AntaCommand(command="show ip interface Ethernet1")
with pytest.raises(
RuntimeError, match="Command 'show ip interface Ethernet1' has not been collected and has not returned an error. Call AntaDevice.collect()."
):
command.returned_known_eos_error
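The parametrized cases above imply that returned_known_eos_error compares each collected error message against a list of known EOS error patterns. A rough, assumed sketch of that matching logic, with the patterns copied from the test cases (the real list and property live in the ANTA code base, not here):

```python
import re

# Patterns inferred from the parametrized test cases above; illustrative only.
KNOWN_EOS_ERRORS = [
    r".* does not support IP",
    r"BGP inactive",
    r"VRF '.*' is not active",
    r"No source interface .*",
    r"IS-IS \(.*\) is disabled because: .*",
]

def is_known_eos_error(errors: list[str]) -> bool:
    """Return True if any collected error message matches a known EOS error pattern."""
    return any(re.fullmatch(pattern, error) for error in errors for pattern in KNOWN_EOS_ERRORS)

assert is_known_eos_error(["BGP inactive"]) is True
assert is_known_eos_error(["some other error"]) is False
```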
@@ -6,7 +6,7 @@
from __future__ import annotations

import logging
import resource
import os
import sys
from pathlib import Path
from unittest.mock import patch

@@ -16,10 +16,16 @@ import pytest
from anta.catalog import AntaCatalog
from anta.inventory import AntaInventory
from anta.result_manager import ResultManager
from anta.runner import adjust_rlimit_nofile, main, prepare_tests
from anta.runner import main, prepare_tests

from .test_models import FakeTest, FakeTestWithMissingTest

if os.name == "posix":
# The function is not defined on non-POSIX system
import resource

from anta.runner import adjust_rlimit_nofile

DATA_DIR: Path = Path(__file__).parent.parent.resolve() / "data"
FAKE_CATALOG: AntaCatalog = AntaCatalog.from_list([(FakeTest, None)])

@@ -65,8 +71,10 @@ async def test_no_selected_device(caplog: pytest.LogCaptureFixture, inventory: A
assert msg in caplog.messages


@pytest.mark.skipif(os.name != "posix", reason="Cannot run this test on Windows")
def test_adjust_rlimit_nofile_valid_env(caplog: pytest.LogCaptureFixture) -> None:
"""Test adjust_rlimit_nofile with valid environment variables."""
# pylint: disable=E0606
with (
caplog.at_level(logging.DEBUG),
patch.dict("os.environ", {"ANTA_NOFILE": "20480"}),

@@ -96,6 +104,7 @@ def test_adjust_rlimit_nofile_valid_env(caplog: pytest.LogCaptureFixture) -> Non
setrlimit_mock.assert_called_once_with(resource.RLIMIT_NOFILE, (20480, 1048576))


@pytest.mark.skipif(os.name != "posix", reason="Cannot run this test on Windows")
def test_adjust_rlimit_nofile_invalid_env(caplog: pytest.LogCaptureFixture) -> None:
"""Test adjust_rlimit_nofile with valid environment variables."""
with (

@@ -129,6 +138,31 @@ def test_adjust_rlimit_nofile_invalid_env(caplog: pytest.LogCaptureFixture) -> N
setrlimit_mock.assert_called_once_with(resource.RLIMIT_NOFILE, (16384, 1048576))


@pytest.mark.skipif(os.name == "posix", reason="Run this test on Windows only")
async def test_check_runner_log_for_windows(caplog: pytest.LogCaptureFixture, inventory: AntaInventory) -> None:
"""Test log output for Windows host regarding rlimit."""
caplog.set_level(logging.INFO)
manager = ResultManager()
# Using dry-run to shorten the test
await main(manager, inventory, FAKE_CATALOG, dry_run=True)
assert "Running on a non-POSIX system, cannot adjust the maximum number of file descriptors." in caplog.records[-3].message


# We could instead merge multiple coverage report together but that requires more work than just this.
@pytest.mark.skipif(os.name != "posix", reason="Fake non-posix for coverage")
async def test_check_runner_log_for_windows_fake(caplog: pytest.LogCaptureFixture, inventory: AntaInventory) -> None:
"""Test log output for Windows host regarding rlimit."""
with patch("os.name", new="win32"):
del sys.modules["anta.runner"]
from anta.runner import main  # pylint: disable=W0621

caplog.set_level(logging.INFO)
manager = ResultManager()
# Using dry-run to shorten the test
await main(manager, inventory, FAKE_CATALOG, dry_run=True)
assert "Running on a non-POSIX system, cannot adjust the maximum number of file descriptors." in caplog.records[-3].message


@pytest.mark.parametrize(
("inventory", "tags", "tests", "devices_count", "tests_count"),
[

@@ -138,6 +172,7 @@ def test_adjust_rlimit_nofile_invalid_env(caplog: pytest.LogCaptureFixture) -> N
pytest.param({"filename": "test_inventory_with_tags.yml"}, None, {"VerifyMlagStatus", "VerifyUptime"}, 3, 5, id="filtered-tests"),
pytest.param({"filename": "test_inventory_with_tags.yml"}, {"leaf"}, {"VerifyMlagStatus", "VerifyUptime"}, 2, 4, id="1-tag-filtered-tests"),
pytest.param({"filename": "test_inventory_with_tags.yml"}, {"invalid"}, None, 0, 0, id="invalid-tag"),
pytest.param({"filename": "test_inventory_with_tags.yml"}, {"dc1"}, None, 0, 0, id="device-tag-no-tests"),
],
indirect=["inventory"],
)

@@ -11,7 +11,7 @@ from typing import Any

import pytest

from anta.tools import convert_categories, custom_division, get_dict_superset, get_failed_logs, get_item, get_value
from anta.tools import convert_categories, custom_division, format_data, get_dict_superset, get_failed_logs, get_item, get_value

TEST_GET_FAILED_LOGS_DATA = [
{"id": 1, "name": "Alice", "age": 30, "email": "alice@example.com"},

@@ -513,3 +513,17 @@ def test_convert_categories(test_input: list[str], expected_raise: AbstractConte
"""Test convert_categories."""
with expected_raise:
assert convert_categories(test_input) == expected_result


@pytest.mark.parametrize(
("input_data", "expected_output"),
[
pytest.param({"advertised": True, "received": True, "enabled": True}, "Advertised: True, Received: True, Enabled: True", id="multiple entry, all True"),
pytest.param({"advertised": False, "received": False}, "Advertised: False, Received: False", id="multiple entry, all False"),
pytest.param({}, "", id="empty dict"),
pytest.param({"test": True}, "Test: True", id="single entry"),
],
)
def test_format_data(input_data: dict[str, bool], expected_output: str) -> None:
"""Test format_data."""
assert format_data(input_data) == expected_output
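Judging only from the expected outputs in the table above, format_data appears to title-case each key and join "Key: Value" pairs with commas. A small assumed sketch, not the anta.tools implementation:

```python
# Assumed behaviour, reverse-engineered from the expected outputs above.
def format_data_sketch(data: dict[str, bool]) -> str:
    """Join dict entries as 'Key: Value' pairs, title-casing the keys."""
    return ", ".join(f"{key.title()}: {value}" for key, value in data.items())

assert format_data_sketch({"advertised": True, "received": True}) == "Advertised: True, Received: True"
assert format_data_sketch({}) == ""
```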