Merging upstream version 1.3.0.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Daniel Baumann 2025-03-17 07:33:51 +01:00
parent 5b922100c9
commit 8a6a3342fc
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
337 changed files with 16571 additions and 4891 deletions

.codespellignore (new file)

@@ -0,0 +1 @@
+toi


@@ -43,7 +43,7 @@ jobs:
           - 'docs/**'
           - 'README.md'
   check-requirements:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     strategy:
       matrix:
         python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
@@ -62,7 +62,7 @@ jobs:
   # @gmuloc: commenting this out for now
   #missing-documentation:
   #  name: "Warning documentation is missing"
-  #  runs-on: ubuntu-20.04
+  #  runs-on: ubuntu-latest
   #  needs: [file-changes]
   #  if: needs.file-changes.outputs.cli == 'true' && needs.file-changes.outputs.docs == 'false'
   #  steps:
@@ -74,7 +74,7 @@ jobs:
   #        You should update documentation to reflect your change, or maybe not :)
   lint-python:
     name: Check the code style
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     needs: file-changes
     if: needs.file-changes.outputs.code == 'true'
     steps:
@@ -89,7 +89,7 @@ jobs:
         run: tox -e lint
   type-python:
     name: Check typing
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     needs: file-changes
     if: needs.file-changes.outputs.code == 'true'
     steps:
@@ -104,7 +104,7 @@ jobs:
         run: tox -e type
   test-python:
     name: Pytest across all supported python versions
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     needs: [lint-python, type-python]
     strategy:
       matrix:
@@ -138,7 +138,7 @@ jobs:
         run: tox
   test-documentation:
     name: Build offline documentation for testing
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     needs: [test-python]
     steps:
       - uses: actions/checkout@v4


@@ -27,31 +27,9 @@ jobs:
       - name: Publish distribution 📦 to PyPI
         uses: pypa/gh-action-pypi-publish@release/v1
-  release-coverage:
-    name: Updated ANTA release coverage badge
-    runs-on: ubuntu-20.04
-    needs: [pypi]
-    steps:
-      - uses: actions/checkout@v4
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.11"
-      - name: Install dependencies
-        run: pip install genbadge[coverage] tox tox-gh-actions
-      - name: "Run pytest via tox for ${{ matrix.python }}"
-        run: tox
-      - name: Generate coverage badge
-        run: genbadge coverage -i .coverage.xml -o badge/latest-release-coverage.svg
-      - name: Publish coverage badge to gh-pages branch
-        uses: JamesIves/github-pages-deploy-action@v4
-        with:
-          branch: coverage-badge
-          folder: badge
   release-doc:
     name: "Publish documentation for release ${{github.ref_name}}"
     runs-on: ubuntu-latest
-    needs: [release-coverage]
     steps:
       - uses: actions/checkout@v4
         with:


@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
       - name: Setup Python
         uses: actions/setup-python@v5
         with:
@@ -30,7 +30,7 @@ jobs:
       - name: "Run pytest via tox for ${{ matrix.python }}"
         run: tox
       - name: SonarCloud Scan
-        uses: SonarSource/sonarcloud-github-action@master
+        uses: SonarSource/sonarqube-scan-action@v5.0.0
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}


@@ -3,6 +3,7 @@
 # See https://pre-commit.com/hooks.html for more hooks
 ci:
   autoupdate_commit_msg: "ci: pre-commit autoupdate"
+  skip: [mypy]
 files: ^(anta|docs|scripts|tests|asynceapi)/
@@ -43,28 +44,28 @@ repos:
         - --allow-past-years
         - --fuzzy-match-generates-todo
         - --comment-style
-        - '<!--| ~| -->'
+        - "<!--| ~| -->"
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.4
+    rev: v0.10.0
     hooks:
       - id: ruff
        name: Run Ruff linter
-        args: [ --fix ]
+        args: [--fix]
      - id: ruff-format
        name: Run Ruff formatter
   - repo: https://github.com/pycqa/pylint
-    rev: "v3.3.2"
+    rev: "v3.3.5"
     hooks:
       - id: pylint
        name: Check code style with pylint
        description: This hook runs pylint.
        types: [python]
        args:
          - -rn # Only display messages
          - -sn # Don't display the score
          - --rcfile=pyproject.toml # Link to config file
        additional_dependencies:
          - anta[cli]
          - types-PyYAML
@@ -76,16 +77,17 @@ repos:
          - respx
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.3.0
+    rev: v2.4.1
     hooks:
       - id: codespell
        name: Checks for common misspellings in text files.
        entry: codespell
        language: python
        types: [text]
+        args: ["--ignore-words", ".codespellignore"]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.14.0
+    rev: v1.15.0
     hooks:
       - id: mypy
        name: Check typing with mypy
@@ -97,10 +99,10 @@ repos:
          - types-requests
          - types-pyOpenSSL
          - pytest
-        files: ^(anta|tests)/
+        files: ^(anta|tests|asynceapi)/
   - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.43.0
+    rev: v0.44.0
     hooks:
       - id: markdownlint
        name: Check Markdown files style.
@@ -122,5 +124,14 @@ repos:
        pass_filenames: false
        additional_dependencies:
          - anta[cli]
-          # TODO: next can go once we have it added to anta properly
-          - numpydoc
+      - id: doc-snippets
+        name: Generate doc snippets
+        entry: >-
+          sh -c "docs/scripts/generate_doc_snippets.py"
+        language: python
+        types: [python]
+        files: anta/cli/
+        verbose: true
+        pass_filenames: false
+        additional_dependencies:
+          - anta[cli]


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Arista Network Test Automation (ANTA) Framework."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Catalog related functions."""
@@ -14,11 +14,11 @@ from itertools import chain
 from json import load as json_load
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Literal, Optional, Union
-from warnings import warn

 from pydantic import BaseModel, ConfigDict, RootModel, ValidationError, ValidationInfo, field_validator, model_serializer, model_validator
 from pydantic.types import ImportString
 from pydantic_core import PydanticCustomError
+from typing_extensions import deprecated
 from yaml import YAMLError, safe_dump, safe_load

 from anta.logger import anta_log_exception
@@ -182,7 +182,7 @@ class AntaCatalogFile(RootModel[dict[ImportString[Any], list[AntaTestDefinition]
         except Exception as e:
             # A test module is potentially user-defined code.
             # We need to catch everything if we want to have meaningful logs
-            module_str = f"{module_name[1:] if module_name.startswith('.') else module_name}{f' from package {package}' if package else ''}"
+            module_str = f"{module_name.removeprefix('.')}{f' from package {package}' if package else ''}"
             message = f"Module named {module_str} cannot be imported. Verify that the module exists and there is no Python syntax issues."
             anta_log_exception(e, message, logger)
             raise ValueError(message) from e
@@ -223,16 +223,14 @@ class AntaCatalogFile(RootModel[dict[ImportString[Any], list[AntaTestDefinition]
                 raise ValueError(msg)  # noqa: TRY004 pydantic catches ValueError or AssertionError, no TypeError
             if len(test_definition) != 1:
                 msg = (
-                    f"Syntax error when parsing: {test_definition}\n"
-                    "It must be a dictionary with a single entry. Check the indentation in the test catalog."
+                    f"Syntax error when parsing: {test_definition}\nIt must be a dictionary with a single entry. Check the indentation in the test catalog."
                 )
                 raise ValueError(msg)
             for test_name, test_inputs in test_definition.copy().items():
                 test: type[AntaTest] | None = getattr(module, test_name, None)
                 if test is None:
                     msg = (
-                        f"{test_name} is not defined in Python module {module.__name__}"
-                        f"{f' (from {module.__file__})' if module.__file__ is not None else ''}"
+                        f"{test_name} is not defined in Python module {module.__name__}{f' (from {module.__file__})' if module.__file__ is not None else ''}"
                     )
                     raise ValueError(msg)
                 test_definitions.append(AntaTestDefinition(test=test, inputs=test_inputs))
@@ -252,7 +250,7 @@ class AntaCatalogFile(RootModel[dict[ImportString[Any], list[AntaTestDefinition]
         # This could be improved.
         # https://github.com/pydantic/pydantic/issues/1043
         # Explore if this worth using this: https://github.com/NowanIlfideme/pydantic-yaml
-        return safe_dump(safe_load(self.model_dump_json(serialize_as_any=True, exclude_unset=True)), indent=2, width=math.inf)
+        return safe_dump(safe_load(self.model_dump_json(serialize_as_any=True, exclude_unset=True)), width=math.inf)

     def to_json(self) -> str:
         """Return a JSON representation string of this model.
@@ -291,11 +289,7 @@ class AntaCatalog:
         self._tests = tests
         self._filename: Path | None = None
         if filename is not None:
-            if isinstance(filename, Path):
-                self._filename = filename
-            else:
-                self._filename = Path(filename)
+            self._filename = filename if isinstance(filename, Path) else Path(filename)
         self.indexes_built: bool
         self.tag_to_tests: defaultdict[str | None, set[AntaTestDefinition]]
         self._init_indexes()
@@ -325,6 +319,8 @@ class AntaCatalog:
             msg = "A test in the catalog must be an AntaTestDefinition instance"
             raise TypeError(msg)
         self._tests = value
+        # Tests were modified so indexes need to be rebuilt.
+        self.clear_indexes()

     @staticmethod
     def parse(filename: str | Path, file_format: Literal["yaml", "json"] = "yaml") -> AntaCatalog:
@@ -440,13 +436,12 @@ class AntaCatalog:
         combined_tests = list(chain(*(catalog.tests for catalog in catalogs)))
         return cls(tests=combined_tests)

+    @deprecated(
+        "This method is deprecated, use `AntaCatalogs.merge_catalogs` class method instead. This will be removed in ANTA v2.0.0.", category=DeprecationWarning
+    )
     def merge(self, catalog: AntaCatalog) -> AntaCatalog:
         """Merge two AntaCatalog instances.

-        Warning
-        -------
-        This method is deprecated and will be removed in ANTA v2.0. Use `AntaCatalog.merge_catalogs()` instead.

         Parameters
         ----------
         catalog
@@ -457,12 +452,6 @@ class AntaCatalog:
         AntaCatalog
             A new AntaCatalog instance containing the tests of the two instances.
         """
-        # TODO: Use a decorator to deprecate this method instead. See https://github.com/aristanetworks/anta/issues/754
-        warn(
-            message="AntaCatalog.merge() is deprecated and will be removed in ANTA v2.0. Use AntaCatalog.merge_catalogs() instead.",
-            category=DeprecationWarning,
-            stacklevel=2,
-        )
         return self.merge_catalogs([self, catalog])

     def dump(self) -> AntaCatalogFile:
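The `@deprecated` decorator used above comes from `typing_extensions`; as a point of reference, here is a minimal sketch of that pattern outside ANTA (the `legacy_merge`/`new_merge` names are made up for illustration):

```python
from typing_extensions import deprecated


def new_merge(a: list, b: list) -> list:
    """Preferred API."""
    return [*a, *b]


@deprecated("legacy_merge() is deprecated, use new_merge() instead.", category=DeprecationWarning)
def legacy_merge(a: list, b: list) -> list:
    """Deprecated wrapper kept for backward compatibility."""
    return new_merge(a, b)


# Type checkers flag calls to legacy_merge(); at runtime a DeprecationWarning is emitted on call.
legacy_merge([1], [2])
```

This is why the explicit `warnings.warn(...)` body removed above is no longer needed.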


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """ANTA CLI."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """ANTA CLI."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands to validate configuration files."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 # pylint: disable = redefined-outer-name


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """ANTA Top-level Console.


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands to execute EOS commands on remote devices."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 # pylint: disable = redefined-outer-name


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Utils functions to use with anta.cli.debug module."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands to execute various scripts on EOS devices."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands to execute various scripts on EOS devices."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
@@ -13,6 +13,7 @@ import logging
 from pathlib import Path
 from typing import TYPE_CHECKING, Literal

+from asyncssh.misc import HostKeyNotVerifiable
 from click.exceptions import UsageError
 from httpx import ConnectError, HTTPError
@@ -23,6 +24,7 @@ from asynceapi import EapiCommandError
 if TYPE_CHECKING:
     from anta.inventory import AntaInventory
+    from asynceapi._types import EapiComplexCommand, EapiSimpleCommand

 EOS_SCHEDULED_TECH_SUPPORT = "/mnt/flash/schedule/tech-support"
 INVALID_CHAR = "`~!@#$/"
@@ -96,7 +98,7 @@ async def collect_commands(
         logger.error("Error when collecting commands: %s", str(r))

-async def collect_show_tech(inv: AntaInventory, root_dir: Path, *, configure: bool, tags: set[str] | None = None, latest: int | None = None) -> None:
+async def collect_show_tech(inv: AntaInventory, root_dir: Path, *, configure: bool, tags: set[str] | None = None, latest: int | None = None) -> None:  # noqa: C901
     """Collect scheduled show-tech on devices."""

     async def collect(device: AntaDevice) -> None:
@@ -135,13 +137,13 @@ async def collect_show_tech(inv: AntaInventory, root_dir: Path, *, configure: bo
                 )
                 logger.warning(msg)

-                commands = []
                 # TODO: @mtache - add `config` field to `AntaCommand` object to handle this use case.
                 # Otherwise mypy complains about enable as it is only implemented for AsyncEOSDevice
                 # TODO: Should enable be also included in AntaDevice?
                 if not isinstance(device, AsyncEOSDevice):
                     msg = "anta exec collect-tech-support is only supported with AsyncEOSDevice for now."
                     raise UsageError(msg)
+                commands: list[EapiSimpleCommand | EapiComplexCommand] = []
                 if device.enable and device._enable_password is not None:
                     commands.append({"cmd": "enable", "input": device._enable_password})
                 elif device.enable:
@@ -162,6 +164,11 @@ async def collect_show_tech(inv: AntaInventory, root_dir: Path, *, configure: bo
             await device.copy(sources=filenames, destination=outdir, direction="from")
             logger.info("Collected %s scheduled tech-support from %s", len(filenames), device.name)
+        except HostKeyNotVerifiable:
+            logger.error(
+                "Unable to collect tech-support on %s. The host SSH key could not be verified. Make sure it is part of the `known_hosts` file on your machine.",
+                device.name,
+            )
         except (EapiCommandError, HTTPError, ConnectError) as e:
             logger.error("Unable to collect tech-support on %s: %s", device.name, str(e))
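The new `HostKeyNotVerifiable` branch above surfaces when asyncssh refuses the device's SSH key during the SCP copy. A minimal sketch of where that exception comes from, assuming plain asyncssh and placeholder connection details:

```python
import asyncio

import asyncssh
from asyncssh.misc import HostKeyNotVerifiable


async def main() -> None:
    try:
        # asyncssh validates the server key against ~/.ssh/known_hosts by default.
        async with asyncssh.connect("203.0.113.10", username="admin", password="secret") as conn:
            result = await conn.run("show version", check=True)
            print(result.stdout)
    except HostKeyNotVerifiable:
        # Raised when the host key is missing from (or conflicts with) known_hosts.
        print("Host key could not be verified; add it to known_hosts first.")


asyncio.run(main())
```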


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands to get information from or generate inventories."""


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 # pylint: disable = redefined-outer-name


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Utils functions to use with anta.cli.get.commands module."""
@@ -350,17 +350,18 @@ def print_test(test: type[AntaTest], *, short: bool = False) -> None:
     # Need to handle the fact that we nest the routing modules in Examples.
     # This is a bit fragile.
     inputs = example.split("\n")
-    try:
-        test_name_line = next((i for i, input_entry in enumerate(inputs) if test.name in input_entry))
-    except StopIteration as e:
+    test_name_lines = [i for i, input_entry in enumerate(inputs) if test.name in input_entry]
+    if not test_name_lines:
         msg = f"Could not find the name of the test '{test.name}' in the Example section in the docstring."
-        raise ValueError(msg) from e
-    # TODO: handle not found
-    console.print(f"  {inputs[test_name_line].strip()}")
-    # Injecting the description
-    console.print(f"  # {test.description}", soft_wrap=True)
-    if not short and len(inputs) > test_name_line + 2:  # There are params
-        console.print(textwrap.indent(textwrap.dedent("\n".join(inputs[test_name_line + 1 : -1])), " " * 6))
+        raise ValueError(msg)
+    for list_index, line_index in enumerate(test_name_lines):
+        end = test_name_lines[list_index + 1] if list_index + 1 < len(test_name_lines) else -1
+        console.print(f"  {inputs[line_index].strip()}")
+        # Injecting the description for the first example
+        if list_index == 0:
+            console.print(f"  # {test.description}", soft_wrap=True)
+        if not short and len(inputs) > line_index + 2:  # There are params
+            console.print(textwrap.indent(textwrap.dedent("\n".join(inputs[line_index + 1 : end])), " " * 6))

 def extract_examples(docstring: str) -> str | None:


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands that run ANTA tests using anta.runner."""
@@ -42,9 +42,10 @@ class IgnoreRequiredWithHelp(AliasedGroup):
             if "--help" not in args:
                 raise

-            # remove the required params so that help can display
+            # Fake presence of the required params so that help can display
             for param in self.params:
-                param.required = False
+                if param.required:
+                    param.value_is_missing = lambda value: False  # type: ignore[method-assign]  # noqa: ARG005
         return super().parse_args(ctx, args)


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Click commands that render ANTA tests results."""
@@ -45,7 +45,10 @@ def table(ctx: click.Context, group_by: Literal["device", "test"] | None) -> Non
     help="Path to save report as a JSON file",
 )
 def json(ctx: click.Context, output: pathlib.Path | None) -> None:
-    """ANTA command to check network state with JSON results."""
+    """ANTA command to check network state with JSON results.
+
+    If no `--output` is specified, the output is printed to stdout.
+    """
     run_tests(ctx)
     print_json(ctx, output=output)
     exit_with_code(ctx)
@@ -72,11 +75,11 @@ def text(ctx: click.Context) -> None:
         path_type=pathlib.Path,
     ),
     show_envvar=True,
-    required=False,
+    required=True,
     help="Path to save report as a CSV file",
 )
 def csv(ctx: click.Context, csv_output: pathlib.Path) -> None:
-    """ANTA command to check network states with CSV result."""
+    """ANTA command to check network state with CSV report."""
     run_tests(ctx)
     save_to_csv(ctx, csv_file=csv_output)
     exit_with_code(ctx)


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Utils functions to use with anta.cli.nrfu.commands module."""
@@ -157,7 +157,7 @@ def save_markdown_report(ctx: click.Context, md_output: pathlib.Path) -> None:
         Path to save the markdown report.
     """
     try:
-        MDReportGenerator.generate(results=_get_result_manager(ctx), md_filename=md_output)
+        MDReportGenerator.generate(results=_get_result_manager(ctx).sort(["name", "categories", "test"]), md_filename=md_output)
         console.print(f"Markdown report saved to {md_output}", style="cyan")
     except OSError:
         console.print(f"Failed to save Markdown report to {md_output}", style="cyan")


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Utils functions to use with anta.cli module."""
@@ -9,7 +9,7 @@ import enum
 import functools
 import logging
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any, Callable, Literal

 import click
 from yaml import YAMLError
@@ -17,6 +17,7 @@ from yaml import YAMLError
 from anta.catalog import AntaCatalog
 from anta.inventory import AntaInventory
 from anta.inventory.exceptions import InventoryIncorrectSchemaError, InventoryRootKeyError
+from anta.logger import anta_log_exception

 if TYPE_CHECKING:
     from click import Option
@@ -190,6 +191,14 @@ def core_options(f: Callable[..., Any]) -> Callable[..., Any]:
         required=True,
         type=click.Path(file_okay=True, dir_okay=False, exists=True, readable=True, path_type=Path),
     )
+    @click.option(
+        "--inventory-format",
+        envvar="ANTA_INVENTORY_FORMAT",
+        show_envvar=True,
+        help="Format of the inventory file, either 'yaml' or 'json'",
+        default="yaml",
+        type=click.Choice(["yaml", "json"], case_sensitive=False),
+    )
     @click.pass_context
     @functools.wraps(f)
     def wrapper(
@@ -204,6 +213,7 @@ def core_options(f: Callable[..., Any]) -> Callable[..., Any]:
         timeout: float,
         insecure: bool,
         disable_cache: bool,
+        inventory_format: Literal["json", "yaml"],
         **kwargs: dict[str, Any],
     ) -> Any:
         # If help is invoke somewhere, do not parse inventory
@@ -241,8 +251,10 @@ def core_options(f: Callable[..., Any]) -> Callable[..., Any]:
                 timeout=timeout,
                 insecure=insecure,
                 disable_cache=disable_cache,
+                file_format=inventory_format,
             )
-        except (TypeError, ValueError, YAMLError, OSError, InventoryIncorrectSchemaError, InventoryRootKeyError):
+        except (TypeError, ValueError, YAMLError, OSError, InventoryIncorrectSchemaError, InventoryRootKeyError) as e:
+            anta_log_exception(e, f"Failed to parse the inventory: {inventory}", logger)
             ctx.exit(ExitCode.USAGE_ERROR)
         return f(*args, inventory=i, **kwargs)
@@ -319,7 +331,8 @@ def catalog_options(f: Callable[..., Any]) -> Callable[..., Any]:
         try:
             file_format = catalog_format.lower()
             c = AntaCatalog.parse(catalog, file_format=file_format)  # type: ignore[arg-type]
-        except (TypeError, ValueError, YAMLError, OSError):
+        except (TypeError, ValueError, YAMLError, OSError) as e:
+            anta_log_exception(e, f"Failed to parse the catalog: {catalog}", logger)
             ctx.exit(ExitCode.USAGE_ERROR)
         return f(*args, catalog=c, **kwargs)
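For reference, the `--inventory-format` option added above follows a standard Click pattern; a standalone sketch (the `cli` command below is illustrative, not ANTA code):

```python
import click


@click.command()
@click.option(
    "--inventory-format",
    envvar="ANTA_INVENTORY_FORMAT",
    show_envvar=True,
    default="yaml",
    type=click.Choice(["yaml", "json"], case_sensitive=False),
    help="Format of the inventory file, either 'yaml' or 'json'",
)
def cli(inventory_format: str) -> None:
    """Echo the chosen inventory format."""
    # click.Choice(case_sensitive=False) accepts e.g. "JSON" and normalizes it to the declared "json".
    click.echo(f"Parsing inventory as {inventory_format}")


if __name__ == "__main__":
    cli()
```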


@@ -1,11 +1,30 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Constants used in ANTA."""

 from __future__ import annotations

-ACRONYM_CATEGORIES: set[str] = {"aaa", "mlag", "snmp", "bgp", "ospf", "vxlan", "stp", "igmp", "ip", "lldp", "ntp", "bfd", "ptp", "lanz", "stun", "vlan"}
+ACRONYM_CATEGORIES: set[str] = {
+    "aaa",
+    "avt",
+    "bfd",
+    "bgp",
+    "igmp",
+    "ip",
+    "isis",
+    "lanz",
+    "lldp",
+    "mlag",
+    "ntp",
+    "ospf",
+    "ptp",
+    "snmp",
+    "stp",
+    "stun",
+    "vlan",
+    "vxlan",
+}
 """A set of network protocol or feature acronyms that should be represented in uppercase."""

 MD_REPORT_TOC = """**Table of Contents:**
@@ -24,5 +43,33 @@ KNOWN_EOS_ERRORS = [
     r".* does not support IP",
     r"IS-IS (.*) is disabled because: .*",
     r"No source interface .*",
+    r".*controller\snot\sready.*",
 ]
-"""List of known EOS errors that should set a test status to 'failure' with the error message."""
+"""List of known EOS errors.
+
+!!! failure "Generic EOS Error Handling"
+    When catching these errors, **ANTA will fail the affected test** and reported the error message.
+"""
+
+EOS_BLACKLIST_CMDS = [
+    r"^reload.*",
+    r"^conf.*",
+    r"^wr.*",
+]
+"""List of blacklisted EOS commands.
+
+!!! success "Disruptive commands safeguard"
+    ANTA implements a mechanism to **prevent the execution of disruptive commands** such as `reload`, `write erase` or `configure terminal`.
+"""
+
+UNSUPPORTED_PLATFORM_ERRORS = [
+    "not supported on this hardware platform",
+    "Invalid input (at token 2: 'trident')",
+]
+"""Error messages indicating platform or hardware unsupported commands. Includes both general hardware
+platform errors and specific ASIC family limitations.
+
+!!! tip "Running EOS commands unsupported by hardware"
+    When catching these errors, ANTA will skip the affected test and raise a warning. The **test catalog must be updated** to remove execution of the affected test
+    on unsupported devices.
+"""

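The constants above are plain strings and regular expressions; a small sketch of how an eAPI error message could be checked against them, mirroring the behaviour described in their docstrings (the `classify_eos_error` helper is illustrative, not part of ANTA):

```python
import re

KNOWN_EOS_ERRORS = [r".* does not support IP", r"No source interface .*", r".*controller\snot\sready.*"]
UNSUPPORTED_PLATFORM_ERRORS = ["not supported on this hardware platform", "Invalid input (at token 2: 'trident')"]


def classify_eos_error(message: str) -> str:
    """Return 'skipped' for unsupported-hardware errors, 'failure' for known EOS errors, 'error' otherwise."""
    if any(substring in message for substring in UNSUPPORTED_PLATFORM_ERRORS):
        return "skipped"
    if any(re.match(pattern, message) for pattern in KNOWN_EOS_ERRORS):
        return "failure"
    return "error"


print(classify_eos_error("Ethernet1 does not support IP"))                            # failure
print(classify_eos_error("This command is not supported on this hardware platform"))  # skipped
```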

@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """Module that provides predefined types for AntaTest.Input instances."""
@@ -10,9 +10,6 @@ from pydantic import Field
 from pydantic.functional_validators import AfterValidator, BeforeValidator

 # Regular Expression definition
-# TODO: make this configurable - with an env var maybe?
-REGEXP_EOS_BLACKLIST_CMDS = [r"^reload.*", r"^conf\w*\s*(terminal|session)*", r"^wr\w*\s*\w+"]
-"""List of regular expressions to blacklist from eos commands."""
 REGEXP_PATH_MARKERS = r"[\\\/\s]"
 """Match directory path from string."""
 REGEXP_INTERFACE_ID = r"\d+(\/\d+)*(\.\d+)?"
@@ -26,15 +23,12 @@ REGEX_TYPE_PORTCHANNEL = r"^Port-Channel[0-9]{1,6}$"
 REGEXP_TYPE_HOSTNAME = r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$"
 """Match hostname like `my-hostname`, `my-hostname-1`, `my-hostname-1-2`."""

-# Regexp BGP AFI/SAFI
-REGEXP_BGP_L2VPN_AFI = r"\b(l2[\s\-]?vpn[\s\-]?evpn)\b"
-"""Match L2VPN EVPN AFI."""
-REGEXP_BGP_IPV4_MPLS_LABELS = r"\b(ipv4[\s\-]?mpls[\s\-]?label(s)?)\b"
-"""Match IPv4 MPLS Labels."""
-REGEX_BGP_IPV4_MPLS_VPN = r"\b(ipv4[\s\-]?mpls[\s\-]?vpn)\b"
-"""Match IPv4 MPLS VPN."""
-REGEX_BGP_IPV4_UNICAST = r"\b(ipv4[\s\-]?uni[\s\-]?cast)\b"
-"""Match IPv4 Unicast."""
+# Regular expression for BGP redistributed routes
+REGEX_IPV4_UNICAST = r"ipv4[-_ ]?unicast$"
+REGEX_IPV4_MULTICAST = r"ipv4[-_ ]?multicast$"
+REGEX_IPV6_UNICAST = r"ipv6[-_ ]?unicast$"
+REGEX_IPV6_MULTICAST = r"ipv6[-_ ]?multicast$"


 def aaa_group_prefix(v: str) -> str:
@@ -58,7 +52,7 @@ def interface_autocomplete(v: str) -> str:
         raise ValueError(msg)

     intf_id = m[0]
-    alias_map = {"et": "Ethernet", "eth": "Ethernet", "po": "Port-Channel", "lo": "Loopback"}
+    alias_map = {"et": "Ethernet", "eth": "Ethernet", "po": "Port-Channel", "lo": "Loopback", "vl": "Vlan"}

     return next((f"{full_name}{intf_id}" for alias, full_name in alias_map.items() if v.lower().startswith(alias)), v)
@@ -81,26 +75,57 @@ def interface_case_sensitivity(v: str) -> str:
 def bgp_multiprotocol_capabilities_abbreviations(value: str) -> str:
     """Abbreviations for different BGP multiprotocol capabilities.

+    Handles different separators (hyphen, underscore, space) and case sensitivity.
+
     Examples
     --------
-    - IPv4 Unicast
-    - L2vpnEVPN
-    - ipv4 MPLS Labels
-    - ipv4Mplsvpn
+    ```python
+    >>> bgp_multiprotocol_capabilities_abbreviations("IPv4 Unicast")
+    'ipv4Unicast'
+    >>> bgp_multiprotocol_capabilities_abbreviations("ipv4-Flow_Spec Vpn")
+    'ipv4FlowSpecVpn'
+    >>> bgp_multiprotocol_capabilities_abbreviations("ipv6_labeled-unicast")
+    'ipv6MplsLabels'
+    >>> bgp_multiprotocol_capabilities_abbreviations("ipv4_mpls_vpn")
+    'ipv4MplsVpn'
+    >>> bgp_multiprotocol_capabilities_abbreviations("ipv4 mpls labels")
+    'ipv4MplsLabels'
+    >>> bgp_multiprotocol_capabilities_abbreviations("rt-membership")
+    'rtMembership'
+    >>> bgp_multiprotocol_capabilities_abbreviations("dynamic-path-selection")
+    'dps'
+    ```

     """
     patterns = {
-        REGEXP_BGP_L2VPN_AFI: "l2VpnEvpn",
-        REGEXP_BGP_IPV4_MPLS_LABELS: "ipv4MplsLabels",
-        REGEX_BGP_IPV4_MPLS_VPN: "ipv4MplsVpn",
-        REGEX_BGP_IPV4_UNICAST: "ipv4Unicast",
+        f"{r'dynamic[-_ ]?path[-_ ]?selection$'}": "dps",
+        f"{r'dps$'}": "dps",
+        f"{REGEX_IPV4_UNICAST}": "ipv4Unicast",
+        f"{REGEX_IPV6_UNICAST}": "ipv6Unicast",
+        f"{REGEX_IPV4_MULTICAST}": "ipv4Multicast",
+        f"{REGEX_IPV6_MULTICAST}": "ipv6Multicast",
+        f"{r'ipv4[-_ ]?labeled[-_ ]?Unicast$'}": "ipv4MplsLabels",
+        f"{r'ipv4[-_ ]?mpls[-_ ]?labels$'}": "ipv4MplsLabels",
+        f"{r'ipv6[-_ ]?labeled[-_ ]?Unicast$'}": "ipv6MplsLabels",
+        f"{r'ipv6[-_ ]?mpls[-_ ]?labels$'}": "ipv6MplsLabels",
+        f"{r'ipv4[-_ ]?sr[-_ ]?te$'}": "ipv4SrTe",  # codespell:ignore
+        f"{r'ipv6[-_ ]?sr[-_ ]?te$'}": "ipv6SrTe",  # codespell:ignore
+        f"{r'ipv4[-_ ]?mpls[-_ ]?vpn$'}": "ipv4MplsVpn",
+        f"{r'ipv6[-_ ]?mpls[-_ ]?vpn$'}": "ipv6MplsVpn",
+        f"{r'ipv4[-_ ]?Flow[-_ ]?spec$'}": "ipv4FlowSpec",
+        f"{r'ipv6[-_ ]?Flow[-_ ]?spec$'}": "ipv6FlowSpec",
+        f"{r'ipv4[-_ ]?Flow[-_ ]?spec[-_ ]?vpn$'}": "ipv4FlowSpecVpn",
+        f"{r'ipv6[-_ ]?Flow[-_ ]?spec[-_ ]?vpn$'}": "ipv6FlowSpecVpn",
+        f"{r'l2[-_ ]?vpn[-_ ]?vpls$'}": "l2VpnVpls",
+        f"{r'l2[-_ ]?vpn[-_ ]?evpn$'}": "l2VpnEvpn",
+        f"{r'link[-_ ]?state$'}": "linkState",
+        f"{r'rt[-_ ]?membership$'}": "rtMembership",
+        f"{r'ipv4[-_ ]?rt[-_ ]?membership$'}": "rtMembership",
+        f"{r'ipv4[-_ ]?mvpn$'}": "ipv4Mvpn",
     }
     for pattern, replacement in patterns.items():
-        match = re.search(pattern, value, re.IGNORECASE)
+        match = re.match(pattern, value, re.IGNORECASE)
         if match:
             return replacement
     return value
@@ -114,6 +139,54 @@ def validate_regex(value: str) -> str:
     return value


+def bgp_redistributed_route_proto_abbreviations(value: str) -> str:
+    """Abbreviations for different BGP redistributed route protocols.
+
+    Handles different separators (hyphen, underscore, space) and case sensitivity.
+
+    Examples
+    --------
+    ```python
+    >>> bgp_redistributed_route_proto_abbreviations("IPv4 Unicast")
+    'v4u'
+    >>> bgp_redistributed_route_proto_abbreviations("IPv4-multicast")
+    'v4m'
+    >>> bgp_redistributed_route_proto_abbreviations("IPv6_multicast")
+    'v6m'
+    >>> bgp_redistributed_route_proto_abbreviations("ipv6unicast")
+    'v6u'
+    ```
+
+    """
+    patterns = {REGEX_IPV4_UNICAST: "v4u", REGEX_IPV4_MULTICAST: "v4m", REGEX_IPV6_UNICAST: "v6u", REGEX_IPV6_MULTICAST: "v6m"}
+
+    for pattern, replacement in patterns.items():
+        match = re.match(pattern, value, re.IGNORECASE)
+        if match:
+            return replacement
+
+    return value
+
+
+def update_bgp_redistributed_proto_user(value: str) -> str:
+    """Update BGP redistributed route `User` proto with EOS SDK.
+
+    Examples
+    --------
+    ```python
+    >>> update_bgp_redistributed_proto_user("User")
+    'EOS SDK'
+    >>> update_bgp_redistributed_proto_user("Bgp")
+    'Bgp'
+    >>> update_bgp_redistributed_proto_user("RIP")
+    'RIP'
+    ```
+
+    """
+    if value == "User":
+        value = "EOS SDK"
+
+    return value
 # AntaTest.Input types
 AAAAuthMethod = Annotated[str, AfterValidator(aaa_group_prefix)]
 Vlan = Annotated[int, Field(ge=0, le=4094)]
@@ -148,22 +221,68 @@ Safi = Literal["unicast", "multicast", "labeled-unicast", "sr-te"]
 EncryptionAlgorithm = Literal["RSA", "ECDSA"]
 RsaKeySize = Literal[2048, 3072, 4096]
 EcdsaKeySize = Literal[256, 384, 512]
-MultiProtocolCaps = Annotated[str, BeforeValidator(bgp_multiprotocol_capabilities_abbreviations)]
+MultiProtocolCaps = Annotated[
+    Literal[
+        "dps",
+        "ipv4Unicast",
+        "ipv6Unicast",
+        "ipv4Multicast",
+        "ipv6Multicast",
+        "ipv4MplsLabels",
+        "ipv6MplsLabels",
+        "ipv4SrTe",
+        "ipv6SrTe",
+        "ipv4MplsVpn",
+        "ipv6MplsVpn",
+        "ipv4FlowSpec",
+        "ipv6FlowSpec",
+        "ipv4FlowSpecVpn",
+        "ipv6FlowSpecVpn",
+        "l2VpnVpls",
+        "l2VpnEvpn",
+        "linkState",
+        "rtMembership",
+        "ipv4Mvpn",
+    ],
+    BeforeValidator(bgp_multiprotocol_capabilities_abbreviations),
+]
 BfdInterval = Annotated[int, Field(ge=50, le=60000)]
 BfdMultiplier = Annotated[int, Field(ge=3, le=50)]
 ErrDisableReasons = Literal[
     "acl",
     "arp-inspection",
+    "bgp-session-tracking",
     "bpduguard",
+    "dot1x",
+    "dot1x-coa",
     "dot1x-session-replace",
+    "evpn-sa-mh",
+    "fabric-link-failure",
+    "fabric-link-flap",
     "hitless-reload-down",
+    "lacp-no-portid",
     "lacp-rate-limit",
+    "license-enforce",
     "link-flap",
+    "mlagasu",
+    "mlagdualprimary",
+    "mlagissu",
+    "mlagmaintdown",
     "no-internal-vlan",
+    "out-of-voqs",
     "portchannelguard",
+    "portgroup-disabled",
     "portsec",
+    "speed-misconfigured",
+    "storm-control",
+    "stp-no-portid",
+    "stuck-queue",
     "tapagg",
     "uplink-failure-detection",
+    "xcvr-misconfigured",
+    "xcvr-overheat",
+    "xcvr-power-unsupported",
+    "xcvr-unsupported",
 ]
 ErrDisableInterval = Annotated[int, Field(ge=30, le=86400)]
 Percent = Annotated[float, Field(ge=0.0, le=100.0)]
@@ -204,11 +323,6 @@ BgpDropStats = Literal[
 ]
 BgpUpdateError = Literal["inUpdErrWithdraw", "inUpdErrIgnore", "inUpdErrDisableAfiSafi", "disabledAfiSafi", "lastUpdErrTime"]
 BfdProtocol = Literal["bgp", "isis", "lag", "ospf", "ospfv3", "pim", "route-input", "static-bfd", "static-route", "vrrp", "vxlan"]
-SnmpPdu = Literal["inGetPdus", "inGetNextPdus", "inSetPdus", "outGetResponsePdus", "outTrapPdus"]
-SnmpErrorCounter = Literal[
-    "inVersionErrs", "inBadCommunityNames", "inBadCommunityUses", "inParseErrs", "outTooBigErrs", "outNoSuchNameErrs", "outBadValueErrs", "outGeneralErrs"
-]
 IPv4RouteType = Literal[
     "connected",
     "static",
@@ -238,3 +352,47 @@ IPv4RouteType = Literal[
     "Route Cache Route",
     "CBF Leaked Route",
 ]
+DynamicVlanSource = Literal["dmf", "dot1x", "dynvtep", "evpn", "mlag", "mlagsync", "mvpn", "swfwd", "vccbfd"]
+LogSeverityLevel = Literal["alerts", "critical", "debugging", "emergencies", "errors", "informational", "notifications", "warnings"]
+
+
+########################################
+# SNMP
+########################################
+def snmp_v3_prefix(auth_type: Literal["auth", "priv", "noauth"]) -> str:
+    """Prefix the SNMP authentication type with 'v3'."""
+    if auth_type == "noauth":
+        return "v3NoAuth"
+    return f"v3{auth_type.title()}"
+
+
+SnmpVersion = Literal["v1", "v2c", "v3"]
+SnmpHashingAlgorithm = Literal["MD5", "SHA", "SHA-224", "SHA-256", "SHA-384", "SHA-512"]
+SnmpEncryptionAlgorithm = Literal["AES-128", "AES-192", "AES-256", "DES"]
+SnmpPdu = Literal["inGetPdus", "inGetNextPdus", "inSetPdus", "outGetResponsePdus", "outTrapPdus"]
+SnmpErrorCounter = Literal[
+    "inVersionErrs", "inBadCommunityNames", "inBadCommunityUses", "inParseErrs", "outTooBigErrs", "outNoSuchNameErrs", "outBadValueErrs", "outGeneralErrs"
+]
+SnmpVersionV3AuthType = Annotated[Literal["auth", "priv", "noauth"], AfterValidator(snmp_v3_prefix)]
+RedistributedProtocol = Annotated[
+    Literal[
+        "AttachedHost",
+        "Bgp",
+        "Connected",
+        "DHCP",
+        "Dynamic",
+        "IS-IS",
+        "OSPF Internal",
+        "OSPF External",
+        "OSPF Nssa-External",
+        "OSPFv3 Internal",
+        "OSPFv3 External",
+        "OSPFv3 Nssa-External",
+        "RIP",
+        "Static",
+        "User",
+    ],
+    AfterValidator(update_bgp_redistributed_proto_user),
+]
+RedistributedAfiSafi = Annotated[Literal["v4u", "v4m", "v6u", "v6m"], BeforeValidator(bgp_redistributed_route_proto_abbreviations)]
+NTPStratumLevel = Annotated[int, Field(ge=0, le=16)]
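The `Annotated` aliases above pair a `Literal` with a `BeforeValidator`/`AfterValidator`, so free-form input is normalized before the Literal check. A small sketch of the same pattern under pydantic v2 (the `Peer` model and the trimmed-down mapping are illustrative):

```python
from typing import Annotated, Literal

from pydantic import BaseModel
from pydantic.functional_validators import BeforeValidator


def normalize_afi_safi(value: str) -> str:
    """Trimmed-down stand-in for bgp_redistributed_route_proto_abbreviations."""
    mapping = {"ipv4 unicast": "v4u", "ipv4 multicast": "v4m", "ipv6 unicast": "v6u", "ipv6 multicast": "v6m"}
    return mapping.get(value.lower(), value)


RedistributedAfiSafi = Annotated[Literal["v4u", "v4m", "v6u", "v6m"], BeforeValidator(normalize_afi_safi)]


class Peer(BaseModel):
    afi_safi: RedistributedAfiSafi


print(Peer(afi_safi="IPv4 Unicast").afi_safi)  # v4u
```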


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """decorators for tests."""
@@ -58,7 +58,7 @@ def deprecated_test(new_tests: list[str] | None = None) -> Callable[[F], F]:  #
             logger.warning("%s test is deprecated.", anta_test.name)
             return await function(*args, **kwargs)

-        return cast(F, wrapper)
+        return cast("F", wrapper)

     return decorator
@@ -167,6 +167,6 @@ def skip_on_platforms(platforms: list[str]) -> Callable[[F], F]:
             return await function(*args, **kwargs)

-        return cast(F, wrapper)
+        return cast("F", wrapper)

     return decorator


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
 # Use of this source code is governed by the Apache License 2.0
 # that can be found in the LICENSE file.
 """ANTA Device Abstraction Module."""
@@ -8,13 +8,12 @@ from __future__ import annotations

 import asyncio
 import logging
 from abc import ABC, abstractmethod
-from collections import defaultdict
+from collections import OrderedDict, defaultdict
+from time import monotonic
 from typing import TYPE_CHECKING, Any, Literal

 import asyncssh
 import httpcore
-from aiocache import Cache
-from aiocache.plugins import HitMissRatioPlugin
 from asyncssh import SSHClientConnection, SSHClientConnectionOptions
 from httpx import ConnectError, HTTPError, TimeoutException
@@ -27,12 +26,79 @@ if TYPE_CHECKING:
     from collections.abc import Iterator
     from pathlib import Path

+    from asynceapi._types import EapiComplexCommand, EapiSimpleCommand

 logger = logging.getLogger(__name__)

 # Do not load the default keypairs multiple times due to a performance issue introduced in cryptography 37.0
 # https://github.com/pyca/cryptography/issues/7236#issuecomment-1131908472
 CLIENT_KEYS = asyncssh.public_key.load_default_keypairs()
+# Limit concurrency to 100 requests (HTTPX default) to avoid high-concurrency performance issues
+# See: https://github.com/encode/httpx/issues/3215
+MAX_CONCURRENT_REQUESTS = 100
+
+
+class AntaCache:
+    """Class to be used as cache.
+
+    Example
+    -------
+    ```python
+    # Create cache
+    cache = AntaCache("device1")
+    with cache.locks[key]:
+        command_output = cache.get(key)
+    ```
+    """
+
+    def __init__(self, device: str, max_size: int = 128, ttl: int = 60) -> None:
+        """Initialize the cache."""
+        self.device = device
+        self.cache: OrderedDict[str, Any] = OrderedDict()
+        self.locks: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
+        self.max_size = max_size
+        self.ttl = ttl
+
+        # Stats
+        self.stats: dict[str, int] = {}
+        self._init_stats()
+
+    def _init_stats(self) -> None:
+        """Initialize the stats."""
+        self.stats["hits"] = 0
+        self.stats["total"] = 0
+
+    async def get(self, key: str) -> Any:  # noqa: ANN401
+        """Return the cached entry for key."""
+        self.stats["total"] += 1
+        if key in self.cache:
+            timestamp, value = self.cache[key]
+            if monotonic() - timestamp < self.ttl:
+                # checking the value is still valid
+                self.cache.move_to_end(key)
+                self.stats["hits"] += 1
+                return value
+            # Time expired
+            del self.cache[key]
+            del self.locks[key]
+        return None
+
+    async def set(self, key: str, value: Any) -> bool:  # noqa: ANN401
+        """Set the cached entry for key to value."""
+        timestamp = monotonic()
+        if len(self.cache) > self.max_size:
+            self.cache.popitem(last=False)
+        self.cache[key] = timestamp, value
+        return True
+
+    def clear(self) -> None:
+        """Empty the cache."""
+        logger.debug("Clearing cache for device %s", self.device)
+        self.cache = OrderedDict()
+        self._init_stats()
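A short usage sketch for the AntaCache class added above, assuming ANTA 1.3.0 where it lives in `anta.device` (the key and payload are placeholders):

```python
import asyncio

from anta.device import AntaCache


async def demo() -> None:
    cache = AntaCache(device="device1", ttl=60)
    key = "show version"
    # The per-key asyncio.Lock ensures only one coroutine collects a given command at a time.
    async with cache.locks[key]:
        output = await cache.get(key)
        if output is None:
            output = {"version": "4.33.1F"}  # placeholder for a real eAPI response
            await cache.set(key, output)
    print(cache.stats)  # {'hits': 0, 'total': 1} after a first miss


asyncio.run(demo())
```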
 class AntaDevice(ABC):
     """Abstract class representing a device in ANTA.
@@ -52,10 +118,11 @@ class AntaDevice(ABC):
         Hardware model of the device.
     tags : set[str]
         Tags for this device.
-    cache : Cache | None
-        In-memory cache from aiocache library for this device (None if cache is disabled).
+    cache : AntaCache | None
+        In-memory cache for this device (None if cache is disabled).
     cache_locks : dict
         Dictionary mapping keys to asyncio locks to guarantee exclusive access to the cache if not disabled.
+        Deprecated, will be removed in ANTA v2.0.0, use self.cache.locks instead.

     """
@@ -79,7 +146,8 @@ class AntaDevice(ABC):
             self.tags.add(self.name)
         self.is_online: bool = False
         self.established: bool = False
-        self.cache: Cache | None = None
+        self.cache: AntaCache | None = None
+        # Keeping cache_locks for backward compatibility.
         self.cache_locks: defaultdict[str, asyncio.Lock] | None = None

         # Initialize cache if not disabled
@@ -101,17 +169,16 @@ class AntaDevice(ABC):

     def _init_cache(self) -> None:
         """Initialize cache for the device, can be overridden by subclasses to manipulate how it works."""
-        self.cache = Cache(cache_class=Cache.MEMORY, ttl=60, namespace=self.name, plugins=[HitMissRatioPlugin()])
-        self.cache_locks = defaultdict(asyncio.Lock)
+        self.cache = AntaCache(device=self.name, ttl=60)
+        self.cache_locks = self.cache.locks

     @property
     def cache_statistics(self) -> dict[str, Any] | None:
         """Return the device cache statistics for logging purposes."""
-        # Need to ignore pylint no-member as Cache is a proxy class and pylint is not smart enough
-        # https://github.com/pylint-dev/pylint/issues/7258
         if self.cache is not None:
-            stats = getattr(self.cache, "hit_miss_ratio", {"total": 0, "hits": 0, "hit_ratio": 0})
-            return {"total_commands_sent": stats["total"], "cache_hits": stats["hits"], "cache_hit_ratio": f"{stats['hit_ratio'] * 100:.2f}%"}
+            stats = self.cache.stats
+            ratio = stats["hits"] / stats["total"] if stats["total"] > 0 else 0
+            return {"total_commands_sent": stats["total"], "cache_hits": stats["hits"], "cache_hit_ratio": f"{ratio * 100:.2f}%"}
         return None

     def __rich_repr__(self) -> Iterator[tuple[str, Any]]:
@ -177,18 +244,16 @@ class AntaDevice(ABC):
collection_id collection_id
An identifier used to build the eAPI request ID. An identifier used to build the eAPI request ID.
""" """
# Need to ignore pylint no-member as Cache is a proxy class and pylint is not smart enough if self.cache is not None and command.use_cache:
# https://github.com/pylint-dev/pylint/issues/7258 async with self.cache.locks[command.uid]:
if self.cache is not None and self.cache_locks is not None and command.use_cache: cached_output = await self.cache.get(command.uid)
async with self.cache_locks[command.uid]:
cached_output = await self.cache.get(command.uid) # pylint: disable=no-member
if cached_output is not None: if cached_output is not None:
logger.debug("Cache hit for %s on %s", command.command, self.name) logger.debug("Cache hit for %s on %s", command.command, self.name)
command.output = cached_output command.output = cached_output
else: else:
await self._collect(command=command, collection_id=collection_id) await self._collect(command=command, collection_id=collection_id)
await self.cache.set(command.uid, command.output) # pylint: disable=no-member await self.cache.set(command.uid, command.output)
else: else:
await self._collect(command=command, collection_id=collection_id) await self._collect(command=command, collection_id=collection_id)
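The rewritten collect() logic is a cache-aside pattern: one asyncio lock per command UID, so concurrent tests requesting the same command wait for a single collection instead of each hitting eAPI. A self-contained sketch of that pattern, with an in-memory dict standing in for AntaCache (all names below are illustrative):

from __future__ import annotations

import asyncio
from collections import defaultdict


class TinyCache:
    """Toy stand-in for the device cache: a dict plus one lock per key."""

    def __init__(self) -> None:
        self._data: dict[str, str] = {}
        self.locks: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)

    async def get(self, key: str) -> str | None:
        return self._data.get(key)

    async def set(self, key: str, value: str) -> None:
        self._data[key] = value


async def collect(cache: TinyCache, command: str) -> str:
    async with cache.locks[command]:      # serialize collectors of the same command
        cached = await cache.get(command)
        if cached is not None:
            return cached                 # cache hit: no device call
        output = f"output of '{command}'"  # stand-in for the real eAPI call
        await cache.set(command, output)
        return output


async def main() -> None:
    cache = TinyCache()
    results = await asyncio.gather(*(collect(cache, "show version") for _ in range(3)))
    print(results)                        # the same cached output, three times


asyncio.run(main())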
@ -237,6 +302,7 @@ class AntaDevice(ABC):
raise NotImplementedError(msg) raise NotImplementedError(msg)
# pylint: disable=too-many-instance-attributes
class AsyncEOSDevice(AntaDevice): class AsyncEOSDevice(AntaDevice):
"""Implementation of AntaDevice for EOS using aio-eapi. """Implementation of AntaDevice for EOS using aio-eapi.
@ -329,6 +395,10 @@ class AsyncEOSDevice(AntaDevice):
host=host, port=ssh_port, username=username, password=password, client_keys=CLIENT_KEYS, **ssh_params host=host, port=ssh_port, username=username, password=password, client_keys=CLIENT_KEYS, **ssh_params
) )
# In Python 3.9, Semaphore must be created within a running event loop
# TODO: Once we drop Python 3.9 support, initialize the semaphore here
self._command_semaphore: asyncio.Semaphore | None = None
def __rich_repr__(self) -> Iterator[tuple[str, Any]]: def __rich_repr__(self) -> Iterator[tuple[str, Any]]:
"""Implement Rich Repr Protocol. """Implement Rich Repr Protocol.
@ -372,6 +442,15 @@ class AsyncEOSDevice(AntaDevice):
""" """
return (self._session.host, self._session.port) return (self._session.host, self._session.port)
async def _get_semaphore(self) -> asyncio.Semaphore:
"""Return the semaphore, initializing it if needed.
TODO: Remove this method once we drop Python 3.9 support.
"""
if self._command_semaphore is None:
self._command_semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS)
return self._command_semaphore
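As the TODO above notes, the semaphore is created lazily because on Python 3.9 an asyncio.Semaphore must be instantiated while an event loop is running. A hedged sketch of the same lazy initialization bounding concurrent requests; MAX_CONCURRENT_REQUESTS is a module-level constant in the real code, and everything else below is illustrative:

from __future__ import annotations

import asyncio

MAX_CONCURRENT_REQUESTS = 2               # illustrative value
_semaphore: asyncio.Semaphore | None = None


async def get_semaphore() -> asyncio.Semaphore:
    """Create the semaphore on first use, inside the running event loop."""
    global _semaphore
    if _semaphore is None:
        _semaphore = asyncio.Semaphore(MAX_CONCURRENT_REQUESTS)
    return _semaphore


async def send(request_id: int) -> None:
    async with await get_semaphore():     # at most MAX_CONCURRENT_REQUESTS in flight
        print(f"request {request_id} started")
        await asyncio.sleep(0.1)          # stand-in for an eAPI call


async def main() -> None:
    await asyncio.gather(*(send(i) for i in range(5)))


asyncio.run(main())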
async def _collect(self, command: AntaCommand, *, collection_id: str | None = None) -> None: async def _collect(self, command: AntaCommand, *, collection_id: str | None = None) -> None:
"""Collect device command output from EOS using aio-eapi. """Collect device command output from EOS using aio-eapi.
@ -386,57 +465,63 @@ class AsyncEOSDevice(AntaDevice):
collection_id collection_id
An identifier used to build the eAPI request ID. An identifier used to build the eAPI request ID.
""" """
commands: list[dict[str, str | int]] = [] semaphore = await self._get_semaphore()
if self.enable and self._enable_password is not None:
commands.append( async with semaphore:
{ commands: list[EapiComplexCommand | EapiSimpleCommand] = []
"cmd": "enable", if self.enable and self._enable_password is not None:
"input": str(self._enable_password), commands.append(
}, {
) "cmd": "enable",
elif self.enable: "input": str(self._enable_password),
# No password },
commands.append({"cmd": "enable"}) )
commands += [{"cmd": command.command, "revision": command.revision}] if command.revision else [{"cmd": command.command}] elif self.enable:
try: # No password
response: list[dict[str, Any] | str] = await self._session.cli( commands.append({"cmd": "enable"})
commands=commands, commands += [{"cmd": command.command, "revision": command.revision}] if command.revision else [{"cmd": command.command}]
ofmt=command.ofmt, try:
version=command.version, response = await self._session.cli(
req_id=f"ANTA-{collection_id}-{id(command)}" if collection_id else f"ANTA-{id(command)}", commands=commands,
) # type: ignore[assignment] # multiple commands returns a list ofmt=command.ofmt,
# Do not keep response of 'enable' command version=command.version,
command.output = response[-1] req_id=f"ANTA-{collection_id}-{id(command)}" if collection_id else f"ANTA-{id(command)}",
except asynceapi.EapiCommandError as e: )
# This block catches exceptions related to EOS issuing an error. # Do not keep response of 'enable' command
self._log_eapi_command_error(command, e) command.output = response[-1]
except TimeoutException as e: except asynceapi.EapiCommandError as e:
# This block catches Timeout exceptions. # This block catches exceptions related to EOS issuing an error.
command.errors = [exc_to_str(e)] self._log_eapi_command_error(command, e)
timeouts = self._session.timeout.as_dict() except TimeoutException as e:
logger.error( # This block catches Timeout exceptions.
"%s occurred while sending a command to %s. Consider increasing the timeout.\nCurrent timeouts: Connect: %s | Read: %s | Write: %s | Pool: %s", command.errors = [exc_to_str(e)]
exc_to_str(e), timeouts = self._session.timeout.as_dict()
self.name, logger.error(
timeouts["connect"], "%s occurred while sending a command to %s. Consider increasing the timeout.\nCurrent timeouts: Connect: %s | Read: %s | Write: %s | Pool: %s",
timeouts["read"], exc_to_str(e),
timeouts["write"], self.name,
timeouts["pool"], timeouts["connect"],
) timeouts["read"],
except (ConnectError, OSError) as e: timeouts["write"],
# This block catches OSError and socket issues related exceptions. timeouts["pool"],
command.errors = [exc_to_str(e)] )
if (isinstance(exc := e.__cause__, httpcore.ConnectError) and isinstance(os_error := exc.__context__, OSError)) or isinstance(os_error := e, OSError): # pylint: disable=no-member except (ConnectError, OSError) as e:
if isinstance(os_error.__cause__, OSError): # This block catches OSError and socket issues related exceptions.
os_error = os_error.__cause__ command.errors = [exc_to_str(e)]
logger.error("A local OS error occurred while connecting to %s: %s.", self.name, os_error) # pylint: disable=no-member
else: if (isinstance(exc := e.__cause__, httpcore.ConnectError) and isinstance(os_error := exc.__context__, OSError)) or isinstance(
os_error := e, OSError
):
if isinstance(os_error.__cause__, OSError):
os_error = os_error.__cause__
logger.error("A local OS error occurred while connecting to %s: %s.", self.name, os_error)
else:
anta_log_exception(e, f"An error occurred while issuing an eAPI request to {self.name}", logger)
except HTTPError as e:
# This block catches most of the httpx Exceptions and logs a general message.
command.errors = [exc_to_str(e)]
anta_log_exception(e, f"An error occurred while issuing an eAPI request to {self.name}", logger) anta_log_exception(e, f"An error occurred while issuing an eAPI request to {self.name}", logger)
except HTTPError as e: logger.debug("%s: %s", self.name, command)
# This block catches most of the httpx Exceptions and logs a general message.
command.errors = [exc_to_str(e)]
anta_log_exception(e, f"An error occurred while issuing an eAPI request to {self.name}", logger)
logger.debug("%s: %s", self.name, command)
def _log_eapi_command_error(self, command: AntaCommand, e: asynceapi.EapiCommandError) -> None: def _log_eapi_command_error(self, command: AntaCommand, e: asynceapi.EapiCommandError) -> None:
"""Appropriately log the eapi command error.""" """Appropriately log the eapi command error."""
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Package related to all ANTA tests input models.""" """Package related to all ANTA tests input models."""
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for AVT tests.""" """Module containing input models for AVT tests."""
@ -33,4 +33,4 @@ class AVTPath(BaseModel):
AVT CONTROL-PLANE-PROFILE VRF: default (Destination: 10.101.255.2, Next-hop: 10.101.255.1) AVT CONTROL-PLANE-PROFILE VRF: default (Destination: 10.101.255.2, Next-hop: 10.101.255.1)
""" """
return f"AVT {self.avt_name} VRF: {self.vrf} (Destination: {self.destination}, Next-hop: {self.next_hop})" return f"AVT: {self.avt_name} VRF: {self.vrf} Destination: {self.destination} Next-hop: {self.next_hop}"
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for BFD tests.""" """Module containing input models for BFD tests."""
@ -31,6 +31,10 @@ class BFDPeer(BaseModel):
"""Multiplier of BFD peer. Required field in the `VerifyBFDPeersIntervals` test.""" """Multiplier of BFD peer. Required field in the `VerifyBFDPeersIntervals` test."""
protocols: list[BfdProtocol] | None = None protocols: list[BfdProtocol] | None = None
"""List of protocols to be verified. Required field in the `VerifyBFDPeersRegProtocols` test.""" """List of protocols to be verified. Required field in the `VerifyBFDPeersRegProtocols` test."""
detection_time: int | None = None
"""Detection time of BFD peer in milliseconds. Defines how long to wait without receiving BFD packets before declaring the peer session as down.
Optional field in the `VerifyBFDPeersIntervals` test."""
def __str__(self) -> str: def __str__(self) -> str:
"""Return a human-readable string representation of the BFDPeer for reporting.""" """Return a human-readable string representation of the BFDPeer for reporting."""
@ -1,11 +1,11 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for connectivity tests.""" """Module containing input models for connectivity tests."""
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Address from ipaddress import IPv4Address, IPv6Address
from typing import Any from typing import Any
from warnings import warn from warnings import warn
@ -18,29 +18,30 @@ class Host(BaseModel):
"""Model for a remote host to ping.""" """Model for a remote host to ping."""
model_config = ConfigDict(extra="forbid") model_config = ConfigDict(extra="forbid")
destination: IPv4Address destination: IPv4Address | IPv6Address
"""IPv4 address to ping.""" """Destination address to ping."""
source: IPv4Address | Interface source: IPv4Address | IPv6Address | Interface
"""IPv4 address source IP or egress interface to use.""" """Source address IP or egress interface to use."""
vrf: str = "default" vrf: str = "default"
"""VRF context. Defaults to `default`.""" """VRF context."""
repeat: int = 2 repeat: int = 2
"""Number of ping repetition. Defaults to 2.""" """Number of ping repetition."""
size: int = 100 size: int = 100
"""Specify datagram size. Defaults to 100.""" """Specify datagram size."""
df_bit: bool = False df_bit: bool = False
"""Enable do not fragment bit in IP header. Defaults to False.""" """Enable do not fragment bit in IP header."""
reachable: bool = True
"""Indicates whether the destination should be reachable."""
def __str__(self) -> str: def __str__(self) -> str:
"""Return a human-readable string representation of the Host for reporting. """Return a human-readable string representation of the Host for reporting.
Examples Examples
-------- --------
Host 10.1.1.1 (src: 10.2.2.2, vrf: mgmt, size: 100B, repeat: 2) Host: 10.1.1.1 Source: 10.2.2.2 VRF: mgmt
""" """
df_status = ", df-bit: enabled" if self.df_bit else "" return f"Host: {self.destination} Source: {self.source} VRF: {self.vrf}"
return f"Host {self.destination} (src: {self.source}, vrf: {self.vrf}, size: {self.size}B, repeat: {self.repeat}{df_status})"
class LLDPNeighbor(BaseModel): class LLDPNeighbor(BaseModel):
@ -59,10 +60,10 @@ class LLDPNeighbor(BaseModel):
Examples Examples
-------- --------
Port Ethernet1 (Neighbor: DC1-SPINE2, Neighbor Port: Ethernet2) Port: Ethernet1 Neighbor: DC1-SPINE2 Neighbor Port: Ethernet2
""" """
return f"Port {self.port} (Neighbor: {self.neighbor_device}, Neighbor Port: {self.neighbor_port})" return f"Port: {self.port} Neighbor: {self.neighbor_device} Neighbor Port: {self.neighbor_port}"
class Neighbor(LLDPNeighbor): # pragma: no cover class Neighbor(LLDPNeighbor): # pragma: no cover
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for CVX tests.""" """Module containing input models for CVX tests."""
@ -0,0 +1,72 @@
# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module containing input models for flow tracking tests."""
from __future__ import annotations
from pydantic import BaseModel, ConfigDict
class FlowTracker(BaseModel):
"""Flow Tracking model representing the tracker details."""
model_config = ConfigDict(extra="forbid")
name: str
"""The name of the flow tracker."""
record_export: RecordExport | None = None
"""Configuration for record export, specifying details about timeouts."""
exporters: list[Exporter] | None = None
"""A list of exporters associated with the flow tracker."""
def __str__(self) -> str:
"""Return a human-readable string representation of the FlowTracker for reporting.
Examples
--------
Flow Tracker: FLOW-TRACKER
"""
return f"Flow Tracker: {self.name}"
class RecordExport(BaseModel):
"""Model representing the record export configuration for a flow tracker."""
model_config = ConfigDict(extra="forbid")
on_inactive_timeout: int
"""The timeout in milliseconds for exporting flow records when the flow becomes inactive."""
on_interval: int
"""The interval in milliseconds for exporting flow records."""
def __str__(self) -> str:
"""Return a human-readable string representation of the RecordExport for reporting.
Examples
--------
Inactive Timeout: 60000 Active Interval: 300000

"""
return f"Inactive Timeout: {self.on_inactive_timeout} Active Interval: {self.on_interval}"
class Exporter(BaseModel):
"""Model representing the exporter used for flow record export."""
model_config = ConfigDict(extra="forbid")
name: str
"""The name of the exporter."""
local_interface: str
"""The local interface used by the exporter to send flow records."""
template_interval: int
"""The template interval, in milliseconds, for the exporter to refresh the flow template."""
def __str__(self) -> str:
"""Return a human-readable string representation of the Exporter for reporting.
Examples
--------
Exporter: CVP-TELEMETRY
"""
return f"Exporter: {self.name}"
@ -1,19 +1,24 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for interface tests.""" """Module containing input models for interface tests."""
from __future__ import annotations from __future__ import annotations
from typing import Literal from ipaddress import IPv4Interface
from typing import Any, Literal
from warnings import warn
from pydantic import BaseModel, ConfigDict from pydantic import BaseModel, ConfigDict, Field
from anta.custom_types import Interface, PortChannelInterface from anta.custom_types import Interface, PortChannelInterface
class InterfaceState(BaseModel): class InterfaceState(BaseModel):
"""Model for an interface state.""" """Model for an interface state.
TODO: Need to review this class name in ANTA v2.0.0.
"""
model_config = ConfigDict(extra="forbid") model_config = ConfigDict(extra="forbid")
name: Interface name: Interface
@ -33,6 +38,16 @@ class InterfaceState(BaseModel):
Can be enabled in the `VerifyLACPInterfacesStatus` tests. Can be enabled in the `VerifyLACPInterfacesStatus` tests.
""" """
primary_ip: IPv4Interface | None = None
"""Primary IPv4 address in CIDR notation. Required field in the `VerifyInterfaceIPv4` test."""
secondary_ips: list[IPv4Interface] | None = None
"""List of secondary IPv4 addresses in CIDR notation. Can be provided in the `VerifyInterfaceIPv4` test."""
auto: bool = False
"""The auto-negotiation status of the interface. Can be provided in the `VerifyInterfacesSpeed` test."""
speed: float | None = Field(None, ge=1, le=1000)
"""The speed of the interface in Gigabits per second. Valid range is 1 to 1000. Required field in the `VerifyInterfacesSpeed` test."""
lanes: int | None = Field(None, ge=1, le=8)
"""The number of lanes in the interface. Valid range is 1 to 8. Can be provided in the `VerifyInterfacesSpeed` test."""
def __str__(self) -> str: def __str__(self) -> str:
"""Return a human-readable string representation of the InterfaceState for reporting. """Return a human-readable string representation of the InterfaceState for reporting.
@ -46,3 +61,21 @@ class InterfaceState(BaseModel):
if self.portchannel is not None: if self.portchannel is not None:
base_string += f" Port-Channel: {self.portchannel}" base_string += f" Port-Channel: {self.portchannel}"
return base_string return base_string
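The new speed and lanes fields rely on pydantic Field bounds, so out-of-range values are rejected at input-validation time rather than deep inside a test. A small standalone sketch of the same constraints with a hypothetical model name:

from __future__ import annotations

from pydantic import BaseModel, Field, ValidationError


class SpeedInput(BaseModel):
    """Illustrative subset of the interface speed inputs."""

    speed: float | None = Field(None, ge=1, le=1000)  # Gigabits per second
    lanes: int | None = Field(None, ge=1, le=8)


print(SpeedInput(speed=100, lanes=4))

try:
    SpeedInput(speed=0.5)                             # below the ge=1 bound
except ValidationError as exc:
    print(exc.errors()[0]["msg"])                     # constraint violation reported by pydantic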
class InterfaceDetail(InterfaceState): # pragma: no cover
"""Alias for the InterfaceState model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the InterfaceState model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the InterfaceState class, emitting a depreciation warning."""
warn(
message="InterfaceDetail model is deprecated and will be removed in ANTA v2.0.0. Use the InterfaceState model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
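InterfaceDetail above, like the other deprecated aliases introduced in this diff, follows one pattern: subclass the new model, emit a DeprecationWarning, then defer to the parent. A self-contained sketch of that pattern with throwaway model names:

from __future__ import annotations

import warnings
from typing import Any

from pydantic import BaseModel


class NewModel(BaseModel):
    name: str


class OldModel(NewModel):
    """Deprecated alias kept for backward compatibility."""

    def __init__(self, **data: Any) -> None:
        warnings.warn(
            "OldModel is deprecated, use NewModel instead.",
            category=DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(**data)


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    OldModel(name="Ethernet1")
print(caught[0].category.__name__)  # DeprecationWarning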
@ -0,0 +1,21 @@
# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module containing input models for logging tests."""
from __future__ import annotations
from pydantic import BaseModel, Field
from anta.custom_types import LogSeverityLevel, RegexString
class LoggingQuery(BaseModel):
"""Logging query model representing the logging details."""
regex_match: RegexString
"""Log regex pattern to be searched in last log entries."""
last_number_messages: int = Field(ge=1, le=9999)
"""Last number of messages to check in the logging buffers."""
severity_level: LogSeverityLevel = "informational"
"""Log severity level."""
@ -0,0 +1,28 @@
# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module containing input models for path-selection tests."""
from __future__ import annotations
from ipaddress import IPv4Address
from pydantic import BaseModel, ConfigDict
class DpsPath(BaseModel):
"""Model for a list of DPS path entries."""
model_config = ConfigDict(extra="forbid")
peer: IPv4Address
"""Static peer IPv4 address."""
path_group: str
"""Router path group name."""
source_address: IPv4Address
"""Source IPv4 address of path."""
destination_address: IPv4Address
"""Destination IPv4 address of path."""
def __str__(self) -> str:
"""Return a human-readable string representation of the DpsPath for reporting."""
return f"Peer: {self.peer} PathGroup: {self.path_group} Source: {self.source_address} Destination: {self.destination_address}"
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Package related to routing tests input models.""" """Package related to routing tests input models."""
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for routing BGP tests.""" """Module containing input models for routing BGP tests."""
@ -6,13 +6,13 @@
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Address, IPv4Network, IPv6Address from ipaddress import IPv4Address, IPv4Network, IPv6Address
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any, Literal
from warnings import warn from warnings import warn
from pydantic import BaseModel, ConfigDict, Field, PositiveInt, model_validator from pydantic import BaseModel, ConfigDict, Field, PositiveInt, model_validator
from pydantic_extra_types.mac_address import MacAddress from pydantic_extra_types.mac_address import MacAddress
from anta.custom_types import Afi, BgpDropStats, BgpUpdateError, MultiProtocolCaps, Safi, Vni from anta.custom_types import Afi, BgpDropStats, BgpUpdateError, MultiProtocolCaps, RedistributedAfiSafi, RedistributedProtocol, Safi, Vni
if TYPE_CHECKING: if TYPE_CHECKING:
import sys import sys
@ -39,6 +39,23 @@ AFI_SAFI_EOS_KEY = {
("link-state", None): "linkState", ("link-state", None): "linkState",
} }
"""Dictionary mapping AFI/SAFI to EOS key representation.""" """Dictionary mapping AFI/SAFI to EOS key representation."""
AFI_SAFI_MAPPINGS = {"v4u": "IPv4 Unicast", "v4m": "IPv4 Multicast", "v6u": "IPv6 Unicast", "v6m": "IPv6 Multicast"}
"""Dictionary mapping AFI/SAFI to EOS key representation for BGP redistributed route protocol."""
IPV4_MULTICAST_SUPPORTED_PROTO = [
"AttachedHost",
"Connected",
"IS-IS",
"OSPF Internal",
"OSPF External",
"OSPF Nssa-External",
"OSPFv3 Internal",
"OSPFv3 External",
"OSPFv3 Nssa-External",
"Static",
]
"""List of BGP redistributed route protocol, supported for IPv4 multicast address family."""
IPV6_MULTICAST_SUPPORTED_PROTO = [proto for proto in IPV4_MULTICAST_SUPPORTED_PROTO if proto != "AttachedHost"]
"""List of BGP redistributed route protocol, supported for IPv6 multicast address family."""
class BgpAddressFamily(BaseModel): class BgpAddressFamily(BaseModel):
@ -68,8 +85,7 @@ class BgpAddressFamily(BaseModel):
check_peer_state: bool = False check_peer_state: bool = False
"""Flag to check if the peers are established with negotiated AFI/SAFI. Defaults to `False`. """Flag to check if the peers are established with negotiated AFI/SAFI. Defaults to `False`.
Can be enabled in the `VerifyBGPPeerCount` tests. Can be enabled in the `VerifyBGPPeerCount` tests."""
"""
@model_validator(mode="after") @model_validator(mode="after")
def validate_inputs(self) -> Self: def validate_inputs(self) -> Self:
@ -142,12 +158,14 @@ class BgpPeer(BaseModel):
"""IPv4 address of the BGP peer.""" """IPv4 address of the BGP peer."""
vrf: str = "default" vrf: str = "default"
"""Optional VRF for the BGP peer. Defaults to `default`.""" """Optional VRF for the BGP peer. Defaults to `default`."""
peer_group: str | None = None
"""Peer group of the BGP peer. Required field in the `VerifyBGPPeerGroup` test."""
advertised_routes: list[IPv4Network] | None = None advertised_routes: list[IPv4Network] | None = None
"""List of advertised routes in CIDR format. Required field in the `VerifyBGPExchangedRoutes` test.""" """List of advertised routes in CIDR format. Required field in the `VerifyBGPExchangedRoutes` test."""
received_routes: list[IPv4Network] | None = None received_routes: list[IPv4Network] | None = None
"""List of received routes in CIDR format. Required field in the `VerifyBGPExchangedRoutes` test.""" """List of received routes in CIDR format. Required field in the `VerifyBGPExchangedRoutes` test."""
capabilities: list[MultiProtocolCaps] | None = None capabilities: list[MultiProtocolCaps] | None = None
"""List of BGP multiprotocol capabilities. Required field in the `VerifyBGPPeerMPCaps` test.""" """List of BGP multiprotocol capabilities. Required field in the `VerifyBGPPeerMPCaps`, `VerifyBGPNlriAcceptance` tests."""
strict: bool = False strict: bool = False
"""If True, requires exact match of the provided BGP multiprotocol capabilities. """If True, requires exact match of the provided BGP multiprotocol capabilities.
@ -169,9 +187,15 @@ class BgpPeer(BaseModel):
outbound_route_map: str | None = None outbound_route_map: str | None = None
"""Outbound route map applied, defaults to None. Required field in the `VerifyBgpRouteMaps` test.""" """Outbound route map applied, defaults to None. Required field in the `VerifyBgpRouteMaps` test."""
maximum_routes: int | None = Field(default=None, ge=0, le=4294967294) maximum_routes: int | None = Field(default=None, ge=0, le=4294967294)
"""The maximum allowable number of BGP routes, `0` means unlimited. Required field in the `VerifyBGPPeerRouteLimit` test""" """The maximum allowable number of BGP routes. `0` means unlimited. Required field in the `VerifyBGPPeerRouteLimit` test"""
warning_limit: int | None = Field(default=None, ge=0, le=4294967294) warning_limit: int | None = Field(default=None, ge=0, le=4294967294)
"""Optional maximum routes warning limit. If not provided, it defaults to `0` meaning no warning limit.""" """The warning limit for the maximum routes. `0` means no warning.
Optional field in the `VerifyBGPPeerRouteLimit` test. If not provided, the test will not verify the warning limit."""
ttl: int | None = Field(default=None, ge=1, le=255)
"""The Time-To-Live (TTL). Required field in the `VerifyBGPPeerTtlMultiHops` test."""
max_ttl_hops: int | None = Field(default=None, ge=1, le=255)
"""The Max TTL hops. Required field in the `VerifyBGPPeerTtlMultiHops` test."""
def __str__(self) -> str: def __str__(self) -> str:
"""Return a human-readable string representation of the BgpPeer for reporting.""" """Return a human-readable string representation of the BgpPeer for reporting."""
@ -207,3 +231,159 @@ class VxlanEndpoint(BaseModel):
def __str__(self) -> str: def __str__(self) -> str:
"""Return a human-readable string representation of the VxlanEndpoint for reporting.""" """Return a human-readable string representation of the VxlanEndpoint for reporting."""
return f"Address: {self.address} VNI: {self.vni}" return f"Address: {self.address} VNI: {self.vni}"
class BgpRoute(BaseModel):
"""Model representing BGP routes.
Only IPv4 prefixes are supported for now.
"""
model_config = ConfigDict(extra="forbid")
prefix: IPv4Network
"""The IPv4 network address."""
vrf: str = "default"
"""Optional VRF for the BGP peer. Defaults to `default`."""
paths: list[BgpRoutePath] | None = None
"""A list of paths for the BGP route. Required field in the `VerifyBGPRoutePaths` test."""
ecmp_count: int | None = None
"""The expected number of ECMP paths for the BGP route. Required field in the `VerifyBGPRouteECMP` test."""
def __str__(self) -> str:
"""Return a human-readable string representation of the BgpRoute for reporting.
Examples
--------
- Prefix: 192.168.66.100/24 VRF: default
"""
return f"Prefix: {self.prefix} VRF: {self.vrf}"
class BgpRoutePath(BaseModel):
"""Model representing a BGP route path."""
model_config = ConfigDict(extra="forbid")
nexthop: IPv4Address
"""The next-hop IPv4 address for the path."""
origin: Literal["Igp", "Egp", "Incomplete"]
"""The BGP origin attribute of the route."""
def __str__(self) -> str:
"""Return a human-readable string representation of the RoutePath for reporting.
Examples
--------
- Next-hop: 192.168.66.101 Origin: Igp
"""
return f"Next-hop: {self.nexthop} Origin: {self.origin}"
class BgpVrf(BaseModel):
"""Model representing a VRF in a BGP instance."""
vrf: str = "default"
"""VRF context."""
address_families: list[AddressFamilyConfig]
"""List of address family configuration."""
def __str__(self) -> str:
"""Return a human-readable string representation of the BgpVrf for reporting.
Examples
--------
- VRF: default
"""
return f"VRF: {self.vrf}"
class RedistributedRouteConfig(BaseModel):
"""Model representing a BGP redistributed route configuration."""
proto: RedistributedProtocol
"""The redistributed protocol."""
include_leaked: bool = False
"""Flag to include leaked routes of the redistributed protocol while redistributing."""
route_map: str | None = None
"""Optional route map applied to the redistribution."""
@model_validator(mode="after")
def validate_inputs(self) -> Self:
"""Validate that 'include_leaked' is not set when the redistributed protocol is AttachedHost, User, Dynamic, or RIP."""
if self.include_leaked and self.proto in ["AttachedHost", "EOS SDK", "Dynamic", "RIP"]:
msg = f"'include_leaked' field is not supported for redistributed protocol '{self.proto}'"
raise ValueError(msg)
return self
def __str__(self) -> str:
"""Return a human-readable string representation of the RedistributedRouteConfig for reporting.
Examples
--------
- Proto: Connected, Include Leaked: True, Route Map: RM-CONN-2-BGP
"""
base_string = f"Proto: {self.proto}"
if self.include_leaked:
base_string += f", Include Leaked: {self.include_leaked}"
if self.route_map:
base_string += f", Route Map: {self.route_map}"
return base_string
class AddressFamilyConfig(BaseModel):
"""Model representing a BGP address family configuration."""
afi_safi: RedistributedAfiSafi
"""AFI/SAFI abbreviation per EOS."""
redistributed_routes: list[RedistributedRouteConfig]
"""List of redistributed route configuration."""
@model_validator(mode="after")
def validate_afi_safi_supported_routes(self) -> Self:
"""Validate each address family supported redistributed protocol.
Following table shows the supported redistributed routes for each address family.
| IPv4 Unicast | IPv6 Unicast | IPv4 Multicast | IPv6 Multicast |
| ------------------------|-------------------------|------------------------|------------------------|
| AttachedHost | AttachedHost | AttachedHost | Connected |
| Bgp | Bgp | Connected | IS-IS |
| Connected | Connected | IS-IS | OSPF Internal |
| Dynamic | DHCP | OSPF Internal | OSPF External |
| IS-IS | Dynamic | OSPF External | OSPF Nssa-External |
| OSPF Internal | IS-IS | OSPF Nssa-External | OSPFv3 Internal |
| OSPF External | OSPFv3 Internal | OSPFv3 Internal | OSPFv3 External |
| OSPF Nssa-External | OSPFv3 External | OSPFv3 External | OSPFv3 Nssa-External |
| OSPFv3 Internal | OSPFv3 Nssa-External | OSPFv3 Nssa-External | Static |
| OSPFv3 External | Static | Static | |
| OSPFv3 Nssa-External | User | | |
| RIP | | | |
| Static | | | |
| User | | | |
"""
for routes_data in self.redistributed_routes:
if all([self.afi_safi == "v4u", routes_data.proto == "DHCP"]):
msg = f"Redistributed protocol 'DHCP' is not supported for address-family '{AFI_SAFI_MAPPINGS[self.afi_safi]}'"
raise ValueError(msg)
if self.afi_safi == "v6u" and routes_data.proto in ["OSPF Internal", "OSPF External", "OSPF Nssa-External", "RIP"]:
msg = f"Redistributed protocol '{routes_data.proto}' is not supported for address-family '{AFI_SAFI_MAPPINGS[self.afi_safi]}'"
raise ValueError(msg)
if self.afi_safi == "v4m" and routes_data.proto not in IPV4_MULTICAST_SUPPORTED_PROTO:
msg = f"Redistributed protocol '{routes_data.proto}' is not supported for address-family '{AFI_SAFI_MAPPINGS[self.afi_safi]}'"
raise ValueError(msg)
if self.afi_safi == "v6m" and routes_data.proto not in IPV6_MULTICAST_SUPPORTED_PROTO:
msg = f"Redistributed protocol '{routes_data.proto}' is not supported for address-family '{AFI_SAFI_MAPPINGS[self.afi_safi]}'"
raise ValueError(msg)
return self
def __str__(self) -> str:
"""Return a human-readable string representation of the AddressFamilyConfig for reporting.
Examples
--------
- AFI-SAFI: IPv4 Unicast
"""
return f"AFI-SAFI: {AFI_SAFI_MAPPINGS[self.afi_safi]}"
@ -1,11 +1,11 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for generic routing tests.""" """Module containing input models for generic routing tests."""
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Network from ipaddress import IPv4Address, IPv4Network
from pydantic import BaseModel, ConfigDict from pydantic import BaseModel, ConfigDict
@ -17,12 +17,18 @@ class IPv4Routes(BaseModel):
model_config = ConfigDict(extra="forbid") model_config = ConfigDict(extra="forbid")
prefix: IPv4Network prefix: IPv4Network
"""The IPV4 network to validate the route type.""" """IPv4 prefix in CIDR notation."""
vrf: str = "default" vrf: str = "default"
"""VRF context. Defaults to `default` VRF.""" """VRF context. Defaults to `default` VRF."""
route_type: IPv4RouteType route_type: IPv4RouteType | None = None
"""List of IPV4 Route type to validate the valid rout type.""" """Expected route type. Required field in the `VerifyIPv4RouteType` test."""
nexthops: list[IPv4Address] | None = None
"""A list of the next-hop IP addresses for the route. Required field in the `VerifyIPv4RouteNextHops` test."""
strict: bool = False
"""If True, requires exact matching of provided nexthop(s).
Can be enabled in `VerifyIPv4RouteNextHops` test."""
def __str__(self) -> str: def __str__(self) -> str:
"""Return a human-readable string representation of the IPv4RouteType for reporting.""" """Return a human-readable string representation of the IPv4Routes for reporting."""
return f"Prefix: {self.prefix} VRF: {self.vrf}" return f"Prefix: {self.prefix} VRF: {self.vrf}"
@ -0,0 +1,202 @@
# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module containing input models for routing IS-IS tests."""
from __future__ import annotations
from ipaddress import IPv4Address, IPv4Network
from typing import Any, Literal
from warnings import warn
from pydantic import BaseModel, ConfigDict
from anta.custom_types import Interface
class ISISInstance(BaseModel):
"""Model for an IS-IS instance."""
model_config = ConfigDict(extra="forbid")
name: str
"""The name of the IS-IS instance."""
vrf: str = "default"
"""VRF context of the IS-IS instance."""
dataplane: Literal["MPLS", "mpls", "unset"] = "MPLS"
"""Configured SR data-plane for the IS-IS instance."""
segments: list[Segment] | None = None
"""List of IS-IS SR segments associated with the instance. Required field in the `VerifyISISSegmentRoutingAdjacencySegments` test."""
def __str__(self) -> str:
"""Return a human-readable string representation of the ISISInstance for reporting."""
return f"Instance: {self.name} VRF: {self.vrf}"
class Segment(BaseModel):
"""Model for an IS-IS segment."""
model_config = ConfigDict(extra="forbid")
interface: Interface
"""Local interface name."""
level: Literal[1, 2] = 2
"""IS-IS level of the segment."""
sid_origin: Literal["dynamic", "configured"] = "dynamic"
"""Origin of the segment ID."""
address: IPv4Address
"""Adjacency IPv4 address of the segment."""
def __str__(self) -> str:
"""Return a human-readable string representation of the Segment for reporting."""
return f"Local Intf: {self.interface} Adj IP Address: {self.address}"
class ISISInterface(BaseModel):
"""Model for an IS-IS enabled interface."""
model_config = ConfigDict(extra="forbid")
name: Interface
"""Interface name."""
vrf: str = "default"
"""VRF context of the interface."""
level: Literal[1, 2] = 2
"""IS-IS level of the interface."""
count: int | None = None
"""Expected number of IS-IS neighbors on this interface. Required field in the `VerifyISISNeighborCount` test."""
mode: Literal["point-to-point", "broadcast", "passive"] | None = None
"""IS-IS network type of the interface. Required field in the `VerifyISISInterfaceMode` test."""
def __str__(self) -> str:
"""Return a human-readable string representation of the ISISInterface for reporting."""
return f"Interface: {self.name} VRF: {self.vrf} Level: {self.level}"
class InterfaceCount(ISISInterface): # pragma: no cover
"""Alias for the ISISInterface model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the ISISInterface model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the InterfaceCount class, emitting a deprecation warning."""
warn(
message="InterfaceCount model is deprecated and will be removed in ANTA v2.0.0. Use the ISISInterface model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
class InterfaceState(ISISInterface): # pragma: no cover
"""Alias for the ISISInterface model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the ISISInterface model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the InterfaceState class, emitting a deprecation warning."""
warn(
message="InterfaceState model is deprecated and will be removed in ANTA v2.0.0. Use the ISISInterface model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
class IsisInstance(ISISInstance): # pragma: no cover
"""Alias for the ISISInstance model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the ISISInstance model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the IsisInstance class, emitting a deprecation warning."""
warn(
message="IsisInstance model is deprecated and will be removed in ANTA v2.0.0. Use the ISISInstance model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
class Tunnel(BaseModel):
"""Model for a IS-IS SR tunnel."""
model_config = ConfigDict(extra="forbid")
endpoint: IPv4Network
"""Endpoint of the tunnel."""
vias: list[TunnelPath] | None = None
"""Optional list of paths to reach the endpoint."""
def __str__(self) -> str:
"""Return a human-readable string representation of the Tunnel for reporting."""
return f"Endpoint: {self.endpoint}"
class TunnelPath(BaseModel):
"""Model for a IS-IS tunnel path."""
model_config = ConfigDict(extra="forbid")
nexthop: IPv4Address | None = None
"""Nexthop of the tunnel."""
type: Literal["ip", "tunnel"] | None = None
"""Type of the tunnel."""
interface: Interface | None = None
"""Interface of the tunnel."""
tunnel_id: Literal["TI-LFA", "ti-lfa", "unset"] | None = None
"""Computation method of the tunnel."""
def __str__(self) -> str:
"""Return a human-readable string representation of the TunnelPath for reporting."""
base_string = ""
if self.nexthop:
base_string += f" Next-hop: {self.nexthop}"
if self.type:
base_string += f" Type: {self.type}"
if self.interface:
base_string += f" Interface: {self.interface}"
if self.tunnel_id:
base_string += f" Tunnel ID: {self.tunnel_id}"
return base_string.lstrip()
class Entry(Tunnel): # pragma: no cover
"""Alias for the Tunnel model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the Tunnel model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the Entry class, emitting a deprecation warning."""
warn(
message="Entry model is deprecated and will be removed in ANTA v2.0.0. Use the Tunnel model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
class Vias(TunnelPath): # pragma: no cover
"""Alias for the TunnelPath model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the TunnelPath model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the Vias class, emitting a deprecation warning."""
warn(
message="Vias model is deprecated and will be removed in ANTA v2.0.0. Use the TunnelPath model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for security tests.""" """Module containing input models for security tests."""
@ -6,10 +6,20 @@
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Address from ipaddress import IPv4Address
from typing import Any from typing import TYPE_CHECKING, Any, ClassVar, get_args
from warnings import warn from warnings import warn
from pydantic import BaseModel, ConfigDict from pydantic import BaseModel, ConfigDict, Field, model_validator
from anta.custom_types import EcdsaKeySize, EncryptionAlgorithm, RsaKeySize
if TYPE_CHECKING:
import sys
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
class IPSecPeer(BaseModel): class IPSecPeer(BaseModel):
@ -43,6 +53,107 @@ class IPSecConn(BaseModel):
"""The IPv4 address of the destination in the security connection.""" """The IPv4 address of the destination in the security connection."""
class APISSLCertificate(BaseModel):
"""Model for an API SSL certificate."""
model_config = ConfigDict(extra="forbid")
certificate_name: str
"""The name of the certificate to be verified."""
expiry_threshold: int
"""The expiry threshold of the certificate in days."""
common_name: str
"""The Common Name of the certificate."""
encryption_algorithm: EncryptionAlgorithm
"""The encryption algorithm used by the certificate."""
key_size: RsaKeySize | EcdsaKeySize
"""The key size (in bits) of the encryption algorithm."""
def __str__(self) -> str:
"""Return a human-readable string representation of the APISSLCertificate for reporting.
Examples
--------
- Certificate: SIGNING_CA.crt
"""
return f"Certificate: {self.certificate_name}"
@model_validator(mode="after")
def validate_inputs(self) -> Self:
"""Validate the key size provided to the APISSLCertificates class.
If encryption_algorithm is RSA then key_size should be in {2048, 3072, 4096}.
If encryption_algorithm is ECDSA then key_size should be in {256, 384, 521}.
"""
if self.encryption_algorithm == "RSA" and self.key_size not in get_args(RsaKeySize):
msg = f"`{self.certificate_name}` key size {self.key_size} is invalid for RSA encryption. Allowed sizes are {get_args(RsaKeySize)}."
raise ValueError(msg)
if self.encryption_algorithm == "ECDSA" and self.key_size not in get_args(EcdsaKeySize):
msg = f"`{self.certificate_name}` key size {self.key_size} is invalid for ECDSA encryption. Allowed sizes are {get_args(EcdsaKeySize)}."
raise ValueError(msg)
return self
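A standalone sketch of the key-size check above; the Literal aliases stand in for anta.custom_types.RsaKeySize and EcdsaKeySize, with the allowed sizes taken from the validator docstring:

from typing import Literal, get_args

RsaKeySize = Literal[2048, 3072, 4096]
EcdsaKeySize = Literal[256, 384, 521]


def check_key_size(encryption_algorithm: str, key_size: int) -> None:
    """Raise ValueError when the key size does not match the algorithm."""
    if encryption_algorithm == "RSA" and key_size not in get_args(RsaKeySize):
        raise ValueError(f"key size {key_size} is invalid for RSA encryption. Allowed sizes are {get_args(RsaKeySize)}.")
    if encryption_algorithm == "ECDSA" and key_size not in get_args(EcdsaKeySize):
        raise ValueError(f"key size {key_size} is invalid for ECDSA encryption. Allowed sizes are {get_args(EcdsaKeySize)}.")


check_key_size("RSA", 4096)      # passes silently

try:
    check_key_size("ECDSA", 2048)
except ValueError as exc:
    print(exc)                   # 2048 is not a valid ECDSA key size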
class ACLEntry(BaseModel):
"""Model for an Access Control List (ACL) entry."""
model_config = ConfigDict(extra="forbid")
sequence: int = Field(ge=1, le=4294967295)
"""Sequence number of the ACL entry, used to define the order of processing. Must be between 1 and 4294967295."""
action: str
"""Action of the ACL entry. Example: `deny ip any any`."""
def __str__(self) -> str:
"""Return a human-readable string representation of the ACLEntry for reporting.
Examples
--------
- Sequence: 10
"""
return f"Sequence: {self.sequence}"
class ACL(BaseModel):
"""Model for an Access Control List (ACL)."""
model_config = ConfigDict(extra="forbid")
name: str
"""Name of the ACL."""
entries: list[ACLEntry]
"""List of the ACL entries."""
IPv4ACLEntry: ClassVar[type[ACLEntry]] = ACLEntry
"""To maintain backward compatibility."""
def __str__(self) -> str:
"""Return a human-readable string representation of the ACL for reporting.
Examples
--------
- ACL name: Test
"""
return f"ACL name: {self.name}"
class IPv4ACL(ACL): # pragma: no cover
"""Alias for the ACL model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the ACL model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the IPv4ACL class, emitting a deprecation warning."""
warn(
message="IPv4ACL model is deprecated and will be removed in ANTA v2.0.0. Use the ACL model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
class IPSecPeers(IPSecPeer): # pragma: no cover class IPSecPeers(IPSecPeer): # pragma: no cover
"""Alias for the IPSecPeers model to maintain backward compatibility. """Alias for the IPSecPeers model to maintain backward compatibility.
@ -52,7 +163,7 @@ class IPSecPeers(IPSecPeer): # pragma: no cover
""" """
def __init__(self, **data: Any) -> None: # noqa: ANN401 def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the IPSecPeer class, emitting a deprecation warning.""" """Initialize the IPSecPeers class, emitting a deprecation warning."""
warn( warn(
message="IPSecPeers model is deprecated and will be removed in ANTA v2.0.0. Use the IPSecPeer model instead.", message="IPSecPeers model is deprecated and will be removed in ANTA v2.0.0. Use the IPSecPeer model instead.",
category=DeprecationWarning, category=DeprecationWarning,
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for services tests.""" """Module containing input models for services tests."""
@ -6,9 +6,13 @@
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Address, IPv6Address from ipaddress import IPv4Address, IPv6Address
from typing import Any, Literal
from warnings import warn
from pydantic import BaseModel, ConfigDict, Field from pydantic import BaseModel, ConfigDict, Field
from anta.custom_types import ErrDisableReasons
class DnsServer(BaseModel): class DnsServer(BaseModel):
"""Model for a DNS server configuration.""" """Model for a DNS server configuration."""
@ -28,4 +32,43 @@ class DnsServer(BaseModel):
-------- --------
Server 10.0.0.1 (VRF: default, Priority: 1) Server 10.0.0.1 (VRF: default, Priority: 1)
""" """
return f"Server {self.server_address} (VRF: {self.vrf}, Priority: {self.priority})" return f"Server {self.server_address} VRF: {self.vrf} Priority: {self.priority}"
class ErrdisableRecovery(BaseModel):
"""Model for the error disable recovery functionality."""
model_config = ConfigDict(extra="forbid")
reason: ErrDisableReasons
"""Name of the error disable reason."""
status: Literal["Enabled", "Disabled"] = "Enabled"
"""Operational status of the reason. Defaults to 'Enabled'."""
interval: int = Field(ge=30, le=86400)
"""Timer interval of the reason in seconds."""
def __str__(self) -> str:
"""Return a human-readable string representation of the ErrdisableRecovery for reporting.
Examples
--------
Reason: acl Status: Enabled Interval: 300
"""
return f"Reason: {self.reason} Status: {self.status} Interval: {self.interval}"
class ErrDisableReason(ErrdisableRecovery): # pragma: no cover
"""Alias for the ErrdisableRecovery model to maintain backward compatibility.
When initialized, it will emit a deprecation warning and call the ErrdisableRecovery model.
TODO: Remove this class in ANTA v2.0.0.
"""
def __init__(self, **data: Any) -> None: # noqa: ANN401
"""Initialize the ErrdisableRecovery class, emitting a depreciation warning."""
warn(
message="ErrDisableReason model is deprecated and will be removed in ANTA v2.0.0. Use the ErrdisableRecovery model instead.",
category=DeprecationWarning,
stacklevel=2,
)
super().__init__(**data)
anta/input_models/snmp.py (new file, 127 lines)
@ -0,0 +1,127 @@
# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module containing input models for SNMP tests."""
from __future__ import annotations
from ipaddress import IPv4Address
from typing import TYPE_CHECKING, Literal
from pydantic import BaseModel, ConfigDict, model_validator
from anta.custom_types import Hostname, Interface, Port, SnmpEncryptionAlgorithm, SnmpHashingAlgorithm, SnmpVersion, SnmpVersionV3AuthType
if TYPE_CHECKING:
import sys
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
class SnmpHost(BaseModel):
"""Model for a SNMP host."""
model_config = ConfigDict(extra="forbid")
hostname: IPv4Address | Hostname
"""IPv4 address or Hostname of the SNMP notification host."""
vrf: str = "default"
"""Optional VRF for SNMP Hosts. If not provided, it defaults to `default`."""
notification_type: Literal["trap", "inform"] = "trap"
"""Type of SNMP notification (trap or inform), it defaults to trap."""
version: SnmpVersion | None = None
"""SNMP protocol version. Required field in the `VerifySnmpNotificationHost` test."""
udp_port: Port | int = 162
"""UDP port for SNMP. If not provided then defaults to 162."""
community_string: str | None = None
"""Optional SNMP community string for authentication,required for SNMP version is v1 or v2c. Can be provided in the `VerifySnmpNotificationHost` test."""
user: str | None = None
"""Optional SNMP user for authentication, required for SNMP version v3. Can be provided in the `VerifySnmpNotificationHost` test."""
def __str__(self) -> str:
"""Return a human-readable string representation of the SnmpHost for reporting.
Examples
--------
- Host: 192.168.1.100 VRF: default
"""
return f"Host: {self.hostname} VRF: {self.vrf}"
class SnmpUser(BaseModel):
"""Model for a SNMP User."""
model_config = ConfigDict(extra="forbid")
username: str
"""SNMP user name."""
group_name: str
"""SNMP group for the user."""
version: SnmpVersion
"""SNMP protocol version."""
auth_type: SnmpHashingAlgorithm | None = None
"""User authentication algorithm. Can be provided in the `VerifySnmpUser` test."""
priv_type: SnmpEncryptionAlgorithm | None = None
"""User privacy algorithm. Can be provided in the `VerifySnmpUser` test."""
def __str__(self) -> str:
"""Return a human-readable string representation of the SnmpUser for reporting.
Examples
--------
- User: Test Group: Test_Group Version: v2c
"""
return f"User: {self.username} Group: {self.group_name} Version: {self.version}"
class SnmpSourceInterface(BaseModel):
"""Model for a SNMP source-interface."""
interface: Interface
"""Interface to use as the source IP address of SNMP messages."""
vrf: str = "default"
"""VRF of the source interface."""
def __str__(self) -> str:
"""Return a human-readable string representation of the SnmpSourceInterface for reporting.
Examples
--------
- Source Interface: Ethernet1 VRF: default
"""
return f"Source Interface: {self.interface} VRF: {self.vrf}"
class SnmpGroup(BaseModel):
"""Model for an SNMP group."""
group_name: str
"""SNMP group name."""
version: SnmpVersion
"""SNMP protocol version."""
read_view: str | None = None
"""Optional view to restrict read access."""
write_view: str | None = None
"""Optional view to restrict write access."""
notify_view: str | None = None
"""Optional view to restrict notifications."""
authentication: SnmpVersionV3AuthType | None = None
"""SNMPv3 authentication settings. Required when version is v3. Can be provided in the `VerifySnmpGroup` test."""
@model_validator(mode="after")
def validate_inputs(self) -> Self:
"""Validate the inputs provided to the SnmpGroup class."""
if self.version == "v3" and self.authentication is None:
msg = f"{self!s}: `authentication` field is missing in the input"
raise ValueError(msg)
return self
def __str__(self) -> str:
"""Return a human-readable string representation of the SnmpGroup for reporting.
Examples
--------
- Group: Test_Group Version: v2c
"""
return f"Group: {self.group_name} Version: {self.version}"
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for services tests.""" """Module containing input models for services tests."""
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module containing input models for system tests.""" """Module containing input models for system tests."""
@ -7,9 +7,9 @@ from __future__ import annotations
from ipaddress import IPv4Address from ipaddress import IPv4Address
from pydantic import BaseModel, ConfigDict, Field from pydantic import BaseModel, ConfigDict
from anta.custom_types import Hostname from anta.custom_types import Hostname, NTPStratumLevel
class NTPServer(BaseModel): class NTPServer(BaseModel):
@ -22,10 +22,20 @@ class NTPServer(BaseModel):
For example, 'ntp.example.com' in the configuration might resolve to 'ntp3.example.com' in the device output.""" For example, 'ntp.example.com' in the configuration might resolve to 'ntp3.example.com' in the device output."""
preferred: bool = False preferred: bool = False
"""Optional preferred for NTP server. If not provided, it defaults to `False`.""" """Optional preferred for NTP server. If not provided, it defaults to `False`."""
stratum: int = Field(ge=0, le=16) stratum: NTPStratumLevel
"""NTP stratum level (0 to 15) where 0 is the reference clock and 16 indicates unsynchronized. """NTP stratum level (0 to 15) where 0 is the reference clock and 16 indicates unsynchronized.
Values should be between 0 and 15 for valid synchronization and 16 represents an out-of-sync state.""" Values should be between 0 and 15 for valid synchronization and 16 represents an out-of-sync state."""
def __str__(self) -> str: def __str__(self) -> str:
"""Representation of the NTPServer model.""" """Representation of the NTPServer model."""
return f"{self.server_address} (Preferred: {self.preferred}, Stratum: {self.stratum})" return f"NTP Server: {self.server_address} Preferred: {self.preferred} Stratum: {self.stratum}"
class NTPPool(BaseModel):
"""Model for a NTP server pool."""
model_config = ConfigDict(extra="forbid")
server_addresses: list[Hostname | IPv4Address]
"""The list of NTP server addresses as an IPv4 addresses or hostnames."""
preferred_stratum_range: list[NTPStratumLevel]
"""Preferred NTP stratum range for the NTP server pool. If the expected stratum range is 1 to 3 then preferred_stratum_range should be `[1,3]`."""
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Inventory module for ANTA.""" """Inventory module for ANTA."""
@ -8,15 +8,16 @@ from __future__ import annotations
import asyncio import asyncio
import logging import logging
from ipaddress import ip_address, ip_network from ipaddress import ip_address, ip_network
from json import load as json_load
from pathlib import Path from pathlib import Path
from typing import Any, ClassVar from typing import Any, ClassVar, Literal
from pydantic import ValidationError from pydantic import ValidationError
from yaml import YAMLError, safe_load from yaml import YAMLError, safe_load
from anta.device import AntaDevice, AsyncEOSDevice from anta.device import AntaDevice, AsyncEOSDevice
from anta.inventory.exceptions import InventoryIncorrectSchemaError, InventoryRootKeyError from anta.inventory.exceptions import InventoryIncorrectSchemaError, InventoryRootKeyError
from anta.inventory.models import AntaInventoryInput from anta.inventory.models import AntaInventoryHost, AntaInventoryInput
from anta.logger import anta_log_exception from anta.logger import anta_log_exception
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -26,7 +27,7 @@ class AntaInventory(dict[str, AntaDevice]):
"""Inventory abstraction for ANTA framework.""" """Inventory abstraction for ANTA framework."""
# Root key of inventory part of the inventory file # Root key of inventory part of the inventory file
INVENTORY_ROOT_KEY = "anta_inventory" INVENTORY_ROOT_KEY: str = "anta_inventory"
# Supported Output format # Supported Output format
INVENTORY_OUTPUT_FORMAT: ClassVar[list[str]] = ["native", "json"] INVENTORY_OUTPUT_FORMAT: ClassVar[list[str]] = ["native", "json"]
@ -178,6 +179,7 @@ class AntaInventory(dict[str, AntaDevice]):
password: str, password: str,
enable_password: str | None = None, enable_password: str | None = None,
timeout: float | None = None, timeout: float | None = None,
file_format: Literal["yaml", "json"] = "yaml",
*, *,
enable: bool = False, enable: bool = False,
insecure: bool = False, insecure: bool = False,
@ -199,6 +201,8 @@ class AntaInventory(dict[str, AntaDevice]):
Enable password to use if required. Enable password to use if required.
timeout timeout
Timeout value in seconds for outgoing API calls. Timeout value in seconds for outgoing API calls.
file_format
Whether the inventory file is in JSON or YAML.
enable enable
Whether or not the commands need to be run in enable mode towards the devices. Whether or not the commands need to be run in enable mode towards the devices.
insecure insecure
@ -214,6 +218,10 @@ class AntaInventory(dict[str, AntaDevice]):
Inventory file is not following AntaInventory Schema. Inventory file is not following AntaInventory Schema.
""" """
if file_format not in ["yaml", "json"]:
message = f"'{file_format}' is not a valid format for an AntaInventory file. Only 'yaml' and 'json' are supported."
raise ValueError(message)
inventory = AntaInventory() inventory = AntaInventory()
kwargs: dict[str, Any] = { kwargs: dict[str, Any] = {
"username": username, "username": username,
@ -224,20 +232,12 @@ class AntaInventory(dict[str, AntaDevice]):
"insecure": insecure, "insecure": insecure,
"disable_cache": disable_cache, "disable_cache": disable_cache,
} }
if username is None:
message = "'username' is required to create an AntaInventory"
logger.error(message)
raise ValueError(message)
if password is None:
message = "'password' is required to create an AntaInventory"
logger.error(message)
raise ValueError(message)
try: try:
filename = Path(filename) filename = Path(filename)
with filename.open(encoding="UTF-8") as file: with filename.open(encoding="UTF-8") as file:
data = safe_load(file) data = safe_load(file) if file_format == "yaml" else json_load(file)
except (TypeError, YAMLError, OSError) as e: except (TypeError, YAMLError, OSError, ValueError) as e:
message = f"Unable to parse ANTA Device Inventory file '{filename}'" message = f"Unable to parse ANTA Device Inventory file '{filename}'"
anta_log_exception(e, message, logger) anta_log_exception(e, message, logger)
raise raise
@ -342,3 +342,20 @@ class AntaInventory(dict[str, AntaDevice]):
if isinstance(r, Exception): if isinstance(r, Exception):
message = "Error when refreshing inventory" message = "Error when refreshing inventory"
anta_log_exception(r, message, logger) anta_log_exception(r, message, logger)
def dump(self) -> AntaInventoryInput:
"""Dump the AntaInventory to an AntaInventoryInput.
Each host is dumped individually.
"""
hosts = [
AntaInventoryHost(
name=device.name,
host=device.host if hasattr(device, "host") else device.name,
port=device.port if hasattr(device, "port") else None,
tags=device.tags,
disable_cache=device.cache is None,
)
for device in self.devices
]
return AntaInventoryInput(hosts=hosts)
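A hedged usage sketch for the new `file_format` parameter and `dump()` method shown above; the file name and credentials are placeholders.

```python
from anta.inventory import AntaInventory

inventory = AntaInventory.parse(
    filename="inventory.json",  # placeholder path
    username="admin",           # placeholder credentials
    password="admin",
    file_format="json",         # new parameter, defaults to "yaml"
)

# Round-trip the live inventory back into an AntaInventoryInput model.
print(inventory.dump().yaml())
```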

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Manage Exception in Inventory module.""" """Manage Exception in Inventory module."""

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Models related to inventory management.""" """Models related to inventory management."""
@ -9,14 +9,26 @@ import logging
import math import math
import yaml import yaml
from pydantic import BaseModel, ConfigDict, IPvAnyAddress, IPvAnyNetwork from pydantic import BaseModel, ConfigDict, FieldSerializationInfo, IPvAnyAddress, IPvAnyNetwork, field_serializer
from anta.custom_types import Hostname, Port from anta.custom_types import Hostname, Port
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class AntaInventoryHost(BaseModel): class AntaInventoryBaseModel(BaseModel):
"""Pydantic BaseModel for AntaInventory objects."""
model_config = ConfigDict(extra="forbid")
# Using check_fields as we plan to use this in the child classes
@field_serializer("tags", when_used="json", check_fields=False)
def serialize_tags(self, tags: set[str], _info: FieldSerializationInfo) -> list[str]:
"""Make sure the tags are always dumped in the same order."""
return sorted(tags)
class AntaInventoryHost(AntaInventoryBaseModel):
"""Host entry of AntaInventoryInput. """Host entry of AntaInventoryInput.
Attributes Attributes
@ -34,8 +46,6 @@ class AntaInventoryHost(BaseModel):
""" """
model_config = ConfigDict(extra="forbid")
name: str | None = None name: str | None = None
host: Hostname | IPvAnyAddress host: Hostname | IPvAnyAddress
port: Port | None = None port: Port | None = None
@ -43,7 +53,7 @@ class AntaInventoryHost(BaseModel):
disable_cache: bool = False disable_cache: bool = False
class AntaInventoryNetwork(BaseModel): class AntaInventoryNetwork(AntaInventoryBaseModel):
"""Network entry of AntaInventoryInput. """Network entry of AntaInventoryInput.
Attributes Attributes
@ -57,14 +67,12 @@ class AntaInventoryNetwork(BaseModel):
""" """
model_config = ConfigDict(extra="forbid")
network: IPvAnyNetwork network: IPvAnyNetwork
tags: set[str] | None = None tags: set[str] | None = None
disable_cache: bool = False disable_cache: bool = False
class AntaInventoryRange(BaseModel): class AntaInventoryRange(AntaInventoryBaseModel):
"""IP Range entry of AntaInventoryInput. """IP Range entry of AntaInventoryInput.
Attributes Attributes
@ -80,8 +88,6 @@ class AntaInventoryRange(BaseModel):
""" """
model_config = ConfigDict(extra="forbid")
start: IPvAnyAddress start: IPvAnyAddress
end: IPvAnyAddress end: IPvAnyAddress
tags: set[str] | None = None tags: set[str] | None = None
@ -109,4 +115,13 @@ class AntaInventoryInput(BaseModel):
# This could be improved. # This could be improved.
# https://github.com/pydantic/pydantic/issues/1043 # https://github.com/pydantic/pydantic/issues/1043
# Explore if this worth using this: https://github.com/NowanIlfideme/pydantic-yaml # Explore if this worth using this: https://github.com/NowanIlfideme/pydantic-yaml
return yaml.safe_dump(yaml.safe_load(self.model_dump_json(serialize_as_any=True, exclude_unset=True)), indent=2, width=math.inf) return yaml.safe_dump(yaml.safe_load(self.model_dump_json(serialize_as_any=True, exclude_unset=True)), width=math.inf)
def to_json(self) -> str:
"""Return a JSON representation string of this model.
Returns
-------
The JSON representation string of this model.
"""
return self.model_dump_json(serialize_as_any=True, exclude_unset=True, indent=2)
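A small sketch (values illustrative) of the new serialization helpers: the `field_serializer` on the base model keeps tags in a deterministic order, and `to_json()` complements the existing `yaml()` output.

```python
from anta.inventory.models import AntaInventoryHost, AntaInventoryInput

host = AntaInventoryHost(host="10.0.0.1", tags={"spine", "dc1"})
inventory_input = AntaInventoryInput(hosts=[host])

# Tags are emitted sorted ("dc1", "spine") thanks to the serializer above.
print(inventory_input.to_json())
print(inventory_input.yaml())
```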

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Configure logging for ANTA.""" """Configure logging for ANTA."""
@ -9,15 +9,13 @@ import logging
import traceback import traceback
from datetime import timedelta from datetime import timedelta
from enum import Enum from enum import Enum
from typing import TYPE_CHECKING, Literal from pathlib import Path
from typing import Literal
from rich.logging import RichHandler from rich.logging import RichHandler
from anta import __DEBUG__ from anta import __DEBUG__
if TYPE_CHECKING:
from pathlib import Path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -69,27 +67,59 @@ def setup_logging(level: LogLevel = Log.INFO, file: Path | None = None) -> None:
# httpx as well # httpx as well
logging.getLogger("httpx").setLevel(logging.WARNING) logging.getLogger("httpx").setLevel(logging.WARNING)
# Add RichHandler for stdout # Add RichHandler for stdout if not already present
rich_handler = RichHandler(markup=True, rich_tracebacks=True, tracebacks_show_locals=False) _maybe_add_rich_handler(loglevel, root)
# Show Python module in stdout at DEBUG level
fmt_string = "[grey58]\\[%(name)s][/grey58] %(message)s" if loglevel == logging.DEBUG else "%(message)s" # Add FileHandler if file is provided and same File Handler is not already present
formatter = logging.Formatter(fmt=fmt_string, datefmt="[%X]") if file and not _get_file_handler(root, file):
rich_handler.setFormatter(formatter)
root.addHandler(rich_handler)
# Add FileHandler if file is provided
if file:
file_handler = logging.FileHandler(file) file_handler = logging.FileHandler(file)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
file_handler.setFormatter(formatter) file_handler.setFormatter(formatter)
root.addHandler(file_handler) root.addHandler(file_handler)
# If level is DEBUG and file is provided, do not send DEBUG level to stdout # If level is DEBUG and file is provided, do not send DEBUG level to stdout
if loglevel == logging.DEBUG: if loglevel == logging.DEBUG and (rich_handler := _get_rich_handler(root)) is not None:
rich_handler.setLevel(logging.INFO) rich_handler.setLevel(logging.INFO)
if __DEBUG__: if __DEBUG__:
logger.debug("ANTA Debug Mode enabled") logger.debug("ANTA Debug Mode enabled")
def _get_file_handler(logger_instance: logging.Logger, file: Path) -> logging.FileHandler | None:
"""Return the FileHandler if present."""
return (
next(
(
handler
for handler in logger_instance.handlers
if isinstance(handler, logging.FileHandler) and str(Path(handler.baseFilename).resolve()) == str(file.resolve())
),
None,
)
if logger_instance.hasHandlers()
else None
)
def _get_rich_handler(logger_instance: logging.Logger) -> logging.Handler | None:
"""Return the ANTA Rich Handler."""
return next((handler for handler in logger_instance.handlers if handler.get_name() == "ANTA_RICH_HANDLER"), None) if logger_instance.hasHandlers() else None
def _maybe_add_rich_handler(loglevel: int, logger_instance: logging.Logger) -> None:
"""Add RichHandler for stdout if not already present."""
if _get_rich_handler(logger_instance) is not None:
# Nothing to do.
return
anta_rich_handler = RichHandler(markup=True, rich_tracebacks=True, tracebacks_show_locals=False)
anta_rich_handler.set_name("ANTA_RICH_HANDLER")
# Show Python module in stdout at DEBUG level
fmt_string = "[grey58]\\[%(name)s][/grey58] %(message)s" if loglevel == logging.DEBUG else "%(message)s"
formatter = logging.Formatter(fmt=fmt_string, datefmt="[%X]")
anta_rich_handler.setFormatter(formatter)
logger_instance.addHandler(anta_rich_handler)
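A sketch of the intended behaviour of these helpers, assuming a fresh interpreter: calling `setup_logging()` twice with the same file should no longer stack duplicate handlers, since the Rich handler is looked up by name and the file handler by its resolved path.

```python
import logging
from pathlib import Path

from anta.logger import setup_logging

setup_logging(file=Path("anta.log"))
setup_logging(file=Path("anta.log"))  # second call finds the existing handlers and adds nothing

# Expect one RichHandler and one FileHandler on the root logger (plus any handlers
# already attached by other code).
print([type(handler).__name__ for handler in logging.getLogger().handlers])
```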
def format_td(seconds: float, digits: int = 3) -> str: def format_td(seconds: float, digits: int = 3) -> str:
"""Return a formatted string from a float number representing seconds and a number of digits.""" """Return a formatted string from a float number representing seconds and a number of digits."""
isec, fsec = divmod(round(seconds * 10**digits), 10**digits) isec, fsec = divmod(round(seconds * 10**digits), 10**digits)

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Models to define a TestStructure.""" """Models to define a TestStructure."""
@ -15,9 +15,8 @@ from typing import TYPE_CHECKING, Any, Callable, ClassVar, Literal, TypeVar
from pydantic import BaseModel, ConfigDict, ValidationError, create_model from pydantic import BaseModel, ConfigDict, ValidationError, create_model
from anta import GITHUB_SUGGESTION from anta.constants import EOS_BLACKLIST_CMDS, KNOWN_EOS_ERRORS, UNSUPPORTED_PLATFORM_ERRORS
from anta.constants import KNOWN_EOS_ERRORS from anta.custom_types import Revision
from anta.custom_types import REGEXP_EOS_BLACKLIST_CMDS, Revision
from anta.logger import anta_log_exception, exc_to_str from anta.logger import anta_log_exception, exc_to_str
from anta.result_manager.models import AntaTestStatus, TestResult from anta.result_manager.models import AntaTestStatus, TestResult
@ -81,7 +80,7 @@ class AntaTemplate:
# Create a AntaTemplateParams model to elegantly store AntaTemplate variables # Create a AntaTemplateParams model to elegantly store AntaTemplate variables
field_names = [fname for _, fname, _, _ in Formatter().parse(self.template) if fname] field_names = [fname for _, fname, _, _ in Formatter().parse(self.template) if fname]
# Extracting the type from the params based on the expected field_names from the template # Extracting the type from the params based on the expected field_names from the template
fields: dict[str, Any] = {key: (Any, ...) for key in field_names} fields: dict[str, Any] = dict.fromkeys(field_names, (Any, ...))
self.params_schema = create_model( self.params_schema = create_model(
"AntaParams", "AntaParams",
__base__=AntaParamsBaseModel, __base__=AntaParamsBaseModel,
@ -258,7 +257,8 @@ class AntaCommand(BaseModel):
msg = f"Command '{self.command}' has not been collected and has not returned an error. Call AntaDevice.collect()." msg = f"Command '{self.command}' has not been collected and has not returned an error. Call AntaDevice.collect()."
raise RuntimeError(msg) raise RuntimeError(msg)
return all("not supported on this hardware platform" not in e for e in self.errors)
return not any(any(error in e for error in UNSUPPORTED_PLATFORM_ERRORS) for e in self.errors)
@property @property
def returned_known_eos_error(self) -> bool: def returned_known_eos_error(self) -> bool:
@ -432,7 +432,7 @@ class AntaTest(ABC):
inputs: dict[str, Any] | AntaTest.Input | None = None, inputs: dict[str, Any] | AntaTest.Input | None = None,
eos_data: list[dict[Any, Any] | str] | None = None, eos_data: list[dict[Any, Any] | str] | None = None,
) -> None: ) -> None:
"""AntaTest Constructor. """Initialize an AntaTest instance.
Parameters Parameters
---------- ----------
@ -575,12 +575,12 @@ class AntaTest(ABC):
"""Check if CLI commands contain a blocked keyword.""" """Check if CLI commands contain a blocked keyword."""
state = False state = False
for command in self.instance_commands: for command in self.instance_commands:
for pattern in REGEXP_EOS_BLACKLIST_CMDS: for pattern in EOS_BLACKLIST_CMDS:
if re.match(pattern, command.command): if re.match(pattern, command.command):
self.logger.error( self.logger.error(
"Command <%s> is blocked for security reason matching %s", "Command <%s> is blocked for security reason matching %s",
command.command, command.command,
REGEXP_EOS_BLACKLIST_CMDS, EOS_BLACKLIST_CMDS,
) )
self.result.is_error(f"<{command.command}> is blocked for security reason") self.result.is_error(f"<{command.command}> is blocked for security reason")
state = True state = True
@ -683,8 +683,6 @@ class AntaTest(ABC):
cmds = self.failed_commands cmds = self.failed_commands
unsupported_commands = [f"'{c.command}' is not supported on {self.device.hw_model}" for c in cmds if not c.supported] unsupported_commands = [f"'{c.command}' is not supported on {self.device.hw_model}" for c in cmds if not c.supported]
if unsupported_commands: if unsupported_commands:
msg = f"Test {self.name} has been skipped because it is not supported on {self.device.hw_model}: {GITHUB_SUGGESTION}"
self.logger.warning(msg)
self.result.is_skipped("\n".join(unsupported_commands)) self.result.is_skipped("\n".join(unsupported_commands))
return return
returned_known_eos_error = [f"'{c.command}' failed on {self.device.name}: {', '.join(c.errors)}" for c in cmds if c.returned_known_eos_error] returned_known_eos_error = [f"'{c.command}' failed on {self.device.name}: {', '.join(c.errors)}" for c in cmds if c.returned_known_eos_error]

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Report management for ANTA.""" """Report management for ANTA."""
@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)
class ReportTable: class ReportTable:
"""TableReport Generate a Table based on TestResult.""" """TableReport Generate a Table based on TestResult."""
@dataclass() @dataclass
class Headers: # pylint: disable=too-many-instance-attributes class Headers: # pylint: disable=too-many-instance-attributes
"""Headers for the table report.""" """Headers for the table report."""
@ -168,7 +168,7 @@ class ReportTable:
self.Headers.list_of_error_nodes, self.Headers.list_of_error_nodes,
] ]
table = self._build_headers(headers=headers, table=table) table = self._build_headers(headers=headers, table=table)
for test, stats in sorted(manager.test_stats.items()): for test, stats in manager.test_stats.items():
if tests is None or test in tests: if tests is None or test in tests:
table.add_row( table.add_row(
test, test,
@ -214,7 +214,7 @@ class ReportTable:
self.Headers.list_of_error_tests, self.Headers.list_of_error_tests,
] ]
table = self._build_headers(headers=headers, table=table) table = self._build_headers(headers=headers, table=table)
for device, stats in sorted(manager.device_stats.items()): for device, stats in manager.device_stats.items():
if devices is None or device in devices: if devices is None or device in devices:
table.add_row( table.add_row(
device, device,

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""CSV Report management for ANTA.""" """CSV Report management for ANTA."""
@ -8,6 +8,7 @@ from __future__ import annotations
import csv import csv
import logging import logging
import os
from dataclasses import dataclass from dataclasses import dataclass
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
@ -111,6 +112,7 @@ class ReportCsv:
csvwriter = csv.writer( csvwriter = csv.writer(
csvfile, csvfile,
delimiter=",", delimiter=",",
lineterminator=os.linesep,
) )
csvwriter.writerow(headers) csvwriter.writerow(headers)
for entry in results.results: for entry in results.results:
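A standalone illustration of the `lineterminator=os.linesep` change above, using only the standard library; the row content is made up.

```python
import csv
import io
import os

buffer = io.StringIO()
writer = csv.writer(buffer, delimiter=",", lineterminator=os.linesep)
writer.writerow(["Device", "Test", "Result"])
print(repr(buffer.getvalue()))  # ends with '\r\n' on Windows, '\n' elsewhere
```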

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Markdown report generator for ANTA test results.""" """Markdown report generator for ANTA test results."""
@ -177,8 +177,8 @@ class MDReportBase(ABC):
if text is None: if text is None:
return "" return ""
# Replace newlines with spaces to keep content on one line # Replace newlines with <br> to preserve line breaks in HTML
text = text.replace("\n", " ") text = text.replace("\n", "<br>")
# Replace backticks with single quotes # Replace backticks with single quotes
return text.replace("`", "'") return text.replace("`", "'")
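A tiny standalone illustration of the `safe_markdown()` behaviour change: newlines now become HTML line breaks instead of spaces, while backticks are still turned into single quotes.

```python
text = "first failure\nsecond `failure`"
print(text.replace("\n", "<br>").replace("`", "'"))  # first failure<br>second 'failure'
```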
@ -237,7 +237,7 @@ class SummaryTotalsDeviceUnderTest(MDReportBase):
def generate_rows(self) -> Generator[str, None, None]: def generate_rows(self) -> Generator[str, None, None]:
"""Generate the rows of the summary totals device under test table.""" """Generate the rows of the summary totals device under test table."""
for device, stat in self.results.device_stats.items(): for device, stat in self.results.device_stats.items():
total_tests = stat.tests_success_count + stat.tests_skipped_count + stat.tests_failure_count + stat.tests_error_count total_tests = stat.tests_success_count + stat.tests_skipped_count + stat.tests_failure_count + stat.tests_error_count + stat.tests_unset_count
categories_skipped = ", ".join(sorted(convert_categories(list(stat.categories_skipped)))) categories_skipped = ", ".join(sorted(convert_categories(list(stat.categories_skipped))))
categories_failed = ", ".join(sorted(convert_categories(list(stat.categories_failed)))) categories_failed = ", ".join(sorted(convert_categories(list(stat.categories_failed))))
yield ( yield (
@ -261,10 +261,11 @@ class SummaryTotalsPerCategory(MDReportBase):
def generate_rows(self) -> Generator[str, None, None]: def generate_rows(self) -> Generator[str, None, None]:
"""Generate the rows of the summary totals per category table.""" """Generate the rows of the summary totals per category table."""
for category, stat in self.results.sorted_category_stats.items(): for category, stat in self.results.category_stats.items():
total_tests = stat.tests_success_count + stat.tests_skipped_count + stat.tests_failure_count + stat.tests_error_count converted_category = convert_categories([category])[0]
total_tests = stat.tests_success_count + stat.tests_skipped_count + stat.tests_failure_count + stat.tests_error_count + stat.tests_unset_count
yield ( yield (
f"| {category} | {total_tests} | {stat.tests_success_count} | {stat.tests_skipped_count} | {stat.tests_failure_count} " f"| {converted_category} | {total_tests} | {stat.tests_success_count} | {stat.tests_skipped_count} | {stat.tests_failure_count} "
f"| {stat.tests_error_count} |\n" f"| {stat.tests_error_count} |\n"
) )
@ -284,9 +285,9 @@ class TestResults(MDReportBase):
def generate_rows(self) -> Generator[str, None, None]: def generate_rows(self) -> Generator[str, None, None]:
"""Generate the rows of the all test results table.""" """Generate the rows of the all test results table."""
for result in self.results.get_results(sort_by=["name", "test"]): for result in self.results.results:
messages = self.safe_markdown(", ".join(result.messages)) messages = self.safe_markdown(result.messages[0]) if len(result.messages) == 1 else self.safe_markdown("<br>".join(result.messages))
categories = ", ".join(convert_categories(result.categories)) categories = ", ".join(sorted(convert_categories(result.categories)))
yield ( yield (
f"| {result.name or '-'} | {categories or '-'} | {result.test or '-'} " f"| {result.name or '-'} | {categories or '-'} | {result.test or '-'} "
f"| {result.description or '-'} | {self.safe_markdown(result.custom_field) or '-'} | {result.result or '-'} | {messages or '-'} |\n" f"| {result.description or '-'} | {self.safe_markdown(result.custom_field) or '-'} | {result.result or '-'} | {messages or '-'} |\n"

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Result Manager module for ANTA.""" """Result Manager module for ANTA."""
@ -12,6 +12,8 @@ from functools import cached_property
from itertools import chain from itertools import chain
from typing import Any from typing import Any
from typing_extensions import deprecated
from anta.result_manager.models import AntaTestStatus, TestResult from anta.result_manager.models import AntaTestStatus, TestResult
from .models import CategoryStats, DeviceStats, TestStats from .models import CategoryStats, DeviceStats, TestStats
@ -21,56 +23,40 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-instance-attributes # pylint: disable=too-many-instance-attributes
class ResultManager: class ResultManager:
"""Helper to manage Test Results and generate reports. """Manager of ANTA Results.
Examples The status of the class is initialized to "unset"
--------
Create Inventory:
inventory_anta = AntaInventory.parse( Then when adding a test with a status that is NOT 'error' the following
filename='examples/inventory.yml', table shows the updated status:
username='ansible',
password='ansible',
)
Create Result Manager: | Current Status | Added test Status | Updated Status |
| -------------- | ------------------------------- | -------------- |
| unset | Any | Any |
| skipped | unset, skipped | skipped |
| skipped | success | success |
| skipped | failure | failure |
| success | unset, skipped, success | success |
| success | failure | failure |
| failure | unset, skipped, success, failure | failure |
manager = ResultManager() If the status of the added test is error, the status is untouched and the
`error_status` attribute is set to True.
Run tests for all connected devices: Attributes
----------
for device in inventory_anta.get_inventory().devices: results
manager.add( dump
VerifyNTP(device=device).test() status
Status representing all the results.
manager.add( error_status
VerifyEOSVersion(device=device).test(version='4.28.3M') Will be `True` if a test returned an error.
) results_by_status
dump
Print result in native format: json
device_stats
manager.results category_stats
[ test_stats
TestResult(
name="pf1",
test="VerifyZeroTouch",
categories=["configuration"],
description="Verifies ZeroTouch is disabled",
result="success",
messages=[],
custom_field=None,
),
TestResult(
name="pf1",
test='VerifyNTP',
categories=["software"],
categories=['system'],
description='Verifies if NTP is synchronised.',
result='failure',
messages=["The device is not synchronized with the configured NTP server(s): 'NTP is disabled.'"],
custom_field=None,
),
]
""" """
_result_entries: list[TestResult] _result_entries: list[TestResult]
@ -83,26 +69,7 @@ class ResultManager:
_stats_in_sync: bool _stats_in_sync: bool
def __init__(self) -> None: def __init__(self) -> None:
"""Class constructor. """Initialize a ResultManager instance."""
The status of the class is initialized to "unset"
Then when adding a test with a status that is NOT 'error' the following
table shows the updated status:
| Current Status | Added test Status | Updated Status |
| -------------- | ------------------------------- | -------------- |
| unset | Any | Any |
| skipped | unset, skipped | skipped |
| skipped | success | success |
| skipped | failure | failure |
| success | unset, skipped, success | success |
| success | failure | failure |
| failure | unset, skipped success, failure | failure |
If the status of the added test is error, the status is untouched and the
error_status is set to True.
"""
self.reset() self.reset()
def reset(self) -> None: def reset(self) -> None:
@ -143,28 +110,28 @@ class ResultManager:
return json.dumps(self.dump, indent=4) return json.dumps(self.dump, indent=4)
@property @property
def device_stats(self) -> defaultdict[str, DeviceStats]: def device_stats(self) -> dict[str, DeviceStats]:
"""Get the device statistics.""" """Get the device statistics."""
self._ensure_stats_in_sync() self._ensure_stats_in_sync()
return self._device_stats return dict(sorted(self._device_stats.items()))
@property @property
def category_stats(self) -> defaultdict[str, CategoryStats]: def category_stats(self) -> dict[str, CategoryStats]:
"""Get the category statistics.""" """Get the category statistics."""
self._ensure_stats_in_sync() self._ensure_stats_in_sync()
return self._category_stats return dict(sorted(self._category_stats.items()))
@property @property
def test_stats(self) -> defaultdict[str, TestStats]: def test_stats(self) -> dict[str, TestStats]:
"""Get the test statistics.""" """Get the test statistics."""
self._ensure_stats_in_sync() self._ensure_stats_in_sync()
return self._test_stats return dict(sorted(self._test_stats.items()))
@property @property
@deprecated("This property is deprecated, use `category_stats` instead. This will be removed in ANTA v2.0.0.", category=DeprecationWarning)
def sorted_category_stats(self) -> dict[str, CategoryStats]: def sorted_category_stats(self) -> dict[str, CategoryStats]:
"""A property that returns the category_stats dictionary sorted by key name.""" """A property that returns the category_stats dictionary sorted by key name."""
self._ensure_stats_in_sync() return self.category_stats
return dict(sorted(self.category_stats.items()))
@cached_property @cached_property
def results_by_status(self) -> dict[AntaTestStatus, list[TestResult]]: def results_by_status(self) -> dict[AntaTestStatus, list[TestResult]]:
@ -316,6 +283,21 @@ class ResultManager:
"""Return the current status including error_status if ignore_error is False.""" """Return the current status including error_status if ignore_error is False."""
return "error" if self.error_status and not ignore_error else self.status return "error" if self.error_status and not ignore_error else self.status
def sort(self, sort_by: list[str]) -> ResultManager:
"""Sort the ResultManager results based on TestResult fields.
Parameters
----------
sort_by
List of TestResult fields to sort the results.
"""
accepted_fields = TestResult.model_fields.keys()
if not set(sort_by).issubset(set(accepted_fields)):
msg = f"Invalid sort_by fields: {sort_by}. Accepted fields are: {list(accepted_fields)}"
raise ValueError(msg)
self._result_entries.sort(key=lambda result: [getattr(result, field) for field in sort_by])
return self
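An illustrative use of the new `sort()` method; the `TestResult` field names follow the example removed from the old docstring, and only the required-looking fields are set here.

```python
from anta.result_manager import ResultManager
from anta.result_manager.models import TestResult

manager = ResultManager()
manager.add(TestResult(name="spine1", test="VerifyNTP", categories=["system"], description="Verifies if NTP is synchronised."))
manager.add(TestResult(name="leaf1", test="VerifyNTP", categories=["system"], description="Verifies if NTP is synchronised."))

manager.sort(["name", "test"])  # sort by device name, then by test name
print([result.name for result in manager.results])  # ['leaf1', 'spine1']
```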
def filter(self, hide: set[AntaTestStatus]) -> ResultManager: def filter(self, hide: set[AntaTestStatus]) -> ResultManager:
"""Get a filtered ResultManager based on test status. """Get a filtered ResultManager based on test status.
@ -334,6 +316,7 @@ class ResultManager:
manager.results = self.get_results(possible_statuses - hide) manager.results = self.get_results(possible_statuses - hide)
return manager return manager
@deprecated("This method is deprecated. This will be removed in ANTA v2.0.0.", category=DeprecationWarning)
def filter_by_tests(self, tests: set[str]) -> ResultManager: def filter_by_tests(self, tests: set[str]) -> ResultManager:
"""Get a filtered ResultManager that only contains specific tests. """Get a filtered ResultManager that only contains specific tests.
@ -351,6 +334,7 @@ class ResultManager:
manager.results = [result for result in self._result_entries if result.test in tests] manager.results = [result for result in self._result_entries if result.test in tests]
return manager return manager
@deprecated("This method is deprecated. This will be removed in ANTA v2.0.0.", category=DeprecationWarning)
def filter_by_devices(self, devices: set[str]) -> ResultManager: def filter_by_devices(self, devices: set[str]) -> ResultManager:
"""Get a filtered ResultManager that only contains specific devices. """Get a filtered ResultManager that only contains specific devices.
@ -368,6 +352,7 @@ class ResultManager:
manager.results = [result for result in self._result_entries if result.name in devices] manager.results = [result for result in self._result_entries if result.name in devices]
return manager return manager
@deprecated("This method is deprecated. This will be removed in ANTA v2.0.0.", category=DeprecationWarning)
def get_tests(self) -> set[str]: def get_tests(self) -> set[str]:
"""Get the set of all the test names. """Get the set of all the test names.
@ -378,6 +363,7 @@ class ResultManager:
""" """
return {str(result.test) for result in self._result_entries} return {str(result.test) for result in self._result_entries}
@deprecated("This method is deprecated. This will be removed in ANTA v2.0.0.", category=DeprecationWarning)
def get_devices(self) -> set[str]: def get_devices(self) -> set[str]:
"""Get the set of all the device names. """Get the set of all the device names.

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Models related to anta.result_manager module.""" """Models related to anta.result_manager module."""

View file

@ -1,7 +1,7 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""ANTA runner function.""" """ANTA runner module."""
from __future__ import annotations from __future__ import annotations
@ -115,7 +115,7 @@ async def setup_inventory(inventory: AntaInventory, tags: set[str] | None, devic
# If there are no devices in the inventory after filtering, exit # If there are no devices in the inventory after filtering, exit
if not selected_inventory.devices: if not selected_inventory.devices:
msg = f'No reachable device {f"matching the tags {tags} " if tags else ""}was found.{f" Selected devices: {devices} " if devices is not None else ""}' msg = f"No reachable device {f'matching the tags {tags} ' if tags else ''}was found.{f' Selected devices: {devices} ' if devices is not None else ''}"
logger.warning(msg) logger.warning(msg)
return None return None
@ -170,8 +170,7 @@ def prepare_tests(
if total_test_count == 0: if total_test_count == 0:
msg = ( msg = (
f"There are no tests{f' matching the tags {tags} ' if tags else ' '}to run in the current " f"There are no tests{f' matching the tags {tags} ' if tags else ' '}to run in the current test catalog and device inventory, please verify your inputs."
"test catalog and device inventory, please verify your inputs."
) )
logger.warning(msg) logger.warning(msg)
return None return None

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to all ANTA tests.""" """Module related to all ANTA tests."""

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the EOS various AAA tests.""" """Module related to the EOS various AAA tests."""
@ -51,12 +51,12 @@ class VerifyTacacsSourceIntf(AntaTest):
"""Main test function for VerifyTacacsSourceIntf.""" """Main test function for VerifyTacacsSourceIntf."""
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
try: try:
if command_output["srcIntf"][self.inputs.vrf] == self.inputs.intf: if (src_interface := command_output["srcIntf"][self.inputs.vrf]) == self.inputs.intf:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"Wrong source-interface configured in VRF {self.inputs.vrf}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Source interface mismatch - Expected: {self.inputs.intf} Actual: {src_interface}")
except KeyError: except KeyError:
self.result.is_failure(f"Source-interface {self.inputs.intf} is not configured in VRF {self.inputs.vrf}") self.result.is_failure(f"VRF: {self.inputs.vrf} Source Interface: {self.inputs.intf} - Not configured")
class VerifyTacacsServers(AntaTest): class VerifyTacacsServers(AntaTest):
@ -108,7 +108,7 @@ class VerifyTacacsServers(AntaTest):
if not not_configured: if not not_configured:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"TACACS servers {not_configured} are not configured in VRF {self.inputs.vrf}") self.result.is_failure(f"TACACS servers {', '.join(not_configured)} are not configured in VRF {self.inputs.vrf}")
class VerifyTacacsServerGroups(AntaTest): class VerifyTacacsServerGroups(AntaTest):
@ -151,7 +151,7 @@ class VerifyTacacsServerGroups(AntaTest):
if not not_configured: if not not_configured:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"TACACS server group(s) {not_configured} are not configured") self.result.is_failure(f"TACACS server group(s) {', '.join(not_configured)} are not configured")
class VerifyAuthenMethods(AntaTest): class VerifyAuthenMethods(AntaTest):
@ -204,14 +204,14 @@ class VerifyAuthenMethods(AntaTest):
self.result.is_failure("AAA authentication methods are not configured for login console") self.result.is_failure("AAA authentication methods are not configured for login console")
return return
if v["login"]["methods"] != self.inputs.methods: if v["login"]["methods"] != self.inputs.methods:
self.result.is_failure(f"AAA authentication methods {self.inputs.methods} are not matching for login console") self.result.is_failure(f"AAA authentication methods {', '.join(self.inputs.methods)} are not matching for login console")
return return
not_matching.extend(auth_type for methods in v.values() if methods["methods"] != self.inputs.methods) not_matching.extend(auth_type for methods in v.values() if methods["methods"] != self.inputs.methods)
if not not_matching: if not not_matching:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"AAA authentication methods {self.inputs.methods} are not matching for {not_matching}") self.result.is_failure(f"AAA authentication methods {', '.join(self.inputs.methods)} are not matching for {', '.join(not_matching)}")
class VerifyAuthzMethods(AntaTest): class VerifyAuthzMethods(AntaTest):
@ -263,7 +263,7 @@ class VerifyAuthzMethods(AntaTest):
if not not_matching: if not not_matching:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"AAA authorization methods {self.inputs.methods} are not matching for {not_matching}") self.result.is_failure(f"AAA authorization methods {', '.join(self.inputs.methods)} are not matching for {', '.join(not_matching)}")
class VerifyAcctDefaultMethods(AntaTest): class VerifyAcctDefaultMethods(AntaTest):
@ -319,12 +319,12 @@ class VerifyAcctDefaultMethods(AntaTest):
if methods["defaultMethods"] != self.inputs.methods: if methods["defaultMethods"] != self.inputs.methods:
not_matching.append(acct_type) not_matching.append(acct_type)
if not_configured: if not_configured:
self.result.is_failure(f"AAA default accounting is not configured for {not_configured}") self.result.is_failure(f"AAA default accounting is not configured for {', '.join(not_configured)}")
return return
if not not_matching: if not not_matching:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"AAA accounting default methods {self.inputs.methods} are not matching for {not_matching}") self.result.is_failure(f"AAA accounting default methods {', '.join(self.inputs.methods)} are not matching for {', '.join(not_matching)}")
class VerifyAcctConsoleMethods(AntaTest): class VerifyAcctConsoleMethods(AntaTest):
@ -380,9 +380,9 @@ class VerifyAcctConsoleMethods(AntaTest):
if methods["consoleMethods"] != self.inputs.methods: if methods["consoleMethods"] != self.inputs.methods:
not_matching.append(acct_type) not_matching.append(acct_type)
if not_configured: if not_configured:
self.result.is_failure(f"AAA console accounting is not configured for {not_configured}") self.result.is_failure(f"AAA console accounting is not configured for {', '.join(not_configured)}")
return return
if not not_matching: if not not_matching:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"AAA accounting console methods {self.inputs.methods} are not matching for {not_matching}") self.result.is_failure(f"AAA accounting console methods {', '.join(self.inputs.methods)} are not matching for {', '.join(not_matching)}")

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to Adaptive virtual topology tests.""" """Module related to Adaptive virtual topology tests."""
@ -47,7 +47,7 @@ class VerifyAVTPathHealth(AntaTest):
# Check if AVT is configured # Check if AVT is configured
if not command_output: if not command_output:
self.result.is_failure("Adaptive virtual topology paths are not configured.") self.result.is_failure("Adaptive virtual topology paths are not configured")
return return
# Iterate over each VRF # Iterate over each VRF
@ -61,11 +61,11 @@ class VerifyAVTPathHealth(AntaTest):
# Check the status of the AVT path # Check the status of the AVT path
if not valid and not active: if not valid and not active:
self.result.is_failure(f"AVT path {path} for profile {profile} in VRF {vrf} is invalid and not active.") self.result.is_failure(f"VRF: {vrf} Profile: {profile} AVT path: {path} - Invalid and not active")
elif not valid: elif not valid:
self.result.is_failure(f"AVT path {path} for profile {profile} in VRF {vrf} is invalid.") self.result.is_failure(f"VRF: {vrf} Profile: {profile} AVT path: {path} - Invalid")
elif not active: elif not active:
self.result.is_failure(f"AVT path {path} for profile {profile} in VRF {vrf} is not active.") self.result.is_failure(f"VRF: {vrf} Profile: {profile} AVT path: {path} - Not active")
class VerifyAVTSpecificPath(AntaTest): class VerifyAVTSpecificPath(AntaTest):
@ -143,7 +143,7 @@ class VerifyAVTSpecificPath(AntaTest):
valid = get_value(path_data, "flags.valid") valid = get_value(path_data, "flags.valid")
active = get_value(path_data, "flags.active") active = get_value(path_data, "flags.active")
if not all([valid, active]): if not all([valid, active]):
self.result.is_failure(f"{avt_path} - Incorrect path {path} - Valid: {valid}, Active: {active}") self.result.is_failure(f"{avt_path} - Incorrect path {path} - Valid: {valid} Active: {active}")
# If no matching path found, mark the test as failed # If no matching path found, mark the test as failed
if not path_found: if not path_found:
@ -192,4 +192,4 @@ class VerifyAVTRole(AntaTest):
# Check if the AVT role matches the expected role # Check if the AVT role matches the expected role
if self.inputs.role != command_output.get("role"): if self.inputs.role != command_output.get("role"):
self.result.is_failure(f"Expected AVT role as `{self.inputs.role}`, but found `{command_output.get('role')}` instead.") self.result.is_failure(f"AVT role mismatch - Expected: {self.inputs.role} Actual: {command_output.get('role')}")

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to BFD tests.""" """Module related to BFD tests."""
@ -8,9 +8,9 @@
from __future__ import annotations from __future__ import annotations
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import TYPE_CHECKING, ClassVar from typing import TYPE_CHECKING, ClassVar, TypeVar
from pydantic import Field from pydantic import Field, field_validator
from anta.input_models.bfd import BFDPeer from anta.input_models.bfd import BFDPeer
from anta.models import AntaCommand, AntaTest from anta.models import AntaCommand, AntaTest
@ -19,6 +19,9 @@ from anta.tools import get_value
if TYPE_CHECKING: if TYPE_CHECKING:
from anta.models import AntaTemplate from anta.models import AntaTemplate
# Using a TypeVar for the BFDPeer model since mypy thinks it's a ClassVar and not a valid type when used in field validators
T = TypeVar("T", bound=BFDPeer)
class VerifyBFDSpecificPeers(AntaTest): class VerifyBFDSpecificPeers(AntaTest):
"""Verifies the state of IPv4 BFD peer sessions. """Verifies the state of IPv4 BFD peer sessions.
@ -99,15 +102,18 @@ class VerifyBFDPeersIntervals(AntaTest):
1. Confirms that the specified VRF is configured. 1. Confirms that the specified VRF is configured.
2. Verifies that the peer exists in the BFD configuration. 2. Verifies that the peer exists in the BFD configuration.
3. Confirms that BFD peer is correctly configured with the `Transmit interval, Receive interval and Multiplier`. 3. Confirms that BFD peer is correctly configured with the `Transmit interval, Receive interval and Multiplier`.
4. Verifies that BFD peer is correctly configured with the `Detection time`, if provided.
Expected Results Expected Results
---------------- ----------------
* Success: If all of the following conditions are met: * Success: If all of the following conditions are met:
- All specified peers are found in the BFD configuration within the specified VRF. - All specified peers are found in the BFD configuration within the specified VRF.
- All BFD peers are correctly configured with the `Transmit interval, Receive interval and Multiplier`. - All BFD peers are correctly configured with the `Transmit interval, Receive interval and Multiplier`.
- If provided, the `Detection time` is correctly configured.
* Failure: If any of the following occur: * Failure: If any of the following occur:
- A specified peer is not found in the BFD configuration within the specified VRF. - A specified peer is not found in the BFD configuration within the specified VRF.
- Any BFD peer not correctly configured with the `Transmit interval, Receive interval and Multiplier`. - Any BFD peer not correctly configured with the `Transmit interval, Receive interval and Multiplier`.
- Any BFD peer is not correctly configured with `Detection time`, if provided.
Examples Examples
-------- --------
@ -125,6 +131,7 @@ class VerifyBFDPeersIntervals(AntaTest):
tx_interval: 1200 tx_interval: 1200
rx_interval: 1200 rx_interval: 1200
multiplier: 3 multiplier: 3
detection_time: 3600
``` ```
""" """
@ -139,6 +146,23 @@ class VerifyBFDPeersIntervals(AntaTest):
BFDPeer: ClassVar[type[BFDPeer]] = BFDPeer BFDPeer: ClassVar[type[BFDPeer]] = BFDPeer
"""To maintain backward compatibility""" """To maintain backward compatibility"""
@field_validator("bfd_peers")
@classmethod
def validate_bfd_peers(cls, bfd_peers: list[T]) -> list[T]:
"""Validate that 'tx_interval', 'rx_interval' and 'multiplier' fields are provided in each BFD peer."""
for peer in bfd_peers:
missing_fields = []
if peer.tx_interval is None:
missing_fields.append("tx_interval")
if peer.rx_interval is None:
missing_fields.append("rx_interval")
if peer.multiplier is None:
missing_fields.append("multiplier")
if missing_fields:
msg = f"{peer} {', '.join(missing_fields)} field(s) are missing in the input"
raise ValueError(msg)
return bfd_peers
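A hedged sketch of the effect of this validator: omitting the interval fields now fails at input construction time. The peer field names (`peer_address`, `vrf`) are assumed from the `BFDPeer` input model, which is not shown in this hunk.

```python
from pydantic import ValidationError

from anta.tests.bfd import VerifyBFDPeersIntervals

try:
    VerifyBFDPeersIntervals.Input(bfd_peers=[{"peer_address": "192.0.2.1", "vrf": "default"}])
except ValidationError as exc:
    print(exc)  # tx_interval, rx_interval, multiplier field(s) are missing in the input
```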
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyBFDPeersIntervals.""" """Main test function for VerifyBFDPeersIntervals."""
@ -151,6 +175,7 @@ class VerifyBFDPeersIntervals(AntaTest):
tx_interval = bfd_peer.tx_interval tx_interval = bfd_peer.tx_interval
rx_interval = bfd_peer.rx_interval rx_interval = bfd_peer.rx_interval
multiplier = bfd_peer.multiplier multiplier = bfd_peer.multiplier
detect_time = bfd_peer.detection_time
# Check if BFD peer configured # Check if BFD peer configured
bfd_output = get_value( bfd_output = get_value(
@ -166,6 +191,7 @@ class VerifyBFDPeersIntervals(AntaTest):
bfd_details = bfd_output.get("peerStatsDetail", {}) bfd_details = bfd_output.get("peerStatsDetail", {})
op_tx_interval = bfd_details.get("operTxInterval") // 1000 op_tx_interval = bfd_details.get("operTxInterval") // 1000
op_rx_interval = bfd_details.get("operRxInterval") // 1000 op_rx_interval = bfd_details.get("operRxInterval") // 1000
op_detection_time = bfd_details.get("detectTime") // 1000
detect_multiplier = bfd_details.get("detectMult") detect_multiplier = bfd_details.get("detectMult")
if op_tx_interval != tx_interval: if op_tx_interval != tx_interval:
@ -177,6 +203,9 @@ class VerifyBFDPeersIntervals(AntaTest):
if detect_multiplier != multiplier: if detect_multiplier != multiplier:
self.result.is_failure(f"{bfd_peer} - Incorrect Multiplier - Expected: {multiplier} Actual: {detect_multiplier}") self.result.is_failure(f"{bfd_peer} - Incorrect Multiplier - Expected: {multiplier} Actual: {detect_multiplier}")
if detect_time and op_detection_time != detect_time:
self.result.is_failure(f"{bfd_peer} - Incorrect Detection Time - Expected: {detect_time} Actual: {op_detection_time}")
class VerifyBFDPeersHealth(AntaTest): class VerifyBFDPeersHealth(AntaTest):
"""Verifies the health of IPv4 BFD peers across all VRFs. """Verifies the health of IPv4 BFD peers across all VRFs.
@ -231,7 +260,7 @@ class VerifyBFDPeersHealth(AntaTest):
# Check if any IPv4 BFD peer is configured # Check if any IPv4 BFD peer is configured
ipv4_neighbors_exist = any(vrf_data["ipv4Neighbors"] for vrf_data in bfd_output["vrfs"].values()) ipv4_neighbors_exist = any(vrf_data["ipv4Neighbors"] for vrf_data in bfd_output["vrfs"].values())
if not ipv4_neighbors_exist: if not ipv4_neighbors_exist:
self.result.is_failure("No IPv4 BFD peers are configured for any VRF.") self.result.is_failure("No IPv4 BFD peers are configured for any VRF")
return return
# Iterate over IPv4 BFD peers # Iterate over IPv4 BFD peers
@ -299,6 +328,16 @@ class VerifyBFDPeersRegProtocols(AntaTest):
BFDPeer: ClassVar[type[BFDPeer]] = BFDPeer BFDPeer: ClassVar[type[BFDPeer]] = BFDPeer
"""To maintain backward compatibility""" """To maintain backward compatibility"""
@field_validator("bfd_peers")
@classmethod
def validate_bfd_peers(cls, bfd_peers: list[T]) -> list[T]:
"""Validate that 'protocols' field is provided in each BFD peer."""
for peer in bfd_peers:
if peer.protocols is None:
msg = f"{peer} 'protocols' field missing in the input"
raise ValueError(msg)
return bfd_peers
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyBFDPeersRegProtocols.""" """Main test function for VerifyBFDPeersRegProtocols."""
@ -323,5 +362,5 @@ class VerifyBFDPeersRegProtocols(AntaTest):
# Check registered protocols # Check registered protocols
difference = sorted(set(protocols) - set(get_value(bfd_output, "peerStatsDetail.apps"))) difference = sorted(set(protocols) - set(get_value(bfd_output, "peerStatsDetail.apps")))
if difference: if difference:
failures = " ".join(f"`{item}`" for item in difference) failures = ", ".join(f"`{item}`" for item in difference)
self.result.is_failure(f"{bfd_peer} - {failures} routing protocol(s) not configured") self.result.is_failure(f"{bfd_peer} - {failures} routing protocol(s) not configured")

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the device configuration tests.""" """Module related to the device configuration tests."""
@ -125,4 +125,4 @@ class VerifyRunningConfigLines(AntaTest):
if not failure_msgs: if not failure_msgs:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure("Following patterns were not found: " + ",".join(failure_msgs)) self.result.is_failure("Following patterns were not found: " + ", ".join(failure_msgs))

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to various connectivity tests.""" """Module related to various connectivity tests."""
@ -7,11 +7,16 @@
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from __future__ import annotations from __future__ import annotations
from typing import ClassVar from typing import ClassVar, TypeVar
from pydantic import field_validator
from anta.input_models.connectivity import Host, LLDPNeighbor, Neighbor from anta.input_models.connectivity import Host, LLDPNeighbor, Neighbor
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
# Using a TypeVar for the Host model since mypy thinks it's a ClassVar and not a valid type when used in field validators
T = TypeVar("T", bound=Host)
class VerifyReachability(AntaTest): class VerifyReachability(AntaTest):
"""Test network reachability to one or many destination IP(s). """Test network reachability to one or many destination IP(s).
@ -32,11 +37,18 @@ class VerifyReachability(AntaTest):
vrf: MGMT vrf: MGMT
df_bit: True df_bit: True
size: 100 size: 100
reachable: true
- source: Management0 - source: Management0
destination: 8.8.8.8 destination: 8.8.8.8
vrf: MGMT vrf: MGMT
df_bit: True df_bit: True
size: 100 size: 100
- source: fd12:3456:789a:1::1
destination: fd12:3456:789a:1::2
vrf: default
df_bit: True
size: 100
reachable: false
``` ```
""" """
@ -54,6 +66,16 @@ class VerifyReachability(AntaTest):
Host: ClassVar[type[Host]] = Host Host: ClassVar[type[Host]] = Host
"""To maintain backward compatibility.""" """To maintain backward compatibility."""
@field_validator("hosts")
@classmethod
def validate_hosts(cls, hosts: list[T]) -> list[T]:
"""Validate the 'destination' and 'source' IP address family in each host."""
for host in hosts:
if not isinstance(host.source, str) and host.destination.version != host.source.version:
msg = f"{host} IP address family for destination does not match source"
raise ValueError(msg)
return hosts
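A hedged sketch of the new address-family check, assuming the `source` value is parsed as an IP address rather than an interface name: mixing an IPv4 source with an IPv6 destination is rejected when the inputs are built.

```python
from pydantic import ValidationError

from anta.tests.connectivity import VerifyReachability

try:
    VerifyReachability.Input(hosts=[{"source": "10.0.0.1", "destination": "fd12:3456:789a:1::2"}])
except ValidationError as exc:
    print(exc)  # IP address family for destination does not match source
```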
def render(self, template: AntaTemplate) -> list[AntaCommand]: def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for each host in the input list.""" """Render the template for each host in the input list."""
return [ return [
@ -69,9 +91,14 @@ class VerifyReachability(AntaTest):
self.result.is_success() self.result.is_success()
for command, host in zip(self.instance_commands, self.inputs.hosts): for command, host in zip(self.instance_commands, self.inputs.hosts):
if f"{host.repeat} received" not in command.json_output["messages"][0]: # Verifies the network is reachable
if host.reachable and f"{host.repeat} received" not in command.json_output["messages"][0]:
self.result.is_failure(f"{host} - Unreachable") self.result.is_failure(f"{host} - Unreachable")
# Verifies the network is unreachable.
if not host.reachable and f"{host.repeat} received" in command.json_output["messages"][0]:
self.result.is_failure(f"{host} - Destination is expected to be unreachable but found reachable")
class VerifyLLDPNeighbors(AntaTest): class VerifyLLDPNeighbors(AntaTest):
"""Verifies the connection status of the specified LLDP (Link Layer Discovery Protocol) neighbors. """Verifies the connection status of the specified LLDP (Link Layer Discovery Protocol) neighbors.

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the CVX tests.""" """Module related to the CVX tests."""
@ -49,7 +49,7 @@ class VerifyMcsClientMounts(AntaTest):
continue continue
mcs_mount_state_detected = True mcs_mount_state_detected = True
if (state := mount_state["state"]) != "mountStateMountComplete": if (state := mount_state["state"]) != "mountStateMountComplete":
self.result.is_failure(f"MCS Client mount states are not valid: {state}") self.result.is_failure(f"MCS Client mount states are not valid - Expected: mountStateMountComplete Actual: {state}")
if not mcs_mount_state_detected: if not mcs_mount_state_detected:
self.result.is_failure("MCS Client mount states are not present") self.result.is_failure("MCS Client mount states are not present")
@ -88,7 +88,12 @@ class VerifyManagementCVX(AntaTest):
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
if (cluster_state := get_value(command_output, "clusterStatus.enabled")) != self.inputs.enabled: if (cluster_state := get_value(command_output, "clusterStatus.enabled")) != self.inputs.enabled:
self.result.is_failure(f"Management CVX status is not valid: {cluster_state}") if cluster_state is None:
self.result.is_failure("Management CVX status - Not configured")
return
cluster_state = "enabled" if cluster_state else "disabled"
self.inputs.enabled = "enabled" if self.inputs.enabled else "disabled"
self.result.is_failure(f"Management CVX status is not valid: Expected: {self.inputs.enabled} Actual: {cluster_state}")
class VerifyMcsServerMounts(AntaTest): class VerifyMcsServerMounts(AntaTest):
@ -126,13 +131,15 @@ class VerifyMcsServerMounts(AntaTest):
mount_states = mount["mountStates"][0] mount_states = mount["mountStates"][0]
if (num_path_states := len(mount_states["pathStates"])) != (expected_num := len(self.mcs_path_types)): if (num_path_states := len(mount_states["pathStates"])) != (expected_num := len(self.mcs_path_types)):
self.result.is_failure(f"Incorrect number of mount path states for {hostname} - Expected: {expected_num}, Actual: {num_path_states}") self.result.is_failure(f"Host: {hostname} - Incorrect number of mount path states - Expected: {expected_num} Actual: {num_path_states}")
for path in mount_states["pathStates"]: for path in mount_states["pathStates"]:
if (path_type := path.get("type")) not in self.mcs_path_types: if (path_type := path.get("type")) not in self.mcs_path_types:
self.result.is_failure(f"Unexpected MCS path type for {hostname}: '{path_type}'.") self.result.is_failure(f"Host: {hostname} - Unexpected MCS path type - Expected: {', '.join(self.mcs_path_types)} Actual: {path_type}")
if (path_state := path.get("state")) != "mountStateMountComplete": if (path_state := path.get("state")) != "mountStateMountComplete":
self.result.is_failure(f"MCS server mount state for path '{path_type}' is not valid is for {hostname}: '{path_state}'.") self.result.is_failure(
f"Host: {hostname} Path Type: {path_type} - MCS server mount state is not valid - Expected: mountStateMountComplete Actual:{path_state}"
)
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
@ -152,18 +159,18 @@ class VerifyMcsServerMounts(AntaTest):
mcs_mounts = [mount for mount in mounts if mount["service"] == "Mcs"] mcs_mounts = [mount for mount in mounts if mount["service"] == "Mcs"]
if not mounts: if not mounts:
self.result.is_failure(f"No mount status for {hostname}") self.result.is_failure(f"Host: {hostname} - No mount status found")
continue continue
if not mcs_mounts: if not mcs_mounts:
self.result.is_failure(f"MCS mount state not detected for {hostname}") self.result.is_failure(f"Host: {hostname} - MCS mount state not detected")
else: else:
for mount in mcs_mounts: for mount in mcs_mounts:
self.validate_mount_states(mount, hostname) self.validate_mount_states(mount, hostname)
active_count += 1 active_count += 1
if active_count != self.inputs.connections_count: if active_count != self.inputs.connections_count:
self.result.is_failure(f"Incorrect CVX successful connections count. Expected: {self.inputs.connections_count}, Actual : {active_count}") self.result.is_failure(f"Incorrect CVX successful connections count - Expected: {self.inputs.connections_count} Actual: {active_count}")
class VerifyActiveCVXConnections(AntaTest): class VerifyActiveCVXConnections(AntaTest):
@ -200,13 +207,13 @@ class VerifyActiveCVXConnections(AntaTest):
self.result.is_success() self.result.is_success()
if not (connections := command_output.get("connections")): if not (connections := command_output.get("connections")):
self.result.is_failure("CVX connections are not available.") self.result.is_failure("CVX connections are not available")
return return
active_count = len([connection for connection in connections if connection.get("oobConnectionActive")]) active_count = len([connection for connection in connections if connection.get("oobConnectionActive")])
if self.inputs.connections_count != active_count: if self.inputs.connections_count != active_count:
self.result.is_failure(f"CVX active connections count. Expected: {self.inputs.connections_count}, Actual : {active_count}") self.result.is_failure(f"Incorrect CVX active connections count - Expected: {self.inputs.connections_count} Actual: {active_count}")
class VerifyCVXClusterStatus(AntaTest): class VerifyCVXClusterStatus(AntaTest):
@ -261,7 +268,7 @@ class VerifyCVXClusterStatus(AntaTest):
# Check cluster role # Check cluster role
if (cluster_role := cluster_status.get("role")) != self.inputs.role: if (cluster_role := cluster_status.get("role")) != self.inputs.role:
self.result.is_failure(f"CVX Role is not valid: {cluster_role}") self.result.is_failure(f"CVX Role is not valid: Expected: {self.inputs.role} Actual: {cluster_role}")
return return
# Validate peer status # Validate peer status
@ -269,15 +276,15 @@ class VerifyCVXClusterStatus(AntaTest):
# Check peer count # Check peer count
if (num_of_peers := len(peer_cluster)) != (expected_num_of_peers := len(self.inputs.peer_status)): if (num_of_peers := len(peer_cluster)) != (expected_num_of_peers := len(self.inputs.peer_status)):
self.result.is_failure(f"Unexpected number of peers {num_of_peers} vs {expected_num_of_peers}") self.result.is_failure(f"Unexpected number of peers - Expected: {expected_num_of_peers} Actual: {num_of_peers}")
# Check each peer # Check each peer
for peer in self.inputs.peer_status: for peer in self.inputs.peer_status:
# Retrieve the peer status from the peer cluster # Retrieve the peer status from the peer cluster
if (eos_peer_status := get_value(peer_cluster, peer.peer_name, separator="..")) is None: if (eos_peer_status := get_value(peer_cluster, peer.peer_name, separator="..")) is None:
self.result.is_failure(f"{peer.peer_name} is not present") self.result.is_failure(f"{peer.peer_name} - Not present")
continue continue
# Validate the registration state of the peer # Validate the registration state of the peer
if (peer_reg_state := eos_peer_status.get("registrationState")) != peer.registration_state: if (peer_reg_state := eos_peer_status.get("registrationState")) != peer.registration_state:
self.result.is_failure(f"{peer.peer_name} registration state is not complete: {peer_reg_state}") self.result.is_failure(f"{peer.peer_name} - Invalid registration state - Expected: {peer.registration_state} Actual: {peer_reg_state}")
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to field notices tests.""" """Module related to field notices tests."""
@ -96,7 +96,7 @@ class VerifyFieldNotice44Resolution(AntaTest):
for variant in variants: for variant in variants:
model = model.replace(variant, "") model = model.replace(variant, "")
if model not in devices: if model not in devices:
self.result.is_skipped("device is not impacted by FN044") self.result.is_skipped("Device is not impacted by FN044")
return return
for component in command_output["details"]["components"]: for component in command_output["details"]["components"]:
@ -117,7 +117,7 @@ class VerifyFieldNotice44Resolution(AntaTest):
) )
) )
if incorrect_aboot_version: if incorrect_aboot_version:
self.result.is_failure(f"device is running incorrect version of aboot ({aboot_version})") self.result.is_failure(f"Device is running incorrect version of aboot {aboot_version}")
class VerifyFieldNotice72Resolution(AntaTest): class VerifyFieldNotice72Resolution(AntaTest):
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the flow tracking tests.""" """Module related to the flow tracking tests."""
@ -9,37 +9,13 @@ from __future__ import annotations
from typing import ClassVar from typing import ClassVar
from pydantic import BaseModel
from anta.decorators import skip_on_platforms from anta.decorators import skip_on_platforms
from anta.input_models.flow_tracking import FlowTracker
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import get_failed_logs from anta.tools import get_value
def validate_record_export(record_export: dict[str, str], tracker_info: dict[str, str]) -> str: def validate_exporters(exporters: list[dict[str, str]], tracker_info: dict[str, str]) -> list[str]:
"""Validate the record export configuration against the tracker info.
Parameters
----------
record_export
The expected record export configuration.
tracker_info
The actual tracker info from the command output.
Returns
-------
str
A failure message if the record export configuration does not match, otherwise blank string.
"""
failed_log = ""
actual_export = {"inactive timeout": tracker_info.get("inactiveTimeout"), "interval": tracker_info.get("activeInterval")}
expected_export = {"inactive timeout": record_export.get("on_inactive_timeout"), "interval": record_export.get("on_interval")}
if actual_export != expected_export:
failed_log = get_failed_logs(expected_export, actual_export)
return failed_log
def validate_exporters(exporters: list[dict[str, str]], tracker_info: dict[str, str]) -> str:
"""Validate the exporter configurations against the tracker info. """Validate the exporter configurations against the tracker info.
Parameters Parameters
@ -51,36 +27,52 @@ def validate_exporters(exporters: list[dict[str, str]], tracker_info: dict[str,
Returns Returns
------- -------
str list
Failure message if any exporter configuration does not match. List of failure messages for any exporter configuration that does not match.
""" """
failed_log = "" failure_messages = []
for exporter in exporters: for exporter in exporters:
exporter_name = exporter["name"] exporter_name = exporter.name
actual_exporter_info = tracker_info["exporters"].get(exporter_name) actual_exporter_info = tracker_info["exporters"].get(exporter_name)
if not actual_exporter_info: if not actual_exporter_info:
failed_log += f"\nExporter `{exporter_name}` is not configured." failure_messages.append(f"{exporter} - Not configured")
continue continue
local_interface = actual_exporter_info["localIntf"]
template_interval = actual_exporter_info["templateInterval"]
expected_exporter_data = {"local interface": exporter["local_interface"], "template interval": exporter["template_interval"]} if local_interface != exporter.local_interface:
actual_exporter_data = {"local interface": actual_exporter_info["localIntf"], "template interval": actual_exporter_info["templateInterval"]} failure_messages.append(f"{exporter} - Incorrect local interface - Expected: {exporter.local_interface} Actual: {local_interface}")
if expected_exporter_data != actual_exporter_data: if template_interval != exporter.template_interval:
failed_msg = get_failed_logs(expected_exporter_data, actual_exporter_data) failure_messages.append(f"{exporter} - Incorrect template interval - Expected: {exporter.template_interval} Actual: {template_interval}")
failed_log += f"\nExporter `{exporter_name}`: {failed_msg}" return failure_messages
return failed_log
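A hypothetical call showing the shapes validate_exporters compares. The exporter input object and the EOS keys (localIntf, templateInterval) mirror the fields read above; the dataclass, names, and values are made up for illustration:

from dataclasses import dataclass

@dataclass
class ExporterInput:
    """Stand-in for the exporter input model consumed by validate_exporters."""
    name: str
    local_interface: str
    template_interval: int

    def __str__(self) -> str:
        return f"Exporter: {self.name}"

tracker_info = {"exporters": {"CV-TELEMETRY": {"localIntf": "Loopback0", "templateInterval": 3600000}}}
exporters = [
    ExporterInput("CV-TELEMETRY", "Loopback0", 3600000),   # matches -> no failure message
    ExporterInput("CVP-EXPORTER", "Loopback10", 3600000),  # absent  -> "Exporter: CVP-EXPORTER - Not configured"
]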
class VerifyHardwareFlowTrackerStatus(AntaTest): class VerifyHardwareFlowTrackerStatus(AntaTest):
"""Verifies if hardware flow tracking is running and an input tracker is active. """Verifies the hardware flow tracking state.
This test optionally verifies the tracker interval/timeout and exporter configuration. This test performs the following checks:
1. Confirms that hardware flow tracking is running.
2. For each specified flow tracker:
- Confirms that the tracker is active.
- Optionally, checks the tracker interval/timeout configuration.
- Optionally, verifies the tracker exporter configuration.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if hardware flow tracking is running and an input tracker is active. * Success: The test will pass if all of the following conditions are met:
* Failure: The test will fail if hardware flow tracking is not running, an input tracker is not active, - Hardware flow tracking is running.
or the tracker interval/timeout and exporter configuration does not match the expected values. - For each specified flow tracker:
- The flow tracker is active.
- The tracker interval/timeout matches the expected values, if provided.
- The exporter configuration matches the expected values, if provided.
* Failure: The test will fail if any of the following conditions are met:
- Hardware flow tracking is not running.
- For any specified flow tracker:
- The flow tracker is not active.
- The tracker interval/timeout does not match the expected values, if provided.
- The exporter configuration does not match the expected values, if provided.
Examples Examples
-------- --------
@ -99,11 +91,8 @@ class VerifyHardwareFlowTrackerStatus(AntaTest):
``` ```
""" """
description = (
"Verifies if hardware flow tracking is running and an input tracker is active. Optionally verifies the tracker interval/timeout and exporter configuration."
)
categories: ClassVar[list[str]] = ["flow tracking"] categories: ClassVar[list[str]] = ["flow tracking"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show flow tracking hardware tracker {name}", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show flow tracking hardware", revision=1)]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyHardwareFlowTrackerStatus test.""" """Input model for the VerifyHardwareFlowTrackerStatus test."""
@ -111,82 +100,42 @@ class VerifyHardwareFlowTrackerStatus(AntaTest):
trackers: list[FlowTracker] trackers: list[FlowTracker]
"""List of flow trackers to verify.""" """List of flow trackers to verify."""
class FlowTracker(BaseModel):
"""Detail of a flow tracker."""
name: str
"""Name of the flow tracker."""
record_export: RecordExport | None = None
"""Record export configuration for the flow tracker."""
exporters: list[Exporter] | None = None
"""List of exporters for the flow tracker."""
class RecordExport(BaseModel):
"""Record export configuration."""
on_inactive_timeout: int
"""Timeout in milliseconds for exporting records when inactive."""
on_interval: int
"""Interval in milliseconds for exporting records."""
class Exporter(BaseModel):
"""Detail of an exporter."""
name: str
"""Name of the exporter."""
local_interface: str
"""Local interface used by the exporter."""
template_interval: int
"""Template interval in milliseconds for the exporter."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for each hardware tracker."""
return [template.render(name=tracker.name) for tracker in self.inputs.trackers]
@skip_on_platforms(["cEOSLab", "vEOS-lab"]) @skip_on_platforms(["cEOSLab", "vEOS-lab"])
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyHardwareFlowTrackerStatus.""" """Main test function for VerifyHardwareFlowTrackerStatus."""
self.result.is_success() self.result.is_success()
for command, tracker_input in zip(self.instance_commands, self.inputs.trackers):
hardware_tracker_name = command.params.name
record_export = tracker_input.record_export.model_dump() if tracker_input.record_export else None
exporters = [exporter.model_dump() for exporter in tracker_input.exporters] if tracker_input.exporters else None
command_output = command.json_output
# Check if hardware flow tracking is configured command_output = self.instance_commands[0].json_output
if not command_output.get("running"): # Check if hardware flow tracking is configured
self.result.is_failure("Hardware flow tracking is not running.") if not command_output.get("running"):
return self.result.is_failure("Hardware flow tracking is not running.")
return
for tracker in self.inputs.trackers:
# Check if the input hardware tracker is configured # Check if the input hardware tracker is configured
tracker_info = command_output["trackers"].get(hardware_tracker_name) if not (tracker_info := get_value(command_output["trackers"], f"{tracker.name}")):
if not tracker_info: self.result.is_failure(f"{tracker} - Not found")
self.result.is_failure(f"Hardware flow tracker `{hardware_tracker_name}` is not configured.")
continue continue
# Check if the input hardware tracker is active # Check if the input hardware tracker is active
if not tracker_info.get("active"): if not tracker_info.get("active"):
self.result.is_failure(f"Hardware flow tracker `{hardware_tracker_name}` is not active.") self.result.is_failure(f"{tracker} - Disabled")
continue continue
# Check the input hardware tracker timeouts # Check the input hardware tracker timeouts
failure_msg = "" if tracker.record_export:
if record_export: inactive_interval = tracker.record_export.on_inactive_timeout
record_export_failure = validate_record_export(record_export, tracker_info) on_interval = tracker.record_export.on_interval
if record_export_failure: act_inactive = tracker_info.get("inactiveTimeout")
failure_msg += record_export_failure act_interval = tracker_info.get("activeInterval")
if not all([inactive_interval == act_inactive, on_interval == act_interval]):
self.result.is_failure(
f"{tracker} {tracker.record_export} - Incorrect timers - Inactive Timeout: {act_inactive} OnActive Interval: {act_interval}"
)
# Check the input hardware tracker exporters' configuration # Check the input hardware tracker exporters configuration
if exporters: if tracker.exporters:
exporters_failure = validate_exporters(exporters, tracker_info) failure_messages = validate_exporters(tracker.exporters, tracker_info)
if exporters_failure: for message in failure_messages:
failure_msg += exporters_failure self.result.is_failure(f"{tracker} {message}")
if failure_msg:
self.result.is_failure(f"{hardware_tracker_name}: {failure_msg}\n")
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to GreenT (Postcard Telemetry) tests.""" """Module related to GreenT (Postcard Telemetry) tests."""
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the hardware or environment tests.""" """Module related to the hardware or environment tests."""
@ -49,14 +49,14 @@ class VerifyTransceiversManufacturers(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyTransceiversManufacturers.""" """Main test function for VerifyTransceiversManufacturers."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
wrong_manufacturers = { for interface, value in command_output["xcvrSlots"].items():
interface: value["mfgName"] for interface, value in command_output["xcvrSlots"].items() if value["mfgName"] not in self.inputs.manufacturers if value["mfgName"] not in self.inputs.manufacturers:
} self.result.is_failure(
if not wrong_manufacturers: f"Interface: {interface} - Transceiver is from unapproved manufacturers - Expected: {', '.join(self.inputs.manufacturers)}"
self.result.is_success() f" Actual: {value['mfgName']}"
else: )
self.result.is_failure(f"Some transceivers are from unapproved manufacturers: {wrong_manufacturers}")
class VerifyTemperature(AntaTest): class VerifyTemperature(AntaTest):
@ -82,12 +82,11 @@ class VerifyTemperature(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyTemperature.""" """Main test function for VerifyTemperature."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
temperature_status = command_output.get("systemStatus", "") temperature_status = command_output.get("systemStatus", "")
if temperature_status == "temperatureOk": if temperature_status != "temperatureOk":
self.result.is_success() self.result.is_failure(f"Device temperature exceeds acceptable limits - Expected: temperatureOk Actual: {temperature_status}")
else:
self.result.is_failure(f"Device temperature exceeds acceptable limits. Current system status: '{temperature_status}'")
class VerifyTransceiversTemperature(AntaTest): class VerifyTransceiversTemperature(AntaTest):
@ -113,20 +112,14 @@ class VerifyTransceiversTemperature(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyTransceiversTemperature.""" """Main test function for VerifyTransceiversTemperature."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
sensors = command_output.get("tempSensors", "") sensors = command_output.get("tempSensors", "")
wrong_sensors = { for sensor in sensors:
sensor["name"]: { if sensor["hwStatus"] != "ok":
"hwStatus": sensor["hwStatus"], self.result.is_failure(f"Sensor: {sensor['name']} - Invalid hardware state - Expected: ok Actual: {sensor['hwStatus']}")
"alertCount": sensor["alertCount"], if sensor["alertCount"] != 0:
} self.result.is_failure(f"Sensor: {sensor['name']} - Incorrect alert counter - Expected: 0 Actual: {sensor['alertCount']}")
for sensor in sensors
if sensor["hwStatus"] != "ok" or sensor["alertCount"] != 0
}
if not wrong_sensors:
self.result.is_success()
else:
self.result.is_failure(f"The following sensors are operating outside the acceptable temperature range or have raised alerts: {wrong_sensors}")
class VerifyEnvironmentSystemCooling(AntaTest): class VerifyEnvironmentSystemCooling(AntaTest):
@ -156,7 +149,7 @@ class VerifyEnvironmentSystemCooling(AntaTest):
sys_status = command_output.get("systemStatus", "") sys_status = command_output.get("systemStatus", "")
self.result.is_success() self.result.is_success()
if sys_status != "coolingOk": if sys_status != "coolingOk":
self.result.is_failure(f"Device system cooling is not OK: '{sys_status}'") self.result.is_failure(f"Device system cooling status invalid - Expected: coolingOk Actual: {sys_status}")
class VerifyEnvironmentCooling(AntaTest): class VerifyEnvironmentCooling(AntaTest):
@ -177,8 +170,6 @@ class VerifyEnvironmentCooling(AntaTest):
``` ```
""" """
name = "VerifyEnvironmentCooling"
description = "Verifies the status of power supply fans and all fan trays."
categories: ClassVar[list[str]] = ["hardware"] categories: ClassVar[list[str]] = ["hardware"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show system environment cooling", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show system environment cooling", revision=1)]
@ -198,12 +189,16 @@ class VerifyEnvironmentCooling(AntaTest):
for power_supply in command_output.get("powerSupplySlots", []): for power_supply in command_output.get("powerSupplySlots", []):
for fan in power_supply.get("fans", []): for fan in power_supply.get("fans", []):
if (state := fan["status"]) not in self.inputs.states: if (state := fan["status"]) not in self.inputs.states:
self.result.is_failure(f"Fan {fan['label']} on PowerSupply {power_supply['label']} is: '{state}'") self.result.is_failure(
f"Power Slot: {power_supply['label']} Fan: {fan['label']} - Invalid state - Expected: {', '.join(self.inputs.states)} Actual: {state}"
)
# Then go through fan trays # Then go through fan trays
for fan_tray in command_output.get("fanTraySlots", []): for fan_tray in command_output.get("fanTraySlots", []):
for fan in fan_tray.get("fans", []): for fan in fan_tray.get("fans", []):
if (state := fan["status"]) not in self.inputs.states: if (state := fan["status"]) not in self.inputs.states:
self.result.is_failure(f"Fan {fan['label']} on Fan Tray {fan_tray['label']} is: '{state}'") self.result.is_failure(
f"Fan Tray: {fan_tray['label']} Fan: {fan['label']} - Invalid state - Expected: {', '.join(self.inputs.states)} Actual: {state}"
)
class VerifyEnvironmentPower(AntaTest): class VerifyEnvironmentPower(AntaTest):
@ -237,19 +232,16 @@ class VerifyEnvironmentPower(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyEnvironmentPower.""" """Main test function for VerifyEnvironmentPower."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
power_supplies = command_output.get("powerSupplies", "{}") power_supplies = command_output.get("powerSupplies", "{}")
wrong_power_supplies = { for power_supply, value in dict(power_supplies).items():
powersupply: {"state": value["state"]} for powersupply, value in dict(power_supplies).items() if value["state"] not in self.inputs.states if (state := value["state"]) not in self.inputs.states:
} self.result.is_failure(f"Power Slot: {power_supply} - Invalid power supplies state - Expected: {', '.join(self.inputs.states)} Actual: {state}")
if not wrong_power_supplies:
self.result.is_success()
else:
self.result.is_failure(f"The following power supplies status are not in the accepted states list: {wrong_power_supplies}")
class VerifyAdverseDrops(AntaTest): class VerifyAdverseDrops(AntaTest):
"""Verifies there are no adverse drops on DCS-7280 and DCS-7500 family switches (Arad/Jericho chips). """Verifies there are no adverse drops on DCS-7280 and DCS-7500 family switches.
Expected Results Expected Results
---------------- ----------------
@ -264,7 +256,6 @@ class VerifyAdverseDrops(AntaTest):
``` ```
""" """
description = "Verifies there are no adverse drops on DCS-7280 and DCS-7500 family switches."
categories: ClassVar[list[str]] = ["hardware"] categories: ClassVar[list[str]] = ["hardware"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show hardware counter drop", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show hardware counter drop", revision=1)]
@ -272,9 +263,8 @@ class VerifyAdverseDrops(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyAdverseDrops.""" """Main test function for VerifyAdverseDrops."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
total_adverse_drop = command_output.get("totalAdverseDrops", "") total_adverse_drop = command_output.get("totalAdverseDrops", "")
if total_adverse_drop == 0: if total_adverse_drop != 0:
self.result.is_success() self.result.is_failure(f"Incorrect total adverse drops counter - Expected: 0 Actual: {total_adverse_drop}")
else:
self.result.is_failure(f"Device totalAdverseDrops counter is: '{total_adverse_drop}'")
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the device interfaces tests.""" """Module related to the device interfaces tests."""
@ -8,21 +8,22 @@
from __future__ import annotations from __future__ import annotations
import re import re
from ipaddress import IPv4Interface from typing import ClassVar, TypeVar
from typing import Any, ClassVar
from pydantic import BaseModel, Field from pydantic import Field, field_validator
from pydantic_extra_types.mac_address import MacAddress from pydantic_extra_types.mac_address import MacAddress
from anta import GITHUB_SUGGESTION from anta.custom_types import Interface, Percent, PositiveInteger
from anta.custom_types import EthernetInterface, Interface, Percent, PositiveInteger
from anta.decorators import skip_on_platforms from anta.decorators import skip_on_platforms
from anta.input_models.interfaces import InterfaceState from anta.input_models.interfaces import InterfaceDetail, InterfaceState
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import custom_division, format_data, get_failed_logs, get_item, get_value from anta.tools import custom_division, format_data, get_item, get_value
BPS_GBPS_CONVERSIONS = 1000000000 BPS_GBPS_CONVERSIONS = 1000000000
# Using a TypeVar for the InterfaceState model since mypy thinks it's a ClassVar and not a valid type when used in field validators
T = TypeVar("T", bound=InterfaceState)
class VerifyInterfaceUtilization(AntaTest): class VerifyInterfaceUtilization(AntaTest):
"""Verifies that the utilization of interfaces is below a certain threshold. """Verifies that the utilization of interfaces is below a certain threshold.
@ -60,8 +61,8 @@ class VerifyInterfaceUtilization(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyInterfaceUtilization.""" """Main test function for VerifyInterfaceUtilization."""
self.result.is_success()
duplex_full = "duplexFull" duplex_full = "duplexFull"
failed_interfaces: dict[str, dict[str, float]] = {}
rates = self.instance_commands[0].json_output rates = self.instance_commands[0].json_output
interfaces = self.instance_commands[1].json_output interfaces = self.instance_commands[1].json_output
@ -77,15 +78,13 @@ class VerifyInterfaceUtilization(AntaTest):
self.logger.debug("Interface %s has been ignored due to null bandwidth value", intf) self.logger.debug("Interface %s has been ignored due to null bandwidth value", intf)
continue continue
# If one or more interfaces have a usage above the threshold, test fails.
for bps_rate in ("inBpsRate", "outBpsRate"): for bps_rate in ("inBpsRate", "outBpsRate"):
usage = rate[bps_rate] / bandwidth * 100 usage = rate[bps_rate] / bandwidth * 100
if usage > self.inputs.threshold: if usage > self.inputs.threshold:
failed_interfaces.setdefault(intf, {})[bps_rate] = usage self.result.is_failure(
f"Interface: {intf} BPS Rate: {bps_rate} - Usage exceeds the threshold - Expected: < {self.inputs.threshold}% Actual: {usage}%"
if not failed_interfaces: )
self.result.is_success()
else:
self.result.is_failure(f"The following interfaces have a usage > {self.inputs.threshold}%: {failed_interfaces}")
class VerifyInterfaceErrors(AntaTest): class VerifyInterfaceErrors(AntaTest):
@ -110,15 +109,12 @@ class VerifyInterfaceErrors(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyInterfaceErrors.""" """Main test function for VerifyInterfaceErrors."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
wrong_interfaces: list[dict[str, dict[str, int]]] = []
for interface, counters in command_output["interfaceErrorCounters"].items(): for interface, counters in command_output["interfaceErrorCounters"].items():
if any(value > 0 for value in counters.values()) and all(interface not in wrong_interface for wrong_interface in wrong_interfaces): counters_data = [f"{counter}: {value}" for counter, value in counters.items() if value > 0]
wrong_interfaces.append({interface: counters}) if counters_data:
if not wrong_interfaces: self.result.is_failure(f"Interface: {interface} - Non-zero error counter(s) - {', '.join(counters_data)}")
self.result.is_success()
else:
self.result.is_failure(f"The following interface(s) have non-zero error counters: {wrong_interfaces}")
class VerifyInterfaceDiscards(AntaTest): class VerifyInterfaceDiscards(AntaTest):
@ -143,14 +139,12 @@ class VerifyInterfaceDiscards(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyInterfaceDiscards.""" """Main test function for VerifyInterfaceDiscards."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
wrong_interfaces: list[dict[str, dict[str, int]]] = [] for interface, interface_data in command_output["interfaces"].items():
for interface, outer_v in command_output["interfaces"].items(): counters_data = [f"{counter}: {value}" for counter, value in interface_data.items() if value > 0]
wrong_interfaces.extend({interface: outer_v} for value in outer_v.values() if value > 0) if counters_data:
if not wrong_interfaces: self.result.is_failure(f"Interface: {interface} - Non-zero discard counter(s): {', '.join(counters_data)}")
self.result.is_success()
else:
self.result.is_failure(f"The following interfaces have non 0 discard counter(s): {wrong_interfaces}")
class VerifyInterfaceErrDisabled(AntaTest): class VerifyInterfaceErrDisabled(AntaTest):
@ -175,12 +169,11 @@ class VerifyInterfaceErrDisabled(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyInterfaceErrDisabled.""" """Main test function for VerifyInterfaceErrDisabled."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
errdisabled_interfaces = [interface for interface, value in command_output["interfaceStatuses"].items() if value["linkStatus"] == "errdisabled"] for interface, value in command_output["interfaceStatuses"].items():
if errdisabled_interfaces: if value["linkStatus"] == "errdisabled":
self.result.is_failure(f"The following interfaces are in error disabled state: {errdisabled_interfaces}") self.result.is_failure(f"Interface: {interface} - Link status Error disabled")
else:
self.result.is_success()
class VerifyInterfacesStatus(AntaTest): class VerifyInterfacesStatus(AntaTest):
@ -226,6 +219,16 @@ class VerifyInterfacesStatus(AntaTest):
"""List of interfaces with their expected state.""" """List of interfaces with their expected state."""
InterfaceState: ClassVar[type[InterfaceState]] = InterfaceState InterfaceState: ClassVar[type[InterfaceState]] = InterfaceState
@field_validator("interfaces")
@classmethod
def validate_interfaces(cls, interfaces: list[T]) -> list[T]:
"""Validate that 'status' field is provided in each interface."""
for interface in interfaces:
if interface.status is None:
msg = f"{interface} 'status' field missing in the input"
raise ValueError(msg)
return interfaces
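A reduced sketch of the validator pattern used here and in the other field_validator hooks of this module; plain pydantic models stand in for the real Input class, only to show that a missing optional field is rejected at input-parsing time:

from typing import Optional
from pydantic import BaseModel, field_validator

class _State(BaseModel):
    name: str
    status: Optional[str] = None

class _Inputs(BaseModel):
    interfaces: list[_State]

    @field_validator("interfaces")
    @classmethod
    def validate_interfaces(cls, interfaces: list[_State]) -> list[_State]:
        for interface in interfaces:
            if interface.status is None:
                msg = f"{interface.name} 'status' field missing in the input"
                raise ValueError(msg)
        return interfaces

_Inputs(interfaces=[{"name": "Ethernet2", "status": "up"}])  # accepted
# _Inputs(interfaces=[{"name": "Ethernet2"}])                # would raise a ValidationError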
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyInterfacesStatus.""" """Main test function for VerifyInterfacesStatus."""
@ -242,16 +245,16 @@ class VerifyInterfacesStatus(AntaTest):
# If line protocol status is provided, prioritize checking against both status and line protocol status # If line protocol status is provided, prioritize checking against both status and line protocol status
if interface.line_protocol_status: if interface.line_protocol_status:
if interface.status != status or interface.line_protocol_status != proto: if any([interface.status != status, interface.line_protocol_status != proto]):
actual_state = f"Expected: {interface.status}/{interface.line_protocol_status}, Actual: {status}/{proto}" actual_state = f"Expected: {interface.status}/{interface.line_protocol_status}, Actual: {status}/{proto}"
self.result.is_failure(f"{interface.name} - {actual_state}") self.result.is_failure(f"{interface.name} - Status mismatch - {actual_state}")
# If line protocol status is not provided and interface status is "up", expect both status and proto to be "up" # If line protocol status is not provided and interface status is "up", expect both status and proto to be "up"
# If interface status is not "up", check only the interface status without considering line protocol status # If interface status is not "up", check only the interface status without considering line protocol status
elif interface.status == "up" and (status != "up" or proto != "up"): elif all([interface.status == "up", status != "up" or proto != "up"]):
self.result.is_failure(f"{interface.name} - Expected: up/up, Actual: {status}/{proto}") self.result.is_failure(f"{interface.name} - Status mismatch - Expected: up/up, Actual: {status}/{proto}")
elif interface.status != status: elif interface.status != status:
self.result.is_failure(f"{interface.name} - Expected: {interface.status}, Actual: {status}") self.result.is_failure(f"{interface.name} - Status mismatch - Expected: {interface.status}, Actual: {status}")
class VerifyStormControlDrops(AntaTest): class VerifyStormControlDrops(AntaTest):
@ -278,16 +281,15 @@ class VerifyStormControlDrops(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyStormControlDrops.""" """Main test function for VerifyStormControlDrops."""
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
storm_controlled_interfaces: dict[str, dict[str, Any]] = {} storm_controlled_interfaces = []
self.result.is_success()
for interface, interface_dict in command_output["interfaces"].items(): for interface, interface_dict in command_output["interfaces"].items():
for traffic_type, traffic_type_dict in interface_dict["trafficTypes"].items(): for traffic_type, traffic_type_dict in interface_dict["trafficTypes"].items():
if "drop" in traffic_type_dict and traffic_type_dict["drop"] != 0: if "drop" in traffic_type_dict and traffic_type_dict["drop"] != 0:
storm_controlled_interface_dict = storm_controlled_interfaces.setdefault(interface, {}) storm_controlled_interfaces.append(f"{traffic_type}: {traffic_type_dict['drop']}")
storm_controlled_interface_dict.update({traffic_type: traffic_type_dict["drop"]}) if storm_controlled_interfaces:
if not storm_controlled_interfaces: self.result.is_failure(f"Interface: {interface} - Non-zero storm-control drop counter(s) - {', '.join(storm_controlled_interfaces)}")
self.result.is_success()
else:
self.result.is_failure(f"The following interfaces have none 0 storm-control drop counters {storm_controlled_interfaces}")
class VerifyPortChannels(AntaTest): class VerifyPortChannels(AntaTest):
@ -312,15 +314,12 @@ class VerifyPortChannels(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyPortChannels.""" """Main test function for VerifyPortChannels."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
po_with_inactive_ports: list[dict[str, str]] = [] for port_channel, port_channel_details in command_output["portChannels"].items():
for portchannel, portchannel_dict in command_output["portChannels"].items(): # Verify that the no inactive ports in all port channels.
if len(portchannel_dict["inactivePorts"]) != 0: if inactive_ports := port_channel_details["inactivePorts"]:
po_with_inactive_ports.extend({portchannel: portchannel_dict["inactivePorts"]}) self.result.is_failure(f"{port_channel} - Inactive port(s) - {', '.join(inactive_ports.keys())}")
if not po_with_inactive_ports:
self.result.is_success()
else:
self.result.is_failure(f"The following port-channels have inactive port(s): {po_with_inactive_ports}")
class VerifyIllegalLACP(AntaTest): class VerifyIllegalLACP(AntaTest):
@ -345,16 +344,13 @@ class VerifyIllegalLACP(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyIllegalLACP.""" """Main test function for VerifyIllegalLACP."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
po_with_illegal_lacp: list[dict[str, dict[str, int]]] = [] for port_channel, port_channel_dict in command_output["portChannels"].items():
for portchannel, portchannel_dict in command_output["portChannels"].items(): for interface, interface_details in port_channel_dict["interfaces"].items():
po_with_illegal_lacp.extend( # Verify that the no illegal LACP packets in all port channels.
{portchannel: interface} for interface, interface_dict in portchannel_dict["interfaces"].items() if interface_dict["illegalRxCount"] != 0 if interface_details["illegalRxCount"] != 0:
) self.result.is_failure(f"{port_channel} Interface: {interface} - Illegal LACP packets found")
if not po_with_illegal_lacp:
self.result.is_success()
else:
self.result.is_failure(f"The following port-channels have received illegal LACP packets on the following ports: {po_with_illegal_lacp}")
class VerifyLoopbackCount(AntaTest): class VerifyLoopbackCount(AntaTest):
@ -387,23 +383,20 @@ class VerifyLoopbackCount(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyLoopbackCount.""" """Main test function for VerifyLoopbackCount."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
loopback_count = 0 loopback_count = 0
down_loopback_interfaces = [] for interface, interface_details in command_output["interfaces"].items():
for interface in command_output["interfaces"]:
interface_dict = command_output["interfaces"][interface]
if "Loopback" in interface: if "Loopback" in interface:
loopback_count += 1 loopback_count += 1
if not (interface_dict["lineProtocolStatus"] == "up" and interface_dict["interfaceStatus"] == "connected"): if (status := interface_details["lineProtocolStatus"]) != "up":
down_loopback_interfaces.append(interface) self.result.is_failure(f"Interface: {interface} - Invalid line protocol status - Expected: up Actual: {status}")
if loopback_count == self.inputs.number and len(down_loopback_interfaces) == 0:
self.result.is_success() if (status := interface_details["interfaceStatus"]) != "connected":
else: self.result.is_failure(f"Interface: {interface} - Invalid interface status - Expected: connected Actual: {status}")
self.result.is_failure()
if loopback_count != self.inputs.number: if loopback_count != self.inputs.number:
self.result.is_failure(f"Found {loopback_count} Loopbacks when expecting {self.inputs.number}") self.result.is_failure(f"Loopback interface(s) count mismatch: Expected {self.inputs.number} Actual: {loopback_count}")
elif len(down_loopback_interfaces) != 0: # pragma: no branch
self.result.is_failure(f"The following Loopbacks are not up: {down_loopback_interfaces}")
class VerifySVI(AntaTest): class VerifySVI(AntaTest):
@ -428,16 +421,13 @@ class VerifySVI(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySVI.""" """Main test function for VerifySVI."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
down_svis = [] for interface, int_data in command_output["interfaces"].items():
for interface in command_output["interfaces"]: if "Vlan" in interface and (status := int_data["lineProtocolStatus"]) != "up":
interface_dict = command_output["interfaces"][interface] self.result.is_failure(f"SVI: {interface} - Invalid line protocol status - Expected: up Actual: {status}")
if "Vlan" in interface and not (interface_dict["lineProtocolStatus"] == "up" and interface_dict["interfaceStatus"] == "connected"): if "Vlan" in interface and int_data["interfaceStatus"] != "connected":
down_svis.append(interface) self.result.is_failure(f"SVI: {interface} - Invalid interface status - Expected: connected Actual: {int_data['interfaceStatus']}")
if len(down_svis) == 0:
self.result.is_success()
else:
self.result.is_failure(f"The following SVIs are not up: {down_svis}")
class VerifyL3MTU(AntaTest): class VerifyL3MTU(AntaTest):
@ -482,8 +472,7 @@ class VerifyL3MTU(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyL3MTU.""" """Main test function for VerifyL3MTU."""
# Parameter to save incorrect interface settings self.result.is_success()
wrong_l3mtu_intf: list[dict[str, int]] = []
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Set list of interfaces with specific settings # Set list of interfaces with specific settings
specific_interfaces: list[str] = [] specific_interfaces: list[str] = []
@ -493,18 +482,18 @@ class VerifyL3MTU(AntaTest):
for interface, values in command_output["interfaces"].items(): for interface, values in command_output["interfaces"].items():
if re.findall(r"[a-z]+", interface, re.IGNORECASE)[0] not in self.inputs.ignored_interfaces and values["forwardingModel"] == "routed": if re.findall(r"[a-z]+", interface, re.IGNORECASE)[0] not in self.inputs.ignored_interfaces and values["forwardingModel"] == "routed":
if interface in specific_interfaces: if interface in specific_interfaces:
wrong_l3mtu_intf.extend({interface: values["mtu"]} for custom_data in self.inputs.specific_mtu if values["mtu"] != custom_data[interface]) invalid_mtu = next(
(values["mtu"] for custom_data in self.inputs.specific_mtu if values["mtu"] != (expected_mtu := custom_data[interface])), None
)
if invalid_mtu:
self.result.is_failure(f"Interface: {interface} - Incorrect MTU - Expected: {expected_mtu} Actual: {invalid_mtu}")
# Comparison with generic setting # Comparison with generic setting
elif values["mtu"] != self.inputs.mtu: elif values["mtu"] != self.inputs.mtu:
wrong_l3mtu_intf.append({interface: values["mtu"]}) self.result.is_failure(f"Interface: {interface} - Incorrect MTU - Expected: {self.inputs.mtu} Actual: {values['mtu']}")
if wrong_l3mtu_intf:
self.result.is_failure(f"Some interfaces do not have correct MTU configured:\n{wrong_l3mtu_intf}")
else:
self.result.is_success()
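How the prefix extraction used in the test above feeds the ignored_interfaces comparison; the interface names are illustrative:

import re

for name in ("Ethernet1/1", "Port-Channel42", "Management0", "Loopback0"):
    prefix = re.findall(r"[a-z]+", name, re.IGNORECASE)[0]
    print(name, "->", prefix)
# Ethernet1/1 -> Ethernet, Port-Channel42 -> Port, Management0 -> Management, Loopback0 -> Loopback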
class VerifyIPProxyARP(AntaTest): class VerifyIPProxyARP(AntaTest):
"""Verifies if Proxy-ARP is enabled for the provided list of interface(s). """Verifies if Proxy ARP is enabled.
Expected Results Expected Results
---------------- ----------------
@ -522,32 +511,28 @@ class VerifyIPProxyARP(AntaTest):
``` ```
""" """
description = "Verifies if Proxy ARP is enabled."
categories: ClassVar[list[str]] = ["interfaces"] categories: ClassVar[list[str]] = ["interfaces"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show ip interface {intf}", revision=2)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ip interface", revision=2)]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyIPProxyARP test.""" """Input model for the VerifyIPProxyARP test."""
interfaces: list[str] interfaces: list[Interface]
"""List of interfaces to be tested.""" """List of interfaces to be tested."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for each interface in the input list."""
return [template.render(intf=intf) for intf in self.inputs.interfaces]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyIPProxyARP.""" """Main test function for VerifyIPProxyARP."""
disabled_intf = [] self.result.is_success()
for command in self.instance_commands: command_output = self.instance_commands[0].json_output
intf = command.params.intf
if not command.json_output["interfaces"][intf]["proxyArp"]: for interface in self.inputs.interfaces:
disabled_intf.append(intf) if (interface_detail := get_value(command_output["interfaces"], f"{interface}", separator="..")) is None:
if disabled_intf: self.result.is_failure(f"Interface: {interface} - Not found")
self.result.is_failure(f"The following interface(s) have Proxy-ARP disabled: {disabled_intf}") continue
else:
self.result.is_success() if not interface_detail["proxyArp"]:
self.result.is_failure(f"Interface: {interface} - Proxy-ARP disabled")
class VerifyL2MTU(AntaTest): class VerifyL2MTU(AntaTest):
@ -586,36 +571,29 @@ class VerifyL2MTU(AntaTest):
"""Default MTU we should have configured on all non-excluded interfaces. Defaults to 9214.""" """Default MTU we should have configured on all non-excluded interfaces. Defaults to 9214."""
ignored_interfaces: list[str] = Field(default=["Management", "Loopback", "Vxlan", "Tunnel"]) ignored_interfaces: list[str] = Field(default=["Management", "Loopback", "Vxlan", "Tunnel"])
"""A list of L2 interfaces to ignore. Defaults to ["Management", "Loopback", "Vxlan", "Tunnel"]""" """A list of L2 interfaces to ignore. Defaults to ["Management", "Loopback", "Vxlan", "Tunnel"]"""
specific_mtu: list[dict[str, int]] = Field(default=[]) specific_mtu: list[dict[Interface, int]] = Field(default=[])
"""A list of dictionary of L2 interfaces with their specific MTU configured""" """A list of dictionary of L2 interfaces with their specific MTU configured"""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyL2MTU.""" """Main test function for VerifyL2MTU."""
# Parameter to save incorrect interface settings self.result.is_success()
wrong_l2mtu_intf: list[dict[str, int]] = [] interface_output = self.instance_commands[0].json_output["interfaces"]
command_output = self.instance_commands[0].json_output specific_interfaces = {key: value for details in self.inputs.specific_mtu for key, value in details.items()}
# Set list of interfaces with specific settings
specific_interfaces: list[str] = [] for interface, details in interface_output.items():
if self.inputs.specific_mtu:
for d in self.inputs.specific_mtu:
specific_interfaces.extend(d)
for interface, values in command_output["interfaces"].items():
catch_interface = re.findall(r"^[e,p][a-zA-Z]+[-,a-zA-Z]*\d+\/*\d*", interface, re.IGNORECASE) catch_interface = re.findall(r"^[e,p][a-zA-Z]+[-,a-zA-Z]*\d+\/*\d*", interface, re.IGNORECASE)
if len(catch_interface) and catch_interface[0] not in self.inputs.ignored_interfaces and values["forwardingModel"] == "bridged": if catch_interface and catch_interface[0] not in self.inputs.ignored_interfaces and details["forwardingModel"] == "bridged":
if interface in specific_interfaces: if interface in specific_interfaces:
wrong_l2mtu_intf.extend({interface: values["mtu"]} for custom_data in self.inputs.specific_mtu if values["mtu"] != custom_data[interface]) if (mtu := specific_interfaces[interface]) != (act_mtu := details["mtu"]):
# Comparison with generic setting self.result.is_failure(f"Interface: {interface} - Incorrect MTU configured - Expected: {mtu} Actual: {act_mtu}")
elif values["mtu"] != self.inputs.mtu:
wrong_l2mtu_intf.append({interface: values["mtu"]}) elif (act_mtu := details["mtu"]) != self.inputs.mtu:
if wrong_l2mtu_intf: self.result.is_failure(f"Interface: {interface} - Incorrect MTU configured - Expected: {self.inputs.mtu} Actual: {act_mtu}")
self.result.is_failure(f"Some L2 interfaces do not have correct MTU configured:\n{wrong_l2mtu_intf}")
else:
self.result.is_success()
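What the L2 interface pattern used in the test above accepts and rejects; sample names only:

import re

L2_INTERFACE_RE = r"^[e,p][a-zA-Z]+[-,a-zA-Z]*\d+\/*\d*"  # same pattern as in the test
for name in ("Ethernet1/1", "Port-Channel5", "Management1", "Vlan10"):
    print(name, bool(re.findall(L2_INTERFACE_RE, name, re.IGNORECASE)))
# Ethernet1/1 True, Port-Channel5 True, Management1 False, Vlan10 False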
class VerifyInterfaceIPv4(AntaTest): class VerifyInterfaceIPv4(AntaTest):
"""Verifies if an interface is configured with a correct primary and list of optional secondary IPv4 addresses. """Verifies the interface IPv4 addresses.
Expected Results Expected Results
---------------- ----------------
@ -636,83 +614,61 @@ class VerifyInterfaceIPv4(AntaTest):
``` ```
""" """
description = "Verifies the interface IPv4 addresses."
categories: ClassVar[list[str]] = ["interfaces"] categories: ClassVar[list[str]] = ["interfaces"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show ip interface {interface}", revision=2)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ip interface", revision=2)]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyInterfaceIPv4 test.""" """Input model for the VerifyInterfaceIPv4 test."""
interfaces: list[InterfaceDetail] interfaces: list[InterfaceState]
"""List of interfaces with their details.""" """List of interfaces with their details."""
InterfaceDetail: ClassVar[type[InterfaceDetail]] = InterfaceDetail
class InterfaceDetail(BaseModel): @field_validator("interfaces")
"""Model for an interface detail.""" @classmethod
def validate_interfaces(cls, interfaces: list[T]) -> list[T]:
name: Interface """Validate that 'primary_ip' field is provided in each interface."""
"""Name of the interface.""" for interface in interfaces:
primary_ip: IPv4Interface if interface.primary_ip is None:
"""Primary IPv4 address in CIDR notation.""" msg = f"{interface} 'primary_ip' field missing in the input"
secondary_ips: list[IPv4Interface] | None = None raise ValueError(msg)
"""Optional list of secondary IPv4 addresses in CIDR notation.""" return interfaces
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for each interface in the input list."""
return [template.render(interface=interface.name) for interface in self.inputs.interfaces]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyInterfaceIPv4.""" """Main test function for VerifyInterfaceIPv4."""
self.result.is_success() self.result.is_success()
for command in self.instance_commands: command_output = self.instance_commands[0].json_output
intf = command.params.interface
for interface in self.inputs.interfaces: for interface in self.inputs.interfaces:
if interface.name == intf: if (interface_detail := get_value(command_output["interfaces"], f"{interface.name}", separator="..")) is None:
input_interface_detail = interface self.result.is_failure(f"{interface} - Not found")
break
else:
self.result.is_failure(f"Could not find `{intf}` in the input interfaces. {GITHUB_SUGGESTION}")
continue continue
input_primary_ip = str(input_interface_detail.primary_ip) if (ip_address := get_value(interface_detail, "interfaceAddress.primaryIp")) is None:
failed_messages = [] self.result.is_failure(f"{interface} - IP address is not configured")
# Check if the interface has an IP address configured
if not (interface_output := get_value(command.json_output, f"interfaces.{intf}.interfaceAddress")):
self.result.is_failure(f"For interface `{intf}`, IP address is not configured.")
continue continue
primary_ip = get_value(interface_output, "primaryIp")
# Combine IP address and subnet for primary IP # Combine IP address and subnet for primary IP
actual_primary_ip = f"{primary_ip['address']}/{primary_ip['maskLen']}" actual_primary_ip = f"{ip_address['address']}/{ip_address['maskLen']}"
# Check if the primary IP address matches the input # Check if the primary IP address matches the input
if actual_primary_ip != input_primary_ip: if actual_primary_ip != str(interface.primary_ip):
failed_messages.append(f"The expected primary IP address is `{input_primary_ip}`, but the actual primary IP address is `{actual_primary_ip}`.") self.result.is_failure(f"{interface} - IP address mismatch - Expected: {interface.primary_ip} Actual: {actual_primary_ip}")
if (param_secondary_ips := input_interface_detail.secondary_ips) is not None: if interface.secondary_ips:
input_secondary_ips = sorted([str(network) for network in param_secondary_ips]) if not (secondary_ips := get_value(interface_detail, "interfaceAddress.secondaryIpsOrderedList")):
secondary_ips = get_value(interface_output, "secondaryIpsOrderedList") self.result.is_failure(f"{interface} - Secondary IP address is not configured")
continue
# Combine IP address and subnet for secondary IPs
actual_secondary_ips = sorted([f"{secondary_ip['address']}/{secondary_ip['maskLen']}" for secondary_ip in secondary_ips]) actual_secondary_ips = sorted([f"{secondary_ip['address']}/{secondary_ip['maskLen']}" for secondary_ip in secondary_ips])
input_secondary_ips = sorted([str(ip) for ip in interface.secondary_ips])
# Check if the secondary IP address is configured if actual_secondary_ips != input_secondary_ips:
if not actual_secondary_ips: self.result.is_failure(
failed_messages.append( f"{interface} - Secondary IP address mismatch - Expected: {', '.join(input_secondary_ips)} Actual: {', '.join(actual_secondary_ips)}"
f"The expected secondary IP addresses are `{input_secondary_ips}`, but the actual secondary IP address is not configured."
) )
# Check if the secondary IP addresses match the input
elif actual_secondary_ips != input_secondary_ips:
failed_messages.append(
f"The expected secondary IP addresses are `{input_secondary_ips}`, but the actual secondary IP addresses are `{actual_secondary_ips}`."
)
if failed_messages:
self.result.is_failure(f"For interface `{intf}`, " + " ".join(failed_messages))
class VerifyIpVirtualRouterMac(AntaTest): class VerifyIpVirtualRouterMac(AntaTest):
"""Verifies the IP virtual router MAC address. """Verifies the IP virtual router MAC address.
@ -743,13 +699,10 @@ class VerifyIpVirtualRouterMac(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyIpVirtualRouterMac.""" """Main test function for VerifyIpVirtualRouterMac."""
self.result.is_success()
command_output = self.instance_commands[0].json_output["virtualMacs"] command_output = self.instance_commands[0].json_output["virtualMacs"]
mac_address_found = get_item(command_output, "macAddress", self.inputs.mac_address) if get_item(command_output, "macAddress", self.inputs.mac_address) is None:
self.result.is_failure(f"IP virtual router MAC address: {self.inputs.mac_address} - Not configured")
if mac_address_found is None:
self.result.is_failure(f"IP virtual router MAC address `{self.inputs.mac_address}` is not configured.")
else:
self.result.is_success()
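The simplified virtual-router check relies on a first-match search over a list of dictionaries. A minimal sketch of a `get_item`-style helper, assuming the production helper in `anta.tools` offers additional options; the payload below is illustrative:

```python
from __future__ import annotations

from typing import Any


def get_item(items: list[dict[str, Any]], key: str, value: Any) -> dict[str, Any] | None:
    """Return the first dictionary in `items` whose `key` equals `value`, else None."""
    return next((item for item in items if item.get(key) == value), None)


# Illustrative `show ip virtual-router` style payload.
virtual_macs = [{"macAddress": "00:1c:73:00:dc:01"}, {"macAddress": "00:00:00:00:00:00"}]
assert get_item(virtual_macs, "macAddress", "00:1c:73:00:dc:01") is not None
assert get_item(virtual_macs, "macAddress", "aa:bb:cc:dd:ee:ff") is None
```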
class VerifyInterfacesSpeed(AntaTest): class VerifyInterfacesSpeed(AntaTest):
@ -788,20 +741,19 @@ class VerifyInterfacesSpeed(AntaTest):
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Inputs for the VerifyInterfacesSpeed test.""" """Inputs for the VerifyInterfacesSpeed test."""
interfaces: list[InterfaceDetail] interfaces: list[InterfaceState]
"""List of interfaces to be tested""" """List of interfaces with their expected state."""
InterfaceDetail: ClassVar[type[InterfaceDetail]] = InterfaceDetail
class InterfaceDetail(BaseModel): @field_validator("interfaces")
"""Detail of an interface.""" @classmethod
def validate_interfaces(cls, interfaces: list[T]) -> list[T]:
name: EthernetInterface """Validate that 'speed' field is provided in each interface."""
"""The name of the interface.""" for interface in interfaces:
auto: bool if interface.speed is None:
"""The auto-negotiation status of the interface.""" msg = f"{interface} 'speed' field missing in the input"
speed: float = Field(ge=1, le=1000) raise ValueError(msg)
"""The speed of the interface in Gigabits per second. Valid range is 1 to 1000.""" return interfaces
lanes: None | int = Field(None, ge=1, le=8)
"""The number of lanes in the interface. Valid range is 1 to 8. This field is optional."""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
@ -811,40 +763,27 @@ class VerifyInterfacesSpeed(AntaTest):
# Iterate over all the interfaces # Iterate over all the interfaces
for interface in self.inputs.interfaces: for interface in self.inputs.interfaces:
intf = interface.name if (interface_detail := get_value(command_output["interfaces"], f"{interface.name}", separator="..")) is None:
self.result.is_failure(f"{interface} - Not found")
# Check if interface exists
if not (interface_output := get_value(command_output, f"interfaces.{intf}")):
self.result.is_failure(f"Interface `{intf}` is not found.")
continue continue
auto_negotiation = interface_output.get("autoNegotiate") # Verifies the bandwidth
actual_lanes = interface_output.get("lanes") if (speed := interface_detail.get("bandwidth")) != interface.speed * BPS_GBPS_CONVERSIONS:
self.result.is_failure(
f"{interface} - Bandwidth mismatch - Expected: {interface.speed}Gbps Actual: {custom_division(speed, BPS_GBPS_CONVERSIONS)}Gbps"
)
# Collecting actual interface details # Verifies the duplex mode
actual_interface_output = { if (duplex := interface_detail.get("duplex")) != "duplexFull":
"auto negotiation": auto_negotiation if interface.auto is True else None, self.result.is_failure(f"{interface} - Duplex mode mismatch - Expected: duplexFull Actual: {duplex}")
"duplex mode": interface_output.get("duplex"),
"speed": interface_output.get("bandwidth"),
"lanes": actual_lanes if interface.lanes is not None else None,
}
# Forming expected interface details # Verifies the auto-negotiation as success if specified
expected_interface_output = { if interface.auto and (auto_negotiation := interface_detail.get("autoNegotiate")) != "success":
"auto negotiation": "success" if interface.auto is True else None, self.result.is_failure(f"{interface} - Auto-negotiation mismatch - Expected: success Actual: {auto_negotiation}")
"duplex mode": "duplexFull",
"speed": interface.speed * BPS_GBPS_CONVERSIONS,
"lanes": interface.lanes,
}
# Forming failure message # Verifies the communication lanes if specified
if actual_interface_output != expected_interface_output: if interface.lanes and (lanes := interface_detail.get("lanes")) != interface.lanes:
for output in [actual_interface_output, expected_interface_output]: self.result.is_failure(f"{interface} - Data lanes count mismatch - Expected: {interface.lanes} Actual: {lanes}")
# Convert speed to Gbps for readability
if output["speed"] is not None:
output["speed"] = f"{custom_division(output['speed'], BPS_GBPS_CONVERSIONS)}Gbps"
failed_log = get_failed_logs(expected_interface_output, actual_interface_output)
self.result.is_failure(f"For interface {intf}:{failed_log}\n")
class VerifyLACPInterfacesStatus(AntaTest): class VerifyLACPInterfacesStatus(AntaTest):
@ -891,6 +830,16 @@ class VerifyLACPInterfacesStatus(AntaTest):
"""List of interfaces with their expected state.""" """List of interfaces with their expected state."""
InterfaceState: ClassVar[type[InterfaceState]] = InterfaceState InterfaceState: ClassVar[type[InterfaceState]] = InterfaceState
@field_validator("interfaces")
@classmethod
def validate_interfaces(cls, interfaces: list[T]) -> list[T]:
"""Validate that 'portchannel' field is provided in each interface."""
for interface in interfaces:
if interface.portchannel is None:
msg = f"{interface} 'portchannel' field missing in the input"
raise ValueError(msg)
return interfaces
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyLACPInterfacesStatus.""" """Main test function for VerifyLACPInterfacesStatus."""

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to LANZ tests.""" """Module related to LANZ tests."""

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the EOS various logging tests. """Module related to the EOS various logging tests.
@ -14,13 +14,13 @@ import re
from ipaddress import IPv4Address from ipaddress import IPv4Address
from typing import TYPE_CHECKING, ClassVar from typing import TYPE_CHECKING, ClassVar
from anta.models import AntaCommand, AntaTest from anta.custom_types import LogSeverityLevel
from anta.input_models.logging import LoggingQuery
from anta.models import AntaCommand, AntaTemplate, AntaTest
if TYPE_CHECKING: if TYPE_CHECKING:
import logging import logging
from anta.models import AntaTemplate
def _get_logging_states(logger: logging.Logger, command_output: str) -> str: def _get_logging_states(logger: logging.Logger, command_output: str) -> str:
"""Parse `show logging` output and gets operational logging states used in the tests in this module. """Parse `show logging` output and gets operational logging states used in the tests in this module.
@ -43,6 +43,35 @@ def _get_logging_states(logger: logging.Logger, command_output: str) -> str:
return log_states return log_states
class VerifySyslogLogging(AntaTest):
"""Verifies if syslog logging is enabled.
Expected Results
----------------
* Success: The test will pass if syslog logging is enabled.
* Failure: The test will fail if syslog logging is disabled.
Examples
--------
```yaml
anta.tests.logging:
- VerifySyslogLogging:
```
"""
categories: ClassVar[list[str]] = ["logging"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show logging", ofmt="text")]
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySyslogLogging."""
self.result.is_success()
log_output = self.instance_commands[0].text_output
if "Syslog logging: enabled" not in _get_logging_states(self.logger, log_output):
self.result.is_failure("Syslog logging is disabled")
class VerifyLoggingPersistent(AntaTest): class VerifyLoggingPersistent(AntaTest):
"""Verifies if logging persistent is enabled and logs are saved in flash. """Verifies if logging persistent is enabled and logs are saved in flash.
@ -117,7 +146,7 @@ class VerifyLoggingSourceIntf(AntaTest):
if re.search(pattern, _get_logging_states(self.logger, output)): if re.search(pattern, _get_logging_states(self.logger, output)):
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"Source-interface '{self.inputs.interface}' is not configured in VRF {self.inputs.vrf}") self.result.is_failure(f"Source-interface: {self.inputs.interface} VRF: {self.inputs.vrf} - Not configured")
class VerifyLoggingHosts(AntaTest): class VerifyLoggingHosts(AntaTest):
@ -164,7 +193,7 @@ class VerifyLoggingHosts(AntaTest):
if not not_configured: if not not_configured:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"Syslog servers {not_configured} are not configured in VRF {self.inputs.vrf}") self.result.is_failure(f"Syslog servers {', '.join(not_configured)} are not configured in VRF {self.inputs.vrf}")
class VerifyLoggingLogsGeneration(AntaTest): class VerifyLoggingLogsGeneration(AntaTest):
@ -172,35 +201,43 @@ class VerifyLoggingLogsGeneration(AntaTest):
This test performs the following checks: This test performs the following checks:
1. Sends a test log message at the **informational** level 1. Sends a test log message at the specified severity log level.
2. Retrieves the most recent logs (last 30 seconds) 2. Retrieves the most recent logs (last 30 seconds).
3. Verifies that the test message was successfully logged 3. Verifies that the test message was successfully logged.
!!! warning
EOS logging buffer should be set to severity level `informational` or higher for this test to work.
Expected Results Expected Results
---------------- ----------------
* Success: If logs are being generated and the test message is found in recent logs. * Success: If logs are being generated and the test message is found in recent logs.
* Failure: If any of the following occur: * Failure: If any of the following occur:
- The test message is not found in recent logs - The test message is not found in recent logs.
- The logging system is not capturing new messages - The logging system is not capturing new messages.
- No logs are being generated - No logs are being generated.
Examples Examples
-------- --------
```yaml ```yaml
anta.tests.logging: anta.tests.logging:
- VerifyLoggingLogsGeneration: - VerifyLoggingLogsGeneration:
severity_level: informational
``` ```
""" """
categories: ClassVar[list[str]] = ["logging"] categories: ClassVar[list[str]] = ["logging"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [
AntaCommand(command="send log level informational message ANTA VerifyLoggingLogsGeneration validation", ofmt="text"), AntaTemplate(template="send log level {severity_level} message ANTA VerifyLoggingLogsGeneration validation", ofmt="text"),
AntaCommand(command="show logging informational last 30 seconds | grep ANTA", ofmt="text", use_cache=False), AntaTemplate(template="show logging {severity_level} last 30 seconds | grep ANTA", ofmt="text", use_cache=False),
] ]
class Input(AntaTest.Input):
"""Input model for the VerifyLoggingLogsGeneration test."""
severity_level: LogSeverityLevel = "informational"
"""Log severity level. Defaults to informational."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for log severity level in the input."""
return [template.render(severity_level=self.inputs.severity_level)]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyLoggingLogsGeneration.""" """Main test function for VerifyLoggingLogsGeneration."""
@ -219,37 +256,45 @@ class VerifyLoggingHostname(AntaTest):
This test performs the following checks: This test performs the following checks:
1. Retrieves the device's configured FQDN 1. Retrieves the device's configured FQDN.
2. Sends a test log message at the **informational** level 2. Sends a test log message at the specified severity log level.
3. Retrieves the most recent logs (last 30 seconds) 3. Retrieves the most recent logs (last 30 seconds).
4. Verifies that the test message includes the complete FQDN of the device 4. Verifies that the test message includes the complete FQDN of the device.
!!! warning
EOS logging buffer should be set to severity level `informational` or higher for this test to work.
Expected Results Expected Results
---------------- ----------------
* Success: If logs are generated with the device's complete FQDN. * Success: If logs are generated with the device's complete FQDN.
* Failure: If any of the following occur: * Failure: If any of the following occur:
- The test message is not found in recent logs - The test message is not found in recent logs.
- The log message does not include the device's FQDN - The log message does not include the device's FQDN.
- The FQDN in the log message doesn't match the configured FQDN - The FQDN in the log message doesn't match the configured FQDN.
Examples Examples
-------- --------
```yaml ```yaml
anta.tests.logging: anta.tests.logging:
- VerifyLoggingHostname: - VerifyLoggingHostname:
severity_level: informational
``` ```
""" """
categories: ClassVar[list[str]] = ["logging"] categories: ClassVar[list[str]] = ["logging"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [
AntaCommand(command="show hostname", revision=1), AntaCommand(command="show hostname", revision=1),
AntaCommand(command="send log level informational message ANTA VerifyLoggingHostname validation", ofmt="text"), AntaTemplate(template="send log level {severity_level} message ANTA VerifyLoggingHostname validation", ofmt="text"),
AntaCommand(command="show logging informational last 30 seconds | grep ANTA", ofmt="text", use_cache=False), AntaTemplate(template="show logging {severity_level} last 30 seconds | grep ANTA", ofmt="text", use_cache=False),
] ]
class Input(AntaTest.Input):
"""Input model for the VerifyLoggingHostname test."""
severity_level: LogSeverityLevel = "informational"
"""Log severity level. Defaults to informational."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for log severity level in the input."""
return [template.render(severity_level=self.inputs.severity_level)]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyLoggingHostname.""" """Main test function for VerifyLoggingHostname."""
@ -274,37 +319,45 @@ class VerifyLoggingTimestamp(AntaTest):
This test performs the following checks: This test performs the following checks:
1. Sends a test log message at the **informational** level 1. Sends a test log message at the specified severity log level.
2. Retrieves the most recent logs (last 30 seconds) 2. Retrieves the most recent logs (last 30 seconds).
3. Verifies that the test message is present with a high-resolution RFC3339 timestamp format 3. Verifies that the test message is present with a high-resolution RFC3339 timestamp format.
- Example format: `2024-01-25T15:30:45.123456+00:00` - Example format: `2024-01-25T15:30:45.123456+00:00`.
- Includes microsecond precision - Includes microsecond precision.
- Contains timezone offset - Contains timezone offset.
!!! warning
EOS logging buffer should be set to severity level `informational` or higher for this test to work.
Expected Results Expected Results
---------------- ----------------
* Success: If logs are generated with the correct high-resolution RFC3339 timestamp format. * Success: If logs are generated with the correct high-resolution RFC3339 timestamp format.
* Failure: If any of the following occur: * Failure: If any of the following occur:
- The test message is not found in recent logs - The test message is not found in recent logs.
- The timestamp format does not match the expected RFC3339 format - The timestamp format does not match the expected RFC3339 format.
Examples Examples
-------- --------
```yaml ```yaml
anta.tests.logging: anta.tests.logging:
- VerifyLoggingTimestamp: - VerifyLoggingTimestamp:
severity_level: informational
``` ```
""" """
categories: ClassVar[list[str]] = ["logging"] categories: ClassVar[list[str]] = ["logging"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [
AntaCommand(command="send log level informational message ANTA VerifyLoggingTimestamp validation", ofmt="text"), AntaTemplate(template="send log level {severity_level} message ANTA VerifyLoggingTimestamp validation", ofmt="text"),
AntaCommand(command="show logging informational last 30 seconds | grep ANTA", ofmt="text", use_cache=False), AntaTemplate(template="show logging {severity_level} last 30 seconds | grep ANTA", ofmt="text", use_cache=False),
] ]
class Input(AntaTest.Input):
"""Input model for the VerifyLoggingTimestamp test."""
severity_level: LogSeverityLevel = "informational"
"""Log severity level. Defaults to informational."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for log severity level in the input."""
return [template.render(severity_level=self.inputs.severity_level)]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyLoggingTimestamp.""" """Main test function for VerifyLoggingTimestamp."""
@ -381,3 +434,53 @@ class VerifyLoggingErrors(AntaTest):
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure("Device has reported syslog messages with a severity of ERRORS or higher") self.result.is_failure("Device has reported syslog messages with a severity of ERRORS or higher")
class VerifyLoggingEntries(AntaTest):
"""Verifies that the expected log string is present in the last specified log messages.
Expected Results
----------------
* Success: The test will pass if the expected log string for the mentioned severity level is present in the last specified log messages.
* Failure: The test will fail if the specified log string is not present in the last specified log messages.
Examples
--------
```yaml
anta.tests.logging:
- VerifyLoggingEntries:
logging_entries:
- regex_match: ".ACCOUNTING-5-EXEC: cvpadmin ssh."
last_number_messages: 30
severity_level: alerts
- regex_match: ".SPANTREE-6-INTERFACE_ADD:."
last_number_messages: 10
severity_level: critical
```
"""
categories: ClassVar[list[str]] = ["logging"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [
AntaTemplate(template="show logging {last_number_messages} {severity_level}", ofmt="text", use_cache=False)
]
class Input(AntaTest.Input):
"""Input model for the VerifyLoggingEntries test."""
logging_entries: list[LoggingQuery]
"""List of logging entries and regex match."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for last number messages and log severity level in the input."""
return [template.render(last_number_messages=entry.last_number_messages, severity_level=entry.severity_level) for entry in self.inputs.logging_entries]
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyLoggingEntries."""
self.result.is_success()
for command_output, logging_entry in zip(self.instance_commands, self.inputs.logging_entries):
output = command_output.text_output
if not re.search(logging_entry.regex_match, output):
self.result.is_failure(
f"Pattern: {logging_entry.regex_match} - Not found in last {logging_entry.last_number_messages} {logging_entry.severity_level} log entries"
)
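Once the templated `show logging <count> <severity>` output is collected, each configured entry reduces to a single `re.search` against the text. A sketch using the regex patterns from the YAML example above; the log lines are illustrative:

```python
import re

# Illustrative log lines, shaped like `show logging 30 alerts` text output.
log_output = "\n".join(
    [
        "2025-01-25T15:30:45.123456+00:00 leaf1 Aaa: %ACCOUNTING-5-EXEC: cvpadmin ssh 10.0.0.1 stop task_id=123",
        "2025-01-25T15:31:02.000000+00:00 leaf1 Stp: %SPANTREE-6-INTERFACE_ADD: Interface Ethernet1 added to instance MST0",
    ]
)

# One re.search per configured entry, using the regex patterns from the YAML example above.
assert re.search(r".ACCOUNTING-5-EXEC: cvpadmin ssh.", log_output)
assert re.search(r".SPANTREE-6-INTERFACE_ADD:.", log_output)
```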

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to Multi-chassis Link Aggregation (MLAG) tests.""" """Module related to Multi-chassis Link Aggregation (MLAG) tests."""
@ -22,10 +22,8 @@ class VerifyMlagStatus(AntaTest):
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if the MLAG state is 'active', negotiation status is 'connected', * Success: The test will pass if the MLAG state is 'active', negotiation status is 'connected', peer-link status and local interface status are 'up'.
peer-link status and local interface status are 'up'. * Failure: The test will fail if the MLAG state is not 'active', negotiation status is not 'connected', peer-link status or local interface status are not 'up'.
* Failure: The test will fail if the MLAG state is not 'active', negotiation status is not 'connected',
peer-link status or local interface status are not 'up'.
* Skipped: The test will be skipped if MLAG is 'disabled'. * Skipped: The test will be skipped if MLAG is 'disabled'.
Examples Examples
@ -42,21 +40,25 @@ class VerifyMlagStatus(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyMlagStatus.""" """Main test function for VerifyMlagStatus."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Skipping the test if MLAG is disabled
if command_output["state"] == "disabled": if command_output["state"] == "disabled":
self.result.is_skipped("MLAG is disabled") self.result.is_skipped("MLAG is disabled")
return return
keys_to_verify = ["state", "negStatus", "localIntfStatus", "peerLinkStatus"]
verified_output = {key: get_value(command_output, key) for key in keys_to_verify} # Verifies the negotiation status
if ( if (neg_status := command_output["negStatus"]) != "connected":
verified_output["state"] == "active" self.result.is_failure(f"MLAG negotiation status mismatch - Expected: connected Actual: {neg_status}")
and verified_output["negStatus"] == "connected"
and verified_output["localIntfStatus"] == "up" # Verifies the local interface interface status
and verified_output["peerLinkStatus"] == "up" if (intf_state := command_output["localIntfStatus"]) != "up":
): self.result.is_failure(f"Operational state of the MLAG local interface is not correct - Expected: up Actual: {intf_state}")
self.result.is_success()
else: # Verifies the peerLinkStatus
self.result.is_failure(f"MLAG status is not OK: {verified_output}") if (peer_link_state := command_output["peerLinkStatus"]) != "up":
self.result.is_failure(f"Operational state of the MLAG peer link is not correct - Expected: up Actual: {peer_link_state}")
class VerifyMlagInterfaces(AntaTest): class VerifyMlagInterfaces(AntaTest):
@ -82,14 +84,19 @@ class VerifyMlagInterfaces(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyMlagInterfaces.""" """Main test function for VerifyMlagInterfaces."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Skipping the test if MLAG is disabled
if command_output["state"] == "disabled": if command_output["state"] == "disabled":
self.result.is_skipped("MLAG is disabled") self.result.is_skipped("MLAG is disabled")
return return
if command_output["mlagPorts"]["Inactive"] == 0 and command_output["mlagPorts"]["Active-partial"] == 0:
self.result.is_success() # Verifies the Inactive and Active-partial ports
else: inactive_ports = command_output["mlagPorts"]["Inactive"]
self.result.is_failure(f"MLAG status is not OK: {command_output['mlagPorts']}") partial_active_ports = command_output["mlagPorts"]["Active-partial"]
if inactive_ports != 0 or partial_active_ports != 0:
self.result.is_failure(f"MLAG status is not ok - Inactive Ports: {inactive_ports} Partial Active Ports: {partial_active_ports}")
class VerifyMlagConfigSanity(AntaTest): class VerifyMlagConfigSanity(AntaTest):
@ -116,16 +123,21 @@ class VerifyMlagConfigSanity(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyMlagConfigSanity.""" """Main test function for VerifyMlagConfigSanity."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Skipping the test if MLAG is disabled
if command_output["mlagActive"] is False: if command_output["mlagActive"] is False:
self.result.is_skipped("MLAG is disabled") self.result.is_skipped("MLAG is disabled")
return return
keys_to_verify = ["globalConfiguration", "interfaceConfiguration"]
verified_output = {key: get_value(command_output, key) for key in keys_to_verify} # Verifies the globalConfiguration config-sanity
if not any(verified_output.values()): if get_value(command_output, "globalConfiguration"):
self.result.is_success() self.result.is_failure("MLAG config-sanity found in global configuration")
else:
self.result.is_failure(f"MLAG config-sanity returned inconsistencies: {verified_output}") # Verifies the interfaceConfiguration config-sanity
if get_value(command_output, "interfaceConfiguration"):
self.result.is_failure("MLAG config-sanity found in interface configuration")
class VerifyMlagReloadDelay(AntaTest): class VerifyMlagReloadDelay(AntaTest):
@ -161,17 +173,21 @@ class VerifyMlagReloadDelay(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyMlagReloadDelay.""" """Main test function for VerifyMlagReloadDelay."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Skipping the test if MLAG is disabled
if command_output["state"] == "disabled": if command_output["state"] == "disabled":
self.result.is_skipped("MLAG is disabled") self.result.is_skipped("MLAG is disabled")
return return
keys_to_verify = ["reloadDelay", "reloadDelayNonMlag"]
verified_output = {key: get_value(command_output, key) for key in keys_to_verify}
if verified_output["reloadDelay"] == self.inputs.reload_delay and verified_output["reloadDelayNonMlag"] == self.inputs.reload_delay_non_mlag:
self.result.is_success()
else: # Verifies the reloadDelay
self.result.is_failure(f"The reload-delay parameters are not configured properly: {verified_output}") if (reload_delay := get_value(command_output, "reloadDelay")) != self.inputs.reload_delay:
self.result.is_failure(f"MLAG reload-delay mismatch - Expected: {self.inputs.reload_delay}s Actual: {reload_delay}s")
# Verifies the reloadDelayNonMlag
if (non_mlag_reload_delay := get_value(command_output, "reloadDelayNonMlag")) != self.inputs.reload_delay_non_mlag:
self.result.is_failure(f"Delay for non-MLAG ports mismatch - Expected: {self.inputs.reload_delay_non_mlag}s Actual: {non_mlag_reload_delay}s")
class VerifyMlagDualPrimary(AntaTest): class VerifyMlagDualPrimary(AntaTest):
@ -214,25 +230,37 @@ class VerifyMlagDualPrimary(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyMlagDualPrimary.""" """Main test function for VerifyMlagDualPrimary."""
self.result.is_success()
errdisabled_action = "errdisableAllInterfaces" if self.inputs.errdisabled else "none" errdisabled_action = "errdisableAllInterfaces" if self.inputs.errdisabled else "none"
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Skipping the test if MLAG is disabled
if command_output["state"] == "disabled": if command_output["state"] == "disabled":
self.result.is_skipped("MLAG is disabled") self.result.is_skipped("MLAG is disabled")
return return
# Verifies the dualPrimaryDetectionState
if command_output["dualPrimaryDetectionState"] == "disabled": if command_output["dualPrimaryDetectionState"] == "disabled":
self.result.is_failure("Dual-primary detection is disabled") self.result.is_failure("Dual-primary detection is disabled")
return return
keys_to_verify = ["detail.dualPrimaryDetectionDelay", "detail.dualPrimaryAction", "dualPrimaryMlagRecoveryDelay", "dualPrimaryNonMlagRecoveryDelay"]
verified_output = {key: get_value(command_output, key) for key in keys_to_verify} # Verifies the dualPrimaryAction
if ( if (primary_action := get_value(command_output, "detail.dualPrimaryAction")) != errdisabled_action:
verified_output["detail.dualPrimaryDetectionDelay"] == self.inputs.detection_delay self.result.is_failure(f"Dual-primary action mismatch - Expected: {errdisabled_action} Actual: {primary_action}")
and verified_output["detail.dualPrimaryAction"] == errdisabled_action
and verified_output["dualPrimaryMlagRecoveryDelay"] == self.inputs.recovery_delay # Verifies the dualPrimaryDetectionDelay
and verified_output["dualPrimaryNonMlagRecoveryDelay"] == self.inputs.recovery_delay_non_mlag if (detection_delay := get_value(command_output, "detail.dualPrimaryDetectionDelay")) != self.inputs.detection_delay:
): self.result.is_failure(f"Dual-primary detection delay mismatch - Expected: {self.inputs.detection_delay} Actual: {detection_delay}")
self.result.is_success()
else: # Verifies the dualPrimaryMlagRecoveryDelay
self.result.is_failure(f"The dual-primary parameters are not configured properly: {verified_output}") if (recovery_delay := get_value(command_output, "dualPrimaryMlagRecoveryDelay")) != self.inputs.recovery_delay:
self.result.is_failure(f"Dual-primary MLAG recovery delay mismatch - Expected: {self.inputs.recovery_delay} Actual: {recovery_delay}")
# Verifies the dualPrimaryNonMlagRecoveryDelay
if (recovery_delay_non_mlag := get_value(command_output, "dualPrimaryNonMlagRecoveryDelay")) != self.inputs.recovery_delay_non_mlag:
self.result.is_failure(
f"Dual-primary non MLAG recovery delay mismatch - Expected: {self.inputs.recovery_delay_non_mlag} Actual: {recovery_delay_non_mlag}"
)
class VerifyMlagPrimaryPriority(AntaTest): class VerifyMlagPrimaryPriority(AntaTest):
@ -278,10 +306,8 @@ class VerifyMlagPrimaryPriority(AntaTest):
# Check MLAG state # Check MLAG state
if mlag_state != "primary": if mlag_state != "primary":
self.result.is_failure("The device is not set as MLAG primary.") self.result.is_failure("The device is not set as MLAG primary")
# Check primary priority # Check primary priority
if primary_priority != self.inputs.primary_priority: if primary_priority != self.inputs.primary_priority:
self.result.is_failure( self.result.is_failure(f"MLAG primary priority mismatch - Expected: {self.inputs.primary_priority} Actual: {primary_priority}")
f"The primary priority does not match expected. Expected `{self.inputs.primary_priority}`, but found `{primary_priority}` instead.",
)

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to multicast and IGMP tests.""" """Module related to multicast and IGMP tests."""
@ -51,12 +51,12 @@ class VerifyIGMPSnoopingVlans(AntaTest):
self.result.is_success() self.result.is_success()
for vlan, enabled in self.inputs.vlans.items(): for vlan, enabled in self.inputs.vlans.items():
if str(vlan) not in command_output["vlans"]: if str(vlan) not in command_output["vlans"]:
self.result.is_failure(f"Supplied vlan {vlan} is not present on the device.") self.result.is_failure(f"Supplied vlan {vlan} is not present on the device")
continue continue
expected_state = "enabled" if enabled else "disabled"
igmp_state = command_output["vlans"][str(vlan)]["igmpSnoopingState"] igmp_state = command_output["vlans"][str(vlan)]["igmpSnoopingState"]
if igmp_state != "enabled" if enabled else igmp_state != "disabled": if igmp_state != expected_state:
self.result.is_failure(f"IGMP state for vlan {vlan} is {igmp_state}") self.result.is_failure(f"VLAN{vlan} - Incorrect IGMP state - Expected: {expected_state} Actual: {igmp_state}")
class VerifyIGMPSnoopingGlobal(AntaTest): class VerifyIGMPSnoopingGlobal(AntaTest):
@ -91,5 +91,6 @@ class VerifyIGMPSnoopingGlobal(AntaTest):
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
igmp_state = command_output["igmpSnoopingState"] igmp_state = command_output["igmpSnoopingState"]
if igmp_state != "enabled" if self.inputs.enabled else igmp_state != "disabled": expected_state = "enabled" if self.inputs.enabled else "disabled"
self.result.is_failure(f"IGMP state is not valid: {igmp_state}") if igmp_state != expected_state:
self.result.is_failure(f"IGMP state is not valid - Expected: {expected_state} Actual: {igmp_state}")

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Test functions related to various router path-selection settings.""" """Test functions related to various router path-selection settings."""
@ -7,12 +7,10 @@
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Address
from typing import ClassVar from typing import ClassVar
from pydantic import BaseModel
from anta.decorators import skip_on_platforms from anta.decorators import skip_on_platforms
from anta.input_models.path_selection import DpsPath
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import get_value from anta.tools import get_value
@ -50,7 +48,7 @@ class VerifyPathsHealth(AntaTest):
# If no paths are configured for router path-selection, the test fails # If no paths are configured for router path-selection, the test fails
if not command_output: if not command_output:
self.result.is_failure("No path configured for router path-selection.") self.result.is_failure("No path configured for router path-selection")
return return
# Check the state of each path # Check the state of each path
@ -61,25 +59,33 @@ class VerifyPathsHealth(AntaTest):
session = path_data["dpsSessions"]["0"]["active"] session = path_data["dpsSessions"]["0"]["active"]
# If the path state of any path is not 'ipsecEstablished' or 'routeResolved', the test fails # If the path state of any path is not 'ipsecEstablished' or 'routeResolved', the test fails
if path_state not in ["ipsecEstablished", "routeResolved"]: expected_state = ["ipsecEstablished", "routeResolved"]
self.result.is_failure(f"Path state for peer {peer} in path-group {group} is `{path_state}`.") if path_state not in expected_state:
self.result.is_failure(f"Peer: {peer} Path Group: {group} - Invalid path state - Expected: {', '.join(expected_state)} Actual: {path_state}")
# If the telemetry state of any path is inactive, the test fails # If the telemetry state of any path is inactive, the test fails
elif not session: elif not session:
self.result.is_failure(f"Telemetry state for peer {peer} in path-group {group} is `inactive`.") self.result.is_failure(f"Peer: {peer} Path Group {group} - Telemetry state inactive")
class VerifySpecificPath(AntaTest): class VerifySpecificPath(AntaTest):
"""Verifies the path and telemetry state of a specific path for an IPv4 peer under router path-selection. """Verifies the DPS path and telemetry state of an IPv4 peer.
The expected states are 'IPsec established', 'Resolved' for path and 'active' for telemetry. This test performs the following checks:
1. Verifies that the specified peer is configured.
2. Verifies that the specified path group is found.
3. For each specified DPS path:
- Verifies that the source and destination addresses match the expected ones.
- Verifies that the state is `ipsecEstablished` or `routeResolved`.
- Verifies that the telemetry state is `active`.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if the path state under router path-selection is either 'IPsec established' or 'Resolved' * Success: The test will pass if the path state under router path-selection is either 'IPsecEstablished' or 'Resolved'
and telemetry state as 'active'. and telemetry state as 'active'.
* Failure: The test will fail if router path-selection is not configured or if the path state is not 'IPsec established' or 'Resolved', * Failure: The test will fail if router path selection or the peer is not configured or if the path state is not 'IPsec established' or 'Resolved',
or if the telemetry state is 'inactive'. or the telemetry state is 'inactive'.
Examples Examples
-------- --------
@ -95,36 +101,15 @@ class VerifySpecificPath(AntaTest):
""" """
categories: ClassVar[list[str]] = ["path-selection"] categories: ClassVar[list[str]] = ["path-selection"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show path-selection paths", revision=1)]
AntaTemplate(template="show path-selection paths peer {peer} path-group {group} source {source} destination {destination}", revision=1)
]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifySpecificPath test.""" """Input model for the VerifySpecificPath test."""
paths: list[RouterPath] paths: list[DpsPath]
"""List of router paths to verify.""" """List of router paths to verify."""
RouterPath: ClassVar[type[DpsPath]] = DpsPath
class RouterPath(BaseModel): """To maintain backward compatibility."""
"""Detail of a router path."""
peer: IPv4Address
"""Static peer IPv4 address."""
path_group: str
"""Router path group name."""
source_address: IPv4Address
"""Source IPv4 address of path."""
destination_address: IPv4Address
"""Destination IPv4 address of path."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for each router path."""
return [
template.render(peer=path.peer, group=path.path_group, source=path.source_address, destination=path.destination_address) for path in self.inputs.paths
]
@skip_on_platforms(["cEOSLab", "vEOS-lab"]) @skip_on_platforms(["cEOSLab", "vEOS-lab"])
@AntaTest.anta_test @AntaTest.anta_test
@ -132,28 +117,42 @@ class VerifySpecificPath(AntaTest):
"""Main test function for VerifySpecificPath.""" """Main test function for VerifySpecificPath."""
self.result.is_success() self.result.is_success()
# Check the state of each path command_output = self.instance_commands[0].json_output
for command in self.instance_commands:
peer = command.params.peer
path_group = command.params.group
source = command.params.source
destination = command.params.destination
command_output = command.json_output.get("dpsPeers", [])
# If the dpsPeers details are not found in the command output, the test fails.
if not (dps_peers_details := get_value(command_output, "dpsPeers")):
self.result.is_failure("Router path-selection not configured")
return
# Iterating on each DPS peer mentioned in the inputs.
for dps_path in self.inputs.paths:
peer = str(dps_path.peer)
peer_details = dps_peers_details.get(peer, {})
# If the peer is not configured for the path group, the test fails # If the peer is not configured for the path group, the test fails
if not command_output: if not peer_details:
self.result.is_failure(f"Path `peer: {peer} source: {source} destination: {destination}` is not configured for path-group `{path_group}`.") self.result.is_failure(f"{dps_path} - Peer not found")
continue continue
# Extract the state of the path path_group = dps_path.path_group
path_output = get_value(command_output, f"{peer}..dpsGroups..{path_group}..dpsPaths", separator="..") source = str(dps_path.source_address)
path_state = next(iter(path_output.values())).get("state") destination = str(dps_path.destination_address)
session = get_value(next(iter(path_output.values())), "dpsSessions.0.active") path_group_details = get_value(peer_details, f"dpsGroups..{path_group}..dpsPaths", separator="..")
# If the expected path group is not found for the peer, the test fails.
if not path_group_details:
self.result.is_failure(f"{dps_path} - No DPS path found for this peer and path group")
continue
path_data = next((path for path in path_group_details.values() if (path.get("source") == source and path.get("destination") == destination)), None)
# Source and destination address do not match, the test fails.
if not path_data:
self.result.is_failure(f"{dps_path} - No path matching the source and destination found")
continue
path_state = path_data.get("state")
session = get_value(path_data, "dpsSessions.0.active")
# If the state of the path is not 'ipsecEstablished' or 'routeResolved', or the telemetry state is 'inactive', the test fails # If the state of the path is not 'ipsecEstablished' or 'routeResolved', or the telemetry state is 'inactive', the test fails
if path_state not in ["ipsecEstablished", "routeResolved"]: if path_state not in ["ipsecEstablished", "routeResolved"]:
self.result.is_failure(f"Path state for `peer: {peer} source: {source} destination: {destination}` in path-group {path_group} is `{path_state}`.") self.result.is_failure(f"{dps_path} - Invalid state path - Expected: ipsecEstablished, routeResolved Actual: {path_state}")
elif not session: elif not session:
self.result.is_failure( self.result.is_failure(f"{dps_path} - Telemetry state inactive for this path")
f"Telemetry state for path `peer: {peer} source: {source} destination: {destination}` in path-group {path_group} is `inactive`."
)
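`VerifySpecificPath` now collects `show path-selection paths` once and locates the path whose source and destination match the input, instead of templating one command per path. A sketch of that lookup over a trimmed, illustrative payload:

```python
# Trimmed, illustrative structure of one peer/path-group from `show path-selection paths`.
dps_paths = {
    "path3": {"source": "172.18.13.2", "destination": "172.18.15.2", "state": "ipsecEstablished", "dpsSessions": {"0": {"active": True}}},
    "path4": {"source": "172.18.13.2", "destination": "172.18.16.2", "state": "routeResolved", "dpsSessions": {"0": {"active": False}}},
}

source, destination = "172.18.13.2", "172.18.15.2"

# Locate the path whose source/destination match the input, then check state and telemetry.
path_data = next((path for path in dps_paths.values() if path.get("source") == source and path.get("destination") == destination), None)
assert path_data is not None
assert path_data["state"] in ["ipsecEstablished", "routeResolved"]
assert path_data["dpsSessions"]["0"]["active"] is True
```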

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to ASIC profile tests.""" """Module related to ASIC profile tests."""
@ -51,7 +51,7 @@ class VerifyUnifiedForwardingTableMode(AntaTest):
if command_output["uftMode"] == str(self.inputs.mode): if command_output["uftMode"] == str(self.inputs.mode):
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"Device is not running correct UFT mode (expected: {self.inputs.mode} / running: {command_output['uftMode']})") self.result.is_failure(f"Not running the correct UFT mode - Expected: {self.inputs.mode} Actual: {command_output['uftMode']}")
class VerifyTcamProfile(AntaTest): class VerifyTcamProfile(AntaTest):

View file

@ -1,4 +1,4 @@
# Copyright (c) 2024 Arista Networks, Inc. # Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to PTP tests.""" """Module related to PTP tests."""
@ -17,7 +17,7 @@ if TYPE_CHECKING:
class VerifyPtpModeStatus(AntaTest): class VerifyPtpModeStatus(AntaTest):
"""Verifies that the device is configured as a Precision Time Protocol (PTP) Boundary Clock (BC). """Verifies that the device is configured as a PTP Boundary Clock.
Expected Results Expected Results
---------------- ----------------
@ -33,7 +33,6 @@ class VerifyPtpModeStatus(AntaTest):
``` ```
""" """
description = "Verifies that the device is configured as a PTP Boundary Clock."
categories: ClassVar[list[str]] = ["ptp"] categories: ClassVar[list[str]] = ["ptp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)]
@ -48,13 +47,13 @@ class VerifyPtpModeStatus(AntaTest):
return return
if ptp_mode != "ptpBoundaryClock": if ptp_mode != "ptpBoundaryClock":
self.result.is_failure(f"The device is not configured as a PTP Boundary Clock: '{ptp_mode}'") self.result.is_failure(f"Not configured as a PTP Boundary Clock - Actual: {ptp_mode}")
else: else:
self.result.is_success() self.result.is_success()
class VerifyPtpGMStatus(AntaTest): class VerifyPtpGMStatus(AntaTest):
"""Verifies that the device is locked to a valid Precision Time Protocol (PTP) Grandmaster (GM). """Verifies that the device is locked to a valid PTP Grandmaster.
To test PTP failover, re-run the test with a secondary GMID configured. To test PTP failover, re-run the test with a secondary GMID configured.
@ -79,7 +78,6 @@ class VerifyPtpGMStatus(AntaTest):
gmid: str gmid: str
"""Identifier of the Grandmaster to which the device should be locked.""" """Identifier of the Grandmaster to which the device should be locked."""
description = "Verifies that the device is locked to a valid PTP Grandmaster."
categories: ClassVar[list[str]] = ["ptp"] categories: ClassVar[list[str]] = ["ptp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)]
@ -87,22 +85,19 @@ class VerifyPtpGMStatus(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyPtpGMStatus.""" """Main test function for VerifyPtpGMStatus."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
if (ptp_clock_summary := command_output.get("ptpClockSummary")) is None: if (ptp_clock_summary := command_output.get("ptpClockSummary")) is None:
self.result.is_skipped("PTP is not configured") self.result.is_skipped("PTP is not configured")
return return
if ptp_clock_summary["gmClockIdentity"] != self.inputs.gmid: if (act_gmid := ptp_clock_summary["gmClockIdentity"]) != self.inputs.gmid:
self.result.is_failure( self.result.is_failure(f"The device is locked to the incorrect Grandmaster - Expected: {self.inputs.gmid} Actual: {act_gmid}")
f"The device is locked to the following Grandmaster: '{ptp_clock_summary['gmClockIdentity']}', which differ from the expected one.",
)
else:
self.result.is_success()
class VerifyPtpLockStatus(AntaTest): class VerifyPtpLockStatus(AntaTest):
"""Verifies that the device was locked to the upstream Precision Time Protocol (PTP) Grandmaster (GM) in the last minute. """Verifies that the device was locked to the upstream PTP GM in the last minute.
Expected Results Expected Results
---------------- ----------------
@ -118,7 +113,6 @@ class VerifyPtpLockStatus(AntaTest):
``` ```
""" """
description = "Verifies that the device was locked to the upstream PTP GM in the last minute."
categories: ClassVar[list[str]] = ["ptp"] categories: ClassVar[list[str]] = ["ptp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)]
@ -136,13 +130,13 @@ class VerifyPtpLockStatus(AntaTest):
time_difference = ptp_clock_summary["currentPtpSystemTime"] - ptp_clock_summary["lastSyncTime"] time_difference = ptp_clock_summary["currentPtpSystemTime"] - ptp_clock_summary["lastSyncTime"]
if time_difference >= threshold: if time_difference >= threshold:
self.result.is_failure(f"The device lock is more than {threshold}s old: {time_difference}s") self.result.is_failure(f"Lock is more than {threshold}s old - Actual: {time_difference}s")
else: else:
self.result.is_success() self.result.is_success()
class VerifyPtpOffset(AntaTest): class VerifyPtpOffset(AntaTest):
"""Verifies that the Precision Time Protocol (PTP) timing offset is within +/- 1000ns from the master clock. """Verifies that the PTP timing offset is within +/- 1000ns from the master clock.
Expected Results Expected Results
---------------- ----------------
@ -158,7 +152,6 @@ class VerifyPtpOffset(AntaTest):
``` ```
""" """
description = "Verifies that the PTP timing offset is within +/- 1000ns from the master clock."
categories: ClassVar[list[str]] = ["ptp"] categories: ClassVar[list[str]] = ["ptp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp monitor", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp monitor", revision=1)]
@ -167,9 +160,9 @@ class VerifyPtpOffset(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyPtpOffset.""" """Main test function for VerifyPtpOffset."""
threshold = 1000 threshold = 1000
offset_interfaces: dict[str, list[int]] = {} self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
offset_interfaces: dict[str, list[int]] = {}
if not command_output["ptpMonitorData"]: if not command_output["ptpMonitorData"]:
self.result.is_skipped("PTP is not configured") self.result.is_skipped("PTP is not configured")
return return
@ -178,14 +171,12 @@ class VerifyPtpOffset(AntaTest):
if abs(interface["offsetFromMaster"]) > threshold: if abs(interface["offsetFromMaster"]) > threshold:
offset_interfaces.setdefault(interface["intf"], []).append(interface["offsetFromMaster"]) offset_interfaces.setdefault(interface["intf"], []).append(interface["offsetFromMaster"])
if offset_interfaces: for interface, data in offset_interfaces.items():
self.result.is_failure(f"The device timing offset from master is greater than +/- {threshold}ns: {offset_interfaces}") self.result.is_failure(f"Interface: {interface} - Timing offset from master is greater than +/- {threshold}ns: Actual: {', '.join(map(str, data))}")
else:
self.result.is_success()
class VerifyPtpPortModeStatus(AntaTest): class VerifyPtpPortModeStatus(AntaTest):
"""Verifies that all interfaces are in a valid Precision Time Protocol (PTP) state. """Verifies the PTP interfaces state.
The interfaces can be in one of the following states: Master, Slave, Passive, or Disabled. The interfaces can be in one of the following states: Master, Slave, Passive, or Disabled.
@ -202,7 +193,6 @@ class VerifyPtpPortModeStatus(AntaTest):
``` ```
""" """
description = "Verifies the PTP interfaces state."
categories: ClassVar[list[str]] = ["ptp"] categories: ClassVar[list[str]] = ["ptp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ptp", revision=2)]
@ -227,4 +217,4 @@ class VerifyPtpPortModeStatus(AntaTest):
if not invalid_interfaces: if not invalid_interfaces:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"The following interface(s) are not in a valid PTP state: '{invalid_interfaces}'") self.result.is_failure(f"The following interface(s) are not in a valid PTP state: {', '.join(invalid_interfaces)}")

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Package related to routing tests.""" """Package related to routing tests."""

File diff suppressed because it is too large

View file

@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to generic routing tests.""" """Module related to generic routing tests."""
@ -11,12 +11,12 @@ from functools import cache
from ipaddress import IPv4Address, IPv4Interface from ipaddress import IPv4Address, IPv4Interface
from typing import TYPE_CHECKING, ClassVar, Literal from typing import TYPE_CHECKING, ClassVar, Literal
from pydantic import model_validator from pydantic import field_validator, model_validator
from anta.custom_types import PositiveInteger from anta.custom_types import PositiveInteger
from anta.input_models.routing.generic import IPv4Routes from anta.input_models.routing.generic import IPv4Routes
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import get_value from anta.tools import get_item, get_value
if TYPE_CHECKING: if TYPE_CHECKING:
import sys import sys
@ -63,7 +63,7 @@ class VerifyRoutingProtocolModel(AntaTest):
if configured_model == operating_model == self.inputs.model: if configured_model == operating_model == self.inputs.model:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"routing model is misconfigured: configured: {configured_model} - operating: {operating_model} - expected: {self.inputs.model}") self.result.is_failure(f"Routing model is misconfigured - Expected: {self.inputs.model} Actual: {operating_model}")
class VerifyRoutingTableSize(AntaTest): class VerifyRoutingTableSize(AntaTest):
@ -112,7 +112,9 @@ class VerifyRoutingTableSize(AntaTest):
if self.inputs.minimum <= total_routes <= self.inputs.maximum: if self.inputs.minimum <= total_routes <= self.inputs.maximum:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"routing-table has {total_routes} routes and not between min ({self.inputs.minimum}) and maximum ({self.inputs.maximum})") self.result.is_failure(
f"Routing table routes are outside the routes range - Expected: {self.inputs.minimum} <= to >= {self.inputs.maximum} Actual: {total_routes}"
)
class VerifyRoutingTableEntry(AntaTest): class VerifyRoutingTableEntry(AntaTest):
@ -182,16 +184,17 @@ class VerifyRoutingTableEntry(AntaTest):
if not missing_routes: if not missing_routes:
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"The following route(s) are missing from the routing table of VRF {self.inputs.vrf}: {missing_routes}") self.result.is_failure(f"The following route(s) are missing from the routing table of VRF {self.inputs.vrf}: {', '.join(missing_routes)}")
class VerifyIPv4RouteType(AntaTest): class VerifyIPv4RouteType(AntaTest):
"""Verifies the route-type of the IPv4 prefixes. """Verifies the route-type of the IPv4 prefixes.
This test performs the following checks for each IPv4 route: This test performs the following checks for each IPv4 route:
1. Verifies that the specified VRF is configured.
2. Verifies that the specified IPv4 route exists in the configuration. 1. Verifies that the specified VRF is configured.
3. Verifies that the specified IPv4 route is of the expected type. 2. Verifies that the specified IPv4 route exists in the configuration.
3. Verifies that the specified IPv4 route is of the expected type.
Expected Results Expected Results
---------------- ----------------
@ -230,6 +233,17 @@ class VerifyIPv4RouteType(AntaTest):
"""Input model for the VerifyIPv4RouteType test.""" """Input model for the VerifyIPv4RouteType test."""
routes_entries: list[IPv4Routes] routes_entries: list[IPv4Routes]
"""List of IPv4 route(s)."""
@field_validator("routes_entries")
@classmethod
def validate_routes_entries(cls, routes_entries: list[IPv4Routes]) -> list[IPv4Routes]:
"""Validate that 'route_type' field is provided in each BGP route entry."""
for entry in routes_entries:
if entry.route_type is None:
msg = f"{entry} 'route_type' field missing in the input"
raise ValueError(msg)
return routes_entries
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
@ -256,3 +270,82 @@ class VerifyIPv4RouteType(AntaTest):
# Verifying that the specified IPv4 routes are of the expected type. # Verifying that the specified IPv4 routes are of the expected type.
if expected_route_type != (actual_route_type := route_data.get("routeType")): if expected_route_type != (actual_route_type := route_data.get("routeType")):
self.result.is_failure(f"{entry} - Incorrect route type - Expected: {expected_route_type} Actual: {actual_route_type}") self.result.is_failure(f"{entry} - Incorrect route type - Expected: {expected_route_type} Actual: {actual_route_type}")
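For illustration, a minimal standalone sketch of the `field_validator` pattern used above: rejecting input entries that omit a field the test depends on at input-validation time rather than at test runtime. It assumes pydantic v2 is installed; `RouteEntry` and `TestInput` are simplified, hypothetical stand-ins for the real `IPv4Routes` and `Input` models, not ANTA code.

```python
from typing import Optional

from pydantic import BaseModel, ValidationError, field_validator


class RouteEntry(BaseModel):
    """Simplified stand-in for IPv4Routes (illustration only)."""

    prefix: str
    vrf: str = "default"
    route_type: Optional[str] = None


class TestInput(BaseModel):
    """Simplified stand-in for the test's Input model (illustration only)."""

    routes_entries: list[RouteEntry]

    @field_validator("routes_entries")
    @classmethod
    def validate_routes_entries(cls, routes_entries: list[RouteEntry]) -> list[RouteEntry]:
        # Reject entries missing 'route_type' while the inputs are parsed.
        for entry in routes_entries:
            if entry.route_type is None:
                msg = f"{entry} 'route_type' field missing in the input"
                raise ValueError(msg)
        return routes_entries


TestInput(routes_entries=[{"prefix": "10.10.0.1/32", "route_type": "eBGP"}])  # accepted
try:
    TestInput(routes_entries=[{"prefix": "10.10.0.1/32"}])
except ValidationError as exc:
    print(exc)  # reports the missing 'route_type'
```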
class VerifyIPv4RouteNextHops(AntaTest):
"""Verifies the next-hops of the IPv4 prefixes.
This test performs the following checks for each IPv4 prefix:
1. Verifies the specified IPv4 route exists in the routing table.
2. For each specified next-hop:
- Verifies a path with matching next-hop exists.
- Supports `strict: True` to verify that routes must be learned exclusively via the exact next-hops specified.
Expected Results
----------------
* Success: The test will pass if routes exist with paths matching the expected next-hops.
* Failure: The test will fail if:
- A route entry is not found for given IPv4 prefixes.
- A path with specified next-hop is not found.
Examples
--------
```yaml
anta.tests.routing:
generic:
- VerifyIPv4RouteNextHops:
route_entries:
- prefix: 10.10.0.1/32
vrf: default
strict: false
nexthops:
- 10.100.0.8
- 10.100.0.10
```
"""
categories: ClassVar[list[str]] = ["routing"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ip route vrf all", revision=4)]
class Input(AntaTest.Input):
"""Input model for the VerifyIPv4RouteNextHops test."""
route_entries: list[IPv4Routes]
"""List of IPv4 route(s)."""
@field_validator("route_entries")
@classmethod
def validate_route_entries(cls, route_entries: list[IPv4Routes]) -> list[IPv4Routes]:
"""Validate that 'nexthops' field is provided in each route entry."""
for entry in route_entries:
if entry.nexthops is None:
msg = f"{entry} 'nexthops' field missing in the input"
raise ValueError(msg)
return route_entries
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyIPv4RouteNextHops."""
self.result.is_success()
output = self.instance_commands[0].json_output
for entry in self.inputs.route_entries:
# Verify if the prefix exists in route table
if (route_data := get_value(output, f"vrfs..{entry.vrf}..routes..{entry.prefix}", separator="..")) is None:
self.result.is_failure(f"{entry} - prefix not found")
continue
# Verify the nexthop addresses
actual_nexthops = sorted(["Directly connected" if (next_hop := route.get("nexthopAddr")) == "" else next_hop for route in route_data["vias"]])
expected_nexthops = sorted([str(nexthop) for nexthop in entry.nexthops])
if entry.strict and expected_nexthops != actual_nexthops:
exp_nexthops = ", ".join(expected_nexthops)
self.result.is_failure(f"{entry} - List of next-hops not matching - Expected: {exp_nexthops} Actual: {', '.join(actual_nexthops)}")
continue
for nexthop in entry.nexthops:
if not get_item(route_data["vias"], "nexthopAddr", str(nexthop)):
self.result.is_failure(f"{entry} Nexthop: {nexthop} - Route not found")
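For illustration, a standalone sketch (not ANTA code) of the strict versus non-strict next-hop comparison performed above, run against a hand-written route entry; the `vias`/`nexthopAddr` layout is an assumption mimicking the `show ip route` output.

```python
# Hypothetical route entry mimicking one prefix from "show ip route vrf all".
route_data = {
    "vias": [
        {"nexthopAddr": "10.100.0.8", "interface": "Ethernet1"},
        {"nexthopAddr": "10.100.0.10", "interface": "Ethernet2"},
    ]
}
expected = ["10.100.0.8", "10.100.0.11"]

# An empty nexthopAddr means the prefix is directly connected.
actual = sorted(via["nexthopAddr"] or "Directly connected" for via in route_data["vias"])

# strict=True: the route must be learned via exactly the listed next-hops.
strict_ok = sorted(expected) == actual

# strict=False: each listed next-hop must merely be present among the vias.
missing = [nh for nh in expected if nh not in actual]

print(strict_ok)  # False - 10.100.0.10 is extra and 10.100.0.11 is absent
print(missing)    # ['10.100.0.11']
```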


@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to IS-IS tests.""" """Module related to IS-IS tests."""
@ -7,147 +7,23 @@
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from __future__ import annotations from __future__ import annotations
from ipaddress import IPv4Address, IPv4Network from typing import Any, ClassVar
from typing import Any, ClassVar, Literal
from pydantic import BaseModel from pydantic import field_validator
from anta.custom_types import Interface from anta.input_models.routing.isis import Entry, InterfaceCount, InterfaceState, ISISInstance, IsisInstance, ISISInterface, Tunnel, TunnelPath
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import get_value from anta.tools import get_item, get_value
def _count_isis_neighbor(isis_neighbor_json: dict[str, Any]) -> int:
"""Count the number of isis neighbors.
Parameters
----------
isis_neighbor_json
The JSON output of the `show isis neighbors` command.
Returns
-------
int
The number of isis neighbors.
"""
count = 0
for vrf_data in isis_neighbor_json["vrfs"].values():
for instance_data in vrf_data["isisInstances"].values():
count += len(instance_data.get("neighbors", {}))
return count
def _get_not_full_isis_neighbors(isis_neighbor_json: dict[str, Any]) -> list[dict[str, Any]]:
"""Return the isis neighbors whose adjacency state is not `up`.
Parameters
----------
isis_neighbor_json
The JSON output of the `show isis neighbors` command.
Returns
-------
list[dict[str, Any]]
A list of isis neighbors whose adjacency state is not `UP`.
"""
return [
{
"vrf": vrf,
"instance": instance,
"neighbor": adjacency["hostname"],
"state": state,
}
for vrf, vrf_data in isis_neighbor_json["vrfs"].items()
for instance, instance_data in vrf_data.get("isisInstances").items()
for neighbor, neighbor_data in instance_data.get("neighbors").items()
for adjacency in neighbor_data.get("adjacencies")
if (state := adjacency["state"]) != "up"
]
def _get_full_isis_neighbors(isis_neighbor_json: dict[str, Any], neighbor_state: Literal["up", "down"] = "up") -> list[dict[str, Any]]:
"""Return the isis neighbors whose adjacency state is `up`.
Parameters
----------
isis_neighbor_json
The JSON output of the `show isis neighbors` command.
neighbor_state
Value of the neighbor state we are looking for. Defaults to `up`.
Returns
-------
list[dict[str, Any]]
A list of isis neighbors whose adjacency state is not `UP`.
"""
return [
{
"vrf": vrf,
"instance": instance,
"neighbor": adjacency["hostname"],
"neighbor_address": adjacency["routerIdV4"],
"interface": adjacency["interfaceName"],
"state": state,
}
for vrf, vrf_data in isis_neighbor_json["vrfs"].items()
for instance, instance_data in vrf_data.get("isisInstances").items()
for neighbor, neighbor_data in instance_data.get("neighbors").items()
for adjacency in neighbor_data.get("adjacencies")
if (state := adjacency["state"]) == neighbor_state
]
def _get_isis_neighbors_count(isis_neighbor_json: dict[str, Any]) -> list[dict[str, Any]]:
"""Count number of IS-IS neighbor of the device."""
return [
{"vrf": vrf, "interface": interface, "mode": mode, "count": int(level_data["numAdjacencies"]), "level": int(level)}
for vrf, vrf_data in isis_neighbor_json["vrfs"].items()
for instance, instance_data in vrf_data.get("isisInstances").items()
for interface, interface_data in instance_data.get("interfaces").items()
for level, level_data in interface_data.get("intfLevels").items()
if (mode := level_data["passive"]) is not True
]
def _get_interface_data(interface: str, vrf: str, command_output: dict[str, Any]) -> dict[str, Any] | None:
"""Extract data related to an IS-IS interface for testing."""
if (vrf_data := get_value(command_output, f"vrfs.{vrf}")) is None:
return None
for instance_data in vrf_data.get("isisInstances").values():
if (intf_dict := get_value(dictionary=instance_data, key="interfaces")) is not None:
try:
return next(ifl_data for ifl, ifl_data in intf_dict.items() if ifl == interface)
except StopIteration:
return None
return None
def _get_adjacency_segment_data_by_neighbor(neighbor: str, instance: str, vrf: str, command_output: dict[str, Any]) -> dict[str, Any] | None:
"""Extract data related to an IS-IS interface for testing."""
search_path = f"vrfs.{vrf}.isisInstances.{instance}.adjacencySegments"
if get_value(dictionary=command_output, key=search_path, default=None) is None:
return None
isis_instance = get_value(dictionary=command_output, key=search_path, default=None)
return next(
(segment_data for segment_data in isis_instance if neighbor == segment_data["ipAddress"]),
None,
)
class VerifyISISNeighborState(AntaTest): class VerifyISISNeighborState(AntaTest):
"""Verifies all IS-IS neighbors are in UP state. """Verifies the health of IS-IS neighbors.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if all IS-IS neighbors are in UP state. * Success: The test will pass if all IS-IS neighbors are in the `up` state.
* Failure: The test will fail if some IS-IS neighbors are not in UP state. * Failure: The test will fail if any IS-IS neighbor adjacency is down.
* Skipped: The test will be skipped if no IS-IS neighbor is found. * Skipped: The test will be skipped if IS-IS is not configured or no IS-IS neighbor is found.
Examples Examples
-------- --------
@ -155,33 +31,58 @@ class VerifyISISNeighborState(AntaTest):
anta.tests.routing: anta.tests.routing:
isis: isis:
- VerifyISISNeighborState: - VerifyISISNeighborState:
check_all_vrfs: true
``` ```
""" """
categories: ClassVar[list[str]] = ["isis"] categories: ClassVar[list[str]] = ["isis"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis neighbors", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis neighbors vrf all", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifyISISNeighborState test."""
check_all_vrfs: bool = False
"""If enabled, verifies IS-IS instances of all VRFs."""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyISISNeighborState.""" """Main test function for VerifyISISNeighborState."""
command_output = self.instance_commands[0].json_output
if _count_isis_neighbor(command_output) == 0:
self.result.is_skipped("No IS-IS neighbor detected")
return
self.result.is_success() self.result.is_success()
not_full_neighbors = _get_not_full_isis_neighbors(command_output)
if not_full_neighbors: # Verify if IS-IS is configured
self.result.is_failure(f"Some neighbors are not in the correct state (UP): {not_full_neighbors}.") if not (command_output := self.instance_commands[0].json_output["vrfs"]):
self.result.is_skipped("IS-IS not configured")
return
vrfs_to_check = command_output
if not self.inputs.check_all_vrfs:
vrfs_to_check = {"default": command_output["default"]}
no_neighbor = True
for vrf, vrf_data in vrfs_to_check.items():
for isis_instance, instance_data in vrf_data["isisInstances"].items():
neighbors = instance_data["neighbors"]
if not neighbors:
continue
no_neighbor = False
interfaces = [(adj["interfaceName"], adj["state"]) for neighbor in neighbors.values() for adj in neighbor["adjacencies"] if adj["state"] != "up"]
for interface in interfaces:
self.result.is_failure(
f"Instance: {isis_instance} VRF: {vrf} Interface: {interface[0]} - Incorrect adjacency state - Expected: up Actual: {interface[1]}"
)
if no_neighbor:
self.result.is_skipped("No IS-IS neighbor detected")
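For illustration, a standalone sketch (not ANTA code) of the traversal above: walking VRFs, IS-IS instances, neighbors, and adjacencies to collect every adjacency that is not `up`. The nested layout is an assumption mimicking `show isis neighbors vrf all`.

```python
# Hypothetical, trimmed-down "show isis neighbors vrf all" output.
output = {
    "default": {
        "isisInstances": {
            "CORE": {
                "neighbors": {
                    "0001.0001.0001": {
                        "adjacencies": [
                            {"interfaceName": "Ethernet1", "state": "up"},
                            {"interfaceName": "Ethernet2", "state": "down"},
                        ]
                    }
                }
            }
        }
    }
}

bad_adjacencies = [
    (vrf, instance, adj["interfaceName"], adj["state"])
    for vrf, vrf_data in output.items()
    for instance, instance_data in vrf_data["isisInstances"].items()
    for neighbor in instance_data["neighbors"].values()
    for adj in neighbor["adjacencies"]
    if adj["state"] != "up"
]
print(bad_adjacencies)  # [('default', 'CORE', 'Ethernet2', 'down')]
```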
class VerifyISISNeighborCount(AntaTest): class VerifyISISNeighborCount(AntaTest):
"""Verifies number of IS-IS neighbors per level and per interface. """Verifies the number of IS-IS neighbors per interface and level.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if the number of neighbors is correct. * Success: The test will pass if all provided IS-IS interfaces have the expected number of neighbors.
* Failure: The test will fail if the number of neighbors is incorrect. * Failure: The test will fail if any of the provided IS-IS interfaces are not configured or have an incorrect number of neighbors.
* Skipped: The test will be skipped if no IS-IS neighbor is found. * Skipped: The test will be skipped if IS-IS is not configured.
Examples Examples
-------- --------
@ -198,59 +99,54 @@ class VerifyISISNeighborCount(AntaTest):
count: 1 count: 1
- name: Ethernet3 - name: Ethernet3
count: 2 count: 2
# level is set to 2 by default
``` ```
""" """
categories: ClassVar[list[str]] = ["isis"] categories: ClassVar[list[str]] = ["isis"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis interface brief", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis interface brief vrf all", revision=1)]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyISISNeighborCount test.""" """Input model for the VerifyISISNeighborCount test."""
interfaces: list[InterfaceCount] interfaces: list[ISISInterface]
"""list of interfaces with their information.""" """List of IS-IS interfaces with their information."""
InterfaceCount: ClassVar[type[InterfaceCount]] = InterfaceCount
class InterfaceCount(BaseModel):
"""Input model for the VerifyISISNeighborCount test."""
name: Interface
"""Interface name to check."""
level: int = 2
"""IS-IS level to check."""
count: int
"""Number of IS-IS neighbors."""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyISISNeighborCount.""" """Main test function for VerifyISISNeighborCount."""
command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
isis_neighbor_count = _get_isis_neighbors_count(command_output)
if len(isis_neighbor_count) == 0: # Verify if IS-IS is configured
self.result.is_skipped("No IS-IS neighbor detected") if not (command_output := self.instance_commands[0].json_output["vrfs"]):
self.result.is_skipped("IS-IS not configured")
return return
for interface in self.inputs.interfaces: for interface in self.inputs.interfaces:
eos_data = [ifl_data for ifl_data in isis_neighbor_count if ifl_data["interface"] == interface.name and ifl_data["level"] == interface.level] interface_detail = {}
if not eos_data: vrf_instances = get_value(command_output, f"{interface.vrf}..isisInstances", default={}, separator="..")
self.result.is_failure(f"No neighbor detected for interface {interface.name}") for instance_data in vrf_instances.values():
if interface_data := get_value(instance_data, f"interfaces..{interface.name}..intfLevels..{interface.level}", separator=".."):
interface_detail = interface_data
# An interface can only be configured in one IS-IS instance at a time
break
if not interface_detail:
self.result.is_failure(f"{interface} - Not configured")
continue continue
if eos_data[0]["count"] != interface.count:
self.result.is_failure( if interface_detail["passive"] is False and (act_count := interface_detail["numAdjacencies"]) != interface.count:
f"Interface {interface.name}: " self.result.is_failure(f"{interface} - Neighbor count mismatch - Expected: {interface.count} Actual: {act_count}")
f"expected Level {interface.level}: count {interface.count}, "
f"got Level {eos_data[0]['level']}: count {eos_data[0]['count']}"
)
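For illustration, a minimal standalone `get_value`-style helper (not the `anta.tools` implementation) showing why the lookups above use a `..` separator: keys such as interface names, VRF names, and prefixes can themselves contain dots, so a plain `.` split would break the path. The sample output shape is an assumption.

```python
from typing import Any


def get_value(data: dict[str, Any], key: str, default: Any = None, separator: str = ".") -> Any:
    """Walk a nested dictionary following `key` split on `separator`."""
    current: Any = data
    for part in key.split(separator):
        if not isinstance(current, dict) or part not in current:
            return default
        current = current[part]
    return current


# Hypothetical, trimmed-down "show isis interface brief vrf all" output.
output = {
    "PROD": {
        "isisInstances": {
            "CORE": {"interfaces": {"Ethernet1.100": {"intfLevels": {"2": {"numAdjacencies": 1}}}}}
        }
    }
}

# "Ethernet1.100" contains a dot, so ".." keeps it as a single path component.
print(get_value(output, "PROD..isisInstances..CORE..interfaces..Ethernet1.100..intfLevels..2", separator=".."))
# {'numAdjacencies': 1}
```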
class VerifyISISInterfaceMode(AntaTest): class VerifyISISInterfaceMode(AntaTest):
"""Verifies ISIS Interfaces are running in correct mode. """Verifies IS-IS interfaces are running in the correct mode.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if all listed interfaces are running in correct mode. * Success: The test will pass if all provided IS-IS interfaces are running in the correct mode.
* Failure: The test will fail if any of the listed interfaces is not running in correct mode. * Failure: The test will fail if any of the provided IS-IS interfaces are not configured or running in the incorrect mode.
* Skipped: The test will be skipped if no ISIS neighbor is found. * Skipped: The test will be skipped if IS-IS is not configured.
Examples Examples
-------- --------
@ -261,80 +157,71 @@ class VerifyISISInterfaceMode(AntaTest):
interfaces: interfaces:
- name: Loopback0 - name: Loopback0
mode: passive mode: passive
# vrf is set to default by default
- name: Ethernet2 - name: Ethernet2
mode: passive mode: passive
level: 2 level: 2
# vrf is set to default by default
- name: Ethernet1 - name: Ethernet1
mode: point-to-point mode: point-to-point
vrf: default vrf: PROD
# level is set to 2 by default
``` ```
""" """
description = "Verifies interface mode for IS-IS"
categories: ClassVar[list[str]] = ["isis"] categories: ClassVar[list[str]] = ["isis"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis interface brief", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis interface brief vrf all", revision=1)]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyISISNeighborCount test.""" """Input model for the VerifyISISInterfaceMode test."""
interfaces: list[InterfaceState] interfaces: list[ISISInterface]
"""list of interfaces with their information.""" """List of IS-IS interfaces with their information."""
InterfaceState: ClassVar[type[InterfaceState]] = InterfaceState
class InterfaceState(BaseModel):
"""Input model for the VerifyISISNeighborCount test."""
name: Interface
"""Interface name to check."""
level: Literal[1, 2] = 2
"""ISIS level configured for interface. Default is 2."""
mode: Literal["point-to-point", "broadcast", "passive"]
"""Number of IS-IS neighbors."""
vrf: str = "default"
"""VRF where the interface should be configured"""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyISISInterfaceMode.""" """Main test function for VerifyISISInterfaceMode."""
command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
if len(command_output["vrfs"]) == 0: # Verify if IS-IS is configured
self.result.is_skipped("IS-IS is not configured on device") if not (command_output := self.instance_commands[0].json_output["vrfs"]):
self.result.is_skipped("IS-IS not configured")
return return
# Check for p2p interfaces
for interface in self.inputs.interfaces: for interface in self.inputs.interfaces:
interface_data = _get_interface_data( interface_detail = {}
interface=interface.name, vrf_instances = get_value(command_output, f"{interface.vrf}..isisInstances", default={}, separator="..")
vrf=interface.vrf, for instance_data in vrf_instances.values():
command_output=command_output, if interface_data := get_value(instance_data, f"interfaces..{interface.name}", separator=".."):
) interface_detail = interface_data
# Check for correct VRF # An interface can only be configured in one IS-IS instance at a time
if interface_data is not None: break
interface_type = get_value(dictionary=interface_data, key="interfaceType", default="unset")
# Check for interfaceType if not interface_detail:
if interface.mode == "point-to-point" and interface.mode != interface_type: self.result.is_failure(f"{interface} - Not configured")
self.result.is_failure(f"Interface {interface.name} in VRF {interface.vrf} is not running in {interface.mode} reporting {interface_type}") continue
# Check for passive
elif interface.mode == "passive": # Check for passive
json_path = f"intfLevels.{interface.level}.passive" if interface.mode == "passive":
if interface_data is None or get_value(dictionary=interface_data, key=json_path, default=False) is False: if get_value(interface_detail, f"intfLevels.{interface.level}.passive", default=False) is False:
self.result.is_failure(f"Interface {interface.name} in VRF {interface.vrf} is not running in passive mode") self.result.is_failure(f"{interface} - Not running in passive mode")
else:
self.result.is_failure(f"Interface {interface.name} not found in VRF {interface.vrf}") # Check for point-to-point or broadcast
elif interface.mode != (interface_type := get_value(interface_detail, "interfaceType", default="unset")):
self.result.is_failure(f"{interface} - Incorrect interface mode - Expected: {interface.mode} Actual: {interface_type}")
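For illustration, a standalone sketch (not ANTA code) of the mode decision above: `passive` is checked per IS-IS level, while `point-to-point` and `broadcast` are compared against the reported interface type. `check_mode` and the field names are assumptions for this example.

```python
from typing import Any


def check_mode(interface_detail: dict, expected_mode: str, level: int = 2) -> str:
    """Return 'ok' or a short failure reason for one interface entry."""
    if expected_mode == "passive":
        passive = interface_detail.get("intfLevels", {}).get(str(level), {}).get("passive", False)
        return "ok" if passive else "not running in passive mode"
    actual = interface_detail.get("interfaceType", "unset")
    return "ok" if actual == expected_mode else f"expected {expected_mode}, got {actual}"


# Hypothetical interface entry from "show isis interface brief vrf all".
detail: dict[str, Any] = {"interfaceType": "broadcast", "intfLevels": {"2": {"passive": False}}}
print(check_mode(detail, "point-to-point"))  # expected point-to-point, got broadcast
print(check_mode(detail, "passive"))         # not running in passive mode
```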
class VerifyISISSegmentRoutingAdjacencySegments(AntaTest): class VerifyISISSegmentRoutingAdjacencySegments(AntaTest):
"""Verify that all expected Adjacency segments are correctly visible for each interface. """Verifies IS-IS segment routing adjacency segments.
!!! warning "IS-IS SR Limitation"
As of EOS 4.33.1F, IS-IS SR is supported only in the default VRF.
Please refer to the IS-IS Segment Routing [documentation](https://www.arista.com/en/support/toi/eos-4-17-0f/13789-isis-segment-routing)
for more information.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if all listed interfaces have correct adjacencies. * Success: The test will pass if all provided IS-IS instances have the correct adjacency segments.
* Failure: The test will fail if any of the listed interfaces has not expected list of adjacencies. * Failure: The test will fail if any of the provided IS-IS instances have no adjacency segments or incorrect segments.
* Skipped: The test will be skipped if no ISIS SR Adjacency is found. * Skipped: The test will be skipped if IS-IS is not configured.
Examples Examples
-------- --------
@ -358,91 +245,62 @@ class VerifyISISSegmentRoutingAdjacencySegments(AntaTest):
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyISISSegmentRoutingAdjacencySegments test.""" """Input model for the VerifyISISSegmentRoutingAdjacencySegments test."""
instances: list[IsisInstance] instances: list[ISISInstance]
"""List of IS-IS instances with their information."""
IsisInstance: ClassVar[type[IsisInstance]] = IsisInstance
class IsisInstance(BaseModel): @field_validator("instances")
"""ISIS Instance model definition.""" @classmethod
def validate_instances(cls, instances: list[ISISInstance]) -> list[ISISInstance]:
name: str """Validate that 'vrf' field is 'default' in each IS-IS instance."""
"""ISIS instance name.""" for instance in instances:
vrf: str = "default" if instance.vrf != "default":
"""VRF name where ISIS instance is configured.""" msg = f"{instance} 'vrf' field must be 'default'"
segments: list[Segment] raise ValueError(msg)
"""List of Adjacency segments configured in this instance.""" return instances
class Segment(BaseModel):
"""Segment model definition."""
interface: Interface
"""Interface name to check."""
level: Literal[1, 2] = 2
"""ISIS level configured for interface. Default is 2."""
sid_origin: Literal["dynamic"] = "dynamic"
"""Adjacency type"""
address: IPv4Address
"""IP address of remote end of segment."""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyISISSegmentRoutingAdjacencySegments.""" """Main test function for VerifyISISSegmentRoutingAdjacencySegments."""
command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
if len(command_output["vrfs"]) == 0: # Verify if IS-IS is configured
self.result.is_skipped("IS-IS is not configured on device") if not (command_output := self.instance_commands[0].json_output["vrfs"]):
self.result.is_skipped("IS-IS not configured")
return return
# initiate defaults
failure_message = []
skip_vrfs = []
skip_instances = []
# Check if VRFs and instances are present in output.
for instance in self.inputs.instances: for instance in self.inputs.instances:
vrf_data = get_value( if not (act_segments := get_value(command_output, f"{instance.vrf}..isisInstances..{instance.name}..adjacencySegments", default=[], separator="..")):
dictionary=command_output, self.result.is_failure(f"{instance} - No adjacency segments found")
key=f"vrfs.{instance.vrf}", continue
default=None,
)
if vrf_data is None:
skip_vrfs.append(instance.vrf)
failure_message.append(f"VRF {instance.vrf} is not configured to run segment routing.")
elif get_value(dictionary=vrf_data, key=f"isisInstances.{instance.name}", default=None) is None: for segment in instance.segments:
skip_instances.append(instance.name) if (act_segment := get_item(act_segments, "ipAddress", str(segment.address))) is None:
failure_message.append(f"Instance {instance.name} is not found in vrf {instance.vrf}.") self.result.is_failure(f"{instance} {segment} - Adjacency segment not found")
continue
# Check Adjacency segments # Check SID origin
for instance in self.inputs.instances: if (act_origin := act_segment["sidOrigin"]) != segment.sid_origin:
if instance.vrf not in skip_vrfs and instance.name not in skip_instances: self.result.is_failure(f"{instance} {segment} - Incorrect SID origin - Expected: {segment.sid_origin} Actual: {act_origin}")
for input_segment in instance.segments:
eos_segment = _get_adjacency_segment_data_by_neighbor(
neighbor=str(input_segment.address),
instance=instance.name,
vrf=instance.vrf,
command_output=command_output,
)
if eos_segment is None:
failure_message.append(f"Your segment has not been found: {input_segment}.")
elif ( # Check IS-IS level
eos_segment["localIntf"] != input_segment.interface if (actual_level := act_segment["level"]) != segment.level:
or eos_segment["level"] != input_segment.level self.result.is_failure(f"{instance} {segment} - Incorrect IS-IS level - Expected: {segment.level} Actual: {actual_level}")
or eos_segment["sidOrigin"] != input_segment.sid_origin
):
failure_message.append(f"Your segment is not correct: Expected: {input_segment} - Found: {eos_segment}.")
if failure_message:
self.result.is_failure("\n".join(failure_message))
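For illustration, a minimal standalone `get_item`-style helper (not the `anta.tools` implementation): find the first dictionary in a list whose key equals a value, as used above to locate an adjacency segment by its neighbor IP address. The segment fields are assumptions.

```python
from __future__ import annotations

from typing import Any


def get_item(items: list[dict[str, Any]], key: str, value: Any) -> dict[str, Any] | None:
    """Return the first dict whose `key` equals `value`, or None."""
    return next((item for item in items if item.get(key) == value), None)


segments = [
    {"ipAddress": "10.0.1.3", "sidOrigin": "dynamic", "level": 2},
    {"ipAddress": "10.0.1.5", "sidOrigin": "dynamic", "level": 2},
]
print(get_item(segments, "ipAddress", "10.0.1.5"))  # the second entry
print(get_item(segments, "ipAddress", "10.0.1.9"))  # None -> "Adjacency segment not found"
```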
class VerifyISISSegmentRoutingDataplane(AntaTest): class VerifyISISSegmentRoutingDataplane(AntaTest):
"""Verify dataplane of a list of ISIS-SR instances. """Verifies IS-IS segment routing data-plane configuration.
!!! warning "IS-IS SR Limitation"
As of EOS 4.33.1F, IS-IS SR is supported only in the default VRF.
Please refer to the IS-IS Segment Routing [documentation](https://www.arista.com/en/support/toi/eos-4-17-0f/13789-isis-segment-routing)
for more information.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if all instances have correct dataplane configured * Success: The test will pass if all provided IS-IS instances have the correct data-plane configured.
* Failure: The test will fail if one of the instances has incorrect dataplane configured * Failure: The test will fail if any of the provided IS-IS instances have an incorrect data-plane configured.
* Skipped: The test will be skipped if ISIS is not running * Skipped: The test will be skipped if IS-IS is not configured.
Examples Examples
-------- --------
@ -463,57 +321,37 @@ class VerifyISISSegmentRoutingDataplane(AntaTest):
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyISISSegmentRoutingDataplane test.""" """Input model for the VerifyISISSegmentRoutingDataplane test."""
instances: list[IsisInstance] instances: list[ISISInstance]
"""List of IS-IS instances with their information."""
IsisInstance: ClassVar[type[IsisInstance]] = IsisInstance
class IsisInstance(BaseModel): @field_validator("instances")
"""ISIS Instance model definition.""" @classmethod
def validate_instances(cls, instances: list[ISISInstance]) -> list[ISISInstance]:
name: str """Validate that 'vrf' field is 'default' in each IS-IS instance."""
"""ISIS instance name.""" for instance in instances:
vrf: str = "default" if instance.vrf != "default":
"""VRF name where ISIS instance is configured.""" msg = f"{instance} 'vrf' field must be 'default'"
dataplane: Literal["MPLS", "mpls", "unset"] = "MPLS" raise ValueError(msg)
"""Configured dataplane for the instance.""" return instances
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyISISSegmentRoutingDataplane.""" """Main test function for VerifyISISSegmentRoutingDataplane."""
command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
if len(command_output["vrfs"]) == 0: # Verify if IS-IS is configured
self.result.is_skipped("IS-IS-SR is not running on device.") if not (command_output := self.instance_commands[0].json_output["vrfs"]):
self.result.is_skipped("IS-IS not configured")
return return
# initiate defaults
failure_message = []
skip_vrfs = []
skip_instances = []
# Check if VRFs and instances are present in output.
for instance in self.inputs.instances: for instance in self.inputs.instances:
vrf_data = get_value( if not (instance_data := get_value(command_output, f"{instance.vrf}..isisInstances..{instance.name}", separator="..")):
dictionary=command_output, self.result.is_failure(f"{instance} - Not configured")
key=f"vrfs.{instance.vrf}", continue
default=None,
)
if vrf_data is None:
skip_vrfs.append(instance.vrf)
failure_message.append(f"VRF {instance.vrf} is not configured to run segment routing.")
elif get_value(dictionary=vrf_data, key=f"isisInstances.{instance.name}", default=None) is None: if instance.dataplane.upper() != (dataplane := instance_data["dataPlane"]):
skip_instances.append(instance.name) self.result.is_failure(f"{instance} - Data-plane not correctly configured - Expected: {instance.dataplane.upper()} Actual: {dataplane}")
failure_message.append(f"Instance {instance.name} is not found in vrf {instance.vrf}.")
# Check Adjacency segments
for instance in self.inputs.instances:
if instance.vrf not in skip_vrfs and instance.name not in skip_instances:
eos_dataplane = get_value(dictionary=command_output, key=f"vrfs.{instance.vrf}.isisInstances.{instance.name}.dataPlane", default=None)
if instance.dataplane.upper() != eos_dataplane:
failure_message.append(f"ISIS instance {instance.name} is not running dataplane {instance.dataplane} ({eos_dataplane})")
if failure_message:
self.result.is_failure("\n".join(failure_message))
class VerifyISISSegmentRoutingTunnels(AntaTest): class VerifyISISSegmentRoutingTunnels(AntaTest):
@ -553,34 +391,9 @@ class VerifyISISSegmentRoutingTunnels(AntaTest):
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyISISSegmentRoutingTunnels test.""" """Input model for the VerifyISISSegmentRoutingTunnels test."""
entries: list[Entry] entries: list[Tunnel]
"""List of tunnels to check on device.""" """List of tunnels to check on device."""
Entry: ClassVar[type[Entry]] = Entry
class Entry(BaseModel):
"""Definition of a tunnel entry."""
endpoint: IPv4Network
"""Endpoint IP of the tunnel."""
vias: list[Vias] | None = None
"""Optional list of path to reach endpoint."""
class Vias(BaseModel):
"""Definition of a tunnel path."""
nexthop: IPv4Address | None = None
"""Nexthop of the tunnel. If None, then it is not tested. Default: None"""
type: Literal["ip", "tunnel"] | None = None
"""Type of the tunnel. If None, then it is not tested. Default: None"""
interface: Interface | None = None
"""Interface of the tunnel. If None, then it is not tested. Default: None"""
tunnel_id: Literal["TI-LFA", "ti-lfa", "unset"] | None = None
"""Computation method of the tunnel. If None, then it is not tested. Default: None"""
def _eos_entry_lookup(self, search_value: IPv4Network, entries: dict[str, Any], search_key: str = "endpoint") -> dict[str, Any] | None:
return next(
(entry_value for entry_id, entry_value in entries.items() if str(entry_value[search_key]) == str(search_value)),
None,
)
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
@ -589,142 +402,43 @@ class VerifyISISSegmentRoutingTunnels(AntaTest):
This method performs the main test logic for verifying ISIS Segment Routing tunnels. This method performs the main test logic for verifying ISIS Segment Routing tunnels.
It checks the command output, initiates defaults, and performs various checks on the tunnels. It checks the command output, initiates defaults, and performs various checks on the tunnels.
""" """
command_output = self.instance_commands[0].json_output
self.result.is_success() self.result.is_success()
# initiate defaults command_output = self.instance_commands[0].json_output
failure_message = []
if len(command_output["entries"]) == 0: if len(command_output["entries"]) == 0:
self.result.is_skipped("IS-IS-SR is not running on device.") self.result.is_skipped("IS-IS-SR not configured")
return return
for input_entry in self.inputs.entries: for input_entry in self.inputs.entries:
eos_entry = self._eos_entry_lookup(search_value=input_entry.endpoint, entries=command_output["entries"]) entries = list(command_output["entries"].values())
if eos_entry is None: if (eos_entry := get_item(entries, "endpoint", str(input_entry.endpoint))) is None:
failure_message.append(f"Tunnel to {input_entry} is not found.") self.result.is_failure(f"{input_entry} - Tunnel not found")
elif input_entry.vias is not None: continue
failure_src = []
if input_entry.vias is not None:
for via_input in input_entry.vias: for via_input in input_entry.vias:
if not self._check_tunnel_type(via_input, eos_entry): via_search_result = any(self._via_matches(via_input, eos_via) for eos_via in eos_entry["vias"])
failure_src.append("incorrect tunnel type") if not via_search_result:
if not self._check_tunnel_nexthop(via_input, eos_entry): self.result.is_failure(f"{input_entry} {via_input} - Tunnel is incorrect")
failure_src.append("incorrect nexthop")
if not self._check_tunnel_interface(via_input, eos_entry):
failure_src.append("incorrect interface")
if not self._check_tunnel_id(via_input, eos_entry):
failure_src.append("incorrect tunnel ID")
if failure_src: def _via_matches(self, via_input: TunnelPath, eos_via: dict[str, Any]) -> bool:
failure_message.append(f"Tunnel to {input_entry.endpoint!s} is incorrect: {', '.join(failure_src)}") """Check if the via input matches the eos via.
if failure_message:
self.result.is_failure("\n".join(failure_message))
def _check_tunnel_type(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
"""Check if the tunnel type specified in `via_input` matches any of the tunnel types in `eos_entry`.
Parameters Parameters
---------- ----------
via_input : VerifyISISSegmentRoutingTunnels.Input.Entry.Vias via_input : TunnelPath
The input tunnel type to check. The input via to check.
eos_entry : dict[str, Any] eos_via : dict[str, Any]
The EOS entry containing the tunnel types. The EOS via to compare against.
Returns Returns
------- -------
bool bool
True if the tunnel type matches any of the tunnel types in `eos_entry`, False otherwise. True if the via input matches the eos via, False otherwise.
""" """
if via_input.type is not None: return (
return any( (via_input.type is None or via_input.type == eos_via.get("type"))
via_input.type and (via_input.nexthop is None or str(via_input.nexthop) == eos_via.get("nexthop"))
== get_value( and (via_input.interface is None or via_input.interface == eos_via.get("interface"))
dictionary=eos_via, and (via_input.tunnel_id is None or via_input.tunnel_id.upper() == get_value(eos_via, "tunnelId.type", default="").upper())
key="type", )
default="undefined",
)
for eos_via in eos_entry["vias"]
)
return True
def _check_tunnel_nexthop(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
"""Check if the tunnel nexthop matches the given input.
Parameters
----------
via_input : VerifyISISSegmentRoutingTunnels.Input.Entry.Vias
The input via object.
eos_entry : dict[str, Any]
The EOS entry dictionary.
Returns
-------
bool
True if the tunnel nexthop matches, False otherwise.
"""
if via_input.nexthop is not None:
return any(
str(via_input.nexthop)
== get_value(
dictionary=eos_via,
key="nexthop",
default="undefined",
)
for eos_via in eos_entry["vias"]
)
return True
def _check_tunnel_interface(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
"""Check if the tunnel interface exists in the given EOS entry.
Parameters
----------
via_input : VerifyISISSegmentRoutingTunnels.Input.Entry.Vias
The input via object.
eos_entry : dict[str, Any]
The EOS entry dictionary.
Returns
-------
bool
True if the tunnel interface exists, False otherwise.
"""
if via_input.interface is not None:
return any(
via_input.interface
== get_value(
dictionary=eos_via,
key="interface",
default="undefined",
)
for eos_via in eos_entry["vias"]
)
return True
def _check_tunnel_id(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
"""Check if the tunnel ID matches any of the tunnel IDs in the EOS entry's vias.
Parameters
----------
via_input : VerifyISISSegmentRoutingTunnels.Input.Entry.Vias
The input vias to check.
eos_entry : dict[str, Any])
The EOS entry to compare against.
Returns
-------
bool
True if the tunnel ID matches any of the tunnel IDs in the EOS entry's vias, False otherwise.
"""
if via_input.tunnel_id is not None:
return any(
via_input.tunnel_id.upper()
== get_value(
dictionary=eos_via,
key="tunnelId.type",
default="undefined",
).upper()
for eos_via in eos_entry["vias"]
)
return True
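For illustration, a standalone sketch (not ANTA code) of the optional-constraint matching that `_via_matches` performs above: any field left as `None` in the input is simply not checked. `via_matches` and the via fields are assumptions for this example.

```python
from typing import Any, Optional


def via_matches(eos_via: dict[str, Any], via_type: Optional[str] = None,
                nexthop: Optional[str] = None, interface: Optional[str] = None) -> bool:
    """Return True when every provided constraint matches; None means "do not check"."""
    return (
        (via_type is None or via_type == eos_via.get("type"))
        and (nexthop is None or nexthop == eos_via.get("nexthop"))
        and (interface is None or interface == eos_via.get("interface"))
    )


vias = [
    {"type": "ip", "nexthop": "10.0.0.1", "interface": "Ethernet1"},
    {"type": "tunnel", "tunnelId": {"type": "TI-LFA"}},
]
# A tunnel entry passes when at least one via satisfies all provided constraints.
print(any(via_matches(v, via_type="ip", interface="Ethernet1") for v in vias))  # True
print(any(via_matches(v, nexthop="10.0.0.2") for v in vias))                    # False
```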


@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to OSPF tests.""" """Module related to OSPF tests."""
@ -7,90 +7,15 @@
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from __future__ import annotations from __future__ import annotations
from typing import TYPE_CHECKING, Any, ClassVar from typing import TYPE_CHECKING, ClassVar
from anta.models import AntaCommand, AntaTest from anta.models import AntaCommand, AntaTest
from anta.tools import get_value
if TYPE_CHECKING: if TYPE_CHECKING:
from anta.models import AntaTemplate from anta.models import AntaTemplate
def _count_ospf_neighbor(ospf_neighbor_json: dict[str, Any]) -> int:
"""Count the number of OSPF neighbors.
Parameters
----------
ospf_neighbor_json
The JSON output of the `show ip ospf neighbor` command.
Returns
-------
int
The number of OSPF neighbors.
"""
count = 0
for vrf_data in ospf_neighbor_json["vrfs"].values():
for instance_data in vrf_data["instList"].values():
count += len(instance_data.get("ospfNeighborEntries", []))
return count
def _get_not_full_ospf_neighbors(ospf_neighbor_json: dict[str, Any]) -> list[dict[str, Any]]:
"""Return the OSPF neighbors whose adjacency state is not `full`.
Parameters
----------
ospf_neighbor_json
The JSON output of the `show ip ospf neighbor` command.
Returns
-------
list[dict[str, Any]]
A list of OSPF neighbors whose adjacency state is not `full`.
"""
return [
{
"vrf": vrf,
"instance": instance,
"neighbor": neighbor_data["routerId"],
"state": state,
}
for vrf, vrf_data in ospf_neighbor_json["vrfs"].items()
for instance, instance_data in vrf_data["instList"].items()
for neighbor_data in instance_data.get("ospfNeighborEntries", [])
if (state := neighbor_data["adjacencyState"]) != "full"
]
def _get_ospf_max_lsa_info(ospf_process_json: dict[str, Any]) -> list[dict[str, Any]]:
"""Return information about OSPF instances and their LSAs.
Parameters
----------
ospf_process_json
OSPF process information in JSON format.
Returns
-------
list[dict[str, Any]]
A list of dictionaries containing OSPF LSAs information.
"""
return [
{
"vrf": vrf,
"instance": instance,
"maxLsa": instance_data.get("maxLsaInformation", {}).get("maxLsa"),
"maxLsaThreshold": instance_data.get("maxLsaInformation", {}).get("maxLsaThreshold"),
"numLsa": instance_data.get("lsaInformation", {}).get("numLsa"),
}
for vrf, vrf_data in ospf_process_json.get("vrfs", {}).items()
for instance, instance_data in vrf_data.get("instList", {}).items()
]
class VerifyOSPFNeighborState(AntaTest): class VerifyOSPFNeighborState(AntaTest):
"""Verifies all OSPF neighbors are in FULL state. """Verifies all OSPF neighbors are in FULL state.
@ -115,14 +40,29 @@ class VerifyOSPFNeighborState(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyOSPFNeighborState.""" """Main test function for VerifyOSPFNeighborState."""
command_output = self.instance_commands[0].json_output
if _count_ospf_neighbor(command_output) == 0:
self.result.is_skipped("no OSPF neighbor found")
return
self.result.is_success() self.result.is_success()
not_full_neighbors = _get_not_full_ospf_neighbors(command_output)
if not_full_neighbors: # If OSPF is not configured on device, test skipped.
self.result.is_failure(f"Some neighbors are not correctly configured: {not_full_neighbors}.") if not (command_output := get_value(self.instance_commands[0].json_output, "vrfs")):
self.result.is_skipped("OSPF not configured")
return
no_neighbor = True
for vrf, vrf_data in command_output.items():
for instance, instance_data in vrf_data["instList"].items():
neighbors = instance_data["ospfNeighborEntries"]
if not neighbors:
continue
no_neighbor = False
interfaces = [(neighbor["routerId"], state) for neighbor in neighbors if (state := neighbor["adjacencyState"]) != "full"]
for interface in interfaces:
self.result.is_failure(
f"Instance: {instance} VRF: {vrf} Interface: {interface[0]} - Incorrect adjacency state - Expected: Full Actual: {interface[1]}"
)
# If OSPF neighbors are not configured on device, test skipped.
if no_neighbor:
self.result.is_skipped("No OSPF neighbor detected")
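For illustration, a standalone sketch (not ANTA code) of the skip/fail ordering above: an empty `vrfs` object means the protocol is not configured (skip), configured-but-no-neighbors also skips, and otherwise one failure is reported per adjacency that is not in the expected state. `evaluate` and the sample data are assumptions.

```python
def evaluate(vrfs: dict) -> list:
    """Return result strings for a trimmed-down 'show ip ospf neighbor' output."""
    if not vrfs:
        return ["SKIPPED: OSPF not configured"]
    failures, saw_neighbor = [], False
    for vrf, vrf_data in vrfs.items():
        for instance, instance_data in vrf_data["instList"].items():
            for neighbor in instance_data["ospfNeighborEntries"]:
                saw_neighbor = True
                if neighbor["adjacencyState"] != "full":
                    failures.append(
                        f"Instance: {instance} VRF: {vrf} Neighbor: {neighbor['routerId']} - state {neighbor['adjacencyState']}"
                    )
    if not saw_neighbor:
        return ["SKIPPED: No OSPF neighbor detected"]
    return failures or ["SUCCESS"]


print(evaluate({}))
# ['SKIPPED: OSPF not configured']
print(evaluate({"default": {"instList": {"1": {"ospfNeighborEntries": [{"routerId": "10.0.0.2", "adjacencyState": "2Ways"}]}}}}))
# ['Instance: 1 VRF: default Neighbor: 10.0.0.2 - state 2Ways']
```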
class VerifyOSPFNeighborCount(AntaTest): class VerifyOSPFNeighborCount(AntaTest):
@ -156,20 +96,34 @@ class VerifyOSPFNeighborCount(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyOSPFNeighborCount.""" """Main test function for VerifyOSPFNeighborCount."""
command_output = self.instance_commands[0].json_output
if (neighbor_count := _count_ospf_neighbor(command_output)) == 0:
self.result.is_skipped("no OSPF neighbor found")
return
self.result.is_success() self.result.is_success()
if neighbor_count != self.inputs.number: # If OSPF is not configured on device, test skipped.
self.result.is_failure(f"device has {neighbor_count} neighbors (expected {self.inputs.number})") if not (command_output := get_value(self.instance_commands[0].json_output, "vrfs")):
not_full_neighbors = _get_not_full_ospf_neighbors(command_output) self.result.is_skipped("OSPF not configured")
if not_full_neighbors: return
self.result.is_failure(f"Some neighbors are not correctly configured: {not_full_neighbors}.")
no_neighbor = True
interfaces = []
for vrf_data in command_output.values():
for instance_data in vrf_data["instList"].values():
neighbors = instance_data["ospfNeighborEntries"]
if not neighbors:
continue
no_neighbor = False
interfaces.extend([neighbor["routerId"] for neighbor in neighbors if neighbor["adjacencyState"] == "full"])
# If OSPF neighbors are not configured on device, test skipped.
if no_neighbor:
self.result.is_skipped("No OSPF neighbor detected")
return
# If the number of OSPF neighbors expected to be in the FULL state does not match with actual one, test fails.
if len(interfaces) != self.inputs.number:
self.result.is_failure(f"Neighbor count mismatch - Expected: {self.inputs.number} Actual: {len(interfaces)}")
class VerifyOSPFMaxLSA(AntaTest): class VerifyOSPFMaxLSA(AntaTest):
"""Verifies LSAs present in the OSPF link state database did not cross the maximum LSA Threshold. """Verifies all OSPF instances did not cross the maximum LSA threshold.
Expected Results Expected Results
---------------- ----------------
@ -186,23 +140,23 @@ class VerifyOSPFMaxLSA(AntaTest):
``` ```
""" """
description = "Verifies all OSPF instances did not cross the maximum LSA threshold."
categories: ClassVar[list[str]] = ["ospf"] categories: ClassVar[list[str]] = ["ospf"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ip ospf", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ip ospf", revision=1)]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyOSPFMaxLSA.""" """Main test function for VerifyOSPFMaxLSA."""
command_output = self.instance_commands[0].json_output self.result.is_success()
ospf_instance_info = _get_ospf_max_lsa_info(command_output)
if not ospf_instance_info: # If OSPF is not configured on device, test skipped.
self.result.is_skipped("No OSPF instance found.") if not (command_output := get_value(self.instance_commands[0].json_output, "vrfs")):
self.result.is_skipped("OSPF not configured")
return return
all_instances_within_threshold = all(instance["numLsa"] <= instance["maxLsa"] * (instance["maxLsaThreshold"] / 100) for instance in ospf_instance_info)
if all_instances_within_threshold: for vrf_data in command_output.values():
self.result.is_success() for instance, instance_data in vrf_data.get("instList", {}).items():
else: max_lsa = instance_data["maxLsaInformation"]["maxLsa"]
exceeded_instances = [ max_lsa_threshold = instance_data["maxLsaInformation"]["maxLsaThreshold"]
instance["instance"] for instance in ospf_instance_info if instance["numLsa"] > instance["maxLsa"] * (instance["maxLsaThreshold"] / 100) num_lsa = get_value(instance_data, "lsaInformation.numLsa")
] if num_lsa > (max_lsa_threshold := round(max_lsa * (max_lsa_threshold / 100))):
self.result.is_failure(f"OSPF Instances {exceeded_instances} crossed the maximum LSA threshold.") self.result.is_failure(f"Instance: {instance} - Crossed the maximum LSA threshold - Expected: < {max_lsa_threshold} Actual: {num_lsa}")


@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the EOS various security tests.""" """Module related to the EOS various security tests."""
@ -8,22 +8,12 @@ from __future__ import annotations
# Mypy does not understand AntaTest.Input typing # Mypy does not understand AntaTest.Input typing
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import TYPE_CHECKING, ClassVar, get_args from typing import ClassVar
from pydantic import BaseModel, Field, model_validator from anta.custom_types import PositiveInteger
from anta.input_models.security import ACL, APISSLCertificate, IPSecPeer, IPSecPeers
from anta.custom_types import EcdsaKeySize, EncryptionAlgorithm, PositiveInteger, RsaKeySize
from anta.input_models.security import IPSecPeer, IPSecPeers
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import get_failed_logs, get_item, get_value from anta.tools import get_item, get_value
if TYPE_CHECKING:
import sys
if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self
class VerifySSHStatus(AntaTest): class VerifySSHStatus(AntaTest):
@ -53,7 +43,7 @@ class VerifySSHStatus(AntaTest):
try: try:
line = next(line for line in command_output.split("\n") if line.startswith("SSHD status")) line = next(line for line in command_output.split("\n") if line.startswith("SSHD status"))
except StopIteration: except StopIteration:
self.result.is_failure("Could not find SSH status in returned output.") self.result.is_failure("Could not find SSH status in returned output")
return return
status = line.split()[-1] status = line.split()[-1]
@ -96,19 +86,18 @@ class VerifySSHIPv4Acl(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySSHIPv4Acl.""" """Main test function for VerifySSHIPv4Acl."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
ipv4_acl_list = command_output["ipAclList"]["aclList"] ipv4_acl_list = command_output["ipAclList"]["aclList"]
ipv4_acl_number = len(ipv4_acl_list) ipv4_acl_number = len(ipv4_acl_list)
if ipv4_acl_number != self.inputs.number: if ipv4_acl_number != self.inputs.number:
self.result.is_failure(f"Expected {self.inputs.number} SSH IPv4 ACL(s) in vrf {self.inputs.vrf} but got {ipv4_acl_number}") self.result.is_failure(f"VRF: {self.inputs.vrf} - SSH IPv4 ACL(s) count mismatch - Expected: {self.inputs.number} Actual: {ipv4_acl_number}")
return return
not_configured_acl = [acl["name"] for acl in ipv4_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]] not_configured_acl = [acl["name"] for acl in ipv4_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]]
if not_configured_acl: if not_configured_acl:
self.result.is_failure(f"SSH IPv4 ACL(s) not configured or active in vrf {self.inputs.vrf}: {not_configured_acl}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Following SSH IPv4 ACL(s) not configured or active: {', '.join(not_configured_acl)}")
else:
self.result.is_success()
class VerifySSHIPv6Acl(AntaTest): class VerifySSHIPv6Acl(AntaTest):
@ -144,19 +133,18 @@ class VerifySSHIPv6Acl(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySSHIPv6Acl.""" """Main test function for VerifySSHIPv6Acl."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
ipv6_acl_list = command_output["ipv6AclList"]["aclList"] ipv6_acl_list = command_output["ipv6AclList"]["aclList"]
ipv6_acl_number = len(ipv6_acl_list) ipv6_acl_number = len(ipv6_acl_list)
if ipv6_acl_number != self.inputs.number: if ipv6_acl_number != self.inputs.number:
self.result.is_failure(f"Expected {self.inputs.number} SSH IPv6 ACL(s) in vrf {self.inputs.vrf} but got {ipv6_acl_number}") self.result.is_failure(f"VRF: {self.inputs.vrf} - SSH IPv6 ACL(s) count mismatch - Expected: {self.inputs.number} Actual: {ipv6_acl_number}")
return return
not_configured_acl = [acl["name"] for acl in ipv6_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]] not_configured_acl = [acl["name"] for acl in ipv6_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]]
if not_configured_acl: if not_configured_acl:
self.result.is_failure(f"SSH IPv6 ACL(s) not configured or active in vrf {self.inputs.vrf}: {not_configured_acl}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Following SSH IPv6 ACL(s) not configured or active: {', '.join(not_configured_acl)}")
else:
self.result.is_success()
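For illustration, a standalone sketch (not ANTA code) of the two-step ACL check shared by the SSH and eAPI tests above: first the ACL count must match, then every ACL must be both configured and active in the requested VRF. The list layout is an assumption mimicking the ACL summary output.

```python
# Hypothetical ACL summary entries.
acl_list = [
    {"name": "ACL-SSH", "configuredVrfs": ["default"], "activeVrfs": ["default"]},
    {"name": "ACL-SSH-MGMT", "configuredVrfs": ["MGMT"], "activeVrfs": []},
]
expected_number, vrf = 2, "MGMT"

if len(acl_list) != expected_number:
    print(f"VRF: {vrf} - ACL(s) count mismatch - Expected: {expected_number} Actual: {len(acl_list)}")
else:
    not_ok = [acl["name"] for acl in acl_list if vrf not in acl["configuredVrfs"] or vrf not in acl["activeVrfs"]]
    if not_ok:
        print(f"VRF: {vrf} - Following ACL(s) not configured or active: {', '.join(not_ok)}")
# VRF: MGMT - Following ACL(s) not configured or active: ACL-SSH, ACL-SSH-MGMT
```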
class VerifyTelnetStatus(AntaTest): class VerifyTelnetStatus(AntaTest):
@ -218,7 +206,7 @@ class VerifyAPIHttpStatus(AntaTest):
class VerifyAPIHttpsSSL(AntaTest): class VerifyAPIHttpsSSL(AntaTest):
"""Verifies if eAPI HTTPS server SSL profile is configured and valid. """Verifies if the eAPI has a valid SSL profile.
Expected Results Expected Results
---------------- ----------------
@ -234,7 +222,6 @@ class VerifyAPIHttpsSSL(AntaTest):
``` ```
""" """
description = "Verifies if the eAPI has a valid SSL profile."
categories: ClassVar[list[str]] = ["security"] categories: ClassVar[list[str]] = ["security"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show management api http-commands", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show management api http-commands", revision=1)]
@ -252,10 +239,10 @@ class VerifyAPIHttpsSSL(AntaTest):
if command_output["sslProfile"]["name"] == self.inputs.profile and command_output["sslProfile"]["state"] == "valid": if command_output["sslProfile"]["name"] == self.inputs.profile and command_output["sslProfile"]["state"] == "valid":
self.result.is_success() self.result.is_success()
else: else:
self.result.is_failure(f"eAPI HTTPS server SSL profile ({self.inputs.profile}) is misconfigured or invalid") self.result.is_failure(f"eAPI HTTPS server SSL profile {self.inputs.profile} is misconfigured or invalid")
except KeyError: except KeyError:
self.result.is_failure(f"eAPI HTTPS server SSL profile ({self.inputs.profile}) is not configured") self.result.is_failure(f"eAPI HTTPS server SSL profile {self.inputs.profile} is not configured")
class VerifyAPIIPv4Acl(AntaTest): class VerifyAPIIPv4Acl(AntaTest):
@ -294,13 +281,13 @@ class VerifyAPIIPv4Acl(AntaTest):
ipv4_acl_list = command_output["ipAclList"]["aclList"] ipv4_acl_list = command_output["ipAclList"]["aclList"]
ipv4_acl_number = len(ipv4_acl_list) ipv4_acl_number = len(ipv4_acl_list)
if ipv4_acl_number != self.inputs.number: if ipv4_acl_number != self.inputs.number:
self.result.is_failure(f"Expected {self.inputs.number} eAPI IPv4 ACL(s) in vrf {self.inputs.vrf} but got {ipv4_acl_number}") self.result.is_failure(f"VRF: {self.inputs.vrf} - eAPI IPv4 ACL(s) count mismatch - Expected: {self.inputs.number} Actual: {ipv4_acl_number}")
return return
not_configured_acl = [acl["name"] for acl in ipv4_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]] not_configured_acl = [acl["name"] for acl in ipv4_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]]
if not_configured_acl: if not_configured_acl:
self.result.is_failure(f"eAPI IPv4 ACL(s) not configured or active in vrf {self.inputs.vrf}: {not_configured_acl}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Following eAPI IPv4 ACL(s) not configured or active: {', '.join(not_configured_acl)}")
else: else:
self.result.is_success() self.result.is_success()
@ -342,13 +329,13 @@ class VerifyAPIIPv6Acl(AntaTest):
ipv6_acl_list = command_output["ipv6AclList"]["aclList"] ipv6_acl_list = command_output["ipv6AclList"]["aclList"]
ipv6_acl_number = len(ipv6_acl_list) ipv6_acl_number = len(ipv6_acl_list)
if ipv6_acl_number != self.inputs.number: if ipv6_acl_number != self.inputs.number:
self.result.is_failure(f"Expected {self.inputs.number} eAPI IPv6 ACL(s) in vrf {self.inputs.vrf} but got {ipv6_acl_number}") self.result.is_failure(f"VRF: {self.inputs.vrf} - eAPI IPv6 ACL(s) count mismatch - Expected: {self.inputs.number} Actual: {ipv6_acl_number}")
return return
not_configured_acl = [acl["name"] for acl in ipv6_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]] not_configured_acl = [acl["name"] for acl in ipv6_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]]
if not_configured_acl: if not_configured_acl:
self.result.is_failure(f"eAPI IPv6 ACL(s) not configured or active in vrf {self.inputs.vrf}: {not_configured_acl}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Following eAPI IPv6 ACL(s) not configured or active: {', '.join(not_configured_acl)}")
else: else:
self.result.is_success() self.result.is_success()
@ -356,12 +343,25 @@ class VerifyAPIIPv6Acl(AntaTest):
class VerifyAPISSLCertificate(AntaTest): class VerifyAPISSLCertificate(AntaTest):
"""Verifies the eAPI SSL certificate expiry, common subject name, encryption algorithm and key size. """Verifies the eAPI SSL certificate expiry, common subject name, encryption algorithm and key size.
This test performs the following checks for each certificate:
1. Validates that the certificate is not expired and meets the configured expiry threshold.
2. Validates that the certificate Common Name matches the expected one.
3. Ensures the certificate uses the specified encryption algorithm.
4. Verifies the certificate key matches the expected key size.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if the certificate's expiry date is greater than the threshold, * Success: If all of the following occur:
and the certificate has the correct name, encryption algorithm, and key size. - The certificate's expiry date exceeds the configured threshold.
* Failure: The test will fail if the certificate is expired or is going to expire, - The certificate's Common Name matches the input configuration.
or if the certificate has an incorrect name, encryption algorithm, or key size. - The encryption algorithm used by the certificate is as expected.
- The key size of the certificate matches the input configuration.
* Failure: If any of the following occur:
- The certificate is expired or set to expire within the defined threshold.
- The certificate's common name does not match the expected input.
- The encryption algorithm is incorrect.
- The key size does not match the expected input.
Examples Examples
-------- --------
@ -393,38 +393,7 @@ class VerifyAPISSLCertificate(AntaTest):
certificates: list[APISSLCertificate] certificates: list[APISSLCertificate]
"""List of API SSL certificates.""" """List of API SSL certificates."""
APISSLCertificate: ClassVar[type[APISSLCertificate]] = APISSLCertificate
class APISSLCertificate(BaseModel):
"""Model for an API SSL certificate."""
certificate_name: str
"""The name of the certificate to be verified."""
expiry_threshold: int
"""The expiry threshold of the certificate in days."""
common_name: str
"""The common subject name of the certificate."""
encryption_algorithm: EncryptionAlgorithm
"""The encryption algorithm of the certificate."""
key_size: RsaKeySize | EcdsaKeySize
"""The encryption algorithm key size of the certificate."""
@model_validator(mode="after")
def validate_inputs(self) -> Self:
"""Validate the key size provided to the APISSLCertificates class.
If encryption_algorithm is RSA then key_size should be in {2048, 3072, 4096}.
If encryption_algorithm is ECDSA then key_size should be in {256, 384, 521}.
"""
if self.encryption_algorithm == "RSA" and self.key_size not in get_args(RsaKeySize):
msg = f"`{self.certificate_name}` key size {self.key_size} is invalid for RSA encryption. Allowed sizes are {get_args(RsaKeySize)}."
raise ValueError(msg)
if self.encryption_algorithm == "ECDSA" and self.key_size not in get_args(EcdsaKeySize):
msg = f"`{self.certificate_name}` key size {self.key_size} is invalid for ECDSA encryption. Allowed sizes are {get_args(EcdsaKeySize)}."
raise ValueError(msg)
return self
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
@ -442,7 +411,7 @@ class VerifyAPISSLCertificate(AntaTest):
# Collecting certificate expiry time and current EOS time. # Collecting certificate expiry time and current EOS time.
# These times are used to calculate the number of days until the certificate expires. # These times are used to calculate the number of days until the certificate expires.
if not (certificate_data := get_value(certificate_output, f"certificates..{certificate.certificate_name}", separator="..")): if not (certificate_data := get_value(certificate_output, f"certificates..{certificate.certificate_name}", separator="..")):
self.result.is_failure(f"SSL certificate '{certificate.certificate_name}', is not configured.\n") self.result.is_failure(f"{certificate} - Not found")
continue continue
expiry_time = certificate_data["notAfter"] expiry_time = certificate_data["notAfter"]
@ -450,24 +419,25 @@ class VerifyAPISSLCertificate(AntaTest):
# Verify certificate expiry # Verify certificate expiry
if 0 < day_difference < certificate.expiry_threshold: if 0 < day_difference < certificate.expiry_threshold:
self.result.is_failure(f"SSL certificate `{certificate.certificate_name}` is about to expire in {day_difference} days.\n") self.result.is_failure(
f"{certificate} - set to expire within the threshold - Threshold: {certificate.expiry_threshold} days Actual: {day_difference} days"
)
elif day_difference < 0: elif day_difference < 0:
self.result.is_failure(f"SSL certificate `{certificate.certificate_name}` is expired.\n") self.result.is_failure(f"{certificate} - certificate expired")
# Verify certificate common subject name, encryption algorithm and key size # Verify certificate common subject name, encryption algorithm and key size
keys_to_verify = ["subject.commonName", "publicKey.encryptionAlgorithm", "publicKey.size"] common_name = get_value(certificate_data, "subject.commonName", default="Not found")
actual_certificate_details = {key: get_value(certificate_data, key) for key in keys_to_verify} encryp_algo = get_value(certificate_data, "publicKey.encryptionAlgorithm", default="Not found")
key_size = get_value(certificate_data, "publicKey.size", default="Not found")
expected_certificate_details = { if common_name != certificate.common_name:
"subject.commonName": certificate.common_name, self.result.is_failure(f"{certificate} - incorrect common name - Expected: {certificate.common_name} Actual: {common_name}")
"publicKey.encryptionAlgorithm": certificate.encryption_algorithm,
"publicKey.size": certificate.key_size,
}
if actual_certificate_details != expected_certificate_details: if encryp_algo != certificate.encryption_algorithm:
failed_log = f"SSL certificate `{certificate.certificate_name}` is not configured properly:" self.result.is_failure(f"{certificate} - incorrect encryption algorithm - Expected: {certificate.encryption_algorithm} Actual: {encryp_algo}")
failed_log += get_failed_logs(expected_certificate_details, actual_certificate_details)
self.result.is_failure(f"{failed_log}\n") if key_size != certificate.key_size:
self.result.is_failure(f"{certificate} - incorrect public key - Expected: {certificate.key_size} Actual: {key_size}")
class VerifyBannerLogin(AntaTest): class VerifyBannerLogin(AntaTest):
@ -502,14 +472,15 @@ class VerifyBannerLogin(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyBannerLogin.""" """Main test function for VerifyBannerLogin."""
login_banner = self.instance_commands[0].json_output["loginBanner"] self.result.is_success()
if not (login_banner := self.instance_commands[0].json_output["loginBanner"]):
self.result.is_failure("Login banner is not configured")
return
# Remove leading and trailing whitespaces from each line # Remove leading and trailing whitespaces from each line
cleaned_banner = "\n".join(line.strip() for line in self.inputs.login_banner.split("\n")) cleaned_banner = "\n".join(line.strip() for line in self.inputs.login_banner.split("\n"))
if login_banner != cleaned_banner: if login_banner != cleaned_banner:
self.result.is_failure(f"Expected `{cleaned_banner}` as the login banner, but found `{login_banner}` instead.") self.result.is_failure(f"Incorrect login banner configured - Expected: {cleaned_banner} Actual: {login_banner}")
else:
self.result.is_success()
class VerifyBannerMotd(AntaTest): class VerifyBannerMotd(AntaTest):
@ -544,23 +515,34 @@ class VerifyBannerMotd(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyBannerMotd.""" """Main test function for VerifyBannerMotd."""
motd_banner = self.instance_commands[0].json_output["motd"] self.result.is_success()
if not (motd_banner := self.instance_commands[0].json_output["motd"]):
self.result.is_failure("MOTD banner is not configured")
return
# Remove leading and trailing whitespaces from each line # Remove leading and trailing whitespaces from each line
cleaned_banner = "\n".join(line.strip() for line in self.inputs.motd_banner.split("\n")) cleaned_banner = "\n".join(line.strip() for line in self.inputs.motd_banner.split("\n"))
if motd_banner != cleaned_banner: if motd_banner != cleaned_banner:
self.result.is_failure(f"Expected `{cleaned_banner}` as the motd banner, but found `{motd_banner}` instead.") self.result.is_failure(f"Incorrect MOTD banner configured - Expected: {cleaned_banner} Actual: {motd_banner}")
else:
self.result.is_success()
class VerifyIPv4ACL(AntaTest): class VerifyIPv4ACL(AntaTest):
"""Verifies the configuration of IPv4 ACLs. """Verifies the configuration of IPv4 ACLs.
This test performs the following checks for each IPv4 ACL:
1. Validates that the IPv4 ACL is properly configured.
2. Validates that the sequence entries in the ACL are correctly ordered.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if an IPv4 ACL is configured with the correct sequence entries. * Success: If all of the following occur:
* Failure: The test will fail if an IPv4 ACL is not configured or entries are not in sequence. - The IPv4 ACL is configured with all expected entries.
- The sequence entries are correctly configured.
* Failure: If any of the following occur:
- The IPv4 ACL is not configured.
- Any IPv4 ACL entry is not configured.
- The action for any entry does not match the expected input.
Examples Examples
-------- --------
@ -586,65 +568,37 @@ class VerifyIPv4ACL(AntaTest):
""" """
categories: ClassVar[list[str]] = ["security"] categories: ClassVar[list[str]] = ["security"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show ip access-lists {acl}", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ip access-lists", revision=1)]
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyIPv4ACL test.""" """Input model for the VerifyIPv4ACL test."""
ipv4_access_lists: list[IPv4ACL] ipv4_access_lists: list[ACL]
"""List of IPv4 ACLs to verify.""" """List of IPv4 ACLs to verify."""
IPv4ACL: ClassVar[type[ACL]] = ACL
class IPv4ACL(BaseModel): """To maintain backward compatibility."""
"""Model for an IPv4 ACL."""
name: str
"""Name of IPv4 ACL."""
entries: list[IPv4ACLEntry]
"""List of IPv4 ACL entries."""
class IPv4ACLEntry(BaseModel):
"""Model for an IPv4 ACL entry."""
sequence: int = Field(ge=1, le=4294967295)
"""Sequence number of an ACL entry."""
action: str
"""Action of an ACL entry."""
def render(self, template: AntaTemplate) -> list[AntaCommand]:
"""Render the template for each input ACL."""
return [template.render(acl=acl.name) for acl in self.inputs.ipv4_access_lists]
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyIPv4ACL.""" """Main test function for VerifyIPv4ACL."""
self.result.is_success() self.result.is_success()
for command_output, acl in zip(self.instance_commands, self.inputs.ipv4_access_lists):
# Collecting input ACL details
acl_name = command_output.params.acl
# Retrieve the expected entries from the inputs
acl_entries = acl.entries
# Check if ACL is configured if not (command_output := self.instance_commands[0].json_output["aclList"]):
ipv4_acl_list = command_output.json_output["aclList"] self.result.is_failure("No Access Control List (ACL) configured")
if not ipv4_acl_list: return
self.result.is_failure(f"{acl_name}: Not found")
for access_list in self.inputs.ipv4_access_lists:
if not (access_list_output := get_item(command_output, "name", access_list.name)):
self.result.is_failure(f"{access_list} - Not configured")
continue continue
# Check if the sequence number is configured and has the correct action applied for entry in access_list.entries:
failed_log = f"{acl_name}:\n" if not (actual_entry := get_item(access_list_output["sequence"], "sequenceNumber", entry.sequence)):
for acl_entry in acl_entries: self.result.is_failure(f"{access_list} {entry} - Not configured")
acl_seq = acl_entry.sequence
acl_action = acl_entry.action
if (actual_entry := get_item(ipv4_acl_list[0]["sequence"], "sequenceNumber", acl_seq)) is None:
failed_log += f"Sequence number `{acl_seq}` is not found.\n"
continue continue
if actual_entry["text"] != acl_action: if (act_action := actual_entry["text"]) != entry.action:
failed_log += f"Expected `{acl_action}` as sequence number {acl_seq} action but found `{actual_entry['text']}` instead.\n" self.result.is_failure(f"{access_list} {entry} - action mismatch - Expected: {entry.action} Actual: {act_action}")
if failed_log != f"{acl_name}:\n":
self.result.is_failure(f"{failed_log}")
class VerifyIPSecConnHealth(AntaTest): class VerifyIPSecConnHealth(AntaTest):
@ -670,12 +624,11 @@ class VerifyIPSecConnHealth(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyIPSecConnHealth.""" """Main test function for VerifyIPSecConnHealth."""
self.result.is_success() self.result.is_success()
failure_conn = []
command_output = self.instance_commands[0].json_output["connections"] command_output = self.instance_commands[0].json_output["connections"]
# Check if IP security connection is configured # Check if IP security connection is configured
if not command_output: if not command_output:
self.result.is_failure("No IPv4 security connection configured.") self.result.is_failure("No IPv4 security connection configured")
return return
# Iterate over all ipsec connections # Iterate over all ipsec connections
@ -685,10 +638,7 @@ class VerifyIPSecConnHealth(AntaTest):
source = conn_data.get("saddr") source = conn_data.get("saddr")
destination = conn_data.get("daddr") destination = conn_data.get("daddr")
vrf = conn_data.get("tunnelNs") vrf = conn_data.get("tunnelNs")
failure_conn.append(f"source:{source} destination:{destination} vrf:{vrf}") self.result.is_failure(f"Source: {source} Destination: {destination} VRF: {vrf} - IPv4 security connection not established")
if failure_conn:
failure_msg = "\n".join(failure_conn)
self.result.is_failure(f"The following IPv4 security connections are not established:\n{failure_msg}.")
class VerifySpecificIPSecConn(AntaTest): class VerifySpecificIPSecConn(AntaTest):
@ -763,9 +713,7 @@ class VerifySpecificIPSecConn(AntaTest):
if state != "Established": if state != "Established":
source = conn_data.get("saddr") source = conn_data.get("saddr")
destination = conn_data.get("daddr") destination = conn_data.get("daddr")
self.result.is_failure( self.result.is_failure(f"{input_peer} Source: {source} Destination: {destination} - Connection down - Expected: Established Actual: {state}")
f"{input_peer} Source: {source} Destination: {destination} - Connection down - Expected: Established, Actual: {state}"
)
continue continue
# Create a dictionary of existing connections for faster lookup # Create a dictionary of existing connections for faster lookup
@ -780,7 +728,7 @@ class VerifySpecificIPSecConn(AntaTest):
if (source_input, destination_input, vrf) in existing_connections: if (source_input, destination_input, vrf) in existing_connections:
existing_state = existing_connections[(source_input, destination_input, vrf)] existing_state = existing_connections[(source_input, destination_input, vrf)]
if existing_state != "Established": if existing_state != "Established":
failure = f"Expected: Established, Actual: {existing_state}" failure = f"Expected: Established Actual: {existing_state}"
self.result.is_failure(f"{input_peer} Source: {source_input} Destination: {destination_input} - Connection down - {failure}") self.result.is_failure(f"{input_peer} Source: {source_input} Destination: {destination_input} - Connection down - {failure}")
else: else:
self.result.is_failure(f"{input_peer} Source: {source_input} Destination: {destination_input} - Connection not found.") self.result.is_failure(f"{input_peer} Source: {source_input} Destination: {destination_input} - Connection not found.")
@ -812,6 +760,6 @@ class VerifyHardwareEntropy(AntaTest):
# Check if hardware entropy generation is enabled. # Check if hardware entropy generation is enabled.
if not command_output.get("hardwareEntropyEnabled"): if not command_output.get("hardwareEntropyEnabled"):
self.result.is_failure("Hardware entropy generation is disabled.") self.result.is_failure("Hardware entropy generation is disabled")
else: else:
self.result.is_success() self.result.is_success()
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the EOS various services tests.""" """Module related to the EOS various services tests."""
@ -9,12 +9,9 @@ from __future__ import annotations
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from typing import ClassVar from typing import ClassVar
from pydantic import BaseModel from anta.input_models.services import DnsServer, ErrDisableReason, ErrdisableRecovery
from anta.custom_types import ErrDisableInterval, ErrDisableReasons
from anta.input_models.services import DnsServer
from anta.models import AntaCommand, AntaTemplate, AntaTest from anta.models import AntaCommand, AntaTemplate, AntaTest
from anta.tools import get_dict_superset, get_failed_logs from anta.tools import get_dict_superset, get_item
class VerifyHostname(AntaTest): class VerifyHostname(AntaTest):
@ -49,7 +46,7 @@ class VerifyHostname(AntaTest):
hostname = self.instance_commands[0].json_output["hostname"] hostname = self.instance_commands[0].json_output["hostname"]
if hostname != self.inputs.hostname: if hostname != self.inputs.hostname:
self.result.is_failure(f"Expected `{self.inputs.hostname}` as the hostname, but found `{hostname}` instead.") self.result.is_failure(f"Incorrect Hostname - Expected: {self.inputs.hostname} Actual: {hostname}")
else: else:
self.result.is_success() self.result.is_success()
@ -166,12 +163,24 @@ class VerifyDNSServers(AntaTest):
class VerifyErrdisableRecovery(AntaTest): class VerifyErrdisableRecovery(AntaTest):
"""Verifies the errdisable recovery reason, status, and interval. """Verifies the error disable recovery functionality.
This test performs the following checks for each specified error disable reason:
1. Verifying if the specified error disable reason exists.
2. Checking if the recovery timer status matches the expected enabled/disabled state.
3. Validating that the timer interval matches the configured value.
Expected Results Expected Results
---------------- ----------------
* Success: The test will pass if the errdisable recovery reason status is enabled and the interval matches the input. * Success: The test will pass if:
* Failure: The test will fail if the errdisable recovery reason is not found, the status is not enabled, or the interval does not match the input. - The specified error disable reason exists.
- The recovery timer status matches the expected state.
- The timer interval matches the configured value.
* Failure: The test will fail if:
- The specified error disable reason does not exist.
- The recovery timer status does not match the expected state.
- The timer interval does not match the configured value.
Examples Examples
-------- --------
@ -181,8 +190,10 @@ class VerifyErrdisableRecovery(AntaTest):
reasons: reasons:
- reason: acl - reason: acl
interval: 30 interval: 30
status: Enabled
- reason: bpduguard - reason: bpduguard
interval: 30 interval: 30
status: Enabled
``` ```
""" """
@ -193,44 +204,35 @@ class VerifyErrdisableRecovery(AntaTest):
class Input(AntaTest.Input): class Input(AntaTest.Input):
"""Input model for the VerifyErrdisableRecovery test.""" """Input model for the VerifyErrdisableRecovery test."""
reasons: list[ErrDisableReason] reasons: list[ErrdisableRecovery]
"""List of errdisable reasons.""" """List of errdisable reasons."""
ErrDisableReason: ClassVar[type[ErrdisableRecovery]] = ErrDisableReason
class ErrDisableReason(BaseModel):
"""Model for an errdisable reason."""
reason: ErrDisableReasons
"""Type or name of the errdisable reason."""
interval: ErrDisableInterval
"""Interval of the reason in seconds."""
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyErrdisableRecovery.""" """Main test function for VerifyErrdisableRecovery."""
command_output = self.instance_commands[0].text_output
self.result.is_success() self.result.is_success()
# Skip header and last empty line
command_output = self.instance_commands[0].text_output.split("\n")[2:-1]
# Collecting the actual errdisable reasons for faster lookup
errdisable_reasons = [
{"reason": reason, "status": status, "interval": interval}
for line in command_output
if line.strip() # Skip empty lines
for reason, status, interval in [line.split(None, 2)] # Unpack split result
]
for error_reason in self.inputs.reasons: for error_reason in self.inputs.reasons:
input_reason = error_reason.reason if not (reason_output := get_item(errdisable_reasons, "reason", error_reason.reason)):
input_interval = error_reason.interval self.result.is_failure(f"{error_reason} - Not found")
reason_found = False continue
# Skip header and last empty line if not all(
lines = command_output.split("\n")[2:-1] [
for line in lines: error_reason.status == (act_status := reason_output["status"]),
# Skip empty lines error_reason.interval == (act_interval := int(reason_output["interval"])),
if not line.strip(): ]
continue ):
# Split by first two whitespaces self.result.is_failure(f"{error_reason} - Incorrect configuration - Status: {act_status} Interval: {act_interval}")
reason, status, interval = line.split(None, 2)
if reason != input_reason:
continue
reason_found = True
actual_reason_data = {"interval": interval, "status": status}
expected_reason_data = {"interval": str(input_interval), "status": "Enabled"}
if actual_reason_data != expected_reason_data:
failed_log = get_failed_logs(expected_reason_data, actual_reason_data)
self.result.is_failure(f"`{input_reason}`:{failed_log}\n")
break
if not reason_found:
self.result.is_failure(f"`{input_reason}`: Not found.\n")
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the EOS various SNMP tests.""" """Module related to the EOS various SNMP tests."""
@ -9,7 +9,10 @@ from __future__ import annotations
from typing import TYPE_CHECKING, ClassVar, get_args from typing import TYPE_CHECKING, ClassVar, get_args
from pydantic import field_validator
from anta.custom_types import PositiveInteger, SnmpErrorCounter, SnmpPdu from anta.custom_types import PositiveInteger, SnmpErrorCounter, SnmpPdu
from anta.input_models.snmp import SnmpGroup, SnmpHost, SnmpSourceInterface, SnmpUser
from anta.models import AntaCommand, AntaTest from anta.models import AntaCommand, AntaTest
from anta.tools import get_value from anta.tools import get_value
@ -18,7 +21,7 @@ if TYPE_CHECKING:
class VerifySnmpStatus(AntaTest): class VerifySnmpStatus(AntaTest):
"""Verifies whether the SNMP agent is enabled in a specified VRF. """Verifies if the SNMP agent is enabled.
Expected Results Expected Results
---------------- ----------------
@ -34,7 +37,6 @@ class VerifySnmpStatus(AntaTest):
``` ```
""" """
description = "Verifies if the SNMP agent is enabled."
categories: ClassVar[list[str]] = ["snmp"] categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp", revision=1)]
@ -47,15 +49,14 @@ class VerifySnmpStatus(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpStatus.""" """Main test function for VerifySnmpStatus."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
if command_output["enabled"] and self.inputs.vrf in command_output["vrfs"]["snmpVrfs"]: if not (command_output["enabled"] and self.inputs.vrf in command_output["vrfs"]["snmpVrfs"]):
self.result.is_success() self.result.is_failure(f"VRF: {self.inputs.vrf} - SNMP agent disabled")
else:
self.result.is_failure(f"SNMP agent disabled in vrf {self.inputs.vrf}")
class VerifySnmpIPv4Acl(AntaTest): class VerifySnmpIPv4Acl(AntaTest):
"""Verifies if the SNMP agent has the right number IPv4 ACL(s) configured for a specified VRF. """Verifies if the SNMP agent has IPv4 ACL(s) configured.
Expected Results Expected Results
---------------- ----------------
@ -72,7 +73,6 @@ class VerifySnmpIPv4Acl(AntaTest):
``` ```
""" """
description = "Verifies if the SNMP agent has IPv4 ACL(s) configured."
categories: ClassVar[list[str]] = ["snmp"] categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp ipv4 access-list summary", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp ipv4 access-list summary", revision=1)]
@ -87,23 +87,22 @@ class VerifySnmpIPv4Acl(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpIPv4Acl.""" """Main test function for VerifySnmpIPv4Acl."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
ipv4_acl_list = command_output["ipAclList"]["aclList"] ipv4_acl_list = command_output["ipAclList"]["aclList"]
ipv4_acl_number = len(ipv4_acl_list) ipv4_acl_number = len(ipv4_acl_list)
if ipv4_acl_number != self.inputs.number: if ipv4_acl_number != self.inputs.number:
self.result.is_failure(f"Expected {self.inputs.number} SNMP IPv4 ACL(s) in vrf {self.inputs.vrf} but got {ipv4_acl_number}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Incorrect SNMP IPv4 ACL(s) - Expected: {self.inputs.number} Actual: {ipv4_acl_number}")
return return
not_configured_acl = [acl["name"] for acl in ipv4_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]] not_configured_acl = [acl["name"] for acl in ipv4_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]]
if not_configured_acl: if not_configured_acl:
self.result.is_failure(f"SNMP IPv4 ACL(s) not configured or active in vrf {self.inputs.vrf}: {not_configured_acl}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Following SNMP IPv4 ACL(s) not configured or active: {', '.join(not_configured_acl)}")
else:
self.result.is_success()
class VerifySnmpIPv6Acl(AntaTest): class VerifySnmpIPv6Acl(AntaTest):
"""Verifies if the SNMP agent has the right number IPv6 ACL(s) configured for a specified VRF. """Verifies if the SNMP agent has IPv6 ACL(s) configured.
Expected Results Expected Results
---------------- ----------------
@ -120,7 +119,6 @@ class VerifySnmpIPv6Acl(AntaTest):
``` ```
""" """
description = "Verifies if the SNMP agent has IPv6 ACL(s) configured."
categories: ClassVar[list[str]] = ["snmp"] categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp ipv6 access-list summary", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp ipv6 access-list summary", revision=1)]
@ -136,18 +134,17 @@ class VerifySnmpIPv6Acl(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpIPv6Acl.""" """Main test function for VerifySnmpIPv6Acl."""
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
self.result.is_success()
ipv6_acl_list = command_output["ipv6AclList"]["aclList"] ipv6_acl_list = command_output["ipv6AclList"]["aclList"]
ipv6_acl_number = len(ipv6_acl_list) ipv6_acl_number = len(ipv6_acl_list)
if ipv6_acl_number != self.inputs.number: if ipv6_acl_number != self.inputs.number:
self.result.is_failure(f"Expected {self.inputs.number} SNMP IPv6 ACL(s) in vrf {self.inputs.vrf} but got {ipv6_acl_number}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Incorrect SNMP IPv6 ACL(s) - Expected: {self.inputs.number} Actual: {ipv6_acl_number}")
return return
acl_not_configured = [acl["name"] for acl in ipv6_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]] acl_not_configured = [acl["name"] for acl in ipv6_acl_list if self.inputs.vrf not in acl["configuredVrfs"] or self.inputs.vrf not in acl["activeVrfs"]]
if acl_not_configured: if acl_not_configured:
self.result.is_failure(f"SNMP IPv6 ACL(s) not configured or active in vrf {self.inputs.vrf}: {acl_not_configured}") self.result.is_failure(f"VRF: {self.inputs.vrf} - Following SNMP IPv6 ACL(s) not configured or active: {', '.join(acl_not_configured)}")
else:
self.result.is_success()
class VerifySnmpLocation(AntaTest): class VerifySnmpLocation(AntaTest):
@ -179,16 +176,15 @@ class VerifySnmpLocation(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpLocation.""" """Main test function for VerifySnmpLocation."""
self.result.is_success()
# Verifies the SNMP location is configured. # Verifies the SNMP location is configured.
if not (location := get_value(self.instance_commands[0].json_output, "location.location")): if not (location := get_value(self.instance_commands[0].json_output, "location.location")):
self.result.is_failure("SNMP location is not configured.") self.result.is_failure("SNMP location is not configured")
return return
# Verifies the expected SNMP location. # Verifies the expected SNMP location.
if location != self.inputs.location: if location != self.inputs.location:
self.result.is_failure(f"Expected `{self.inputs.location}` as the location, but found `{location}` instead.") self.result.is_failure(f"Incorrect SNMP location - Expected: {self.inputs.location} Actual: {location}")
else:
self.result.is_success()
class VerifySnmpContact(AntaTest): class VerifySnmpContact(AntaTest):
@ -220,16 +216,15 @@ class VerifySnmpContact(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpContact.""" """Main test function for VerifySnmpContact."""
self.result.is_success()
# Verifies the SNMP contact is configured. # Verifies the SNMP contact is configured.
if not (contact := get_value(self.instance_commands[0].json_output, "contact.contact")): if not (contact := get_value(self.instance_commands[0].json_output, "contact.contact")):
self.result.is_failure("SNMP contact is not configured.") self.result.is_failure("SNMP contact is not configured")
return return
# Verifies the expected SNMP contact. # Verifies the expected SNMP contact.
if contact != self.inputs.contact: if contact != self.inputs.contact:
self.result.is_failure(f"Expected `{self.inputs.contact}` as the contact, but found `{contact}` instead.") self.result.is_failure(f"Incorrect SNMP contact - Expected: {self.inputs.contact} Actual: {contact}")
else:
self.result.is_success()
class VerifySnmpPDUCounters(AntaTest): class VerifySnmpPDUCounters(AntaTest):
@ -266,25 +261,24 @@ class VerifySnmpPDUCounters(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpPDUCounters.""" """Main test function for VerifySnmpPDUCounters."""
self.result.is_success()
snmp_pdus = self.inputs.pdus snmp_pdus = self.inputs.pdus
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
# Verify SNMP PDU counters. # Verify SNMP PDU counters.
if not (pdu_counters := get_value(command_output, "counters")): if not (pdu_counters := get_value(command_output, "counters")):
self.result.is_failure("SNMP counters not found.") self.result.is_failure("SNMP counters not found")
return return
# If no SNMP PDUs are provided, all PDU counters are checked. # If no SNMP PDUs are provided, all PDU counters are checked.
if not snmp_pdus: if not snmp_pdus:
snmp_pdus = list(get_args(SnmpPdu)) snmp_pdus = list(get_args(SnmpPdu))
failures = {pdu: value for pdu in snmp_pdus if (value := pdu_counters.get(pdu, "Not Found")) == "Not Found" or value == 0} failures = {pdu for pdu in snmp_pdus if (value := pdu_counters.get(pdu, "Not Found")) == "Not Found" or value == 0}
# Check if any failures # Check if any failures
if not failures: if failures:
self.result.is_success() self.result.is_failure(f"The following SNMP PDU counters are not found or have zero PDU counters: {', '.join(sorted(failures))}")
else:
self.result.is_failure(f"The following SNMP PDU counters are not found or have zero PDU counters:\n{failures}")
class VerifySnmpErrorCounters(AntaTest): class VerifySnmpErrorCounters(AntaTest):
@ -320,6 +314,7 @@ class VerifySnmpErrorCounters(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySnmpErrorCounters.""" """Main test function for VerifySnmpErrorCounters."""
self.result.is_success()
error_counters = self.inputs.error_counters error_counters = self.inputs.error_counters
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
@ -332,10 +327,400 @@ class VerifySnmpErrorCounters(AntaTest):
if not error_counters: if not error_counters:
error_counters = list(get_args(SnmpErrorCounter)) error_counters = list(get_args(SnmpErrorCounter))
error_counters_not_ok = {counter: value for counter in error_counters if (value := snmp_counters.get(counter))} error_counters_not_ok = {counter for counter in error_counters if snmp_counters.get(counter)}
# Check if any failures # Check if any failures
if not error_counters_not_ok: if error_counters_not_ok:
self.result.is_success() self.result.is_failure(f"The following SNMP error counters are not found or have non-zero error counters: {', '.join(sorted(error_counters_not_ok))}")
else:
self.result.is_failure(f"The following SNMP error counters are not found or have non-zero error counters:\n{error_counters_not_ok}")
class VerifySnmpHostLogging(AntaTest):
"""Verifies SNMP logging configurations.
This test performs the following checks:
1. SNMP logging is enabled globally.
2. For each specified SNMP host:
- Host exists in configuration.
- Host's VRF assignment matches expected value.
Expected Results
----------------
* Success: The test will pass if all of the following conditions are met:
- SNMP logging is enabled on the device.
- All specified hosts are configured with correct VRF assignments.
* Failure: The test will fail if any of the following conditions is met:
- SNMP logging is disabled on the device.
- SNMP host not found in configuration.
- Host's VRF assignment doesn't match expected value.
Examples
--------
```yaml
anta.tests.snmp:
- VerifySnmpHostLogging:
hosts:
- hostname: 192.168.1.100
vrf: default
- hostname: 192.168.1.103
vrf: MGMT
```
"""
categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifySnmpHostLogging test."""
hosts: list[SnmpHost]
"""List of SNMP hosts."""
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySnmpHostLogging."""
self.result.is_success()
command_output = self.instance_commands[0].json_output.get("logging", {})
# If SNMP logging is disabled, test fails.
if not command_output.get("loggingEnabled"):
self.result.is_failure("SNMP logging is disabled")
return
host_details = command_output.get("hosts", {})
for host in self.inputs.hosts:
hostname = str(host.hostname)
vrf = host.vrf
actual_snmp_host = host_details.get(hostname, {})
# If SNMP host is not configured on the device, test fails.
if not actual_snmp_host:
self.result.is_failure(f"{host} - Not configured")
continue
# If the VRF does not match the expected value, the test fails.
actual_vrf = "default" if (vrf_name := actual_snmp_host.get("vrf")) == "" else vrf_name
if actual_vrf != vrf:
self.result.is_failure(f"{host} - Incorrect VRF - Actual: {actual_vrf}")
class VerifySnmpUser(AntaTest):
"""Verifies the SNMP user configurations.
This test performs the following checks for each specified user:
1. User exists in SNMP configuration.
2. Group assignment is correct.
3. For SNMPv3 users only:
- Authentication type matches (if specified)
- Privacy type matches (if specified)
Expected Results
----------------
* Success: If all of the following conditions are met:
- All users exist with correct group assignments.
- SNMPv3 authentication and privacy types match specified values.
* Failure: If any of the following occur:
- User not found in SNMP configuration.
- Incorrect group assignment.
- For SNMPv3: Mismatched authentication or privacy types.
Examples
--------
```yaml
anta.tests.snmp:
- VerifySnmpUser:
snmp_users:
- username: test
group_name: test_group
version: v3
auth_type: MD5
priv_type: AES-128
```
"""
categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp user", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifySnmpUser test."""
snmp_users: list[SnmpUser]
"""List of SNMP users."""
@field_validator("snmp_users")
@classmethod
def validate_snmp_users(cls, snmp_users: list[SnmpUser]) -> list[SnmpUser]:
"""Validate that 'auth_type' or 'priv_type' field is provided in each SNMPv3 user."""
for user in snmp_users:
if user.version == "v3" and not (user.auth_type or user.priv_type):
msg = f"{user} 'auth_type' or 'priv_type' field is required with 'version: v3'"
raise ValueError(msg)
return snmp_users
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySnmpUser."""
self.result.is_success()
for user in self.inputs.snmp_users:
# Verify SNMP user details.
if not (user_details := get_value(self.instance_commands[0].json_output, f"usersByVersion.{user.version}.users.{user.username}")):
self.result.is_failure(f"{user} - Not found")
continue
if user.group_name != (act_group := user_details.get("groupName", "Not Found")):
self.result.is_failure(f"{user} - Incorrect user group - Actual: {act_group}")
if user.version == "v3":
if user.auth_type and (act_auth_type := get_value(user_details, "v3Params.authType", "Not Found")) != user.auth_type:
self.result.is_failure(f"{user} - Incorrect authentication type - Expected: {user.auth_type} Actual: {act_auth_type}")
if user.priv_type and (act_encryption := get_value(user_details, "v3Params.privType", "Not Found")) != user.priv_type:
self.result.is_failure(f"{user} - Incorrect privacy type - Expected: {user.priv_type} Actual: {act_encryption}")
class VerifySnmpNotificationHost(AntaTest):
"""Verifies the SNMP notification host(s) (SNMP manager) configurations.
This test performs the following checks for each specified host:
1. Verifies that the SNMP host(s) is configured on the device.
2. Verifies that the notification type ("trap" or "inform") matches the expected value.
3. Ensures that the UDP port provided matches the expected value.
4. Ensures the following depending on SNMP version:
- For SNMP version v1/v2c, a valid community string is set and matches the expected value.
- For SNMP version v3, a valid user field is set and matches the expected value.
Expected Results
----------------
* Success: The test will pass if all of the following conditions are met:
- The SNMP host(s) is configured on the device.
- The notification type ("trap" or "inform") and UDP port match the expected value.
- Depending on the SNMP version:
- For SNMP version v1/v2c, a community string is set and it matches the expected value.
- For SNMP version v3, a valid user field is set and matches the expected value.
* Failure: The test will fail if any of the following conditions is met:
- The SNMP host(s) is not configured on the device.
- The notification type ("trap" or "inform") or UDP port do not matches the expected value.
- Ensures the following depending on SNMP version:
- For SNMP version v1/v2c, a community string is not matches the expected value.
- For SNMP version v3, an user field is not matches the expected value.
Examples
--------
```yaml
anta.tests.snmp:
- VerifySnmpNotificationHost:
notification_hosts:
- hostname: spine
vrf: default
notification_type: trap
version: v1
udp_port: 162
community_string: public
- hostname: 192.168.1.100
vrf: default
notification_type: trap
version: v3
udp_port: 162
user: public
```
"""
categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp notification host", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifySnmpNotificationHost test."""
notification_hosts: list[SnmpHost]
"""List of SNMP host(s)."""
@field_validator("notification_hosts")
@classmethod
def validate_notification_hosts(cls, notification_hosts: list[SnmpHost]) -> list[SnmpHost]:
"""Validate that all required fields are provided in each SNMP Notification Host."""
for host in notification_hosts:
if host.version is None:
msg = f"{host}; 'version' field missing in the input"
raise ValueError(msg)
if host.version in ["v1", "v2c"] and host.community_string is None:
msg = f"{host} Version: {host.version}; 'community_string' field missing in the input"
raise ValueError(msg)
if host.version == "v3" and host.user is None:
msg = f"{host} Version: {host.version}; 'user' field missing in the input"
raise ValueError(msg)
return notification_hosts
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySnmpNotificationHost."""
self.result.is_success()
# If SNMP is not configured, test fails.
if not (snmp_hosts := get_value(self.instance_commands[0].json_output, "hosts")):
self.result.is_failure("No SNMP host is configured")
return
for host in self.inputs.notification_hosts:
vrf = "" if host.vrf == "default" else host.vrf
hostname = str(host.hostname)
notification_type = host.notification_type
version = host.version
udp_port = host.udp_port
community_string = host.community_string
user = host.user
default_value = "Not Found"
host_details = next(
(host for host in snmp_hosts if (host.get("hostname") == hostname and host.get("protocolVersion") == version and host.get("vrf") == vrf)), None
)
# If expected SNMP host is not configured with the specified protocol version, test fails.
if not host_details:
self.result.is_failure(f"{host} Version: {version} - Not configured")
continue
# If actual notification type does not match the expected value, test fails.
if notification_type != (actual_notification_type := get_value(host_details, "notificationType", default_value)):
self.result.is_failure(f"{host} - Incorrect notification type - Expected: {notification_type} Actual: {actual_notification_type}")
# If actual UDP port does not match the expected value, test fails.
if udp_port != (actual_udp_port := get_value(host_details, "port", default_value)):
self.result.is_failure(f"{host} - Incorrect UDP port - Expected: {udp_port} Actual: {actual_udp_port}")
user_found = user != (actual_user := get_value(host_details, "v3Params.user", default_value))
version_user_check = (version == "v3", user_found)
# If SNMP protocol version is v1 or v2c and actual community string does not match the expected value, test fails.
if version in ["v1", "v2c"] and community_string != (actual_community_string := get_value(host_details, "v1v2cParams.communityString", default_value)):
self.result.is_failure(f"{host} Version: {version} - Incorrect community string - Expected: {community_string} Actual: {actual_community_string}")
# If SNMP protocol version is v3 and actual user does not match the expected value, test fails.
elif all(version_user_check):
self.result.is_failure(f"{host} Version: {version} - Incorrect user - Expected: {user} Actual: {actual_user}")
class VerifySnmpSourceInterface(AntaTest):
"""Verifies SNMP source interfaces.
This test performs the following checks:
1. Verifies that source interface(s) are configured for SNMP.
2. For each specified source interface:
- Interface is configured in the specified VRF.
Expected Results
----------------
* Success: The test will pass if the provided SNMP source interface(s) are configured in their specified VRF.
* Failure: The test will fail if any of the provided SNMP source interface(s) are NOT configured in their specified VRF.
Examples
--------
```yaml
anta.tests.snmp:
- VerifySnmpSourceInterface:
interfaces:
- interface: Ethernet1
vrf: default
- interface: Management0
vrf: MGMT
```
"""
categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifySnmpSourceInterface test."""
interfaces: list[SnmpSourceInterface]
"""List of source interfaces."""
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySnmpSourceInterface."""
self.result.is_success()
command_output = self.instance_commands[0].json_output.get("srcIntf", {})
if not (interface_output := command_output.get("sourceInterfaces")):
self.result.is_failure("SNMP source interface(s) not configured")
return
for interface_details in self.inputs.interfaces:
# If the source interface is not configured, or if it does not match the expected value, the test fails.
if not (actual_interface := interface_output.get(interface_details.vrf)):
self.result.is_failure(f"{interface_details} - Not configured")
elif actual_interface != interface_details.interface:
self.result.is_failure(f"{interface_details} - Incorrect source interface - Actual: {actual_interface}")
class VerifySnmpGroup(AntaTest):
"""Verifies the SNMP group configurations for specified version(s).
This test performs the following checks:
1. Verifies that the SNMP group is configured for the specified version.
2. For SNMP version 3, verify that the security model matches the expected value.
3. Ensures that SNMP group configurations, including read, write, and notify views, align with version-specific requirements.
Expected Results
----------------
* Success: The test will pass if the provided SNMP group and all specified parameters are correctly configured.
* Failure: The test will fail if the provided SNMP group is not configured or if any specified parameter is not correctly configured.
Examples
--------
```yaml
anta.tests.snmp:
- VerifySnmpGroup:
snmp_groups:
- group_name: Group1
version: v1
read_view: group_read_1
write_view: group_write_1
notify_view: group_notify_1
- group_name: Group2
version: v3
read_view: group_read_2
write_view: group_write_2
notify_view: group_notify_2
authentication: priv
```
"""
categories: ClassVar[list[str]] = ["snmp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show snmp group", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifySnmpGroup test."""
snmp_groups: list[SnmpGroup]
"""List of SNMP groups."""
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySnmpGroup."""
self.result.is_success()
for group in self.inputs.snmp_groups:
# Verify SNMP group details.
if not (group_details := get_value(self.instance_commands[0].json_output, f"groups.{group.group_name}.versions.{group.version}")):
self.result.is_failure(f"{group} - Not configured")
continue
view_types = [view_type for view_type in ["read", "write", "notify"] if getattr(group, f"{view_type}_view")]
# Verify SNMP views, the read, write and notify settings aligning with version-specific requirements.
for view_type in view_types:
expected_view = getattr(group, f"{view_type}_view")
# Verify actual view is configured.
if group_details.get(f"{view_type}View") == "":
self.result.is_failure(f"{group} View: {view_type} - Not configured")
elif (act_view := group_details.get(f"{view_type}View")) != expected_view:
self.result.is_failure(f"{group} - Incorrect {view_type.title()} view - Expected: {expected_view} Actual: {act_view}")
elif not group_details.get(f"{view_type}ViewConfig"):
self.result.is_failure(f"{group} {view_type.title()} View: {expected_view} - Not configured")
# For version v3, verify that the security model aligns with the expected value.
if group.version == "v3" and (actual_auth := group_details.get("secModel")) != group.authentication:
self.result.is_failure(f"{group} - Incorrect security model - Expected: {group.authentication} Actual: {actual_auth}")
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to the EOS software tests.""" """Module related to the EOS software tests."""
@ -16,7 +16,7 @@ if TYPE_CHECKING:
class VerifyEOSVersion(AntaTest): class VerifyEOSVersion(AntaTest):
"""Verifies that the device is running one of the allowed EOS version. """Verifies the EOS version of the device.
Expected Results Expected Results
---------------- ----------------
@ -34,7 +34,6 @@ class VerifyEOSVersion(AntaTest):
``` ```
""" """
description = "Verifies the EOS version of the device."
categories: ClassVar[list[str]] = ["software"] categories: ClassVar[list[str]] = ["software"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show version", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show version", revision=1)]
@ -48,14 +47,13 @@ class VerifyEOSVersion(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyEOSVersion.""" """Main test function for VerifyEOSVersion."""
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
if command_output["version"] in self.inputs.versions: self.result.is_success()
self.result.is_success() if command_output["version"] not in self.inputs.versions:
else: self.result.is_failure(f"EOS version mismatch - Actual: {command_output['version']} not in Expected: {', '.join(self.inputs.versions)}")
self.result.is_failure(f'device is running version "{command_output["version"]}" not in expected versions: {self.inputs.versions}')
class VerifyTerminAttrVersion(AntaTest): class VerifyTerminAttrVersion(AntaTest):
"""Verifies that he device is running one of the allowed TerminAttr version. """Verifies the TerminAttr version of the device.
Expected Results Expected Results
---------------- ----------------
@ -73,7 +71,6 @@ class VerifyTerminAttrVersion(AntaTest):
``` ```
""" """
description = "Verifies the TerminAttr version of the device."
categories: ClassVar[list[str]] = ["software"] categories: ClassVar[list[str]] = ["software"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show version detail", revision=1)] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show version detail", revision=1)]
@ -87,11 +84,10 @@ class VerifyTerminAttrVersion(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyTerminAttrVersion.""" """Main test function for VerifyTerminAttrVersion."""
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
self.result.is_success()
command_output_data = command_output["details"]["packages"]["TerminAttr-core"]["version"] command_output_data = command_output["details"]["packages"]["TerminAttr-core"]["version"]
if command_output_data in self.inputs.versions: if command_output_data not in self.inputs.versions:
self.result.is_success() self.result.is_failure(f"TerminAttr version mismatch - Actual: {command_output_data} not in Expected: {', '.join(self.inputs.versions)}")
else:
self.result.is_failure(f"device is running TerminAttr version {command_output_data} and is not in the allowed list: {self.inputs.versions}")
class VerifyEOSExtensions(AntaTest): class VerifyEOSExtensions(AntaTest):
@ -120,6 +116,7 @@ class VerifyEOSExtensions(AntaTest):
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyEOSExtensions.""" """Main test function for VerifyEOSExtensions."""
boot_extensions = [] boot_extensions = []
self.result.is_success()
show_extensions_command_output = self.instance_commands[0].json_output show_extensions_command_output = self.instance_commands[0].json_output
show_boot_extensions_command_output = self.instance_commands[1].json_output show_boot_extensions_command_output = self.instance_commands[1].json_output
installed_extensions = [ installed_extensions = [
@ -131,7 +128,7 @@ class VerifyEOSExtensions(AntaTest):
boot_extensions.append(formatted_extension) boot_extensions.append(formatted_extension)
installed_extensions.sort() installed_extensions.sort()
boot_extensions.sort() boot_extensions.sort()
if installed_extensions == boot_extensions: if installed_extensions != boot_extensions:
self.result.is_success() str_installed_extensions = ", ".join(installed_extensions) if installed_extensions else "Not found"
else: str_boot_extensions = ", ".join(boot_extensions) if boot_extensions else "Not found"
self.result.is_failure(f"Missing EOS extensions: installed {installed_extensions} / configured: {boot_extensions}") self.result.is_failure(f"EOS extensions mismatch - Installed: {str_installed_extensions} Configured: {str_boot_extensions}")
@ -1,4 +1,4 @@
# Copyright (c) 2023-2024 Arista Networks, Inc. # Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
"""Module related to various Spanning Tree Protocol (STP) tests.""" """Module related to various Spanning Tree Protocol (STP) tests."""
@ -7,7 +7,7 @@
# mypy: disable-error-code=attr-defined # mypy: disable-error-code=attr-defined
from __future__ import annotations from __future__ import annotations
from typing import Any, ClassVar, Literal from typing import ClassVar, Literal
from pydantic import Field from pydantic import Field
@ -54,8 +54,7 @@ class VerifySTPMode(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySTPMode.""" """Main test function for VerifySTPMode."""
not_configured = [] self.result.is_success()
wrong_stp_mode = []
for command in self.instance_commands: for command in self.instance_commands:
vlan_id = command.params.vlan vlan_id = command.params.vlan
if not ( if not (
@ -64,15 +63,9 @@ class VerifySTPMode(AntaTest):
f"spanningTreeVlanInstances.{vlan_id}.spanningTreeVlanInstance.protocol", f"spanningTreeVlanInstances.{vlan_id}.spanningTreeVlanInstance.protocol",
) )
): ):
not_configured.append(vlan_id) self.result.is_failure(f"VLAN {vlan_id} STP mode: {self.inputs.mode} - Not configured")
elif stp_mode != self.inputs.mode: elif stp_mode != self.inputs.mode:
wrong_stp_mode.append(vlan_id) self.result.is_failure(f"VLAN {vlan_id} - Incorrect STP mode - Expected: {self.inputs.mode} Actual: {stp_mode}")
if not_configured:
self.result.is_failure(f"STP mode '{self.inputs.mode}' not configured for the following VLAN(s): {not_configured}")
if wrong_stp_mode:
self.result.is_failure(f"Wrong STP mode configured for the following VLAN(s): {wrong_stp_mode}")
if not not_configured and not wrong_stp_mode:
self.result.is_success()
class VerifySTPBlockedPorts(AntaTest): class VerifySTPBlockedPorts(AntaTest):
@ -102,8 +95,8 @@ class VerifySTPBlockedPorts(AntaTest):
self.result.is_success() self.result.is_success()
else: else:
for key, value in stp_instances.items(): for key, value in stp_instances.items():
stp_instances[key] = value.pop("spanningTreeBlockedPorts") stp_block_ports = value.get("spanningTreeBlockedPorts")
self.result.is_failure(f"The following ports are blocked by STP: {stp_instances}") self.result.is_failure(f"STP Instance: {key} - Blocked ports - {', '.join(stp_block_ports)}")
class VerifySTPCounters(AntaTest): class VerifySTPCounters(AntaTest):
@ -128,14 +121,14 @@ class VerifySTPCounters(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySTPCounters.""" """Main test function for VerifySTPCounters."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
interfaces_with_errors = [
interface for interface, counters in command_output["interfaces"].items() if counters["bpduTaggedError"] or counters["bpduOtherError"] != 0 for interface, counters in command_output["interfaces"].items():
] if counters["bpduTaggedError"] != 0:
if interfaces_with_errors: self.result.is_failure(f"Interface {interface} - STP BPDU packet tagged errors count mismatch - Expected: 0 Actual: {counters['bpduTaggedError']}")
self.result.is_failure(f"The following interfaces have STP BPDU packet errors: {interfaces_with_errors}") if counters["bpduOtherError"] != 0:
else: self.result.is_failure(f"Interface {interface} - STP BPDU packet other errors count mismatch - Expected: 0 Actual: {counters['bpduOtherError']}")
self.result.is_success()
class VerifySTPForwardingPorts(AntaTest): class VerifySTPForwardingPorts(AntaTest):
@ -174,25 +167,22 @@ class VerifySTPForwardingPorts(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySTPForwardingPorts.""" """Main test function for VerifySTPForwardingPorts."""
not_configured = [] self.result.is_success()
not_forwarding = [] interfaces_state = []
for command in self.instance_commands: for command in self.instance_commands:
vlan_id = command.params.vlan vlan_id = command.params.vlan
if not (topologies := get_value(command.json_output, "topologies")): if not (topologies := get_value(command.json_output, "topologies")):
not_configured.append(vlan_id) self.result.is_failure(f"VLAN {vlan_id} - STP instance is not configured")
else: continue
interfaces_not_forwarding = [] for value in topologies.values():
for value in topologies.values(): if vlan_id and int(vlan_id) in value["vlans"]:
if vlan_id and int(vlan_id) in value["vlans"]: interfaces_state = [
interfaces_not_forwarding = [interface for interface, state in value["interfaces"].items() if state["state"] != "forwarding"] (interface, actual_state) for interface, state in value["interfaces"].items() if (actual_state := state["state"]) != "forwarding"
if interfaces_not_forwarding: ]
not_forwarding.append({f"VLAN {vlan_id}": interfaces_not_forwarding})
if not_configured: if interfaces_state:
self.result.is_failure(f"STP instance is not configured for the following VLAN(s): {not_configured}") for interface, state in interfaces_state:
if not_forwarding: self.result.is_failure(f"VLAN {vlan_id} Interface: {interface} - Invalid state - Expected: forwarding Actual: {state}")
self.result.is_failure(f"The following VLAN(s) have interface(s) that are not in a forwarding state: {not_forwarding}")
if not not_configured and not interfaces_not_forwarding:
self.result.is_success()
class VerifySTPRootPriority(AntaTest): class VerifySTPRootPriority(AntaTest):
@ -229,6 +219,7 @@ class VerifySTPRootPriority(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifySTPRootPriority.""" """Main test function for VerifySTPRootPriority."""
self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
if not (stp_instances := command_output["instances"]): if not (stp_instances := command_output["instances"]):
self.result.is_failure("No STP instances configured") self.result.is_failure("No STP instances configured")
@ -240,16 +231,15 @@ class VerifySTPRootPriority(AntaTest):
elif first_name.startswith("VL"): elif first_name.startswith("VL"):
prefix = "VL" prefix = "VL"
else: else:
self.result.is_failure(f"Unsupported STP instance type: {first_name}") self.result.is_failure(f"STP Instance: {first_name} - Unsupported STP instance type")
return return
check_instances = [f"{prefix}{instance_id}" for instance_id in self.inputs.instances] if self.inputs.instances else command_output["instances"].keys() check_instances = [f"{prefix}{instance_id}" for instance_id in self.inputs.instances] if self.inputs.instances else command_output["instances"].keys()
wrong_priority_instances = [ for instance in check_instances:
instance for instance in check_instances if get_value(command_output, f"instances.{instance}.rootBridge.priority") != self.inputs.priority if not (instance_details := get_value(command_output, f"instances.{instance}")):
] self.result.is_failure(f"Instance: {instance} - Not configured")
if wrong_priority_instances: continue
self.result.is_failure(f"The following instance(s) have the wrong STP root priority configured: {wrong_priority_instances}") if (priority := get_value(instance_details, "rootBridge.priority")) != self.inputs.priority:
else: self.result.is_failure(f"STP Instance: {instance} - Incorrect root priority - Expected: {self.inputs.priority} Actual: {priority}")
self.result.is_success()
class VerifyStpTopologyChanges(AntaTest): class VerifyStpTopologyChanges(AntaTest):
@ -282,8 +272,7 @@ class VerifyStpTopologyChanges(AntaTest):
@AntaTest.anta_test @AntaTest.anta_test
def test(self) -> None: def test(self) -> None:
"""Main test function for VerifyStpTopologyChanges.""" """Main test function for VerifyStpTopologyChanges."""
failures: dict[str, Any] = {"topologies": {}} self.result.is_success()
command_output = self.instance_commands[0].json_output command_output = self.instance_commands[0].json_output
stp_topologies = command_output.get("topologies", {}) stp_topologies = command_output.get("topologies", {})
@ -292,20 +281,78 @@ class VerifyStpTopologyChanges(AntaTest):
# Verify the STP topology(s). # Verify the STP topology(s).
if not stp_topologies: if not stp_topologies:
self.result.is_failure("STP is not configured.") self.result.is_failure("STP is not configured")
return return
# Verifies the number of changes across all interfaces # Verifies the number of changes across all interfaces
for topology, topology_details in stp_topologies.items(): for topology, topology_details in stp_topologies.items():
interfaces = { for interface, details in topology_details.get("interfaces", {}).items():
interface: {"Number of changes": num_of_changes} if (num_of_changes := details.get("numChanges")) > self.inputs.threshold:
for interface, details in topology_details.get("interfaces", {}).items() self.result.is_failure(
if (num_of_changes := details.get("numChanges")) > self.inputs.threshold f"Topology: {topology} Interface: {interface} - Number of changes not within the threshold - Expected: "
} f"{self.inputs.threshold} Actual: {num_of_changes}"
if interfaces: )
failures["topologies"][topology] = interfaces
if failures["topologies"]:
self.result.is_failure(f"The following STP topologies are not configured or number of changes not within the threshold:\n{failures}") class VerifySTPDisabledVlans(AntaTest):
else: """Verifies the STP disabled VLAN(s).
self.result.is_success()
This test performs the following checks:
1. Verifies that the STP is configured.
2. Verifies that the specified VLAN(s) exist on the device.
3. Verifies that the STP is disabled for the specified VLAN(s).
Expected Results
----------------
* Success: The test will pass if all of the following conditions are met:
- STP is properly configured on the device.
- The specified VLAN(s) exist on the device.
- STP is confirmed to be disabled for all the specified VLAN(s).
* Failure: The test will fail if any of the following condition is met:
- STP is not configured on the device.
- The specified VLAN(s) do not exist on the device.
- STP is enabled for any of the specified VLAN(s).
Examples
--------
```yaml
anta.tests.stp:
- VerifySTPDisabledVlans:
vlans:
- 6
- 4094
```
"""
categories: ClassVar[list[str]] = ["stp"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show spanning-tree vlan detail", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifySTPDisabledVlans test."""
vlans: list[Vlan]
"""List of STP disabled VLAN(s)."""
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifySTPDisabledVlans."""
self.result.is_success()
command_output = self.instance_commands[0].json_output
stp_vlan_instances = command_output.get("spanningTreeVlanInstances", {})
# If the spanningTreeVlanInstances detail are not found in the command output, the test fails.
if not stp_vlan_instances:
self.result.is_failure("STP is not configured")
return
actual_vlans = list(stp_vlan_instances)
# If the specified VLAN is not present on the device, STP is enabled for the VLAN(s), test fails.
for vlan in self.inputs.vlans:
if str(vlan) not in actual_vlans:
self.result.is_failure(f"VLAN: {vlan} - Not configured")
continue
if stp_vlan_instances.get(str(vlan)):
self.result.is_failure(f"VLAN: {vlan} - STP is enabled")


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Test functions related to various STUN settings."""
@@ -76,7 +76,7 @@ class VerifyStunClientTranslation(AntaTest):
# If no bindings are found for the STUN client, mark the test as a failure and continue with the next client
if not bindings:
-self.result.is_failure(f"{client_input} - STUN client translation not found.")
+self.result.is_failure(f"{client_input} - STUN client translation not found")
continue
# Extract the transaction ID from the bindings
@@ -145,10 +145,10 @@ class VerifyStunServer(AntaTest):
not_running = command_output.get("pid") == 0
if status_disabled and not_running:
-self.result.is_failure("STUN server status is disabled and not running.")
+self.result.is_failure("STUN server status is disabled and not running")
elif status_disabled:
-self.result.is_failure("STUN server status is disabled.")
+self.result.is_failure("STUN server status is disabled")
elif not_running:
-self.result.is_failure("STUN server is not running.")
+self.result.is_failure("STUN server is not running")
else:
self.result.is_success()


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module related to system-level features and protocols tests."""
@@ -8,23 +8,33 @@
from __future__ import annotations
import re
-from typing import TYPE_CHECKING, ClassVar
-from anta.custom_types import PositiveInteger
-from anta.input_models.system import NTPServer
+from typing import TYPE_CHECKING, Any, ClassVar
+from pydantic import model_validator
+from anta.custom_types import Hostname, PositiveInteger
+from anta.input_models.system import NTPPool, NTPServer
from anta.models import AntaCommand, AntaTest
from anta.tools import get_value
if TYPE_CHECKING:
+import sys
+from ipaddress import IPv4Address
from anta.models import AntaTemplate
+if sys.version_info >= (3, 11):
+from typing import Self
+else:
+from typing_extensions import Self
CPU_IDLE_THRESHOLD = 25
MEMORY_THRESHOLD = 0.25
DISK_SPACE_THRESHOLD = 75

class VerifyUptime(AntaTest):
-"""Verifies if the device uptime is higher than the provided minimum uptime value.
+"""Verifies the device uptime.
Expected Results
----------------
@@ -40,7 +50,6 @@ class VerifyUptime(AntaTest):
```
"""
-description = "Verifies the device uptime."
categories: ClassVar[list[str]] = ["system"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show uptime", revision=1)]
@@ -53,11 +62,10 @@ class VerifyUptime(AntaTest):
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyUptime."""
+self.result.is_success()
command_output = self.instance_commands[0].json_output
-if command_output["upTime"] > self.inputs.minimum:
-self.result.is_success()
-else:
-self.result.is_failure(f"Device uptime is {command_output['upTime']} seconds")
+if command_output["upTime"] < self.inputs.minimum:
+self.result.is_failure(f"Device uptime is incorrect - Expected: {self.inputs.minimum}s Actual: {command_output['upTime']}s")

class VerifyReloadCause(AntaTest):
@@ -96,11 +104,11 @@ class VerifyReloadCause(AntaTest):
]:
self.result.is_success()
else:
-self.result.is_failure(f"Reload cause is: '{command_output_data}'")
+self.result.is_failure(f"Reload cause is: {command_output_data}")

class VerifyCoredump(AntaTest):
-"""Verifies if there are core dump files in the /var/core directory.
+"""Verifies there are no core dump files.
Expected Results
----------------
@@ -119,7 +127,6 @@ class VerifyCoredump(AntaTest):
```
"""
-description = "Verifies there are no core dump files."
categories: ClassVar[list[str]] = ["system"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show system coredump", revision=1)]
@@ -133,7 +140,7 @@ class VerifyCoredump(AntaTest):
if not core_files:
self.result.is_success()
else:
-self.result.is_failure(f"Core dump(s) have been found: {core_files}")
+self.result.is_failure(f"Core dump(s) have been found: {', '.join(core_files)}")

class VerifyAgentLogs(AntaTest):
@@ -189,12 +196,11 @@ class VerifyCPUUtilization(AntaTest):
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyCPUUtilization."""
+self.result.is_success()
command_output = self.instance_commands[0].json_output
command_output_data = command_output["cpuInfo"]["%Cpu(s)"]["idle"]
-if command_output_data > CPU_IDLE_THRESHOLD:
-self.result.is_success()
-else:
-self.result.is_failure(f"Device has reported a high CPU utilization: {100 - command_output_data}%")
+if command_output_data < CPU_IDLE_THRESHOLD:
+self.result.is_failure(f"Device has reported a high CPU utilization - Expected: < 75% Actual: {100 - command_output_data}%")

class VerifyMemoryUtilization(AntaTest):
@@ -219,12 +225,11 @@ class VerifyMemoryUtilization(AntaTest):
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyMemoryUtilization."""
+self.result.is_success()
command_output = self.instance_commands[0].json_output
memory_usage = command_output["memFree"] / command_output["memTotal"]
-if memory_usage > MEMORY_THRESHOLD:
-self.result.is_success()
-else:
-self.result.is_failure(f"Device has reported a high memory usage: {(1 - memory_usage)*100:.2f}%")
+if memory_usage < MEMORY_THRESHOLD:
+self.result.is_failure(f"Device has reported a high memory usage - Expected: < 75% Actual: {(1 - memory_usage) * 100:.2f}%")

class VerifyFileSystemUtilization(AntaTest):
@@ -253,11 +258,11 @@ class VerifyFileSystemUtilization(AntaTest):
self.result.is_success()
for line in command_output.split("\n")[1:]:
if "loop" not in line and len(line) > 0 and (percentage := int(line.split()[4].replace("%", ""))) > DISK_SPACE_THRESHOLD:
-self.result.is_failure(f"Mount point {line} is higher than 75%: reported {percentage}%")
+self.result.is_failure(f"Mount point: {line} - Higher disk space utilization - Expected: {DISK_SPACE_THRESHOLD}% Actual: {percentage}%")

class VerifyNTP(AntaTest):
-"""Verifies that the Network Time Protocol (NTP) is synchronized.
+"""Verifies if NTP is synchronised.
Expected Results
----------------
@@ -272,7 +277,6 @@ class VerifyNTP(AntaTest):
```
"""
-description = "Verifies if NTP is synchronised."
categories: ClassVar[list[str]] = ["system"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show ntp status", ofmt="text")]
@@ -284,18 +288,27 @@ class VerifyNTP(AntaTest):
self.result.is_success()
else:
data = command_output.split("\n")[0]
-self.result.is_failure(f"The device is not synchronized with the configured NTP server(s): '{data}'")
+self.result.is_failure(f"NTP status mismatch - Expected: synchronised Actual: {data}")

class VerifyNTPAssociations(AntaTest):
"""Verifies the Network Time Protocol (NTP) associations.
+This test performs the following checks:
+1. For the NTP servers:
+- The primary NTP server (marked as preferred) has the condition 'sys.peer'.
+- All other NTP servers have the condition 'candidate'.
+- All the NTP servers have the expected stratum level.
+2. For the NTP servers pool:
+- All the NTP servers belong to the specified NTP pool.
+- All the NTP servers have valid condition (sys.peer | candidate).
+- All the NTP servers have the stratum level within the specified startum level.
Expected Results
----------------
-* Success: The test will pass if the Primary NTP server (marked as preferred) has the condition 'sys.peer' and
-all other NTP servers have the condition 'candidate'.
-* Failure: The test will fail if the Primary NTP server (marked as preferred) does not have the condition 'sys.peer' or
-if any other NTP server does not have the condition 'candidate'.
+* Success: The test will pass if all the NTP servers meet the expected state.
+* Failure: The test will fail if any of the NTP server does not meet the expected state.
Examples
--------
@@ -310,6 +323,10 @@ class VerifyNTPAssociations(AntaTest):
stratum: 2
- server_address: 3.3.3.3
stratum: 2
+- VerifyNTPAssociations:
+ntp_pool:
+server_addresses: [1.1.1.1, 2.2.2.2]
+preferred_stratum_range: [1,3]
```
"""
@@ -319,10 +336,79 @@ class VerifyNTPAssociations(AntaTest):
class Input(AntaTest.Input):
"""Input model for the VerifyNTPAssociations test."""
-ntp_servers: list[NTPServer]
+ntp_servers: list[NTPServer] | None = None
"""List of NTP servers."""
+ntp_pool: NTPPool | None = None
+"""NTP servers pool."""
NTPServer: ClassVar[type[NTPServer]] = NTPServer
@model_validator(mode="after")
def validate_inputs(self) -> Self:
"""Validate the inputs provided to the VerifyNTPAssociations test.
Either `ntp_servers` or `ntp_pool` can be provided at the same time.
"""
if not self.ntp_servers and not self.ntp_pool:
msg = "'ntp_servers' or 'ntp_pool' must be provided"
raise ValueError(msg)
if self.ntp_servers and self.ntp_pool:
msg = "Either 'ntp_servers' or 'ntp_pool' can be provided at the same time"
raise ValueError(msg)
# Verifies the len of preferred_stratum_range in NTP Pool should be 2 as this is the range.
stratum_range = 2
if self.ntp_pool and len(self.ntp_pool.preferred_stratum_range) > stratum_range:
msg = "'preferred_stratum_range' list should have at most 2 items"
raise ValueError(msg)
return self
def _validate_ntp_server(self, ntp_server: NTPServer, peers: dict[str, Any]) -> list[str]:
"""Validate the NTP server, condition and stratum level."""
failure_msgs: list[str] = []
server_address = str(ntp_server.server_address)
# We check `peerIpAddr` in the peer details - covering IPv4Address input, or the peer key - covering Hostname input.
matching_peer = next((peer for peer, peer_details in peers.items() if (server_address in {peer_details["peerIpAddr"], peer})), None)
if not matching_peer:
failure_msgs.append(f"{ntp_server} - Not configured")
return failure_msgs
# Collecting the expected/actual NTP peer details.
exp_condition = "sys.peer" if ntp_server.preferred else "candidate"
exp_stratum = ntp_server.stratum
act_condition = get_value(peers[matching_peer], "condition")
act_stratum = get_value(peers[matching_peer], "stratumLevel")
if act_condition != exp_condition:
failure_msgs.append(f"{ntp_server} - Incorrect condition - Expected: {exp_condition} Actual: {act_condition}")
if act_stratum != exp_stratum:
failure_msgs.append(f"{ntp_server} - Incorrect stratum level - Expected: {exp_stratum} Actual: {act_stratum}")
return failure_msgs
def _validate_ntp_pool(self, server_addresses: list[Hostname | IPv4Address], peer: str, stratum_range: list[int], peer_details: dict[str, Any]) -> list[str]:
"""Validate the NTP server pool, condition and stratum level."""
failure_msgs: list[str] = []
# We check `peerIpAddr` and `peer` in the peer details - covering server_addresses input
if (peer_ip := peer_details["peerIpAddr"]) not in server_addresses and peer not in server_addresses:
failure_msgs.append(f"NTP Server: {peer_ip} Hostname: {peer} - Associated but not part of the provided NTP pool")
return failure_msgs
act_condition = get_value(peer_details, "condition")
act_stratum = get_value(peer_details, "stratumLevel")
if act_condition not in ["sys.peer", "candidate"]:
failure_msgs.append(f"NTP Server: {peer_ip} Hostname: {peer} - Incorrect condition - Expected: sys.peer, candidate Actual: {act_condition}")
if int(act_stratum) not in range(stratum_range[0], stratum_range[1] + 1):
msg = f"Expected Stratum Range: {stratum_range[0]} to {stratum_range[1]} Actual: {act_stratum}"
failure_msgs.append(f"NTP Server: {peer_ip} Hostname: {peer} - Incorrect stratum level - {msg}")
return failure_msgs
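The pool validation treats `preferred_stratum_range` as an inclusive `[low, high]` pair; the `range(low, high + 1)` call is what makes the upper bound inclusive. A tiny standalone illustration of just that arithmetic:

```python
# Inclusive stratum-range check, mirroring the logic in _validate_ntp_pool above.
stratum_range = [1, 3]
for act_stratum in (1, 2, 3, 4):
    ok = int(act_stratum) in range(stratum_range[0], stratum_range[1] + 1)
    print(act_stratum, "within range" if ok else "outside range")  # only 4 is outside
```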
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyNTPAssociations."""
@@ -332,22 +418,66 @@ class VerifyNTPAssociations(AntaTest):
self.result.is_failure("No NTP peers configured")
return
-# Iterate over each NTP server.
-for ntp_server in self.inputs.ntp_servers:
-server_address = str(ntp_server.server_address)
-# We check `peerIpAddr` in the peer details - covering IPv4Address input, or the peer key - covering Hostname input.
-matching_peer = next((peer for peer, peer_details in peers.items() if (server_address in {peer_details["peerIpAddr"], peer})), None)
-if not matching_peer:
-self.result.is_failure(f"{ntp_server} - Not configured")
-continue
-# Collecting the expected/actual NTP peer details.
-exp_condition = "sys.peer" if ntp_server.preferred else "candidate"
-exp_stratum = ntp_server.stratum
-act_condition = get_value(peers[matching_peer], "condition")
-act_stratum = get_value(peers[matching_peer], "stratumLevel")
-if act_condition != exp_condition or act_stratum != exp_stratum:
-self.result.is_failure(f"{ntp_server} - Bad association - Condition: {act_condition}, Stratum: {act_stratum}")
+if self.inputs.ntp_servers:
+# Iterate over each NTP server.
+for ntp_server in self.inputs.ntp_servers:
+failure_msgs = self._validate_ntp_server(ntp_server, peers)
+for msg in failure_msgs:
+self.result.is_failure(msg)
+return
+# Verifies the NTP pool details
+server_addresses = self.inputs.ntp_pool.server_addresses
+exp_stratum_range = self.inputs.ntp_pool.preferred_stratum_range
+for peer, peer_details in peers.items():
+failure_msgs = self._validate_ntp_pool(server_addresses, peer, exp_stratum_range, peer_details)
+for msg in failure_msgs:
+self.result.is_failure(msg)

class VerifyMaintenance(AntaTest):
"""Verifies that the device is not currently under or entering maintenance.
Expected Results
----------------
* Success: The test will pass if the device is not under or entering maintenance.
* Failure: The test will fail if the device is under or entering maintenance.
Examples
--------
```yaml
anta.tests.system:
- VerifyMaintenance:
```
"""
categories: ClassVar[list[str]] = ["Maintenance"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show maintenance", revision=1)]
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyMaintenance."""
self.result.is_success()
# If units is not empty we have to examine the output for details.
if not (units := get_value(self.instance_commands[0].json_output, "units")):
return
units_under_maintenance = [unit for unit, info in units.items() if info["state"] == "underMaintenance"]
units_entering_maintenance = [unit for unit, info in units.items() if info["state"] == "maintenanceModeEnter"]
causes = set()
# Iterate over units, check for units under or entering maintenance, and examine the causes.
for info in units.values():
if info["adminState"] == "underMaintenance":
causes.add("Quiesce is configured")
if info["onBootMaintenance"]:
causes.add("On-boot maintenance is configured")
if info["intfsViolatingTrafficThreshold"]:
causes.add("Interface traffic threshold violation")
# Building the error message.
if units_under_maintenance:
self.result.is_failure(f"Units under maintenance: '{', '.join(units_under_maintenance)}'.")
if units_entering_maintenance:
self.result.is_failure(f"Units entering maintenance: '{', '.join(units_entering_maintenance)}'.")
if causes:
self.result.is_failure(f"Possible causes: '{', '.join(sorted(causes))}'.")


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module related to VLAN tests."""
@@ -9,9 +9,9 @@ from __future__ import annotations
from typing import TYPE_CHECKING, ClassVar, Literal
-from anta.custom_types import Vlan
+from anta.custom_types import DynamicVlanSource, Vlan
from anta.models import AntaCommand, AntaTest
-from anta.tools import get_failed_logs, get_value
+from anta.tools import get_value
if TYPE_CHECKING:
from anta.models import AntaTemplate
@@ -55,16 +55,93 @@ class VerifyVlanInternalPolicy(AntaTest):
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyVlanInternalPolicy."""
+self.result.is_success()
command_output = self.instance_commands[0].json_output
-keys_to_verify = ["policy", "startVlanId", "endVlanId"]
-actual_policy_output = {key: get_value(command_output, key) for key in keys_to_verify}
-expected_policy_output = {"policy": self.inputs.policy, "startVlanId": self.inputs.start_vlan_id, "endVlanId": self.inputs.end_vlan_id}
-# Check if the actual output matches the expected output
-if actual_policy_output != expected_policy_output:
-failed_log = "The VLAN internal allocation policy is not configured properly:"
-failed_log += get_failed_logs(expected_policy_output, actual_policy_output)
-self.result.is_failure(failed_log)
-else:
-self.result.is_success()
+if (policy := self.inputs.policy) != (act_policy := get_value(command_output, "policy")):
+self.result.is_failure(f"Incorrect VLAN internal allocation policy configured - Expected: {policy} Actual: {act_policy}")
+return
+if (start_vlan_id := self.inputs.start_vlan_id) != (act_vlan_id := get_value(command_output, "startVlanId")):
+self.result.is_failure(
+f"VLAN internal allocation policy: {self.inputs.policy} - Incorrect start VLAN id configured - Expected: {start_vlan_id} Actual: {act_vlan_id}"
+)
+if (end_vlan_id := self.inputs.end_vlan_id) != (act_vlan_id := get_value(command_output, "endVlanId")):
+self.result.is_failure(
+f"VLAN internal allocation policy: {self.inputs.policy} - Incorrect end VLAN id configured - Expected: {end_vlan_id} Actual: {act_vlan_id}"
+)
class VerifyDynamicVlanSource(AntaTest):
"""Verifies dynamic VLAN allocation for specified VLAN sources.
This test performs the following checks for each specified VLAN source:
1. Validates source exists in dynamic VLAN table.
2. Verifies at least one VLAN is allocated to the source.
3. When strict mode is enabled (`strict: true`), ensures no other sources have VLANs allocated.
Expected Results
----------------
* Success: The test will pass if all of the following conditions are met:
- Each specified source exists in dynamic VLAN table.
- Each specified source has at least one VLAN allocated.
- In strict mode: No other sources have VLANs allocated.
* Failure: The test will fail if any of the following conditions is met:
- Specified source not found in configuration.
- Source exists but has no VLANs allocated.
- In strict mode: Non-specified sources have VLANs allocated.
Examples
--------
```yaml
anta.tests.vlan:
- VerifyDynamicVlanSource:
sources:
- evpn
- mlagsync
strict: False
```
"""
categories: ClassVar[list[str]] = ["vlan"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show vlan dynamic", revision=1)]
class Input(AntaTest.Input):
"""Input model for the VerifyDynamicVlanSource test."""
sources: list[DynamicVlanSource]
"""The dynamic VLAN source list."""
strict: bool = False
"""If True, only specified sources are allowed to have VLANs allocated. Default is False."""
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyDynamicVlanSource."""
self.result.is_success()
command_output = self.instance_commands[0].json_output
dynamic_vlans = command_output.get("dynamicVlans", {})
# Get all configured sources and sources with VLANs allocated
configured_sources = set(dynamic_vlans.keys())
sources_with_vlans = {source for source, data in dynamic_vlans.items() if data.get("vlanIds")}
expected_sources = set(self.inputs.sources)
# Check if all specified sources exist in configuration
missing_sources = expected_sources - configured_sources
if missing_sources:
self.result.is_failure(f"Dynamic VLAN source(s) not found in configuration: {', '.join(sorted(missing_sources))}")
return
# Check if configured sources have VLANs allocated
sources_without_vlans = expected_sources - sources_with_vlans
if sources_without_vlans:
self.result.is_failure(f"Dynamic VLAN source(s) exist but have no VLANs allocated: {', '.join(sorted(sources_without_vlans))}")
return
# In strict mode, verify no other sources have VLANs allocated
if self.inputs.strict:
unexpected_sources = sources_with_vlans - expected_sources
if unexpected_sources:
self.result.is_failure(f"Strict mode enabled: Unexpected sources have VLANs allocated: {', '.join(sorted(unexpected_sources))}")


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Module related to VXLAN tests."""
@@ -21,7 +21,7 @@ if TYPE_CHECKING:
class VerifyVxlan1Interface(AntaTest):
-"""Verifies if the Vxlan1 interface is configured and 'up/up'.
+"""Verifies the Vxlan1 interface status.
Warnings
--------
@@ -41,26 +41,26 @@ class VerifyVxlan1Interface(AntaTest):
```
"""
-description = "Verifies the Vxlan1 interface status."
categories: ClassVar[list[str]] = ["vxlan"]
commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show interfaces description", revision=1)]
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyVxlan1Interface."""
+self.result.is_success()
command_output = self.instance_commands[0].json_output
-if "Vxlan1" not in command_output["interfaceDescriptions"]:
-self.result.is_skipped("Vxlan1 interface is not configured")
-elif (
-command_output["interfaceDescriptions"]["Vxlan1"]["lineProtocolStatus"] == "up"
-and command_output["interfaceDescriptions"]["Vxlan1"]["interfaceStatus"] == "up"
-):
-self.result.is_success()
-else:
-self.result.is_failure(
-f"Vxlan1 interface is {command_output['interfaceDescriptions']['Vxlan1']['lineProtocolStatus']}"
-f"/{command_output['interfaceDescriptions']['Vxlan1']['interfaceStatus']}",
-)
+# Skipping the test if the Vxlan1 interface is not configured
+if "Vxlan1" not in (interface_details := command_output["interfaceDescriptions"]):
+self.result.is_skipped("Interface: Vxlan1 - Not configured")
+return
+line_protocol_status = interface_details["Vxlan1"]["lineProtocolStatus"]
+interface_status = interface_details["Vxlan1"]["interfaceStatus"]
+# Checking against both status and line protocol status
+if interface_status != "up" or line_protocol_status != "up":
+self.result.is_failure(f"Interface: Vxlan1 - Incorrect Line protocol status/Status - Expected: up/up Actual: {line_protocol_status}/{interface_status}")

class VerifyVxlanConfigSanity(AntaTest):
@@ -86,19 +86,19 @@ class VerifyVxlanConfigSanity(AntaTest):
@AntaTest.anta_test
def test(self) -> None:
"""Main test function for VerifyVxlanConfigSanity."""
+self.result.is_success()
command_output = self.instance_commands[0].json_output
+# Skipping the test if VXLAN is not configured
if "categories" not in command_output or len(command_output["categories"]) == 0:
self.result.is_skipped("VXLAN is not configured")
return
-failed_categories = {
-category: content
-for category, content in command_output["categories"].items()
-if category in ["localVtep", "mlag", "pd"] and content["allCheckPass"] is not True
-}
-if len(failed_categories) > 0:
-self.result.is_failure(f"VXLAN config sanity check is not passing: {failed_categories}")
-else:
-self.result.is_success()
+# Verifies the Vxlan config sanity
+categories_to_check = ["localVtep", "mlag", "pd"]
+for category in categories_to_check:
+if not get_value(command_output, f"categories.{category}.allCheckPass"):
+self.result.is_failure(f"Vxlan Category: {category} - Config sanity check is not passing")

class VerifyVxlanVniBinding(AntaTest):
@@ -135,31 +135,23 @@ class VerifyVxlanVniBinding(AntaTest):
"""Main test function for VerifyVxlanVniBinding."""
self.result.is_success()
-no_binding = []
-wrong_binding = []
if (vxlan1 := get_value(self.instance_commands[0].json_output, "vxlanIntfs.Vxlan1")) is None:
self.result.is_skipped("Vxlan1 interface is not configured")
return
for vni, vlan in self.inputs.bindings.items():
str_vni = str(vni)
+retrieved_vlan = ""
if str_vni in vxlan1["vniBindings"]:
-retrieved_vlan = vxlan1["vniBindings"][str_vni]["vlan"]
+retrieved_vlan = get_value(vxlan1, f"vniBindings..{str_vni}..vlan", separator="..")
elif str_vni in vxlan1["vniBindingsToVrf"]:
-retrieved_vlan = vxlan1["vniBindingsToVrf"][str_vni]["vlan"]
+retrieved_vlan = get_value(vxlan1, f"vniBindingsToVrf..{str_vni}..vlan", separator="..")
-else:
-no_binding.append(str_vni)
-retrieved_vlan = None
-if retrieved_vlan and vlan != retrieved_vlan:
-wrong_binding.append({str_vni: retrieved_vlan})
-if no_binding:
-self.result.is_failure(f"The following VNI(s) have no binding: {no_binding}")
-if wrong_binding:
-self.result.is_failure(f"The following VNI(s) have the wrong VLAN binding: {wrong_binding}")
+if not retrieved_vlan:
+self.result.is_failure(f"Interface: Vxlan1 VNI: {str_vni} - Binding not found")
+elif vlan != retrieved_vlan:
+self.result.is_failure(f"Interface: Vxlan1 VNI: {str_vni} VLAN: {vlan} - Wrong VLAN binding - Actual: {retrieved_vlan}")

class VerifyVxlanVtep(AntaTest):
@@ -206,10 +198,10 @@ class VerifyVxlanVtep(AntaTest):
difference2 = set(vxlan1["vteps"]).difference(set(inputs_vteps))
if difference1:
-self.result.is_failure(f"The following VTEP peer(s) are missing from the Vxlan1 interface: {sorted(difference1)}")
+self.result.is_failure(f"The following VTEP peer(s) are missing from the Vxlan1 interface: {', '.join(sorted(difference1))}")
if difference2:
-self.result.is_failure(f"Unexpected VTEP peer(s) on Vxlan1 interface: {sorted(difference2)}")
+self.result.is_failure(f"Unexpected VTEP peer(s) on Vxlan1 interface: {', '.join(sorted(difference2))}")

class VerifyVxlan1ConnSettings(AntaTest):
@@ -259,6 +251,6 @@ class VerifyVxlan1ConnSettings(AntaTest):
# Check vxlan1 source interface and udp port
if src_intf != self.inputs.source_interface:
-self.result.is_failure(f"Source interface is not correct. Expected `{self.inputs.source_interface}` as source interface but found `{src_intf}` instead.")
+self.result.is_failure(f"Interface: Vxlan1 - Incorrect Source interface - Expected: {self.inputs.source_interface} Actual: {src_intf}")
if port != self.inputs.udp_port:
-self.result.is_failure(f"UDP port is not correct. Expected `{self.inputs.udp_port}` as UDP port but found `{port}` instead.")
+self.result.is_failure(f"Interface: Vxlan1 - Incorrect UDP port - Expected: {self.inputs.udp_port} Actual: {port}")


@@ -1,4 +1,4 @@
-# Copyright (c) 2023-2024 Arista Networks, Inc.
+# Copyright (c) 2023-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Common functions used in ANTA tests."""
@@ -353,7 +353,7 @@ def cprofile(sort_by: str = "cumtime") -> Callable[[F], F]:
return result
-return cast(F, wrapper)
+return cast("F", wrapper)
return decorator


@@ -1,4 +1,4 @@
-# Copyright (c) 2024 Arista Networks, Inc.
+# Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
# Initially written by Jeremy Schulman at https://github.com/jeremyschulman/aio-eapi

asynceapi/_constants.py (new file, 22 lines)

@@ -0,0 +1,22 @@
# Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Constants and Enums for the asynceapi package."""
from __future__ import annotations
from enum import Enum
class EapiCommandFormat(str, Enum):
"""Enum for the eAPI command format.
NOTE: This could be updated to StrEnum when Python 3.11 is the minimum supported version in ANTA.
"""
JSON = "json"
TEXT = "text"
def __str__(self) -> str:
"""Override the __str__ method to return the value of the Enum, mimicking the behavior of StrEnum."""
return self.value
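A quick usage note on the new enum: because it mixes in `str`, members compare equal to their plain string values and `__str__` returns the value. A minimal sketch, assuming the module is importable as `asynceapi._constants`:

```python
from asynceapi._constants import EapiCommandFormat

fmt = EapiCommandFormat("json")   # lookup by value
assert fmt is EapiCommandFormat.JSON
assert str(fmt) == "json"          # __str__ returns the value, mimicking StrEnum
assert fmt == "json"               # str subclass, so plain string comparison also works
```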

asynceapi/_errors.py (new file, 36 lines)

@@ -0,0 +1,36 @@
# Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Exceptions for the asynceapi package."""
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from asynceapi._models import EapiResponse
class EapiReponseError(Exception):
"""Exception raised when an eAPI response contains errors.
Attributes
----------
response : EapiResponse
The eAPI response that contains the error.
"""
def __init__(self, response: EapiResponse) -> None:
"""Initialize the EapiReponseError exception."""
self.response = response
# Build a descriptive error message
message = "Error in eAPI response"
if response.error_code is not None:
message += f" (code: {response.error_code})"
if response.error_message is not None:
message += f": {response.error_message}"
super().__init__(message)
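A hedged sketch of how this exception surfaces when `EapiResponse.from_jsonrpc` (defined in `asynceapi/_models.py` below) is called with `raise_on_error=True`; the JSON-RPC error payload is hand-written for illustration:

```python
from asynceapi._errors import EapiReponseError
from asynceapi._models import EapiRequest, EapiResponse

request = EapiRequest(commands=["show bogus"], id=1)
reply = {
    "jsonrpc": "2.0",
    "id": 1,
    "error": {"code": 1002, "message": "CLI command 1 of 1 'show bogus' failed", "data": [{"errors": ["Invalid input"]}]},
}

try:
    EapiResponse.from_jsonrpc(reply, request, raise_on_error=True)
except EapiReponseError as exc:
    print(exc)                      # Error in eAPI response (code: 1002): CLI command 1 of 1 'show bogus' failed
    print(exc.response.error_code)  # 1002
```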

asynceapi/_models.py (new file, 238 lines)

@@ -0,0 +1,238 @@
# Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Models for the asynceapi package."""
from __future__ import annotations
from dataclasses import dataclass, field
from logging import getLogger
from typing import TYPE_CHECKING, Any, Literal
from uuid import uuid4
from ._constants import EapiCommandFormat
from ._errors import EapiReponseError
if TYPE_CHECKING:
from collections.abc import Iterator
from ._types import EapiComplexCommand, EapiJsonOutput, EapiSimpleCommand, EapiTextOutput, JsonRpc
LOGGER = getLogger(__name__)
# pylint: disable=too-many-instance-attributes
@dataclass(frozen=True)
class EapiRequest:
"""Model for an eAPI request.
Attributes
----------
commands : list[EapiSimpleCommand | EapiComplexCommand]
A list of commands to execute.
version : int | Literal["latest"]
The eAPI version to use. Defaults to "latest".
format : EapiCommandFormat
The command output format. Defaults "json".
timestamps : bool
Include timestamps in the command output. Defaults to False.
auto_complete : bool
Enable command auto-completion. Defaults to False.
expand_aliases : bool
Expand command aliases. Defaults to False.
stop_on_error : bool
Stop command execution on first error. Defaults to True.
id : int | str
The request ID. Defaults to a random hex string.
"""
commands: list[EapiSimpleCommand | EapiComplexCommand]
version: int | Literal["latest"] = "latest"
format: EapiCommandFormat = EapiCommandFormat.JSON
timestamps: bool = False
auto_complete: bool = False
expand_aliases: bool = False
stop_on_error: bool = True
id: int | str = field(default_factory=lambda: uuid4().hex)
def to_jsonrpc(self) -> JsonRpc:
"""Return the JSON-RPC dictionary payload for the request."""
return {
"jsonrpc": "2.0",
"method": "runCmds",
"params": {
"version": self.version,
"cmds": self.commands,
"format": self.format,
"timestamps": self.timestamps,
"autoComplete": self.auto_complete,
"expandAliases": self.expand_aliases,
"stopOnError": self.stop_on_error,
},
"id": self.id,
}
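A minimal sketch (not part of the change set) of the request-side flow: build an `EapiRequest` and inspect the JSON-RPC payload it produces. Import paths assume the package layout shown in this commit:

```python
from asynceapi._models import EapiRequest

req = EapiRequest(commands=["show version", {"cmd": "show running-config", "revision": 1}], timestamps=True)
payload = req.to_jsonrpc()

# payload["method"] == "runCmds"; payload["params"]["cmds"] is the command list as provided.
print(payload["id"], payload["params"]["format"])  # random hex id, "json" (EapiCommandFormat.JSON)
```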
@dataclass(frozen=True)
class EapiResponse:
"""Model for an eAPI response.
Construct an EapiResponse from a JSON-RPC response dictionary using the `from_jsonrpc` class method.
Can be iterated over to access command results in order of execution.
Attributes
----------
request_id : str
The ID of the original request this response corresponds to.
_results : dict[int, EapiCommandResult]
Dictionary mapping request command indices to their respective results.
error_code : int | None
The JSON-RPC error code, if any.
error_message : str | None
The JSON-RPC error message, if any.
"""
request_id: str
_results: dict[int, EapiCommandResult] = field(default_factory=dict)
error_code: int | None = None
error_message: str | None = None
@property
def success(self) -> bool:
"""Return True if the response has no errors."""
return self.error_code is None
@property
def results(self) -> list[EapiCommandResult]:
"""Get all results as a list. Results are ordered by the command indices in the request."""
return list(self._results.values())
def __len__(self) -> int:
"""Return the number of results."""
return len(self._results)
def __iter__(self) -> Iterator[EapiCommandResult]:
"""Enable iteration over the results. Results are yielded in the same order as provided in the request."""
yield from self._results.values()
@classmethod
def from_jsonrpc(cls, response: dict[str, Any], request: EapiRequest, *, raise_on_error: bool = False) -> EapiResponse:
"""Build an EapiResponse from a JSON-RPC eAPI response.
Parameters
----------
response
The JSON-RPC eAPI response dictionary.
request
The corresponding EapiRequest.
raise_on_error
Raise an EapiReponseError if the response contains errors, by default False.
Returns
-------
EapiResponse
The EapiResponse object.
"""
has_error = "error" in response
response_data = response["error"]["data"] if has_error else response["result"]
# Handle case where we have fewer results than commands (stop_on_error=True)
executed_count = min(len(response_data), len(request.commands))
# Process the results we have
results = {}
for i in range(executed_count):
cmd = request.commands[i]
cmd_str = cmd["cmd"] if isinstance(cmd, dict) else cmd
data = response_data[i]
output = None
errors = []
success = True
start_time = None
duration = None
# Parse the output based on the data type, no output when errors are present
if isinstance(data, dict):
if "errors" in data:
errors = data["errors"]
success = False
else:
output = data["output"] if request.format == EapiCommandFormat.TEXT and "output" in data else data
# Add timestamps if available
if request.timestamps and "_meta" in data:
meta = data.pop("_meta")
start_time = meta.get("execStartTime")
duration = meta.get("execDuration")
elif isinstance(data, str):
# Handle case where eAPI returns a JSON string response (serialized JSON) for certain commands
try:
from json import JSONDecodeError, loads
output = loads(data)
except (JSONDecodeError, TypeError):
# If it's not valid JSON, store as is
LOGGER.warning("Invalid JSON response for command: %s. Storing as text: %s", cmd_str, data)
output = data
results[i] = EapiCommandResult(
command=cmd_str,
output=output,
errors=errors,
success=success,
start_time=start_time,
duration=duration,
)
# If stop_on_error is True and we have an error, indicate commands not executed
if has_error and request.stop_on_error and executed_count < len(request.commands):
for i in range(executed_count, len(request.commands)):
cmd = request.commands[i]
cmd_str = cmd["cmd"] if isinstance(cmd, dict) else cmd
results[i] = EapiCommandResult(command=cmd_str, output=None, errors=["Command not executed due to previous error"], success=False, executed=False)
response_obj = cls(
request_id=response["id"],
_results=results,
error_code=response["error"]["code"] if has_error else None,
error_message=response["error"]["message"] if has_error else None,
)
if raise_on_error and has_error:
raise EapiReponseError(response_obj)
return response_obj
@dataclass(frozen=True)
class EapiCommandResult:
"""Model for an eAPI command result.
Attributes
----------
command : str
The command that was executed.
output : EapiJsonOutput | EapiTextOutput | None
The command result output. None if the command returned errors.
errors : list[str]
A list of error messages, if any.
success : bool
True if the command was successful.
executed : bool
True if the command was executed. When `stop_on_error` is True in the request, some commands may not be executed.
start_time : float | None
Command execution start time in seconds. Uses Unix epoch format. `timestamps` must be True in the request.
duration : float | None
Command execution duration in seconds. `timestamps` must be True in the request.
"""
command: str
output: EapiJsonOutput | EapiTextOutput | None
errors: list[str] = field(default_factory=list)
success: bool = True
executed: bool = True
start_time: float | None = None
duration: float | None = None
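Putting the two models together, a hedged end-to-end parsing sketch; the reply dictionary below is a hand-written stand-in for an eAPI JSON-RPC response, not captured output:

```python
from asynceapi._models import EapiRequest, EapiResponse

request = EapiRequest(commands=["show hostname"], id="req-1")
reply = {"jsonrpc": "2.0", "id": "req-1", "result": [{"hostname": "leaf1", "fqdn": "leaf1.lab"}]}

response = EapiResponse.from_jsonrpc(reply, request)
assert response.success              # no "error" key in the reply
result = response.results[0]         # EapiCommandResult for "show hostname"
print(result.command, result.output["hostname"])  # show hostname leaf1
```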

asynceapi/_types.py (new file, 53 lines)

@@ -0,0 +1,53 @@
# Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Type definitions used for the asynceapi package."""
from __future__ import annotations
import sys
from typing import TYPE_CHECKING, Any, Literal
if TYPE_CHECKING:
from ._constants import EapiCommandFormat
if sys.version_info >= (3, 11):
from typing import NotRequired, TypedDict
else:
from typing_extensions import NotRequired, TypedDict
EapiJsonOutput = dict[str, Any]
"""Type definition of an eAPI JSON output response."""
EapiTextOutput = str
"""Type definition of an eAPI text output response."""
EapiSimpleCommand = str
"""Type definition of an eAPI simple command. A simple command is the CLI command to run as a string."""
class EapiComplexCommand(TypedDict):
"""Type definition of an eAPI complex command. A complex command is a dictionary with the CLI command to run with additional parameters."""
cmd: str
input: NotRequired[str]
revision: NotRequired[int]
class JsonRpc(TypedDict):
"""Type definition of a JSON-RPC payload."""
jsonrpc: Literal["2.0"]
method: Literal["runCmds"]
params: JsonRpcParams
id: NotRequired[int | str]
class JsonRpcParams(TypedDict):
"""Type definition of JSON-RPC parameters."""
version: NotRequired[int | Literal["latest"]]
cmds: list[EapiSimpleCommand | EapiComplexCommand]
format: NotRequired[EapiCommandFormat]
autoComplete: NotRequired[bool]
expandAliases: NotRequired[bool]
timestamps: NotRequired[bool]
stopOnError: NotRequired[bool]
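Since these are `TypedDict`s, they only constrain type checkers; at runtime they are ordinary dictionaries. A short sketch of building a typed payload by hand:

```python
from asynceapi._constants import EapiCommandFormat
from asynceapi._types import EapiComplexCommand, JsonRpc

cmd: EapiComplexCommand = {"cmd": "show interfaces", "revision": 1}  # "input" is optional (NotRequired)
payload: JsonRpc = {
    "jsonrpc": "2.0",
    "method": "runCmds",
    "params": {"version": "latest", "cmds": ["show version", cmd], "format": EapiCommandFormat.JSON},
    "id": 42,
}
```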


@@ -1,4 +1,4 @@
-# Copyright (c) 2024 Arista Networks, Inc.
+# Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
# Initially written by Jeremy Schulman at https://github.com/jeremyschulman/aio-eapi


@ -1,4 +1,4 @@
# Copyright (c) 2024 Arista Networks, Inc. # Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
# Initially written by Jeremy Schulman at https://github.com/jeremyschulman/aio-eapi # Initially written by Jeremy Schulman at https://github.com/jeremyschulman/aio-eapi
@ -10,9 +10,10 @@
from __future__ import annotations from __future__ import annotations
import re import re
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING
if TYPE_CHECKING: if TYPE_CHECKING:
from ._types import EapiComplexCommand, EapiJsonOutput, EapiSimpleCommand
from .device import Device from .device import Device
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
@ -78,7 +79,7 @@ class SessionConfig:
# Public Methods # Public Methods
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
async def status_all(self) -> dict[str, Any]: async def status_all(self) -> EapiJsonOutput:
"""Get the status of all the session config on the device. """Get the status of all the session config on the device.
Run the following command on the device: Run the following command on the device:
@ -86,7 +87,7 @@ class SessionConfig:
Returns Returns
------- -------
dict[str, Any] EapiJsonOutput
Dictionary of native EOS eAPI response; see `status` method for Dictionary of native EOS eAPI response; see `status` method for
details. details.
@ -116,9 +117,9 @@ class SessionConfig:
} }
``` ```
""" """
return await self._cli("show configuration sessions detail") # type: ignore[return-value] # json outformat returns dict[str, Any] return await self._cli(command="show configuration sessions detail")
async def status(self) -> dict[str, Any] | None: async def status(self) -> EapiJsonOutput | None:
"""Get the status of a session config on the device. """Get the status of a session config on the device.
Run the following command on the device: Run the following command on the device:
@ -129,7 +130,7 @@ class SessionConfig:
Returns Returns
------- -------
dict[str, Any] | None EapiJsonOutput | None
Dictionary instance of the session status. If the session does not exist, Dictionary instance of the session status. If the session does not exist,
then this method will return None. then this method will return None.
@ -201,7 +202,7 @@ class SessionConfig:
# prepare the initial set of command to enter the config session and # prepare the initial set of command to enter the config session and
# rollback clean if the `replace` argument is True. # rollback clean if the `replace` argument is True.
commands: list[str | dict[str, Any]] = [self._cli_config_session] commands: list[EapiSimpleCommand | EapiComplexCommand] = [self._cli_config_session]
if replace: if replace:
commands.append(self.CLI_CFG_FACTORY_RESET) commands.append(self.CLI_CFG_FACTORY_RESET)
@ -232,7 +233,7 @@ class SessionConfig:
if timer: if timer:
command += f" timer {timer}" command += f" timer {timer}"
await self._cli(command) await self._cli(command=command)
async def abort(self) -> None: async def abort(self) -> None:
"""Abort the configuration session. """Abort the configuration session.
@ -240,7 +241,7 @@ class SessionConfig:
Run the following command on the device: Run the following command on the device:
# configure session <name> abort # configure session <name> abort
""" """
await self._cli(f"{self._cli_config_session} abort") await self._cli(command=f"{self._cli_config_session} abort")
async def diff(self) -> str: async def diff(self) -> str:
"""Return the "diff" of the session config relative to the running config. """Return the "diff" of the session config relative to the running config.
@ -257,7 +258,7 @@ class SessionConfig:
---------- ----------
* https://www.gnu.org/software/diffutils/manual/diffutils.txt * https://www.gnu.org/software/diffutils/manual/diffutils.txt
""" """
return await self._cli(f"show session-config named {self.name} diffs", ofmt="text") # type: ignore[return-value] # text outformat returns str return await self._cli(command=f"show session-config named {self.name} diffs", ofmt="text")
async def load_file(self, filename: str, *, replace: bool = False) -> None: async def load_file(self, filename: str, *, replace: bool = False) -> None:
"""Load the configuration from <filename> into the session configuration. """Load the configuration from <filename> into the session configuration.
@ -281,12 +282,12 @@ class SessionConfig:
If there are any issues with loading the configuration file then a If there are any issues with loading the configuration file then a
RuntimeError is raised with the error messages content. RuntimeError is raised with the error messages content.
""" """
commands: list[str | dict[str, Any]] = [self._cli_config_session] commands: list[EapiSimpleCommand | EapiComplexCommand] = [self._cli_config_session]
if replace: if replace:
commands.append(self.CLI_CFG_FACTORY_RESET) commands.append(self.CLI_CFG_FACTORY_RESET)
commands.append(f"copy {filename} session-config") commands.append(f"copy {filename} session-config")
res: list[dict[str, Any]] = await self._cli(commands=commands) # type: ignore[assignment] # JSON outformat of multiple commands returns list[dict[str, Any]] res = await self._cli(commands=commands)
checks_re = re.compile(r"error|abort|invalid", flags=re.IGNORECASE) checks_re = re.compile(r"error|abort|invalid", flags=re.IGNORECASE)
messages = res[-1]["messages"] messages = res[-1]["messages"]
@ -295,4 +296,4 @@ class SessionConfig:
async def write(self) -> None: async def write(self) -> None:
"""Save the running config to the startup config by issuing the command "write" to the device.""" """Save the running config to the startup config by issuing the command "write" to the device."""
await self._cli("write") await self._cli(command="write")
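For orientation, here is a minimal usage sketch of the SessionConfig surface touched above (load_file, diff, status, abort). The asynceapi import path, the Device constructor arguments, and the config_session() factory are assumptions drawn from this diff rather than a verified example:

import asyncio

from asynceapi import Device  # import path assumed

async def stage_candidate() -> None:
    # Hypothetical connection parameters.
    dev = Device(host="leaf1", username="admin", password="admin")
    session = dev.config_session("maint-01")  # assumed factory returning a SessionConfig

    # Copy a candidate file into the session, wiping any prior session content.
    await session.load_file("flash:candidate.cfg", replace=True)

    # Review the pending change against the running config.
    print(await session.diff())

    # status() returns None when the session does not exist (see docstring above).
    if await session.status() is None:
        return

    # Back out for this sketch; a real workflow would commit and then write().
    await session.abort()

asyncio.run(stage_candidate())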


@ -1,4 +1,4 @@
# Copyright (c) 2024 Arista Networks, Inc. # Copyright (c) 2024-2025 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0 # Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
# Initially written by Jeremy Schulman at https://github.com/jeremyschulman/aio-eapi # Initially written by Jeremy Schulman at https://github.com/jeremyschulman/aio-eapi
@ -10,7 +10,7 @@
from __future__ import annotations from __future__ import annotations
from socket import getservbyname from socket import getservbyname
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any, Literal, overload
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# Public Imports # Public Imports
@ -20,12 +20,13 @@ import httpx
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# Private Imports # Private Imports
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
from ._constants import EapiCommandFormat
from .aio_portcheck import port_check_url from .aio_portcheck import port_check_url
from .config_session import SessionConfig from .config_session import SessionConfig
from .errors import EapiCommandError from .errors import EapiCommandError
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Sequence from ._types import EapiComplexCommand, EapiJsonOutput, EapiSimpleCommand, EapiTextOutput, JsonRpc
# ----------------------------------------------------------------------------- # -----------------------------------------------------------------------------
# Exports # Exports
@ -121,18 +122,139 @@ class Device(httpx.AsyncClient):
""" """
return await port_check_url(self.base_url) return await port_check_url(self.base_url)
# Single command, JSON output, no suppression
@overload
async def cli( async def cli(
self, self,
command: str | dict[str, Any] | None = None, *,
commands: Sequence[str | dict[str, Any]] | None = None, command: EapiSimpleCommand | EapiComplexCommand,
ofmt: str | None = None, commands: None = None,
version: int | str | None = "latest", ofmt: Literal["json"] = "json",
version: int | Literal["latest"] = "latest",
suppress_error: Literal[False] = False,
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> EapiJsonOutput: ...
# Multiple commands, JSON output, no suppression
@overload
async def cli(
self,
*,
command: None = None,
commands: list[EapiSimpleCommand | EapiComplexCommand],
ofmt: Literal["json"] = "json",
version: int | Literal["latest"] = "latest",
suppress_error: Literal[False] = False,
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> list[EapiJsonOutput]: ...
# Single command, TEXT output, no suppression
@overload
async def cli(
self,
*,
command: EapiSimpleCommand | EapiComplexCommand,
commands: None = None,
ofmt: Literal["text"],
version: int | Literal["latest"] = "latest",
suppress_error: Literal[False] = False,
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> EapiTextOutput: ...
# Multiple commands, TEXT output, no suppression
@overload
async def cli(
self,
*,
command: None = None,
commands: list[EapiSimpleCommand | EapiComplexCommand],
ofmt: Literal["text"],
version: int | Literal["latest"] = "latest",
suppress_error: Literal[False] = False,
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> list[EapiTextOutput]: ...
# Single command, JSON output, with suppression
@overload
async def cli(
self,
*,
command: EapiSimpleCommand | EapiComplexCommand,
commands: None = None,
ofmt: Literal["json"] = "json",
version: int | Literal["latest"] = "latest",
suppress_error: Literal[True],
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> EapiJsonOutput | None: ...
# Multiple commands, JSON output, with suppression
@overload
async def cli(
self,
*,
command: None = None,
commands: list[EapiSimpleCommand | EapiComplexCommand],
ofmt: Literal["json"] = "json",
version: int | Literal["latest"] = "latest",
suppress_error: Literal[True],
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> list[EapiJsonOutput] | None: ...
# Single command, TEXT output, with suppression
@overload
async def cli(
self,
*,
command: EapiSimpleCommand | EapiComplexCommand,
commands: None = None,
ofmt: Literal["text"],
version: int | Literal["latest"] = "latest",
suppress_error: Literal[True],
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> EapiTextOutput | None: ...
# Multiple commands, TEXT output, with suppression
@overload
async def cli(
self,
*,
command: None = None,
commands: list[EapiSimpleCommand | EapiComplexCommand],
ofmt: Literal["text"],
version: int | Literal["latest"] = "latest",
suppress_error: Literal[True],
auto_complete: bool = False,
expand_aliases: bool = False,
req_id: int | str | None = None,
) -> list[EapiTextOutput] | None: ...
# Actual implementation
async def cli(
self,
command: EapiSimpleCommand | EapiComplexCommand | None = None,
commands: list[EapiSimpleCommand | EapiComplexCommand] | None = None,
ofmt: Literal["json", "text"] = "json",
version: int | Literal["latest"] = "latest",
*, *,
suppress_error: bool = False, suppress_error: bool = False,
auto_complete: bool = False, auto_complete: bool = False,
expand_aliases: bool = False, expand_aliases: bool = False,
req_id: int | str | None = None, req_id: int | str | None = None,
) -> list[dict[str, Any] | str] | dict[str, Any] | str | None: ) -> EapiJsonOutput | EapiTextOutput | list[EapiJsonOutput] | list[EapiTextOutput] | None:
"""Execute one or more CLI commands. """Execute one or more CLI commands.
Parameters Parameters
@ -143,6 +265,7 @@ class Device(httpx.AsyncClient):
A list of commands to execute; results in a list of output responses. A list of commands to execute; results in a list of output responses.
ofmt ofmt
Either 'json' or 'text'; indicates the output format for the CLI commands. Either 'json' or 'text'; indicates the output format for the CLI commands.
eAPI defaults to 'json'.
version version
By default the eAPI will use "version 1" for all API object models. By default the eAPI will use "version 1" for all API object models.
This driver will, by default, always set version to "latest" so This driver will, by default, always set version to "latest" so
@ -158,13 +281,13 @@ class Device(httpx.AsyncClient):
response = dev.cli(..., suppress_error=True) response = dev.cli(..., suppress_error=True)
auto_complete auto_complete
Enabled/disables the command auto-compelete feature of the EAPI. Per the Enables/disables the command auto-complete feature of the eAPI. Per the
documentation: documentation:
Allows users to use shorthand commands in eAPI calls. With this Allows users to use shorthand commands in eAPI calls. With this
parameter included a user can send 'sh ver' via eAPI to get the parameter included a user can send 'sh ver' via eAPI to get the
output of 'show version'. output of 'show version'.
expand_aliases expand_aliases
Enables/disables the command use of User defined alias. Per the Enables/disables the command use of user-defined aliases. Per the
documentation: documentation:
Allowed users to provide the expandAliases parameter to eAPI Allowed users to provide the expandAliases parameter to eAPI
calls. This allows users to use aliased commands via the API. calls. This allows users to use aliased commands via the API.
@ -176,15 +299,34 @@ class Device(httpx.AsyncClient):
Returns Returns
------- -------
list[dict[str, Any] | str] | dict[str, Any] | str | None dict[str, Any]
One or List of output responses, per the description above. Single command, JSON output, suppress_error=False
list[dict[str, Any]]
Multiple commands, JSON output, suppress_error=False
str
Single command, TEXT output, suppress_error=False
list[str]
Multiple commands, TEXT output, suppress_error=False
dict[str, Any] | None
Single command, JSON output, suppress_error=True
list[dict[str, Any]] | None
Multiple commands, JSON output, suppress_error=True
str | None
Single command, TEXT output, suppress_error=True
list[str] | None
Multiple commands, TEXT output, suppress_error=True
""" """
if not any((command, commands)): if not any((command, commands)):
msg = "Required 'command' or 'commands'" msg = "Required 'command' or 'commands'"
raise RuntimeError(msg) raise RuntimeError(msg)
jsonrpc = self._jsonrpc_command( jsonrpc = self._jsonrpc_command(
commands=[command] if command else commands, ofmt=ofmt, version=version, auto_complete=auto_complete, expand_aliases=expand_aliases, req_id=req_id commands=[command] if command else commands if commands else [],
ofmt=ofmt,
version=version,
auto_complete=auto_complete,
expand_aliases=expand_aliases,
req_id=req_id,
) )
try: try:
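To show what the overload set above buys callers, a short illustrative sketch follows; the device fixture and command strings are hypothetical, and the return-type comments mirror the overload table rather than a verified type-checker run:

from asynceapi import Device  # import path assumed

async def demo(dev: Device) -> None:
    # Single command, JSON output: narrowed to a dict (EapiJsonOutput).
    version = await dev.cli(command="show version")
    # Multiple commands, JSON output: narrowed to list[EapiJsonOutput].
    outputs = await dev.cli(commands=["show version", "show hostname"])
    # Single command, text output: narrowed to a plain string (EapiTextOutput).
    running = await dev.cli(command="show running-config", ofmt="text")
    # suppress_error=True adds None to the return type instead of raising EapiCommandError.
    maybe = await dev.cli(command="show bogus command", suppress_error=True)
    if maybe is None:
        print("command failed; error suppressed")
    print(len(version), len(outputs), len(running))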
@ -197,14 +339,14 @@ class Device(httpx.AsyncClient):
def _jsonrpc_command( def _jsonrpc_command(
self, self,
commands: Sequence[str | dict[str, Any]] | None = None, commands: list[EapiSimpleCommand | EapiComplexCommand],
ofmt: str | None = None, ofmt: Literal["json", "text"] = "json",
version: int | str | None = "latest", version: int | Literal["latest"] = "latest",
*, *,
auto_complete: bool = False, auto_complete: bool = False,
expand_aliases: bool = False, expand_aliases: bool = False,
req_id: int | str | None = None, req_id: int | str | None = None,
) -> dict[str, Any]: ) -> JsonRpc:
"""Create the JSON-RPC command dictionary object. """Create the JSON-RPC command dictionary object.
Parameters Parameters
@ -213,6 +355,7 @@ class Device(httpx.AsyncClient):
A list of commands to execute; results in a list of output responses. A list of commands to execute; results in a list of output responses.
ofmt ofmt
Either 'json' or 'text'; indicates the output format for the CLI commands. Either 'json' or 'text'; indicates the output format for the CLI commands.
eAPI defaults to 'json'.
version version
By default the eAPI will use "version 1" for all API object models. By default the eAPI will use "version 1" for all API object models.
This driver will, by default, always set version to "latest" so This driver will, by default, always set version to "latest" so
@ -241,25 +384,20 @@ class Device(httpx.AsyncClient):
dict containing the JSON payload to run the command. dict containing the JSON payload to run the command.
""" """
cmd: dict[str, Any] = { return {
"jsonrpc": "2.0", "jsonrpc": "2.0",
"method": "runCmds", "method": "runCmds",
"params": { "params": {
"version": version, "version": version,
"cmds": commands, "cmds": commands,
"format": ofmt or self.EAPI_DEFAULT_OFMT, "format": EapiCommandFormat(ofmt),
"autoComplete": auto_complete,
"expandAliases": expand_aliases,
}, },
"id": req_id or id(self), "id": req_id or id(self),
} }
if auto_complete is not None:
cmd["params"].update({"autoComplete": auto_complete})
if expand_aliases is not None: async def jsonrpc_exec(self, jsonrpc: JsonRpc) -> list[EapiJsonOutput] | list[EapiTextOutput]:
cmd["params"].update({"expandAliases": expand_aliases})
return cmd
async def jsonrpc_exec(self, jsonrpc: dict[str, Any]) -> list[dict[str, Any] | str]:
"""Execute the JSON-RPC dictionary object. """Execute the JSON-RPC dictionary object.
Parameters Parameters
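For reference, the body returned by _jsonrpc_command() now always carries the autoComplete and expandAliases keys (previously they were only added when set). A representative payload, with illustrative values, looks roughly like this:

# Illustrative JSON-RPC body; "format" holds the EapiCommandFormat enum value.
payload = {
    "jsonrpc": "2.0",
    "method": "runCmds",
    "params": {
        "version": "latest",
        "cmds": ["show version"],
        "format": "json",
        "autoComplete": False,   # now always present
        "expandAliases": False,  # now always present
    },
    "id": 42,  # req_id if given, otherwise id(self)
}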
@ -315,7 +453,7 @@ class Device(httpx.AsyncClient):
failed_cmd = commands[err_at] failed_cmd = commands[err_at]
raise EapiCommandError( raise EapiCommandError(
passed=[get_output(cmd_data[cmd_i]) for cmd_i, cmd in enumerate(commands[:err_at])], passed=[get_output(cmd_data[i]) for i in range(err_at)],
failed=failed_cmd["cmd"] if isinstance(failed_cmd, dict) else failed_cmd, failed=failed_cmd["cmd"] if isinstance(failed_cmd, dict) else failed_cmd,
errors=cmd_data[err_at]["errors"], errors=cmd_data[err_at]["errors"],
errmsg=err_msg, errmsg=err_msg,
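Finally, a hedged sketch of consuming the EapiCommandError raised above when one command in a batch fails; the attribute names (passed, failed, errors, errmsg) are assumed from the constructor call in this diff:

from asynceapi import Device
from asynceapi.errors import EapiCommandError  # import path assumed

async def run_batch(dev: Device) -> None:
    try:
        await dev.cli(commands=["show version", "show bogus command"])
    except EapiCommandError as exc:
        # Outputs of the commands that succeeded before the failure.
        print(exc.passed)
        # The failing command, its error strings, and the summary message.
        print(exc.failed, exc.errors, exc.errmsg)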

Some files were not shown because too many files have changed in this diff.