Adding upstream version 0.13.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
parent c0ae77e0f6
commit ecf5ca3300
272 changed files with 33172 additions and 0 deletions
tests/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
tests/conftest.py (new file, 55 lines)
@@ -0,0 +1,55 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""
conftest.py - used to store anta specific fixtures used for tests
"""
from __future__ import annotations

import logging
from typing import TYPE_CHECKING, Any

import pytest

if TYPE_CHECKING:
    from pytest import Metafunc

# Load fixtures from dedicated file tests/lib/fixture.py
# As well as pytest_asyncio plugin to test co-routines
pytest_plugins = [
    "tests.lib.fixture",
    "pytest_asyncio",
]

# Enable nice assert messages
# https://docs.pytest.org/en/7.1.x/how-to/writing_plugins.html#assertion-rewriting
pytest.register_assert_rewrite("tests.lib.anta")

# Placeholder to disable logging of some external libs
for _ in ("asyncio", "httpx"):
    logging.getLogger(_).setLevel(logging.CRITICAL)


def build_test_id(val: dict[str, Any]) -> str:
    """
    build id for a unit test of an AntaTest subclass

    {
        "name": "meaningful test name",
        "test": <AntaTest instance>,
        ...
    }
    """
    return f"{val['test'].__module__}.{val['test'].__name__}-{val['name']}"


def pytest_generate_tests(metafunc: Metafunc) -> None:
    """
    This function is called during test collection.
    It will parametrize test cases based on the `DATA` data structure defined in `tests.units.anta_tests` modules.
    See `tests/units/anta_tests/README.md` for more information on how to use it.
    Test IDs are generated using the `build_test_id` function above.
    """
    if "tests.units.anta_tests" in metafunc.module.__package__:
        # This is a unit test for an AntaTest subclass
        metafunc.parametrize("data", metafunc.module.DATA, ids=build_test_id)
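For orientation, the `DATA` structure that pytest_generate_tests looks up has the shape sketched below. This is an illustrative example only (VerifyUptime and the values are placeholders); the real entries live in the tests/units/anta_tests modules added later in this commit.

# Illustrative sketch of a DATA entry consumed by pytest_generate_tests above.
from typing import Any

from anta.tests.system import VerifyUptime

DATA: list[dict[str, Any]] = [
    {
        "name": "success",                  # becomes the test ID suffix via build_test_id
        "test": VerifyUptime,               # AntaTest subclass under test
        "eos_data": [{"upTime": 1000000.68, "loadAvg": [0.17, 0.21, 0.18], "users": 1, "currentTime": 1643761588.03}],
        "inputs": {"minimum": 10},          # inputs passed to the AntaTest
        "expected": {"result": "success"},  # expected TestResult status (and optional "messages")
    },
]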
tests/data/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
tests/data/ansible_inventory.yml (new file, 47 lines)
@@ -0,0 +1,47 @@
---
all:
  children:
    cv_servers:
      hosts:
        cv_atd1:
          ansible_host: 10.73.1.238
          ansible_user: tom
          ansible_password: arista123
          cv_collection: v3
    ATD_LAB:
      vars:
        ansible_user: arista
        ansible_ssh_pass: arista
      children:
        ATD_FABRIC:
          children:
            ATD_SPINES:
              vars:
                type: spine
              hosts:
                spine1:
                  ansible_host: 192.168.0.10
                spine2:
                  ansible_host: 192.168.0.11
            ATD_LEAFS:
              vars:
                type: l3leaf
              children:
                pod1:
                  hosts:
                    leaf1:
                      ansible_host: 192.168.0.12
                    leaf2:
                      ansible_host: 192.168.0.13
                pod2:
                  hosts:
                    leaf3:
                      ansible_host: 192.168.0.14
                    leaf4:
                      ansible_host: 192.168.0.15
        ATD_TENANTS_NETWORKS:
          children:
            ATD_LEAFS:
        ATD_SERVERS:
          children:
            ATD_LEAFS:
tests/data/empty (new empty file)
tests/data/empty_ansible_inventory.yml (new file, 1 line)
@@ -0,0 +1 @@
---
tests/data/expected_anta_inventory.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
anta_inventory:
  hosts:
  - host: 10.73.1.238
    name: cv_atd1
  - host: 192.168.0.10
    name: spine1
  - host: 192.168.0.11
    name: spine2
  - host: 192.168.0.12
    name: leaf1
  - host: 192.168.0.13
    name: leaf2
  - host: 192.168.0.14
    name: leaf3
  - host: 192.168.0.15
    name: leaf4
tests/data/json_data.py (new file, 258 lines)
@@ -0,0 +1,258 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
# pylint: skip-file
|
||||
|
||||
INVENTORY_MODEL_HOST_VALID = [
|
||||
{"name": "validIPv4", "input": "1.1.1.1", "expected_result": "valid"},
|
||||
{
|
||||
"name": "validIPv6",
|
||||
"input": "fe80::cc62:a9ff:feef:932a",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_HOST_INVALID = [
|
||||
{
|
||||
"name": "invalidIPv4_with_netmask",
|
||||
"input": "1.1.1.1/32",
|
||||
},
|
||||
{
|
||||
"name": "invalidIPv6_with_netmask",
|
||||
"input": "fe80::cc62:a9ff:feef:932a/128",
|
||||
},
|
||||
{"name": "invalidHost_format", "input": "@", "expected_result": "invalid"},
|
||||
{
|
||||
"name": "invalidIPv6_format",
|
||||
"input": "fe80::cc62:a9ff:feef:",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_HOST_CACHE = [
|
||||
{"name": "Host cache default", "input": {"host": "1.1.1.1"}, "expected_result": False},
|
||||
{"name": "Host cache enabled", "input": {"host": "1.1.1.1", "disable_cache": False}, "expected_result": False},
|
||||
{"name": "Host cache disabled", "input": {"host": "1.1.1.1", "disable_cache": True}, "expected_result": True},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_NETWORK_VALID = [
|
||||
{"name": "ValidIPv4_Subnet", "input": "1.1.1.0/24", "expected_result": "valid"},
|
||||
{"name": "ValidIPv6_Subnet", "input": "2001:db8::/32", "expected_result": "valid"},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_NETWORK_INVALID = [
|
||||
{"name": "ValidIPv4_Subnet", "input": "1.1.1.0/17", "expected_result": "invalid"},
|
||||
{
|
||||
"name": "InvalidIPv6_Subnet",
|
||||
"input": "2001:db8::/16",
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_NETWORK_CACHE = [
|
||||
{"name": "Network cache default", "input": {"network": "1.1.1.0/24"}, "expected_result": False},
|
||||
{"name": "Network cache enabled", "input": {"network": "1.1.1.0/24", "disable_cache": False}, "expected_result": False},
|
||||
{"name": "Network cache disabled", "input": {"network": "1.1.1.0/24", "disable_cache": True}, "expected_result": True},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_RANGE_VALID = [
|
||||
{
|
||||
"name": "ValidIPv4_Range",
|
||||
"input": {"start": "10.1.0.1", "end": "10.1.0.10"},
|
||||
"expected_result": "valid",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_RANGE_INVALID = [
|
||||
{
|
||||
"name": "InvalidIPv4_Range_name",
|
||||
"input": {"start": "toto", "end": "10.1.0.1"},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_RANGE_CACHE = [
|
||||
{"name": "Range cache default", "input": {"start": "1.1.1.1", "end": "1.1.1.10"}, "expected_result": False},
|
||||
{"name": "Range cache enabled", "input": {"start": "1.1.1.1", "end": "1.1.1.10", "disable_cache": False}, "expected_result": False},
|
||||
{"name": "Range cache disabled", "input": {"start": "1.1.1.1", "end": "1.1.1.10", "disable_cache": True}, "expected_result": True},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_VALID = [
|
||||
{
|
||||
"name": "Valid_Host_Only",
|
||||
"input": {"hosts": [{"host": "192.168.0.17"}, {"host": "192.168.0.2"}]},
|
||||
"expected_result": "valid",
|
||||
},
|
||||
{
|
||||
"name": "Valid_Networks_Only",
|
||||
"input": {"networks": [{"network": "192.168.0.0/16"}, {"network": "192.168.1.0/24"}]},
|
||||
"expected_result": "valid",
|
||||
},
|
||||
{
|
||||
"name": "Valid_Ranges_Only",
|
||||
"input": {
|
||||
"ranges": [
|
||||
{"start": "10.1.0.1", "end": "10.1.0.10"},
|
||||
{"start": "10.2.0.1", "end": "10.2.1.10"},
|
||||
]
|
||||
},
|
||||
"expected_result": "valid",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_MODEL_INVALID = [
|
||||
{
|
||||
"name": "Host_with_Invalid_entry",
|
||||
"input": {"hosts": [{"host": "192.168.0.17"}, {"host": "192.168.0.2/32"}]},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_DEVICE_MODEL_VALID = [
|
||||
{
|
||||
"name": "Valid_Inventory",
|
||||
"input": [{"host": "1.1.1.1", "username": "arista", "password": "arista123!"}, {"host": "1.1.1.2", "username": "arista", "password": "arista123!"}],
|
||||
"expected_result": "valid",
|
||||
},
|
||||
]
|
||||
|
||||
INVENTORY_DEVICE_MODEL_INVALID = [
|
||||
{
|
||||
"name": "Invalid_Inventory",
|
||||
"input": [{"host": "1.1.1.1", "password": "arista123!"}, {"host": "1.1.1.1", "username": "arista"}],
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
]
|
||||
|
||||
ANTA_INVENTORY_TESTS_VALID = [
|
||||
{
|
||||
"name": "ValidInventory_with_host_only",
|
||||
"input": {"anta_inventory": {"hosts": [{"host": "192.168.0.17"}, {"host": "192.168.0.2"}, {"host": "my.awesome.host.com"}]}},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "192.168.0.17",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "ValidInventory_with_networks_only",
|
||||
"input": {"anta_inventory": {"networks": [{"network": "192.168.0.0/24"}]}},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "192.168.0.1",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 256,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "ValidInventory_with_ranges_only",
|
||||
"input": {
|
||||
"anta_inventory": {
|
||||
"ranges": [
|
||||
{"start": "10.0.0.1", "end": "10.0.0.11"},
|
||||
{"start": "10.0.0.101", "end": "10.0.0.111"},
|
||||
]
|
||||
}
|
||||
},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "10.0.0.10",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 22,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "ValidInventory_with_host_port",
|
||||
"input": {"anta_inventory": {"hosts": [{"host": "192.168.0.17", "port": 443}, {"host": "192.168.0.2", "port": 80}]}},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "192.168.0.17",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "ValidInventory_with_host_tags",
|
||||
"input": {"anta_inventory": {"hosts": [{"host": "192.168.0.17", "tags": ["leaf"]}, {"host": "192.168.0.2", "tags": ["spine"]}]}},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "192.168.0.17",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 2,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "ValidInventory_with_networks_tags",
|
||||
"input": {"anta_inventory": {"networks": [{"network": "192.168.0.0/24", "tags": ["leaf"]}]}},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "192.168.0.1",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 256,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "ValidInventory_with_ranges_tags",
|
||||
"input": {
|
||||
"anta_inventory": {
|
||||
"ranges": [
|
||||
{"start": "10.0.0.1", "end": "10.0.0.11", "tags": ["leaf"]},
|
||||
{"start": "10.0.0.101", "end": "10.0.0.111", "tags": ["spine"]},
|
||||
]
|
||||
}
|
||||
},
|
||||
"expected_result": "valid",
|
||||
"parameters": {
|
||||
"ipaddress_in_scope": "10.0.0.10",
|
||||
"ipaddress_out_of_scope": "192.168.1.1",
|
||||
"nb_hosts": 22,
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
ANTA_INVENTORY_TESTS_INVALID = [
|
||||
{
|
||||
"name": "InvalidInventory_with_host_only",
|
||||
"input": {"anta_inventory": {"hosts": [{"host": "192.168.0.17/32"}, {"host": "192.168.0.2"}]}},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
{
|
||||
"name": "InvalidInventory_wrong_network_bits",
|
||||
"input": {"anta_inventory": {"networks": [{"network": "192.168.42.0/8"}]}},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
{
|
||||
"name": "InvalidInventory_wrong_network",
|
||||
"input": {"anta_inventory": {"networks": [{"network": "toto"}]}},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
{
|
||||
"name": "InvalidInventory_wrong_range",
|
||||
"input": {"anta_inventory": {"ranges": [{"start": "toto", "end": "192.168.42.42"}]}},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
{
|
||||
"name": "InvalidInventory_wrong_range_type_mismatch",
|
||||
"input": {"anta_inventory": {"ranges": [{"start": "fe80::cafe", "end": "192.168.42.42"}]}},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
{
|
||||
"name": "Invalid_Root_Key",
|
||||
"input": {
|
||||
"inventory": {
|
||||
"ranges": [
|
||||
{"start": "10.0.0.1", "end": "10.0.0.11"},
|
||||
{"start": "10.0.0.100", "end": "10.0.0.111"},
|
||||
]
|
||||
}
|
||||
},
|
||||
"expected_result": "invalid",
|
||||
},
|
||||
]
|
||||
|
||||
TEST_RESULT_SET_STATUS = [
|
||||
{"name": "set_success", "target": "success", "message": "success"},
|
||||
{"name": "set_error", "target": "error", "message": "error"},
|
||||
{"name": "set_failure", "target": "failure", "message": "failure"},
|
||||
{"name": "set_skipped", "target": "skipped", "message": "skipped"},
|
||||
{"name": "set_unset", "target": "unset", "message": "unset"},
|
||||
]
|
tests/data/syntax_error.py (new file, 7 lines)
@@ -0,0 +1,7 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
# pylint: skip-file
# flake8: noqa
# type: ignore
typo
tests/data/template.j2 (new file, 3 lines)
@@ -0,0 +1,3 @@
{% for d in data %}
* {{ d.test }} is [green]{{ d.result | upper}}[/green] for {{ d.name }}
{% endfor %}
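The template above is rendered with a `data` list whose items expose `test`, `result` and `name`. A quick illustrative rendering, assuming Jinja2 is installed and the file is read from the path above (values are examples only):

# Illustration of rendering tests/data/template.j2 (example values).
from pathlib import Path

from jinja2 import Template

template = Template(Path("tests/data/template.j2").read_text())
print(template.render(data=[{"test": "VerifyEOSVersion", "result": "success", "name": "dummy"}]))
# -> * VerifyEOSVersion is [green]SUCCESS[/green] for dummy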
tests/data/test_catalog.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
---
anta.tests.software:
  - VerifyEOSVersion:
      versions:
        - 4.31.1F
tests/data/test_catalog_not_a_list.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
---
anta.tests.configuration: true
@@ -0,0 +1,9 @@
---
anta.tests.software:
  - VerifyEOSVersion:
      versions:
        - 4.25.4M
        - 4.26.1F
    VerifyTerminAttrVersion:
      versions:
        - 4.25.4M
tests/data/test_catalog_test_definition_not_a_dict.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
---
anta.tests.software:
  - VerifyEOSVersion
tests/data/test_catalog_with_syntax_error_module.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
---
tests.data.syntax_error:
tests/data/test_catalog_with_tags.yml (new file, 28 lines)
@@ -0,0 +1,28 @@
---
anta.tests.system:
  - VerifyUptime:
      minimum: 10
      filters:
        tags: ['fabric']
  - VerifyReloadCause:
      filters:
        tags: ['leaf', 'spine']
  - VerifyCoredump:
  - VerifyAgentLogs:
  - VerifyCPUUtilization:
      filters:
        tags: ['leaf']
  - VerifyMemoryUtilization:
      filters:
        tags: ['testdevice']
  - VerifyFileSystemUtilization:
  - VerifyNTP:

anta.tests.mlag:
  - VerifyMlagStatus:

anta.tests.interfaces:
  - VerifyL3MTU:
      mtu: 1500
      filters:
        tags: ['demo']
tests/data/test_catalog_with_undefined_module.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
---
anta.tests.undefined:
  - MyTest:
tests/data/test_catalog_with_undefined_module_nested.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
---
anta.tests:
  undefined:
    - MyTest:
tests/data/test_catalog_with_undefined_tests.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
---
anta.tests.software:
  - FakeTest:
tests/data/test_catalog_wrong_type.yml (new file, 1 line)
@@ -0,0 +1 @@
"Not a string"
tests/data/test_empty_catalog.yml (new empty file)
tests/data/test_inventory.yml (new file, 12 lines)
@@ -0,0 +1,12 @@
---
anta_inventory:
  hosts:
    - name: dummy
      host: dummy.anta.ninja
      tags: ["leaf"]
    - name: dummy2
      host: dummy2.anta.ninja
      tags: ["leaf"]
    - name: dummy3
      host: dummy3.anta.ninja
      tags: ["spine"]
tests/data/test_snapshot_commands.yml (new file, 8 lines)
@@ -0,0 +1,8 @@
---
# list of EOS commands to collect in JSON format
json_format:
  - show version

# list of EOS commands to collect in text format
text_format:
  - show version
tests/data/toto.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
anta_inventory:
  hosts:
  - host: 10.73.1.238
    name: cv_atd1
  - host: 192.168.0.10
    name: spine1
  - host: 192.168.0.11
    name: spine2
  - host: 192.168.0.12
    name: leaf1
  - host: 192.168.0.13
    name: leaf2
  - host: 192.168.0.14
    name: leaf3
  - host: 192.168.0.15
    name: leaf4
tests/lib/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
tests/lib/anta.py (new file, 34 lines)
@@ -0,0 +1,34 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""
generic test function used to generate unit tests for each AntaTest
"""
from __future__ import annotations

import asyncio
from typing import Any

from anta.device import AntaDevice


def test(device: AntaDevice, data: dict[str, Any]) -> None:
    """
    Generic test function for AntaTest subclass.
    See `tests/units/anta_tests/README.md` for more information on how to use it.
    """
    # Instantiate the AntaTest subclass
    test_instance = data["test"](device, inputs=data["inputs"], eos_data=data["eos_data"])
    # Run the test() method
    asyncio.run(test_instance.test())
    # Assert expected result
    assert test_instance.result.result == data["expected"]["result"], test_instance.result.messages
    if "messages" in data["expected"]:
        # We expect messages in test result
        assert len(test_instance.result.messages) == len(data["expected"]["messages"])
        # Test will pass if the expected message is included in the test result message
        for message, expected in zip(test_instance.result.messages, data["expected"]["messages"]):  # NOTE: zip(strict=True) has been added in Python 3.10
            assert expected in message
    else:
        # Test result should not have messages
        assert test_instance.result.messages == []
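One behaviour worth noting in the assertions above: expected messages are compared pairwise and as substrings, so an entry in `expected["messages"]` only needs to be contained in the corresponding result message. A small standalone illustration (the message string is taken from the test data later in this commit):

# Standalone illustration of the message check performed in test() above.
result_messages = ["Source-interface Management0 is not configured in VRF MGMT"]
expected_messages = ["is not configured in VRF MGMT"]  # a substring is sufficient
assert len(result_messages) == len(expected_messages)
for message, expected in zip(result_messages, expected_messages):
    assert expected in message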
tests/lib/fixture.py (new file, 242 lines)
@@ -0,0 +1,242 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Fixture for Anta Testing"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, Iterator
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner, Result
|
||||
from pytest import CaptureFixture
|
||||
|
||||
from anta import aioeapi
|
||||
from anta.cli.console import console
|
||||
from anta.device import AntaDevice, AsyncEOSDevice
|
||||
from anta.inventory import AntaInventory
|
||||
from anta.models import AntaCommand
|
||||
from anta.result_manager import ResultManager
|
||||
from anta.result_manager.models import TestResult
|
||||
from tests.lib.utils import default_anta_env
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DEVICE_HW_MODEL = "pytest"
|
||||
DEVICE_NAME = "pytest"
|
||||
COMMAND_OUTPUT = "retrieved"
|
||||
|
||||
MOCK_CLI_JSON: dict[str, aioeapi.EapiCommandError | dict[str, Any]] = {
|
||||
"show version": {
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"version": "4.31.1F",
|
||||
},
|
||||
"enable": {},
|
||||
"clear counters": {},
|
||||
"clear hardware counter drop": {},
|
||||
"undefined": aioeapi.EapiCommandError(
|
||||
passed=[], failed="show version", errors=["Authorization denied for command 'show version'"], errmsg="Invalid command", not_exec=[]
|
||||
),
|
||||
}
|
||||
|
||||
MOCK_CLI_TEXT: dict[str, aioeapi.EapiCommandError | str] = {
|
||||
"show version": "Arista cEOSLab",
|
||||
"bash timeout 10 ls -1t /mnt/flash/schedule/tech-support": "dummy_tech-support_2023-12-01.1115.log.gz\ndummy_tech-support_2023-12-01.1015.log.gz",
|
||||
"bash timeout 10 ls -1t /mnt/flash/schedule/tech-support | head -1": "dummy_tech-support_2023-12-01.1115.log.gz",
|
||||
"show running-config | include aaa authorization exec default": "aaa authorization exec default local",
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def device(request: pytest.FixtureRequest) -> Iterator[AntaDevice]:
|
||||
"""
|
||||
Returns an AntaDevice instance with mocked abstract methods
|
||||
"""
|
||||
|
||||
def _collect(command: AntaCommand) -> None:
|
||||
command.output = COMMAND_OUTPUT
|
||||
|
||||
kwargs = {"name": DEVICE_NAME, "hw_model": DEVICE_HW_MODEL}
|
||||
|
||||
if hasattr(request, "param"):
|
||||
# Fixture is parametrized indirectly
|
||||
kwargs.update(request.param)
|
||||
with patch.object(AntaDevice, "__abstractmethods__", set()):
|
||||
with patch("anta.device.AntaDevice._collect", side_effect=_collect):
|
||||
# AntaDevice constructor does not have hw_model argument
|
||||
hw_model = kwargs.pop("hw_model")
|
||||
dev = AntaDevice(**kwargs) # type: ignore[abstract, arg-type] # pylint: disable=abstract-class-instantiated, unexpected-keyword-arg
|
||||
dev.hw_model = hw_model
|
||||
yield dev
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_inventory() -> AntaInventory:
|
||||
"""
|
||||
Return the test_inventory
|
||||
"""
|
||||
env = default_anta_env()
|
||||
assert env["ANTA_INVENTORY"] and env["ANTA_USERNAME"] and env["ANTA_PASSWORD"] is not None
|
||||
return AntaInventory.parse(
|
||||
filename=env["ANTA_INVENTORY"],
|
||||
username=env["ANTA_USERNAME"],
|
||||
password=env["ANTA_PASSWORD"],
|
||||
)
|
||||
|
||||
|
||||
# tests.unit.test_device.py fixture
|
||||
@pytest.fixture
|
||||
def async_device(request: pytest.FixtureRequest) -> AsyncEOSDevice:
|
||||
"""
|
||||
Returns an AsyncEOSDevice instance
|
||||
"""
|
||||
|
||||
kwargs = {"name": DEVICE_NAME, "host": "42.42.42.42", "username": "anta", "password": "anta"}
|
||||
|
||||
if hasattr(request, "param"):
|
||||
# Fixture is parametrized indirectly
|
||||
kwargs.update(request.param)
|
||||
dev = AsyncEOSDevice(**kwargs) # type: ignore[arg-type]
|
||||
return dev
|
||||
|
||||
|
||||
# tests.units.result_manager fixtures
|
||||
@pytest.fixture
|
||||
def test_result_factory(device: AntaDevice) -> Callable[[int], TestResult]:
|
||||
"""
|
||||
Return an anta.result_manager.models.TestResult object
|
||||
"""
|
||||
|
||||
# pylint: disable=redefined-outer-name
|
||||
|
||||
def _create(index: int = 0) -> TestResult:
|
||||
"""
|
||||
Actual Factory
|
||||
"""
|
||||
return TestResult(
|
||||
name=device.name,
|
||||
test=f"VerifyTest{index}",
|
||||
categories=["test"],
|
||||
description=f"Verifies Test {index}",
|
||||
custom_field=None,
|
||||
)
|
||||
|
||||
return _create
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def list_result_factory(test_result_factory: Callable[[int], TestResult]) -> Callable[[int], list[TestResult]]:
|
||||
"""
|
||||
Return a list[TestResult] with 'size' TestResult instances instantiated using the test_result_factory fixture
|
||||
"""
|
||||
|
||||
# pylint: disable=redefined-outer-name
|
||||
|
||||
def _factory(size: int = 0) -> list[TestResult]:
|
||||
"""
|
||||
Factory for list[TestResult] entry of size entries
|
||||
"""
|
||||
result: list[TestResult] = []
|
||||
for i in range(size):
|
||||
result.append(test_result_factory(i))
|
||||
return result
|
||||
|
||||
return _factory
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def result_manager_factory(list_result_factory: Callable[[int], list[TestResult]]) -> Callable[[int], ResultManager]:
|
||||
"""
|
||||
Return a ResultManager factory that takes as input a number of tests
|
||||
"""
|
||||
|
||||
# pylint: disable=redefined-outer-name
|
||||
|
||||
def _factory(number: int = 0) -> ResultManager:
|
||||
"""
|
||||
Factory for list[TestResult] entry of size entries
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
result_manager.add_test_results(list_result_factory(number))
|
||||
return result_manager
|
||||
|
||||
return _factory
|
||||
|
||||
|
||||
# tests.units.cli fixtures
|
||||
@pytest.fixture
|
||||
def temp_env(tmp_path: Path) -> dict[str, str | None]:
|
||||
"""Fixture that create a temporary ANTA inventory that can be overriden
|
||||
and returns the corresponding environment variables"""
|
||||
env = default_anta_env()
|
||||
anta_inventory = str(env["ANTA_INVENTORY"])
|
||||
temp_inventory = tmp_path / "test_inventory.yml"
|
||||
shutil.copy(anta_inventory, temp_inventory)
|
||||
env["ANTA_INVENTORY"] = str(temp_inventory)
|
||||
return env
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def click_runner(capsys: CaptureFixture[str]) -> Iterator[CliRunner]:
|
||||
"""
|
||||
Convenience fixture to return a click.CliRunner for cli testing
|
||||
"""
|
||||
|
||||
class AntaCliRunner(CliRunner):
|
||||
"""Override CliRunner to inject specific variables for ANTA"""
|
||||
|
||||
def invoke(self, *args, **kwargs) -> Result: # type: ignore[no-untyped-def]
|
||||
# Inject default env if not provided
|
||||
kwargs["env"] = kwargs["env"] if "env" in kwargs else default_anta_env()
|
||||
# Deterministic terminal width
|
||||
kwargs["env"]["COLUMNS"] = "165"
|
||||
|
||||
kwargs["auto_envvar_prefix"] = "ANTA"
|
||||
# Way to fix https://github.com/pallets/click/issues/824
|
||||
with capsys.disabled():
|
||||
result = super().invoke(*args, **kwargs)
|
||||
print("--- CLI Output ---")
|
||||
print(result.output)
|
||||
return result
|
||||
|
||||
def cli(
|
||||
command: str | None = None, commands: list[dict[str, Any]] | None = None, ofmt: str = "json", version: int | str | None = "latest", **kwargs: Any
|
||||
) -> dict[str, Any] | list[dict[str, Any]]:
|
||||
# pylint: disable=unused-argument
|
||||
def get_output(command: str | dict[str, Any]) -> dict[str, Any]:
|
||||
if isinstance(command, dict):
|
||||
command = command["cmd"]
|
||||
mock_cli: dict[str, Any]
|
||||
if ofmt == "json":
|
||||
mock_cli = MOCK_CLI_JSON
|
||||
elif ofmt == "text":
|
||||
mock_cli = MOCK_CLI_TEXT
|
||||
for mock_cmd, output in mock_cli.items():
|
||||
if command == mock_cmd:
|
||||
logger.info(f"Mocking command {mock_cmd}")
|
||||
if isinstance(output, aioeapi.EapiCommandError):
|
||||
raise output
|
||||
return output
|
||||
message = f"Command '{command}' is not mocked"
|
||||
logger.critical(message)
|
||||
raise NotImplementedError(message)
|
||||
|
||||
res: dict[str, Any] | list[dict[str, Any]]
|
||||
if command is not None:
|
||||
logger.debug(f"Mock input {command}")
|
||||
res = get_output(command)
|
||||
if commands is not None:
|
||||
logger.debug(f"Mock input {commands}")
|
||||
res = list(map(get_output, commands))
|
||||
logger.debug(f"Mock output {res}")
|
||||
return res
|
||||
|
||||
# Patch aioeapi methods used by AsyncEOSDevice. See tests/units/test_device.py
|
||||
with patch("aioeapi.device.Device.check_connection", return_value=True), patch("aioeapi.device.Device.cli", side_effect=cli), patch("asyncssh.connect"), patch(
|
||||
"asyncssh.scp"
|
||||
):
|
||||
console._color_system = None # pylint: disable=protected-access
|
||||
yield AntaCliRunner()
|
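Both the device and async_device fixtures above read request.param when present, so a unit test can override the default constructor arguments through pytest's indirect parametrization. A hypothetical example (not part of this commit):

# Hypothetical test overriding the device fixture defaults via indirect parametrization.
import pytest

from anta.device import AntaDevice


@pytest.mark.parametrize("device", [{"name": "custom-device", "hw_model": "cEOSLab"}], indirect=True)
def test_custom_device(device: AntaDevice) -> None:
    assert device.name == "custom-device"
    assert device.hw_model == "cEOSLab"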
tests/lib/utils.py (new file, 49 lines)
@@ -0,0 +1,49 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""
tests.lib.utils
"""
from __future__ import annotations

from pathlib import Path
from typing import Any


def generate_test_ids_dict(val: dict[str, Any], key: str = "name") -> str:
    """
    generate_test_ids Helper to generate test ID for parametrize
    """
    return val.get(key, "unnamed_test")


def generate_test_ids_list(val: list[dict[str, Any]], key: str = "name") -> list[str]:
    """
    generate_test_ids Helper to generate test ID for parametrize
    """
    return [entry[key] if key in entry.keys() else "unnamed_test" for entry in val]


def generate_test_ids(data: list[dict[str, Any]]) -> list[str]:
    """
    build id for a unit test of an AntaTest subclass

    {
        "name": "meaningful test name",
        "test": <AntaTest instance>,
        ...
    }
    """
    return [f"{val['test'].__module__}.{val['test'].__name__}-{val['name']}" for val in data]


def default_anta_env() -> dict[str, str | None]:
    """
    Return a default ANTA environment which can be passed to a CliRunner.invoke method
    """
    return {
        "ANTA_USERNAME": "anta",
        "ANTA_PASSWORD": "formica",
        "ANTA_INVENTORY": str(Path(__file__).parent.parent / "data" / "test_inventory.yml"),
        "ANTA_CATALOG": str(Path(__file__).parent.parent / "data" / "test_catalog.yml"),
    }
tests/mock_data/show_ntp_status_text_synchronised.out (new file, 1 line)
@@ -0,0 +1 @@
[{'output': 'synchronised to NTP server (51.254.83.231) at stratum 3\n time correct to within 82 ms\n polling server every 1024 s\n\n'}]
tests/mock_data/show_uptime_json_1000000.out (new file, 1 line)
@@ -0,0 +1 @@
[{'upTime': 1000000.68, 'loadAvg': [0.17, 0.21, 0.18], 'users': 1, 'currentTime': 1643761588.030645}]
tests/mock_data/show_version_json_4.27.1.1F.out (new file, 1 line)
@@ -0,0 +1 @@
[{'imageFormatVersion': '2.0', 'uptime': 2697.76, 'modelName': 'DCS-7280TRA-48C6-F', 'internalVersion': '4.27.1.1F-25536724.42711F', 'memTotal': 8098984, 'mfgName': 'Arista', 'serialNumber': 'SSJ16376415', 'systemMacAddress': '44:4c:a8:c7:1f:6b', 'bootupTimestamp': 1643715179.0, 'memFree': 6131068, 'version': '4.27.1.1F', 'configMacAddress': '00:00:00:00:00:00', 'isIntlVersion': False, 'internalBuildId': '38c43eab-c660-477a-915b-5a7b28da781d', 'hardwareRevision': '21.02', 'hwMacAddress': '44:4c:a8:c7:1f:6b', 'architecture': 'i686'}]
tests/units/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
tests/units/anta_tests/README.md (new file, 7 lines)
@@ -0,0 +1,7 @@
<!--
  ~ Copyright (c) 2023-2024 Arista Networks, Inc.
  ~ Use of this source code is governed by the Apache License 2.0
  ~ that can be found in the LICENSE file.
  -->

A guide explaining how to write the unit tests can be found in the [contribution guide](../../../docs/contribution.md#unit-tests).
tests/units/anta_tests/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
tests/units/anta_tests/routing/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
tests/units/anta_tests/routing/test_bgp.py (new file, 3385 lines)
(file diff suppressed because it is too large)
tests/units/anta_tests/routing/test_generic.py (new file, 230 lines)
@@ -0,0 +1,230 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.routing.generic.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.routing.generic import VerifyRoutingProtocolModel, VerifyRoutingTableEntry, VerifyRoutingTableSize
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyRoutingProtocolModel,
|
||||
"eos_data": [{"vrfs": {"default": {}}, "protoModelStatus": {"configuredProtoModel": "multi-agent", "operatingProtoModel": "multi-agent"}}],
|
||||
"inputs": {"model": "multi-agent"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-configured-model",
|
||||
"test": VerifyRoutingProtocolModel,
|
||||
"eos_data": [{"vrfs": {"default": {}}, "protoModelStatus": {"configuredProtoModel": "ribd", "operatingProtoModel": "ribd"}}],
|
||||
"inputs": {"model": "multi-agent"},
|
||||
"expected": {"result": "failure", "messages": ["routing model is misconfigured: configured: ribd - operating: ribd - expected: multi-agent"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-mismatch-operating-model",
|
||||
"test": VerifyRoutingProtocolModel,
|
||||
"eos_data": [{"vrfs": {"default": {}}, "protoModelStatus": {"configuredProtoModel": "multi-agent", "operatingProtoModel": "ribd"}}],
|
||||
"inputs": {"model": "multi-agent"},
|
||||
"expected": {"result": "failure", "messages": ["routing model is misconfigured: configured: multi-agent - operating: ribd - expected: multi-agent"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyRoutingTableSize,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
# Output truncated
|
||||
"maskLen": {"8": 2},
|
||||
"totalRoutes": 123,
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {"minimum": 42, "maximum": 666},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyRoutingTableSize,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
# Output truncated
|
||||
"maskLen": {"8": 2},
|
||||
"totalRoutes": 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {"minimum": 42, "maximum": 666},
|
||||
"expected": {"result": "failure", "messages": ["routing-table has 1000 routes and not between min (42) and maximum (666)"]},
|
||||
},
|
||||
{
|
||||
"name": "error-max-smaller-than-min",
|
||||
"test": VerifyRoutingTableSize,
|
||||
"eos_data": [{}],
|
||||
"inputs": {"minimum": 666, "maximum": 42},
|
||||
"expected": {
|
||||
"result": "error",
|
||||
"messages": ["Minimum 666 is greater than maximum 42"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyRoutingTableEntry,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"routingDisabled": False,
|
||||
"allRoutesProgrammedHardware": True,
|
||||
"allRoutesProgrammedKernel": True,
|
||||
"defaultRouteState": "notSet",
|
||||
"routes": {
|
||||
"10.1.0.1/32": {
|
||||
"hardwareProgrammed": True,
|
||||
"routeType": "eBGP",
|
||||
"routeLeaked": False,
|
||||
"kernelProgrammed": True,
|
||||
"routeAction": "forward",
|
||||
"directlyConnected": False,
|
||||
"preference": 20,
|
||||
"metric": 0,
|
||||
"vias": [{"nexthopAddr": "10.1.255.4", "interface": "Ethernet1"}],
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"routingDisabled": False,
|
||||
"allRoutesProgrammedHardware": True,
|
||||
"allRoutesProgrammedKernel": True,
|
||||
"defaultRouteState": "notSet",
|
||||
"routes": {
|
||||
"10.1.0.2/32": {
|
||||
"hardwareProgrammed": True,
|
||||
"routeType": "eBGP",
|
||||
"routeLeaked": False,
|
||||
"kernelProgrammed": True,
|
||||
"routeAction": "forward",
|
||||
"directlyConnected": False,
|
||||
"preference": 20,
|
||||
"metric": 0,
|
||||
"vias": [{"nexthopAddr": "10.1.255.6", "interface": "Ethernet2"}],
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
"inputs": {"vrf": "default", "routes": ["10.1.0.1", "10.1.0.2"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-missing-route",
|
||||
"test": VerifyRoutingTableEntry,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"routingDisabled": False,
|
||||
"allRoutesProgrammedHardware": True,
|
||||
"allRoutesProgrammedKernel": True,
|
||||
"defaultRouteState": "notSet",
|
||||
"routes": {},
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"routingDisabled": False,
|
||||
"allRoutesProgrammedHardware": True,
|
||||
"allRoutesProgrammedKernel": True,
|
||||
"defaultRouteState": "notSet",
|
||||
"routes": {
|
||||
"10.1.0.2/32": {
|
||||
"hardwareProgrammed": True,
|
||||
"routeType": "eBGP",
|
||||
"routeLeaked": False,
|
||||
"kernelProgrammed": True,
|
||||
"routeAction": "forward",
|
||||
"directlyConnected": False,
|
||||
"preference": 20,
|
||||
"metric": 0,
|
||||
"vias": [{"nexthopAddr": "10.1.255.6", "interface": "Ethernet2"}],
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
"inputs": {"vrf": "default", "routes": ["10.1.0.1", "10.1.0.2"]},
|
||||
"expected": {"result": "failure", "messages": ["The following route(s) are missing from the routing table of VRF default: ['10.1.0.1']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-route",
|
||||
"test": VerifyRoutingTableEntry,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"routingDisabled": False,
|
||||
"allRoutesProgrammedHardware": True,
|
||||
"allRoutesProgrammedKernel": True,
|
||||
"defaultRouteState": "notSet",
|
||||
"routes": {
|
||||
"10.1.0.1/32": {
|
||||
"hardwareProgrammed": True,
|
||||
"routeType": "eBGP",
|
||||
"routeLeaked": False,
|
||||
"kernelProgrammed": True,
|
||||
"routeAction": "forward",
|
||||
"directlyConnected": False,
|
||||
"preference": 20,
|
||||
"metric": 0,
|
||||
"vias": [{"nexthopAddr": "10.1.255.4", "interface": "Ethernet1"}],
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"routingDisabled": False,
|
||||
"allRoutesProgrammedHardware": True,
|
||||
"allRoutesProgrammedKernel": True,
|
||||
"defaultRouteState": "notSet",
|
||||
"routes": {
|
||||
"10.1.0.55/32": {
|
||||
"hardwareProgrammed": True,
|
||||
"routeType": "eBGP",
|
||||
"routeLeaked": False,
|
||||
"kernelProgrammed": True,
|
||||
"routeAction": "forward",
|
||||
"directlyConnected": False,
|
||||
"preference": 20,
|
||||
"metric": 0,
|
||||
"vias": [{"nexthopAddr": "10.1.255.6", "interface": "Ethernet2"}],
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
],
|
||||
"inputs": {"vrf": "default", "routes": ["10.1.0.1", "10.1.0.2"]},
|
||||
"expected": {"result": "failure", "messages": ["The following route(s) are missing from the routing table of VRF default: ['10.1.0.2']"]},
|
||||
},
|
||||
]
|
tests/units/anta_tests/routing/test_ospf.py (new file, 298 lines)
@@ -0,0 +1,298 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.routing.ospf.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.routing.ospf import VerifyOSPFNeighborCount, VerifyOSPFNeighborState
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyOSPFNeighborState,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"instList": {
|
||||
"666": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "7.7.7.7",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
{
|
||||
"routerId": "9.9.9.9",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"BLAH": {
|
||||
"instList": {
|
||||
"777": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "8.8.8.8",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyOSPFNeighborState,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"instList": {
|
||||
"666": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "7.7.7.7",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "2-way",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
{
|
||||
"routerId": "9.9.9.9",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"BLAH": {
|
||||
"instList": {
|
||||
"777": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "8.8.8.8",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "down",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Some neighbors are not correctly configured: [{'vrf': 'default', 'instance': '666', 'neighbor': '7.7.7.7', 'state': '2-way'},"
|
||||
" {'vrf': 'BLAH', 'instance': '777', 'neighbor': '8.8.8.8', 'state': 'down'}]."
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyOSPFNeighborState,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["no OSPF neighbor found"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyOSPFNeighborCount,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"instList": {
|
||||
"666": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "7.7.7.7",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
{
|
||||
"routerId": "9.9.9.9",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"BLAH": {
|
||||
"instList": {
|
||||
"777": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "8.8.8.8",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"number": 3},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifyOSPFNeighborCount,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"instList": {
|
||||
"666": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "7.7.7.7",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"number": 3},
|
||||
"expected": {"result": "failure", "messages": ["device has 1 neighbors (expected 3)"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-good-number-wrong-state",
|
||||
"test": VerifyOSPFNeighborCount,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"instList": {
|
||||
"666": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "7.7.7.7",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "2-way",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
{
|
||||
"routerId": "9.9.9.9",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "full",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"BLAH": {
|
||||
"instList": {
|
||||
"777": {
|
||||
"ospfNeighborEntries": [
|
||||
{
|
||||
"routerId": "8.8.8.8",
|
||||
"priority": 1,
|
||||
"drState": "DR",
|
||||
"interfaceName": "Ethernet1",
|
||||
"adjacencyState": "down",
|
||||
"inactivity": 1683298014.844345,
|
||||
"interfaceAddress": "10.3.0.1",
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"number": 3},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Some neighbors are not correctly configured: [{'vrf': 'default', 'instance': '666', 'neighbor': '7.7.7.7', 'state': '2-way'},"
|
||||
" {'vrf': 'BLAH', 'instance': '777', 'neighbor': '8.8.8.8', 'state': 'down'}]."
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyOSPFNeighborCount,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {},
|
||||
}
|
||||
],
|
||||
"inputs": {"number": 3},
|
||||
"expected": {"result": "skipped", "messages": ["no OSPF neighbor found"]},
|
||||
},
|
||||
]
|
tests/units/anta_tests/test_aaa.py (new file, 516 lines)
@@ -0,0 +1,516 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.aaa.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.aaa import (
|
||||
VerifyAcctConsoleMethods,
|
||||
VerifyAcctDefaultMethods,
|
||||
VerifyAuthenMethods,
|
||||
VerifyAuthzMethods,
|
||||
VerifyTacacsServerGroups,
|
||||
VerifyTacacsServers,
|
||||
VerifyTacacsSourceIntf,
|
||||
)
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=unused-import
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTacacsSourceIntf,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"intf": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured",
|
||||
"test": VerifyTacacsSourceIntf,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [],
|
||||
"groups": {},
|
||||
"srcIntf": {},
|
||||
}
|
||||
],
|
||||
"inputs": {"intf": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Source-interface Management0 is not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-intf",
|
||||
"test": VerifyTacacsSourceIntf,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management1"},
|
||||
}
|
||||
],
|
||||
"inputs": {"intf": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Wrong source-interface configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifyTacacsSourceIntf,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"PROD": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"intf": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Source-interface Management0 is not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTacacsServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"servers": ["10.22.10.91"], "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-servers",
|
||||
"test": VerifyTacacsServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [],
|
||||
"groups": {},
|
||||
"srcIntf": {},
|
||||
}
|
||||
],
|
||||
"inputs": {"servers": ["10.22.10.91"], "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["No TACACS servers are configured"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured",
|
||||
"test": VerifyTacacsServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"servers": ["10.22.10.91", "10.22.10.92"], "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["TACACS servers ['10.22.10.92'] are not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifyTacacsServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "PROD"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"servers": ["10.22.10.91"], "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["TACACS servers ['10.22.10.91'] are not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTacacsServerGroups,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP1": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"groups": ["GROUP1"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-server-groups",
|
||||
"test": VerifyTacacsServerGroups,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [],
|
||||
"groups": {},
|
||||
"srcIntf": {},
|
||||
}
|
||||
],
|
||||
"inputs": {"groups": ["GROUP1"]},
|
||||
"expected": {"result": "failure", "messages": ["No TACACS server group(s) are configured"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured",
|
||||
"test": VerifyTacacsServerGroups,
|
||||
"eos_data": [
|
||||
{
|
||||
"tacacsServers": [
|
||||
{
|
||||
"serverInfo": {"hostname": "10.22.10.91", "authport": 49, "vrf": "MGMT"},
|
||||
}
|
||||
],
|
||||
"groups": {"GROUP2": {"serverGroup": "TACACS+", "members": [{"hostname": "SERVER1", "authport": 49, "vrf": "MGMT"}]}},
|
||||
"srcIntf": {"MGMT": "Management0"},
|
||||
}
|
||||
],
|
||||
"inputs": {"groups": ["GROUP1"]},
|
||||
"expected": {"result": "failure", "messages": ["TACACS server group(s) ['GROUP1'] are not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "success-login-enable",
|
||||
"test": VerifyAuthenMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}, "login": {"methods": ["group tacacs+", "local"]}},
|
||||
"enableAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}},
|
||||
"dot1xAuthenMethods": {"default": {"methods": ["group radius"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["login", "enable"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-dot1x",
|
||||
"test": VerifyAuthenMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}, "login": {"methods": ["group tacacs+", "local"]}},
|
||||
"enableAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}},
|
||||
"dot1xAuthenMethods": {"default": {"methods": ["group radius"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["radius"], "types": ["dot1x"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-login-console",
|
||||
"test": VerifyAuthenMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}},
|
||||
"enableAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}},
|
||||
"dot1xAuthenMethods": {"default": {"methods": ["group radius"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["login", "enable"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA authentication methods are not configured for login console"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-login-console",
|
||||
"test": VerifyAuthenMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}, "login": {"methods": ["group radius", "local"]}},
|
||||
"enableAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}},
|
||||
"dot1xAuthenMethods": {"default": {"methods": ["group radius"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["login", "enable"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA authentication methods ['group tacacs+', 'local'] are not matching for login console"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-login-default",
|
||||
"test": VerifyAuthenMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginAuthenMethods": {"default": {"methods": ["group radius", "local"]}, "login": {"methods": ["group tacacs+", "local"]}},
|
||||
"enableAuthenMethods": {"default": {"methods": ["group tacacs+", "local"]}},
|
||||
"dot1xAuthenMethods": {"default": {"methods": ["group radius"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["login", "enable"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA authentication methods ['group tacacs+', 'local'] are not matching for ['login']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAuthzMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAuthzMethods": {"privilege0-15": {"methods": ["group tacacs+", "local"]}},
|
||||
"execAuthzMethods": {"exec": {"methods": ["group tacacs+", "local"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["commands", "exec"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-commands",
|
||||
"test": VerifyAuthzMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAuthzMethods": {"privilege0-15": {"methods": ["group radius", "local"]}},
|
||||
"execAuthzMethods": {"exec": {"methods": ["group tacacs+", "local"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["commands", "exec"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA authorization methods ['group tacacs+', 'local'] are not matching for ['commands']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-exec",
|
||||
"test": VerifyAuthzMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAuthzMethods": {"privilege0-15": {"methods": ["group tacacs+", "local"]}},
|
||||
"execAuthzMethods": {"exec": {"methods": ["group radius", "local"]}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "local"], "types": ["commands", "exec"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA authorization methods ['group tacacs+', 'local'] are not matching for ['exec']"]},
|
||||
},
|
||||
{
|
||||
"name": "success-commands-exec-system",
|
||||
"test": VerifyAcctDefaultMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {"privilege0-15": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"execAcctMethods": {"exec": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"systemAcctMethods": {"system": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-dot1x",
|
||||
"test": VerifyAcctDefaultMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {"privilege0-15": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"execAcctMethods": {"exec": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"systemAcctMethods": {"system": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultAction": "startStop", "defaultMethods": ["group radius", "logging"], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["radius", "logging"], "types": ["dot1x"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured",
|
||||
"test": VerifyAcctDefaultMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {"privilege0-15": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"execAcctMethods": {"exec": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"systemAcctMethods": {"system": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA default accounting is not configured for ['commands']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured-empty",
|
||||
"test": VerifyAcctDefaultMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"systemAcctMethods": {"system": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"execAcctMethods": {"exec": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"commandsAcctMethods": {"privilege0-15": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA default accounting is not configured for ['system', 'exec', 'commands']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-matching",
|
||||
"test": VerifyAcctDefaultMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {"privilege0-15": {"defaultAction": "startStop", "defaultMethods": ["group radius", "logging"], "consoleMethods": []}},
|
||||
"execAcctMethods": {"exec": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"systemAcctMethods": {"system": {"defaultAction": "startStop", "defaultMethods": ["group tacacs+", "logging"], "consoleMethods": []}},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA accounting default methods ['group tacacs+', 'logging'] are not matching for ['commands']"]},
|
||||
},
|
||||
{
|
||||
"name": "success-commands-exec-system",
|
||||
"test": VerifyAcctConsoleMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {
|
||||
"privilege0-15": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"execAcctMethods": {
|
||||
"exec": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"systemAcctMethods": {
|
||||
"system": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-dot1x",
|
||||
"test": VerifyAcctConsoleMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {
|
||||
"privilege0-15": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"execAcctMethods": {
|
||||
"exec": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"systemAcctMethods": {
|
||||
"system": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"dot1xAcctMethods": {
|
||||
"dot1x": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["dot1x"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured",
|
||||
"test": VerifyAcctConsoleMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {
|
||||
"privilege0-15": {
|
||||
"defaultMethods": [],
|
||||
"consoleMethods": [],
|
||||
}
|
||||
},
|
||||
"execAcctMethods": {
|
||||
"exec": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"systemAcctMethods": {
|
||||
"system": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA console accounting is not configured for ['commands']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured-empty",
|
||||
"test": VerifyAcctConsoleMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"systemAcctMethods": {"system": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"execAcctMethods": {"exec": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"commandsAcctMethods": {"privilege0-15": {"defaultMethods": [], "consoleMethods": []}},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA console accounting is not configured for ['system', 'exec', 'commands']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-matching",
|
||||
"test": VerifyAcctConsoleMethods,
|
||||
"eos_data": [
|
||||
{
|
||||
"commandsAcctMethods": {
|
||||
"privilege0-15": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group radius", "logging"],
|
||||
}
|
||||
},
|
||||
"execAcctMethods": {
|
||||
"exec": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"systemAcctMethods": {
|
||||
"system": {
|
||||
"defaultMethods": [],
|
||||
"consoleAction": "startStop",
|
||||
"consoleMethods": ["group tacacs+", "logging"],
|
||||
}
|
||||
},
|
||||
"dot1xAcctMethods": {"dot1x": {"defaultMethods": [], "consoleMethods": []}},
|
||||
}
|
||||
],
|
||||
"inputs": {"methods": ["tacacs+", "logging"], "types": ["commands", "exec", "system"]},
|
||||
"expected": {"result": "failure", "messages": ["AAA accounting console methods ['group tacacs+', 'logging'] are not matching for ['commands']"]},
|
||||
},
|
||||
]
523
tests/units/anta_tests/test_bfd.py
Normal file
@@ -0,0 +1,523 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.bfd.py
|
||||
"""
|
||||
# pylint: disable=C0302
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
# pylint: disable=C0413
|
||||
# because of the patch above
|
||||
from anta.tests.bfd import VerifyBFDPeersHealth, VerifyBFDPeersIntervals, VerifyBFDSpecificPeers # noqa: E402
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyBFDPeersIntervals,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"peerStatsDetail": {
|
||||
"operTxInterval": 1200000,
|
||||
"operRxInterval": 1200000,
|
||||
"detectMult": 3,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.70": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"peerStatsDetail": {
|
||||
"operTxInterval": 1200000,
|
||||
"operRxInterval": 1200000,
|
||||
"detectMult": 3,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"bfd_peers": [
|
||||
{"peer_address": "192.0.255.7", "vrf": "default", "tx_interval": 1200, "rx_interval": 1200, "multiplier": 3},
|
||||
{"peer_address": "192.0.255.70", "vrf": "MGMT", "tx_interval": 1200, "rx_interval": 1200, "multiplier": 3},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-peer",
|
||||
"test": VerifyBFDPeersIntervals,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"peerStatsDetail": {
|
||||
"operTxInterval": 1200000,
|
||||
"operRxInterval": 1200000,
|
||||
"detectMult": 3,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.71": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"peerStatsDetail": {
|
||||
"operTxInterval": 1200000,
|
||||
"operRxInterval": 1200000,
|
||||
"detectMult": 3,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"bfd_peers": [
|
||||
{"peer_address": "192.0.255.7", "vrf": "CS", "tx_interval": 1200, "rx_interval": 1200, "multiplier": 3},
|
||||
{"peer_address": "192.0.255.70", "vrf": "MGMT", "tx_interval": 1200, "rx_interval": 1200, "multiplier": 3},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured or timers are not correct:\n"
|
||||
"{'192.0.255.7': {'CS': 'Not Configured'}, '192.0.255.70': {'MGMT': 'Not Configured'}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-timers",
|
||||
"test": VerifyBFDPeersIntervals,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"peerStatsDetail": {
|
||||
"operTxInterval": 1300000,
|
||||
"operRxInterval": 1200000,
|
||||
"detectMult": 4,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.70": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"peerStatsDetail": {
|
||||
"operTxInterval": 120000,
|
||||
"operRxInterval": 120000,
|
||||
"detectMult": 5,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"bfd_peers": [
|
||||
{"peer_address": "192.0.255.7", "vrf": "default", "tx_interval": 1200, "rx_interval": 1200, "multiplier": 3},
|
||||
{"peer_address": "192.0.255.70", "vrf": "MGMT", "tx_interval": 1200, "rx_interval": 1200, "multiplier": 3},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured or timers are not correct:\n"
|
||||
"{'192.0.255.7': {'default': {'tx_interval': 1300000, 'rx_interval': 1200000, 'multiplier': 4}}, "
|
||||
"'192.0.255.70': {'MGMT': {'tx_interval': 120000, 'rx_interval': 120000, 'multiplier': 5}}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyBFDSpecificPeers,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 108328132,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.70": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 108328132,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bfd_peers": [{"peer_address": "192.0.255.7", "vrf": "default"}, {"peer_address": "192.0.255.70", "vrf": "MGMT"}]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-peer",
|
||||
"test": VerifyBFDSpecificPeers,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 108328132,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.71": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 108328132,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bfd_peers": [{"peer_address": "192.0.255.7", "vrf": "CS"}, {"peer_address": "192.0.255.70", "vrf": "MGMT"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured, status is not up or remote disc is zero:\n"
|
||||
"{'192.0.255.7': {'CS': 'Not Configured'}, '192.0.255.70': {'MGMT': 'Not Configured'}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-session-down",
|
||||
"test": VerifyBFDSpecificPeers,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "Down",
|
||||
"remoteDisc": 108328132,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.70": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "Down",
|
||||
"remoteDisc": 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bfd_peers": [{"peer_address": "192.0.255.7", "vrf": "default"}, {"peer_address": "192.0.255.70", "vrf": "MGMT"}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not configured, status is not up or remote disc is zero:\n"
|
||||
"{'192.0.255.7': {'default': {'status': 'Down', 'remote_disc': 108328132}}, "
|
||||
"'192.0.255.70': {'MGMT': {'status': 'Down', 'remote_disc': 0}}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyBFDPeersHealth,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 3940685114,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"ipv6Neighbors": {},
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.71": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 3940685114,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"ipv6Neighbors": {},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1703667348.111288,
|
||||
},
|
||||
],
|
||||
"inputs": {"down_threshold": 2},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-peer",
|
||||
"test": VerifyBFDPeersHealth,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"MGMT": {
|
||||
"ipv6Neighbors": {},
|
||||
"ipv4Neighbors": {},
|
||||
},
|
||||
"default": {
|
||||
"ipv6Neighbors": {},
|
||||
"ipv4Neighbors": {},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1703658481.8778424,
|
||||
},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["No IPv4 BFD peers are configured for any VRF."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-session-down",
|
||||
"test": VerifyBFDPeersHealth,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "down",
|
||||
"remoteDisc": 0,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
"192.0.255.70": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 3940685114,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"ipv6Neighbors": {},
|
||||
},
|
||||
"MGMT": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.71": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "down",
|
||||
"remoteDisc": 0,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"ipv6Neighbors": {},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1703658481.8778424,
|
||||
},
|
||||
],
|
||||
"inputs": {},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers are not up:\n192.0.255.7 is down in default VRF with remote disc 0.\n192.0.255.71 is down in MGMT VRF with remote disc 0."
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-session-up-disc",
|
||||
"test": VerifyBFDPeersHealth,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 0,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "Ethernet2",
|
||||
}
|
||||
}
|
||||
},
|
||||
"192.0.255.71": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 0,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "Ethernet2",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"ipv6Neighbors": {},
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1703658481.8778424,
|
||||
},
|
||||
],
|
||||
"inputs": {},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Following BFD peers were down:\n192.0.255.7 in default VRF has remote disc 0.\n192.0.255.71 in default VRF has remote disc 0."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-last-down",
|
||||
"test": VerifyBFDPeersHealth,
|
||||
"eos_data": [
|
||||
{
|
||||
"vrfs": {
|
||||
"default": {
|
||||
"ipv4Neighbors": {
|
||||
"192.0.255.7": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 3940685114,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
"192.0.255.71": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 3940685114,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
"192.0.255.17": {
|
||||
"peerStats": {
|
||||
"": {
|
||||
"status": "up",
|
||||
"remoteDisc": 3940685114,
|
||||
"lastDown": 1703657258.652725,
|
||||
"l3intf": "",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
"ipv6Neighbors": {},
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1703667348.111288,
|
||||
},
|
||||
],
|
||||
"inputs": {"down_threshold": 4},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Following BFD peers were down:\n192.0.255.7 in default VRF was down 3 hours ago.\n"
|
||||
"192.0.255.71 in default VRF was down 3 hours ago.\n192.0.255.17 in default VRF was down 3 hours ago."
|
||||
],
|
||||
},
|
||||
},
|
||||
]
35
tests/units/anta_tests/test_configuration.py
Normal file
@@ -0,0 +1,35 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Data for testing anta.tests.configuration"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.configuration import VerifyRunningConfigDiffs, VerifyZeroTouch
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyZeroTouch,
|
||||
"eos_data": [{"mode": "disabled"}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyZeroTouch,
|
||||
"eos_data": [{"mode": "enabled"}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["ZTP is NOT disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyRunningConfigDiffs,
|
||||
"eos_data": [""],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{"name": "failure", "test": VerifyRunningConfigDiffs, "eos_data": ["blah blah"], "inputs": None, "expected": {"result": "failure", "messages": ["blah blah"]}},
|
||||
]
369
tests/units/anta_tests/test_connectivity.py
Normal file
@@ -0,0 +1,369 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.connectivity.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.connectivity import VerifyLLDPNeighbors, VerifyReachability
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success-ip",
|
||||
"test": VerifyReachability,
|
||||
"inputs": {"hosts": [{"destination": "10.0.0.1", "source": "10.0.0.5"}, {"destination": "10.0.0.2", "source": "10.0.0.5"}]},
|
||||
"eos_data": [
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.1 (10.0.0.1) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.1: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
80 bytes from 10.0.0.1: icmp_seq=2 ttl=64 time=0.072 ms
|
||||
|
||||
--- 10.0.0.1 ping statistics ---
|
||||
2 packets transmitted, 2 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.2 (10.0.0.2) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.2: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
80 bytes from 10.0.0.2: icmp_seq=2 ttl=64 time=0.072 ms
|
||||
|
||||
--- 10.0.0.2 ping statistics ---
|
||||
2 packets transmitted, 2 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
],
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-interface",
|
||||
"test": VerifyReachability,
|
||||
"inputs": {"hosts": [{"destination": "10.0.0.1", "source": "Management0"}, {"destination": "10.0.0.2", "source": "Management0"}]},
|
||||
"eos_data": [
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.1 (10.0.0.1) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.1: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
80 bytes from 10.0.0.1: icmp_seq=2 ttl=64 time=0.072 ms
|
||||
|
||||
--- 10.0.0.1 ping statistics ---
|
||||
2 packets transmitted, 2 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.2 (10.0.0.2) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.2: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
80 bytes from 10.0.0.2: icmp_seq=2 ttl=64 time=0.072 ms
|
||||
|
||||
--- 10.0.0.2 ping statistics ---
|
||||
2 packets transmitted, 2 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
],
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-repeat",
|
||||
"test": VerifyReachability,
|
||||
"inputs": {"hosts": [{"destination": "10.0.0.1", "source": "Management0", "repeat": 1}]},
|
||||
"eos_data": [
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.1 (10.0.0.1) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.1: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
|
||||
--- 10.0.0.1 ping statistics ---
|
||||
1 packets transmitted, 1 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
],
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-ip",
|
||||
"test": VerifyReachability,
|
||||
"inputs": {"hosts": [{"destination": "10.0.0.11", "source": "10.0.0.5"}, {"destination": "10.0.0.2", "source": "10.0.0.5"}]},
|
||||
"eos_data": [
|
||||
{
|
||||
"messages": [
|
||||
"""ping: sendmsg: Network is unreachable
|
||||
ping: sendmsg: Network is unreachable
|
||||
PING 10.0.0.11 (10.0.0.11) from 10.0.0.5 : 72(100) bytes of data.
|
||||
|
||||
--- 10.0.0.11 ping statistics ---
|
||||
2 packets transmitted, 0 received, 100% packet loss, time 10ms
|
||||
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.2 (10.0.0.2) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.2: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
80 bytes from 10.0.0.2: icmp_seq=2 ttl=64 time=0.072 ms
|
||||
|
||||
--- 10.0.0.2 ping statistics ---
|
||||
2 packets transmitted, 2 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Connectivity test failed for the following source-destination pairs: [('10.0.0.5', '10.0.0.11')]"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-interface",
|
||||
"test": VerifyReachability,
|
||||
"inputs": {"hosts": [{"destination": "10.0.0.11", "source": "Management0"}, {"destination": "10.0.0.2", "source": "Management0"}]},
|
||||
"eos_data": [
|
||||
{
|
||||
"messages": [
|
||||
"""ping: sendmsg: Network is unreachable
|
||||
ping: sendmsg: Network is unreachable
|
||||
PING 10.0.0.11 (10.0.0.11) from 10.0.0.5 : 72(100) bytes of data.
|
||||
|
||||
--- 10.0.0.11 ping statistics ---
|
||||
2 packets transmitted, 0 received, 100% packet loss, time 10ms
|
||||
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
{
|
||||
"messages": [
|
||||
"""PING 10.0.0.2 (10.0.0.2) from 10.0.0.5 : 72(100) bytes of data.
|
||||
80 bytes from 10.0.0.2: icmp_seq=1 ttl=64 time=0.247 ms
|
||||
80 bytes from 10.0.0.2: icmp_seq=2 ttl=64 time=0.072 ms
|
||||
|
||||
--- 10.0.0.2 ping statistics ---
|
||||
2 packets transmitted, 2 received, 0% packet loss, time 0ms
|
||||
rtt min/avg/max/mdev = 0.072/0.159/0.247/0.088 ms, ipg/ewma 0.370/0.225 ms
|
||||
|
||||
"""
|
||||
]
|
||||
},
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["Connectivity test failed for the following source-destination pairs: [('Management0', '10.0.0.11')]"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
]
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet1"',
|
||||
"interfaceId_v2": "Ethernet1",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
"Ethernet2": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73f7.d138",
|
||||
"systemName": "DC1-SPINE2",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet1"',
|
||||
"interfaceId_v2": "Ethernet1",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet2",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-port-not-configured",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
]
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet1"',
|
||||
"interfaceId_v2": "Ethernet1",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["The following port(s) have issues: {'port_not_configured': ['Ethernet2']}"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-neighbor",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
]
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet1"',
|
||||
"interfaceId_v2": "Ethernet1",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
"Ethernet2": {"lldpNeighborInfo": []},
|
||||
}
|
||||
}
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["The following port(s) have issues: {'no_lldp_neighbor': ['Ethernet2']}"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-neighbor",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
]
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet1"',
|
||||
"interfaceId_v2": "Ethernet1",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
"Ethernet2": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73f7.d138",
|
||||
"systemName": "DC1-SPINE2",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet2"',
|
||||
"interfaceId_v2": "Ethernet2",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet2",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"expected": {"result": "failure", "messages": ["The following port(s) have issues: {'wrong_lldp_neighbor': ['Ethernet2']}"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-multiple",
|
||||
"test": VerifyLLDPNeighbors,
|
||||
"inputs": {
|
||||
"neighbors": [
|
||||
{"port": "Ethernet1", "neighbor_device": "DC1-SPINE1", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet2", "neighbor_device": "DC1-SPINE2", "neighbor_port": "Ethernet1"},
|
||||
{"port": "Ethernet3", "neighbor_device": "DC1-SPINE3", "neighbor_port": "Ethernet1"},
|
||||
]
|
||||
},
|
||||
"eos_data": [
|
||||
{
|
||||
"lldpNeighbors": {
|
||||
"Ethernet1": {
|
||||
"lldpNeighborInfo": [
|
||||
{
|
||||
"chassisIdType": "macAddress",
|
||||
"chassisId": "001c.73a0.fc18",
|
||||
"systemName": "DC1-SPINE1",
|
||||
"neighborInterfaceInfo": {
|
||||
"interfaceIdType": "interfaceName",
|
||||
"interfaceId": '"Ethernet2"',
|
||||
"interfaceId_v2": "Ethernet2",
|
||||
"interfaceDescription": "P2P_LINK_TO_DC1-LEAF1A_Ethernet1",
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
"Ethernet2": {"lldpNeighborInfo": []},
|
||||
}
|
||||
}
|
||||
],
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The following port(s) have issues: {'wrong_lldp_neighbor': ['Ethernet1'], 'no_lldp_neighbor': ['Ethernet2'], 'port_not_configured': ['Ethernet3']}"
|
||||
],
|
||||
},
|
||||
},
|
||||
]
291
tests/units/anta_tests/test_field_notices.py
Normal file
@@ -0,0 +1,291 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Test inputs for anta.tests.field_notices"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.field_notices import VerifyFieldNotice44Resolution, VerifyFieldNotice72Resolution
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyFieldNotice44Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1109144.35,
|
||||
"modelName": "DCS-7280QRA-C36S",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-8.0.0-3255441"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-4.0",
|
||||
"test": VerifyFieldNotice44Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1109144.35,
|
||||
"modelName": "DCS-7280QRA-C36S",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-4.0.1-3255441"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["device is running incorrect version of aboot (4.0.1)"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-4.1",
|
||||
"test": VerifyFieldNotice44Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1109144.35,
|
||||
"modelName": "DCS-7280QRA-C36S",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-4.1.0-3255441"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["device is running incorrect version of aboot (4.1.0)"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-6.0",
|
||||
"test": VerifyFieldNotice44Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1109144.35,
|
||||
"modelName": "DCS-7280QRA-C36S",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-6.0.1-3255441"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["device is running incorrect version of aboot (6.0.1)"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-6.1",
|
||||
"test": VerifyFieldNotice44Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1109144.35,
|
||||
"modelName": "DCS-7280QRA-C36S",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-6.1.1-3255441"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["device is running incorrect version of aboot (6.1.1)"]},
|
||||
},
|
||||
{
|
||||
"name": "skipped-model",
|
||||
"test": VerifyFieldNotice44Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1109144.35,
|
||||
"modelName": "vEOS-lab",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-8.0.0-3255441"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["device is not impacted by FN044"]},
|
||||
},
|
||||
{
|
||||
"name": "success-JPE",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3-48YC8",
|
||||
"serialNumber": "JPE2130000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "7"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success", "messages": ["FN72 is mitigated"]},
|
||||
},
|
||||
{
|
||||
"name": "success-JAS",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3-48YC8",
|
||||
"serialNumber": "JAS2040000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "7"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success", "messages": ["FN72 is mitigated"]},
|
||||
},
|
||||
{
|
||||
"name": "success-K-JPE",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3K-48YC8",
|
||||
"serialNumber": "JPE2133000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "7"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success", "messages": ["FN72 is mitigated"]},
|
||||
},
|
||||
{
|
||||
"name": "success-K-JAS",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3K-48YC8",
|
||||
"serialNumber": "JAS2040000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "7"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success", "messages": ["FN72 is mitigated"]},
|
||||
},
|
||||
{
|
||||
"name": "skipped-Serial",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3K-48YC8",
|
||||
"serialNumber": "BAN2040000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "7"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["Device not exposed"]},
|
||||
},
|
||||
{
|
||||
"name": "skipped-Platform",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7150-52-CL",
|
||||
"serialNumber": "JAS0040000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "5"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["Platform is not impacted by FN072"]},
|
||||
},
|
||||
{
|
||||
"name": "skipped-range-JPE",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3-48YC8",
|
||||
"serialNumber": "JPE2131000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "5"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["Device not exposed"]},
|
||||
},
|
||||
{
|
||||
"name": "skipped-range-K-JAS",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3K-48YC8",
|
||||
"serialNumber": "JAS2041000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "5"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["Device not exposed"]},
|
||||
},
|
||||
{
|
||||
"name": "failed-JPE",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3K-48YC8",
|
||||
"serialNumber": "JPE2133000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "5"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device is exposed to FN72"]},
|
||||
},
|
||||
{
|
||||
"name": "failed-JAS",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3-48YC8",
|
||||
"serialNumber": "JAS2040000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm1", "version": "5"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device is exposed to FN72"]},
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"test": VerifyFieldNotice72Resolution,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "DCS-7280SR3-48YC8",
|
||||
"serialNumber": "JAS2040000",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "FixedSystemvrm2", "version": "5"}],
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "error", "messages": ["Error in running test - FixedSystemvrm1 not found"]},
|
||||
},
|
||||
]
47
tests/units/anta_tests/test_greent.py
Normal file
@@ -0,0 +1,47 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Data for testing anta.tests.configuration"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.greent import VerifyGreenT, VerifyGreenTCounters
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyGreenTCounters,
|
||||
"eos_data": [{"sampleRcvd": 0, "sampleDiscarded": 0, "multiDstSampleRcvd": 0, "grePktSent": 1, "sampleSent": 0}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyGreenTCounters,
|
||||
"eos_data": [{"sampleRcvd": 0, "sampleDiscarded": 0, "multiDstSampleRcvd": 0, "grePktSent": 0, "sampleSent": 0}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure"},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyGreenT,
|
||||
"eos_data": [{"sampleRcvd": 0, "sampleDiscarded": 0, "multiDstSampleRcvd": 0, "grePktSent": 1, "sampleSent": 0}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyGreenT,
|
||||
"eos_data": [
|
||||
{
|
||||
"profiles": {
|
||||
"default": {"interfaces": [], "appliedInterfaces": [], "samplePolicy": "default", "failures": {}, "appliedInterfaces6": [], "failures6": {}},
|
||||
"testProfile": {"interfaces": [], "appliedInterfaces": [], "samplePolicy": "default", "failures": {}, "appliedInterfaces6": [], "failures6": {}},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure"},
|
||||
},
|
||||
]
918
tests/units/anta_tests/test_hardware.py
Normal file
@@ -0,0 +1,918 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Test inputs for anta.tests.hardware"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.hardware import (
|
||||
VerifyAdverseDrops,
|
||||
VerifyEnvironmentCooling,
|
||||
VerifyEnvironmentPower,
|
||||
VerifyEnvironmentSystemCooling,
|
||||
VerifyTemperature,
|
||||
VerifyTransceiversManufacturers,
|
||||
VerifyTransceiversTemperature,
|
||||
)
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTransceiversManufacturers,
|
||||
"eos_data": [
|
||||
{
|
||||
"xcvrSlots": {
|
||||
"1": {"mfgName": "Arista Networks", "modelName": "QSFP-100G-DR", "serialNum": "XKT203501340", "hardwareRev": "21"},
|
||||
"2": {"mfgName": "Arista Networks", "modelName": "QSFP-100G-DR", "serialNum": "XKT203501337", "hardwareRev": "21"},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"manufacturers": ["Arista Networks"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyTransceiversManufacturers,
|
||||
"eos_data": [
|
||||
{
|
||||
"xcvrSlots": {
|
||||
"1": {"mfgName": "Arista Networks", "modelName": "QSFP-100G-DR", "serialNum": "XKT203501340", "hardwareRev": "21"},
|
||||
"2": {"mfgName": "Arista Networks", "modelName": "QSFP-100G-DR", "serialNum": "XKT203501337", "hardwareRev": "21"},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"manufacturers": ["Arista"]},
|
||||
"expected": {"result": "failure", "messages": ["Some transceivers are from unapproved manufacturers: {'1': 'Arista Networks', '2': 'Arista Networks'}"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTemperature,
|
||||
"eos_data": [
|
||||
{
|
||||
"powercycleOnOverheat": "False",
|
||||
"ambientThreshold": 45,
|
||||
"cardSlots": [],
|
||||
"shutdownOnOverheat": "True",
|
||||
"systemStatus": "temperatureOk",
|
||||
"recoveryModeOnOverheat": "recoveryModeNA",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyTemperature,
|
||||
"eos_data": [
|
||||
{
|
||||
"powercycleOnOverheat": "False",
|
||||
"ambientThreshold": 45,
|
||||
"cardSlots": [],
|
||||
"shutdownOnOverheat": "True",
|
||||
"systemStatus": "temperatureKO",
|
||||
"recoveryModeOnOverheat": "recoveryModeNA",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device temperature exceeds acceptable limits. Current system status: 'temperatureKO'"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTransceiversTemperature,
|
||||
"eos_data": [
|
||||
{
|
||||
"tempSensors": [
|
||||
{
|
||||
"maxTemperature": 25.03125,
|
||||
"maxTemperatureLastChange": 1682509618.2227979,
|
||||
"hwStatus": "ok",
|
||||
"alertCount": 0,
|
||||
"description": "Xcvr54 temp sensor",
|
||||
"overheatThreshold": 70.0,
|
||||
"criticalThreshold": 70.0,
|
||||
"inAlertState": False,
|
||||
"targetTemperature": 62.0,
|
||||
"relPos": "54",
|
||||
"currentTemperature": 24.171875,
|
||||
"setPointTemperature": 61.8,
|
||||
"pidDriverCount": 0,
|
||||
"isPidDriver": False,
|
||||
"name": "DomTemperatureSensor54",
|
||||
}
|
||||
],
|
||||
"cardSlots": [],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-hwStatus",
|
||||
"test": VerifyTransceiversTemperature,
|
||||
"eos_data": [
|
||||
{
|
||||
"tempSensors": [
|
||||
{
|
||||
"maxTemperature": 25.03125,
|
||||
"maxTemperatureLastChange": 1682509618.2227979,
|
||||
"hwStatus": "ko",
|
||||
"alertCount": 0,
|
||||
"description": "Xcvr54 temp sensor",
|
||||
"overheatThreshold": 70.0,
|
||||
"criticalThreshold": 70.0,
|
||||
"inAlertState": False,
|
||||
"targetTemperature": 62.0,
|
||||
"relPos": "54",
|
||||
"currentTemperature": 24.171875,
|
||||
"setPointTemperature": 61.8,
|
||||
"pidDriverCount": 0,
|
||||
"isPidDriver": False,
|
||||
"name": "DomTemperatureSensor54",
|
||||
}
|
||||
],
|
||||
"cardSlots": [],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The following sensors are operating outside the acceptable temperature range or have raised alerts: "
|
||||
"{'DomTemperatureSensor54': "
|
||||
"{'hwStatus': 'ko', 'alertCount': 0}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-alertCount",
|
||||
"test": VerifyTransceiversTemperature,
|
||||
"eos_data": [
|
||||
{
|
||||
"tempSensors": [
|
||||
{
|
||||
"maxTemperature": 25.03125,
|
||||
"maxTemperatureLastChange": 1682509618.2227979,
|
||||
"hwStatus": "ok",
|
||||
"alertCount": 1,
|
||||
"description": "Xcvr54 temp sensor",
|
||||
"overheatThreshold": 70.0,
|
||||
"criticalThreshold": 70.0,
|
||||
"inAlertState": False,
|
||||
"targetTemperature": 62.0,
|
||||
"relPos": "54",
|
||||
"currentTemperature": 24.171875,
|
||||
"setPointTemperature": 61.8,
|
||||
"pidDriverCount": 0,
|
||||
"isPidDriver": False,
|
||||
"name": "DomTemperatureSensor54",
|
||||
}
|
||||
],
|
||||
"cardSlots": [],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The following sensors are operating outside the acceptable temperature range or have raised alerts: "
|
||||
"{'DomTemperatureSensor54': "
|
||||
"{'hwStatus': 'ok', 'alertCount': 1}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyEnvironmentSystemCooling,
|
||||
"eos_data": [
|
||||
{
|
||||
"defaultZones": False,
|
||||
"numCoolingZones": [],
|
||||
"coolingMode": "automatic",
|
||||
"ambientTemperature": 24.5,
|
||||
"shutdownOnInsufficientFans": True,
|
||||
"airflowDirection": "frontToBackAirflow",
|
||||
"overrideFanSpeed": 0,
|
||||
"powerSupplySlots": [],
|
||||
"fanTraySlots": [],
|
||||
"minFanSpeed": 0,
|
||||
"currentZones": 1,
|
||||
"configuredZones": 0,
|
||||
"systemStatus": "coolingOk",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyEnvironmentSystemCooling,
|
||||
"eos_data": [
|
||||
{
|
||||
"defaultZones": False,
|
||||
"numCoolingZones": [],
|
||||
"coolingMode": "automatic",
|
||||
"ambientTemperature": 24.5,
|
||||
"shutdownOnInsufficientFans": True,
|
||||
"airflowDirection": "frontToBackAirflow",
|
||||
"overrideFanSpeed": 0,
|
||||
"powerSupplySlots": [],
|
||||
"fanTraySlots": [],
|
||||
"minFanSpeed": 0,
|
||||
"currentZones": 1,
|
||||
"configuredZones": 0,
|
||||
"systemStatus": "coolingKo",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device system cooling is not OK: 'coolingKo'"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyEnvironmentCooling,
|
||||
"eos_data": [
|
||||
{
|
||||
"defaultZones": False,
|
||||
"numCoolingZones": [],
|
||||
"coolingMode": "automatic",
|
||||
"ambientTemperature": 24.5,
|
||||
"shutdownOnInsufficientFans": True,
|
||||
"airflowDirection": "frontToBackAirflow",
|
||||
"overrideFanSpeed": 0,
|
||||
"powerSupplySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498937.0240965,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499033.0403435,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498935.9121106,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499092.4665174,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply2",
|
||||
},
|
||||
],
|
||||
"fanTraySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303148,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0139885,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 29,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9304729,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498939.9329433,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "2",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9383528,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140095,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "3/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "3",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303904,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140295,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "4/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "4",
|
||||
},
|
||||
],
|
||||
"minFanSpeed": 0,
|
||||
"currentZones": 1,
|
||||
"configuredZones": 0,
|
||||
"systemStatus": "coolingOk",
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-additional-states",
|
||||
"test": VerifyEnvironmentCooling,
|
||||
"eos_data": [
|
||||
{
|
||||
"defaultZones": False,
|
||||
"numCoolingZones": [],
|
||||
"coolingMode": "automatic",
|
||||
"ambientTemperature": 24.5,
|
||||
"shutdownOnInsufficientFans": True,
|
||||
"airflowDirection": "frontToBackAirflow",
|
||||
"overrideFanSpeed": 0,
|
||||
"powerSupplySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498937.0240965,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499033.0403435,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "Not Inserted",
|
||||
"uptime": 1682498935.9121106,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499092.4665174,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply2",
|
||||
},
|
||||
],
|
||||
"fanTraySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303148,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0139885,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 29,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9304729,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498939.9329433,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "2",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9383528,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140095,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "3/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "3",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303904,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140295,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "4/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "4",
|
||||
},
|
||||
],
|
||||
"minFanSpeed": 0,
|
||||
"currentZones": 1,
|
||||
"configuredZones": 0,
|
||||
"systemStatus": "coolingOk",
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok", "Not Inserted"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-fan-tray",
|
||||
"test": VerifyEnvironmentCooling,
|
||||
"eos_data": [
|
||||
{
|
||||
"defaultZones": False,
|
||||
"numCoolingZones": [],
|
||||
"coolingMode": "automatic",
|
||||
"ambientTemperature": 24.5,
|
||||
"shutdownOnInsufficientFans": True,
|
||||
"airflowDirection": "frontToBackAirflow",
|
||||
"overrideFanSpeed": 0,
|
||||
"powerSupplySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498937.0240965,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499033.0403435,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498935.9121106,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499092.4665174,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply2",
|
||||
},
|
||||
],
|
||||
"fanTraySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "down",
|
||||
"uptime": 1682498923.9303148,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0139885,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 29,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9304729,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498939.9329433,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "2",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "Not Inserted",
|
||||
"uptime": 1682498923.9383528,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140095,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "3/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "3",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303904,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140295,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "4/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "4",
|
||||
},
|
||||
],
|
||||
"minFanSpeed": 0,
|
||||
"currentZones": 1,
|
||||
"configuredZones": 0,
|
||||
"systemStatus": "CoolingKo",
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok", "Not Inserted"]},
|
||||
"expected": {"result": "failure", "messages": ["Fan 1/1 on Fan Tray 1 is: 'down'"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-power-supply",
|
||||
"test": VerifyEnvironmentCooling,
|
||||
"eos_data": [
|
||||
{
|
||||
"defaultZones": False,
|
||||
"numCoolingZones": [],
|
||||
"coolingMode": "automatic",
|
||||
"ambientTemperature": 24.5,
|
||||
"shutdownOnInsufficientFans": True,
|
||||
"airflowDirection": "frontToBackAirflow",
|
||||
"overrideFanSpeed": 0,
|
||||
"powerSupplySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "down",
|
||||
"uptime": 1682498937.0240965,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499033.0403435,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498935.9121106,
|
||||
"maxSpeed": 23000,
|
||||
"lastSpeedStableChangeTime": 1682499092.4665174,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 33,
|
||||
"speedHwOverride": True,
|
||||
"speedStable": True,
|
||||
"label": "PowerSupply2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "PowerSupply2",
|
||||
},
|
||||
],
|
||||
"fanTraySlots": [
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303148,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0139885,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 29,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "1/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "1",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9304729,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498939.9329433,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "2/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "2",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "Not Inserted",
|
||||
"uptime": 1682498923.9383528,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140095,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "3/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "3",
|
||||
},
|
||||
{
|
||||
"status": "ok",
|
||||
"fans": [
|
||||
{
|
||||
"status": "ok",
|
||||
"uptime": 1682498923.9303904,
|
||||
"maxSpeed": 17500,
|
||||
"lastSpeedStableChangeTime": 1682498975.0140295,
|
||||
"configuredSpeed": 30,
|
||||
"actualSpeed": 30,
|
||||
"speedHwOverride": False,
|
||||
"speedStable": True,
|
||||
"label": "4/1",
|
||||
}
|
||||
],
|
||||
"speed": 30,
|
||||
"label": "4",
|
||||
},
|
||||
],
|
||||
"minFanSpeed": 0,
|
||||
"currentZones": 1,
|
||||
"configuredZones": 0,
|
||||
"systemStatus": "CoolingKo",
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok", "Not Inserted"]},
|
||||
"expected": {"result": "failure", "messages": ["Fan PowerSupply1/1 on PowerSupply PowerSupply1 is: 'down'"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyEnvironmentPower,
|
||||
"eos_data": [
|
||||
{
|
||||
"powerSupplies": {
|
||||
"1": {
|
||||
"outputPower": 0.0,
|
||||
"modelName": "PWR-500AC-F",
|
||||
"capacity": 500.0,
|
||||
"tempSensors": {
|
||||
"TempSensorP1/2": {"status": "ok", "temperature": 0.0},
|
||||
"TempSensorP1/3": {"status": "ok", "temperature": 0.0},
|
||||
"TempSensorP1/1": {"status": "ok", "temperature": 0.0},
|
||||
},
|
||||
"fans": {"FanP1/1": {"status": "ok", "speed": 33}},
|
||||
"state": "ok",
|
||||
"inputCurrent": 0.0,
|
||||
"dominant": False,
|
||||
"inputVoltage": 0.0,
|
||||
"outputCurrent": 0.0,
|
||||
"managed": True,
|
||||
},
|
||||
"2": {
|
||||
"outputPower": 117.375,
|
||||
"uptime": 1682498935.9121966,
|
||||
"modelName": "PWR-500AC-F",
|
||||
"capacity": 500.0,
|
||||
"tempSensors": {
|
||||
"TempSensorP2/1": {"status": "ok", "temperature": 39.0},
|
||||
"TempSensorP2/3": {"status": "ok", "temperature": 43.0},
|
||||
"TempSensorP2/2": {"status": "ok", "temperature": 31.0},
|
||||
},
|
||||
"fans": {"FanP2/1": {"status": "ok", "speed": 33}},
|
||||
"state": "ok",
|
||||
"inputCurrent": 0.572265625,
|
||||
"dominant": False,
|
||||
"inputVoltage": 232.5,
|
||||
"outputCurrent": 9.828125,
|
||||
"managed": True,
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-additional-states",
|
||||
"test": VerifyEnvironmentPower,
|
||||
"eos_data": [
|
||||
{
|
||||
"powerSupplies": {
|
||||
"1": {
|
||||
"outputPower": 0.0,
|
||||
"modelName": "PWR-500AC-F",
|
||||
"capacity": 500.0,
|
||||
"tempSensors": {
|
||||
"TempSensorP1/2": {"status": "ok", "temperature": 0.0},
|
||||
"TempSensorP1/3": {"status": "ok", "temperature": 0.0},
|
||||
"TempSensorP1/1": {"status": "ok", "temperature": 0.0},
|
||||
},
|
||||
"fans": {"FanP1/1": {"status": "ok", "speed": 33}},
|
||||
"state": "Not Inserted",
|
||||
"inputCurrent": 0.0,
|
||||
"dominant": False,
|
||||
"inputVoltage": 0.0,
|
||||
"outputCurrent": 0.0,
|
||||
"managed": True,
|
||||
},
|
||||
"2": {
|
||||
"outputPower": 117.375,
|
||||
"uptime": 1682498935.9121966,
|
||||
"modelName": "PWR-500AC-F",
|
||||
"capacity": 500.0,
|
||||
"tempSensors": {
|
||||
"TempSensorP2/1": {"status": "ok", "temperature": 39.0},
|
||||
"TempSensorP2/3": {"status": "ok", "temperature": 43.0},
|
||||
"TempSensorP2/2": {"status": "ok", "temperature": 31.0},
|
||||
},
|
||||
"fans": {"FanP2/1": {"status": "ok", "speed": 33}},
|
||||
"state": "ok",
|
||||
"inputCurrent": 0.572265625,
|
||||
"dominant": False,
|
||||
"inputVoltage": 232.5,
|
||||
"outputCurrent": 9.828125,
|
||||
"managed": True,
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok", "Not Inserted"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyEnvironmentPower,
|
||||
"eos_data": [
|
||||
{
|
||||
"powerSupplies": {
|
||||
"1": {
|
||||
"outputPower": 0.0,
|
||||
"modelName": "PWR-500AC-F",
|
||||
"capacity": 500.0,
|
||||
"tempSensors": {
|
||||
"TempSensorP1/2": {"status": "ok", "temperature": 0.0},
|
||||
"TempSensorP1/3": {"status": "ok", "temperature": 0.0},
|
||||
"TempSensorP1/1": {"status": "ok", "temperature": 0.0},
|
||||
},
|
||||
"fans": {"FanP1/1": {"status": "ok", "speed": 33}},
|
||||
"state": "powerLoss",
|
||||
"inputCurrent": 0.0,
|
||||
"dominant": False,
|
||||
"inputVoltage": 0.0,
|
||||
"outputCurrent": 0.0,
|
||||
"managed": True,
|
||||
},
|
||||
"2": {
|
||||
"outputPower": 117.375,
|
||||
"uptime": 1682498935.9121966,
|
||||
"modelName": "PWR-500AC-F",
|
||||
"capacity": 500.0,
|
||||
"tempSensors": {
|
||||
"TempSensorP2/1": {"status": "ok", "temperature": 39.0},
|
||||
"TempSensorP2/3": {"status": "ok", "temperature": 43.0},
|
||||
"TempSensorP2/2": {"status": "ok", "temperature": 31.0},
|
||||
},
|
||||
"fans": {"FanP2/1": {"status": "ok", "speed": 33}},
|
||||
"state": "ok",
|
||||
"inputCurrent": 0.572265625,
|
||||
"dominant": False,
|
||||
"inputVoltage": 232.5,
|
||||
"outputCurrent": 9.828125,
|
||||
"managed": True,
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"states": ["ok"]},
|
||||
"expected": {"result": "failure", "messages": ["The following power supplies status are not in the accepted states list: {'1': {'state': 'powerLoss'}}"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAdverseDrops,
|
||||
"eos_data": [{"totalAdverseDrops": 0}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyAdverseDrops,
|
||||
"eos_data": [{"totalAdverseDrops": 10}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device totalAdverseDrops counter is: '10'"]},
|
||||
},
|
||||
]
|
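Every entry in the DATA list above, and in the test data files added below, carries the same five keys: "name", "test", "eos_data", "inputs" and "expected", with the expected "result" taking one of the values seen throughout these files (success, failure, skipped, error). As a hedged illustration only, and not part of this commit, the short sketch below shows one way such entries could be sanity-checked; the names REQUIRED_KEYS, KNOWN_RESULTS and validate_entries are hypothetical helpers, not code from this change.

# Minimal sketch of a structural check over a DATA list shaped like the ones in this diff.
# All names introduced here (REQUIRED_KEYS, KNOWN_RESULTS, validate_entries) are hypothetical.
from __future__ import annotations

from typing import Any

REQUIRED_KEYS = {"name", "test", "eos_data", "inputs", "expected"}
KNOWN_RESULTS = {"success", "failure", "error", "skipped"}


def validate_entries(data: list[dict[str, Any]]) -> None:
    """Assert that every test data entry has the fields used throughout these files."""
    for entry in data:
        missing = REQUIRED_KEYS - entry.keys()
        assert not missing, f"entry {entry.get('name', '<unnamed>')} is missing {missing}"
        assert entry["expected"]["result"] in KNOWN_RESULTS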
1411 tests/units/anta_tests/test_interfaces.py Normal file
File diff suppressed because it is too large
27 tests/units/anta_tests/test_lanz.py Normal file
@@ -0,0 +1,27 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Data for testing anta.tests.lanz"""
from __future__ import annotations

from typing import Any

from anta.tests.lanz import VerifyLANZ
from tests.lib.anta import test  # noqa: F401; pylint: disable=W0611

DATA: list[dict[str, Any]] = [
    {
        "name": "success",
        "test": VerifyLANZ,
        "eos_data": [{"lanzEnabled": True}],
        "inputs": None,
        "expected": {"result": "success", "messages": ["LANZ is enabled"]},
    },
    {
        "name": "failure",
        "test": VerifyLANZ,
        "eos_data": [{"lanzEnabled": False}],
        "inputs": None,
        "expected": {"result": "failure", "messages": ["LANZ is not enabled"]},
    },
]
254 tests/units/anta_tests/test_logging.py Normal file
@@ -0,0 +1,254 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Data for testing anta.tests.logging"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.logging import (
|
||||
VerifyLoggingAccounting,
|
||||
VerifyLoggingErrors,
|
||||
VerifyLoggingHostname,
|
||||
VerifyLoggingHosts,
|
||||
VerifyLoggingLogsGeneration,
|
||||
VerifyLoggingPersistent,
|
||||
VerifyLoggingSourceIntf,
|
||||
VerifyLoggingTimestamp,
|
||||
)
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingPersistent,
|
||||
"eos_data": [
|
||||
"Persistent logging: level debugging\n",
|
||||
"""Directory of flash:/persist/messages
|
||||
|
||||
-rw- 9948 May 10 13:54 messages
|
||||
|
||||
33214693376 bytes total (10081136640 bytes free)
|
||||
|
||||
""",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-disabled",
|
||||
"test": VerifyLoggingPersistent,
|
||||
"eos_data": [
|
||||
"Persistent logging: disabled\n",
|
||||
"""Directory of flash:/persist/messages
|
||||
|
||||
-rw- 0 Apr 13 16:29 messages
|
||||
|
||||
33214693376 bytes total (10082168832 bytes free)
|
||||
|
||||
""",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Persistent logging is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-saved",
|
||||
"test": VerifyLoggingPersistent,
|
||||
"eos_data": [
|
||||
"Persistent logging: level debugging\n",
|
||||
"""Directory of flash:/persist/messages
|
||||
|
||||
-rw- 0 Apr 13 16:29 messages
|
||||
|
||||
33214693376 bytes total (10082168832 bytes free)
|
||||
|
||||
""",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["No persistent logs are saved in flash"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingSourceIntf,
|
||||
"eos_data": [
|
||||
"""Trap logging: level informational
|
||||
Logging source-interface 'Management0', IP Address 172.20.20.12 in VRF MGMT
|
||||
Logging to '10.22.10.92' port 514 in VRF MGMT via udp
|
||||
Logging to '10.22.10.93' port 514 in VRF MGMT via tcp
|
||||
Logging to '10.22.10.94' port 911 in VRF MGMT via udp
|
||||
|
||||
"""
|
||||
],
|
||||
"inputs": {"interface": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-intf",
|
||||
"test": VerifyLoggingSourceIntf,
|
||||
"eos_data": [
|
||||
"""Trap logging: level informational
|
||||
Logging source-interface 'Management1', IP Address 172.20.20.12 in VRF MGMT
|
||||
Logging to '10.22.10.92' port 514 in VRF MGMT via udp
|
||||
Logging to '10.22.10.93' port 514 in VRF MGMT via tcp
|
||||
Logging to '10.22.10.94' port 911 in VRF MGMT via udp
|
||||
|
||||
"""
|
||||
],
|
||||
"inputs": {"interface": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Source-interface 'Management0' is not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-vrf",
|
||||
"test": VerifyLoggingSourceIntf,
|
||||
"eos_data": [
|
||||
"""Trap logging: level informational
|
||||
Logging source-interface 'Management0', IP Address 172.20.20.12 in VRF default
|
||||
Logging to '10.22.10.92' port 514 in VRF MGMT via udp
|
||||
Logging to '10.22.10.93' port 514 in VRF MGMT via tcp
|
||||
Logging to '10.22.10.94' port 911 in VRF MGMT via udp
|
||||
|
||||
"""
|
||||
],
|
||||
"inputs": {"interface": "Management0", "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Source-interface 'Management0' is not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingHosts,
|
||||
"eos_data": [
|
||||
"""Trap logging: level informational
|
||||
Logging source-interface 'Management0', IP Address 172.20.20.12 in VRF MGMT
|
||||
Logging to '10.22.10.92' port 514 in VRF MGMT via udp
|
||||
Logging to '10.22.10.93' port 514 in VRF MGMT via tcp
|
||||
Logging to '10.22.10.94' port 911 in VRF MGMT via udp
|
||||
|
||||
"""
|
||||
],
|
||||
"inputs": {"hosts": ["10.22.10.92", "10.22.10.93", "10.22.10.94"], "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-hosts",
|
||||
"test": VerifyLoggingHosts,
|
||||
"eos_data": [
|
||||
"""Trap logging: level informational
|
||||
Logging source-interface 'Management1', IP Address 172.20.20.12 in VRF MGMT
|
||||
Logging to '10.22.10.92' port 514 in VRF MGMT via udp
|
||||
Logging to '10.22.10.103' port 514 in VRF MGMT via tcp
|
||||
Logging to '10.22.10.104' port 911 in VRF MGMT via udp
|
||||
|
||||
"""
|
||||
],
|
||||
"inputs": {"hosts": ["10.22.10.92", "10.22.10.93", "10.22.10.94"], "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Syslog servers ['10.22.10.93', '10.22.10.94'] are not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-vrf",
|
||||
"test": VerifyLoggingHosts,
|
||||
"eos_data": [
|
||||
"""Trap logging: level informational
|
||||
Logging source-interface 'Management0', IP Address 172.20.20.12 in VRF MGMT
|
||||
Logging to '10.22.10.92' port 514 in VRF MGMT via udp
|
||||
Logging to '10.22.10.93' port 514 in VRF default via tcp
|
||||
Logging to '10.22.10.94' port 911 in VRF default via udp
|
||||
|
||||
"""
|
||||
],
|
||||
"inputs": {"hosts": ["10.22.10.92", "10.22.10.93", "10.22.10.94"], "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Syslog servers ['10.22.10.93', '10.22.10.94'] are not configured in VRF MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingLogsGeneration,
|
||||
"eos_data": [
|
||||
"",
|
||||
"2023-05-10T13:54:21.463497-05:00 NW-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: "
|
||||
"Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingLogsGeneration validation\n",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyLoggingLogsGeneration,
|
||||
"eos_data": ["", "Log Buffer:\n"],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Logs are not generated"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingHostname,
|
||||
"eos_data": [
|
||||
{"hostname": "NW-CORE", "fqdn": "NW-CORE.example.org"},
|
||||
"",
|
||||
"2023-05-10T15:41:44.701810-05:00 NW-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: "
|
||||
"Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingHostname validation\n",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyLoggingHostname,
|
||||
"eos_data": [
|
||||
{"hostname": "NW-CORE", "fqdn": "NW-CORE.example.org"},
|
||||
"",
|
||||
"2023-05-10T13:54:21.463497-05:00 NW-CORE ConfigAgent: %SYS-6-LOGMSG_INFO: "
|
||||
"Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingLogsHostname validation\n",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Logs are not generated with the device FQDN"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingTimestamp,
|
||||
"eos_data": [
|
||||
"",
|
||||
"2023-05-10T15:41:44.680813-05:00 NW-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: "
|
||||
"Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingTimestamp validation\n",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyLoggingTimestamp,
|
||||
"eos_data": [
|
||||
"",
|
||||
"May 10 13:54:22 NE-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: "
|
||||
"Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingTimestamp validation\n",
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Logs are not generated with the appropriate timestamp format"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingAccounting,
|
||||
"eos_data": ["2023 May 10 15:50:31 arista command-api 10.22.1.107 stop service=shell priv-lvl=15 cmd=show aaa accounting logs | tail\n"],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyLoggingAccounting,
|
||||
"eos_data": ["2023 May 10 15:52:26 arista vty14 10.22.1.107 stop service=shell priv-lvl=15 cmd=show bgp summary\n"],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["AAA accounting logs are not generated"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyLoggingErrors,
|
||||
"eos_data": [""],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyLoggingErrors,
|
||||
"eos_data": [
|
||||
"Aug 2 19:57:42 DC1-LEAF1A Mlag: %FWK-3-SOCKET_CLOSE_REMOTE: Connection to Mlag (pid:27200) at tbt://192.168.0.1:4432/+n closed by peer (EOF)"
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device has reported syslog messages with a severity of ERRORS or higher"]},
|
||||
},
|
||||
]
|
343 tests/units/anta_tests/test_mlag.py Normal file
@@ -0,0 +1,343 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.mlag.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.mlag import VerifyMlagConfigSanity, VerifyMlagDualPrimary, VerifyMlagInterfaces, VerifyMlagPrimaryPriority, VerifyMlagReloadDelay, VerifyMlagStatus
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMlagStatus,
|
||||
"eos_data": [{"state": "active", "negStatus": "connected", "peerLinkStatus": "up", "localIntfStatus": "up"}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyMlagStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["MLAG is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyMlagStatus,
|
||||
"eos_data": [{"state": "active", "negStatus": "connected", "peerLinkStatus": "down", "localIntfStatus": "up"}],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["MLAG status is not OK: {'state': 'active', 'negStatus': 'connected', 'localIntfStatus': 'up', 'peerLinkStatus': 'down'}"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMlagInterfaces,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"mlagPorts": {"Disabled": 0, "Configured": 0, "Inactive": 0, "Active-partial": 0, "Active-full": 1},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyMlagInterfaces,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["MLAG is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-active-partial",
|
||||
"test": VerifyMlagInterfaces,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"mlagPorts": {"Disabled": 0, "Configured": 0, "Inactive": 0, "Active-partial": 1, "Active-full": 1},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["MLAG status is not OK: {'Disabled': 0, 'Configured': 0, 'Inactive': 0, 'Active-partial': 1, 'Active-full': 1}"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-inactive",
|
||||
"test": VerifyMlagInterfaces,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"mlagPorts": {"Disabled": 0, "Configured": 0, "Inactive": 1, "Active-partial": 1, "Active-full": 1},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["MLAG status is not OK: {'Disabled': 0, 'Configured': 0, 'Inactive': 1, 'Active-partial': 1, 'Active-full': 1}"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMlagConfigSanity,
|
||||
"eos_data": [{"globalConfiguration": {}, "interfaceConfiguration": {}, "mlagActive": True, "mlagConnected": True}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyMlagConfigSanity,
|
||||
"eos_data": [
|
||||
{
|
||||
"mlagActive": False,
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["MLAG is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"test": VerifyMlagConfigSanity,
|
||||
"eos_data": [
|
||||
{
|
||||
"dummy": False,
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "error", "messages": ["Incorrect JSON response - 'mlagActive' state was not found"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-global",
|
||||
"test": VerifyMlagConfigSanity,
|
||||
"eos_data": [
|
||||
{
|
||||
"globalConfiguration": {"mlag": {"globalParameters": {"dual-primary-detection-delay": {"localValue": "0", "peerValue": "200"}}}},
|
||||
"interfaceConfiguration": {},
|
||||
"mlagActive": True,
|
||||
"mlagConnected": True,
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"MLAG config-sanity returned inconsistencies: "
|
||||
"{'globalConfiguration': {'mlag': {'globalParameters': "
|
||||
"{'dual-primary-detection-delay': {'localValue': '0', 'peerValue': '200'}}}}, "
|
||||
"'interfaceConfiguration': {}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-interface",
|
||||
"test": VerifyMlagConfigSanity,
|
||||
"eos_data": [
|
||||
{
|
||||
"globalConfiguration": {},
|
||||
"interfaceConfiguration": {"trunk-native-vlan mlag30": {"interface": {"Port-Channel30": {"localValue": "123", "peerValue": "3700"}}}},
|
||||
"mlagActive": True,
|
||||
"mlagConnected": True,
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"MLAG config-sanity returned inconsistencies: "
|
||||
"{'globalConfiguration': {}, "
|
||||
"'interfaceConfiguration': {'trunk-native-vlan mlag30': "
|
||||
"{'interface': {'Port-Channel30': {'localValue': '123', 'peerValue': '3700'}}}}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMlagReloadDelay,
|
||||
"eos_data": [{"state": "active", "reloadDelay": 300, "reloadDelayNonMlag": 330}],
|
||||
"inputs": {"reload_delay": 300, "reload_delay_non_mlag": 330},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped-disabled",
|
||||
"test": VerifyMlagReloadDelay,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": {"reload_delay": 300, "reload_delay_non_mlag": 330},
|
||||
"expected": {"result": "skipped", "messages": ["MLAG is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyMlagReloadDelay,
|
||||
"eos_data": [{"state": "active", "reloadDelay": 400, "reloadDelayNonMlag": 430}],
|
||||
"inputs": {"reload_delay": 300, "reload_delay_non_mlag": 330},
|
||||
"expected": {"result": "failure", "messages": ["The reload-delay parameters are not configured properly: {'reloadDelay': 400, 'reloadDelayNonMlag': 430}"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMlagDualPrimary,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"dualPrimaryDetectionState": "configured",
|
||||
"dualPrimaryPortsErrdisabled": False,
|
||||
"dualPrimaryMlagRecoveryDelay": 60,
|
||||
"dualPrimaryNonMlagRecoveryDelay": 0,
|
||||
"detail": {"dualPrimaryDetectionDelay": 200, "dualPrimaryAction": "none"},
|
||||
}
|
||||
],
|
||||
"inputs": {"detection_delay": 200, "errdisabled": False, "recovery_delay": 60, "recovery_delay_non_mlag": 0},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped-disabled",
|
||||
"test": VerifyMlagDualPrimary,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": {"detection_delay": 200, "errdisabled": False, "recovery_delay": 60, "recovery_delay_non_mlag": 0},
|
||||
"expected": {"result": "skipped", "messages": ["MLAG is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-disabled",
|
||||
"test": VerifyMlagDualPrimary,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"dualPrimaryDetectionState": "disabled",
|
||||
"dualPrimaryPortsErrdisabled": False,
|
||||
}
|
||||
],
|
||||
"inputs": {"detection_delay": 200, "errdisabled": False, "recovery_delay": 60, "recovery_delay_non_mlag": 0},
|
||||
"expected": {"result": "failure", "messages": ["Dual-primary detection is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-timers",
|
||||
"test": VerifyMlagDualPrimary,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"dualPrimaryDetectionState": "configured",
|
||||
"dualPrimaryPortsErrdisabled": False,
|
||||
"dualPrimaryMlagRecoveryDelay": 160,
|
||||
"dualPrimaryNonMlagRecoveryDelay": 0,
|
||||
"detail": {"dualPrimaryDetectionDelay": 300, "dualPrimaryAction": "none"},
|
||||
}
|
||||
],
|
||||
"inputs": {"detection_delay": 200, "errdisabled": False, "recovery_delay": 60, "recovery_delay_non_mlag": 0},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
(
|
||||
"The dual-primary parameters are not configured properly: "
|
||||
"{'detail.dualPrimaryDetectionDelay': 300, "
|
||||
"'detail.dualPrimaryAction': 'none', "
|
||||
"'dualPrimaryMlagRecoveryDelay': 160, "
|
||||
"'dualPrimaryNonMlagRecoveryDelay': 0}"
|
||||
)
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-action",
|
||||
"test": VerifyMlagDualPrimary,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"dualPrimaryDetectionState": "configured",
|
||||
"dualPrimaryPortsErrdisabled": False,
|
||||
"dualPrimaryMlagRecoveryDelay": 60,
|
||||
"dualPrimaryNonMlagRecoveryDelay": 0,
|
||||
"detail": {"dualPrimaryDetectionDelay": 200, "dualPrimaryAction": "none"},
|
||||
}
|
||||
],
|
||||
"inputs": {"detection_delay": 200, "errdisabled": True, "recovery_delay": 60, "recovery_delay_non_mlag": 0},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
(
|
||||
"The dual-primary parameters are not configured properly: "
|
||||
"{'detail.dualPrimaryDetectionDelay': 200, "
|
||||
"'detail.dualPrimaryAction': 'none', "
|
||||
"'dualPrimaryMlagRecoveryDelay': 60, "
|
||||
"'dualPrimaryNonMlagRecoveryDelay': 0}"
|
||||
)
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyMlagPrimaryPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"detail": {"mlagState": "primary", "primaryPriority": 32767},
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"primary_priority": 32767,
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped-disabled",
|
||||
"test": VerifyMlagPrimaryPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": {"primary_priority": 32767},
|
||||
"expected": {"result": "skipped", "messages": ["MLAG is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-primary",
|
||||
"test": VerifyMlagPrimaryPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"detail": {"mlagState": "secondary", "primaryPriority": 32767},
|
||||
}
|
||||
],
|
||||
"inputs": {"primary_priority": 32767},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The device is not set as MLAG primary."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-priority",
|
||||
"test": VerifyMlagPrimaryPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"state": "active",
|
||||
"detail": {"mlagState": "secondary", "primaryPriority": 32767},
|
||||
}
|
||||
],
|
||||
"inputs": {"primary_priority": 1},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The device is not set as MLAG primary.", "The primary priority does not match expected. Expected `1`, but found `32767` instead."],
|
||||
},
|
||||
},
|
||||
]
|
175 tests/units/anta_tests/test_multicast.py Normal file
@@ -0,0 +1,175 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Test inputs for anta.tests.multicast"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.multicast import VerifyIGMPSnoopingGlobal, VerifyIGMPSnoopingVlans
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=unused-import
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success-enabled",
|
||||
"test": VerifyIGMPSnoopingVlans,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"vlans": {
|
||||
"1": {
|
||||
"reportFlooding": "disabled",
|
||||
"proxyActive": False,
|
||||
"groupsOverrun": False,
|
||||
"multicastRouterLearningMode": "pim-dvmrp",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"pruningActive": False,
|
||||
"maxGroups": 65534,
|
||||
"immediateLeave": "default",
|
||||
"floodingTraffic": True,
|
||||
},
|
||||
"42": {
|
||||
"reportFlooding": "disabled",
|
||||
"proxyActive": False,
|
||||
"groupsOverrun": False,
|
||||
"multicastRouterLearningMode": "pim-dvmrp",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"pruningActive": False,
|
||||
"maxGroups": 65534,
|
||||
"immediateLeave": "default",
|
||||
"floodingTraffic": True,
|
||||
},
|
||||
},
|
||||
"robustness": 2,
|
||||
"immediateLeave": "enabled",
|
||||
"reportFloodingSwitchPorts": [],
|
||||
}
|
||||
],
|
||||
"inputs": {"vlans": {1: True, 42: True}},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-disabled",
|
||||
"test": VerifyIGMPSnoopingVlans,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"vlans": {
|
||||
"42": {
|
||||
"reportFlooding": "disabled",
|
||||
"proxyActive": False,
|
||||
"groupsOverrun": False,
|
||||
"multicastRouterLearningMode": "pim-dvmrp",
|
||||
"igmpSnoopingState": "disabled",
|
||||
"pruningActive": False,
|
||||
"maxGroups": 65534,
|
||||
"immediateLeave": "default",
|
||||
"floodingTraffic": True,
|
||||
}
|
||||
},
|
||||
"robustness": 2,
|
||||
"immediateLeave": "enabled",
|
||||
"reportFloodingSwitchPorts": [],
|
||||
}
|
||||
],
|
||||
"inputs": {"vlans": {42: False}},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-missing-vlan",
|
||||
"test": VerifyIGMPSnoopingVlans,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"vlans": {
|
||||
"1": {
|
||||
"reportFlooding": "disabled",
|
||||
"proxyActive": False,
|
||||
"groupsOverrun": False,
|
||||
"multicastRouterLearningMode": "pim-dvmrp",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"pruningActive": False,
|
||||
"maxGroups": 65534,
|
||||
"immediateLeave": "default",
|
||||
"floodingTraffic": True,
|
||||
},
|
||||
},
|
||||
"robustness": 2,
|
||||
"immediateLeave": "enabled",
|
||||
"reportFloodingSwitchPorts": [],
|
||||
}
|
||||
],
|
||||
"inputs": {"vlans": {1: False, 42: False}},
|
||||
"expected": {"result": "failure", "messages": ["IGMP state for vlan 1 is enabled", "Supplied vlan 42 is not present on the device."]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-state",
|
||||
"test": VerifyIGMPSnoopingVlans,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"vlans": {
|
||||
"1": {
|
||||
"reportFlooding": "disabled",
|
||||
"proxyActive": False,
|
||||
"groupsOverrun": False,
|
||||
"multicastRouterLearningMode": "pim-dvmrp",
|
||||
"igmpSnoopingState": "disabled",
|
||||
"pruningActive": False,
|
||||
"maxGroups": 65534,
|
||||
"immediateLeave": "default",
|
||||
"floodingTraffic": True,
|
||||
},
|
||||
},
|
||||
"robustness": 2,
|
||||
"immediateLeave": "enabled",
|
||||
"reportFloodingSwitchPorts": [],
|
||||
}
|
||||
],
|
||||
"inputs": {"vlans": {1: True}},
|
||||
"expected": {"result": "failure", "messages": ["IGMP state for vlan 1 is disabled"]},
|
||||
},
|
||||
{
|
||||
"name": "success-enabled",
|
||||
"test": VerifyIGMPSnoopingGlobal,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "enabled",
|
||||
"robustness": 2,
|
||||
"immediateLeave": "enabled",
|
||||
"reportFloodingSwitchPorts": [],
|
||||
}
|
||||
],
|
||||
"inputs": {"enabled": True},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-disabled",
|
||||
"test": VerifyIGMPSnoopingGlobal,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": {"enabled": False},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-state",
|
||||
"test": VerifyIGMPSnoopingGlobal,
|
||||
"eos_data": [
|
||||
{
|
||||
"reportFlooding": "disabled",
|
||||
"igmpSnoopingState": "disabled",
|
||||
}
|
||||
],
|
||||
"inputs": {"enabled": True},
|
||||
"expected": {"result": "failure", "messages": ["IGMP state is not valid: disabled"]},
|
||||
},
|
||||
]
|
47 tests/units/anta_tests/test_profiles.py Normal file
@@ -0,0 +1,47 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""
Tests for anta.tests.profiles.py
"""
from __future__ import annotations

from typing import Any

from anta.tests.profiles import VerifyTcamProfile, VerifyUnifiedForwardingTableMode
from tests.lib.anta import test  # noqa: F401; pylint: disable=W0611

DATA: list[dict[str, Any]] = [
    {
        "name": "success",
        "test": VerifyUnifiedForwardingTableMode,
        "eos_data": [{"uftMode": "2", "urpfEnabled": False, "chipModel": "bcm56870", "l2TableSize": 163840, "l3TableSize": 147456, "lpmTableSize": 32768}],
        "inputs": {"mode": 2},
        "expected": {"result": "success"},
    },
    {
        "name": "failure",
        "test": VerifyUnifiedForwardingTableMode,
        "eos_data": [{"uftMode": "2", "urpfEnabled": False, "chipModel": "bcm56870", "l2TableSize": 163840, "l3TableSize": 147456, "lpmTableSize": 32768}],
        "inputs": {"mode": 3},
        "expected": {"result": "failure", "messages": ["Device is not running correct UFT mode (expected: 3 / running: 2)"]},
    },
    {
        "name": "success",
        "test": VerifyTcamProfile,
        "eos_data": [
            {"pmfProfiles": {"FixedSystem": {"config": "test", "configType": "System Profile", "status": "test", "mode": "tcam"}}, "lastProgrammingStatus": {}}
        ],
        "inputs": {"profile": "test"},
        "expected": {"result": "success"},
    },
    {
        "name": "failure",
        "test": VerifyTcamProfile,
        "eos_data": [
            {"pmfProfiles": {"FixedSystem": {"config": "test", "configType": "System Profile", "status": "default", "mode": "tcam"}}, "lastProgrammingStatus": {}}
        ],
        "inputs": {"profile": "test"},
        "expected": {"result": "failure", "messages": ["Incorrect profile running on device: default"]},
    },
]
42 tests/units/anta_tests/test_ptp.py Normal file
@@ -0,0 +1,42 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""Data for testing anta.tests.ptp"""
from __future__ import annotations

from typing import Any

from anta.tests.ptp import VerifyPtpStatus

DATA: list[dict[str, Any]] = [
    {
        "name": "success",
        "test": VerifyPtpStatus,
        "eos_data": [
            {
                "ptpMode": "ptpBoundaryClock",
                "ptpProfile": "ptpDefaultProfile",
                "ptpClockSummary": {
                    "clockIdentity": "0xcc:1a:a3:ff:ff:c3:bf:eb",
                    "gmClockIdentity": "0x00:00:00:00:00:00:00:00",
                    "numberOfSlavePorts": 0,
                    "numberOfMasterPorts": 0,
                    "offsetFromMaster": 0,
                    "meanPathDelay": 0,
                    "stepsRemoved": 0,
                    "skew": 1.0,
                },
                "ptpIntfSummaries": {},
            }
        ],
        "inputs": None,
        "expected": {"result": "success"},
    },
    {
        "name": "failure",
        "test": VerifyPtpStatus,
        "eos_data": [{"ptpIntfSummaries": {}}],
        "inputs": None,
        "expected": {"result": "failure"},
    },
]
900 tests/units/anta_tests/test_security.py Normal file
@@ -0,0 +1,900 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.security.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.security import (
|
||||
VerifyAPIHttpsSSL,
|
||||
VerifyAPIHttpStatus,
|
||||
VerifyAPIIPv4Acl,
|
||||
VerifyAPIIPv6Acl,
|
||||
VerifyAPISSLCertificate,
|
||||
VerifyBannerLogin,
|
||||
VerifyBannerMotd,
|
||||
VerifyIPv4ACL,
|
||||
VerifySSHIPv4Acl,
|
||||
VerifySSHIPv6Acl,
|
||||
VerifySSHStatus,
|
||||
VerifyTelnetStatus,
|
||||
)
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySSHStatus,
|
||||
"eos_data": ["SSHD status for Default VRF is disabled\nSSH connection limit is 50\nSSH per host connection limit is 20\nFIPS status: disabled\n\n"],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifySSHStatus,
|
||||
"eos_data": ["SSHD status for Default VRF is enabled\nSSH connection limit is 50\nSSH per host connection limit is 20\nFIPS status: disabled\n\n"],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["SSHD status for Default VRF is enabled"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySSHIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": [{"type": "Ip4Acl", "name": "ACL_IPV4_SSH", "configuredVrfs": ["MGMT"], "activeVrfs": ["MGMT"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifySSHIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": []}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Expected 1 SSH IPv4 ACL(s) in vrf MGMT but got 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifySSHIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": [{"type": "Ip4Acl", "name": "ACL_IPV4_SSH", "configuredVrfs": ["default"], "activeVrfs": ["default"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["SSH IPv4 ACL(s) not configured or active in vrf MGMT: ['ACL_IPV4_SSH']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySSHIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": [{"type": "Ip6Acl", "name": "ACL_IPV6_SSH", "configuredVrfs": ["MGMT"], "activeVrfs": ["MGMT"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifySSHIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": []}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Expected 1 SSH IPv6 ACL(s) in vrf MGMT but got 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifySSHIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": [{"type": "Ip6Acl", "name": "ACL_IPV6_SSH", "configuredVrfs": ["default"], "activeVrfs": ["default"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["SSH IPv6 ACL(s) not configured or active in vrf MGMT: ['ACL_IPV6_SSH']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTelnetStatus,
|
||||
"eos_data": [{"serverState": "disabled", "vrfName": "default", "maxTelnetSessions": 20, "maxTelnetSessionsPerHost": 20}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyTelnetStatus,
|
||||
"eos_data": [{"serverState": "enabled", "vrfName": "default", "maxTelnetSessions": 20, "maxTelnetSessionsPerHost": 20}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Telnet status for Default VRF is enabled"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAPIHttpStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"httpServer": {"configured": False, "running": False, "port": 80},
|
||||
"localHttpServer": {"configured": False, "running": False, "port": 8080},
|
||||
"httpsServer": {"configured": True, "running": True, "port": 443},
|
||||
"unixSocketServer": {"configured": False, "running": False},
|
||||
"sslProfile": {"name": "API_SSL_Profile", "configured": True, "state": "valid"},
|
||||
"tlsProtocol": ["1.2"],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyAPIHttpStatus,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"httpServer": {"configured": True, "running": True, "port": 80},
|
||||
"localHttpServer": {"configured": False, "running": False, "port": 8080},
|
||||
"httpsServer": {"configured": True, "running": True, "port": 443},
|
||||
"unixSocketServer": {"configured": False, "running": False},
|
||||
"sslProfile": {"name": "API_SSL_Profile", "configured": True, "state": "valid"},
|
||||
"tlsProtocol": ["1.2"],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["eAPI HTTP server is enabled globally"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAPIHttpsSSL,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"httpServer": {"configured": False, "running": False, "port": 80},
|
||||
"localHttpServer": {"configured": False, "running": False, "port": 8080},
|
||||
"httpsServer": {"configured": True, "running": True, "port": 443},
|
||||
"unixSocketServer": {"configured": False, "running": False},
|
||||
"sslProfile": {"name": "API_SSL_Profile", "configured": True, "state": "valid"},
|
||||
"tlsProtocol": ["1.2"],
|
||||
}
|
||||
],
|
||||
"inputs": {"profile": "API_SSL_Profile"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-not-configured",
|
||||
"test": VerifyAPIHttpsSSL,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"httpServer": {"configured": True, "running": True, "port": 80},
|
||||
"localHttpServer": {"configured": False, "running": False, "port": 8080},
|
||||
"httpsServer": {"configured": True, "running": True, "port": 443},
|
||||
"unixSocketServer": {"configured": False, "running": False},
|
||||
"tlsProtocol": ["1.2"],
|
||||
}
|
||||
],
|
||||
"inputs": {"profile": "API_SSL_Profile"},
|
||||
"expected": {"result": "failure", "messages": ["eAPI HTTPS server SSL profile (API_SSL_Profile) is not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-misconfigured-invalid",
|
||||
"test": VerifyAPIHttpsSSL,
|
||||
"eos_data": [
|
||||
{
|
||||
"enabled": True,
|
||||
"httpServer": {"configured": True, "running": True, "port": 80},
|
||||
"localHttpServer": {"configured": False, "running": False, "port": 8080},
|
||||
"httpsServer": {"configured": True, "running": True, "port": 443},
|
||||
"unixSocketServer": {"configured": False, "running": False},
|
||||
"sslProfile": {"name": "Wrong_SSL_Profile", "configured": True, "state": "valid"},
|
||||
"tlsProtocol": ["1.2"],
|
||||
}
|
||||
],
|
||||
"inputs": {"profile": "API_SSL_Profile"},
|
||||
"expected": {"result": "failure", "messages": ["eAPI HTTPS server SSL profile (API_SSL_Profile) is misconfigured or invalid"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAPIIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": [{"type": "Ip4Acl", "name": "ACL_IPV4_API", "configuredVrfs": ["MGMT"], "activeVrfs": ["MGMT"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifyAPIIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": []}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Expected 1 eAPI IPv4 ACL(s) in vrf MGMT but got 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifyAPIIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": [{"type": "Ip4Acl", "name": "ACL_IPV4_API", "configuredVrfs": ["default"], "activeVrfs": ["default"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["eAPI IPv4 ACL(s) not configured or active in vrf MGMT: ['ACL_IPV4_API']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAPIIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": [{"type": "Ip6Acl", "name": "ACL_IPV6_API", "configuredVrfs": ["MGMT"], "activeVrfs": ["MGMT"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifyAPIIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": []}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Expected 1 eAPI IPv6 ACL(s) in vrf MGMT but got 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifyAPIIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": [{"type": "Ip6Acl", "name": "ACL_IPV6_API", "configuredVrfs": ["default"], "activeVrfs": ["default"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["eAPI IPv6 ACL(s) not configured or active in vrf MGMT: ['ACL_IPV6_API']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_ROOT_CA.crt": {
|
||||
"subject": {"commonName": "Arista Networks Internal IT Root Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "RSA",
|
||||
"size": 4096,
|
||||
},
|
||||
},
|
||||
"ARISTA_SIGNING_CA.crt": {
|
||||
"subject": {"commonName": "AristaIT-ICA ECDSA Issuing Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "ECDSA",
|
||||
"size": 256,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702288467.6736515,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-certificate-not-configured",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_SIGNING_CA.crt": {
|
||||
"subject": {"commonName": "AristaIT-ICA ECDSA Issuing Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "ECDSA",
|
||||
"size": 256,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702288467.6736515,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["SSL certificate 'ARISTA_ROOT_CA.crt', is not configured.\n"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-certificate-expired",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_ROOT_CA.crt": {
|
||||
"subject": {"commonName": "Arista Networks Internal IT Root Cert Authority"},
|
||||
"notAfter": 1702533518,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "RSA",
|
||||
"size": 4096,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702622372.2240553,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["SSL certificate 'ARISTA_SIGNING_CA.crt', is not configured.\n", "SSL certificate `ARISTA_ROOT_CA.crt` is expired.\n"],
|
||||
},
|
||||
},
|
||||
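# Editor's note on the expected messages above (an inference from the data, not
# from the test implementation): ARISTA_SIGNING_CA.crt is absent from eos_data,
# hence the "not configured" message, while ARISTA_ROOT_CA.crt has
# notAfter (1702533518) earlier than utcTime (1702622372), hence "expired".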
{
|
||||
"name": "failure-certificate-about-to-expire",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_ROOT_CA.crt": {
|
||||
"subject": {"commonName": "Arista Networks Internal IT Root Cert Authority"},
|
||||
"notAfter": 1704782709,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "RSA",
|
||||
"size": 4096,
|
||||
},
|
||||
},
|
||||
"ARISTA_SIGNING_CA.crt": {
|
||||
"subject": {"commonName": "AristaIT-ICA ECDSA Issuing Cert Authority"},
|
||||
"notAfter": 1702533518,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "ECDSA",
|
||||
"size": 256,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702622372.2240553,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["SSL certificate `ARISTA_SIGNING_CA.crt` is expired.\n", "SSL certificate `ARISTA_ROOT_CA.crt` is about to expire in 25 days."],
|
||||
},
|
||||
},
|
||||
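# Editor's note (worked arithmetic, assuming the check compares notAfter with utcTime):
# ARISTA_SIGNING_CA.crt has notAfter (1702533518) < utcTime (1702622372), so it is reported expired;
# ARISTA_ROOT_CA.crt has (1704782709 - 1702622372) / 86400 ~= 25.0 days remaining,
# below the 30-day expiry_threshold, matching "about to expire in 25 days".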
{
|
||||
"name": "failure-wrong-subject-name",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_ROOT_CA.crt": {
|
||||
"subject": {"commonName": "AristaIT-ICA Networks Internal IT Root Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "RSA",
|
||||
"size": 4096,
|
||||
},
|
||||
},
|
||||
"ARISTA_SIGNING_CA.crt": {
|
||||
"subject": {"commonName": "Arista ECDSA Issuing Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "ECDSA",
|
||||
"size": 256,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702288467.6736515,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"SSL certificate `ARISTA_SIGNING_CA.crt` is not configured properly:\n"
|
||||
"Expected `AristaIT-ICA ECDSA Issuing Cert Authority` as the subject.commonName, but found "
|
||||
"`Arista ECDSA Issuing Cert Authority` instead.\n",
|
||||
"SSL certificate `ARISTA_ROOT_CA.crt` is not configured properly:\n"
|
||||
"Expected `Arista Networks Internal IT Root Cert Authority` as the subject.commonName, "
|
||||
"but found `AristaIT-ICA Networks Internal IT Root Cert Authority` instead.\n",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-encryption-type-and-size",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_ROOT_CA.crt": {
|
||||
"subject": {"commonName": "Arista Networks Internal IT Root Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "ECDSA",
|
||||
"size": 256,
|
||||
},
|
||||
},
|
||||
"ARISTA_SIGNING_CA.crt": {
|
||||
"subject": {"commonName": "AristaIT-ICA ECDSA Issuing Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
"publicKey": {
|
||||
"encryptionAlgorithm": "RSA",
|
||||
"size": 4096,
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702288467.6736515,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"SSL certificate `ARISTA_SIGNING_CA.crt` is not configured properly:\n"
|
||||
"Expected `ECDSA` as the publicKey.encryptionAlgorithm, but found `RSA` instead.\n"
|
||||
"Expected `256` as the publicKey.size, but found `4096` instead.\n",
|
||||
"SSL certificate `ARISTA_ROOT_CA.crt` is not configured properly:\n"
|
||||
"Expected `RSA` as the publicKey.encryptionAlgorithm, but found `ECDSA` instead.\n"
|
||||
"Expected `4096` as the publicKey.size, but found `256` instead.\n",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-missing-actual-output",
|
||||
"test": VerifyAPISSLCertificate,
|
||||
"eos_data": [
|
||||
{
|
||||
"certificates": {
|
||||
"ARISTA_ROOT_CA.crt": {
|
||||
"subject": {"commonName": "Arista Networks Internal IT Root Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
},
|
||||
"ARISTA_SIGNING_CA.crt": {
|
||||
"subject": {"commonName": "AristaIT-ICA ECDSA Issuing Cert Authority"},
|
||||
"notAfter": 2127420899,
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"utcTime": 1702288467.6736515,
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"certificates": [
|
||||
{
|
||||
"certificate_name": "ARISTA_SIGNING_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "AristaIT-ICA ECDSA Issuing Cert Authority",
|
||||
"encryption_algorithm": "ECDSA",
|
||||
"key_size": 256,
|
||||
},
|
||||
{
|
||||
"certificate_name": "ARISTA_ROOT_CA.crt",
|
||||
"expiry_threshold": 30,
|
||||
"common_name": "Arista Networks Internal IT Root Cert Authority",
|
||||
"encryption_algorithm": "RSA",
|
||||
"key_size": 4096,
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"SSL certificate `ARISTA_SIGNING_CA.crt` is not configured properly:\n"
|
||||
"Expected `ECDSA` as the publicKey.encryptionAlgorithm, but it was not found in the actual output.\n"
|
||||
"Expected `256` as the publicKey.size, but it was not found in the actual output.\n",
|
||||
"SSL certificate `ARISTA_ROOT_CA.crt` is not configured properly:\n"
|
||||
"Expected `RSA` as the publicKey.encryptionAlgorithm, but it was not found in the actual output.\n"
|
||||
"Expected `4096` as the publicKey.size, but it was not found in the actual output.\n",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyBannerLogin,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginBanner": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"login_banner": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-multiline",
|
||||
"test": VerifyBannerLogin,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginBanner": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"login_banner": """Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
Use of this source code is governed by the Apache License 2.0
|
||||
that can be found in the LICENSE file."""
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
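# Editor's note: the triple-quoted login_banner above is the same text as the
# "\n"-escaped string in eos_data, so this case presumably exercises multiline
# input handling rather than a different banner value.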
{
|
||||
"name": "failure-incorrect-login-banner",
|
||||
"test": VerifyBannerLogin,
|
||||
"eos_data": [
|
||||
{
|
||||
"loginBanner": "Copyright (c) 2023 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"login_banner": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Expected `Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file.` as the login banner, but found `Copyright (c) 2023 Arista Networks, Inc.\nUse of this source code is "
|
||||
"governed by the Apache License 2.0\nthat can be found in the LICENSE file.` instead."
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyBannerMotd,
|
||||
"eos_data": [
|
||||
{
|
||||
"motd": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"motd_banner": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-multiline",
|
||||
"test": VerifyBannerMotd,
|
||||
"eos_data": [
|
||||
{
|
||||
"motd": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"motd_banner": """Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
Use of this source code is governed by the Apache License 2.0
|
||||
that can be found in the LICENSE file."""
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-motd-banner",
|
||||
"test": VerifyBannerMotd,
|
||||
"eos_data": [
|
||||
{
|
||||
"motd": "Copyright (c) 2023 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"motd_banner": "Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file."
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Expected `Copyright (c) 2023-2024 Arista Networks, Inc.\nUse of this source code is governed by the Apache License 2.0\n"
|
||||
"that can be found in the LICENSE file.` as the motd banner, but found `Copyright (c) 2023 Arista Networks, Inc.\nUse of this source code is "
|
||||
"governed by the Apache License 2.0\nthat can be found in the LICENSE file.` instead."
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyIPv4ACL,
|
||||
"eos_data": [
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit ip any any tracked", "sequenceNumber": 20},
|
||||
{"text": "permit udp any any eq bfd ttl eq 255", "sequenceNumber": 30},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit tcp any any range 5900 5910", "sequenceNumber": 20},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"ipv4_access_lists": [
|
||||
{
|
||||
"name": "default-control-plane-acl",
|
||||
"entries": [
|
||||
{"sequence": 10, "action": "permit icmp any any"},
|
||||
{"sequence": 20, "action": "permit ip any any tracked"},
|
||||
{"sequence": 30, "action": "permit udp any any eq bfd ttl eq 255"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "LabTest",
|
||||
"entries": [{"sequence": 10, "action": "permit icmp any any"}, {"sequence": 20, "action": "permit tcp any any range 5900 5910"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-acl-not-found",
|
||||
"test": VerifyIPv4ACL,
|
||||
"eos_data": [
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit ip any any tracked", "sequenceNumber": 20},
|
||||
{"text": "permit udp any any eq bfd ttl eq 255", "sequenceNumber": 30},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
{"aclList": []},
|
||||
],
|
||||
"inputs": {
|
||||
"ipv4_access_lists": [
|
||||
{
|
||||
"name": "default-control-plane-acl",
|
||||
"entries": [
|
||||
{"sequence": 10, "action": "permit icmp any any"},
|
||||
{"sequence": 20, "action": "permit ip any any tracked"},
|
||||
{"sequence": 30, "action": "permit udp any any eq bfd ttl eq 255"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "LabTest",
|
||||
"entries": [{"sequence": 10, "action": "permit icmp any any"}, {"sequence": 20, "action": "permit tcp any any range 5900 5910"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {"result": "failure", "messages": ["LabTest: Not found"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-sequence-not-found",
|
||||
"test": VerifyIPv4ACL,
|
||||
"eos_data": [
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit ip any any tracked", "sequenceNumber": 20},
|
||||
{"text": "permit udp any any eq bfd ttl eq 255", "sequenceNumber": 40},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit tcp any any range 5900 5910", "sequenceNumber": 30},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"ipv4_access_lists": [
|
||||
{
|
||||
"name": "default-control-plane-acl",
|
||||
"entries": [
|
||||
{"sequence": 10, "action": "permit icmp any any"},
|
||||
{"sequence": 20, "action": "permit ip any any tracked"},
|
||||
{"sequence": 30, "action": "permit udp any any eq bfd ttl eq 255"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "LabTest",
|
||||
"entries": [{"sequence": 10, "action": "permit icmp any any"}, {"sequence": 20, "action": "permit tcp any any range 5900 5910"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["default-control-plane-acl:\nSequence number `30` is not found.\n", "LabTest:\nSequence number `20` is not found.\n"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-action-not-match",
|
||||
"test": VerifyIPv4ACL,
|
||||
"eos_data": [
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit ip any any tracked", "sequenceNumber": 20},
|
||||
{"text": "permit tcp any any range 5900 5910", "sequenceNumber": 30},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit udp any any eq bfd ttl eq 255", "sequenceNumber": 20},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
"inputs": {
|
||||
"ipv4_access_lists": [
|
||||
{
|
||||
"name": "default-control-plane-acl",
|
||||
"entries": [
|
||||
{"sequence": 10, "action": "permit icmp any any"},
|
||||
{"sequence": 20, "action": "permit ip any any tracked"},
|
||||
{"sequence": 30, "action": "permit udp any any eq bfd ttl eq 255"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "LabTest",
|
||||
"entries": [{"sequence": 10, "action": "permit icmp any any"}, {"sequence": 20, "action": "permit tcp any any range 5900 5910"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"default-control-plane-acl:\n"
|
||||
"Expected `permit udp any any eq bfd ttl eq 255` as sequence number 30 action but found `permit tcp any any range 5900 5910` instead.\n",
|
||||
"LabTest:\nExpected `permit tcp any any range 5900 5910` as sequence number 20 action but found `permit udp any any eq bfd ttl eq 255` instead.\n",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-all-type",
|
||||
"test": VerifyIPv4ACL,
|
||||
"eos_data": [
|
||||
{
|
||||
"aclList": [
|
||||
{
|
||||
"sequence": [
|
||||
{"text": "permit icmp any any", "sequenceNumber": 10},
|
||||
{"text": "permit ip any any tracked", "sequenceNumber": 40},
|
||||
{"text": "permit tcp any any range 5900 5910", "sequenceNumber": 30},
|
||||
],
|
||||
}
|
||||
]
|
||||
},
|
||||
{"aclList": []},
|
||||
],
|
||||
"inputs": {
|
||||
"ipv4_access_lists": [
|
||||
{
|
||||
"name": "default-control-plane-acl",
|
||||
"entries": [
|
||||
{"sequence": 10, "action": "permit icmp any any"},
|
||||
{"sequence": 20, "action": "permit ip any any tracked"},
|
||||
{"sequence": 30, "action": "permit udp any any eq bfd ttl eq 255"},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "LabTest",
|
||||
"entries": [{"sequence": 10, "action": "permit icmp any any"}, {"sequence": 20, "action": "permit tcp any any range 5900 5910"}],
|
||||
},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"default-control-plane-acl:\nSequence number `20` is not found.\n"
|
||||
"Expected `permit udp any any eq bfd ttl eq 255` as sequence number 30 action but found `permit tcp any any range 5900 5910` instead.\n",
|
||||
"LabTest: Not found",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
218
tests/units/anta_tests/test_services.py
Normal file

|
@ -0,0 +1,218 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.services.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.services import VerifyDNSLookup, VerifyDNSServers, VerifyErrdisableRecovery, VerifyHostname
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyHostname,
|
||||
"eos_data": [{"hostname": "s1-spine1", "fqdn": "s1-spine1.fun.aristanetworks.com"}],
|
||||
"inputs": {"hostname": "s1-spine1"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-hostname",
|
||||
"test": VerifyHostname,
|
||||
"eos_data": [{"hostname": "s1-spine2", "fqdn": "s1-spine1.fun.aristanetworks.com"}],
|
||||
"inputs": {"hostname": "s1-spine1"},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Expected `s1-spine1` as the hostname, but found `s1-spine2` instead."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyDNSLookup,
|
||||
"eos_data": [
|
||||
{
|
||||
"messages": [
|
||||
"Server:\t\t127.0.0.1\nAddress:\t127.0.0.1#53\n\nNon-authoritative answer:\nName:\tarista.com\nAddress: 151.101.130.132\nName:\tarista.com\n"
|
||||
"Address: 151.101.2.132\nName:\tarista.com\nAddress: 151.101.194.132\nName:\tarista.com\nAddress: 151.101.66.132\n\n"
|
||||
]
|
||||
},
|
||||
{"messages": ["Server:\t\t127.0.0.1\nAddress:\t127.0.0.1#53\n\nNon-authoritative answer:\nName:\twww.google.com\nAddress: 172.217.12.100\n\n"]},
|
||||
],
|
||||
"inputs": {"domain_names": ["arista.com", "www.google.com"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyDNSLookup,
|
||||
"eos_data": [
|
||||
{"messages": ["Server:\t\t127.0.0.1\nAddress:\t127.0.0.1#53\n\nNon-authoritative answer:\n*** Can't find arista.ca: No answer\n\n"]},
|
||||
{"messages": ["Server:\t\t127.0.0.1\nAddress:\t127.0.0.1#53\n\nNon-authoritative answer:\nName:\twww.google.com\nAddress: 172.217.12.100\n\n"]},
|
||||
{"messages": ["Server:\t\t127.0.0.1\nAddress:\t127.0.0.1#53\n\nNon-authoritative answer:\n*** Can't find google.ca: No answer\n\n"]},
|
||||
],
|
||||
"inputs": {"domain_names": ["arista.ca", "www.google.com", "google.ca"]},
|
||||
"expected": {"result": "failure", "messages": ["The following domain(s) are not resolved to an IP address: arista.ca, google.ca"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyDNSServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"nameServerConfigs": [{"ipAddr": "10.14.0.1", "vrf": "default", "priority": 0}, {"ipAddr": "10.14.0.11", "vrf": "MGMT", "priority": 1}],
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"dns_servers": [{"server_address": "10.14.0.1", "vrf": "default", "priority": 0}, {"server_address": "10.14.0.11", "vrf": "MGMT", "priority": 1}]
|
||||
},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-dns-missing",
|
||||
"test": VerifyDNSServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"nameServerConfigs": [{"ipAddr": "10.14.0.1", "vrf": "default", "priority": 0}, {"ipAddr": "10.14.0.11", "vrf": "MGMT", "priority": 1}],
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"dns_servers": [{"server_address": "10.14.0.10", "vrf": "default", "priority": 0}, {"server_address": "10.14.0.21", "vrf": "MGMT", "priority": 1}]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["DNS server `10.14.0.10` is not configured with any VRF.", "DNS server `10.14.0.21` is not configured with any VRF."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-dns-found",
|
||||
"test": VerifyDNSServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"nameServerConfigs": [],
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"dns_servers": [{"server_address": "10.14.0.10", "vrf": "default", "priority": 0}, {"server_address": "10.14.0.21", "vrf": "MGMT", "priority": 1}]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["DNS server `10.14.0.10` is not configured with any VRF.", "DNS server `10.14.0.21` is not configured with any VRF."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-dns-details",
|
||||
"test": VerifyDNSServers,
|
||||
"eos_data": [
|
||||
{
|
||||
"nameServerConfigs": [{"ipAddr": "10.14.0.1", "vrf": "CS", "priority": 1}, {"ipAddr": "10.14.0.11", "vrf": "MGMT", "priority": 1}],
|
||||
}
|
||||
],
|
||||
"inputs": {
|
||||
"dns_servers": [
|
||||
{"server_address": "10.14.0.1", "vrf": "CS", "priority": 0},
|
||||
{"server_address": "10.14.0.11", "vrf": "default", "priority": 0},
|
||||
{"server_address": "10.14.0.110", "vrf": "MGMT", "priority": 0},
|
||||
]
|
||||
},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"For DNS server `10.14.0.1`, the expected priority is `0`, but `1` was found instead.",
|
||||
"DNS server `10.14.0.11` is not configured with VRF `default`.",
|
||||
"DNS server `10.14.0.110` is not configured with any VRF.",
|
||||
],
|
||||
},
|
||||
},
|
||||
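# Editor's note (inference from the three messages above, not from the test
# code): servers appear to be matched on the (address, VRF) pair; a matching
# pair with a different priority, an address present only in another VRF, and
# an address missing entirely each produce a distinct failure message.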
{
|
||||
"name": "success",
|
||||
"test": VerifyErrdisableRecovery,
|
||||
"eos_data": [
|
||||
"""
|
||||
Errdisable Reason Timer Status Timer Interval
|
||||
------------------------------ ----------------- --------------
|
||||
acl Enabled 300
|
||||
bpduguard Enabled 300
|
||||
arp-inspection Enabled 30
|
||||
"""
|
||||
],
|
||||
"inputs": {"reasons": [{"reason": "acl", "interval": 300}, {"reason": "bpduguard", "interval": 300}]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
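# Editor's note (assumption): unlike the JSON payloads used elsewhere in this
# file, eos_data here is the raw text of an errdisable recovery table, so the
# test presumably parses the reason/status/interval columns from that output.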
{
|
||||
"name": "failure-reason-missing",
|
||||
"test": VerifyErrdisableRecovery,
|
||||
"eos_data": [
|
||||
"""
|
||||
Errdisable Reason Timer Status Timer Interval
|
||||
------------------------------ ----------------- --------------
|
||||
acl Enabled 300
|
||||
bpduguard Enabled 300
|
||||
arp-inspection Enabled 30
|
||||
"""
|
||||
],
|
||||
"inputs": {"reasons": [{"reason": "acl", "interval": 300}, {"reason": "arp-inspection", "interval": 30}, {"reason": "tapagg", "interval": 30}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["`tapagg`: Not found."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-reason-disabled",
|
||||
"test": VerifyErrdisableRecovery,
|
||||
"eos_data": [
|
||||
"""
|
||||
Errdisable Reason Timer Status Timer Interval
|
||||
------------------------------ ----------------- --------------
|
||||
acl Disabled 300
|
||||
bpduguard Enabled 300
|
||||
arp-inspection Enabled 30
|
||||
"""
|
||||
],
|
||||
"inputs": {"reasons": [{"reason": "acl", "interval": 300}, {"reason": "arp-inspection", "interval": 30}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["`acl`:\nExpected `Enabled` as the status, but found `Disabled` instead."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-interval-not-ok",
|
||||
"test": VerifyErrdisableRecovery,
|
||||
"eos_data": [
|
||||
"""
|
||||
Errdisable Reason Timer Status Timer Interval
|
||||
------------------------------ ----------------- --------------
|
||||
acl Enabled 300
|
||||
bpduguard Enabled 300
|
||||
arp-inspection Enabled 30
|
||||
"""
|
||||
],
|
||||
"inputs": {"reasons": [{"reason": "acl", "interval": 30}, {"reason": "arp-inspection", "interval": 30}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["`acl`:\nExpected `30` as the interval, but found `300` instead."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-all-type",
|
||||
"test": VerifyErrdisableRecovery,
|
||||
"eos_data": [
|
||||
"""
|
||||
Errdisable Reason Timer Status Timer Interval
|
||||
------------------------------ ----------------- --------------
|
||||
acl Disabled 300
|
||||
bpduguard Enabled 300
|
||||
arp-inspection Enabled 30
|
||||
"""
|
||||
],
|
||||
"inputs": {"reasons": [{"reason": "acl", "interval": 30}, {"reason": "arp-inspection", "interval": 300}, {"reason": "tapagg", "interval": 30}]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"`acl`:\nExpected `30` as the interval, but found `300` instead.\nExpected `Enabled` as the status, but found `Disabled` instead.",
|
||||
"`arp-inspection`:\nExpected `300` as the interval, but found `30` instead.",
|
||||
"`tapagg`: Not found.",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
128
tests/units/anta_tests/test_snmp.py
Normal file
|
@ -0,0 +1,128 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.snmp.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.snmp import VerifySnmpContact, VerifySnmpIPv4Acl, VerifySnmpIPv6Acl, VerifySnmpLocation, VerifySnmpStatus
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySnmpStatus,
|
||||
"eos_data": [{"vrfs": {"snmpVrfs": ["MGMT", "default"]}, "enabled": True}],
|
||||
"inputs": {"vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifySnmpStatus,
|
||||
"eos_data": [{"vrfs": {"snmpVrfs": ["default"]}, "enabled": True}],
|
||||
"inputs": {"vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["SNMP agent disabled in vrf MGMT"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-disabled",
|
||||
"test": VerifySnmpStatus,
|
||||
"eos_data": [{"vrfs": {"snmpVrfs": ["default"]}, "enabled": False}],
|
||||
"inputs": {"vrf": "default"},
|
||||
"expected": {"result": "failure", "messages": ["SNMP agent disabled in vrf default"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySnmpIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": [{"type": "Ip4Acl", "name": "ACL_IPV4_SNMP", "configuredVrfs": ["MGMT"], "activeVrfs": ["MGMT"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifySnmpIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": []}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Expected 1 SNMP IPv4 ACL(s) in vrf MGMT but got 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifySnmpIPv4Acl,
|
||||
"eos_data": [{"ipAclList": {"aclList": [{"type": "Ip4Acl", "name": "ACL_IPV4_SNMP", "configuredVrfs": ["default"], "activeVrfs": ["default"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["SNMP IPv4 ACL(s) not configured or active in vrf MGMT: ['ACL_IPV4_SNMP']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySnmpIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": [{"type": "Ip6Acl", "name": "ACL_IPV6_SNMP", "configuredVrfs": ["MGMT"], "activeVrfs": ["MGMT"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-number",
|
||||
"test": VerifySnmpIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": []}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["Expected 1 SNMP IPv6 ACL(s) in vrf MGMT but got 0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-vrf",
|
||||
"test": VerifySnmpIPv6Acl,
|
||||
"eos_data": [{"ipv6AclList": {"aclList": [{"type": "Ip6Acl", "name": "ACL_IPV6_SNMP", "configuredVrfs": ["default"], "activeVrfs": ["default"]}]}}],
|
||||
"inputs": {"number": 1, "vrf": "MGMT"},
|
||||
"expected": {"result": "failure", "messages": ["SNMP IPv6 ACL(s) not configured or active in vrf MGMT: ['ACL_IPV6_SNMP']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySnmpLocation,
|
||||
"eos_data": [
|
||||
{
|
||||
"location": {"location": "New York"},
|
||||
}
|
||||
],
|
||||
"inputs": {"location": "New York"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-location",
|
||||
"test": VerifySnmpLocation,
|
||||
"eos_data": [
|
||||
{
|
||||
"location": {"location": "Europe"},
|
||||
}
|
||||
],
|
||||
"inputs": {"location": "New York"},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Expected `New York` as the location, but found `Europe` instead."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySnmpContact,
|
||||
"eos_data": [
|
||||
{
|
||||
"contact": {"contact": "Jon@example.com"},
|
||||
}
|
||||
],
|
||||
"inputs": {"contact": "Jon@example.com"},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-contact",
|
||||
"test": VerifySnmpContact,
|
||||
"eos_data": [
|
||||
{
|
||||
"contact": {"contact": "Jon@example.com"},
|
||||
}
|
||||
],
|
||||
"inputs": {"contact": "Bob@example.com"},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Expected `Bob@example.com` as the contact, but found `Jon@example.com` instead."],
|
||||
},
|
||||
},
|
||||
]
|
101
tests/units/anta_tests/test_software.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Test inputs for anta.tests.hardware"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.software import VerifyEOSExtensions, VerifyEOSVersion, VerifyTerminAttrVersion
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyEOSVersion,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "vEOS-lab",
|
||||
"internalVersion": "4.27.0F-24305004.4270F",
|
||||
"version": "4.27.0F",
|
||||
}
|
||||
],
|
||||
"inputs": {"versions": ["4.27.0F", "4.28.0F"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyEOSVersion,
|
||||
"eos_data": [
|
||||
{
|
||||
"modelName": "vEOS-lab",
|
||||
"internalVersion": "4.27.0F-24305004.4270F",
|
||||
"version": "4.27.0F",
|
||||
}
|
||||
],
|
||||
"inputs": {"versions": ["4.27.1F"]},
|
||||
"expected": {"result": "failure", "messages": ["device is running version \"4.27.0F\" not in expected versions: ['4.27.1F']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyTerminAttrVersion,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1107543.52,
|
||||
"modelName": "vEOS-lab",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-8.0.0-3255441"}],
|
||||
"switchType": "fixedSystem",
|
||||
"packages": {
|
||||
"TerminAttr-core": {"release": "1", "version": "v1.17.0"},
|
||||
},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {"versions": ["v1.17.0", "v1.18.1"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyTerminAttrVersion,
|
||||
"eos_data": [
|
||||
{
|
||||
"imageFormatVersion": "1.0",
|
||||
"uptime": 1107543.52,
|
||||
"modelName": "vEOS-lab",
|
||||
"details": {
|
||||
"deviations": [],
|
||||
"components": [{"name": "Aboot", "version": "Aboot-veos-8.0.0-3255441"}],
|
||||
"switchType": "fixedSystem",
|
||||
"packages": {
|
||||
"TerminAttr-core": {"release": "1", "version": "v1.17.0"},
|
||||
},
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": {"versions": ["v1.17.1", "v1.18.1"]},
|
||||
"expected": {"result": "failure", "messages": ["device is running TerminAttr version v1.17.0 and is not in the allowed list: ['v1.17.1', 'v1.18.1']"]},
|
||||
},
|
||||
{
|
||||
"name": "success-no-extensions",
|
||||
"test": VerifyEOSExtensions,
|
||||
"eos_data": [
|
||||
{"extensions": {}, "extensionStoredDir": "flash:", "warnings": ["No extensions are available"]},
|
||||
{"extensions": []},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyEOSExtensions,
|
||||
"eos_data": [
|
||||
{"extensions": {}, "extensionStoredDir": "flash:", "warnings": ["No extensions are available"]},
|
||||
{"extensions": ["dummy"]},
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Missing EOS extensions: installed [] / configured: ['dummy']"]},
|
||||
},
|
||||
]
|
328
tests/units/anta_tests/test_stp.py
Normal file
|
@ -0,0 +1,328 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.stp.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.stp import VerifySTPBlockedPorts, VerifySTPCounters, VerifySTPForwardingPorts, VerifySTPMode, VerifySTPRootPriority
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySTPMode,
|
||||
"eos_data": [
|
||||
{"spanningTreeVlanInstances": {"10": {"spanningTreeVlanInstance": {"protocol": "rstp"}}}},
|
||||
{"spanningTreeVlanInstances": {"20": {"spanningTreeVlanInstance": {"protocol": "rstp"}}}},
|
||||
],
|
||||
"inputs": {"mode": "rstp", "vlans": [10, 20]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-instances",
|
||||
"test": VerifySTPMode,
|
||||
"eos_data": [
|
||||
{"spanningTreeVlanInstances": {}},
|
||||
{"spanningTreeVlanInstances": {}},
|
||||
],
|
||||
"inputs": {"mode": "rstp", "vlans": [10, 20]},
|
||||
"expected": {"result": "failure", "messages": ["STP mode 'rstp' not configured for the following VLAN(s): [10, 20]"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-mode",
|
||||
"test": VerifySTPMode,
|
||||
"eos_data": [
|
||||
{"spanningTreeVlanInstances": {"10": {"spanningTreeVlanInstance": {"protocol": "mstp"}}}},
|
||||
{"spanningTreeVlanInstances": {"20": {"spanningTreeVlanInstance": {"protocol": "mstp"}}}},
|
||||
],
|
||||
"inputs": {"mode": "rstp", "vlans": [10, 20]},
|
||||
"expected": {"result": "failure", "messages": ["Wrong STP mode configured for the following VLAN(s): [10, 20]"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-both",
|
||||
"test": VerifySTPMode,
|
||||
"eos_data": [
|
||||
{"spanningTreeVlanInstances": {}},
|
||||
{"spanningTreeVlanInstances": {"20": {"spanningTreeVlanInstance": {"protocol": "mstp"}}}},
|
||||
],
|
||||
"inputs": {"mode": "rstp", "vlans": [10, 20]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["STP mode 'rstp' not configured for the following VLAN(s): [10]", "Wrong STP mode configured for the following VLAN(s): [20]"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySTPBlockedPorts,
|
||||
"eos_data": [{"spanningTreeInstances": {}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifySTPBlockedPorts,
|
||||
"eos_data": [{"spanningTreeInstances": {"MST0": {"spanningTreeBlockedPorts": ["Ethernet10"]}, "MST10": {"spanningTreeBlockedPorts": ["Ethernet10"]}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["The following ports are blocked by STP: {'MST0': ['Ethernet10'], 'MST10': ['Ethernet10']}"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySTPCounters,
|
||||
"eos_data": [{"interfaces": {"Ethernet10": {"bpduSent": 99, "bpduReceived": 0, "bpduTaggedError": 0, "bpduOtherError": 0, "bpduRateLimitCount": 0}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifySTPCounters,
|
||||
"eos_data": [
|
||||
{
|
||||
"interfaces": {
|
||||
"Ethernet10": {"bpduSent": 201, "bpduReceived": 0, "bpduTaggedError": 3, "bpduOtherError": 0, "bpduRateLimitCount": 0},
|
||||
"Ethernet11": {"bpduSent": 99, "bpduReceived": 0, "bpduTaggedError": 0, "bpduOtherError": 6, "bpduRateLimitCount": 0},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["The following interfaces have STP BPDU packet errors: ['Ethernet10', 'Ethernet11']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifySTPForwardingPorts,
|
||||
"eos_data": [
|
||||
{
|
||||
"unmappedVlans": [],
|
||||
"topologies": {"Mst10": {"vlans": [10], "interfaces": {"Ethernet10": {"state": "forwarding"}, "MplsTrunk1": {"state": "forwarding"}}}},
|
||||
},
|
||||
{
|
||||
"unmappedVlans": [],
|
||||
"topologies": {"Mst20": {"vlans": [20], "interfaces": {"Ethernet10": {"state": "forwarding"}, "MplsTrunk1": {"state": "forwarding"}}}},
|
||||
},
|
||||
],
|
||||
"inputs": {"vlans": [10, 20]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-vlan-not-in-topology", # Should it succeed really ? TODO - this output should be impossible
|
||||
"test": VerifySTPForwardingPorts,
|
||||
"eos_data": [
|
||||
{
|
||||
"unmappedVlans": [],
|
||||
"topologies": {"Mst10": {"vlans": [10], "interfaces": {"Ethernet10": {"state": "forwarding"}, "MplsTrunk1": {"state": "forwarding"}}}},
|
||||
},
|
||||
{
|
||||
"unmappedVlans": [],
|
||||
"topologies": {"Mst10": {"vlans": [10], "interfaces": {"Ethernet10": {"state": "forwarding"}, "MplsTrunk1": {"state": "forwarding"}}}},
|
||||
},
|
||||
],
|
||||
"inputs": {"vlans": [10, 20]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-instances",
|
||||
"test": VerifySTPForwardingPorts,
|
||||
"eos_data": [{"unmappedVlans": [], "topologies": {}}, {"unmappedVlans": [], "topologies": {}}],
|
||||
"inputs": {"vlans": [10, 20]},
|
||||
"expected": {"result": "failure", "messages": ["STP instance is not configured for the following VLAN(s): [10, 20]"]},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifySTPForwardingPorts,
|
||||
"eos_data": [
|
||||
{
|
||||
"unmappedVlans": [],
|
||||
"topologies": {"Vl10": {"vlans": [10], "interfaces": {"Ethernet10": {"state": "discarding"}, "MplsTrunk1": {"state": "forwarding"}}}},
|
||||
},
|
||||
{
|
||||
"unmappedVlans": [],
|
||||
"topologies": {"Vl20": {"vlans": [20], "interfaces": {"Ethernet10": {"state": "discarding"}, "MplsTrunk1": {"state": "forwarding"}}}},
|
||||
},
|
||||
],
|
||||
"inputs": {"vlans": [10, 20]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following VLAN(s) have interface(s) that are not in a fowarding state: [{'VLAN 10': ['Ethernet10']}, {'VLAN 20': ['Ethernet10']}]"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success-specific-instances",
|
||||
"test": VerifySTPRootPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"instances": {
|
||||
"VL10": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 10,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
"VL20": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 20,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
"VL30": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 30,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"priority": 32768, "instances": [10, 20]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-all-instances",
|
||||
"test": VerifySTPRootPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"instances": {
|
||||
"VL10": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 10,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
"VL20": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 20,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
"VL30": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 30,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"priority": 32768},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-MST",
|
||||
"test": VerifySTPRootPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"instances": {
|
||||
"MST0": {
|
||||
"rootBridge": {
|
||||
"priority": 16384,
|
||||
"systemIdExtension": 0,
|
||||
"macAddress": "02:1c:73:8b:93:ac",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"priority": 16384, "instances": [0]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
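# Editor's note (assumption): the "instances" input takes bare IDs (10, 20, 0)
# while the EOS output keys them as "VL10"/"MST0", so the test presumably strips
# the "VL"/"MST" prefix before matching; an unrecognized prefix is reported as
# an unsupported instance type, as in the case below.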
{
|
||||
"name": "failure-no-instances",
|
||||
"test": VerifySTPRootPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"instances": {
|
||||
"WRONG0": {
|
||||
"rootBridge": {
|
||||
"priority": 16384,
|
||||
"systemIdExtension": 0,
|
||||
"macAddress": "02:1c:73:8b:93:ac",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"priority": 32768, "instances": [0]},
|
||||
"expected": {"result": "failure", "messages": ["Unsupported STP instance type: WRONG0"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-instance-type",
|
||||
"test": VerifySTPRootPriority,
|
||||
"eos_data": [{"instances": {}}],
|
||||
"inputs": {"priority": 32768, "instances": [10, 20]},
|
||||
"expected": {"result": "failure", "messages": ["No STP instances configured"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-priority",
|
||||
"test": VerifySTPRootPriority,
|
||||
"eos_data": [
|
||||
{
|
||||
"instances": {
|
||||
"VL10": {
|
||||
"rootBridge": {
|
||||
"priority": 32768,
|
||||
"systemIdExtension": 10,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
"VL20": {
|
||||
"rootBridge": {
|
||||
"priority": 8196,
|
||||
"systemIdExtension": 20,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
"VL30": {
|
||||
"rootBridge": {
|
||||
"priority": 8196,
|
||||
"systemIdExtension": 30,
|
||||
"macAddress": "00:1c:73:27:95:a2",
|
||||
"helloTime": 2.0,
|
||||
"maxAge": 20,
|
||||
"forwardDelay": 15,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"priority": 32768, "instances": [10, 20, 30]},
|
||||
"expected": {"result": "failure", "messages": ["The following instance(s) have the wrong STP root priority configured: ['VL20', 'VL30']"]},
|
||||
},
|
||||
]
|
283
tests/units/anta_tests/test_system.py
Normal file
|
@ -0,0 +1,283 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""Test inputs for anta.tests.system"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.system import (
|
||||
VerifyAgentLogs,
|
||||
VerifyCoredump,
|
||||
VerifyCPUUtilization,
|
||||
VerifyFileSystemUtilization,
|
||||
VerifyMemoryUtilization,
|
||||
VerifyNTP,
|
||||
VerifyReloadCause,
|
||||
VerifyUptime,
|
||||
)
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyUptime,
|
||||
"eos_data": [{"upTime": 1186689.15, "loadAvg": [0.13, 0.12, 0.09], "users": 1, "currentTime": 1683186659.139859}],
|
||||
"inputs": {"minimum": 666},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyUptime,
|
||||
"eos_data": [{"upTime": 665.15, "loadAvg": [0.13, 0.12, 0.09], "users": 1, "currentTime": 1683186659.139859}],
|
||||
"inputs": {"minimum": 666},
|
||||
"expected": {"result": "failure", "messages": ["Device uptime is 665.15 seconds"]},
|
||||
},
|
||||
{
|
||||
"name": "success-no-reload",
|
||||
"test": VerifyReloadCause,
|
||||
"eos_data": [{"kernelCrashData": [], "resetCauses": [], "full": False}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-valid-cause",
|
||||
"test": VerifyReloadCause,
|
||||
"eos_data": [
|
||||
{
|
||||
"resetCauses": [
|
||||
{"recommendedAction": "No action necessary.", "description": "Reload requested by the user.", "timestamp": 1683186892.0, "debugInfoIsDir": False}
|
||||
],
|
||||
"full": False,
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyReloadCause,
|
||||
# The failure cause is made up
|
||||
"eos_data": [
|
||||
{
|
||||
"resetCauses": [
|
||||
{"recommendedAction": "No action necessary.", "description": "Reload after crash.", "timestamp": 1683186892.0, "debugInfoIsDir": False}
|
||||
],
|
||||
"full": False,
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Reload cause is: 'Reload after crash.'"]},
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"test": VerifyReloadCause,
|
||||
"eos_data": [{}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "error", "messages": ["No reload causes available"]},
|
||||
},
|
||||
{
|
||||
"name": "success-without-minidump",
|
||||
"test": VerifyCoredump,
|
||||
"eos_data": [{"mode": "compressedDeferred", "coreFiles": []}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "success-with-minidump",
|
||||
"test": VerifyCoredump,
|
||||
"eos_data": [{"mode": "compressedDeferred", "coreFiles": ["minidump"]}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-without-minidump",
|
||||
"test": VerifyCoredump,
|
||||
"eos_data": [{"mode": "compressedDeferred", "coreFiles": ["core.2344.1584483862.Mlag.gz", "core.23101.1584483867.Mlag.gz"]}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Core dump(s) have been found: ['core.2344.1584483862.Mlag.gz', 'core.23101.1584483867.Mlag.gz']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-with-minidump",
|
||||
"test": VerifyCoredump,
|
||||
"eos_data": [{"mode": "compressedDeferred", "coreFiles": ["minidump", "core.2344.1584483862.Mlag.gz", "core.23101.1584483867.Mlag.gz"]}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Core dump(s) have been found: ['core.2344.1584483862.Mlag.gz', 'core.23101.1584483867.Mlag.gz']"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyAgentLogs,
|
||||
"eos_data": [""],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyAgentLogs,
|
||||
"eos_data": [
|
||||
"""===> /var/log/agents/Test-666 Thu May 4 09:57:02 2023 <===
|
||||
CLI Exception: Exception
|
||||
CLI Exception: Backtrace
|
||||
===> /var/log/agents/Aaa-855 Fri Jul 7 15:07:00 2023 <===
|
||||
===== Output from /usr/bin/Aaa [] (PID=855) started Jul 7 15:06:11.606414 ===
|
||||
EntityManager::doBackoff waiting for remote sysdb version ....ok
|
||||
|
||||
===> /var/log/agents/Acl-830 Fri Jul 7 15:07:00 2023 <===
|
||||
===== Output from /usr/bin/Acl [] (PID=830) started Jul 7 15:06:10.871700 ===
|
||||
EntityManager::doBackoff waiting for remote sysdb version ...................ok
|
||||
"""
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Device has reported agent crashes:\n"
|
||||
" * /var/log/agents/Test-666 Thu May 4 09:57:02 2023\n"
|
||||
" * /var/log/agents/Aaa-855 Fri Jul 7 15:07:00 2023\n"
|
||||
" * /var/log/agents/Acl-830 Fri Jul 7 15:07:00 2023",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyCPUUtilization,
|
||||
"eos_data": [
|
||||
{
|
||||
"cpuInfo": {"%Cpu(s)": {"idle": 88.2, "stolen": 0.0, "user": 5.9, "swIrq": 0.0, "ioWait": 0.0, "system": 0.0, "hwIrq": 5.9, "nice": 0.0}},
|
||||
"processes": {
|
||||
"1": {
|
||||
"userName": "root",
|
||||
"status": "S",
|
||||
"memPct": 0.3,
|
||||
"niceValue": 0,
|
||||
"cpuPct": 0.0,
|
||||
"cpuPctType": "{:.1f}",
|
||||
"cmd": "systemd",
|
||||
"residentMem": "5096",
|
||||
"priority": "20",
|
||||
"activeTime": 360,
|
||||
"virtMem": "6644",
|
||||
"sharedMem": "3996",
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyCPUUtilization,
|
||||
"eos_data": [
|
||||
{
|
||||
"cpuInfo": {"%Cpu(s)": {"idle": 24.8, "stolen": 0.0, "user": 5.9, "swIrq": 0.0, "ioWait": 0.0, "system": 0.0, "hwIrq": 5.9, "nice": 0.0}},
|
||||
"processes": {
|
||||
"1": {
|
||||
"userName": "root",
|
||||
"status": "S",
|
||||
"memPct": 0.3,
|
||||
"niceValue": 0,
|
||||
"cpuPct": 0.0,
|
||||
"cpuPctType": "{:.1f}",
|
||||
"cmd": "systemd",
|
||||
"residentMem": "5096",
|
||||
"priority": "20",
|
||||
"activeTime": 360,
|
||||
"virtMem": "6644",
|
||||
"sharedMem": "3996",
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device has reported a high CPU utilization: 75.2%"]},
|
||||
},
|
||||
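# Editor's note (worked arithmetic, assuming utilization is derived from the
# idle counter): 100 - idle = 100 - 24.8 = 75.2, matching the 75.2% in the
# expected failure message above.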
{
|
||||
"name": "success",
|
||||
"test": VerifyMemoryUtilization,
|
||||
"eos_data": [
|
||||
{
|
||||
"uptime": 1994.67,
|
||||
"modelName": "vEOS-lab",
|
||||
"internalVersion": "4.27.3F-26379303.4273F",
|
||||
"memTotal": 2004568,
|
||||
"memFree": 879004,
|
||||
"version": "4.27.3F",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyMemoryUtilization,
|
||||
"eos_data": [
|
||||
{
|
||||
"uptime": 1994.67,
|
||||
"modelName": "vEOS-lab",
|
||||
"internalVersion": "4.27.3F-26379303.4273F",
|
||||
"memTotal": 2004568,
|
||||
"memFree": 89004,
|
||||
"version": "4.27.3F",
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Device has reported a high memory usage: 95.56%"]},
|
||||
},
|
||||
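# Editor's note (worked arithmetic, assuming usage is computed from memFree and
# memTotal): (1 - 89004 / 2004568) * 100 ~= 95.56%, matching the expected
# failure message above.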
{
|
||||
"name": "success",
|
||||
"test": VerifyFileSystemUtilization,
|
||||
"eos_data": [
|
||||
"""Filesystem Size Used Avail Use% Mounted on
|
||||
/dev/sda2 3.9G 988M 2.9G 26% /mnt/flash
|
||||
none 294M 78M 217M 27% /
|
||||
none 294M 78M 217M 27% /.overlay
|
||||
/dev/loop0 461M 461M 0 100% /rootfs-i386
|
||||
"""
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyFileSystemUtilization,
|
||||
"eos_data": [
|
||||
"""Filesystem Size Used Avail Use% Mounted on
|
||||
/dev/sda2 3.9G 988M 2.9G 84% /mnt/flash
|
||||
none 294M 78M 217M 27% /
|
||||
none 294M 78M 217M 84% /.overlay
|
||||
/dev/loop0 461M 461M 0 100% /rootfs-i386
|
||||
"""
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Mount point /dev/sda2 3.9G 988M 2.9G 84% /mnt/flash is higher than 75%: reported 84%",
|
||||
"Mount point none 294M 78M 217M 84% /.overlay is higher than 75%: reported 84%",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyNTP,
|
||||
"eos_data": [
|
||||
"""synchronised
|
||||
poll interval unknown
|
||||
"""
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyNTP,
|
||||
"eos_data": [
|
||||
"""unsynchronised
|
||||
poll interval unknown
|
||||
"""
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["The device is not synchronized with the configured NTP server(s): 'unsynchronised'"]},
|
||||
},
|
||||
]
|
37
tests/units/anta_tests/test_vlan.py
Normal file
|
@@ -0,0 +1,37 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.vlan.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.vlan import VerifyVlanInternalPolicy
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyVlanInternalPolicy,
|
||||
"eos_data": [{"policy": "ascending", "startVlanId": 1006, "endVlanId": 4094}],
|
||||
"inputs": {"policy": "ascending", "start_vlan_id": 1006, "end_vlan_id": 4094},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-incorrect-policy",
|
||||
"test": VerifyVlanInternalPolicy,
|
||||
"eos_data": [{"policy": "descending", "startVlanId": 4094, "endVlanId": 1006}],
|
||||
"inputs": {"policy": "ascending", "start_vlan_id": 1006, "end_vlan_id": 4094},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The VLAN internal allocation policy is not configured properly:\n"
|
||||
"Expected `ascending` as the policy, but found `descending` instead.\n"
|
||||
"Expected `1006` as the startVlanId, but found `4094` instead.\n"
|
||||
"Expected `4094` as the endVlanId, but found `1006` instead."
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
365
tests/units/anta_tests/test_vxlan.py
Normal file
|
@@ -0,0 +1,365 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tests.vxlan.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from anta.tests.vxlan import VerifyVxlan1ConnSettings, VerifyVxlan1Interface, VerifyVxlanConfigSanity, VerifyVxlanVniBinding, VerifyVxlanVtep
|
||||
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
|
||||
|
||||
DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyVxlan1Interface,
|
||||
"eos_data": [{"interfaceDescriptions": {"Vxlan1": {"lineProtocolStatus": "up", "interfaceStatus": "up"}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyVxlan1Interface,
|
||||
"eos_data": [{"interfaceDescriptions": {"Loopback0": {"lineProtocolStatus": "up", "interfaceStatus": "up"}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["Vxlan1 interface is not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyVxlan1Interface,
|
||||
"eos_data": [{"interfaceDescriptions": {"Vxlan1": {"lineProtocolStatus": "down", "interfaceStatus": "up"}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Vxlan1 interface is down/up"]},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyVxlan1Interface,
|
||||
"eos_data": [{"interfaceDescriptions": {"Vxlan1": {"lineProtocolStatus": "up", "interfaceStatus": "down"}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Vxlan1 interface is up/down"]},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyVxlan1Interface,
|
||||
"eos_data": [{"interfaceDescriptions": {"Vxlan1": {"lineProtocolStatus": "down", "interfaceStatus": "down"}}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "failure", "messages": ["Vxlan1 interface is down/down"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyVxlanConfigSanity,
|
||||
"eos_data": [
|
||||
{
|
||||
"categories": {
|
||||
"localVtep": {
|
||||
"description": "Local VTEP Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [
|
||||
{"name": "Loopback IP Address", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VLAN-VNI Map", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Flood List", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Routing", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VNI VRF ACL", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VRF-VNI Dynamic VLAN", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Decap VRF-VNI Map", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
],
|
||||
},
|
||||
"remoteVtep": {
|
||||
"description": "Remote VTEP Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [{"name": "Remote VTEP", "checkPass": True, "hasWarning": False, "detail": ""}],
|
||||
},
|
||||
"pd": {
|
||||
"description": "Platform Dependent Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [
|
||||
{"name": "VXLAN Bridging", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VXLAN Routing", "checkPass": True, "hasWarning": False, "detail": "VXLAN Routing not enabled"},
|
||||
],
|
||||
},
|
||||
"cvx": {
|
||||
"description": "CVX Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [{"name": "CVX Server", "checkPass": True, "hasWarning": False, "detail": "Not in controller client mode"}],
|
||||
},
|
||||
"mlag": {
|
||||
"description": "MLAG Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "Run 'show mlag config-sanity' to verify MLAG config",
|
||||
"hasWarning": False,
|
||||
"items": [
|
||||
{"name": "Peer VTEP IP", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "MLAG VTEP IP", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Virtual VTEP IP", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Peer VLAN-VNI", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "MLAG Inactive State", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
],
|
||||
},
|
||||
},
|
||||
"warnings": [],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure",
|
||||
"test": VerifyVxlanConfigSanity,
|
||||
"eos_data": [
|
||||
{
|
||||
"categories": {
|
||||
"localVtep": {
|
||||
"description": "Local VTEP Configuration Check",
|
||||
"allCheckPass": False,
|
||||
"detail": "",
|
||||
"hasWarning": True,
|
||||
"items": [
|
||||
{"name": "Loopback IP Address", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VLAN-VNI Map", "checkPass": False, "hasWarning": False, "detail": "No VLAN-VNI mapping in Vxlan1"},
|
||||
{"name": "Flood List", "checkPass": False, "hasWarning": True, "detail": "No VXLAN VLANs in Vxlan1"},
|
||||
{"name": "Routing", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VNI VRF ACL", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VRF-VNI Dynamic VLAN", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Decap VRF-VNI Map", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
],
|
||||
},
|
||||
"remoteVtep": {
|
||||
"description": "Remote VTEP Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [{"name": "Remote VTEP", "checkPass": True, "hasWarning": False, "detail": ""}],
|
||||
},
|
||||
"pd": {
|
||||
"description": "Platform Dependent Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [
|
||||
{"name": "VXLAN Bridging", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "VXLAN Routing", "checkPass": True, "hasWarning": False, "detail": "VXLAN Routing not enabled"},
|
||||
],
|
||||
},
|
||||
"cvx": {
|
||||
"description": "CVX Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "",
|
||||
"hasWarning": False,
|
||||
"items": [{"name": "CVX Server", "checkPass": True, "hasWarning": False, "detail": "Not in controller client mode"}],
|
||||
},
|
||||
"mlag": {
|
||||
"description": "MLAG Configuration Check",
|
||||
"allCheckPass": True,
|
||||
"detail": "Run 'show mlag config-sanity' to verify MLAG config",
|
||||
"hasWarning": False,
|
||||
"items": [
|
||||
{"name": "Peer VTEP IP", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "MLAG VTEP IP", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Virtual VTEP IP", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "Peer VLAN-VNI", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
{"name": "MLAG Inactive State", "checkPass": True, "hasWarning": False, "detail": ""},
|
||||
],
|
||||
},
|
||||
},
|
||||
"warnings": ["Your configuration contains warnings. This does not mean misconfigurations. But you may wish to re-check your configurations."],
|
||||
}
|
||||
],
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"VXLAN config sanity check is not passing: {'localVtep': {'description': 'Local VTEP Configuration Check', "
|
||||
"'allCheckPass': False, 'detail': '', 'hasWarning': True, 'items': [{'name': 'Loopback IP Address', 'checkPass': True, "
|
||||
"'hasWarning': False, 'detail': ''}, {'name': 'VLAN-VNI Map', 'checkPass': False, 'hasWarning': False, 'detail': "
|
||||
"'No VLAN-VNI mapping in Vxlan1'}, {'name': 'Flood List', 'checkPass': False, 'hasWarning': True, 'detail': "
|
||||
"'No VXLAN VLANs in Vxlan1'}, {'name': 'Routing', 'checkPass': True, 'hasWarning': False, 'detail': ''}, {'name': "
|
||||
"'VNI VRF ACL', 'checkPass': True, 'hasWarning': False, 'detail': ''}, {'name': 'VRF-VNI Dynamic VLAN', 'checkPass': True, "
|
||||
"'hasWarning': False, 'detail': ''}, {'name': 'Decap VRF-VNI Map', 'checkPass': True, 'hasWarning': False, 'detail': ''}]}}"
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyVxlanConfigSanity,
|
||||
"eos_data": [{"categories": {}}],
|
||||
"inputs": None,
|
||||
"expected": {"result": "skipped", "messages": ["VXLAN is not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyVxlanVniBinding,
|
||||
"eos_data": [
|
||||
{
|
||||
"vxlanIntfs": {
|
||||
"Vxlan1": {
|
||||
"vniBindings": {
|
||||
"10020": {"vlan": 20, "dynamicVlan": False, "source": "static", "interfaces": {"Ethernet31": {"dot1q": 0}, "Vxlan1": {"dot1q": 20}}}
|
||||
},
|
||||
"vniBindingsToVrf": {"500": {"vrfName": "PROD", "vlan": 1199, "source": "evpn"}},
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bindings": {10020: 20, 500: 1199}},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-binding",
|
||||
"test": VerifyVxlanVniBinding,
|
||||
"eos_data": [
|
||||
{
|
||||
"vxlanIntfs": {
|
||||
"Vxlan1": {
|
||||
"vniBindings": {
|
||||
"10020": {"vlan": 20, "dynamicVlan": False, "source": "static", "interfaces": {"Ethernet31": {"dot1q": 0}, "Vxlan1": {"dot1q": 20}}}
|
||||
},
|
||||
"vniBindingsToVrf": {"500": {"vrfName": "PROD", "vlan": 1199, "source": "evpn"}},
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bindings": {10010: 10, 10020: 20, 500: 1199}},
|
||||
"expected": {"result": "failure", "messages": ["The following VNI(s) have no binding: ['10010']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-binding",
|
||||
"test": VerifyVxlanVniBinding,
|
||||
"eos_data": [
|
||||
{
|
||||
"vxlanIntfs": {
|
||||
"Vxlan1": {
|
||||
"vniBindings": {
|
||||
"10020": {"vlan": 30, "dynamicVlan": False, "source": "static", "interfaces": {"Ethernet31": {"dot1q": 0}, "Vxlan1": {"dot1q": 20}}}
|
||||
},
|
||||
"vniBindingsToVrf": {"500": {"vrfName": "PROD", "vlan": 1199, "source": "evpn"}},
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bindings": {10020: 20, 500: 1199}},
|
||||
"expected": {"result": "failure", "messages": ["The following VNI(s) have the wrong VLAN binding: [{'10020': 30}]"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-and-wrong-binding",
|
||||
"test": VerifyVxlanVniBinding,
|
||||
"eos_data": [
|
||||
{
|
||||
"vxlanIntfs": {
|
||||
"Vxlan1": {
|
||||
"vniBindings": {
|
||||
"10020": {"vlan": 30, "dynamicVlan": False, "source": "static", "interfaces": {"Ethernet31": {"dot1q": 0}, "Vxlan1": {"dot1q": 20}}}
|
||||
},
|
||||
"vniBindingsToVrf": {"500": {"vrfName": "PROD", "vlan": 1199, "source": "evpn"}},
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"inputs": {"bindings": {10010: 10, 10020: 20, 500: 1199}},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["The following VNI(s) have no binding: ['10010']", "The following VNI(s) have the wrong VLAN binding: [{'10020': 30}]"],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyVxlanVniBinding,
|
||||
"eos_data": [{"vxlanIntfs": {}}],
|
||||
"inputs": {"bindings": {10020: 20, 500: 1199}},
|
||||
"expected": {"result": "skipped", "messages": ["Vxlan1 interface is not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyVxlanVtep,
|
||||
"eos_data": [{"vteps": {}, "interfaces": {"Vxlan1": {"vteps": ["10.1.1.5", "10.1.1.6"]}}}],
|
||||
"inputs": {"vteps": ["10.1.1.5", "10.1.1.6"]},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "failure-missing-vtep",
|
||||
"test": VerifyVxlanVtep,
|
||||
"eos_data": [{"vteps": {}, "interfaces": {"Vxlan1": {"vteps": ["10.1.1.5", "10.1.1.6"]}}}],
|
||||
"inputs": {"vteps": ["10.1.1.5", "10.1.1.6", "10.1.1.7"]},
|
||||
"expected": {"result": "failure", "messages": ["The following VTEP peer(s) are missing from the Vxlan1 interface: ['10.1.1.7']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-vtep",
|
||||
"test": VerifyVxlanVtep,
|
||||
"eos_data": [{"vteps": {}, "interfaces": {"Vxlan1": {"vteps": []}}}],
|
||||
"inputs": {"vteps": ["10.1.1.5", "10.1.1.6"]},
|
||||
"expected": {"result": "failure", "messages": ["The following VTEP peer(s) are missing from the Vxlan1 interface: ['10.1.1.5', '10.1.1.6']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-no-input-vtep",
|
||||
"test": VerifyVxlanVtep,
|
||||
"eos_data": [{"vteps": {}, "interfaces": {"Vxlan1": {"vteps": ["10.1.1.5"]}}}],
|
||||
"inputs": {"vteps": []},
|
||||
"expected": {"result": "failure", "messages": ["Unexpected VTEP peer(s) on Vxlan1 interface: ['10.1.1.5']"]},
|
||||
},
|
||||
{
|
||||
"name": "failure-missmatch",
|
||||
"test": VerifyVxlanVtep,
|
||||
"eos_data": [{"vteps": {}, "interfaces": {"Vxlan1": {"vteps": ["10.1.1.6", "10.1.1.7", "10.1.1.8"]}}}],
|
||||
"inputs": {"vteps": ["10.1.1.5", "10.1.1.6"]},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"The following VTEP peer(s) are missing from the Vxlan1 interface: ['10.1.1.5']",
|
||||
"Unexpected VTEP peer(s) on Vxlan1 interface: ['10.1.1.7', '10.1.1.8']",
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyVxlanVtep,
|
||||
"eos_data": [{"vteps": {}, "interfaces": {}}],
|
||||
"inputs": {"vteps": ["10.1.1.5", "10.1.1.6", "10.1.1.7"]},
|
||||
"expected": {"result": "skipped", "messages": ["Vxlan1 interface is not configured"]},
|
||||
},
|
||||
{
|
||||
"name": "success",
|
||||
"test": VerifyVxlan1ConnSettings,
|
||||
"eos_data": [{"interfaces": {"Vxlan1": {"srcIpIntf": "Loopback1", "udpPort": 4789}}}],
|
||||
"inputs": {"source_interface": "Loopback1", "udp_port": 4789},
|
||||
"expected": {"result": "success"},
|
||||
},
|
||||
{
|
||||
"name": "skipped",
|
||||
"test": VerifyVxlan1ConnSettings,
|
||||
"eos_data": [{"interfaces": {}}],
|
||||
"inputs": {"source_interface": "Loopback1", "udp_port": 4789},
|
||||
"expected": {"result": "skipped", "messages": ["Vxlan1 interface is not configured."]},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-interface",
|
||||
"test": VerifyVxlan1ConnSettings,
|
||||
"eos_data": [{"interfaces": {"Vxlan1": {"srcIpIntf": "Loopback10", "udpPort": 4789}}}],
|
||||
"inputs": {"source_interface": "lo1", "udp_port": 4789},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": ["Source interface is not correct. Expected `Loopback1` as source interface but found `Loopback10` instead."],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failure-wrong-port",
|
||||
"test": VerifyVxlan1ConnSettings,
|
||||
"eos_data": [{"interfaces": {"Vxlan1": {"srcIpIntf": "Loopback10", "udpPort": 4789}}}],
|
||||
"inputs": {"source_interface": "Lo1", "udp_port": 4780},
|
||||
"expected": {
|
||||
"result": "failure",
|
||||
"messages": [
|
||||
"Source interface is not correct. Expected `Loopback1` as source interface but found `Loopback10` instead.",
|
||||
"UDP port is not correct. Expected `4780` as UDP port but found `4789` instead.",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
3
tests/units/cli/__init__.py
Normal file
|
@@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
3
tests/units/cli/check/__init__.py
Normal file
|
@@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
30
tests/units/cli/check/test__init__.py
Normal file
|
@@ -0,0 +1,30 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.check
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
|
||||
def test_anta_check(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta check
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["check"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta check" in result.output
|
||||
|
||||
|
||||
def test_anta_check_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta check --help
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["check", "--help"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta check" in result.output
|
37
tests/units/cli/check/test_commands.py
Normal file
|
@@ -0,0 +1,37 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.check.commands
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from click.testing import CliRunner
|
||||
|
||||
DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"catalog_path, expected_exit, expected_output",
|
||||
[
|
||||
pytest.param("ghost_catalog.yml", ExitCode.USAGE_ERROR, "Error: Invalid value for '--catalog'", id="catalog does not exist"),
|
||||
pytest.param("test_catalog_with_undefined_module.yml", ExitCode.USAGE_ERROR, "Test catalog is invalid!", id="catalog is not valid"),
|
||||
pytest.param("test_catalog.yml", ExitCode.OK, "Catalog is valid", id="catalog valid"),
|
||||
],
|
||||
)
|
||||
def test_catalog(click_runner: CliRunner, catalog_path: Path, expected_exit: int, expected_output: str) -> None:
|
||||
"""
|
||||
Test `anta check catalog -c catalog`
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["check", "catalog", "-c", str(DATA_DIR / catalog_path)])
|
||||
assert result.exit_code == expected_exit
|
||||
assert expected_output in result.output
|
3
tests/units/cli/debug/__init__.py
Normal file
|
@@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
30
tests/units/cli/debug/test__init__.py
Normal file
|
@@ -0,0 +1,30 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.debug
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
|
||||
def test_anta_debug(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta debug
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["debug"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta debug" in result.output
|
||||
|
||||
|
||||
def test_anta_debug_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta debug --help
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["debug", "--help"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta debug" in result.output
|
60
tests/units/cli/debug/test_commands.py
Normal file
|
@@ -0,0 +1,60 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.debug.commands
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Literal
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from click.testing import CliRunner
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"command, ofmt, version, revision, device, failed",
|
||||
[
|
||||
pytest.param("show version", "json", None, None, "dummy", False, id="json command"),
|
||||
pytest.param("show version", "text", None, None, "dummy", False, id="text command"),
|
||||
pytest.param("show version", None, "latest", None, "dummy", False, id="version-latest"),
|
||||
pytest.param("show version", None, "1", None, "dummy", False, id="version"),
|
||||
pytest.param("show version", None, None, 3, "dummy", False, id="revision"),
|
||||
pytest.param("undefined", None, None, None, "dummy", True, id="command fails"),
|
||||
],
|
||||
)
|
||||
def test_run_cmd(
|
||||
click_runner: CliRunner, command: str, ofmt: Literal["json", "text"], version: Literal["1", "latest"] | None, revision: int | None, device: str, failed: bool
|
||||
) -> None:
|
||||
"""
|
||||
Test `anta debug run-cmd`
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
cli_args = ["-l", "debug", "debug", "run-cmd", "--command", command, "--device", device]
|
||||
|
||||
# ofmt
|
||||
if ofmt is not None:
|
||||
cli_args.extend(["--ofmt", ofmt])
|
||||
|
||||
# version
|
||||
if version is not None:
|
||||
cli_args.extend(["--version", version])
|
||||
|
||||
# revision
|
||||
if revision is not None:
|
||||
cli_args.extend(["--revision", str(revision)])
|
||||
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
if failed:
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
else:
|
||||
assert result.exit_code == ExitCode.OK
|
||||
if revision is not None:
|
||||
assert f"revision={revision}" in result.output
|
||||
if version is not None:
|
||||
assert (f"version='{version}'" if version == "latest" else f"version={version}") in result.output
|
3
tests/units/cli/exec/__init__.py
Normal file
|
@@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
30
tests/units/cli/exec/test__init__.py
Normal file
|
@@ -0,0 +1,30 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.exec
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
|
||||
def test_anta_exec(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta exec
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["exec"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta exec" in result.output
|
||||
|
||||
|
||||
def test_anta_exec_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta exec --help
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["exec", "--help"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta exec" in result.output
|
125
tests/units/cli/exec/test_commands.py
Normal file
|
@@ -0,0 +1,125 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.exec.commands
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.exec.commands import clear_counters, collect_tech_support, snapshot
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from click.testing import CliRunner
|
||||
|
||||
|
||||
def test_clear_counters_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test `anta exec clear-counters --help`
|
||||
"""
|
||||
result = click_runner.invoke(clear_counters, ["--help"])
|
||||
assert result.exit_code == 0
|
||||
assert "Usage" in result.output
|
||||
|
||||
|
||||
def test_snapshot_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test `anta exec snapshot --help`
|
||||
"""
|
||||
result = click_runner.invoke(snapshot, ["--help"])
|
||||
assert result.exit_code == 0
|
||||
assert "Usage" in result.output
|
||||
|
||||
|
||||
def test_collect_tech_support_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test `anta exec collect-tech-support --help`
|
||||
"""
|
||||
result = click_runner.invoke(collect_tech_support, ["--help"])
|
||||
assert result.exit_code == 0
|
||||
assert "Usage" in result.output
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"tags",
|
||||
[
|
||||
pytest.param(None, id="no tags"),
|
||||
pytest.param("leaf,spine", id="with tags"),
|
||||
],
|
||||
)
|
||||
def test_clear_counters(click_runner: CliRunner, tags: str | None) -> None:
|
||||
"""
|
||||
Test `anta exec clear-counters`
|
||||
"""
|
||||
cli_args = ["exec", "clear-counters"]
|
||||
if tags is not None:
|
||||
cli_args.extend(["--tags", tags])
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
|
||||
COMMAND_LIST_PATH_FILE = Path(__file__).parent.parent.parent.parent / "data" / "test_snapshot_commands.yml"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"commands_path, tags",
|
||||
[
|
||||
pytest.param(None, None, id="missing command list"),
|
||||
pytest.param(Path("/I/do/not/exist"), None, id="wrong path for command_list"),
|
||||
pytest.param(COMMAND_LIST_PATH_FILE, None, id="command-list only"),
|
||||
pytest.param(COMMAND_LIST_PATH_FILE, "leaf,spine", id="with tags"),
|
||||
],
|
||||
)
|
||||
def test_snapshot(tmp_path: Path, click_runner: CliRunner, commands_path: Path | None, tags: str | None) -> None:
|
||||
"""
|
||||
Test `anta exec snapshot`
|
||||
"""
|
||||
cli_args = ["exec", "snapshot", "--output", str(tmp_path)]
|
||||
# Need to mock datetime
|
||||
if commands_path is not None:
|
||||
cli_args.extend(["--commands-list", str(commands_path)])
|
||||
if tags is not None:
|
||||
cli_args.extend(["--tags", tags])
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
# Failure scenarios
|
||||
if commands_path is None:
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
return
|
||||
if not Path.exists(Path(commands_path)):
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
return
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"output, latest, configure, tags",
|
||||
[
|
||||
pytest.param(None, None, False, None, id="no params"),
|
||||
pytest.param("/tmp/dummy", None, False, None, id="with output"),
|
||||
pytest.param(None, 1, False, None, id="only last show tech"),
|
||||
pytest.param(None, None, True, None, id="configure"),
|
||||
pytest.param(None, None, False, "leaf,spine", id="with tags"),
|
||||
],
|
||||
)
|
||||
def test_collect_tech_support(click_runner: CliRunner, output: str | None, latest: str | None, configure: bool | None, tags: str | None) -> None:
|
||||
"""
|
||||
Test `anta exec collect-tech-support`
|
||||
"""
|
||||
cli_args = ["exec", "collect-tech-support"]
|
||||
if output is not None:
|
||||
cli_args.extend(["--output", output])
|
||||
if latest is not None:
|
||||
cli_args.extend(["--latest", latest])
|
||||
if configure is True:
|
||||
cli_args.extend(["--configure"])
|
||||
if tags is not None:
|
||||
cli_args.extend(["--tags", tags])
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
assert result.exit_code == ExitCode.OK
|
134
tests/units/cli/exec/test_utils.py
Normal file
|
@@ -0,0 +1,134 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.exec.utils
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from unittest.mock import call, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.cli.exec.utils import clear_counters_utils # , collect_commands, collect_scheduled_show_tech
|
||||
from anta.device import AntaDevice
|
||||
from anta.inventory import AntaInventory
|
||||
from anta.models import AntaCommand
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
# TODO complete test cases
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
"inventory_state, per_device_command_output, tags",
|
||||
[
|
||||
pytest.param(
|
||||
{"dummy": {"is_online": False}, "dummy2": {"is_online": False}, "dummy3": {"is_online": False}},
|
||||
{},
|
||||
None,
|
||||
id="no_connected_device",
|
||||
),
|
||||
pytest.param(
|
||||
{"dummy": {"is_online": True, "hw_model": "cEOSLab"}, "dummy2": {"is_online": True, "hw_model": "vEOS-lab"}, "dummy3": {"is_online": False}},
|
||||
{},
|
||||
None,
|
||||
id="cEOSLab and vEOS-lab devices",
|
||||
),
|
||||
pytest.param(
|
||||
{"dummy": {"is_online": True}, "dummy2": {"is_online": True}, "dummy3": {"is_online": False}},
|
||||
{"dummy": None}, # None means the command failed to collect
|
||||
None,
|
||||
id="device with error",
|
||||
),
|
||||
pytest.param(
|
||||
{"dummy": {"is_online": True}, "dummy2": {"is_online": True}, "dummy3": {"is_online": True}},
|
||||
{},
|
||||
["spine"],
|
||||
id="tags",
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_clear_counters_utils(
|
||||
caplog: LogCaptureFixture,
|
||||
test_inventory: AntaInventory,
|
||||
inventory_state: dict[str, Any],
|
||||
per_device_command_output: dict[str, Any],
|
||||
tags: list[str] | None,
|
||||
) -> None:
|
||||
"""
|
||||
Test anta.cli.exec.utils.clear_counters_utils
|
||||
"""
|
||||
|
||||
async def mock_connect_inventory() -> None:
|
||||
"""
|
||||
mocking connect_inventory coroutine
|
||||
"""
|
||||
for name, device in test_inventory.items():
|
||||
device.is_online = inventory_state[name].get("is_online", True)
|
||||
device.established = inventory_state[name].get("established", device.is_online)
|
||||
device.hw_model = inventory_state[name].get("hw_model", "dummy")
|
||||
|
||||
async def dummy_collect(self: AntaDevice, command: AntaCommand) -> None:
|
||||
"""
|
||||
mocking collect coroutine
|
||||
"""
|
||||
command.output = per_device_command_output.get(self.name, "")
|
||||
|
||||
# Need to patch the child device class
|
||||
with patch("anta.device.AsyncEOSDevice.collect", side_effect=dummy_collect, autospec=True) as mocked_collect, patch(
|
||||
"anta.inventory.AntaInventory.connect_inventory",
|
||||
side_effect=mock_connect_inventory,
|
||||
) as mocked_connect_inventory:
|
||||
print(mocked_collect)
|
||||
mocked_collect.side_effect = dummy_collect
|
||||
await clear_counters_utils(test_inventory, tags=tags)
|
||||
|
||||
mocked_connect_inventory.assert_awaited_once()
|
||||
devices_established = list(test_inventory.get_inventory(established_only=True, tags=tags).values())
|
||||
if devices_established:
|
||||
# Building the list of calls
|
||||
calls = []
|
||||
for device in devices_established:
|
||||
calls.append(
|
||||
call(
|
||||
device,
|
||||
**{
|
||||
"command": AntaCommand(
|
||||
command="clear counters",
|
||||
version="latest",
|
||||
revision=None,
|
||||
ofmt="json",
|
||||
output=per_device_command_output.get(device.name, ""),
|
||||
errors=[],
|
||||
)
|
||||
},
|
||||
)
|
||||
)
|
||||
if device.hw_model not in ["cEOSLab", "vEOS-lab"]:
|
||||
calls.append(
|
||||
call(
|
||||
device,
|
||||
**{
|
||||
"command": AntaCommand(
|
||||
command="clear hardware counter drop",
|
||||
version="latest",
|
||||
revision=None,
|
||||
ofmt="json",
|
||||
output=per_device_command_output.get(device.name, ""),
|
||||
)
|
||||
},
|
||||
)
|
||||
)
|
||||
mocked_collect.assert_has_awaits(calls)
|
||||
# Check error
|
||||
for key, value in per_device_command_output.items():
|
||||
if value is None:
|
||||
# means some command failed to collect
|
||||
assert "ERROR" in caplog.text
|
||||
assert f"Could not clear counters on device {key}: []" in caplog.text
|
||||
else:
|
||||
mocked_collect.assert_not_awaited()
|
3
tests/units/cli/get/__init__.py
Normal file
|
@@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
30
tests/units/cli/get/test__init__.py
Normal file
|
@@ -0,0 +1,30 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.get
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
|
||||
def test_anta_get(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta get
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["get"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta get" in result.output
|
||||
|
||||
|
||||
def test_anta_get_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta get --help
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["get", "--help"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta get" in result.output
|
204
tests/units/cli/get/test_commands.py
Normal file
|
@@ -0,0 +1,204 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.get.commands
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import filecmp
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from unittest.mock import ANY, patch
|
||||
|
||||
import pytest
|
||||
from cvprac.cvp_client import CvpClient
|
||||
from cvprac.cvp_client_errors import CvpApiError
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from click.testing import CliRunner
|
||||
|
||||
DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"cvp_container, cvp_connect_failure",
|
||||
[
|
||||
pytest.param(None, False, id="all devices"),
|
||||
pytest.param("custom_container", False, id="custom container"),
|
||||
pytest.param(None, True, id="cvp connect failure"),
|
||||
],
|
||||
)
|
||||
def test_from_cvp(
|
||||
tmp_path: Path,
|
||||
click_runner: CliRunner,
|
||||
cvp_container: str | None,
|
||||
cvp_connect_failure: bool,
|
||||
) -> None:
|
||||
"""
|
||||
Test `anta get from-cvp`
|
||||
|
||||
This test verifies that username and password are NOT mandatory to run this command
|
||||
"""
|
||||
output: Path = tmp_path / "output.yml"
|
||||
cli_args = ["get", "from-cvp", "--output", str(output), "--host", "42.42.42.42", "--username", "anta", "--password", "anta"]
|
||||
|
||||
if cvp_container is not None:
|
||||
cli_args.extend(["--container", cvp_container])
|
||||
|
||||
def mock_cvp_connect(self: CvpClient, *args: str, **kwargs: str) -> None:
|
||||
# pylint: disable=unused-argument
|
||||
if cvp_connect_failure:
|
||||
raise CvpApiError(msg="mocked CvpApiError")
|
||||
|
||||
# always get a token
|
||||
with patch("anta.cli.get.commands.get_cv_token", return_value="dummy_token"), patch(
|
||||
"cvprac.cvp_client.CvpClient.connect", autospec=True, side_effect=mock_cvp_connect
|
||||
) as mocked_cvp_connect, patch("cvprac.cvp_client.CvpApi.get_inventory", autospec=True, return_value=[]) as mocked_get_inventory, patch(
|
||||
"cvprac.cvp_client.CvpApi.get_devices_in_container", autospec=True, return_value=[]
|
||||
) as mocked_get_devices_in_container:
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
|
||||
if not cvp_connect_failure:
|
||||
assert output.exists()
|
||||
|
||||
mocked_cvp_connect.assert_called_once()
|
||||
if not cvp_connect_failure:
|
||||
assert "Connected to CloudVision" in result.output
|
||||
if cvp_container is not None:
|
||||
mocked_get_devices_in_container.assert_called_once_with(ANY, cvp_container)
|
||||
else:
|
||||
mocked_get_inventory.assert_called_once()
|
||||
assert result.exit_code == ExitCode.OK
|
||||
else:
|
||||
assert "Error connecting to CloudVision" in result.output
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"ansible_inventory, ansible_group, expected_exit, expected_log",
|
||||
[
|
||||
pytest.param("ansible_inventory.yml", None, ExitCode.OK, None, id="no group"),
|
||||
pytest.param("ansible_inventory.yml", "ATD_LEAFS", ExitCode.OK, None, id="group found"),
|
||||
pytest.param("ansible_inventory.yml", "DUMMY", ExitCode.USAGE_ERROR, "Group DUMMY not found in Ansible inventory", id="group not found"),
|
||||
pytest.param("empty_ansible_inventory.yml", None, ExitCode.USAGE_ERROR, "is empty", id="empty inventory"),
|
||||
],
|
||||
)
|
||||
def test_from_ansible(
|
||||
tmp_path: Path,
|
||||
click_runner: CliRunner,
|
||||
ansible_inventory: Path,
|
||||
ansible_group: str | None,
|
||||
expected_exit: int,
|
||||
expected_log: str | None,
|
||||
) -> None:
|
||||
"""
|
||||
Test `anta get from-ansible`
|
||||
|
||||
This test verifies:
|
||||
* the parsing of an ansible-inventory
|
||||
* the ansible_group functionality
|
||||
|
||||
The output path is ALWAYS set to a non-existing file.
|
||||
"""
|
||||
output: Path = tmp_path / "output.yml"
|
||||
ansible_inventory_path = DATA_DIR / ansible_inventory
|
||||
# Init cli_args
|
||||
cli_args = ["get", "from-ansible", "--output", str(output), "--ansible-inventory", str(ansible_inventory_path)]
|
||||
|
||||
# Set --ansible-group
|
||||
if ansible_group is not None:
|
||||
cli_args.extend(["--ansible-group", ansible_group])
|
||||
|
||||
result = click_runner.invoke(anta, cli_args)
|
||||
|
||||
assert result.exit_code == expected_exit
|
||||
|
||||
if expected_exit != ExitCode.OK:
|
||||
assert expected_log
|
||||
assert expected_log in result.output
|
||||
else:
|
||||
assert output.exists()
|
||||
# TODO check size of generated inventory to validate the group functionality!
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"env_set, overwrite, is_tty, prompt, expected_exit, expected_log",
|
||||
[
|
||||
pytest.param(True, False, True, "y", ExitCode.OK, "", id="no-overwrite-tty-init-prompt-yes"),
|
||||
pytest.param(True, False, True, "N", ExitCode.INTERNAL_ERROR, "Aborted", id="no-overwrite-tty-init-prompt-no"),
|
||||
pytest.param(
|
||||
True,
|
||||
False,
|
||||
False,
|
||||
None,
|
||||
ExitCode.USAGE_ERROR,
|
||||
"Conversion aborted since destination file is not empty (not running in interactive TTY)",
|
||||
id="no-overwrite-no-tty-init",
|
||||
),
|
||||
pytest.param(False, False, True, None, ExitCode.OK, "", id="no-overwrite-tty-no-init"),
|
||||
pytest.param(False, False, False, None, ExitCode.OK, "", id="no-overwrite-no-tty-no-init"),
|
||||
pytest.param(True, True, True, None, ExitCode.OK, "", id="overwrite-tty-init"),
|
||||
pytest.param(True, True, False, None, ExitCode.OK, "", id="overwrite-no-tty-init"),
|
||||
pytest.param(False, True, True, None, ExitCode.OK, "", id="overwrite-tty-no-init"),
|
||||
pytest.param(False, True, False, None, ExitCode.OK, "", id="overwrite-no-tty-no-init"),
|
||||
],
|
||||
)
|
||||
def test_from_ansible_overwrite(
|
||||
tmp_path: Path,
|
||||
click_runner: CliRunner,
|
||||
temp_env: dict[str, str | None],
|
||||
env_set: bool,
|
||||
overwrite: bool,
|
||||
is_tty: bool,
|
||||
prompt: str | None,
|
||||
expected_exit: int,
|
||||
expected_log: str | None,
|
||||
) -> None:
|
||||
# pylint: disable=too-many-arguments
|
||||
"""
|
||||
Test `anta get from-ansible` overwrite mechanism
|
||||
|
||||
The test uses a static ansible-inventory and output as these are tested in other functions
|
||||
|
||||
This test verifies:
|
||||
* that overwrite is working as expected with or without init data in the target file
|
||||
* that when the target file is not empty and a tty is present, the user is prompted for confirmation
|
||||
* Check the behavior when the prompt is filled
|
||||
|
||||
The initial content of the ANTA inventory is set using init_anta_inventory; if it is None, no inventory is set.
|
||||
|
||||
* With overwrite True, the expectation is that the from-ansible command succeeds
|
||||
* With no init (init_anta_inventory == None), the expectation is also that the command succeeds
|
||||
"""
|
||||
ansible_inventory_path = DATA_DIR / "ansible_inventory.yml"
|
||||
expected_anta_inventory_path = DATA_DIR / "expected_anta_inventory.yml"
|
||||
tmp_output = tmp_path / "output.yml"
|
||||
cli_args = ["get", "from-ansible", "--ansible-inventory", str(ansible_inventory_path)]
|
||||
|
||||
if env_set:
|
||||
tmp_inv = Path(str(temp_env["ANTA_INVENTORY"]))
|
||||
else:
|
||||
temp_env["ANTA_INVENTORY"] = None
|
||||
tmp_inv = tmp_output
|
||||
cli_args.extend(["--output", str(tmp_output)])
|
||||
|
||||
if overwrite:
|
||||
cli_args.append("--overwrite")
|
||||
|
||||
# Verify initial content is different
|
||||
if tmp_inv.exists():
|
||||
assert not filecmp.cmp(tmp_inv, expected_anta_inventory_path)
|
||||
|
||||
with patch("sys.stdin.isatty", return_value=is_tty):
|
||||
result = click_runner.invoke(anta, cli_args, env=temp_env, input=prompt)
|
||||
|
||||
assert result.exit_code == expected_exit
|
||||
if expected_exit == ExitCode.OK:
|
||||
assert filecmp.cmp(tmp_inv, expected_anta_inventory_path)
|
||||
elif expected_exit == ExitCode.INTERNAL_ERROR:
|
||||
assert expected_log
|
||||
assert expected_log in result.output
|
115
tests/units/cli/get/test_utils.py
Normal file
|
@@ -0,0 +1,115 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.get.utils
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import nullcontext
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from anta.cli.get.utils import create_inventory_from_ansible, create_inventory_from_cvp, get_cv_token
|
||||
from anta.inventory import AntaInventory
|
||||
|
||||
DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
|
||||
|
||||
|
||||
def test_get_cv_token() -> None:
|
||||
"""
|
||||
Test anta.cli.get.utils.get_cv_token
|
||||
"""
|
||||
ip = "42.42.42.42"
|
||||
username = "ant"
|
||||
password = "formica"
|
||||
|
||||
with patch("anta.cli.get.utils.requests.request") as patched_request:
|
||||
mocked_ret = MagicMock(autospec=requests.Response)
|
||||
mocked_ret.json.return_value = {"sessionId": "simple"}
|
||||
patched_request.return_value = mocked_ret
|
||||
res = get_cv_token(ip, username, password)
|
||||
patched_request.assert_called_once_with(
|
||||
"POST",
|
||||
"https://42.42.42.42/cvpservice/login/authenticate.do",
|
||||
headers={"Content-Type": "application/json", "Accept": "application/json"},
|
||||
data='{"userId": "ant", "password": "formica"}',
|
||||
verify=False,
|
||||
timeout=10,
|
||||
)
|
||||
assert res == "simple"
|
||||
|
||||
|
||||
# truncated inventories
|
||||
CVP_INVENTORY = [
|
||||
{
|
||||
"hostname": "device1",
|
||||
"containerName": "DC1",
|
||||
"ipAddress": "10.20.20.97",
|
||||
},
|
||||
{
|
||||
"hostname": "device2",
|
||||
"containerName": "DC2",
|
||||
"ipAddress": "10.20.20.98",
|
||||
},
|
||||
{
|
||||
"hostname": "device3",
|
||||
"containerName": "",
|
||||
"ipAddress": "10.20.20.99",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"inventory",
|
||||
[
|
||||
pytest.param(CVP_INVENTORY, id="some container"),
|
||||
pytest.param([], id="empty_inventory"),
|
||||
],
|
||||
)
|
||||
def test_create_inventory_from_cvp(tmp_path: Path, inventory: list[dict[str, Any]]) -> None:
|
||||
"""
|
||||
Test anta.cli.get.utils.create_inventory_from_cvp
|
||||
"""
|
||||
output = tmp_path / "output.yml"
|
||||
|
||||
create_inventory_from_cvp(inventory, output)
|
||||
|
||||
assert output.exists()
|
||||
# This validates the file structure ;)
|
||||
inv = AntaInventory.parse(str(output), "user", "pass")
|
||||
assert len(inv) == len(inventory)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"inventory_filename, ansible_group, expected_raise, expected_inv_length",
|
||||
[
|
||||
pytest.param("ansible_inventory.yml", None, nullcontext(), 7, id="no group"),
|
||||
pytest.param("ansible_inventory.yml", "ATD_LEAFS", nullcontext(), 4, id="group found"),
|
||||
pytest.param("ansible_inventory.yml", "DUMMY", pytest.raises(ValueError, match="Group DUMMY not found in Ansible inventory"), 0, id="group not found"),
|
||||
pytest.param("empty_ansible_inventory.yml", None, pytest.raises(ValueError, match="Ansible inventory .* is empty"), 0, id="empty inventory"),
|
||||
pytest.param("wrong_ansible_inventory.yml", None, pytest.raises(ValueError, match="Could not parse"), 0, id="os error inventory"),
|
||||
],
|
||||
)
|
||||
def test_create_inventory_from_ansible(tmp_path: Path, inventory_filename: Path, ansible_group: str | None, expected_raise: Any, expected_inv_length: int) -> None:
|
||||
"""
|
||||
Test anta.cli.get.utils.create_inventory_from_ansible
|
||||
"""
|
||||
target_file = tmp_path / "inventory.yml"
|
||||
inventory_file_path = DATA_DIR / inventory_filename
|
||||
|
||||
with expected_raise:
|
||||
if ansible_group:
|
||||
create_inventory_from_ansible(inventory_file_path, target_file, ansible_group)
|
||||
else:
|
||||
create_inventory_from_ansible(inventory_file_path, target_file)
|
||||
|
||||
assert target_file.exists()
|
||||
inv = AntaInventory().parse(str(target_file), "user", "pass")
|
||||
assert len(inv) == expected_inv_length
|
||||
if not isinstance(expected_raise, nullcontext):
|
||||
assert not target_file.exists()
|
3
tests/units/cli/nrfu/__init__.py
Normal file
|
@@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
111
tests/units/cli/nrfu/test__init__.py
Normal file
|
@@ -0,0 +1,111 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.cli.nrfu
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
from anta.cli import anta
|
||||
from anta.cli.utils import ExitCode
|
||||
from tests.lib.utils import default_anta_env
|
||||
|
||||
# TODO: write unit tests for ignore-status and ignore-error
|
||||
|
||||
|
||||
def test_anta_nrfu_help(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta nrfu --help
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["nrfu", "--help"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "Usage: anta nrfu" in result.output
|
||||
|
||||
|
||||
def test_anta_nrfu(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test anta nrfu, catalog is given via env
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["nrfu"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
assert "ANTA Inventory contains 3 devices" in result.output
|
||||
assert "Tests catalog contains 1 tests" in result.output
|
||||
|
||||
|
||||
def test_anta_password_required(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test that a password is required
|
||||
"""
|
||||
env = default_anta_env()
|
||||
env["ANTA_PASSWORD"] = None
|
||||
result = click_runner.invoke(anta, ["nrfu"], env=env)
|
||||
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
assert "EOS password needs to be provided by using either the '--password' option or the '--prompt' option." in result.output
|
||||
|
||||
|
||||
def test_anta_password(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test that password can be provided either via --password or --prompt
|
||||
"""
|
||||
env = default_anta_env()
|
||||
env["ANTA_PASSWORD"] = None
|
||||
result = click_runner.invoke(anta, ["nrfu", "--password", "secret"], env=env)
|
||||
assert result.exit_code == ExitCode.OK
|
||||
result = click_runner.invoke(anta, ["nrfu", "--prompt"], input="password\npassword\n", env=env)
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
|
||||
def test_anta_enable_password(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test that enable password can be provided either via --enable-password or --prompt
|
||||
"""
|
||||
# Both enable and enable-password
|
||||
result = click_runner.invoke(anta, ["nrfu", "--enable", "--enable-password", "secret"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
# enable and prompt y
|
||||
result = click_runner.invoke(anta, ["nrfu", "--enable", "--prompt"], input="y\npassword\npassword\n")
|
||||
assert "Is a password required to enter EOS privileged EXEC mode? [y/N]:" in result.output
|
||||
assert "Please enter a password to enter EOS privileged EXEC mode" in result.output
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
# enable and prompt N
|
||||
result = click_runner.invoke(anta, ["nrfu", "--enable", "--prompt"], input="N\n")
|
||||
assert "Is a password required to enter EOS privileged EXEC mode? [y/N]:" in result.output
|
||||
assert "Please enter a password to enter EOS privileged EXEC mode" not in result.output
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
# enable and enable-password and prompt (redundant)
|
||||
result = click_runner.invoke(anta, ["nrfu", "--enable", "--enable-password", "blah", "--prompt"], input="y\npassword\npassword\n")
|
||||
assert "Is a password required to enter EOS privileged EXEC mode? [y/N]:" not in result.output
|
||||
assert "Please enter a password to enter EOS privileged EXEC mode" not in result.output
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
# enable-password without enable
|
||||
result = click_runner.invoke(anta, ["nrfu", "--enable-password", "blah"])
|
||||
assert result.exit_code == ExitCode.USAGE_ERROR
|
||||
assert "Providing a password to access EOS Privileged EXEC mode requires '--enable' option." in result.output
|
||||
|
||||
|
||||
def test_anta_enable_alone(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test that enable can be provided without enable-password
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["nrfu", "--enable"])
|
||||
assert result.exit_code == ExitCode.OK
|
||||
|
||||
|
||||
def test_disable_cache(click_runner: CliRunner) -> None:
|
||||
"""
|
||||
Test that disable_cache is working on inventory
|
||||
"""
|
||||
result = click_runner.invoke(anta, ["nrfu", "--disable-cache"])
|
||||
stdout_lines = result.stdout.split("\n")
|
||||
# All caches should be disabled from the inventory
|
||||
for line in stdout_lines:
|
||||
if "disable_cache" in line:
|
||||
assert "True" in line
|
||||
assert result.exit_code == ExitCode.OK
|
97
tests/units/cli/nrfu/test_commands.py
Normal file
|
@@ -0,0 +1,97 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""
Tests for anta.cli.nrfu.commands
"""
from __future__ import annotations

import json
import re
from pathlib import Path

from click.testing import CliRunner

from anta.cli import anta
from anta.cli.utils import ExitCode

DATA_DIR: Path = Path(__file__).parent.parent.parent.parent.resolve() / "data"


def test_anta_nrfu_table_help(click_runner: CliRunner) -> None:
    """
    Test anta nrfu table --help
    """
    result = click_runner.invoke(anta, ["nrfu", "table", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta nrfu table" in result.output


def test_anta_nrfu_text_help(click_runner: CliRunner) -> None:
    """
    Test anta nrfu text --help
    """
    result = click_runner.invoke(anta, ["nrfu", "text", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta nrfu text" in result.output


def test_anta_nrfu_json_help(click_runner: CliRunner) -> None:
    """
    Test anta nrfu json --help
    """
    result = click_runner.invoke(anta, ["nrfu", "json", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta nrfu json" in result.output


def test_anta_nrfu_template_help(click_runner: CliRunner) -> None:
    """
    Test anta nrfu tpl-report --help
    """
    result = click_runner.invoke(anta, ["nrfu", "tpl-report", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta nrfu tpl-report" in result.output


def test_anta_nrfu_table(click_runner: CliRunner) -> None:
    """
    Test anta nrfu, catalog is given via env
    """
    result = click_runner.invoke(anta, ["nrfu", "table"])
    assert result.exit_code == ExitCode.OK
    assert "dummy │ VerifyEOSVersion │ success" in result.output


def test_anta_nrfu_text(click_runner: CliRunner) -> None:
    """
    Test anta nrfu, catalog is given via env
    """
    result = click_runner.invoke(anta, ["nrfu", "text"])
    assert result.exit_code == ExitCode.OK
    assert "dummy :: VerifyEOSVersion :: SUCCESS" in result.output


def test_anta_nrfu_json(click_runner: CliRunner) -> None:
    """
    Test anta nrfu, catalog is given via env
    """
    result = click_runner.invoke(anta, ["nrfu", "json"])
    assert result.exit_code == ExitCode.OK
    assert "JSON results of all tests" in result.output
    m = re.search(r"\[\n {[\s\S]+ }\n\]", result.output)
    assert m is not None
    result_list = json.loads(m.group())
    for r in result_list:
        if r["name"] == "dummy":
            assert r["test"] == "VerifyEOSVersion"
            assert r["result"] == "success"


def test_anta_nrfu_template(click_runner: CliRunner) -> None:
    """
    Test anta nrfu, catalog is given via env
    """
    result = click_runner.invoke(anta, ["nrfu", "tpl-report", "--template", str(DATA_DIR / "template.j2")])
    assert result.exit_code == ExitCode.OK
    assert "* VerifyEOSVersion is SUCCESS for dummy" in result.output
58
tests/units/cli/test__init__.py
Normal file
@@ -0,0 +1,58 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""
Tests for anta.cli.__init__
"""

from __future__ import annotations

from click.testing import CliRunner

from anta.cli import anta
from anta.cli.utils import ExitCode


def test_anta(click_runner: CliRunner) -> None:
    """
    Test anta main entrypoint
    """
    result = click_runner.invoke(anta)
    assert result.exit_code == ExitCode.OK
    assert "Usage" in result.output


def test_anta_help(click_runner: CliRunner) -> None:
    """
    Test anta --help
    """
    result = click_runner.invoke(anta, ["--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage" in result.output


def test_anta_exec_help(click_runner: CliRunner) -> None:
    """
    Test anta exec --help
    """
    result = click_runner.invoke(anta, ["exec", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta exec" in result.output


def test_anta_debug_help(click_runner: CliRunner) -> None:
    """
    Test anta debug --help
    """
    result = click_runner.invoke(anta, ["debug", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta debug" in result.output


def test_anta_get_help(click_runner: CliRunner) -> None:
    """
    Test anta get --help
    """
    result = click_runner.invoke(anta, ["get", "--help"])
    assert result.exit_code == ExitCode.OK
    assert "Usage: anta get" in result.output
3
tests/units/inventory/__init__.py
Normal file
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
81
tests/units/inventory/test_inventory.py
Normal file
@@ -0,0 +1,81 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""ANTA Inventory unit tests."""
from __future__ import annotations

import logging
from pathlib import Path
from typing import Any

import pytest
import yaml
from pydantic import ValidationError

from anta.inventory import AntaInventory
from anta.inventory.exceptions import InventoryIncorrectSchema, InventoryRootKeyError
from tests.data.json_data import ANTA_INVENTORY_TESTS_INVALID, ANTA_INVENTORY_TESTS_VALID
from tests.lib.utils import generate_test_ids_dict


class Test_AntaInventory:
    """Test AntaInventory class."""

    def create_inventory(self, content: str, tmp_path: Path) -> str:
        """Create fakefs inventory file."""
        tmp_inventory = tmp_path / "mydir/myfile"
        tmp_inventory.parent.mkdir()
        tmp_inventory.touch()
        tmp_inventory.write_text(yaml.dump(content, allow_unicode=True))
        return str(tmp_inventory)

    def check_parameter(self, parameter: str, test_definition: dict[Any, Any]) -> bool:
        """Check if parameter is configured in testbed."""
        return "parameters" in test_definition and parameter in test_definition["parameters"].keys()

    @pytest.mark.parametrize("test_definition", ANTA_INVENTORY_TESTS_VALID, ids=generate_test_ids_dict)
    def test_init_valid(self, test_definition: dict[str, Any], tmp_path: Path) -> None:
        """Test class constructor with valid data.

        Test structure:
        ---------------

        {
            'name': 'ValidInventory_with_host_only',
            'input': {"anta_inventory":{"hosts":[{"host":"192.168.0.17"},{"host":"192.168.0.2"}]}},
            'expected_result': 'valid',
            'parameters': {
                'ipaddress_in_scope': '192.168.0.17',
                'ipaddress_out_of_scope': '192.168.1.1',
            }
        }

        """
        inventory_file = self.create_inventory(content=test_definition["input"], tmp_path=tmp_path)
        try:
            AntaInventory.parse(filename=inventory_file, username="arista", password="arista123")
        except ValidationError as exc:
            logging.error("Exceptions is: %s", str(exc))
            assert False

    @pytest.mark.parametrize("test_definition", ANTA_INVENTORY_TESTS_INVALID, ids=generate_test_ids_dict)
    def test_init_invalid(self, test_definition: dict[str, Any], tmp_path: Path) -> None:
        """Test class constructor with invalid data.

        Test structure:
        ---------------

        {
            'name': 'ValidInventory_with_host_only',
            'input': {"anta_inventory":{"hosts":[{"host":"192.168.0.17"},{"host":"192.168.0.2"}]}},
            'expected_result': 'invalid',
            'parameters': {
                'ipaddress_in_scope': '192.168.0.17',
                'ipaddress_out_of_scope': '192.168.1.1',
            }
        }

        """
        inventory_file = self.create_inventory(content=test_definition["input"], tmp_path=tmp_path)
        with pytest.raises((InventoryIncorrectSchema, InventoryRootKeyError, ValidationError)):
            AntaInventory.parse(filename=inventory_file, username="arista", password="arista123")
393
tests/units/inventory/test_models.py
Normal file
@@ -0,0 +1,393 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""ANTA Inventory models unit tests."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from anta.device import AsyncEOSDevice
|
||||
from anta.inventory.models import AntaInventoryHost, AntaInventoryInput, AntaInventoryNetwork, AntaInventoryRange
|
||||
from tests.data.json_data import (
|
||||
INVENTORY_DEVICE_MODEL_INVALID,
|
||||
INVENTORY_DEVICE_MODEL_VALID,
|
||||
INVENTORY_MODEL_HOST_CACHE,
|
||||
INVENTORY_MODEL_HOST_INVALID,
|
||||
INVENTORY_MODEL_HOST_VALID,
|
||||
INVENTORY_MODEL_INVALID,
|
||||
INVENTORY_MODEL_NETWORK_CACHE,
|
||||
INVENTORY_MODEL_NETWORK_INVALID,
|
||||
INVENTORY_MODEL_NETWORK_VALID,
|
||||
INVENTORY_MODEL_RANGE_CACHE,
|
||||
INVENTORY_MODEL_RANGE_INVALID,
|
||||
INVENTORY_MODEL_RANGE_VALID,
|
||||
INVENTORY_MODEL_VALID,
|
||||
)
|
||||
from tests.lib.utils import generate_test_ids_dict
|
||||
|
||||
|
||||
class Test_InventoryUnitModels:
|
||||
"""Test components of AntaInventoryInput model."""
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_HOST_VALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_host_valid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test host input model.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'ValidIPv4_Host',
|
||||
'input': '1.1.1.1',
|
||||
'expected_result': 'valid'
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
host_inventory = AntaInventoryHost(host=test_definition["input"])
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
assert False
|
||||
else:
|
||||
assert test_definition["input"] == str(host_inventory.host)
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_HOST_INVALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_host_invalid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test host input model.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'ValidIPv4_Host',
|
||||
'input': '1.1.1.1/32',
|
||||
'expected_result': 'invalid'
|
||||
}
|
||||
|
||||
"""
|
||||
with pytest.raises(ValidationError):
|
||||
AntaInventoryHost(host=test_definition["input"])
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_HOST_CACHE, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_host_cache(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test host disable_cache.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'Cache',
|
||||
'input': {"host": '1.1.1.1', "disable_cache": True},
|
||||
'expected_result': True
|
||||
}
|
||||
|
||||
"""
|
||||
if "disable_cache" in test_definition["input"]:
|
||||
host_inventory = AntaInventoryHost(host=test_definition["input"]["host"], disable_cache=test_definition["input"]["disable_cache"])
|
||||
else:
|
||||
host_inventory = AntaInventoryHost(host=test_definition["input"]["host"])
|
||||
assert test_definition["expected_result"] == host_inventory.disable_cache
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_NETWORK_VALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_network_valid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test Network input model with valid data.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'ValidIPv4_Subnet',
|
||||
'input': '1.1.1.0/24',
|
||||
'expected_result': 'valid'
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
network_inventory = AntaInventoryNetwork(network=test_definition["input"])
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
assert False
|
||||
else:
|
||||
assert test_definition["input"] == str(network_inventory.network)
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_NETWORK_INVALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_network_invalid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test Network input model with invalid data.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'ValidIPv4_Subnet',
|
||||
'input': '1.1.1.0/16',
|
||||
'expected_result': 'invalid'
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
AntaInventoryNetwork(network=test_definition["input"])
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
else:
|
||||
assert False
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_NETWORK_CACHE, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_network_cache(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test network disable_cache
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'Cache',
|
||||
'input': {"network": '1.1.1.1/24', "disable_cache": True},
|
||||
'expected_result': True
|
||||
}
|
||||
|
||||
"""
|
||||
if "disable_cache" in test_definition["input"]:
|
||||
network_inventory = AntaInventoryNetwork(network=test_definition["input"]["network"], disable_cache=test_definition["input"]["disable_cache"])
|
||||
else:
|
||||
network_inventory = AntaInventoryNetwork(network=test_definition["input"]["network"])
|
||||
assert test_definition["expected_result"] == network_inventory.disable_cache
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_RANGE_VALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_range_valid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test range input model.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'ValidIPv4_Range',
|
||||
'input': {'start':'10.1.0.1', 'end':'10.1.0.10'},
|
||||
'expected_result': 'valid'
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
range_inventory = AntaInventoryRange(
|
||||
start=test_definition["input"]["start"],
|
||||
end=test_definition["input"]["end"],
|
||||
)
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
assert False
|
||||
else:
|
||||
assert test_definition["input"]["start"] == str(range_inventory.start)
|
||||
assert test_definition["input"]["end"] == str(range_inventory.end)
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_RANGE_INVALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_range_invalid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test range input model.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'ValidIPv4_Range',
|
||||
'input': {'start':'10.1.0.1', 'end':'10.1.0.10/32'},
|
||||
'expected_result': 'invalid'
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
AntaInventoryRange(
|
||||
start=test_definition["input"]["start"],
|
||||
end=test_definition["input"]["end"],
|
||||
)
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
else:
|
||||
assert False
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_MODEL_RANGE_CACHE, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_range_cache(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test range disable_cache
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
'name': 'Cache',
|
||||
'input': {"start": '1.1.1.1', "end": "1.1.1.10", "disable_cache": True},
|
||||
'expected_result': True
|
||||
}
|
||||
|
||||
"""
|
||||
if "disable_cache" in test_definition["input"]:
|
||||
range_inventory = AntaInventoryRange(
|
||||
start=test_definition["input"]["start"], end=test_definition["input"]["end"], disable_cache=test_definition["input"]["disable_cache"]
|
||||
)
|
||||
else:
|
||||
range_inventory = AntaInventoryRange(start=test_definition["input"]["start"], end=test_definition["input"]["end"])
|
||||
assert test_definition["expected_result"] == range_inventory.disable_cache
|
||||
|
||||
|
||||
class Test_AntaInventoryInputModel:
|
||||
"""Unit test of AntaInventoryInput model."""
|
||||
|
||||
def test_inventory_input_structure(self) -> None:
|
||||
"""Test inventory keys are those expected."""
|
||||
|
||||
inventory = AntaInventoryInput()
|
||||
logging.info("Inventory keys are: %s", str(inventory.model_dump().keys()))
|
||||
assert all(elem in inventory.model_dump().keys() for elem in ["hosts", "networks", "ranges"])
|
||||
|
||||
@pytest.mark.parametrize("inventory_def", INVENTORY_MODEL_VALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_intput_valid(self, inventory_def: dict[str, Any]) -> None:
|
||||
"""Test loading valid data to inventory class.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
"name": "Valid_Host_Only",
|
||||
"input": {
|
||||
"hosts": [
|
||||
{
|
||||
"host": "192.168.0.17"
|
||||
},
|
||||
{
|
||||
"host": "192.168.0.2"
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_result": "valid"
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
inventory = AntaInventoryInput(**inventory_def["input"])
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
assert False
|
||||
else:
|
||||
logging.info("Checking if all root keys are correctly lodaded")
|
||||
assert all(elem in inventory.model_dump().keys() for elem in inventory_def["input"].keys())
|
||||
|
||||
@pytest.mark.parametrize("inventory_def", INVENTORY_MODEL_INVALID, ids=generate_test_ids_dict)
|
||||
def test_anta_inventory_intput_invalid(self, inventory_def: dict[str, Any]) -> None:
|
||||
"""Test loading invalid data to inventory class.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
"name": "Valid_Host_Only",
|
||||
"input": {
|
||||
"hosts": [
|
||||
{
|
||||
"host": "192.168.0.17"
|
||||
},
|
||||
{
|
||||
"host": "192.168.0.2/32"
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_result": "invalid"
|
||||
}
|
||||
|
||||
"""
|
||||
try:
|
||||
if "hosts" in inventory_def["input"].keys():
|
||||
logging.info(
|
||||
"Loading %s into AntaInventoryInput hosts section",
|
||||
str(inventory_def["input"]["hosts"]),
|
||||
)
|
||||
AntaInventoryInput(hosts=inventory_def["input"]["hosts"])
|
||||
if "networks" in inventory_def["input"].keys():
|
||||
logging.info(
|
||||
"Loading %s into AntaInventoryInput networks section",
|
||||
str(inventory_def["input"]["networks"]),
|
||||
)
|
||||
AntaInventoryInput(networks=inventory_def["input"]["networks"])
|
||||
if "ranges" in inventory_def["input"].keys():
|
||||
logging.info(
|
||||
"Loading %s into AntaInventoryInput ranges section",
|
||||
str(inventory_def["input"]["ranges"]),
|
||||
)
|
||||
AntaInventoryInput(ranges=inventory_def["input"]["ranges"])
|
||||
except ValidationError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
else:
|
||||
assert False
|
||||
|
||||
|
||||
class Test_InventoryDeviceModel:
|
||||
"""Unit test of InventoryDevice model."""
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_DEVICE_MODEL_VALID, ids=generate_test_ids_dict)
|
||||
def test_inventory_device_valid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test loading valid data to InventoryDevice class.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
"name": "Valid_Inventory",
|
||||
"input": [
|
||||
{
|
||||
'host': '1.1.1.1',
|
||||
'username': 'arista',
|
||||
'password': 'arista123!'
|
||||
},
|
||||
{
|
||||
'host': '1.1.1.1',
|
||||
'username': 'arista',
|
||||
'password': 'arista123!'
|
||||
}
|
||||
],
|
||||
"expected_result": "valid"
|
||||
}
|
||||
|
||||
"""
|
||||
if test_definition["expected_result"] == "invalid":
|
||||
pytest.skip("Not concerned by the test")
|
||||
|
||||
for entity in test_definition["input"]:
|
||||
try:
|
||||
AsyncEOSDevice(**entity)
|
||||
except TypeError as exc:
|
||||
logging.warning("Error: %s", str(exc))
|
||||
assert False
|
||||
|
||||
@pytest.mark.parametrize("test_definition", INVENTORY_DEVICE_MODEL_INVALID, ids=generate_test_ids_dict)
|
||||
def test_inventory_device_invalid(self, test_definition: dict[str, Any]) -> None:
|
||||
"""Test loading invalid data to InventoryDevice class.
|
||||
|
||||
Test structure:
|
||||
---------------
|
||||
|
||||
{
|
||||
"name": "Valid_Inventory",
|
||||
"input": [
|
||||
{
|
||||
'host': '1.1.1.1',
|
||||
'username': 'arista',
|
||||
'password': 'arista123!'
|
||||
},
|
||||
{
|
||||
'host': '1.1.1.1',
|
||||
'username': 'arista',
|
||||
'password': 'arista123!'
|
||||
}
|
||||
],
|
||||
"expected_result": "valid"
|
||||
}
|
||||
|
||||
"""
|
||||
if test_definition["expected_result"] == "valid":
|
||||
pytest.skip("Not concerned by the test")
|
||||
|
||||
for entity in test_definition["input"]:
|
||||
try:
|
||||
AsyncEOSDevice(**entity)
|
||||
except TypeError as exc:
|
||||
logging.info("Error: %s", str(exc))
|
||||
else:
|
||||
assert False
|
3
tests/units/reporter/__init__.py
Normal file
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
193
tests/units/reporter/test__init__.py
Normal file
@@ -0,0 +1,193 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Test anta.reporter.__init__.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Callable
|
||||
|
||||
import pytest
|
||||
from rich.table import Table
|
||||
|
||||
from anta import RICH_COLOR_PALETTE
|
||||
from anta.custom_types import TestStatus
|
||||
from anta.reporter import ReportTable
|
||||
from anta.result_manager import ResultManager
|
||||
|
||||
|
||||
class Test_ReportTable:
|
||||
"""
|
||||
Test ReportTable class
|
||||
"""
|
||||
|
||||
# not testing __init__ as nothing is going on there
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"usr_list, delimiter, expected_output",
|
||||
[
|
||||
pytest.param([], None, "", id="empty list no delimiter"),
|
||||
pytest.param([], "*", "", id="empty list with delimiter"),
|
||||
pytest.param(["elem1"], None, "elem1", id="one elem list no delimiter"),
|
||||
pytest.param(["elem1"], "*", "* elem1", id="one elem list with delimiter"),
|
||||
pytest.param(["elem1", "elem2"], None, "elem1\nelem2", id="two elems list no delimiter"),
|
||||
pytest.param(["elem1", "elem2"], "&", "& elem1\n& elem2", id="two elems list with delimiter"),
|
||||
],
|
||||
)
|
||||
def test__split_list_to_txt_list(self, usr_list: list[str], delimiter: str | None, expected_output: str) -> None:
|
||||
"""
|
||||
test _split_list_to_txt_list
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
report = ReportTable()
|
||||
assert report._split_list_to_txt_list(usr_list, delimiter) == expected_output
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"headers",
|
||||
[
|
||||
pytest.param([], id="empty list"),
|
||||
pytest.param(["elem1"], id="one elem list"),
|
||||
pytest.param(["elem1", "elem2"], id="two elemst"),
|
||||
],
|
||||
)
|
||||
def test__build_headers(self, headers: list[str]) -> None:
|
||||
"""
|
||||
test _build_headers
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
report = ReportTable()
|
||||
table = Table()
|
||||
table_column_before = len(table.columns)
|
||||
report._build_headers(headers, table)
|
||||
assert len(table.columns) == table_column_before + len(headers)
|
||||
if len(table.columns) > 0:
|
||||
assert table.columns[table_column_before].style == RICH_COLOR_PALETTE.HEADER
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"status, expected_status",
|
||||
[
|
||||
pytest.param("unknown", "unknown", id="unknown status"),
|
||||
pytest.param("unset", "[grey74]unset", id="unset status"),
|
||||
pytest.param("skipped", "[bold orange4]skipped", id="skipped status"),
|
||||
pytest.param("failure", "[bold red]failure", id="failure status"),
|
||||
pytest.param("error", "[indian_red]error", id="error status"),
|
||||
pytest.param("success", "[green4]success", id="success status"),
|
||||
],
|
||||
)
|
||||
def test__color_result(self, status: TestStatus, expected_status: str) -> None:
|
||||
"""
|
||||
test _color_result
|
||||
"""
|
||||
# pylint: disable=protected-access
|
||||
report = ReportTable()
|
||||
assert report._color_result(status) == expected_status
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"host, testcase, title, number_of_tests, expected_length",
|
||||
[
|
||||
pytest.param(None, None, None, 5, 5, id="all results"),
|
||||
pytest.param("host1", None, None, 5, 0, id="result for host1 when no host1 test"),
|
||||
pytest.param(None, "VerifyTest3", None, 5, 1, id="result for test VerifyTest3"),
|
||||
pytest.param(None, None, "Custom title", 5, 5, id="Change table title"),
|
||||
],
|
||||
)
|
||||
def test_report_all(
|
||||
self,
|
||||
result_manager_factory: Callable[[int], ResultManager],
|
||||
host: str | None,
|
||||
testcase: str | None,
|
||||
title: str | None,
|
||||
number_of_tests: int,
|
||||
expected_length: int,
|
||||
) -> None:
|
||||
"""
|
||||
test report_all
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
rm = result_manager_factory(number_of_tests)
|
||||
|
||||
report = ReportTable()
|
||||
kwargs = {"host": host, "testcase": testcase, "title": title}
|
||||
kwargs = {k: v for k, v in kwargs.items() if v is not None}
|
||||
res = report.report_all(rm, **kwargs) # type: ignore[arg-type]
|
||||
|
||||
assert isinstance(res, Table)
|
||||
assert res.title == (title or "All tests results")
|
||||
assert res.row_count == expected_length
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"testcase, title, number_of_tests, expected_length",
|
||||
[
|
||||
pytest.param(None, None, 5, 5, id="all results"),
|
||||
pytest.param("VerifyTest3", None, 5, 1, id="result for test VerifyTest3"),
|
||||
pytest.param(None, "Custom title", 5, 5, id="Change table title"),
|
||||
],
|
||||
)
|
||||
def test_report_summary_tests(
|
||||
self,
|
||||
result_manager_factory: Callable[[int], ResultManager],
|
||||
testcase: str | None,
|
||||
title: str | None,
|
||||
number_of_tests: int,
|
||||
expected_length: int,
|
||||
) -> None:
|
||||
"""
|
||||
test report_summary_tests
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
# TODO refactor this later... this is injecting double test results by modifying the device name
|
||||
# should be a fixture
|
||||
rm = result_manager_factory(number_of_tests)
|
||||
new_results = [result.model_copy() for result in rm.get_results()]
|
||||
for result in new_results:
|
||||
result.name = "test_device"
|
||||
result.result = "failure"
|
||||
rm.add_test_results(new_results)
|
||||
|
||||
report = ReportTable()
|
||||
kwargs = {"testcase": testcase, "title": title}
|
||||
kwargs = {k: v for k, v in kwargs.items() if v is not None}
|
||||
res = report.report_summary_tests(rm, **kwargs) # type: ignore[arg-type]
|
||||
|
||||
assert isinstance(res, Table)
|
||||
assert res.title == (title or "Summary per test case")
|
||||
assert res.row_count == expected_length
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"host, title, number_of_tests, expected_length",
|
||||
[
|
||||
pytest.param(None, None, 5, 2, id="all results"),
|
||||
pytest.param("host1", None, 5, 1, id="result for host host1"),
|
||||
pytest.param(None, "Custom title", 5, 2, id="Change table title"),
|
||||
],
|
||||
)
|
||||
def test_report_summary_hosts(
|
||||
self,
|
||||
result_manager_factory: Callable[[int], ResultManager],
|
||||
host: str | None,
|
||||
title: str | None,
|
||||
number_of_tests: int,
|
||||
expected_length: int,
|
||||
) -> None:
|
||||
"""
|
||||
test report_summary_hosts
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
# TODO refactor this later... this is injecting double test results by modifying the device name
|
||||
# should be a fixture
|
||||
rm = result_manager_factory(number_of_tests)
|
||||
new_results = [result.model_copy() for result in rm.get_results()]
|
||||
for result in new_results:
|
||||
result.name = host or "test_device"
|
||||
result.result = "failure"
|
||||
rm.add_test_results(new_results)
|
||||
|
||||
report = ReportTable()
|
||||
kwargs = {"host": host, "title": title}
|
||||
kwargs = {k: v for k, v in kwargs.items() if v is not None}
|
||||
res = report.report_summary_hosts(rm, **kwargs) # type: ignore[arg-type]
|
||||
|
||||
assert isinstance(res, Table)
|
||||
assert res.title == (title or "Summary per host")
|
||||
assert res.row_count == expected_length
|
3
tests/units/result_manager/__init__.py
Normal file
@@ -0,0 +1,3 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
204
tests/units/result_manager/test__init__.py
Normal file
@@ -0,0 +1,204 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Test anta.result_manager.__init__.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from contextlib import nullcontext
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.custom_types import TestStatus
|
||||
from anta.result_manager import ResultManager
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from anta.result_manager.models import TestResult
|
||||
|
||||
|
||||
class Test_ResultManager:
|
||||
"""
|
||||
Test ResultManager class
|
||||
"""
|
||||
|
||||
# not testing __init__ as nothing is going on there
|
||||
|
||||
def test__len__(self, list_result_factory: Callable[[int], list[TestResult]]) -> None:
|
||||
"""
|
||||
test __len__
|
||||
"""
|
||||
list_result = list_result_factory(3)
|
||||
result_manager = ResultManager()
|
||||
assert len(result_manager) == 0
|
||||
for i in range(3):
|
||||
result_manager.add_test_result(list_result[i])
|
||||
assert len(result_manager) == i + 1
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"starting_status, test_status, expected_status, expected_raise",
|
||||
[
|
||||
pytest.param("unset", "unset", "unset", nullcontext(), id="unset->unset"),
|
||||
pytest.param("unset", "success", "success", nullcontext(), id="unset->success"),
|
||||
pytest.param("unset", "error", "unset", nullcontext(), id="set error"),
|
||||
pytest.param("skipped", "skipped", "skipped", nullcontext(), id="skipped->skipped"),
|
||||
pytest.param("skipped", "unset", "skipped", nullcontext(), id="skipped, add unset"),
|
||||
pytest.param("skipped", "success", "success", nullcontext(), id="skipped, add success"),
|
||||
pytest.param("skipped", "failure", "failure", nullcontext(), id="skipped, add failure"),
|
||||
pytest.param("success", "unset", "success", nullcontext(), id="success, add unset"),
|
||||
pytest.param("success", "skipped", "success", nullcontext(), id="success, add skipped"),
|
||||
pytest.param("success", "success", "success", nullcontext(), id="success->success"),
|
||||
pytest.param("success", "failure", "failure", nullcontext(), id="success->failure"),
|
||||
pytest.param("failure", "unset", "failure", nullcontext(), id="failure->failure"),
|
||||
pytest.param("failure", "skipped", "failure", nullcontext(), id="failure, add unset"),
|
||||
pytest.param("failure", "success", "failure", nullcontext(), id="failure, add skipped"),
|
||||
pytest.param("failure", "failure", "failure", nullcontext(), id="failure, add success"),
|
||||
pytest.param("unset", "unknown", None, pytest.raises(ValueError), id="wrong status"),
|
||||
],
|
||||
)
|
||||
def test__update_status(self, starting_status: TestStatus, test_status: TestStatus, expected_status: str, expected_raise: Any) -> None:
|
||||
"""
|
||||
Test ResultManager._update_status
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
result_manager.status = starting_status
|
||||
assert result_manager.error_status is False
|
||||
|
||||
with expected_raise:
|
||||
result_manager._update_status(test_status) # pylint: disable=protected-access
|
||||
if test_status == "error":
|
||||
assert result_manager.error_status is True
|
||||
else:
|
||||
assert result_manager.status == expected_status
|
||||
|
||||
def test_add_test_result(self, test_result_factory: Callable[[int], TestResult]) -> None:
|
||||
"""
|
||||
Test ResultManager.add_test_result
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
assert result_manager.status == "unset"
|
||||
assert result_manager.error_status is False
|
||||
assert len(result_manager) == 0
|
||||
|
||||
# Add one unset test
|
||||
unset_test = test_result_factory(0)
|
||||
unset_test.result = "unset"
|
||||
result_manager.add_test_result(unset_test)
|
||||
assert result_manager.status == "unset"
|
||||
assert result_manager.error_status is False
|
||||
assert len(result_manager) == 1
|
||||
|
||||
# Add one success test
|
||||
success_test = test_result_factory(1)
|
||||
success_test.result = "success"
|
||||
result_manager.add_test_result(success_test)
|
||||
assert result_manager.status == "success"
|
||||
assert result_manager.error_status is False
|
||||
assert len(result_manager) == 2
|
||||
|
||||
# Add one error test
|
||||
error_test = test_result_factory(1)
|
||||
error_test.result = "error"
|
||||
result_manager.add_test_result(error_test)
|
||||
assert result_manager.status == "success"
|
||||
assert result_manager.error_status is True
|
||||
assert len(result_manager) == 3
|
||||
|
||||
# Add one failure test
|
||||
failure_test = test_result_factory(1)
|
||||
failure_test.result = "failure"
|
||||
result_manager.add_test_result(failure_test)
|
||||
assert result_manager.status == "failure"
|
||||
assert result_manager.error_status is True
|
||||
assert len(result_manager) == 4
|
||||
|
||||
def test_add_test_results(self, list_result_factory: Callable[[int], list[TestResult]]) -> None:
|
||||
"""
|
||||
Test ResultManager.add_test_results
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
assert result_manager.status == "unset"
|
||||
assert result_manager.error_status is False
|
||||
assert len(result_manager) == 0
|
||||
|
||||
# Add three success tests
|
||||
success_list = list_result_factory(3)
|
||||
for test in success_list:
|
||||
test.result = "success"
|
||||
result_manager.add_test_results(success_list)
|
||||
assert result_manager.status == "success"
|
||||
assert result_manager.error_status is False
|
||||
assert len(result_manager) == 3
|
||||
|
||||
# Add one error test and one failure
|
||||
error_failure_list = list_result_factory(2)
|
||||
error_failure_list[0].result = "error"
|
||||
error_failure_list[1].result = "failure"
|
||||
result_manager.add_test_results(error_failure_list)
|
||||
assert result_manager.status == "failure"
|
||||
assert result_manager.error_status is True
|
||||
assert len(result_manager) == 5
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"status, error_status, ignore_error, expected_status",
|
||||
[
|
||||
pytest.param("success", False, True, "success", id="no error"),
|
||||
pytest.param("success", True, True, "success", id="error, ignore error"),
|
||||
pytest.param("success", True, False, "error", id="error, do not ignore error"),
|
||||
],
|
||||
)
|
||||
def test_get_status(self, status: TestStatus, error_status: bool, ignore_error: bool, expected_status: str) -> None:
|
||||
"""
|
||||
test ResultManager.get_status
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
result_manager.status = status
|
||||
result_manager.error_status = error_status
|
||||
|
||||
assert result_manager.get_status(ignore_error=ignore_error) == expected_status
|
||||
|
||||
def test_get_results(self, list_result_factory: Callable[[int], list[TestResult]]) -> None:
|
||||
"""
|
||||
test ResultManager.get_results
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
|
||||
success_list = list_result_factory(3)
|
||||
for test in success_list:
|
||||
test.result = "success"
|
||||
result_manager.add_test_results(success_list)
|
||||
|
||||
res = result_manager.get_results()
|
||||
assert isinstance(res, list)
|
||||
|
||||
def test_get_json_results(self, list_result_factory: Callable[[int], list[TestResult]]) -> None:
|
||||
"""
|
||||
test ResultManager.get_json_results
|
||||
"""
|
||||
result_manager = ResultManager()
|
||||
|
||||
success_list = list_result_factory(3)
|
||||
for test in success_list:
|
||||
test.result = "success"
|
||||
result_manager.add_test_results(success_list)
|
||||
|
||||
json_res = result_manager.get_json_results()
|
||||
assert isinstance(json_res, str)
|
||||
|
||||
# Verifies it can be deserialized back to a list of dict with the correct values types
|
||||
res = json.loads(json_res)
|
||||
for test in res:
|
||||
assert isinstance(test, dict)
|
||||
assert isinstance(test.get("test"), str)
|
||||
assert isinstance(test.get("categories"), list)
|
||||
assert isinstance(test.get("description"), str)
|
||||
assert test.get("custom_field") is None
|
||||
assert test.get("result") == "success"
|
||||
|
||||
# TODO
|
||||
# get_result_by_test
|
||||
# get_result_by_host
|
||||
# get_testcases
|
||||
# get_hosts
|
57
tests/units/result_manager/test_models.py
Normal file
@@ -0,0 +1,57 @@
# Copyright (c) 2023-2024 Arista Networks, Inc.
# Use of this source code is governed by the Apache License 2.0
# that can be found in the LICENSE file.
"""ANTA Result Manager models unit tests."""
from __future__ import annotations

from typing import Any, Callable

import pytest

# Import as Result to avoid pytest collection
from anta.result_manager.models import TestResult as Result
from tests.data.json_data import TEST_RESULT_SET_STATUS
from tests.lib.fixture import DEVICE_NAME
from tests.lib.utils import generate_test_ids_dict


class TestTestResultModels:
    """Test components of anta.result_manager.models."""

    @pytest.mark.parametrize("data", TEST_RESULT_SET_STATUS, ids=generate_test_ids_dict)
    def test__is_status_foo(self, test_result_factory: Callable[[int], Result], data: dict[str, Any]) -> None:
        """Test TestResult.is_foo methods."""
        testresult = test_result_factory(1)
        assert testresult.result == "unset"
        assert len(testresult.messages) == 0
        if data["target"] == "success":
            testresult.is_success(data["message"])
            assert testresult.result == data["target"]
            assert data["message"] in testresult.messages
        if data["target"] == "failure":
            testresult.is_failure(data["message"])
            assert testresult.result == data["target"]
            assert data["message"] in testresult.messages
        if data["target"] == "error":
            testresult.is_error(data["message"])
            assert testresult.result == data["target"]
            assert data["message"] in testresult.messages
        if data["target"] == "skipped":
            testresult.is_skipped(data["message"])
            assert testresult.result == data["target"]
            assert data["message"] in testresult.messages
        # no helper for unset, testing _set_status
        if data["target"] == "unset":
            testresult._set_status("unset", data["message"])  # pylint: disable=W0212
            assert testresult.result == data["target"]
            assert data["message"] in testresult.messages

    @pytest.mark.parametrize("data", TEST_RESULT_SET_STATUS, ids=generate_test_ids_dict)
    def test____str__(self, test_result_factory: Callable[[int], Result], data: dict[str, Any]) -> None:
        """Test TestResult.__str__."""
        testresult = test_result_factory(1)
        assert testresult.result == "unset"
        assert len(testresult.messages) == 0
        testresult._set_status(data["target"], data["message"])  # pylint: disable=W0212
        assert testresult.result == data["target"]
        assert str(testresult) == f"Test 'VerifyTest1' (on '{DEVICE_NAME}'): Result '{data['target']}'\nMessages: {[data['message']]}"
311
tests/units/test_catalog.py
Normal file
@@ -0,0 +1,311 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
test anta.catalog.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
from yaml import safe_load
|
||||
|
||||
from anta.catalog import AntaCatalog, AntaTestDefinition
|
||||
from anta.models import AntaTest
|
||||
from anta.tests.interfaces import VerifyL3MTU
|
||||
from anta.tests.mlag import VerifyMlagStatus
|
||||
from anta.tests.software import VerifyEOSVersion
|
||||
from anta.tests.system import (
|
||||
VerifyAgentLogs,
|
||||
VerifyCoredump,
|
||||
VerifyCPUUtilization,
|
||||
VerifyFileSystemUtilization,
|
||||
VerifyMemoryUtilization,
|
||||
VerifyNTP,
|
||||
VerifyReloadCause,
|
||||
VerifyUptime,
|
||||
)
|
||||
from tests.lib.utils import generate_test_ids_list
|
||||
from tests.units.test_models import FakeTestWithInput
|
||||
|
||||
# Test classes used as expected values
|
||||
|
||||
DATA_DIR: Path = Path(__file__).parent.parent.resolve() / "data"
|
||||
|
||||
INIT_CATALOG_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "test_catalog",
|
||||
"filename": "test_catalog.yml",
|
||||
"tests": [
|
||||
(VerifyEOSVersion, VerifyEOSVersion.Input(versions=["4.31.1F"])),
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "test_catalog_with_tags",
|
||||
"filename": "test_catalog_with_tags.yml",
|
||||
"tests": [
|
||||
(
|
||||
VerifyUptime,
|
||||
VerifyUptime.Input(
|
||||
minimum=10,
|
||||
filters=VerifyUptime.Input.Filters(tags=["fabric"]),
|
||||
),
|
||||
),
|
||||
(VerifyReloadCause, {"filters": {"tags": ["leaf", "spine"]}}),
|
||||
(VerifyCoredump, VerifyCoredump.Input()),
|
||||
(VerifyAgentLogs, AntaTest.Input()),
|
||||
(VerifyCPUUtilization, VerifyCPUUtilization.Input(filters=VerifyCPUUtilization.Input.Filters(tags=["leaf"]))),
|
||||
(VerifyMemoryUtilization, VerifyMemoryUtilization.Input(filters=VerifyMemoryUtilization.Input.Filters(tags=["testdevice"]))),
|
||||
(VerifyFileSystemUtilization, None),
|
||||
(VerifyNTP, {}),
|
||||
(VerifyMlagStatus, None),
|
||||
(VerifyL3MTU, {"mtu": 1500, "filters": {"tags": ["demo"]}}),
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "test_empty_catalog",
|
||||
"filename": "test_empty_catalog.yml",
|
||||
"tests": [],
|
||||
},
|
||||
]
|
||||
CATALOG_PARSE_FAIL_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "undefined_tests",
|
||||
"filename": "test_catalog_with_undefined_tests.yml",
|
||||
"error": "FakeTest is not defined in Python module anta.tests.software",
|
||||
},
|
||||
{
|
||||
"name": "undefined_module",
|
||||
"filename": "test_catalog_with_undefined_module.yml",
|
||||
"error": "Module named anta.tests.undefined cannot be imported",
|
||||
},
|
||||
{
|
||||
"name": "undefined_module",
|
||||
"filename": "test_catalog_with_undefined_module.yml",
|
||||
"error": "Module named anta.tests.undefined cannot be imported",
|
||||
},
|
||||
{
|
||||
"name": "syntax_error",
|
||||
"filename": "test_catalog_with_syntax_error_module.yml",
|
||||
"error": "Value error, Module named tests.data.syntax_error cannot be imported. Verify that the module exists and there is no Python syntax issues.",
|
||||
},
|
||||
{
|
||||
"name": "undefined_module_nested",
|
||||
"filename": "test_catalog_with_undefined_module_nested.yml",
|
||||
"error": "Module named undefined from package anta.tests cannot be imported",
|
||||
},
|
||||
{
|
||||
"name": "not_a_list",
|
||||
"filename": "test_catalog_not_a_list.yml",
|
||||
"error": "Value error, Syntax error when parsing: True\nIt must be a list of ANTA tests. Check the test catalog.",
|
||||
},
|
||||
{
|
||||
"name": "test_definition_not_a_dict",
|
||||
"filename": "test_catalog_test_definition_not_a_dict.yml",
|
||||
"error": "Value error, Syntax error when parsing: VerifyEOSVersion\nIt must be a dictionary. Check the test catalog.",
|
||||
},
|
||||
{
|
||||
"name": "test_definition_multiple_dicts",
|
||||
"filename": "test_catalog_test_definition_multiple_dicts.yml",
|
||||
"error": "Value error, Syntax error when parsing: {'VerifyEOSVersion': {'versions': ['4.25.4M', '4.26.1F']}, "
|
||||
"'VerifyTerminAttrVersion': {'versions': ['4.25.4M']}}\nIt must be a dictionary with a single entry. Check the indentation in the test catalog.",
|
||||
},
|
||||
{"name": "wrong_type_after_parsing", "filename": "test_catalog_wrong_type.yml", "error": "must be a dict, got str"},
|
||||
]
|
||||
CATALOG_FROM_DICT_FAIL_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "undefined_tests",
|
||||
"filename": "test_catalog_with_undefined_tests.yml",
|
||||
"error": "FakeTest is not defined in Python module anta.tests.software",
|
||||
},
|
||||
{
|
||||
"name": "wrong_type",
|
||||
"filename": "test_catalog_wrong_type.yml",
|
||||
"error": "Wrong input type for catalog data, must be a dict, got str",
|
||||
},
|
||||
]
|
||||
CATALOG_FROM_LIST_FAIL_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "wrong_inputs",
|
||||
"tests": [
|
||||
(
|
||||
FakeTestWithInput,
|
||||
AntaTest.Input(),
|
||||
),
|
||||
],
|
||||
"error": "Test input has type AntaTest.Input but expected type FakeTestWithInput.Input",
|
||||
},
|
||||
{
|
||||
"name": "no_test",
|
||||
"tests": [(None, None)],
|
||||
"error": "Input should be a subclass of AntaTest",
|
||||
},
|
||||
{
|
||||
"name": "no_input_when_required",
|
||||
"tests": [(FakeTestWithInput, None)],
|
||||
"error": "Field required",
|
||||
},
|
||||
{
|
||||
"name": "wrong_input_type",
|
||||
"tests": [(FakeTestWithInput, True)],
|
||||
"error": "Value error, Coud not instantiate inputs as type bool is not valid",
|
||||
},
|
||||
]
|
||||
|
||||
TESTS_SETTER_FAIL_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "not_a_list",
|
||||
"tests": "not_a_list",
|
||||
"error": "The catalog must contain a list of tests",
|
||||
},
|
||||
{
|
||||
"name": "not_a_list_of_test_definitions",
|
||||
"tests": [42, 43],
|
||||
"error": "A test in the catalog must be an AntaTestDefinition instance",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
class Test_AntaCatalog:
|
||||
"""
|
||||
Test for anta.catalog.AntaCatalog
|
||||
"""
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", INIT_CATALOG_DATA, ids=generate_test_ids_list(INIT_CATALOG_DATA))
|
||||
def test_parse(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Instantiate AntaCatalog from a file
|
||||
"""
|
||||
catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / catalog_data["filename"]))
|
||||
|
||||
assert len(catalog.tests) == len(catalog_data["tests"])
|
||||
for test_id, (test, inputs) in enumerate(catalog_data["tests"]):
|
||||
assert catalog.tests[test_id].test == test
|
||||
if inputs is not None:
|
||||
if isinstance(inputs, dict):
|
||||
inputs = test.Input(**inputs)
|
||||
assert inputs == catalog.tests[test_id].inputs
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", INIT_CATALOG_DATA, ids=generate_test_ids_list(INIT_CATALOG_DATA))
|
||||
def test_from_list(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Instantiate AntaCatalog from a list
|
||||
"""
|
||||
catalog: AntaCatalog = AntaCatalog.from_list(catalog_data["tests"])
|
||||
|
||||
assert len(catalog.tests) == len(catalog_data["tests"])
|
||||
for test_id, (test, inputs) in enumerate(catalog_data["tests"]):
|
||||
assert catalog.tests[test_id].test == test
|
||||
if inputs is not None:
|
||||
if isinstance(inputs, dict):
|
||||
inputs = test.Input(**inputs)
|
||||
assert inputs == catalog.tests[test_id].inputs
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", INIT_CATALOG_DATA, ids=generate_test_ids_list(INIT_CATALOG_DATA))
|
||||
def test_from_dict(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Instantiate AntaCatalog from a dict
|
||||
"""
|
||||
with open(file=str(DATA_DIR / catalog_data["filename"]), mode="r", encoding="UTF-8") as file:
|
||||
data = safe_load(file)
|
||||
catalog: AntaCatalog = AntaCatalog.from_dict(data)
|
||||
|
||||
assert len(catalog.tests) == len(catalog_data["tests"])
|
||||
for test_id, (test, inputs) in enumerate(catalog_data["tests"]):
|
||||
assert catalog.tests[test_id].test == test
|
||||
if inputs is not None:
|
||||
if isinstance(inputs, dict):
|
||||
inputs = test.Input(**inputs)
|
||||
assert inputs == catalog.tests[test_id].inputs
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", CATALOG_PARSE_FAIL_DATA, ids=generate_test_ids_list(CATALOG_PARSE_FAIL_DATA))
|
||||
def test_parse_fail(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Errors when instantiating AntaCatalog from a file
|
||||
"""
|
||||
with pytest.raises((ValidationError, ValueError)) as exec_info:
|
||||
AntaCatalog.parse(str(DATA_DIR / catalog_data["filename"]))
|
||||
if isinstance(exec_info.value, ValidationError):
|
||||
assert catalog_data["error"] in exec_info.value.errors()[0]["msg"]
|
||||
else:
|
||||
assert catalog_data["error"] in str(exec_info)
|
||||
|
||||
def test_parse_fail_parsing(self, caplog: pytest.LogCaptureFixture) -> None:
|
||||
"""
|
||||
Errors when instantiating AntaCatalog from a file
|
||||
"""
|
||||
with pytest.raises(Exception) as exec_info:
|
||||
AntaCatalog.parse(str(DATA_DIR / "catalog_does_not_exist.yml"))
|
||||
assert "No such file or directory" in str(exec_info)
|
||||
assert len(caplog.record_tuples) >= 1
|
||||
_, _, message = caplog.record_tuples[0]
|
||||
assert "Unable to parse ANTA Test Catalog file" in message
|
||||
assert "FileNotFoundError ([Errno 2] No such file or directory" in message
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", CATALOG_FROM_LIST_FAIL_DATA, ids=generate_test_ids_list(CATALOG_FROM_LIST_FAIL_DATA))
|
||||
def test_from_list_fail(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Errors when instantiating AntaCatalog from a list of tuples
|
||||
"""
|
||||
with pytest.raises(ValidationError) as exec_info:
|
||||
AntaCatalog.from_list(catalog_data["tests"])
|
||||
assert catalog_data["error"] in exec_info.value.errors()[0]["msg"]
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", CATALOG_FROM_DICT_FAIL_DATA, ids=generate_test_ids_list(CATALOG_FROM_DICT_FAIL_DATA))
|
||||
def test_from_dict_fail(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Errors when instantiating AntaCatalog from a dict
|
||||
"""
|
||||
with open(file=str(DATA_DIR / catalog_data["filename"]), mode="r", encoding="UTF-8") as file:
|
||||
data = safe_load(file)
|
||||
with pytest.raises((ValidationError, ValueError)) as exec_info:
|
||||
AntaCatalog.from_dict(data)
|
||||
if isinstance(exec_info.value, ValidationError):
|
||||
assert catalog_data["error"] in exec_info.value.errors()[0]["msg"]
|
||||
else:
|
||||
assert catalog_data["error"] in str(exec_info)
|
||||
|
||||
def test_filename(self) -> None:
|
||||
"""
|
||||
Test filename
|
||||
"""
|
||||
catalog = AntaCatalog(filename="test")
|
||||
assert catalog.filename == Path("test")
|
||||
catalog = AntaCatalog(filename=Path("test"))
|
||||
assert catalog.filename == Path("test")
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", INIT_CATALOG_DATA, ids=generate_test_ids_list(INIT_CATALOG_DATA))
|
||||
def test__tests_setter_success(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Success when setting AntaCatalog.tests from a list of tuples
|
||||
"""
|
||||
catalog = AntaCatalog()
|
||||
catalog.tests = [AntaTestDefinition(test=test, inputs=inputs) for test, inputs in catalog_data["tests"]]
|
||||
assert len(catalog.tests) == len(catalog_data["tests"])
|
||||
for test_id, (test, inputs) in enumerate(catalog_data["tests"]):
|
||||
assert catalog.tests[test_id].test == test
|
||||
if inputs is not None:
|
||||
if isinstance(inputs, dict):
|
||||
inputs = test.Input(**inputs)
|
||||
assert inputs == catalog.tests[test_id].inputs
|
||||
|
||||
@pytest.mark.parametrize("catalog_data", TESTS_SETTER_FAIL_DATA, ids=generate_test_ids_list(TESTS_SETTER_FAIL_DATA))
|
||||
def test__tests_setter_fail(self, catalog_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Errors when setting AntaCatalog.tests from a list of tuples
|
||||
"""
|
||||
catalog = AntaCatalog()
|
||||
with pytest.raises(ValueError) as exec_info:
|
||||
catalog.tests = catalog_data["tests"]
|
||||
assert catalog_data["error"] in str(exec_info)
|
||||
|
||||
def test_get_tests_by_tags(self) -> None:
|
||||
"""
|
||||
Test AntaCatalog.test_get_tests_by_tags()
|
||||
"""
|
||||
catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / "test_catalog_with_tags.yml"))
|
||||
tests: list[AntaTestDefinition] = catalog.get_tests_by_tags(tags=["leaf"])
|
||||
assert len(tests) == 2
|
777
tests/units/test_device.py
Normal file
@@ -0,0 +1,777 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
test anta.device.py
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from _pytest.mark.structures import ParameterSet
|
||||
from asyncssh import SSHClientConnection, SSHClientConnectionOptions
|
||||
from rich import print as rprint
|
||||
|
||||
from anta import aioeapi
|
||||
from anta.device import AntaDevice, AsyncEOSDevice
|
||||
from anta.models import AntaCommand
|
||||
from tests.lib.fixture import COMMAND_OUTPUT
|
||||
from tests.lib.utils import generate_test_ids_list
|
||||
|
||||
INIT_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "no name, no port",
|
||||
"device": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
},
|
||||
"expected": {"name": "42.42.42.42"},
|
||||
},
|
||||
{
|
||||
"name": "no name, port",
|
||||
"device": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
"port": 666,
|
||||
},
|
||||
"expected": {"name": "42.42.42.42:666"},
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"device": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
"name": "test.anta.ninja",
|
||||
"disable_cache": True,
|
||||
},
|
||||
"expected": {"name": "test.anta.ninja"},
|
||||
},
|
||||
{
|
||||
"name": "insecure",
|
||||
"device": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
"name": "test.anta.ninja",
|
||||
"insecure": True,
|
||||
},
|
||||
"expected": {"name": "test.anta.ninja"},
|
||||
},
|
||||
]
|
||||
EQUALITY_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "equal",
|
||||
"device1": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
},
|
||||
"device2": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "blah",
|
||||
},
|
||||
"expected": True,
|
||||
},
|
||||
{
|
||||
"name": "equals-name",
|
||||
"device1": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
"name": "device1",
|
||||
},
|
||||
"device2": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "plop",
|
||||
"password": "anta",
|
||||
"name": "device2",
|
||||
},
|
||||
"expected": True,
|
||||
},
|
||||
{
|
||||
"name": "not-equal-port",
|
||||
"device1": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
},
|
||||
"device2": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
"port": 666,
|
||||
},
|
||||
"expected": False,
|
||||
},
|
||||
{
|
||||
"name": "not-equal-host",
|
||||
"device1": {
|
||||
"host": "42.42.42.41",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
},
|
||||
"device2": {
|
||||
"host": "42.42.42.42",
|
||||
"username": "anta",
|
||||
"password": "anta",
|
||||
},
|
||||
"expected": False,
|
||||
},
|
||||
]
|
||||
AIOEAPI_COLLECT_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "command",
|
||||
"device": {},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"patch_kwargs": {
|
||||
"return_value": [
|
||||
{
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
}
|
||||
]
|
||||
},
|
||||
},
|
||||
"expected": {
|
||||
"output": {
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
"errors": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "enable",
|
||||
"device": {"enable": True},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"patch_kwargs": {
|
||||
"return_value": [
|
||||
{},
|
||||
{
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
]
|
||||
},
|
||||
},
|
||||
"expected": {
|
||||
"output": {
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
"errors": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "enable password",
|
||||
"device": {"enable": True, "enable_password": "anta"},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"patch_kwargs": {
|
||||
"return_value": [
|
||||
{},
|
||||
{
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
]
|
||||
},
|
||||
},
|
||||
"expected": {
|
||||
"output": {
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
"errors": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "revision",
|
||||
"device": {},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"revision": 3,
|
||||
"patch_kwargs": {
|
||||
"return_value": [
|
||||
{},
|
||||
{
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
]
|
||||
},
|
||||
},
|
||||
"expected": {
|
||||
"output": {
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
},
|
||||
"errors": [],
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "aioeapi.EapiCommandError",
|
||||
"device": {},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"patch_kwargs": {
|
||||
"side_effect": aioeapi.EapiCommandError(
|
||||
passed=[], failed="show version", errors=["Authorization denied for command 'show version'"], errmsg="Invalid command", not_exec=[]
|
||||
)
|
||||
},
|
||||
},
|
||||
"expected": {"output": None, "errors": ["Authorization denied for command 'show version'"]},
|
||||
},
|
||||
{
|
||||
"name": "httpx.HTTPError",
|
||||
"device": {},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"patch_kwargs": {"side_effect": httpx.HTTPError(message="404")},
|
||||
},
|
||||
"expected": {"output": None, "errors": ["404"]},
|
||||
},
|
||||
{
|
||||
"name": "httpx.ConnectError",
|
||||
"device": {},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"patch_kwargs": {"side_effect": httpx.ConnectError(message="Cannot open port")},
|
||||
},
|
||||
"expected": {"output": None, "errors": ["Cannot open port"]},
|
||||
},
|
||||
]
|
||||
AIOEAPI_COPY_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "from",
|
||||
"device": {},
|
||||
"copy": {
|
||||
"sources": [Path("/mnt/flash"), Path("/var/log/agents")],
|
||||
"destination": Path("."),
|
||||
"direction": "from",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "to",
|
||||
"device": {},
|
||||
"copy": {
|
||||
"sources": [Path("/mnt/flash"), Path("/var/log/agents")],
|
||||
"destination": Path("."),
|
||||
"direction": "to",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "wrong",
|
||||
"device": {},
|
||||
"copy": {
|
||||
"sources": [Path("/mnt/flash"), Path("/var/log/agents")],
|
||||
"destination": Path("."),
|
||||
"direction": "wrong",
|
||||
},
|
||||
},
|
||||
]
|
||||
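# NOTE: "patch_kwargs" is a pair applied, in order, to the mocked session "check_connection" and "cli" calls in TestAsyncEOSDevice.test_refresh below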
REFRESH_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "established",
|
||||
"device": {},
|
||||
"patch_kwargs": (
|
||||
{"return_value": True},
|
||||
{
|
||||
"return_value": {
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
}
|
||||
},
|
||||
),
|
||||
"expected": {"is_online": True, "established": True, "hw_model": "DCS-7280CR3-32P4-F"},
|
||||
},
|
||||
{
|
||||
"name": "is not online",
|
||||
"device": {},
|
||||
"patch_kwargs": (
|
||||
{"return_value": False},
|
||||
{
|
||||
"return_value": {
|
||||
"mfgName": "Arista",
|
||||
"modelName": "DCS-7280CR3-32P4-F",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
}
|
||||
},
|
||||
),
|
||||
"expected": {"is_online": False, "established": False, "hw_model": None},
|
||||
},
|
||||
{
|
||||
"name": "cannot parse command",
|
||||
"device": {},
|
||||
"patch_kwargs": (
|
||||
{"return_value": True},
|
||||
{
|
||||
"return_value": {
|
||||
"mfgName": "Arista",
|
||||
"hardwareRevision": "11.00",
|
||||
"serialNumber": "JPE19500066",
|
||||
"systemMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"hwMacAddress": "fc:bd:67:3d:13:c5",
|
||||
"configMacAddress": "00:00:00:00:00:00",
|
||||
"version": "4.31.1F-34361447.fraserrel (engineering build)",
|
||||
"architecture": "x86_64",
|
||||
"internalVersion": "4.31.1F-34361447.fraserrel",
|
||||
"internalBuildId": "4940d112-a2fc-4970-8b5a-a16cd03fd08c",
|
||||
"imageFormatVersion": "3.0",
|
||||
"imageOptimization": "Default",
|
||||
"bootupTimestamp": 1700729434.5892005,
|
||||
"uptime": 20666.78,
|
||||
"memTotal": 8099732,
|
||||
"memFree": 4989568,
|
||||
"isIntlVersion": False,
|
||||
}
|
||||
},
|
||||
),
|
||||
"expected": {"is_online": True, "established": False, "hw_model": None},
|
||||
},
|
||||
{
|
||||
"name": "aioeapi.EapiCommandError",
|
||||
"device": {},
|
||||
"patch_kwargs": (
|
||||
{"return_value": True},
|
||||
{
|
||||
"side_effect": aioeapi.EapiCommandError(
|
||||
passed=[], failed="show version", errors=["Authorization denied for command 'show version'"], errmsg="Invalid command", not_exec=[]
|
||||
)
|
||||
},
|
||||
),
|
||||
"expected": {"is_online": True, "established": False, "hw_model": None},
|
||||
},
|
||||
{
|
||||
"name": "httpx.HTTPError",
|
||||
"device": {},
|
||||
"patch_kwargs": (
|
||||
{"return_value": True},
|
||||
{"side_effect": httpx.HTTPError(message="404")},
|
||||
),
|
||||
"expected": {"is_online": True, "established": False, "hw_model": None},
|
||||
},
|
||||
{
|
||||
"name": "httpx.ConnectError",
|
||||
"device": {},
|
||||
"patch_kwargs": (
|
||||
{"return_value": True},
|
||||
{"side_effect": httpx.ConnectError(message="Cannot open port")},
|
||||
),
|
||||
"expected": {"is_online": True, "established": False, "hw_model": None},
|
||||
},
|
||||
]
|
||||
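# NOTE: "cache_hit" tells TestAntaDevice.test_collect whether to pre-populate the device cache and which assertions to check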
COLLECT_DATA: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": "device cache enabled, command cache enabled, no cache hit",
|
||||
"device": {"disable_cache": False},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"use_cache": True,
|
||||
},
|
||||
"expected": {"cache_hit": False},
|
||||
},
|
||||
{
|
||||
"name": "device cache enabled, command cache enabled, cache hit",
|
||||
"device": {"disable_cache": False},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"use_cache": True,
|
||||
},
|
||||
"expected": {"cache_hit": True},
|
||||
},
|
||||
{
|
||||
"name": "device cache disabled, command cache enabled",
|
||||
"device": {"disable_cache": True},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"use_cache": True,
|
||||
},
|
||||
"expected": {},
|
||||
},
|
||||
{
|
||||
"name": "device cache enabled, command cache disabled, cache has command",
|
||||
"device": {"disable_cache": False},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"use_cache": False,
|
||||
},
|
||||
"expected": {"cache_hit": True},
|
||||
},
|
||||
{
|
||||
"name": "device cache enabled, command cache disabled, cache does not have data",
|
||||
"device": {
|
||||
"disable_cache": False,
|
||||
},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"use_cache": False,
|
||||
},
|
||||
"expected": {"cache_hit": False},
|
||||
},
|
||||
{
|
||||
"name": "device cache disabled, command cache disabled",
|
||||
"device": {
|
||||
"disable_cache": True,
|
||||
},
|
||||
"command": {
|
||||
"command": "show version",
|
||||
"use_cache": False,
|
||||
},
|
||||
"expected": {},
|
||||
},
|
||||
]
|
||||
CACHE_STATS_DATA: list[ParameterSet] = [
|
||||
pytest.param({"disable_cache": False}, {"total_commands_sent": 0, "cache_hits": 0, "cache_hit_ratio": "0.00%"}, id="with_cache"),
|
||||
pytest.param({"disable_cache": True}, None, id="without_cache"),
|
||||
]
|
||||
|
||||
|
||||
class TestAntaDevice:
|
||||
"""
|
||||
Test for the anta.device.AntaDevice abstract class
|
||||
"""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
"device, command_data, expected_data",
|
||||
map(lambda d: (d["device"], d["command"], d["expected"]), COLLECT_DATA),
|
||||
indirect=["device"],
|
||||
ids=generate_test_ids_list(COLLECT_DATA),
|
||||
)
|
||||
async def test_collect(self, device: AntaDevice, command_data: dict[str, Any], expected_data: dict[str, Any]) -> None:
|
||||
"""
|
||||
Test AntaDevice.collect behavior
|
||||
"""
|
||||
command = AntaCommand(command=command_data["command"], use_cache=command_data["use_cache"])
|
||||
|
||||
# Dummy output for cache hit
|
||||
cached_output = "cached_value"
|
||||
|
||||
if device.cache is not None and expected_data["cache_hit"] is True:
|
||||
await device.cache.set(command.uid, cached_output)
|
||||
|
||||
await device.collect(command)
|
||||
|
||||
if device.cache is not None: # device_cache is enabled
|
||||
current_cached_data = await device.cache.get(command.uid)
|
||||
if command.use_cache is True: # command is allowed to use cache
|
||||
if expected_data["cache_hit"] is True:
|
||||
assert command.output == cached_output
|
||||
assert current_cached_data == cached_output
|
||||
assert device.cache.hit_miss_ratio["hits"] == 2
|
||||
else:
|
||||
assert command.output == COMMAND_OUTPUT
|
||||
assert current_cached_data == COMMAND_OUTPUT
|
||||
assert device.cache.hit_miss_ratio["hits"] == 1
|
||||
else: # command is not allowed to use cache
|
||||
device._collect.assert_called_once_with(command=command) # type: ignore[attr-defined] # pylint: disable=protected-access
|
||||
assert command.output == COMMAND_OUTPUT
|
||||
if expected_data["cache_hit"] is True:
|
||||
assert current_cached_data == cached_output
|
||||
else:
|
||||
assert current_cached_data is None
|
||||
else: # device cache is disabled
|
||||
assert device.cache is None
|
||||
device._collect.assert_called_once_with(command=command) # type: ignore[attr-defined] # pylint: disable=protected-access
|
||||
|
||||
@pytest.mark.parametrize("device, expected", CACHE_STATS_DATA, indirect=["device"])
|
||||
def test_cache_statistics(self, device: AntaDevice, expected: dict[str, Any] | None) -> None:
|
||||
"""
|
||||
Verify that cache_statistics returns the initial statistics when the cache is enabled and None when it is disabled
|
||||
TODO: add a test where the cache has been used so the statistics are non-zero
|
||||
"""
|
||||
assert device.cache_statistics == expected
|
||||
|
||||
def test_supports(self, device: AntaDevice) -> None:
|
||||
"""
|
||||
Test the AntaDevice.supports() method with an unsupported and a supported command
|
||||
"""
|
||||
command = AntaCommand(command="show hardware counter drop", errors=["Unavailable command (not supported on this hardware platform) (at token 2: 'counter')"])
|
||||
assert device.supports(command) is False
|
||||
command = AntaCommand(command="show hardware counter drop")
|
||||
assert device.supports(command) is True
|
||||
|
||||
|
||||
class TestAsyncEOSDevice:
|
||||
"""
|
||||
Test for anta.device.AsyncEOSDevice
|
||||
"""
|
||||
|
||||
@pytest.mark.parametrize("data", INIT_DATA, ids=generate_test_ids_list(INIT_DATA))
|
||||
def test__init__(self, data: dict[str, Any]) -> None:
|
||||
"""Test the AsyncEOSDevice constructor"""
|
||||
device = AsyncEOSDevice(**data["device"])
|
||||
|
||||
assert device.name == data["expected"]["name"]
|
||||
if data["device"].get("disable_cache") is True:
|
||||
assert device.cache is None
|
||||
assert device.cache_locks is None
|
||||
else: # False or None
|
||||
assert device.cache is not None
|
||||
assert device.cache_locks is not None
|
||||
hash(device)
|
||||
|
||||
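# Print the device with __DEBUG__ patched to True, presumably to cover the debug representation path; no assertion, it only needs to not raise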
with patch("anta.device.__DEBUG__", True):
|
||||
rprint(device)
|
||||
|
||||
@pytest.mark.parametrize("data", EQUALITY_DATA, ids=generate_test_ids_list(EQUALITY_DATA))
|
||||
def test__eq(self, data: dict[str, Any]) -> None:
|
||||
"""Test the AsyncEOSDevice equality"""
|
||||
device1 = AsyncEOSDevice(**data["device1"])
|
||||
device2 = AsyncEOSDevice(**data["device2"])
|
||||
if data["expected"]:
|
||||
assert device1 == device2
|
||||
else:
|
||||
assert device1 != device2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
"async_device, patch_kwargs, expected",
|
||||
map(lambda d: (d["device"], d["patch_kwargs"], d["expected"]), REFRESH_DATA),
|
||||
ids=generate_test_ids_list(REFRESH_DATA),
|
||||
indirect=["async_device"],
|
||||
)
|
||||
async def test_refresh(self, async_device: AsyncEOSDevice, patch_kwargs: list[dict[str, Any]], expected: dict[str, Any]) -> None:
|
||||
# pylint: disable=protected-access
|
||||
"""Test AsyncEOSDevice.refresh()"""
|
||||
with patch.object(async_device._session, "check_connection", **patch_kwargs[0]):
|
||||
with patch.object(async_device._session, "cli", **patch_kwargs[1]):
|
||||
await async_device.refresh()
|
||||
async_device._session.check_connection.assert_called_once()
|
||||
if expected["is_online"]:
|
||||
async_device._session.cli.assert_called_once()
|
||||
assert async_device.is_online == expected["is_online"]
|
||||
assert async_device.established == expected["established"]
|
||||
assert async_device.hw_model == expected["hw_model"]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
"async_device, command, expected",
|
||||
map(lambda d: (d["device"], d["command"], d["expected"]), AIOEAPI_COLLECT_DATA),
|
||||
ids=generate_test_ids_list(AIOEAPI_COLLECT_DATA),
|
||||
indirect=["async_device"],
|
||||
)
|
||||
async def test__collect(self, async_device: AsyncEOSDevice, command: dict[str, Any], expected: dict[str, Any]) -> None:
|
||||
# pylint: disable=protected-access
|
||||
"""Test AsyncEOSDevice._collect()"""
|
||||
if "revision" in command:
|
||||
cmd = AntaCommand(command=command["command"], revision=command["revision"])
|
||||
else:
|
||||
cmd = AntaCommand(command=command["command"])
|
||||
with patch.object(async_device._session, "cli", **command["patch_kwargs"]):
|
||||
await async_device.collect(cmd)
|
||||
commands = []
|
||||
if async_device.enable and async_device._enable_password is not None:
|
||||
commands.append(
|
||||
{
|
||||
"cmd": "enable",
|
||||
"input": str(async_device._enable_password),
|
||||
}
|
||||
)
|
||||
elif async_device.enable:
|
||||
# No password
|
||||
commands.append({"cmd": "enable"})
|
||||
if cmd.revision:
|
||||
commands.append({"cmd": cmd.command, "revision": cmd.revision})
|
||||
else:
|
||||
commands.append({"cmd": cmd.command})
|
||||
async_device._session.cli.assert_called_once_with(commands=commands, ofmt=cmd.ofmt, version=cmd.version)
|
||||
assert cmd.output == expected["output"]
|
||||
assert cmd.errors == expected["errors"]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
"async_device, copy",
|
||||
map(lambda d: (d["device"], d["copy"]), AIOEAPI_COPY_DATA),
|
||||
ids=generate_test_ids_list(AIOEAPI_COPY_DATA),
|
||||
indirect=["async_device"],
|
||||
)
|
||||
async def test_copy(self, async_device: AsyncEOSDevice, copy: dict[str, Any]) -> None:
|
||||
"""Test AsyncEOSDevice.copy()"""
|
||||
conn = SSHClientConnection(asyncio.get_event_loop(), SSHClientConnectionOptions())
|
||||
with patch("asyncssh.connect") as connect_mock:
|
||||
connect_mock.return_value.__aenter__.return_value = conn
|
||||
with patch("asyncssh.scp") as scp_mock:
|
||||
await async_device.copy(copy["sources"], copy["destination"], copy["direction"])
|
||||
if copy["direction"] == "from":
|
||||
src = [(conn, file) for file in copy["sources"]]
|
||||
dst = copy["destination"]
|
||||
elif copy["direction"] == "to":
|
||||
src = copy["sources"]
|
||||
dst = conn, copy["destination"]
|
||||
else:
|
||||
scp_mock.assert_not_awaited()
|
||||
return
|
||||
scp_mock.assert_awaited_once_with(src, dst)
|
80
tests/units/test_logger.py
Normal file
|
@ -0,0 +1,80 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.logger
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.logger import anta_log_exception
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"exception, message, calling_logger, __DEBUG__value, expected_message",
|
||||
[
|
||||
pytest.param(ValueError("exception message"), None, None, False, "ValueError (exception message)", id="exception only"),
|
||||
pytest.param(ValueError("exception message"), "custom message", None, False, "custom message\nValueError (exception message)", id="custom message"),
|
||||
pytest.param(
|
||||
ValueError("exception message"),
|
||||
"custom logger",
|
||||
logging.getLogger("custom"),
|
||||
False,
|
||||
"custom logger\nValueError (exception message)",
|
||||
id="custom logger",
|
||||
),
|
||||
pytest.param(
|
||||
ValueError("exception message"), "Use with custom message", None, True, "Use with custom message\nValueError (exception message)", id="__DEBUG__ on"
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_anta_log_exception(
|
||||
caplog: LogCaptureFixture,
|
||||
exception: Exception,
|
||||
message: str | None,
|
||||
calling_logger: logging.Logger | None,
|
||||
__DEBUG__value: bool,
|
||||
expected_message: str,
|
||||
) -> None:
|
||||
"""
|
||||
Test anta_log_exception
|
||||
"""
|
||||
|
||||
if calling_logger is not None:
|
||||
# https://github.com/pytest-dev/pytest/issues/3697
|
||||
calling_logger.propagate = True
|
||||
caplog.set_level(logging.ERROR, logger=calling_logger.name)
|
||||
else:
|
||||
caplog.set_level(logging.ERROR)
|
||||
# Need to raise to trigger nice stacktrace for __DEBUG__ == True
|
||||
try:
|
||||
raise exception
|
||||
except ValueError as e:
|
||||
with patch("anta.logger.__DEBUG__", __DEBUG__value):
|
||||
anta_log_exception(e, message=message, calling_logger=calling_logger)
|
||||
|
||||
# Two log records are captured when __DEBUG__ is enabled, otherwise only one
|
||||
if __DEBUG__value:
|
||||
assert len(caplog.record_tuples) == 2
|
||||
else:
|
||||
assert len(caplog.record_tuples) == 1
|
||||
logger, level, message = caplog.record_tuples[0]
|
||||
|
||||
if calling_logger is not None:
|
||||
assert calling_logger.name == logger
|
||||
else:
|
||||
assert logger == "anta.logger"
|
||||
|
||||
assert level == logging.CRITICAL
|
||||
assert message == expected_message
|
||||
# the only place where we can see the stacktrace is in caplog.text
|
||||
if __DEBUG__value is True:
|
||||
assert "Traceback" in caplog.text
|
472
tests/units/test_models.py
Normal file
|
@ -0,0 +1,472 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
test anta.models.py
|
||||
"""
|
||||
# Mypy does not understand AntaTest.Input typing
|
||||
# mypy: disable-error-code=attr-defined
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.decorators import deprecated_test, skip_on_platforms
|
||||
from anta.device import AntaDevice
|
||||
from anta.models import AntaCommand, AntaTemplate, AntaTest
|
||||
from tests.lib.fixture import DEVICE_HW_MODEL
|
||||
from tests.lib.utils import generate_test_ids
|
||||
|
||||
|
||||
class FakeTest(AntaTest):
|
||||
"""ANTA test that always succeed"""
|
||||
|
||||
name = "FakeTest"
|
||||
description = "ANTA test that always succeed"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class FakeTestWithFailedCommand(AntaTest):
|
||||
"""ANTA test with a command that failed"""
|
||||
|
||||
name = "FakeTestWithFailedCommand"
|
||||
description = "ANTA test with a command that failed"
|
||||
categories = []
|
||||
commands = [AntaCommand(command="show version", errors=["failed command"])]
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class FakeTestWithUnsupportedCommand(AntaTest):
|
||||
"""ANTA test with an unsupported command"""
|
||||
|
||||
name = "FakeTestWithUnsupportedCommand"
|
||||
description = "ANTA test with an unsupported command"
|
||||
categories = []
|
||||
commands = [AntaCommand(command="show hardware counter drop", errors=["Unavailable command (not supported on this hardware platform) (at token 2: 'counter')"])]
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class FakeTestWithInput(AntaTest):
|
||||
"""ANTA test with inputs that always succeed"""
|
||||
|
||||
name = "FakeTestWithInput"
|
||||
description = "ANTA test with inputs that always succeed"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
class Input(AntaTest.Input): # pylint: disable=missing-class-docstring
|
||||
string: str
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success(self.inputs.string)
|
||||
|
||||
|
||||
class FakeTestWithTemplate(AntaTest):
|
||||
"""ANTA test with template that always succeed"""
|
||||
|
||||
name = "FakeTestWithTemplate"
|
||||
description = "ANTA test with template that always succeed"
|
||||
categories = []
|
||||
commands = [AntaTemplate(template="show interface {interface}")]
|
||||
|
||||
class Input(AntaTest.Input): # pylint: disable=missing-class-docstring
|
||||
interface: str
|
||||
|
||||
def render(self, template: AntaTemplate) -> list[AntaCommand]:
|
||||
return [template.render(interface=self.inputs.interface)]
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success(self.instance_commands[0].command)
|
||||
|
||||
|
||||
class FakeTestWithTemplateNoRender(AntaTest):
|
||||
"""ANTA test with template that miss the render() method"""
|
||||
|
||||
name = "FakeTestWithTemplateNoRender"
|
||||
description = "ANTA test with template that miss the render() method"
|
||||
categories = []
|
||||
commands = [AntaTemplate(template="show interface {interface}")]
|
||||
|
||||
class Input(AntaTest.Input): # pylint: disable=missing-class-docstring
|
||||
interface: str
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success(self.instance_commands[0].command)
|
||||
|
||||
|
||||
class FakeTestWithTemplateBadRender1(AntaTest):
|
||||
"""ANTA test with template that raises a AntaTemplateRenderError exception"""
|
||||
|
||||
name = "FakeTestWithTemplateBadRender"
|
||||
description = "ANTA test with template that raises a AntaTemplateRenderError exception"
|
||||
categories = []
|
||||
commands = [AntaTemplate(template="show interface {interface}")]
|
||||
|
||||
class Input(AntaTest.Input): # pylint: disable=missing-class-docstring
|
||||
interface: str
|
||||
|
||||
def render(self, template: AntaTemplate) -> list[AntaCommand]:
|
||||
return [template.render(wrong_template_param=self.inputs.interface)]
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success(self.instance_commands[0].command)
|
||||
|
||||
|
||||
class FakeTestWithTemplateBadRender2(AntaTest):
|
||||
"""ANTA test with template that raises an arbitrary exception"""
|
||||
|
||||
name = "FakeTestWithTemplateBadRender2"
|
||||
description = "ANTA test with template that raises an arbitrary exception"
|
||||
categories = []
|
||||
commands = [AntaTemplate(template="show interface {interface}")]
|
||||
|
||||
class Input(AntaTest.Input): # pylint: disable=missing-class-docstring
|
||||
interface: str
|
||||
|
||||
def render(self, template: AntaTemplate) -> list[AntaCommand]:
|
||||
raise Exception() # pylint: disable=broad-exception-raised
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success(self.instance_commands[0].command)
|
||||
|
||||
|
||||
class SkipOnPlatformTest(AntaTest):
|
||||
"""ANTA test that is skipped"""
|
||||
|
||||
name = "SkipOnPlatformTest"
|
||||
description = "ANTA test that is skipped on a specific platform"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@skip_on_platforms([DEVICE_HW_MODEL])
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class UnSkipOnPlatformTest(AntaTest):
|
||||
"""ANTA test that is skipped"""
|
||||
|
||||
name = "UnSkipOnPlatformTest"
|
||||
description = "ANTA test that is skipped on a specific platform"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@skip_on_platforms(["dummy"])
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class SkipOnPlatformTestWithInput(AntaTest):
|
||||
"""ANTA test skipped on platforms but with Input"""
|
||||
|
||||
name = "SkipOnPlatformTestWithInput"
|
||||
description = "ANTA test skipped on platforms but with Input"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
class Input(AntaTest.Input): # pylint: disable=missing-class-docstring
|
||||
string: str
|
||||
|
||||
@skip_on_platforms([DEVICE_HW_MODEL])
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success(self.inputs.string)
|
||||
|
||||
|
||||
class DeprecatedTestWithoutNewTest(AntaTest):
|
||||
"""ANTA test that is deprecated without new test"""
|
||||
|
||||
name = "DeprecatedTestWitouthNewTest"
|
||||
description = "ANTA test that is deprecated without new test"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@deprecated_test()
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
class DeprecatedTestWithNewTest(AntaTest):
|
||||
"""ANTA test that is deprecated with new test."""
|
||||
|
||||
name = "DeprecatedTestWithNewTest"
|
||||
description = "ANTA deprecated test with New Test"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@deprecated_test(new_tests=["NewTest"])
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
|
||||
ANTATEST_DATA: list[dict[str, Any]] = [
|
||||
{"name": "no input", "test": FakeTest, "inputs": None, "expected": {"__init__": {"result": "unset"}, "test": {"result": "success"}}},
|
||||
{
|
||||
"name": "extra input",
|
||||
"test": FakeTest,
|
||||
"inputs": {"string": "culpa! veniam quas quas veniam molestias, esse"},
|
||||
"expected": {"__init__": {"result": "error", "messages": ["Extra inputs are not permitted"]}, "test": {"result": "error"}},
|
||||
},
|
||||
{
|
||||
"name": "no input",
|
||||
"test": FakeTestWithInput,
|
||||
"inputs": None,
|
||||
"expected": {"__init__": {"result": "error", "messages": ["Field required"]}, "test": {"result": "error"}},
|
||||
},
|
||||
{
|
||||
"name": "wrong input type",
|
||||
"test": FakeTestWithInput,
|
||||
"inputs": {"string": 1},
|
||||
"expected": {"__init__": {"result": "error", "messages": ["Input should be a valid string"]}, "test": {"result": "error"}},
|
||||
},
|
||||
{
|
||||
"name": "good input",
|
||||
"test": FakeTestWithInput,
|
||||
"inputs": {"string": "culpa! veniam quas quas veniam molestias, esse"},
|
||||
"expected": {"__init__": {"result": "unset"}, "test": {"result": "success", "messages": ["culpa! veniam quas quas veniam molestias, esse"]}},
|
||||
},
|
||||
{
|
||||
"name": "good input",
|
||||
"test": FakeTestWithTemplate,
|
||||
"inputs": {"interface": "Ethernet1"},
|
||||
"expected": {"__init__": {"result": "unset"}, "test": {"result": "success", "messages": ["show interface Ethernet1"]}},
|
||||
},
|
||||
{
|
||||
"name": "wrong input type",
|
||||
"test": FakeTestWithTemplate,
|
||||
"inputs": {"interface": 1},
|
||||
"expected": {"__init__": {"result": "error", "messages": ["Input should be a valid string"]}, "test": {"result": "error"}},
|
||||
},
|
||||
{
|
||||
"name": "wrong render definition",
|
||||
"test": FakeTestWithTemplateNoRender,
|
||||
"inputs": {"interface": "Ethernet1"},
|
||||
"expected": {
|
||||
"__init__": {
|
||||
"result": "error",
|
||||
"messages": ["AntaTemplate are provided but render() method has not been implemented for tests.units.test_models.FakeTestWithTemplateNoRender"],
|
||||
},
|
||||
"test": {"result": "error"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "AntaTemplateRenderError",
|
||||
"test": FakeTestWithTemplateBadRender1,
|
||||
"inputs": {"interface": "Ethernet1"},
|
||||
"expected": {
|
||||
"__init__": {
|
||||
"result": "error",
|
||||
"messages": ["Cannot render template {template='show interface {interface}' version='latest' revision=None ofmt='json' use_cache=True}"],
|
||||
},
|
||||
"test": {"result": "error"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "Exception in render()",
|
||||
"test": FakeTestWithTemplateBadRender2,
|
||||
"inputs": {"interface": "Ethernet1"},
|
||||
"expected": {
|
||||
"__init__": {
|
||||
"result": "error",
|
||||
"messages": ["Exception in tests.units.test_models.FakeTestWithTemplateBadRender2.render(): Exception"],
|
||||
},
|
||||
"test": {"result": "error"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "unskip on platforms",
|
||||
"test": UnSkipOnPlatformTest,
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"__init__": {"result": "unset"},
|
||||
"test": {"result": "success"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skip on platforms, unset",
|
||||
"test": SkipOnPlatformTest,
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"__init__": {"result": "unset"},
|
||||
"test": {"result": "skipped"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "skip on platforms, not unset",
|
||||
"test": SkipOnPlatformTestWithInput,
|
||||
"inputs": None,
|
||||
"expected": {"__init__": {"result": "error", "messages": ["Field required"]}, "test": {"result": "error"}},
|
||||
},
|
||||
{
|
||||
"name": "deprecate test without new test",
|
||||
"test": DeprecatedTestWithoutNewTest,
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"__init__": {"result": "unset"},
|
||||
"test": {"result": "success"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "deprecate test with new test",
|
||||
"test": DeprecatedTestWithNewTest,
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"__init__": {"result": "unset"},
|
||||
"test": {"result": "success"},
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "failed command",
|
||||
"test": FakeTestWithFailedCommand,
|
||||
"inputs": None,
|
||||
"expected": {"__init__": {"result": "unset"}, "test": {"result": "error", "messages": ["show version has failed: failed command"]}},
|
||||
},
|
||||
{
|
||||
"name": "unsupported command",
|
||||
"test": FakeTestWithUnsupportedCommand,
|
||||
"inputs": None,
|
||||
"expected": {
|
||||
"__init__": {"result": "unset"},
|
||||
"test": {"result": "skipped", "messages": ["Skipped because show hardware counter drop is not supported on pytest"]},
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
class Test_AntaTest:
|
||||
"""
|
||||
Test for anta.models.AntaTest
|
||||
"""
|
||||
|
||||
def test__init_subclass__name(self) -> None:
|
||||
"""Test __init_subclass__"""
|
||||
# Pylint detects all the classes defined here as unused, which is on purpose
|
||||
# pylint: disable=unused-variable
|
||||
with pytest.raises(NotImplementedError) as exec_info:
|
||||
|
||||
class WrongTestNoName(AntaTest):
|
||||
"""ANTA test that is missing a name"""
|
||||
|
||||
description = "ANTA test that is missing a name"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
assert exec_info.value.args[0] == "Class tests.units.test_models.WrongTestNoName is missing required class attribute name"
|
||||
|
||||
with pytest.raises(NotImplementedError) as exec_info:
|
||||
|
||||
class WrongTestNoDescription(AntaTest):
|
||||
"""ANTA test that is missing a description"""
|
||||
|
||||
name = "WrongTestNoDescription"
|
||||
categories = []
|
||||
commands = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
assert exec_info.value.args[0] == "Class tests.units.test_models.WrongTestNoDescription is missing required class attribute description"
|
||||
|
||||
with pytest.raises(NotImplementedError) as exec_info:
|
||||
|
||||
class WrongTestNoCategories(AntaTest):
|
||||
"""ANTA test that is missing categories"""
|
||||
|
||||
name = "WrongTestNoCategories"
|
||||
description = "ANTA test that is missing categories"
|
||||
commands = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
assert exec_info.value.args[0] == "Class tests.units.test_models.WrongTestNoCategories is missing required class attribute categories"
|
||||
|
||||
with pytest.raises(NotImplementedError) as exec_info:
|
||||
|
||||
class WrongTestNoCommands(AntaTest):
|
||||
"""ANTA test that is missing commands"""
|
||||
|
||||
name = "WrongTestNoCommands"
|
||||
description = "ANTA test that is missing commands"
|
||||
categories = []
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
assert exec_info.value.args[0] == "Class tests.units.test_models.WrongTestNoCommands is missing required class attribute commands"
|
||||
|
||||
def _assert_test(self, test: AntaTest, expected: dict[str, Any]) -> None:
|
||||
assert test.result.result == expected["result"]
|
||||
if "messages" in expected:
|
||||
for result_msg, expected_msg in zip(test.result.messages, expected["messages"]): # NOTE: zip(strict=True) has been added in Python 3.10
|
||||
assert expected_msg in result_msg
|
||||
|
||||
@pytest.mark.parametrize("data", ANTATEST_DATA, ids=generate_test_ids(ANTATEST_DATA))
|
||||
def test__init__(self, device: AntaDevice, data: dict[str, Any]) -> None:
|
||||
"""Test the AntaTest constructor"""
|
||||
expected = data["expected"]["__init__"]
|
||||
test = data["test"](device, inputs=data["inputs"])
|
||||
self._assert_test(test, expected)
|
||||
|
||||
@pytest.mark.parametrize("data", ANTATEST_DATA, ids=generate_test_ids(ANTATEST_DATA))
|
||||
def test_test(self, device: AntaDevice, data: dict[str, Any]) -> None:
|
||||
"""Test the AntaTest.test method"""
|
||||
expected = data["expected"]["test"]
|
||||
test = data["test"](device, inputs=data["inputs"])
|
||||
asyncio.run(test.test())
|
||||
self._assert_test(test, expected)
|
||||
|
||||
|
||||
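# Disruptive EOS commands that ANTA is expected to refuse to run (the test below expects an "error" result)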
ANTATEST_BLACKLIST_DATA = ["reload", "reload --force", "write", "wr mem"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("data", ANTATEST_BLACKLIST_DATA)
|
||||
def test_blacklist(device: AntaDevice, data: str) -> None:
|
||||
"""Test for blacklisting function."""
|
||||
|
||||
class FakeTestWithBlacklist(AntaTest):
|
||||
"""Fake Test for blacklist"""
|
||||
|
||||
name = "FakeTestWithBlacklist"
|
||||
description = "ANTA test that has blacklisted command"
|
||||
categories = []
|
||||
commands = [AntaCommand(command=data)]
|
||||
|
||||
@AntaTest.anta_test
|
||||
def test(self) -> None:
|
||||
self.result.is_success()
|
||||
|
||||
test_instance = FakeTestWithBlacklist(device)
|
||||
|
||||
# Run the test() method
|
||||
asyncio.run(test_instance.test())
|
||||
assert test_instance.result.result == "error"
|
82
tests/units/test_runner.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
test anta.runner.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
from anta import logger
|
||||
from anta.catalog import AntaCatalog
|
||||
from anta.inventory import AntaInventory
|
||||
from anta.result_manager import ResultManager
|
||||
from anta.runner import main
|
||||
|
||||
from .test_models import FakeTest
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pytest import LogCaptureFixture
|
||||
|
||||
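# Minimal catalog with a single FakeTest and no inputs, reused by the runner tests below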
FAKE_CATALOG: AntaCatalog = AntaCatalog.from_list([(FakeTest, None)])
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_runner_empty_tests(caplog: LogCaptureFixture, test_inventory: AntaInventory) -> None:
|
||||
"""
|
||||
Test that when the list of tests is empty, a log message is emitted
|
||||
|
||||
caplog is the pytest fixture to capture logs
|
||||
test_inventory is a fixture that gives a default inventory for tests
|
||||
"""
|
||||
logger.setup_logging(logger.Log.INFO)
|
||||
caplog.set_level(logging.INFO)
|
||||
manager = ResultManager()
|
||||
await main(manager, test_inventory, AntaCatalog())
|
||||
|
||||
assert len(caplog.record_tuples) == 1
|
||||
assert "The list of tests is empty, exiting" in caplog.records[0].message
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_runner_empty_inventory(caplog: LogCaptureFixture) -> None:
|
||||
"""
|
||||
Test that when the inventory is empty, a log message is emitted
|
||||
|
||||
caplog is the pytest fixture to capture logs
|
||||
"""
|
||||
logger.setup_logging(logger.Log.INFO)
|
||||
caplog.set_level(logging.INFO)
|
||||
manager = ResultManager()
|
||||
inventory = AntaInventory()
|
||||
await main(manager, inventory, FAKE_CATALOG)
|
||||
assert len(caplog.record_tuples) == 1
|
||||
assert "The inventory is empty, exiting" in caplog.records[0].message
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_runner_no_selected_device(caplog: LogCaptureFixture, test_inventory: AntaInventory) -> None:
|
||||
"""
|
||||
Test that when no device is in the established state, a log message is emitted
|
||||
|
||||
caplog is the pytest fixture to capture logs
|
||||
test_inventory is a fixture that gives a default inventory for tests
|
||||
"""
|
||||
logger.setup_logging(logger.Log.INFO)
|
||||
caplog.set_level(logging.INFO)
|
||||
manager = ResultManager()
|
||||
await main(manager, test_inventory, FAKE_CATALOG)
|
||||
|
||||
assert "No device in the established state 'True' was found. There is no device to run tests against, exiting" in [record.message for record in caplog.records]
|
||||
|
||||
# Reset logs and run with tags
|
||||
caplog.clear()
|
||||
await main(manager, test_inventory, FAKE_CATALOG, tags=["toto"])
|
||||
|
||||
assert "No device in the established state 'True' matching the tags ['toto'] was found. There is no device to run tests against, exiting" in [
|
||||
record.message for record in caplog.records
|
||||
]
|
3
tests/units/tools/__init__.py
Normal file
|
@ -0,0 +1,3 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
149
tests/units/tools/test_get_dict_superset.py
Normal file
|
@ -0,0 +1,149 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
|
||||
"""Tests for `anta.tools.get_dict_superset`."""
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import nullcontext as does_not_raise
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.tools.get_dict_superset import get_dict_superset
|
||||
|
||||
# pylint: disable=duplicate-code
|
||||
DUMMY_DATA = [
|
||||
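# The leading tuple is intentionally not a dictionary: it keeps "Alice" at index 1 and presumably exercises non-dict entries in the list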
("id", 0),
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Alice",
|
||||
"age": 30,
|
||||
"email": "alice@example.com",
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "Bob",
|
||||
"age": 35,
|
||||
"email": "bob@example.com",
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"name": "Charlie",
|
||||
"age": 40,
|
||||
"email": "charlie@example.com",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"list_of_dicts, input_dict, default, required, var_name, custom_error_msg, expected_result, expected_raise",
|
||||
[
|
||||
pytest.param([], {"id": 1, "name": "Alice"}, None, False, None, None, None, does_not_raise(), id="empty list"),
|
||||
pytest.param(
|
||||
[],
|
||||
{"id": 1, "name": "Alice"},
|
||||
None,
|
||||
True,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
pytest.raises(ValueError, match="not found in the provided list."),
|
||||
id="empty list and required",
|
||||
),
|
||||
pytest.param(DUMMY_DATA, {"id": 10, "name": "Jack"}, None, False, None, None, None, does_not_raise(), id="missing item"),
|
||||
pytest.param(DUMMY_DATA, {"id": 1, "name": "Alice"}, None, False, None, None, DUMMY_DATA[1], does_not_raise(), id="found item"),
|
||||
pytest.param(DUMMY_DATA, {"id": 10, "name": "Jack"}, "default_value", False, None, None, "default_value", does_not_raise(), id="default value"),
|
||||
pytest.param(
|
||||
DUMMY_DATA, {"id": 10, "name": "Jack"}, None, True, None, None, None, pytest.raises(ValueError, match="not found in the provided list."), id="required"
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA,
|
||||
{"id": 10, "name": "Jack"},
|
||||
None,
|
||||
True,
|
||||
"custom_var_name",
|
||||
None,
|
||||
None,
|
||||
pytest.raises(ValueError, match="custom_var_name not found in the provided list."),
|
||||
id="custom var_name",
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA, {"id": 1, "name": "Alice"}, None, True, "custom_var_name", "Custom error message", DUMMY_DATA[1], does_not_raise(), id="custom error message"
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA,
|
||||
{"id": 10, "name": "Jack"},
|
||||
None,
|
||||
True,
|
||||
"custom_var_name",
|
||||
"Custom error message",
|
||||
None,
|
||||
pytest.raises(ValueError, match="Custom error message"),
|
||||
id="custom error message and required",
|
||||
),
|
||||
pytest.param(DUMMY_DATA, {"id": 1, "name": "Jack"}, None, False, None, None, None, does_not_raise(), id="id ok but name not ok"),
|
||||
pytest.param(
|
||||
"not a list",
|
||||
{"id": 1, "name": "Alice"},
|
||||
None,
|
||||
True,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
pytest.raises(ValueError, match="not found in the provided list."),
|
||||
id="non-list input for list_of_dicts",
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA, "not a dict", None, True, None, None, None, pytest.raises(ValueError, match="not found in the provided list."), id="non-dictionary input"
|
||||
),
|
||||
pytest.param(DUMMY_DATA, {}, None, False, None, None, None, does_not_raise(), id="empty dictionary input"),
|
||||
pytest.param(
|
||||
DUMMY_DATA,
|
||||
{"id": 1, "name": "Alice", "extra_key": "extra_value"},
|
||||
None,
|
||||
True,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
pytest.raises(ValueError, match="not found in the provided list."),
|
||||
id="input dictionary with extra keys",
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA,
|
||||
{"id": 1},
|
||||
None,
|
||||
False,
|
||||
None,
|
||||
None,
|
||||
DUMMY_DATA[1],
|
||||
does_not_raise(),
|
||||
id="input dictionary is a subset of more than one dictionary in list_of_dicts",
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA,
|
||||
{"id": 1, "name": "Alice", "age": 30, "email": "alice@example.com", "extra_key": "extra_value"},
|
||||
None,
|
||||
True,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
pytest.raises(ValueError, match="not found in the provided list."),
|
||||
id="input dictionary is a superset of a dictionary in list_of_dicts",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_get_dict_superset(
|
||||
list_of_dicts: list[dict[Any, Any]],
|
||||
input_dict: Any,
|
||||
default: Any | None,
|
||||
required: bool,
|
||||
var_name: str | None,
|
||||
custom_error_msg: str | None,
|
||||
expected_result: str,
|
||||
expected_raise: Any,
|
||||
) -> None:
|
||||
"""Test get_dict_superset."""
|
||||
# pylint: disable=too-many-arguments
|
||||
with expected_raise:
|
||||
assert get_dict_superset(list_of_dicts, input_dict, default, required, var_name, custom_error_msg) == expected_result
|
72
tests/units/tools/test_get_item.py
Normal file
|
@ -0,0 +1,72 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
|
||||
"""Tests for `anta.tools.get_item`."""
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import nullcontext as does_not_raise
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.tools.get_item import get_item
|
||||
|
||||
DUMMY_DATA = [
|
||||
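# First entry is a tuple rather than a dictionary, presumably to exercise non-dict items; it also keeps "Alice" at DUMMY_DATA[1]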
("id", 0),
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Alice",
|
||||
"age": 30,
|
||||
"email": "alice@example.com",
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "Bob",
|
||||
"age": 35,
|
||||
"email": "bob@example.com",
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"name": "Charlie",
|
||||
"age": 40,
|
||||
"email": "charlie@example.com",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"list_of_dicts, key, value, default, required, case_sensitive, var_name, custom_error_msg, expected_result, expected_raise",
|
||||
[
|
||||
pytest.param([], "name", "Bob", None, False, False, None, None, None, does_not_raise(), id="empty list"),
|
||||
pytest.param([], "name", "Bob", None, True, False, None, None, None, pytest.raises(ValueError, match="name"), id="empty list and required"),
|
||||
pytest.param(DUMMY_DATA, "name", "Jack", None, False, False, None, None, None, does_not_raise(), id="missing item"),
|
||||
pytest.param(DUMMY_DATA, "name", "Alice", None, False, False, None, None, DUMMY_DATA[1], does_not_raise(), id="found item"),
|
||||
pytest.param(DUMMY_DATA, "name", "Jack", "default_value", False, False, None, None, "default_value", does_not_raise(), id="default value"),
|
||||
pytest.param(DUMMY_DATA, "name", "Jack", None, True, False, None, None, None, pytest.raises(ValueError, match="name"), id="required"),
|
||||
pytest.param(DUMMY_DATA, "name", "Bob", None, False, True, None, None, DUMMY_DATA[2], does_not_raise(), id="case sensitive"),
|
||||
pytest.param(DUMMY_DATA, "name", "charlie", None, False, False, None, None, DUMMY_DATA[3], does_not_raise(), id="case insensitive"),
|
||||
pytest.param(
|
||||
DUMMY_DATA, "name", "Jack", None, True, False, "custom_var_name", None, None, pytest.raises(ValueError, match="custom_var_name"), id="custom var_name"
|
||||
),
|
||||
pytest.param(
|
||||
DUMMY_DATA, "name", "Jack", None, True, False, None, "custom_error_msg", None, pytest.raises(ValueError, match="custom_error_msg"), id="custom error msg"
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_get_item(
|
||||
list_of_dicts: list[dict[Any, Any]],
|
||||
key: Any,
|
||||
value: Any,
|
||||
default: Any | None,
|
||||
required: bool,
|
||||
case_sensitive: bool,
|
||||
var_name: str | None,
|
||||
custom_error_msg: str | None,
|
||||
expected_result: str,
|
||||
expected_raise: Any,
|
||||
) -> None:
|
||||
"""Test get_item."""
|
||||
# pylint: disable=too-many-arguments
|
||||
with expected_raise:
|
||||
assert get_item(list_of_dicts, key, value, default, required, case_sensitive, var_name, custom_error_msg) == expected_result
|
50
tests/units/tools/test_get_value.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tools.get_value
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import nullcontext as does_not_raise
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.tools.get_value import get_value
|
||||
|
||||
INPUT_DICT = {"test_value": 42, "nested_test": {"nested_value": 43}}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"input_dict, key, default, required, org_key, separator, expected_result, expected_raise",
|
||||
[
|
||||
pytest.param({}, "test", None, False, None, None, None, does_not_raise(), id="empty dict"),
|
||||
pytest.param(INPUT_DICT, "test_value", None, False, None, None, 42, does_not_raise(), id="simple key"),
|
||||
pytest.param(INPUT_DICT, "nested_test.nested_value", None, False, None, None, 43, does_not_raise(), id="nested_key"),
|
||||
pytest.param(INPUT_DICT, "missing_value", None, False, None, None, None, does_not_raise(), id="missing_value"),
|
||||
pytest.param(INPUT_DICT, "missing_value_with_default", "default_value", False, None, None, "default_value", does_not_raise(), id="default"),
|
||||
pytest.param(INPUT_DICT, "missing_required", None, True, None, None, None, pytest.raises(ValueError), id="required"),
|
||||
pytest.param(INPUT_DICT, "missing_required", None, True, "custom_org_key", None, None, pytest.raises(ValueError), id="custom org_key"),
|
||||
pytest.param(INPUT_DICT, "nested_test||nested_value", None, None, None, "||", 43, does_not_raise(), id="custom separator"),
|
||||
],
|
||||
)
|
||||
def test_get_value(
|
||||
input_dict: dict[Any, Any],
|
||||
key: str,
|
||||
default: str | None,
|
||||
required: bool,
|
||||
org_key: str | None,
|
||||
separator: str | None,
|
||||
expected_result: str,
|
||||
expected_raise: Any,
|
||||
) -> None:
|
||||
"""
|
||||
Test get_value
|
||||
"""
|
||||
# pylint: disable=too-many-arguments
|
||||
kwargs = {"default": default, "required": required, "org_key": org_key, "separator": separator}
|
||||
kwargs = {k: v for k, v in kwargs.items() if v is not None}
|
||||
with expected_raise:
|
||||
assert get_value(input_dict, key, **kwargs) == expected_result # type: ignore
|
38
tests/units/tools/test_misc.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
"""
|
||||
Tests for anta.tools.misc
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.tools.misc import exc_to_str, tb_to_str
|
||||
|
||||
|
||||
def my_raising_function(exception: Exception) -> None:
|
||||
"""
|
||||
Dummy function that raises the given exception
|
||||
"""
|
||||
raise exception
|
||||
|
||||
|
||||
@pytest.mark.parametrize("exception, expected_output", [(ValueError("test"), "ValueError (test)"), (ValueError(), "ValueError")])
|
||||
def test_exc_to_str(exception: Exception, expected_output: str) -> None:
|
||||
"""
|
||||
Test exc_to_str
|
||||
"""
|
||||
assert exc_to_str(exception) == expected_output
|
||||
|
||||
|
||||
def test_tb_to_str() -> None:
|
||||
"""
|
||||
Test tb_to_str
|
||||
"""
|
||||
try:
|
||||
my_raising_function(ValueError("test"))
|
||||
except ValueError as e:
|
||||
output = tb_to_str(e)
|
||||
assert "Traceback" in output
|
||||
assert 'my_raising_function(ValueError("test"))' in output
|
57
tests/units/tools/test_utils.py
Normal file
|
@ -0,0 +1,57 @@
|
|||
# Copyright (c) 2023-2024 Arista Networks, Inc.
|
||||
# Use of this source code is governed by the Apache License 2.0
|
||||
# that can be found in the LICENSE file.
|
||||
|
||||
"""Tests for `anta.tools.utils`."""
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import nullcontext as does_not_raise
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from anta.tools.utils import get_failed_logs
|
||||
|
||||
EXPECTED_OUTPUTS = [
|
||||
{"id": 1, "name": "Alice", "age": 30, "email": "alice@example.com"},
|
||||
{"id": 2, "name": "Bob", "age": 35, "email": "bob@example.com"},
|
||||
{"id": 3, "name": "Charlie", "age": 40, "email": "charlie@example.com"},
|
||||
{"id": 4, "name": "Jon", "age": 25, "email": "Jon@example.com"},
|
||||
]
|
||||
|
||||
ACTUAL_OUTPUTS = [
|
||||
{"id": 1, "name": "Alice", "age": 30, "email": "alice@example.com"},
|
||||
{"id": 2, "name": "Bob", "age": 35, "email": "bob@example.com"},
|
||||
{"id": 3, "name": "Charlie", "age": 40, "email": "charlie@example.com"},
|
||||
{"id": 4, "name": "Rob", "age": 25, "email": "Jon@example.com"},
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"expected_output, actual_output, expected_result, expected_raise",
|
||||
[
|
||||
pytest.param(EXPECTED_OUTPUTS[0], ACTUAL_OUTPUTS[0], "", does_not_raise(), id="no difference"),
|
||||
pytest.param(
|
||||
EXPECTED_OUTPUTS[0],
|
||||
ACTUAL_OUTPUTS[1],
|
||||
"\nExpected `1` as the id, but found `2` instead.\nExpected `Alice` as the name, but found `Bob` instead.\n"
|
||||
"Expected `30` as the age, but found `35` instead.\nExpected `alice@example.com` as the email, but found `bob@example.com` instead.",
|
||||
does_not_raise(),
|
||||
id="different data",
|
||||
),
|
||||
pytest.param(
|
||||
EXPECTED_OUTPUTS[0],
|
||||
{},
|
||||
"\nExpected `1` as the id, but it was not found in the actual output.\nExpected `Alice` as the name, but it was not found in the actual output.\n"
|
||||
"Expected `30` as the age, but it was not found in the actual output.\nExpected `alice@example.com` as the email, but it was not found in "
|
||||
"the actual output.",
|
||||
does_not_raise(),
|
||||
id="empty actual output",
|
||||
),
|
||||
pytest.param(EXPECTED_OUTPUTS[3], ACTUAL_OUTPUTS[3], "\nExpected `Jon` as the name, but found `Rob` instead.", does_not_raise(), id="different name"),
|
||||
],
|
||||
)
|
||||
def test_get_failed_logs(expected_output: dict[Any, Any], actual_output: dict[Any, Any], expected_result: str, expected_raise: Any) -> None:
|
||||
"""Test get_failed_logs."""
|
||||
with expected_raise:
|
||||
assert get_failed_logs(expected_output, actual_output) == expected_result
|