Diffstat (limited to 'tests/units')
29 files changed, 2688 insertions, 112 deletions
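Note on the test modules touched below: each one follows the same data-driven convention visible in the hunks — a module-level `DATA: list[dict[str, Any]]` whose entries carry `name`, `test` (the AntaTest class under test), `eos_data` (canned EOS command output), `inputs`, and `expected` (a `result` plus optional `messages`), and each module imports the shared `test` function from `tests.lib.anta` (kept with `# noqa: F401`, presumably so pytest can collect it against `DATA`). The sketch below only illustrates that entry shape; it is not ANTA's actual harness, and `FakeResult` and `check` are hypothetical names introduced for this illustration.

from dataclasses import dataclass, field
from typing import Any

@dataclass
class FakeResult:
    # Stand-in for a test result carrying the two fields the DATA entries assert on.
    result: str
    messages: list[str] = field(default_factory=list)

def check(unit: dict[str, Any], obtained: FakeResult) -> None:
    # Compare an obtained result against a DATA entry's "expected" block.
    assert obtained.result == unit["expected"]["result"]
    for message in unit["expected"].get("messages", []):
        assert message in obtained.messages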
diff --git a/tests/units/anta_tests/routing/test_bgp.py b/tests/units/anta_tests/routing/test_bgp.py index 31006c5..e712e12 100644 --- a/tests/units/anta_tests/routing/test_bgp.py +++ b/tests/units/anta_tests/routing/test_bgp.py @@ -30,6 +30,7 @@ DATA: list[dict[str, Any]] = [ "name": "success", "test": VerifyBGPPeerCount, "eos_data": [ + # Need to order the output as the commands would be sorted after template rendering. { "vrfs": { "default": { @@ -120,9 +121,10 @@ DATA: list[dict[str, Any]] = [ ], "inputs": { "address_families": [ + # evpn first to make sure that the correct mapping output to input is kept. + {"afi": "evpn", "num_peers": 2}, {"afi": "ipv4", "safi": "unicast", "vrf": "default", "num_peers": 2}, {"afi": "ipv4", "safi": "sr-te", "vrf": "MGMT", "num_peers": 1}, - {"afi": "evpn", "num_peers": 2}, {"afi": "link-state", "num_peers": 2}, {"afi": "path-selection", "num_peers": 2}, ] @@ -652,9 +654,10 @@ DATA: list[dict[str, Any]] = [ ], "inputs": { "address_families": [ + # Path selection first to make sure input to output mapping is correct. + {"afi": "path-selection"}, {"afi": "ipv4", "safi": "unicast", "vrf": "default"}, {"afi": "ipv4", "safi": "sr-te", "vrf": "MGMT"}, - {"afi": "path-selection"}, {"afi": "link-state"}, ] }, @@ -1081,6 +1084,8 @@ DATA: list[dict[str, Any]] = [ ], "inputs": { "address_families": [ + # Path selection first to make sure input to output mapping is correct. + {"afi": "path-selection", "peers": ["10.1.255.20", "10.1.255.22"]}, { "afi": "ipv4", "safi": "unicast", @@ -1093,7 +1098,6 @@ DATA: list[dict[str, Any]] = [ "vrf": "MGMT", "peers": ["10.1.255.10", "10.1.255.12"], }, - {"afi": "path-selection", "peers": ["10.1.255.20", "10.1.255.22"]}, {"afi": "link-state", "peers": ["10.1.255.30", "10.1.255.32"]}, ] }, diff --git a/tests/units/anta_tests/routing/test_isis.py b/tests/units/anta_tests/routing/test_isis.py new file mode 100644 index 0000000..ec41105 --- /dev/null +++ b/tests/units/anta_tests/routing/test_isis.py @@ -0,0 +1,570 @@ +# Copyright (c) 2023-2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. 
+"""Tests for anta.tests.routing.ospf.py.""" + +from __future__ import annotations + +from typing import Any + +from anta.tests.routing.isis import VerifyISISInterfaceMode, VerifyISISNeighborCount, VerifyISISNeighborState +from tests.lib.anta import test # noqa: F401; pylint: disable=W0611 + +DATA: list[dict[str, Any]] = [ + { + "name": "success only default vrf", + "test": VerifyISISNeighborState, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "neighbors": { + "0168.0000.0111": { + "adjacencies": [ + { + "hostname": "s1-p01", + "circuitId": "83", + "interfaceName": "Ethernet1", + "state": "up", + "lastHelloTime": 1713688408, + "routerIdV4": "1.0.0.111", + } + ] + }, + "0168.0000.0112": { + "adjacencies": [ + { + "hostname": "s1-p02", + "circuitId": "87", + "interfaceName": "Ethernet2", + "state": "up", + "lastHelloTime": 1713688405, + "routerIdV4": "1.0.0.112", + } + ] + }, + } + } + } + } + } + }, + ], + "inputs": None, + "expected": {"result": "success"}, + }, + { + "name": "success different vrfs", + "test": VerifyISISNeighborState, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "neighbors": { + "0168.0000.0111": { + "adjacencies": [ + { + "hostname": "s1-p01", + "circuitId": "83", + "interfaceName": "Ethernet1", + "state": "up", + "lastHelloTime": 1713688408, + "routerIdV4": "1.0.0.111", + } + ] + }, + }, + }, + }, + "customer": { + "isisInstances": { + "CORE-ISIS": { + "neighbors": { + "0168.0000.0112": { + "adjacencies": [ + { + "hostname": "s1-p02", + "circuitId": "87", + "interfaceName": "Ethernet2", + "state": "up", + "lastHelloTime": 1713688405, + "routerIdV4": "1.0.0.112", + } + ] + } + } + } + } + }, + } + } + }, + ], + "inputs": None, + "expected": {"result": "success"}, + }, + { + "name": "failure", + "test": VerifyISISNeighborState, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "neighbors": { + "0168.0000.0111": { + "adjacencies": [ + { + "hostname": "s1-p01", + "circuitId": "83", + "interfaceName": "Ethernet1", + "state": "down", + "lastHelloTime": 1713688408, + "routerIdV4": "1.0.0.111", + } + ] + }, + "0168.0000.0112": { + "adjacencies": [ + { + "hostname": "s1-p02", + "circuitId": "87", + "interfaceName": "Ethernet2", + "state": "up", + "lastHelloTime": 1713688405, + "routerIdV4": "1.0.0.112", + } + ] + }, + } + } + } + } + } + }, + ], + "inputs": None, + "expected": { + "result": "failure", + "messages": ["Some neighbors are not in the correct state (UP): [{'vrf': 'default', 'instance': 'CORE-ISIS', 'neighbor': 's1-p01', 'state': 'down'}]."], + }, + }, + { + "name": "success only default vrf", + "test": VerifyISISNeighborCount, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "interfaces": { + "Loopback0": { + "enabled": True, + "intfLevels": { + "2": { + "ipv4Metric": 10, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "areaProxyBoundary": False, + }, + "Ethernet1": { + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 1, + "linkId": "84", + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": False, + "v4Protection": "link", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + "Ethernet2": { + "enabled": True, + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 1, + "linkId": "88", + "sharedSecretProfile": "", + 
"isisAdjacencies": [], + "passive": False, + "v4Protection": "link", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + } + } + } + } + } + }, + ], + "inputs": { + "interfaces": [ + {"name": "Ethernet1", "level": 2, "count": 1}, + {"name": "Ethernet2", "level": 2, "count": 1}, + ] + }, + "expected": {"result": "success"}, + }, + { + "name": "success VerifyISISInterfaceMode only default vrf", + "test": VerifyISISInterfaceMode, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "interfaces": { + "Loopback0": { + "enabled": True, + "index": 2, + "snpa": "0:0:0:0:0:0", + "mtu": 65532, + "interfaceAddressFamily": "ipv4", + "interfaceType": "loopback", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "areaProxyBoundary": False, + }, + "Ethernet1": { + "enabled": True, + "index": 132, + "snpa": "P2P", + "interfaceType": "point-to-point", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 1, + "linkId": "84", + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": False, + "v4Protection": "link", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + "Ethernet2": { + "enabled": True, + "interfaceType": "broadcast", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 0, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + } + } + } + } + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Loopback0", "mode": "passive"}, + {"name": "Ethernet2", "mode": "passive"}, + {"name": "Ethernet1", "mode": "point-to-point", "vrf": "default"}, + ] + }, + "expected": {"result": "success"}, + }, + { + "name": "failure VerifyISISInterfaceMode default vrf with interface not running passive mode", + "test": VerifyISISInterfaceMode, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "interfaces": { + "Loopback0": { + "enabled": True, + "index": 2, + "snpa": "0:0:0:0:0:0", + "mtu": 65532, + "interfaceAddressFamily": "ipv4", + "interfaceType": "loopback", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "areaProxyBoundary": False, + }, + "Ethernet1": { + "enabled": True, + "index": 132, + "snpa": "P2P", + "interfaceType": "point-to-point", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 1, + "linkId": "84", + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": False, + "v4Protection": "link", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + "Ethernet2": { + "enabled": True, + "interfaceType": "point-to-point", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 0, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": False, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + } + } + } + } + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Loopback0", "mode": "passive"}, + {"name": "Ethernet2", "mode": "passive"}, + {"name": "Ethernet1", "mode": "point-to-point", "vrf": "default"}, + ] 
+ }, + "expected": { + "result": "failure", + "messages": ["Interface Ethernet2 in VRF default is not running in passive mode"], + }, + }, + { + "name": "failure VerifyISISInterfaceMode default vrf with interface not running point-point mode", + "test": VerifyISISInterfaceMode, + "eos_data": [ + { + "vrfs": { + "default": { + "isisInstances": { + "CORE-ISIS": { + "interfaces": { + "Loopback0": { + "enabled": True, + "index": 2, + "snpa": "0:0:0:0:0:0", + "mtu": 65532, + "interfaceAddressFamily": "ipv4", + "interfaceType": "loopback", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "areaProxyBoundary": False, + }, + "Ethernet1": { + "enabled": True, + "index": 132, + "snpa": "P2P", + "interfaceType": "broadcast", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 1, + "linkId": "84", + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": False, + "v4Protection": "link", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + "Ethernet2": { + "enabled": True, + "interfaceType": "broadcast", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 0, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + } + } + } + } + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Loopback0", "mode": "passive"}, + {"name": "Ethernet2", "mode": "passive"}, + {"name": "Ethernet1", "mode": "point-to-point", "vrf": "default"}, + ] + }, + "expected": { + "result": "failure", + "messages": ["Interface Ethernet1 in VRF default is not running in point-to-point reporting broadcast"], + }, + }, + { + "name": "failure VerifyISISInterfaceMode default vrf with interface not running correct VRF mode", + "test": VerifyISISInterfaceMode, + "eos_data": [ + { + "vrfs": { + "fake_vrf": { + "isisInstances": { + "CORE-ISIS": { + "interfaces": { + "Loopback0": { + "enabled": True, + "index": 2, + "snpa": "0:0:0:0:0:0", + "mtu": 65532, + "interfaceAddressFamily": "ipv4", + "interfaceType": "loopback", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "areaProxyBoundary": False, + }, + "Ethernet1": { + "enabled": True, + "index": 132, + "snpa": "P2P", + "interfaceType": "point-to-point", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 1, + "linkId": "84", + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": False, + "v4Protection": "link", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + "Ethernet2": { + "enabled": True, + "interfaceType": "broadcast", + "intfLevels": { + "2": { + "ipv4Metric": 10, + "numAdjacencies": 0, + "sharedSecretProfile": "", + "isisAdjacencies": [], + "passive": True, + "v4Protection": "disabled", + "v6Protection": "disabled", + } + }, + "interfaceSpeed": 1000, + "areaProxyBoundary": False, + }, + } + } + } + } + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Loopback0", "mode": "passive"}, + {"name": "Ethernet2", "mode": "passive"}, + {"name": "Ethernet1", "mode": "point-to-point", "vrf": "default"}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "Interface Loopback0 not found 
in VRF default", + "Interface Ethernet2 not found in VRF default", + "Interface Ethernet1 not found in VRF default", + ], + }, + }, +] diff --git a/tests/units/anta_tests/test_aaa.py b/tests/units/anta_tests/test_aaa.py index f0324c5..40bf82e 100644 --- a/tests/units/anta_tests/test_aaa.py +++ b/tests/units/anta_tests/test_aaa.py @@ -272,6 +272,18 @@ DATA: list[dict[str, Any]] = [ "expected": {"result": "success"}, }, { + "name": "success-skipping-exec", + "test": VerifyAuthzMethods, + "eos_data": [ + { + "commandsAuthzMethods": {"privilege0-15": {"methods": ["group tacacs+", "local"]}}, + "execAuthzMethods": {"exec": {"methods": ["group tacacs+", "local"]}}, + }, + ], + "inputs": {"methods": ["tacacs+", "local"], "types": ["commands"]}, + "expected": {"result": "success"}, + }, + { "name": "failure-commands", "test": VerifyAuthzMethods, "eos_data": [ diff --git a/tests/units/anta_tests/test_avt.py b/tests/units/anta_tests/test_avt.py new file mode 100644 index 0000000..7ef6be3 --- /dev/null +++ b/tests/units/anta_tests/test_avt.py @@ -0,0 +1,581 @@ +# Copyright (c) 2023-2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. +"""Tests for anta.tests.avt.py.""" + +from __future__ import annotations + +from typing import Any + +from anta.tests.avt import VerifyAVTPathHealth, VerifyAVTRole, VerifyAVTSpecificPath +from tests.lib.anta import test # noqa: F401; pylint: disable=unused-import + +DATA: list[dict[str, Any]] = [ + { + "name": "success", + "test": VerifyAVTPathHealth, + "eos_data": [ + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + } + } + }, + "guest": { + "avts": { + "GUEST-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + } + } + }, + "default": { + "avts": { + "CONTROL-PLANE-PROFILE": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + }, + "DEFAULT-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + }, + } + }, + } + } + ], + "inputs": {}, + "expected": {"result": "success"}, + }, + { + "name": "failure-avt-not-configured", + "test": VerifyAVTPathHealth, + "eos_data": [{"vrfs": {}}], + "inputs": {}, + "expected": { + "result": "failure", + "messages": ["Adaptive virtual topology paths are not configured."], + }, + }, + { + "name": "failure-not-active-path", + "test": VerifyAVTPathHealth, + "eos_data": [ + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + 
"flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + } + } + }, + "guest": { + "avts": { + "GUEST-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": False}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + } + } + }, + "default": { + "avts": { + "CONTROL-PLANE-PROFILE": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": False}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + }, + "DEFAULT-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": False}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + }, + } + }, + } + } + ], + "inputs": {}, + "expected": { + "result": "failure", + "messages": [ + "AVT path direct:10 for profile GUEST-AVT-POLICY-DEFAULT in VRF guest is not active.", + "AVT path direct:1 for profile CONTROL-PLANE-PROFILE in VRF default is not active.", + "AVT path direct:10 for profile DEFAULT-AVT-POLICY-DEFAULT in VRF default is not active.", + ], + }, + }, + { + "name": "failure-invalid-path", + "test": VerifyAVTPathHealth, + "eos_data": [ + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": False, "active": True}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + } + } + }, + "guest": { + "avts": { + "GUEST-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": False, "active": True}, + }, + } + } + } + }, + "default": { + "avts": { + "CONTROL-PLANE-PROFILE": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": False, "active": True}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + }, + "DEFAULT-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": False, "active": True}, + }, + } + }, + } + }, + } + } + ], + "inputs": {}, + "expected": { + "result": "failure", + "messages": [ + "AVT path direct:10 for profile DATA-AVT-POLICY-DEFAULT in VRF data is invalid.", + "AVT path direct:8 for profile GUEST-AVT-POLICY-DEFAULT in VRF guest is invalid.", + "AVT path direct:10 for profile CONTROL-PLANE-PROFILE in VRF default is invalid.", + "AVT path direct:8 for profile DEFAULT-AVT-POLICY-DEFAULT in VRF default is invalid.", + ], + }, + }, + { + "name": "failure-not-active-and-invalid", + "test": VerifyAVTPathHealth, + "eos_data": [ + { + "vrfs": 
{ + "data": { + "avts": { + "DATA-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": False, "active": False}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": False}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + } + } + }, + "guest": { + "avts": { + "GUEST-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": False, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": False, "active": False}, + }, + } + } + } + }, + "default": { + "avts": { + "CONTROL-PLANE-PROFILE": { + "avtPaths": { + "direct:9": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:10": { + "flags": {"directPath": True, "valid": False, "active": False}, + }, + "direct:1": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": True, "active": True}, + }, + } + }, + "DEFAULT-AVT-POLICY-DEFAULT": { + "avtPaths": { + "direct:10": { + "flags": {"directPath": True, "valid": True, "active": False}, + }, + "direct:8": { + "flags": {"directPath": True, "valid": False, "active": False}, + }, + } + }, + } + }, + } + } + ], + "inputs": {}, + "expected": { + "result": "failure", + "messages": [ + "AVT path direct:10 for profile DATA-AVT-POLICY-DEFAULT in VRF data is invalid and not active.", + "AVT path direct:1 for profile DATA-AVT-POLICY-DEFAULT in VRF data is not active.", + "AVT path direct:10 for profile GUEST-AVT-POLICY-DEFAULT in VRF guest is invalid.", + "AVT path direct:8 for profile GUEST-AVT-POLICY-DEFAULT in VRF guest is invalid and not active.", + "AVT path direct:10 for profile CONTROL-PLANE-PROFILE in VRF default is invalid and not active.", + "AVT path direct:10 for profile DEFAULT-AVT-POLICY-DEFAULT in VRF default is not active.", + "AVT path direct:8 for profile DEFAULT-AVT-POLICY-DEFAULT in VRF default is invalid and not active.", + ], + }, + }, + { + "name": "success", + "test": VerifyAVTSpecificPath, + "eos_data": [ + { + "vrfs": { + "default": { + "avts": { + "DEFAULT-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + } + } + } + } + } + }, + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:8": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + } + } + } + } + } + }, + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": 
{"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:8": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + } + } + } + } + } + }, + ], + "inputs": { + "avt_paths": [ + {"avt_name": "DEFAULT-AVT-POLICY-CONTROL-PLANE", "destination": "10.101.255.2", "next_hop": "10.101.255.1", "path_type": "multihop"}, + {"avt_name": "DATA-AVT-POLICY-CONTROL-PLANE", "vrf": "data", "destination": "10.101.255.1", "next_hop": "10.101.255.2", "path_type": "direct"}, + {"avt_name": "DATA-AVT-POLICY-CONTROL-PLANE", "vrf": "data", "destination": "10.101.255.1", "next_hop": "10.101.255.2"}, + ] + }, + "expected": {"result": "success"}, + }, + { + "name": "failure-no-peer", + "test": VerifyAVTSpecificPath, + "eos_data": [ + {"vrfs": {}}, + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + } + } + } + } + } + }, + ], + "inputs": { + "avt_paths": [ + {"avt_name": "MGMT-AVT-POLICY-DEFAULT", "vrf": "default", "destination": "10.101.255.2", "next_hop": "10.101.255.1", "path_type": "multihop"}, + {"avt_name": "DATA-AVT-POLICY-CONTROL-PLANE", "vrf": "data", "destination": "10.101.255.1", "next_hop": "10.101.255.2", "path_type": "multihop"}, + ] + }, + "expected": { + "result": "failure", + "messages": ["AVT configuration for peer '10.101.255.2' under topology 'MGMT-AVT-POLICY-DEFAULT' in VRF 'default' is not found."], + }, + }, + { + "name": "failure-no-path-with-correct-next-hop", + "test": VerifyAVTSpecificPath, + "eos_data": [ + { + "vrfs": { + "default": { + "avts": { + "DEFAULT-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + } + } + } + } + } + }, + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + } + } + } + } + } + }, + ], + "inputs": { + "avt_paths": [ + { + "avt_name": "DEFAULT-AVT-POLICY-CONTROL-PLANE", 
+ "vrf": "default", + "destination": "10.101.255.2", + "next_hop": "10.101.255.11", + "path_type": "multihop", + }, + {"avt_name": "DATA-AVT-POLICY-CONTROL-PLANE", "vrf": "data", "destination": "10.101.255.1", "next_hop": "10.101.255.21", "path_type": "direct"}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "No 'multihop' path found with next-hop address '10.101.255.11' for AVT peer '10.101.255.2' under " + "topology 'DEFAULT-AVT-POLICY-CONTROL-PLANE' in VRF 'default'.", + "No 'direct' path found with next-hop address '10.101.255.21' for AVT peer '10.101.255.1' under " + "topology 'DATA-AVT-POLICY-CONTROL-PLANE' in VRF 'data'.", + ], + }, + }, + { + "name": "failure-incorrect-path", + "test": VerifyAVTSpecificPath, + "eos_data": [ + { + "vrfs": { + "default": { + "avts": { + "DEFAULT-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "direct:9": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:1": {"flags": {"directPath": True, "valid": False, "active": False}, "nexthopAddr": "10.101.255.1"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": False}, "nexthopAddr": "10.101.255.1"}, + } + } + } + } + } + }, + { + "vrfs": { + "data": { + "avts": { + "DATA-AVT-POLICY-CONTROL-PLANE": { + "avtPaths": { + "direct:10": {"flags": {"directPath": True, "valid": True, "active": True}, "nexthopAddr": "10.101.255.1"}, + "direct:9": {"flags": {"directPath": True, "valid": False, "active": True}, "nexthopAddr": "10.101.255.1"}, + "multihop:1": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + "multihop:3": {"flags": {"directPath": False, "valid": True, "active": True}, "nexthopAddr": "10.101.255.2"}, + } + } + } + } + } + }, + ], + "inputs": { + "avt_paths": [ + { + "avt_name": "DEFAULT-AVT-POLICY-CONTROL-PLANE", + "vrf": "default", + "destination": "10.101.255.2", + "next_hop": "10.101.255.1", + "path_type": "multihop", + }, + {"avt_name": "DATA-AVT-POLICY-CONTROL-PLANE", "vrf": "data", "destination": "10.101.255.1", "next_hop": "10.101.255.1", "path_type": "direct"}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "AVT path 'multihop:3' for topology 'DEFAULT-AVT-POLICY-CONTROL-PLANE' in VRF 'default' is inactive.", + "AVT path 'direct:9' for topology 'DATA-AVT-POLICY-CONTROL-PLANE' in VRF 'data' is invalid.", + ], + }, + }, + { + "name": "success", + "test": VerifyAVTRole, + "eos_data": [{"role": "edge"}], + "inputs": {"role": "edge"}, + "expected": {"result": "success"}, + }, + { + "name": "failure-incorrect-role", + "test": VerifyAVTRole, + "eos_data": [{"role": "transit"}], + "inputs": {"role": "edge"}, + "expected": {"result": "failure", "messages": ["Expected AVT role as `edge`, but found `transit` instead."]}, + }, +] diff --git a/tests/units/anta_tests/test_configuration.py b/tests/units/anta_tests/test_configuration.py index 0444db6..7f198a3 100644 --- a/tests/units/anta_tests/test_configuration.py +++ b/tests/units/anta_tests/test_configuration.py @@ -7,7 +7,7 @@ from __future__ import annotations from typing import Any -from anta.tests.configuration import VerifyRunningConfigDiffs, VerifyZeroTouch +from anta.tests.configuration import VerifyRunningConfigDiffs, VerifyRunningConfigLines, VerifyZeroTouch from tests.lib.anta import test # noqa: F401; pylint: disable=W0611 DATA: list[dict[str, Any]] = [ @@ -32,5 +32,42 @@ DATA: 
list[dict[str, Any]] = [ "inputs": None, "expected": {"result": "success"}, }, - {"name": "failure", "test": VerifyRunningConfigDiffs, "eos_data": ["blah blah"], "inputs": None, "expected": {"result": "failure", "messages": ["blah blah"]}}, + { + "name": "failure", + "test": VerifyRunningConfigDiffs, + "eos_data": ["blah blah"], + "inputs": None, + "expected": {"result": "failure", "messages": ["blah blah"]}, + }, + { + "name": "success", + "test": VerifyRunningConfigLines, + "eos_data": ["blah blah"], + "inputs": {"regex_patterns": ["blah"]}, + "expected": {"result": "success"}, + }, + { + "name": "success", + "test": VerifyRunningConfigLines, + "eos_data": ["enable password something\nsome other line"], + "inputs": {"regex_patterns": ["^enable password .*$", "^.*other line$"]}, + "expected": {"result": "success"}, + }, + { + "name": "failure", + "test": VerifyRunningConfigLines, + "eos_data": ["enable password something\nsome other line"], + "inputs": {"regex_patterns": ["bla", "bleh"]}, + "expected": {"result": "failure", "messages": ["Following patterns were not found: 'bla','bleh'"]}, + }, + { + "name": "failure-invalid-regex", + "test": VerifyRunningConfigLines, + "eos_data": ["enable password something\nsome other line"], + "inputs": {"regex_patterns": ["["]}, + "expected": { + "result": "error", + "messages": ["1 validation error for Input\nregex_patterns.0\n Value error, Invalid regex: unterminated character set at position 0"], + }, + }, ] diff --git a/tests/units/anta_tests/test_field_notices.py b/tests/units/anta_tests/test_field_notices.py index 66e7801..3cb7286 100644 --- a/tests/units/anta_tests/test_field_notices.py +++ b/tests/units/anta_tests/test_field_notices.py @@ -21,7 +21,7 @@ DATA: list[dict[str, Any]] = [ "modelName": "DCS-7280QRA-C36S", "details": { "deviations": [], - "components": [{"name": "Aboot", "version": "Aboot-veos-8.0.0-3255441"}], + "components": [{"name": "Aboot", "version": "Aboot-veos-8.0.0-3255441"}, {"name": "NotAboot", "version": "Aboot-veos-8.0.0-3255441"}], }, }, ], @@ -129,6 +129,26 @@ DATA: list[dict[str, Any]] = [ }, }, { + "name": "failure-no-aboot-component", + "test": VerifyFieldNotice44Resolution, + "eos_data": [ + { + "imageFormatVersion": "1.0", + "uptime": 1109144.35, + "modelName": "DCS-7280QRA-C36S", + "details": { + "deviations": [], + "components": [{"name": "NotAboot", "version": "Aboot-veos-4.0.1-3255441"}], + }, + }, + ], + "inputs": None, + "expected": { + "result": "failure", + "messages": ["Aboot component not found"], + }, + }, + { "name": "success-JPE", "test": VerifyFieldNotice72Resolution, "eos_data": [ diff --git a/tests/units/anta_tests/test_interfaces.py b/tests/units/anta_tests/test_interfaces.py index 58f568f..b8cf493 100644 --- a/tests/units/anta_tests/test_interfaces.py +++ b/tests/units/anta_tests/test_interfaces.py @@ -1,7 +1,7 @@ # Copyright (c) 2023-2024 Arista Networks, Inc. # Use of this source code is governed by the Apache License 2.0 # that can be found in the LICENSE file. 
-"""Test inputs for anta.tests.hardware.""" +"""Test inputs for anta.tests.interfaces.""" # pylint: disable=C0302 from __future__ import annotations @@ -14,6 +14,7 @@ from anta.tests.interfaces import ( VerifyInterfaceErrDisabled, VerifyInterfaceErrors, VerifyInterfaceIPv4, + VerifyInterfacesSpeed, VerifyInterfacesStatus, VerifyInterfaceUtilization, VerifyIPProxyARP, @@ -1354,6 +1355,14 @@ DATA: list[dict[str, Any]] = [ "lineProtocolStatus": "up", "mtu": 65535, }, + # Checking not loopbacks are skipped + "Ethernet666": { + "name": "Ethernet666", + "interfaceStatus": "connected", + "interfaceAddress": {"ipAddr": {"maskLen": 32, "address": "6.6.6.6"}}, + "ipv4Routable240": False, + "lineProtocolStatus": "up", + }, }, }, ], @@ -1733,7 +1742,7 @@ DATA: list[dict[str, Any]] = [ }, }, ], - "inputs": {"mtu": 9214}, + "inputs": {"mtu": 9214, "ignored_interfaces": ["Loopback", "Port-Channel", "Management", "Vxlan"], "specific_mtu": [{"Ethernet10": 9214}]}, "expected": {"result": "success"}, }, { @@ -2157,4 +2166,279 @@ DATA: list[dict[str, Any]] = [ "inputs": {"mac_address": "00:1c:73:00:dc:01"}, "expected": {"result": "failure", "messages": ["IP virtual router MAC address `00:1c:73:00:dc:01` is not configured."]}, }, + { + "name": "success", + "test": VerifyInterfacesSpeed, + "eos_data": [ + { + "interfaces": { + "Ethernet1": { + "bandwidth": 1000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 2, + }, + "Ethernet1/1/2": { + "bandwidth": 1000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 2, + }, + "Ethernet3": { + "bandwidth": 100000000000, + "autoNegotiate": "success", + "duplex": "duplexFull", + "lanes": 8, + }, + "Ethernet4": { + "bandwidth": 2500000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 8, + }, + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Ethernet1", "auto": False, "speed": 1}, + {"name": "Ethernet1", "auto": False, "speed": 1, "lanes": 2}, + {"name": "Ethernet1/1/2", "auto": False, "speed": 1}, + {"name": "Ethernet3", "auto": True, "speed": 100}, + {"name": "Ethernet3", "auto": True, "speed": 100, "lanes": 8}, + {"name": "Ethernet3", "auto": True, "speed": 100}, + {"name": "Ethernet4", "auto": False, "speed": 2.5}, + ] + }, + "expected": {"result": "success"}, + }, + { + "name": "failure-incorrect-speed", + "test": VerifyInterfacesSpeed, + "eos_data": [ + { + "interfaces": { + "Ethernet1": { + "bandwidth": 100000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 2, + }, + "Ethernet1/1/1": { + "bandwidth": 100000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 2, + }, + "Ethernet3": { + "bandwidth": 10000000000, + "autoNegotiate": "success", + "duplex": "duplexFull", + "lanes": 8, + }, + "Ethernet4": { + "bandwidth": 25000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 8, + }, + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Ethernet1", "auto": False, "speed": 1}, + {"name": "Ethernet1/1/1", "auto": False, "speed": 1}, + {"name": "Ethernet3", "auto": True, "speed": 100}, + {"name": "Ethernet4", "auto": False, "speed": 2.5}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "For interface Ethernet1:\nExpected `1Gbps` as the speed, but found `100Gbps` instead.", + "For interface Ethernet1/1/1:\nExpected `1Gbps` as the speed, but found `100Gbps` instead.", + "For interface Ethernet3:\nExpected `100Gbps` as the speed, but found `10Gbps` instead.", + "For interface Ethernet4:\nExpected `2.5Gbps` as the 
speed, but found `25Gbps` instead.", + ], + }, + }, + { + "name": "failure-incorrect-mode", + "test": VerifyInterfacesSpeed, + "eos_data": [ + { + "interfaces": { + "Ethernet1": { + "bandwidth": 1000000000, + "autoNegotiate": "unknown", + "duplex": "duplexHalf", + "lanes": 2, + }, + "Ethernet1/2/2": { + "bandwidth": 1000000000, + "autoNegotiate": "unknown", + "duplex": "duplexHalf", + "lanes": 2, + }, + "Ethernet3": { + "bandwidth": 100000000000, + "autoNegotiate": "success", + "duplex": "duplexHalf", + "lanes": 8, + }, + "Ethernet4": { + "bandwidth": 2500000000, + "autoNegotiate": "unknown", + "duplex": "duplexHalf", + "lanes": 8, + }, + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Ethernet1", "auto": False, "speed": 1}, + {"name": "Ethernet1/2/2", "auto": False, "speed": 1}, + {"name": "Ethernet3", "auto": True, "speed": 100}, + {"name": "Ethernet3", "auto": True, "speed": 100, "lanes": 8}, + {"name": "Ethernet4", "auto": False, "speed": 2.5}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "For interface Ethernet1:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.", + "For interface Ethernet1/2/2:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.", + "For interface Ethernet3:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.", + "For interface Ethernet3:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.", + "For interface Ethernet4:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.", + ], + }, + }, + { + "name": "failure-incorrect-lane", + "test": VerifyInterfacesSpeed, + "eos_data": [ + { + "interfaces": { + "Ethernet1": { + "bandwidth": 1000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 4, + }, + "Ethernet2": { + "bandwidth": 10000000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 4, + }, + "Ethernet3": { + "bandwidth": 100000000000, + "autoNegotiate": "success", + "duplex": "duplexFull", + "lanes": 4, + }, + "Ethernet4": { + "bandwidth": 2500000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 6, + }, + "Ethernet4/1/1": { + "bandwidth": 2500000000, + "autoNegotiate": "unknown", + "duplex": "duplexFull", + "lanes": 6, + }, + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Ethernet1", "auto": False, "speed": 1, "lanes": 2}, + {"name": "Ethernet3", "auto": True, "speed": 100, "lanes": 8}, + {"name": "Ethernet4", "auto": False, "speed": 2.5, "lanes": 4}, + {"name": "Ethernet4/1/1", "auto": False, "speed": 2.5, "lanes": 4}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "For interface Ethernet1:\nExpected `2` as the lanes, but found `4` instead.", + "For interface Ethernet3:\nExpected `8` as the lanes, but found `4` instead.", + "For interface Ethernet4:\nExpected `4` as the lanes, but found `6` instead.", + "For interface Ethernet4/1/1:\nExpected `4` as the lanes, but found `6` instead.", + ], + }, + }, + { + "name": "failure-all-type", + "test": VerifyInterfacesSpeed, + "eos_data": [ + { + "interfaces": { + "Ethernet1": { + "bandwidth": 10000000000, + "autoNegotiate": "unknown", + "duplex": "duplexHalf", + "lanes": 4, + }, + "Ethernet2/1/2": { + "bandwidth": 1000000000, + "autoNegotiate": "unknown", + "duplex": "duplexHalf", + "lanes": 2, + }, + "Ethernet3": { + "bandwidth": 10000000000, + "autoNegotiate": "unknown", + "duplex": "duplexHalf", + "lanes": 6, + }, + "Ethernet4": { + "bandwidth": 25000000000, + "autoNegotiate": "unknown", + 
"duplex": "duplexHalf", + "lanes": 4, + }, + } + } + ], + "inputs": { + "interfaces": [ + {"name": "Ethernet1", "auto": False, "speed": 1}, + {"name": "Ethernet1", "auto": False, "speed": 1, "lanes": 2}, + {"name": "Ethernet2/1/2", "auto": False, "speed": 10}, + {"name": "Ethernet3", "auto": True, "speed": 1}, + {"name": "Ethernet3", "auto": True, "speed": 100, "lanes": 8}, + {"name": "Ethernet3", "auto": True, "speed": 100}, + {"name": "Ethernet4", "auto": False, "speed": 2.5}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "For interface Ethernet1:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.\n" + "Expected `1Gbps` as the speed, but found `10Gbps` instead.", + "For interface Ethernet1:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.\n" + "Expected `1Gbps` as the speed, but found `10Gbps` instead.\n" + "Expected `2` as the lanes, but found `4` instead.", + "For interface Ethernet2/1/2:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.\n" + "Expected `10Gbps` as the speed, but found `1Gbps` instead.", + "For interface Ethernet3:\nExpected `success` as the auto negotiation, but found `unknown` instead.\n" + "Expected `duplexFull` as the duplex mode, but found `duplexHalf` instead.", + "For interface Ethernet3:\nExpected `success` as the auto negotiation, but found `unknown` instead.\n" + "Expected `duplexFull` as the duplex mode, but found `duplexHalf` instead.\n" + "Expected `100Gbps` as the speed, but found `10Gbps` instead.\n" + "Expected `8` as the lanes, but found `6` instead.", + "For interface Ethernet3:\nExpected `success` as the auto negotiation, but found `unknown` instead.\n" + "Expected `duplexFull` as the duplex mode, but found `duplexHalf` instead.\n" + "Expected `100Gbps` as the speed, but found `10Gbps` instead.", + "For interface Ethernet4:\nExpected `duplexFull` as the duplex mode, but found `duplexHalf` instead.\n" + "Expected `2.5Gbps` as the speed, but found `25Gbps` instead.", + ], + }, + }, ] diff --git a/tests/units/anta_tests/test_logging.py b/tests/units/anta_tests/test_logging.py index 1e8ee3d..d46c865 100644 --- a/tests/units/anta_tests/test_logging.py +++ b/tests/units/anta_tests/test_logging.py @@ -206,7 +206,9 @@ DATA: list[dict[str, Any]] = [ "eos_data": [ "", "2023-05-10T15:41:44.680813-05:00 NW-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: " - "Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingTimestamp validation\n", + "Message from arista on command-api (10.22.1.107): ANTA VerifyLoggingTimestamp validation\n" + "2023-05-10T15:42:44.680813-05:00 NW-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: " + "Other log\n", ], "inputs": None, "expected": {"result": "success"}, @@ -223,6 +225,16 @@ DATA: list[dict[str, Any]] = [ "expected": {"result": "failure", "messages": ["Logs are not generated with the appropriate timestamp format"]}, }, { + "name": "failure-no-matching-log", + "test": VerifyLoggingTimestamp, + "eos_data": [ + "", + "May 10 13:54:22 NE-CORE.example.org ConfigAgent: %SYS-6-LOGMSG_INFO: Message from arista on command-api (10.22.1.107): BLAH\n", + ], + "inputs": None, + "expected": {"result": "failure", "messages": ["Logs are not generated with the appropriate timestamp format"]}, + }, + { "name": "success", "test": VerifyLoggingAccounting, "eos_data": ["2023 May 10 15:50:31 arista command-api 10.22.1.107 stop service=shell priv-lvl=15 cmd=show aaa accounting logs | tail\n"], diff --git 
a/tests/units/anta_tests/test_path_selection.py b/tests/units/anta_tests/test_path_selection.py new file mode 100644 index 0000000..c5fb079 --- /dev/null +++ b/tests/units/anta_tests/test_path_selection.py @@ -0,0 +1,327 @@ +# Copyright (c) 2023-2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. +"""Tests for anta.tests.path_selection.py.""" + +from __future__ import annotations + +from typing import Any + +from anta.tests.path_selection import VerifyPathsHealth, VerifySpecificPath +from tests.lib.anta import test # noqa: F401; pylint: disable=W0611 + +DATA: list[dict[str, Any]] = [ + { + "name": "success", + "test": VerifyPathsHealth, + "eos_data": [ + { + "dpsPeers": { + "10.255.0.1": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path3": {"state": "routeResolved", "dpsSessions": {"0": {"active": True}}}, + }, + }, + "mpls": { + "dpsPaths": { + "path4": {"state": "ipsecEstablished", "dpsSessions": {"0": {"active": True}}}, + }, + }, + }, + }, + "10.255.0.2": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path1": {"state": "ipsecEstablished", "dpsSessions": {"0": {"active": True}}}, + }, + }, + "mpls": { + "dpsPaths": { + "path2": {"state": "routeResolved", "dpsSessions": {"0": {"active": True}}}, + }, + }, + }, + }, + } + }, + ], + "inputs": {}, + "expected": {"result": "success"}, + }, + { + "name": "failure-no-peer", + "test": VerifyPathsHealth, + "eos_data": [ + {"dpsPeers": {}}, + ], + "inputs": {}, + "expected": {"result": "failure", "messages": ["No path configured for router path-selection."]}, + }, + { + "name": "failure-not-established", + "test": VerifyPathsHealth, + "eos_data": [ + { + "dpsPeers": { + "10.255.0.1": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path3": {"state": "ipsecPending", "dpsSessions": {"0": {"active": False}}}, + }, + }, + "mpls": { + "dpsPaths": { + "path4": {"state": "ipsecPending", "dpsSessions": {"0": {"active": False}}}, + }, + }, + }, + }, + "10.255.0.2": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path1": {"state": "ipsecEstablished", "dpsSessions": {"0": {"active": True}}}, + }, + }, + "mpls": { + "dpsPaths": { + "path2": {"state": "ipsecPending", "dpsSessions": {"0": {"active": False}}}, + }, + }, + }, + }, + } + }, + ], + "inputs": {}, + "expected": { + "result": "failure", + "messages": [ + "Path state for peer 10.255.0.1 in path-group internet is `ipsecPending`.", + "Path state for peer 10.255.0.1 in path-group mpls is `ipsecPending`.", + "Path state for peer 10.255.0.2 in path-group mpls is `ipsecPending`.", + ], + }, + }, + { + "name": "failure-inactive", + "test": VerifyPathsHealth, + "eos_data": [ + { + "dpsPeers": { + "10.255.0.1": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path3": {"state": "routeResolved", "dpsSessions": {"0": {"active": False}}}, + }, + }, + "mpls": { + "dpsPaths": { + "path4": {"state": "routeResolved", "dpsSessions": {"0": {"active": False}}}, + }, + }, + }, + }, + "10.255.0.2": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path1": {"state": "routeResolved", "dpsSessions": {"0": {"active": True}}}, + }, + }, + "mpls": { + "dpsPaths": { + "path2": {"state": "routeResolved", "dpsSessions": {"0": {"active": False}}}, + }, + }, + }, + }, + } + }, + ], + "inputs": {}, + "expected": { + "result": "failure", + "messages": [ + "Telemetry state for peer 10.255.0.1 in path-group internet is `inactive`.", + "Telemetry state for peer 10.255.0.1 in path-group mpls is `inactive`.", + "Telemetry 
state for peer 10.255.0.2 in path-group mpls is `inactive`.", + ], + }, + }, + { + "name": "success", + "test": VerifySpecificPath, + "eos_data": [ + { + "dpsPeers": { + "10.255.0.1": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path3": { + "state": "ipsecEstablished", + "source": "172.18.13.2", + "destination": "172.18.15.2", + "dpsSessions": {"0": {"active": True}}, + } + } + } + } + } + } + }, + { + "dpsPeers": { + "10.255.0.2": { + "dpsGroups": { + "mpls": { + "dpsPaths": { + "path2": { + "state": "ipsecEstablished", + "source": "172.18.3.2", + "destination": "172.18.5.2", + "dpsSessions": {"0": {"active": True}}, + } + } + } + } + } + } + }, + ], + "inputs": { + "paths": [ + {"peer": "10.255.0.1", "path_group": "internet", "source_address": "172.18.3.2", "destination_address": "172.18.5.2"}, + {"peer": "10.255.0.2", "path_group": "mpls", "source_address": "172.18.13.2", "destination_address": "172.18.15.2"}, + ] + }, + "expected": {"result": "success"}, + }, + { + "name": "failure-no-peer", + "test": VerifySpecificPath, + "eos_data": [ + {"dpsPeers": {}}, + {"dpsPeers": {}}, + ], + "inputs": { + "paths": [ + {"peer": "10.255.0.1", "path_group": "internet", "source_address": "172.18.3.2", "destination_address": "172.18.5.2"}, + {"peer": "10.255.0.2", "path_group": "mpls", "source_address": "172.18.13.2", "destination_address": "172.18.15.2"}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "Path `peer: 10.255.0.1 source: 172.18.3.2 destination: 172.18.5.2` is not configured for path-group `internet`.", + "Path `peer: 10.255.0.2 source: 172.18.13.2 destination: 172.18.15.2` is not configured for path-group `mpls`.", + ], + }, + }, + { + "name": "failure-not-established", + "test": VerifySpecificPath, + "eos_data": [ + { + "dpsPeers": { + "10.255.0.1": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path3": {"state": "ipsecPending", "source": "172.18.3.2", "destination": "172.18.5.2", "dpsSessions": {"0": {"active": True}}} + } + } + } + } + } + }, + { + "dpsPeers": { + "10.255.0.2": { + "dpsGroups": { + "mpls": { + "dpsPaths": { + "path4": { + "state": "ipsecPending", + "source": "172.18.13.2", + "destination": "172.18.15.2", + "dpsSessions": {"0": {"active": False}}, + } + } + } + } + } + } + }, + ], + "inputs": { + "paths": [ + {"peer": "10.255.0.1", "path_group": "internet", "source_address": "172.18.3.2", "destination_address": "172.18.5.2"}, + {"peer": "10.255.0.2", "path_group": "mpls", "source_address": "172.18.13.2", "destination_address": "172.18.15.2"}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "Path state for `peer: 10.255.0.1 source: 172.18.3.2 destination: 172.18.5.2` in path-group internet is `ipsecPending`.", + "Path state for `peer: 10.255.0.2 source: 172.18.13.2 destination: 172.18.15.2` in path-group mpls is `ipsecPending`.", + ], + }, + }, + { + "name": "failure-inactive", + "test": VerifySpecificPath, + "eos_data": [ + { + "dpsPeers": { + "10.255.0.1": { + "dpsGroups": { + "internet": { + "dpsPaths": { + "path3": {"state": "routeResolved", "source": "172.18.3.2", "destination": "172.18.5.2", "dpsSessions": {"0": {"active": False}}} + } + } + } + } + } + }, + { + "dpsPeers": { + "10.255.0.2": { + "dpsGroups": { + "mpls": { + "dpsPaths": { + "path4": { + "state": "routeResolved", + "source": "172.18.13.2", + "destination": "172.18.15.2", + "dpsSessions": {"0": {"active": False}}, + } + } + } + } + } + } + }, + ], + "inputs": { + "paths": [ + {"peer": "10.255.0.1", "path_group": "internet", "source_address": 
"172.18.3.2", "destination_address": "172.18.5.2"}, + {"peer": "10.255.0.2", "path_group": "mpls", "source_address": "172.18.13.2", "destination_address": "172.18.15.2"}, + ] + }, + "expected": { + "result": "failure", + "messages": [ + "Telemetry state for path `peer: 10.255.0.1 source: 172.18.3.2 destination: 172.18.5.2` in path-group internet is `inactive`.", + "Telemetry state for path `peer: 10.255.0.2 source: 172.18.13.2 destination: 172.18.15.2` in path-group mpls is `inactive`.", + ], + }, + }, +] diff --git a/tests/units/anta_tests/test_ptp.py b/tests/units/anta_tests/test_ptp.py index ef42a58..8f4c77f 100644 --- a/tests/units/anta_tests/test_ptp.py +++ b/tests/units/anta_tests/test_ptp.py @@ -42,11 +42,11 @@ DATA: list[dict[str, Any]] = [ "expected": {"result": "failure", "messages": ["The device is not configured as a PTP Boundary Clock: 'ptpDisabled'"]}, }, { - "name": "error", + "name": "skipped", "test": VerifyPtpModeStatus, "eos_data": [{"ptpIntfSummaries": {}}], "inputs": None, - "expected": {"result": "error", "messages": ["'ptpMode' variable is not present in the command output"]}, + "expected": {"result": "skipped", "messages": ["PTP is not configured"]}, }, { "name": "success", @@ -104,11 +104,11 @@ DATA: list[dict[str, Any]] = [ }, }, { - "name": "error", + "name": "skipped", "test": VerifyPtpGMStatus, "eos_data": [{"ptpIntfSummaries": {}}], "inputs": {"gmid": "0xec:46:70:ff:fe:00:ff:a8"}, - "expected": {"result": "error", "messages": ["'ptpClockSummary' variable is not present in the command output"]}, + "expected": {"result": "skipped", "messages": ["PTP is not configured"]}, }, { "name": "success", @@ -161,14 +161,14 @@ DATA: list[dict[str, Any]] = [ "expected": {"result": "failure", "messages": ["The device lock is more than 60s old: 157s"]}, }, { - "name": "error", + "name": "skipped", "test": VerifyPtpLockStatus, "eos_data": [{"ptpIntfSummaries": {}}], "inputs": None, "expected": { - "result": "error", + "result": "skipped", "messages": [ - "'ptpClockSummary' variable is not present in the command output", + "PTP is not configured", ], }, }, diff --git a/tests/units/anta_tests/test_security.py b/tests/units/anta_tests/test_security.py index 4c28541..3a732bd 100644 --- a/tests/units/anta_tests/test_security.py +++ b/tests/units/anta_tests/test_security.py @@ -34,7 +34,14 @@ DATA: list[dict[str, Any]] = [ "expected": {"result": "success"}, }, { - "name": "failure", + "name": "error-missing-ssh-status", + "test": VerifySSHStatus, + "eos_data": ["SSH per host connection limit is 20\nFIPS status: disabled\n\n"], + "inputs": None, + "expected": {"result": "error", "messages": ["Could not find SSH status in returned output."]}, + }, + { + "name": "failure-ssh-disabled", "test": VerifySSHStatus, "eos_data": ["SSHD status for Default VRF is enabled\nSSH connection limit is 50\nSSH per host connection limit is 20\nFIPS status: disabled\n\n"], "inputs": None, @@ -574,6 +581,40 @@ DATA: list[dict[str, Any]] = [ }, }, { + "name": "error-wrong-input-rsa", + "test": VerifyAPISSLCertificate, + "eos_data": [], + "inputs": { + "certificates": [ + { + "certificate_name": "ARISTA_ROOT_CA.crt", + "expiry_threshold": 30, + "common_name": "Arista Networks Internal IT Root Cert Authority", + "encryption_algorithm": "RSA", + "key_size": 256, + }, + ] + }, + "expected": {"result": "error", "messages": ["Allowed sizes are (2048, 3072, 4096)."]}, + }, + { + "name": "error-wrong-input-ecdsa", + "test": VerifyAPISSLCertificate, + "eos_data": [], + "inputs": { + "certificates": [ + { + 
"certificate_name": "ARISTA_SIGNING_CA.crt", + "expiry_threshold": 30, + "common_name": "AristaIT-ICA ECDSA Issuing Cert Authority", + "encryption_algorithm": "ECDSA", + "key_size": 2048, + }, + ] + }, + "expected": {"result": "error", "messages": ["Allowed sizes are (256, 384, 512)."]}, + }, + { "name": "success", "test": VerifyBannerLogin, "eos_data": [ diff --git a/tests/units/anta_tests/test_services.py b/tests/units/anta_tests/test_services.py index ed86e10..61c44d0 100644 --- a/tests/units/anta_tests/test_services.py +++ b/tests/units/anta_tests/test_services.py @@ -127,10 +127,12 @@ DATA: list[dict[str, Any]] = [ "name": "success", "test": VerifyErrdisableRecovery, "eos_data": [ + # Adding empty line on purpose to verify they are skipped """ Errdisable Reason Timer Status Timer Interval ------------------------------ ----------------- -------------- acl Enabled 300 + bpduguard Enabled 300 arp-inspection Enabled 30 """ diff --git a/tests/units/anta_tests/test_software.py b/tests/units/anta_tests/test_software.py index 84e90e8..e46f526 100644 --- a/tests/units/anta_tests/test_software.py +++ b/tests/units/anta_tests/test_software.py @@ -79,6 +79,7 @@ DATA: list[dict[str, Any]] = [ "inputs": {"versions": ["v1.17.1", "v1.18.1"]}, "expected": {"result": "failure", "messages": ["device is running TerminAttr version v1.17.0 and is not in the allowed list: ['v1.17.1', 'v1.18.1']"]}, }, + # TODO: add a test with a real extension? { "name": "success-no-extensions", "test": VerifyEOSExtensions, @@ -90,6 +91,16 @@ DATA: list[dict[str, Any]] = [ "expected": {"result": "success"}, }, { + "name": "success-empty-extension", + "test": VerifyEOSExtensions, + "eos_data": [ + {"extensions": {}, "extensionStoredDir": "flash:", "warnings": ["No extensions are available"]}, + {"extensions": [""]}, + ], + "inputs": None, + "expected": {"result": "success"}, + }, + { "name": "failure", "test": VerifyEOSExtensions, "eos_data": [ diff --git a/tests/units/cli/debug/test_commands.py b/tests/units/cli/debug/test_commands.py index 76c3648..039e09e 100644 --- a/tests/units/cli/debug/test_commands.py +++ b/tests/units/cli/debug/test_commands.py @@ -25,6 +25,7 @@ if TYPE_CHECKING: pytest.param("show version", None, "1", None, "dummy", False, id="version"), pytest.param("show version", None, None, 3, "dummy", False, id="revision"), pytest.param("undefined", None, None, None, "dummy", True, id="command fails"), + pytest.param("undefined", None, None, None, "doesnotexist", True, id="Device does not exist"), ], ) def test_run_cmd( diff --git a/tests/units/cli/exec/test_utils.py b/tests/units/cli/exec/test_utils.py index 455568b..ad1a78a 100644 --- a/tests/units/cli/exec/test_utils.py +++ b/tests/units/cli/exec/test_utils.py @@ -11,7 +11,7 @@ from unittest.mock import call, patch import pytest from anta.cli.exec.utils import ( - clear_counters_utils, + clear_counters, ) from anta.models import AntaCommand @@ -69,14 +69,14 @@ if TYPE_CHECKING: ), ], ) -async def test_clear_counters_utils( +async def test_clear_counters( caplog: pytest.LogCaptureFixture, test_inventory: AntaInventory, inventory_state: dict[str, Any], per_device_command_output: dict[str, Any], tags: set[str] | None, ) -> None: - """Test anta.cli.exec.utils.clear_counters_utils.""" + """Test anta.cli.exec.utils.clear_counters.""" async def mock_connect_inventory() -> None: """Mock connect_inventory coroutine.""" @@ -85,20 +85,19 @@ async def test_clear_counters_utils( device.established = inventory_state[name].get("established", device.is_online) 
device.hw_model = inventory_state[name].get("hw_model", "dummy") - async def dummy_collect(self: AntaDevice, command: AntaCommand) -> None: + async def collect(self: AntaDevice, command: AntaCommand, *args: Any, **kwargs: Any) -> None: # noqa: ARG001, ANN401 #pylint: disable=unused-argument """Mock collect coroutine.""" command.output = per_device_command_output.get(self.name, "") # Need to patch the child device class with ( - patch("anta.device.AsyncEOSDevice.collect", side_effect=dummy_collect, autospec=True) as mocked_collect, + patch("anta.device.AsyncEOSDevice.collect", side_effect=collect, autospec=True) as mocked_collect, patch( "anta.inventory.AntaInventory.connect_inventory", side_effect=mock_connect_inventory, ) as mocked_connect_inventory, ): - mocked_collect.side_effect = dummy_collect - await clear_counters_utils(test_inventory, tags=tags) + await clear_counters(test_inventory, tags=tags) mocked_connect_inventory.assert_awaited_once() devices_established = test_inventory.get_inventory(established_only=True, tags=tags).devices @@ -117,6 +116,7 @@ async def test_clear_counters_utils( output=per_device_command_output.get(device.name, ""), errors=[], ), + collection_id=None, ), ) if device.hw_model not in ["cEOSLab", "vEOS-lab"]: @@ -130,6 +130,7 @@ async def test_clear_counters_utils( ofmt="json", output=per_device_command_output.get(device.name, ""), ), + collection_id=None, ), ) mocked_collect.assert_has_awaits(calls) diff --git a/tests/units/cli/get/test__init__.py b/tests/units/cli/get/test__init__.py index a6a0c3c..1ef65c2 100644 --- a/tests/units/cli/get/test__init__.py +++ b/tests/units/cli/get/test__init__.py @@ -7,7 +7,7 @@ from __future__ import annotations from typing import TYPE_CHECKING -from anta.cli import anta +from anta.cli._main import anta from anta.cli.utils import ExitCode if TYPE_CHECKING: diff --git a/tests/units/cli/get/test_commands.py b/tests/units/cli/get/test_commands.py index 9edc7c3..e0b17a0 100644 --- a/tests/units/cli/get/test_commands.py +++ b/tests/units/cli/get/test_commands.py @@ -13,7 +13,7 @@ from unittest.mock import ANY, patch import pytest from cvprac.cvp_client_errors import CvpApiError -from anta.cli import anta +from anta.cli._main import anta from anta.cli.utils import ExitCode if TYPE_CHECKING: diff --git a/tests/units/cli/get/test_utils.py b/tests/units/cli/get/test_utils.py index 0dce335..7ce85dc 100644 --- a/tests/units/cli/get/test_utils.py +++ b/tests/units/cli/get/test_utils.py @@ -81,14 +81,15 @@ def test_create_inventory_from_cvp(tmp_path: Path, inventory: list[dict[str, Any @pytest.mark.parametrize( - ("inventory_filename", "ansible_group", "expected_raise", "expected_inv_length"), + ("inventory_filename", "ansible_group", "expected_raise", "expected_log", "expected_inv_length"), [ - pytest.param("ansible_inventory.yml", None, nullcontext(), 7, id="no group"), - pytest.param("ansible_inventory.yml", "ATD_LEAFS", nullcontext(), 4, id="group found"), + pytest.param("ansible_inventory.yml", None, nullcontext(), None, 7, id="no group"), + pytest.param("ansible_inventory.yml", "ATD_LEAFS", nullcontext(), None, 4, id="group found"), pytest.param( "ansible_inventory.yml", "DUMMY", pytest.raises(ValueError, match="Group DUMMY not found in Ansible inventory"), + None, 0, id="group not found", ), @@ -96,6 +97,7 @@ def test_create_inventory_from_cvp(tmp_path: Path, inventory: list[dict[str, Any "empty_ansible_inventory.yml", None, pytest.raises(ValueError, match="Ansible inventory .* is empty"), + None, 0, id="empty inventory", ), @@ 
-103,19 +105,39 @@ def test_create_inventory_from_cvp(tmp_path: Path, inventory: list[dict[str, Any "wrong_ansible_inventory.yml", None, pytest.raises(ValueError, match="Could not parse"), + None, 0, id="os error inventory", ), + pytest.param( + "ansible_inventory_with_vault.yml", + None, + pytest.raises(ValueError, match="Could not parse"), + "`anta get from-ansible` does not support inline vaulted variables", + 0, + id="Vault variable in inventory", + ), + pytest.param( + "ansible_inventory_unknown_yaml_tag.yml", + None, + pytest.raises(ValueError, match="Could not parse"), + None, + 0, + id="Unknown YAML tag in inventory", + ), ], ) def test_create_inventory_from_ansible( + caplog: pytest.LogCaptureFixture, tmp_path: Path, inventory_filename: Path, ansible_group: str | None, expected_raise: AbstractContextManager[Exception], + expected_log: str | None, expected_inv_length: int, ) -> None: """Test anta.get.utils.create_inventory_from_ansible.""" + # pylint: disable=R0913 target_file = tmp_path / "inventory.yml" inventory_file_path = DATA_DIR / inventory_filename @@ -130,3 +152,5 @@ def test_create_inventory_from_ansible( assert len(inv) == expected_inv_length if not isinstance(expected_raise, nullcontext): assert not target_file.exists() + if expected_log: + assert expected_log in caplog.text diff --git a/tests/units/cli/nrfu/test__init__.py b/tests/units/cli/nrfu/test__init__.py index 052c7c3..a9dcd9c 100644 --- a/tests/units/cli/nrfu/test__init__.py +++ b/tests/units/cli/nrfu/test__init__.py @@ -24,6 +24,14 @@ def test_anta_nrfu_help(click_runner: CliRunner) -> None: assert "Usage: anta nrfu" in result.output +def test_anta_nrfu_wrong_subcommand(click_runner: CliRunner) -> None: + """Test anta nrfu toast.""" + result = click_runner.invoke(anta, ["nrfu", "oook"]) + assert result.exit_code == ExitCode.USAGE_ERROR + assert "Usage: anta nrfu" in result.output + assert "No such command 'oook'." 
in result.output + + def test_anta_nrfu(click_runner: CliRunner) -> None: """Test anta nrfu, catalog is given via env.""" result = click_runner.invoke(anta, ["nrfu"]) @@ -32,6 +40,15 @@ def test_anta_nrfu(click_runner: CliRunner) -> None: assert "Tests catalog contains 1 tests" in result.output +def test_anta_nrfu_dry_run(click_runner: CliRunner) -> None: + """Test anta nrfu --dry-run, catalog is given via env.""" + result = click_runner.invoke(anta, ["nrfu", "--dry-run"]) + assert result.exit_code == ExitCode.OK + assert "ANTA Inventory contains 3 devices" in result.output + assert "Tests catalog contains 1 tests" in result.output + assert "Dry-run" in result.output + + def test_anta_password_required(click_runner: CliRunner) -> None: """Test that password is provided.""" env = default_anta_env() diff --git a/tests/units/cli/nrfu/test_commands.py b/tests/units/cli/nrfu/test_commands.py index 4ea40b7..e2b5031 100644 --- a/tests/units/cli/nrfu/test_commands.py +++ b/tests/units/cli/nrfu/test_commands.py @@ -54,6 +54,20 @@ def test_anta_nrfu_table(click_runner: CliRunner) -> None: assert "dummy │ VerifyEOSVersion │ success" in result.output +def test_anta_nrfu_table_group_by_device(click_runner: CliRunner) -> None: + """Test anta nrfu, catalog is given via env.""" + result = click_runner.invoke(anta, ["nrfu", "table", "--group-by", "device"]) + assert result.exit_code == ExitCode.OK + assert "Summary per device" in result.output + + +def test_anta_nrfu_table_group_by_test(click_runner: CliRunner) -> None: + """Test anta nrfu, catalog is given via env.""" + result = click_runner.invoke(anta, ["nrfu", "table", "--group-by", "test"]) + assert result.exit_code == ExitCode.OK + assert "Summary per test" in result.output + + def test_anta_nrfu_text(click_runner: CliRunner) -> None: """Test anta nrfu, catalog is given via env.""" result = click_runner.invoke(anta, ["nrfu", "text"]) @@ -66,7 +80,7 @@ def test_anta_nrfu_json(click_runner: CliRunner) -> None: result = click_runner.invoke(anta, ["nrfu", "json"]) assert result.exit_code == ExitCode.OK assert "JSON results" in result.output - match = re.search(r"\[\n {[\s\S]+ }\n\]", result.output) + match = re.search(r"\[\n {2}{[\s\S]+ {2}}\n\]", result.output) assert match is not None result_list = json.loads(match.group()) for res in result_list: diff --git a/tests/units/cli/test__init__.py b/tests/units/cli/test__init__.py index 0701083..6e32664 100644 --- a/tests/units/cli/test__init__.py +++ b/tests/units/cli/test__init__.py @@ -1,64 +1,55 @@ # Copyright (c) 2023-2024 Arista Networks, Inc. # Use of this source code is governed by the Apache License 2.0 # that can be found in the LICENSE file. 
-"""Tests for anta.cli.__init__.""" +"""Tests for anta.cli._main.""" from __future__ import annotations -from typing import TYPE_CHECKING +import sys +from importlib import reload +from typing import TYPE_CHECKING, Any from unittest.mock import patch import pytest -from anta.cli import anta, cli -from anta.cli.utils import ExitCode +import anta.cli if TYPE_CHECKING: - from click.testing import CliRunner + from types import ModuleType +builtins_import = __import__ -def test_anta(click_runner: CliRunner) -> None: - """Test anta main entrypoint.""" - result = click_runner.invoke(anta) - assert result.exit_code == ExitCode.OK - assert "Usage" in result.output +# Tried to achieve this with mock +# http://materials-scientist.com/blog/2021/02/11/mocking-failing-module-import-python/ +def import_mock(name: str, *args: Any) -> ModuleType: # noqa: ANN401 + """Mock.""" + if name == "click": + msg = "No module named 'click'" + raise ModuleNotFoundError(msg) + return builtins_import(name, *args) -def test_anta_help(click_runner: CliRunner) -> None: - """Test anta --help.""" - result = click_runner.invoke(anta, ["--help"]) - assert result.exit_code == ExitCode.OK - assert "Usage" in result.output +def test_cli_error_missing(capsys: pytest.CaptureFixture[Any]) -> None: + """Test ANTA errors out when anta[cli] was not installed.""" + with patch.dict(sys.modules) as sys_modules, patch("builtins.__import__", import_mock): + del sys_modules["anta.cli._main"] + reload(anta.cli) -def test_anta_exec_help(click_runner: CliRunner) -> None: - """Test anta exec --help.""" - result = click_runner.invoke(anta, ["exec", "--help"]) - assert result.exit_code == ExitCode.OK - assert "Usage: anta exec" in result.output + with pytest.raises(SystemExit) as e_info: + anta.cli.cli() + captured = capsys.readouterr() + assert "The ANTA command line client could not run because the required dependencies were not installed." in captured.out + assert "Make sure you've installed everything with: pip install 'anta[cli]'" in captured.out + assert e_info.value.code == 1 -def test_anta_debug_help(click_runner: CliRunner) -> None: - """Test anta debug --help.""" - result = click_runner.invoke(anta, ["debug", "--help"]) - assert result.exit_code == ExitCode.OK - assert "Usage: anta debug" in result.output + # setting ANTA_DEBUG + with pytest.raises(SystemExit) as e_info, patch("anta.cli.__DEBUG__", new=True): + anta.cli.cli() - -def test_anta_get_help(click_runner: CliRunner) -> None: - """Test anta get --help.""" - result = click_runner.invoke(anta, ["get", "--help"]) - assert result.exit_code == ExitCode.OK - assert "Usage: anta get" in result.output - - -def test_uncaught_failure_anta(caplog: pytest.LogCaptureFixture) -> None: - """Test uncaught failure when running ANTA cli.""" - with ( - pytest.raises(SystemExit) as e_info, - patch("anta.cli.anta", side_effect=ZeroDivisionError()), - ): - cli() - assert "CRITICAL" in caplog.text - assert "Uncaught Exception when running ANTA CLI" in caplog.text - assert e_info.value.code == 1 + captured = capsys.readouterr() + assert "The ANTA command line client could not run because the required dependencies were not installed." 
in captured.out + assert "Make sure you've installed everything with: pip install 'anta[cli]'" in captured.out + assert "The caught exception was:" in captured.out + assert e_info.value.code == 1 diff --git a/tests/units/cli/test_main.py b/tests/units/cli/test_main.py new file mode 100644 index 0000000..31a5e78 --- /dev/null +++ b/tests/units/cli/test_main.py @@ -0,0 +1,64 @@ +# Copyright (c) 2023-2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. +"""Tests for anta.cli._main.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING +from unittest.mock import patch + +import pytest + +from anta.cli._main import anta, cli +from anta.cli.utils import ExitCode + +if TYPE_CHECKING: + from click.testing import CliRunner + + +def test_anta(click_runner: CliRunner) -> None: + """Test anta main entrypoint.""" + result = click_runner.invoke(anta) + assert result.exit_code == ExitCode.OK + assert "Usage" in result.output + + +def test_anta_help(click_runner: CliRunner) -> None: + """Test anta --help.""" + result = click_runner.invoke(anta, ["--help"]) + assert result.exit_code == ExitCode.OK + assert "Usage" in result.output + + +def test_anta_exec_help(click_runner: CliRunner) -> None: + """Test anta exec --help.""" + result = click_runner.invoke(anta, ["exec", "--help"]) + assert result.exit_code == ExitCode.OK + assert "Usage: anta exec" in result.output + + +def test_anta_debug_help(click_runner: CliRunner) -> None: + """Test anta debug --help.""" + result = click_runner.invoke(anta, ["debug", "--help"]) + assert result.exit_code == ExitCode.OK + assert "Usage: anta debug" in result.output + + +def test_anta_get_help(click_runner: CliRunner) -> None: + """Test anta get --help.""" + result = click_runner.invoke(anta, ["get", "--help"]) + assert result.exit_code == ExitCode.OK + assert "Usage: anta get" in result.output + + +def test_uncaught_failure_anta(caplog: pytest.LogCaptureFixture) -> None: + """Test uncaught failure when running ANTA cli.""" + with ( + pytest.raises(SystemExit) as e_info, + patch("anta.cli._main.anta", side_effect=ZeroDivisionError()), + ): + cli() + assert "CRITICAL" in caplog.text + assert "Uncaught Exception when running ANTA CLI" in caplog.text + assert e_info.value.code == 1 diff --git a/tests/units/reporter/test__init__.py b/tests/units/reporter/test__init__.py index 0dc9f9a..2fc62ce 100644 --- a/tests/units/reporter/test__init__.py +++ b/tests/units/reporter/test__init__.py @@ -5,13 +5,14 @@ from __future__ import annotations +from pathlib import Path from typing import TYPE_CHECKING, Callable import pytest from rich.table import Table from anta import RICH_COLOR_PALETTE -from anta.reporter import ReportTable +from anta.reporter import ReportJinja, ReportTable if TYPE_CHECKING: from anta.custom_types import TestStatus @@ -185,3 +186,14 @@ class TestReportTable: assert isinstance(res, Table) assert res.title == (title or "Summary per device") assert res.row_count == expected_length + + +class TestReportJinja: + """Tests for ReportJinja class.""" + + # pylint: disable=too-few-public-methods + + def test_fail__init__file_not_found(self) -> None: + """Test __init__ failure if file is not found.""" + with pytest.raises(FileNotFoundError, match="template file is not found: /gnu/terry/pratchett"): + ReportJinja(Path("/gnu/terry/pratchett")) diff --git a/tests/units/test_catalog.py b/tests/units/test_catalog.py index 8de6382..1c7ca8a 100644 --- 
a/tests/units/test_catalog.py +++ b/tests/units/test_catalog.py @@ -12,7 +12,7 @@ import pytest from pydantic import ValidationError from yaml import safe_load -from anta.catalog import AntaCatalog, AntaTestDefinition +from anta.catalog import AntaCatalog, AntaCatalogFile, AntaTestDefinition from anta.models import AntaTest from anta.tests.interfaces import VerifyL3MTU from anta.tests.mlag import VerifyMlagStatus @@ -76,6 +76,11 @@ INIT_CATALOG_DATA: list[dict[str, Any]] = [ "filename": "test_empty_catalog.yml", "tests": [], }, + { + "name": "test_empty_dict_catalog", + "filename": "test_empty_dict_catalog.yml", + "tests": [], + }, ] CATALOG_PARSE_FAIL_DATA: list[dict[str, Any]] = [ { @@ -160,7 +165,6 @@ CATALOG_FROM_LIST_FAIL_DATA: list[dict[str, Any]] = [ "error": "FakeTestWithInput test inputs are not valid: 1 validation error for Input\n\tstring\n\t Input should be a valid string", }, ] - TESTS_SETTER_FAIL_DATA: list[dict[str, Any]] = [ { "name": "not_a_list", @@ -181,7 +185,7 @@ class TestAntaCatalog: @pytest.mark.parametrize("catalog_data", INIT_CATALOG_DATA, ids=generate_test_ids_list(INIT_CATALOG_DATA)) def test_parse(self, catalog_data: dict[str, Any]) -> None: """Instantiate AntaCatalog from a file.""" - catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / catalog_data["filename"])) + catalog: AntaCatalog = AntaCatalog.parse(DATA_DIR / catalog_data["filename"]) assert len(catalog.tests) == len(catalog_data["tests"]) for test_id, (test, inputs_data) in enumerate(catalog_data["tests"]): @@ -221,7 +225,7 @@ class TestAntaCatalog: def test_parse_fail(self, catalog_data: dict[str, Any]) -> None: """Errors when instantiating AntaCatalog from a file.""" with pytest.raises((ValidationError, TypeError)) as exec_info: - AntaCatalog.parse(str(DATA_DIR / catalog_data["filename"])) + AntaCatalog.parse(DATA_DIR / catalog_data["filename"]) if isinstance(exec_info.value, ValidationError): assert catalog_data["error"] in exec_info.value.errors()[0]["msg"] else: @@ -230,7 +234,7 @@ class TestAntaCatalog: def test_parse_fail_parsing(self, caplog: pytest.LogCaptureFixture) -> None: """Errors when instantiating AntaCatalog from a file.""" with pytest.raises(FileNotFoundError) as exec_info: - AntaCatalog.parse(str(DATA_DIR / "catalog_does_not_exist.yml")) + AntaCatalog.parse(DATA_DIR / "catalog_does_not_exist.yml") assert "No such file or directory" in str(exec_info) assert len(caplog.record_tuples) >= 1 _, _, message = caplog.record_tuples[0] @@ -284,16 +288,79 @@ class TestAntaCatalog: catalog.tests = catalog_data["tests"] assert catalog_data["error"] in str(exec_info) + def test_build_indexes_all(self) -> None: + """Test AntaCatalog.build_indexes().""" + catalog: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog_with_tags.yml") + catalog.build_indexes() + assert len(catalog.tests_without_tags) == 5 + assert "leaf" in catalog.tag_to_tests + assert len(catalog.tag_to_tests["leaf"]) == 3 + all_unique_tests = catalog.tests_without_tags + for tests in catalog.tag_to_tests.values(): + all_unique_tests.update(tests) + assert len(all_unique_tests) == 11 + assert catalog.indexes_built is True + + def test_build_indexes_filtered(self) -> None: + """Test AntaCatalog.build_indexes().""" + catalog: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog_with_tags.yml") + catalog.build_indexes({"VerifyUptime", "VerifyCoredump", "VerifyL3MTU"}) + assert "leaf" in catalog.tag_to_tests + assert len(catalog.tag_to_tests["leaf"]) == 1 + assert len(catalog.tests_without_tags) == 1 + all_unique_tests = 
catalog.tests_without_tags + for tests in catalog.tag_to_tests.values(): + all_unique_tests.update(tests) + assert len(all_unique_tests) == 4 + assert catalog.indexes_built is True + def test_get_tests_by_tags(self) -> None: """Test AntaCatalog.get_tests_by_tags().""" - catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / "test_catalog_with_tags.yml")) - tests: list[AntaTestDefinition] = catalog.get_tests_by_tags(tags={"leaf"}) + catalog: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog_with_tags.yml") + catalog.build_indexes() + tests: set[AntaTestDefinition] = catalog.get_tests_by_tags(tags={"leaf"}) assert len(tests) == 3 - tests = catalog.get_tests_by_tags(tags={"leaf"}, strict=True) - assert len(tests) == 2 + tests = catalog.get_tests_by_tags(tags={"leaf", "spine"}, strict=True) + assert len(tests) == 1 - def test_get_tests_by_names(self) -> None: - """Test AntaCatalog.get_tests_by_tags().""" - catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / "test_catalog_with_tags.yml")) - tests: list[AntaTestDefinition] = catalog.get_tests_by_names(names={"VerifyUptime", "VerifyCoredump"}) - assert len(tests) == 3 + def test_merge(self) -> None: + """Test AntaCatalog.merge().""" + catalog1: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog.yml") + assert len(catalog1.tests) == 1 + catalog2: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog.yml") + assert len(catalog2.tests) == 1 + catalog3: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog_medium.yml") + assert len(catalog3.tests) == 228 + + assert len(catalog1.merge(catalog2).tests) == 2 + assert len(catalog1.tests) == 1 + assert len(catalog2.tests) == 1 + + assert len(catalog2.merge(catalog3).tests) == 229 + assert len(catalog2.tests) == 1 + assert len(catalog3.tests) == 228 + + def test_dump(self) -> None: + """Test AntaCatalog.dump().""" + catalog: AntaCatalog = AntaCatalog.parse(DATA_DIR / "test_catalog.yml") + assert len(catalog.tests) == 1 + file: AntaCatalogFile = catalog.dump() + assert sum(len(tests) for tests in file.root.values()) == 1 + + catalog = AntaCatalog.parse(DATA_DIR / "test_catalog_medium.yml") + assert len(catalog.tests) == 228 + file = catalog.dump() + assert sum(len(tests) for tests in file.root.values()) == 228 + + +class TestAntaCatalogFile: # pylint: disable=too-few-public-methods + """Test for anta.catalog.AntaCatalogFile.""" + + def test_yaml(self) -> None: + """Test AntaCatalogFile.yaml().""" + file = DATA_DIR / "test_catalog_medium.yml" + catalog = AntaCatalog.parse(file) + assert len(catalog.tests) == 228 + catalog_yaml_str = catalog.dump().yaml() + with file.open(encoding="UTF-8") as f: + assert catalog_yaml_str == f.read() diff --git a/tests/units/test_custom_types.py b/tests/units/test_custom_types.py new file mode 100644 index 0000000..8119849 --- /dev/null +++ b/tests/units/test_custom_types.py @@ -0,0 +1,264 @@ +# Copyright (c) 2023-2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. +"""Tests for `anta.custom_types`. + +The intention is only to test here what is not used already in other places. + +TODO: Expand later. 
+""" + +from __future__ import annotations + +import re + +import pytest + +from anta.custom_types import ( + REGEX_BGP_IPV4_MPLS_VPN, + REGEX_BGP_IPV4_UNICAST, + REGEXP_BGP_IPV4_MPLS_LABELS, + REGEXP_BGP_L2VPN_AFI, + REGEXP_EOS_BLACKLIST_CMDS, + REGEXP_INTERFACE_ID, + REGEXP_PATH_MARKERS, + REGEXP_TYPE_EOS_INTERFACE, + REGEXP_TYPE_HOSTNAME, + REGEXP_TYPE_VXLAN_SRC_INTERFACE, + aaa_group_prefix, + bgp_multiprotocol_capabilities_abbreviations, + interface_autocomplete, + interface_case_sensitivity, +) + +# ------------------------------------------------------------------------------ +# TEST custom_types.py regular expressions +# ------------------------------------------------------------------------------ + + +def test_regexp_path_markers() -> None: + """Test REGEXP_PATH_MARKERS.""" + # Test strings that should match the pattern + assert re.search(REGEXP_PATH_MARKERS, "show/bgp/interfaces") is not None + assert re.search(REGEXP_PATH_MARKERS, "show\\bgp") is not None + assert re.search(REGEXP_PATH_MARKERS, "show bgp") is not None + + # Test strings that should not match the pattern + assert re.search(REGEXP_PATH_MARKERS, "aaaa") is None + assert re.search(REGEXP_PATH_MARKERS, "11111") is None + assert re.search(REGEXP_PATH_MARKERS, ".[]?<>") is None + + +def test_regexp_bgp_l2vpn_afi() -> None: + """Test REGEXP_BGP_L2VPN_AFI.""" + # Test strings that should match the pattern + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2vpn-evpn") is not None + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2 vpn evpn") is not None + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2-vpn evpn") is not None + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2vpn evpn") is not None + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2vpnevpn") is not None + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2 vpnevpn") is not None + + # Test strings that should not match the pattern + assert re.search(REGEXP_BGP_L2VPN_AFI, "al2vpn evpn") is None + assert re.search(REGEXP_BGP_L2VPN_AFI, "l2vpn-evpna") is None + + +def test_regexp_bgp_ipv4_mpls_labels() -> None: + """Test REGEXP_BGP_IPV4_MPLS_LABELS.""" + assert re.search(REGEXP_BGP_IPV4_MPLS_LABELS, "ipv4-mpls-label") is not None + assert re.search(REGEXP_BGP_IPV4_MPLS_LABELS, "ipv4 mpls labels") is not None + assert re.search(REGEXP_BGP_IPV4_MPLS_LABELS, "ipv4Mplslabel") is None + + +def test_regex_bgp_ipv4_mpls_vpn() -> None: + """Test REGEX_BGP_IPV4_MPLS_VPN.""" + assert re.search(REGEX_BGP_IPV4_MPLS_VPN, "ipv4-mpls-vpn") is not None + assert re.search(REGEX_BGP_IPV4_MPLS_VPN, "ipv4_mplsvpn") is None + + +def test_regex_bgp_ipv4_unicast() -> None: + """Test REGEX_BGP_IPV4_UNICAST.""" + assert re.search(REGEX_BGP_IPV4_UNICAST, "ipv4-uni-cast") is not None + assert re.search(REGEX_BGP_IPV4_UNICAST, "ipv4+unicast") is None + + +def test_regexp_type_interface_id() -> None: + """Test REGEXP_INTERFACE_ID.""" + intf_id_re = re.compile(f"{REGEXP_INTERFACE_ID}") + + # Test strings that should match the pattern + assert intf_id_re.search("123") is not None + assert intf_id_re.search("123/456") is not None + assert intf_id_re.search("123.456") is not None + assert intf_id_re.search("123/456.789") is not None + + +def test_regexp_type_eos_interface() -> None: + """Test REGEXP_TYPE_EOS_INTERFACE.""" + # Test strings that should match the pattern + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Ethernet0") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Vlan100") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Port-Channel1/0") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, 
"Loopback0.1") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Management0/0/0") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Tunnel1") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Vxlan1") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Fabric1") is not None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Dps1") is not None + + # Test strings that should not match the pattern + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Ethernet") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Vlan") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Port-Channel") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Loopback.") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Management/") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Tunnel") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Vxlan") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Fabric") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Dps") is None + + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Ethernet1/a") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Port-Channel-100") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Loopback.10") is None + assert re.match(REGEXP_TYPE_EOS_INTERFACE, "Management/10") is None + + +def test_regexp_type_vxlan_src_interface() -> None: + """Test REGEXP_TYPE_VXLAN_SRC_INTERFACE.""" + # Test strings that should match the pattern + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback0") is not None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback1") is not None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback99") is not None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback100") is not None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback8190") is not None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback8199") is not None + + # Test strings that should not match the pattern + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback") is None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback9001") is None + assert re.match(REGEXP_TYPE_VXLAN_SRC_INTERFACE, "Loopback9000") is None + + +def test_regexp_type_hostname() -> None: + """Test REGEXP_TYPE_HOSTNAME.""" + # Test strings that should match the pattern + assert re.match(REGEXP_TYPE_HOSTNAME, "hostname") is not None + assert re.match(REGEXP_TYPE_HOSTNAME, "hostname.com") is not None + assert re.match(REGEXP_TYPE_HOSTNAME, "host-name.com") is not None + assert re.match(REGEXP_TYPE_HOSTNAME, "host.name.com") is not None + assert re.match(REGEXP_TYPE_HOSTNAME, "host-name1.com") is not None + + # Test strings that should not match the pattern + assert re.match(REGEXP_TYPE_HOSTNAME, "-hostname.com") is None + assert re.match(REGEXP_TYPE_HOSTNAME, ".hostname.com") is None + assert re.match(REGEXP_TYPE_HOSTNAME, "hostname-.com") is None + assert re.match(REGEXP_TYPE_HOSTNAME, "hostname..com") is None + + +@pytest.mark.parametrize( + ("test_string", "expected"), + [ + ("reload", True), # matches "^reload.*" + ("reload now", True), # matches "^reload.*" + ("configure terminal", True), # matches "^conf\w*\s*(terminal|session)*" + ("conf t", True), # matches "^conf\w*\s*(terminal|session)*" + ("write memory", True), # matches "^wr\w*\s*\w+" + ("wr mem", True), # matches "^wr\w*\s*\w+" + ("show running-config", False), # does not match any regex + ("no shutdown", False), # does not match any regex + ("", False), # empty string does not match any regex + ], +) +def 
test_regexp_eos_blacklist_cmds(test_string: str, expected: bool) -> None: + """Test REGEXP_EOS_BLACKLIST_CMDS.""" + + def matches_any_regex(string: str, regex_list: list[str]) -> bool: + """ + Check if a string matches at least one regular expression in a list. + + :param string: The string to check. + :param regex_list: A list of regular expressions. + :return: True if the string matches at least one regular expression, False otherwise. + """ + return any(re.match(regex, string) for regex in regex_list) + + assert matches_any_regex(test_string, REGEXP_EOS_BLACKLIST_CMDS) == expected + + +# ------------------------------------------------------------------------------ +# TEST custom_types.py functions +# ------------------------------------------------------------------------------ + + +def test_interface_autocomplete_success() -> None: + """Test interface_autocomplete with valid inputs.""" + assert interface_autocomplete("et1") == "Ethernet1" + assert interface_autocomplete("et1/1") == "Ethernet1/1" + assert interface_autocomplete("et1.1") == "Ethernet1.1" + assert interface_autocomplete("et1/1.1") == "Ethernet1/1.1" + assert interface_autocomplete("eth2") == "Ethernet2" + assert interface_autocomplete("po3") == "Port-Channel3" + assert interface_autocomplete("lo4") == "Loopback4" + + +def test_interface_autocomplete_no_alias() -> None: + """Test interface_autocomplete with inputs that don't have aliases.""" + assert interface_autocomplete("GigabitEthernet1") == "GigabitEthernet1" + assert interface_autocomplete("Vlan10") == "Vlan10" + assert interface_autocomplete("Tunnel100") == "Tunnel100" + + +def test_interface_autocomplete_failure() -> None: + """Trigger ValueError for interface_autocomplete.""" + with pytest.raises(ValueError, match="Could not parse interface ID in interface"): + interface_autocomplete("ThisIsNotAnInterface") + + +@pytest.mark.parametrize( + ("str_input", "expected_output"), + [ + pytest.param("L2VPNEVPN", "l2VpnEvpn", id="l2VpnEvpn"), + pytest.param("ipv4-mplsLabels", "ipv4MplsLabels", id="ipv4MplsLabels"), + pytest.param("ipv4-mpls-vpn", "ipv4MplsVpn", id="ipv4MplsVpn"), + pytest.param("ipv4-unicast", "ipv4Unicast", id="ipv4Unicast"), + pytest.param("BLAH", "BLAH", id="unmatched"), + ], +) +def test_bgp_multiprotocol_capabilities_abbreviationsh(str_input: str, expected_output: str) -> None: + """Test bgp_multiprotocol_capabilities_abbreviations.""" + assert bgp_multiprotocol_capabilities_abbreviations(str_input) == expected_output + + +def test_aaa_group_prefix_known_method() -> None: + """Test aaa_group_prefix with a known method.""" + assert aaa_group_prefix("local") == "local" + assert aaa_group_prefix("none") == "none" + assert aaa_group_prefix("logging") == "logging" + + +def test_aaa_group_prefix_unknown_method() -> None: + """Test aaa_group_prefix with an unknown method.""" + assert aaa_group_prefix("demo") == "group demo" + assert aaa_group_prefix("group1") == "group group1" + + +def test_interface_case_sensitivity_lowercase() -> None: + """Test interface_case_sensitivity with lowercase inputs.""" + assert interface_case_sensitivity("ethernet") == "Ethernet" + assert interface_case_sensitivity("vlan") == "Vlan" + assert interface_case_sensitivity("loopback") == "Loopback" + + +def test_interface_case_sensitivity_mixed_case() -> None: + """Test interface_case_sensitivity with mixed case inputs.""" + assert interface_case_sensitivity("Ethernet") == "Ethernet" + assert interface_case_sensitivity("Vlan") == "Vlan" + assert 
interface_case_sensitivity("Loopback") == "Loopback" + + +def test_interface_case_sensitivity_uppercase() -> None: + """Test interface_case_sensitivity with uppercase inputs.""" + assert interface_case_sensitivity("ETHERNET") == "ETHERNET" + assert interface_case_sensitivity("VLAN") == "VLAN" + assert interface_case_sensitivity("LOOPBACK") == "LOOPBACK" diff --git a/tests/units/test_device.py b/tests/units/test_device.py index c901a3d..e8a0c5f 100644 --- a/tests/units/test_device.py +++ b/tests/units/test_device.py @@ -15,7 +15,7 @@ import pytest from asyncssh import SSHClientConnection, SSHClientConnectionOptions from rich import print as rprint -from anta import aioeapi +import asynceapi from anta.device import AntaDevice, AsyncEOSDevice from anta.models import AntaCommand from tests.lib.fixture import COMMAND_OUTPUT @@ -128,7 +128,7 @@ EQUALITY_DATA: list[dict[str, Any]] = [ "expected": False, }, ] -AIOEAPI_COLLECT_DATA: list[dict[str, Any]] = [ +ASYNCEAPI_COLLECT_DATA: list[dict[str, Any]] = [ { "name": "command", "device": {}, @@ -350,12 +350,12 @@ AIOEAPI_COLLECT_DATA: list[dict[str, Any]] = [ }, }, { - "name": "aioeapi.EapiCommandError", + "name": "asynceapi.EapiCommandError", "device": {}, "command": { "command": "show version", "patch_kwargs": { - "side_effect": aioeapi.EapiCommandError( + "side_effect": asynceapi.EapiCommandError( passed=[], failed="show version", errors=["Authorization denied for command 'show version'"], @@ -385,7 +385,7 @@ AIOEAPI_COLLECT_DATA: list[dict[str, Any]] = [ "expected": {"output": None, "errors": ["ConnectError: Cannot open port"]}, }, ] -AIOEAPI_COPY_DATA: list[dict[str, Any]] = [ +ASYNCEAPI_COPY_DATA: list[dict[str, Any]] = [ { "name": "from", "device": {}, @@ -509,12 +509,12 @@ REFRESH_DATA: list[dict[str, Any]] = [ "expected": {"is_online": True, "established": False, "hw_model": None}, }, { - "name": "aioeapi.EapiCommandError", + "name": "asynceapi.EapiCommandError", "device": {}, "patch_kwargs": ( {"return_value": True}, { - "side_effect": aioeapi.EapiCommandError( + "side_effect": asynceapi.EapiCommandError( passed=[], failed="show version", errors=["Authorization denied for command 'show version'"], @@ -644,7 +644,7 @@ class TestAntaDevice: assert current_cached_data == COMMAND_OUTPUT assert device.cache.hit_miss_ratio["hits"] == 1 else: # command is not allowed to use cache - device._collect.assert_called_once_with(command=command) # type: ignore[attr-defined] # pylint: disable=protected-access + device._collect.assert_called_once_with(command=command, collection_id=None) # type: ignore[attr-defined] # pylint: disable=protected-access assert command.output == COMMAND_OUTPUT if expected_data["cache_hit"] is True: assert current_cached_data == cached_output @@ -652,7 +652,7 @@ class TestAntaDevice: assert current_cached_data is None else: # device is disabled assert device.cache is None - device._collect.assert_called_once_with(command=command) # type: ignore[attr-defined] # pylint: disable=protected-access + device._collect.assert_called_once_with(command=command, collection_id=None) # type: ignore[attr-defined] # pylint: disable=protected-access @pytest.mark.parametrize(("device", "expected"), CACHE_STATS_DATA, indirect=["device"]) def test_cache_statistics(self, device: AntaDevice, expected: dict[str, Any] | None) -> None: @@ -705,9 +705,9 @@ class TestAsyncEOSDevice: """Test AsyncEOSDevice.refresh().""" with patch.object(async_device._session, "check_connection", **patch_kwargs[0]), patch.object(async_device._session, "cli", 
**patch_kwargs[1]): await async_device.refresh() - async_device._session.check_connection.assert_called_once() + async_device._session.check_connection.assert_called_once() # type: ignore[attr-defined] # asynceapi.Device.check_connection is patched if expected["is_online"]: - async_device._session.cli.assert_called_once() + async_device._session.cli.assert_called_once() # type: ignore[attr-defined] # asynceapi.Device.cli is patched assert async_device.is_online == expected["is_online"] assert async_device.established == expected["established"] assert async_device.hw_model == expected["hw_model"] @@ -715,8 +715,8 @@ class TestAsyncEOSDevice: @pytest.mark.asyncio() @pytest.mark.parametrize( ("async_device", "command", "expected"), - ((d["device"], d["command"], d["expected"]) for d in AIOEAPI_COLLECT_DATA), - ids=generate_test_ids_list(AIOEAPI_COLLECT_DATA), + ((d["device"], d["command"], d["expected"]) for d in ASYNCEAPI_COLLECT_DATA), + ids=generate_test_ids_list(ASYNCEAPI_COLLECT_DATA), indirect=["async_device"], ) async def test__collect(self, async_device: AsyncEOSDevice, command: dict[str, Any], expected: dict[str, Any]) -> None: @@ -724,7 +724,8 @@ class TestAsyncEOSDevice: """Test AsyncEOSDevice._collect().""" cmd = AntaCommand(command=command["command"], revision=command["revision"]) if "revision" in command else AntaCommand(command=command["command"]) with patch.object(async_device._session, "cli", **command["patch_kwargs"]): - await async_device.collect(cmd) + collection_id = "pytest" + await async_device.collect(cmd, collection_id=collection_id) commands: list[dict[str, Any]] = [] if async_device.enable and async_device._enable_password is not None: commands.append( @@ -740,15 +741,15 @@ class TestAsyncEOSDevice: commands.append({"cmd": cmd.command, "revision": cmd.revision}) else: commands.append({"cmd": cmd.command}) - async_device._session.cli.assert_called_once_with(commands=commands, ofmt=cmd.ofmt, version=cmd.version) + async_device._session.cli.assert_called_once_with(commands=commands, ofmt=cmd.ofmt, version=cmd.version, req_id=f"ANTA-{collection_id}-{id(cmd)}") # type: ignore[attr-defined] # asynceapi.Device.cli is patched # pylint: disable=line-too-long assert cmd.output == expected["output"] assert cmd.errors == expected["errors"] @pytest.mark.asyncio() @pytest.mark.parametrize( ("async_device", "copy"), - ((d["device"], d["copy"]) for d in AIOEAPI_COPY_DATA), - ids=generate_test_ids_list(AIOEAPI_COPY_DATA), + ((d["device"], d["copy"]) for d in ASYNCEAPI_COPY_DATA), + ids=generate_test_ids_list(ASYNCEAPI_COPY_DATA), indirect=["async_device"], ) async def test_copy(self, async_device: AsyncEOSDevice, copy: dict[str, Any]) -> None: diff --git a/tests/units/test_models.py b/tests/units/test_models.py index bc6a1ce..180f6bf 100644 --- a/tests/units/test_models.py +++ b/tests/units/test_models.py @@ -153,10 +153,10 @@ class FakeTestWithTemplateBadRender1(AntaTest): class FakeTestWithTemplateBadRender2(AntaTest): - """ANTA test with template that raises an arbitrary exception.""" + """ANTA test with template that raises an arbitrary exception in render().""" name = "FakeTestWithTemplateBadRender2" - description = "ANTA test with template that raises an arbitrary exception" + description = "ANTA test with template that raises an arbitrary exception in render()" categories: ClassVar[list[str]] = [] commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")] @@ -175,6 +175,53 @@ class FakeTestWithTemplateBadRender2(AntaTest): 
self.result.is_success(self.instance_commands[0].command) +class FakeTestWithTemplateBadRender3(AntaTest): + """ANTA test with template that gives extra template parameters in render().""" + + name = "FakeTestWithTemplateBadRender3" + description = "ANTA test with template that gives extra template parameters in render()" + categories: ClassVar[list[str]] = [] + commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")] + + class Input(AntaTest.Input): + """Inputs for FakeTestWithTemplateBadRender3 test.""" + + interface: str + + def render(self, template: AntaTemplate) -> list[AntaCommand]: + """Render function.""" + return [template.render(interface=self.inputs.interface, extra="blah")] + + @AntaTest.anta_test + def test(self) -> None: + """Test function.""" + self.result.is_success(self.instance_commands[0].command) + + +class FakeTestWithTemplateBadTest(AntaTest): + """ANTA test with template that tries to access an undefined template parameter in test().""" + + name = "FakeTestWithTemplateBadTest" + description = "ANTA test with template that tries to access an undefined template parameter in test()" + categories: ClassVar[list[str]] = [] + commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaTemplate(template="show interface {interface}")] + + class Input(AntaTest.Input): + """Inputs for FakeTestWithTemplateBadTest test.""" + + interface: str + + def render(self, template: AntaTemplate) -> list[AntaCommand]: + """Render function.""" + return [template.render(interface=self.inputs.interface)] + + @AntaTest.anta_test + def test(self) -> None: + """Test function.""" + # The following line must raise AttributeError at runtime + self.result.is_success(self.instance_commands[0].params.wrong_template_param) + + class SkipOnPlatformTest(AntaTest): """ANTA test that is skipped.""" @@ -365,6 +412,31 @@ ANTATEST_DATA: list[dict[str, Any]] = [ }, }, { + "name": "Extra template parameters in render()", + "test": FakeTestWithTemplateBadRender3, + "inputs": {"interface": "Ethernet1"}, + "expected": { + "__init__": { + "result": "error", + "messages": [ + "Exception in tests.units.test_models.FakeTestWithTemplateBadRender3.render(): ValidationError: 1 validation error for AntaParams\n" + "extra\n" + " Extra inputs are not permitted [type=extra_forbidden, input_value='blah', input_type=str]\n" + ], + }, + "test": {"result": "error"}, + }, + }, + { + "name": "Access undefined template param in test()", + "test": FakeTestWithTemplateBadTest, + "inputs": {"interface": "Ethernet1"}, + "expected": { + "__init__": {"result": "unset"}, + "test": {"result": "error", "messages": ["AttributeError: 'AntaParams' object has no attribute 'wrong_template_param'"]}, + }, + }, + { "name": "unskip on platforms", "test": UnSkipOnPlatformTest, "inputs": None, @@ -574,7 +646,6 @@ class TestAntaComamnd: text_cmd = AntaCommand(command="show dummy", ofmt="text", output="blah") text_cmd_2 = AntaCommand(command="show dummy", ofmt="text", output={"not_a": "string"}) msg = "Output of command 'show dummy' is invalid" - msg = "Output of command 'show dummy' is invalid" with pytest.raises(RuntimeError, match=msg): json_cmd.text_output with pytest.raises(RuntimeError, match=msg): diff --git a/tests/units/test_runner.py b/tests/units/test_runner.py index d5bb892..955149d 100644 --- a/tests/units/test_runner.py +++ b/tests/units/test_runner.py @@ -6,6 +6,9 @@ from __future__ import annotations import logging +import resource +from pathlib import Path +from unittest.mock import 
patch import pytest @@ -13,10 +16,11 @@ from anta import logger from anta.catalog import AntaCatalog from anta.inventory import AntaInventory from anta.result_manager import ResultManager -from anta.runner import main +from anta.runner import adjust_rlimit_nofile, main, prepare_tests from .test_models import FakeTest +DATA_DIR: Path = Path(__file__).parent.parent.resolve() / "data" FAKE_CATALOG: AntaCatalog = AntaCatalog.from_list([(FakeTest, None)]) @@ -47,8 +51,8 @@ async def test_runner_empty_inventory(caplog: pytest.LogCaptureFixture) -> None: manager = ResultManager() inventory = AntaInventory() await main(manager, inventory, FAKE_CATALOG) - assert len(caplog.record_tuples) == 1 - assert "The inventory is empty, exiting" in caplog.records[0].message + assert len(caplog.record_tuples) == 3 + assert "The inventory is empty, exiting" in caplog.records[1].message @pytest.mark.asyncio() @@ -70,3 +74,133 @@ async def test_runner_no_selected_device(caplog: pytest.LogCaptureFixture, test_ await main(manager, test_inventory, FAKE_CATALOG, tags={"toto"}) assert "No reachable device matching the tags {'toto'} was found." in [record.message for record in caplog.records] + + +def test_adjust_rlimit_nofile_valid_env(caplog: pytest.LogCaptureFixture) -> None: + """Test adjust_rlimit_nofile with a valid environment variable.""" + with ( + caplog.at_level(logging.DEBUG), + patch.dict("os.environ", {"ANTA_NOFILE": "20480"}), + patch("anta.runner.resource.getrlimit") as getrlimit_mock, + patch("anta.runner.resource.setrlimit") as setrlimit_mock, + ): + # Simulate the default system limits + system_limits = (8192, 1048576) + + # Setup getrlimit mock return value + getrlimit_mock.return_value = system_limits + + # Simulate setrlimit behavior + def side_effect_setrlimit(resource_id: int, limits: tuple[int, int]) -> None: + _ = resource_id + getrlimit_mock.return_value = (limits[0], limits[1]) + + setrlimit_mock.side_effect = side_effect_setrlimit + + result = adjust_rlimit_nofile() + + # Assert the limits were updated as expected + assert result == (20480, 1048576) + assert "Initial limit numbers for open file descriptors for the current ANTA process: Soft Limit: 8192 | Hard Limit: 1048576" in caplog.text + assert "Setting soft limit for open file descriptors for the current ANTA process to 20480" in caplog.text + + setrlimit_mock.assert_called_once_with(resource.RLIMIT_NOFILE, (20480, 1048576)) + + +def test_adjust_rlimit_nofile_invalid_env(caplog: pytest.LogCaptureFixture) -> None: + """Test adjust_rlimit_nofile with an invalid environment variable.""" + with ( + caplog.at_level(logging.DEBUG), + patch.dict("os.environ", {"ANTA_NOFILE": "invalid"}), + patch("anta.runner.resource.getrlimit") as getrlimit_mock, + patch("anta.runner.resource.setrlimit") as setrlimit_mock, + ): + # Simulate the default system limits + system_limits = (8192, 1048576) + + # Setup getrlimit mock return value + getrlimit_mock.return_value = system_limits + + # Simulate setrlimit behavior + def side_effect_setrlimit(resource_id: int, limits: tuple[int, int]) -> None: + _ = resource_id + getrlimit_mock.return_value = (limits[0], limits[1]) + + setrlimit_mock.side_effect = side_effect_setrlimit + + result = adjust_rlimit_nofile() + + # Assert the limits were updated as expected + assert result == (16384, 1048576) + assert "The ANTA_NOFILE environment variable value is invalid" in caplog.text + assert caplog.records[0].levelname == "WARNING" + assert "Initial limit numbers for open file descriptors for the current ANTA process: Soft
Limit: 8192 | Hard Limit: 1048576" in caplog.text + assert "Setting soft limit for open file descriptors for the current ANTA process to 16384" in caplog.text + + setrlimit_mock.assert_called_once_with(resource.RLIMIT_NOFILE, (16384, 1048576)) + + +@pytest.mark.asyncio() +@pytest.mark.parametrize( + ("tags", "expected_tests_count", "expected_devices_count"), + [ + (None, 22, 3), + ({"leaf"}, 9, 3), + ({"invalid_tag"}, 0, 0), + ], + ids=["no_tags", "leaf_tag", "invalid_tag"], +) +async def test_prepare_tests( + caplog: pytest.LogCaptureFixture, + test_inventory: AntaInventory, + tags: set[str] | None, + expected_tests_count: int, + expected_devices_count: int, +) -> None: + """Test the runner prepare_tests function.""" + logger.setup_logging(logger.Log.INFO) + caplog.set_level(logging.INFO) + + catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / "test_catalog_with_tags.yml")) + selected_tests = prepare_tests(inventory=test_inventory, catalog=catalog, tags=tags, tests=None) + + if selected_tests is None: + assert expected_tests_count == 0 + expected_log = f"There are no tests matching the tags {tags} to run in the current test catalog and device inventory, please verify your inputs." + assert expected_log in caplog.text + else: + assert len(selected_tests) == expected_devices_count + assert sum(len(tests) for tests in selected_tests.values()) == expected_tests_count + + +@pytest.mark.asyncio() +async def test_prepare_tests_with_specific_tests(caplog: pytest.LogCaptureFixture, test_inventory: AntaInventory) -> None: + """Test the runner prepare_tests function with specific tests.""" + logger.setup_logging(logger.Log.INFO) + caplog.set_level(logging.INFO) + + catalog: AntaCatalog = AntaCatalog.parse(str(DATA_DIR / "test_catalog_with_tags.yml")) + selected_tests = prepare_tests(inventory=test_inventory, catalog=catalog, tags=None, tests={"VerifyMlagStatus", "VerifyUptime"}) + + assert selected_tests is not None + assert len(selected_tests) == 3 + assert sum(len(tests) for tests in selected_tests.values()) == 5 + + +@pytest.mark.asyncio() +async def test_runner_dry_run(caplog: pytest.LogCaptureFixture, test_inventory: AntaInventory) -> None: + """Test that when dry_run is True, no tests are run. 
+ + caplog is the pytest fixture to capture logs + test_inventory is a fixture that gives a default inventory for tests + """ + logger.setup_logging(logger.Log.INFO) + caplog.set_level(logging.INFO) + manager = ResultManager() + catalog_path = Path(__file__).parent.parent / "data" / "test_catalog.yml" + catalog = AntaCatalog.parse(catalog_path) + + await main(manager, test_inventory, catalog, dry_run=True) + + # Check that the last log contains Dry-run + assert "Dry-run" in caplog.records[-1].message diff --git a/tests/units/test_tools.py b/tests/units/test_tools.py index a846fd6..c3a57e5 100644 --- a/tests/units/test_tools.py +++ b/tests/units/test_tools.py @@ -11,7 +11,7 @@ from typing import Any import pytest -from anta.tools import get_dict_superset, get_failed_logs, get_item, get_value +from anta.tools import custom_division, get_dict_superset, get_failed_logs, get_item, get_value TEST_GET_FAILED_LOGS_DATA = [ {"id": 1, "name": "Alice", "age": 30, "email": "alice@example.com"}, @@ -488,3 +488,17 @@ def test_get_item( # pylint: disable=too-many-arguments with expected_raise: assert get_item(list_of_dicts, key, value, default, var_name, custom_error_msg, required=required, case_sensitive=case_sensitive) == expected_result + + +@pytest.mark.parametrize( + ("numerator", "denominator", "expected_result"), + [ + pytest.param(4.0, 2.0, 2, id="int return for float input"), + pytest.param(4, 2, 2, id="int return for int input"), + pytest.param(5.0, 2.0, 2.5, id="float return for float input"), + pytest.param(5, 2, 2.5, id="float return for int input"), + ], +) +def test_custom_division(numerator: float, denominator: float, expected_result: float) -> None: + """Test custom_division.""" + assert custom_division(numerator, denominator) == expected_result
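
For readers following the new test_custom_division parametrization above, the sketch below shows a helper consistent with those expectations (an int when the division is exact, otherwise a float). This is an illustration only, inferred from the test IDs; it is not the actual anta.tools implementation.

from __future__ import annotations


def custom_division(numerator: float, denominator: float) -> int | float:
    """Divide numerator by denominator, returning an int when the result is a whole number."""
    result = numerator / denominator
    # int(result) drops the trailing ".0" for exact divisions, matching the
    # "int return" test cases; otherwise the float is returned unchanged.
    return int(result) if result.is_integer() else result


# Mirrors the parametrized expectations: exact divisions yield ints, others floats.
assert custom_division(4.0, 2.0) == 2
assert custom_division(5, 2) == 2.5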