author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-06-17 09:01:56 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-06-17 09:01:56 +0000
commit     89c4287dc1d423987b3ed83c28335bb3dcb0f06e (patch)
tree       b2406a87652796c5a1622815061870d1c45322d5
parent     Adding upstream version 0.15.0. (diff)
download   anta-89c4287dc1d423987b3ed83c28335bb3dcb0f06e.tar.xz
           anta-89c4287dc1d423987b3ed83c28335bb3dcb0f06e.zip

Adding upstream version 1.0.0. (upstream/1.0.0)

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
-rw-r--r--  .arista/secret_allowlist.yaml | 10
-rw-r--r--  .devcontainer/devcontainer.json | 12
-rw-r--r--  .devcontainer/startup.sh | 3
-rw-r--r--  .github/release.md | 2
-rw-r--r--  .github/workflows/code-testing.yml | 37
-rw-r--r--  .github/workflows/secret-scanner.yml | 30
-rw-r--r--  .pre-commit-config.yaml | 20
-rw-r--r--  .sonarcloud.properties | 18
-rw-r--r--  .vscode/settings.json | 21
-rw-r--r--  Dockerfile | 6
-rw-r--r--  anta/__init__.py | 2
-rw-r--r--  anta/cli/debug/utils.py | 2
-rw-r--r--  anta/cli/get/commands.py | 22
-rw-r--r--  anta/cli/get/utils.py | 27
-rw-r--r--  anta/inventory/models.py | 15
-rw-r--r--  anta/tests/routing/isis.py | 429
-rwxr-xr-x  docs/README.md | 20
-rw-r--r--  docs/advanced_usages/custom-tests.md | 4
-rw-r--r--  docs/cli/inv-from-cvp.md | 27
-rw-r--r--  docs/cli/nrfu.md | 18
-rw-r--r--  docs/contribution.md | 8
-rw-r--r--  docs/faq.md | 2
-rw-r--r--  docs/getting-started.md | 12
-rw-r--r--  docs/overrides/main.html | 4
-rw-r--r--  docs/requirements-and-installation.md | 20
-rw-r--r--  docs/scripts/generate_svg.py | 3
-rw-r--r--  docs/stylesheets/extra.material.css | 12
-rw-r--r--  docs/troubleshooting.md | 8
-rw-r--r--  docs/usage-inventory-catalog.md | 8
-rw-r--r--  examples/README.md | 6
-rw-r--r--  mkdocs.yml | 14
-rw-r--r--  pyproject.toml | 23
-rw-r--r--  tests/conftest.py | 5
-rw-r--r--  tests/units/anta_tests/routing/test_isis.py | 1350
-rw-r--r--  tests/units/cli/get/test_commands.py | 48
-rw-r--r--  tests/units/cli/get/test_utils.py | 13
36 files changed, 2108 insertions, 153 deletions
diff --git a/.arista/secret_allowlist.yaml b/.arista/secret_allowlist.yaml
new file mode 100644
index 0000000..fea5054
--- /dev/null
+++ b/.arista/secret_allowlist.yaml
@@ -0,0 +1,10 @@
+# Arista Secret Scanner allow list
+
+version: v1.0
+allowed_secrets:
+- secret_pattern: "https://ansible:ansible@192.168.0.2"
+ category: FALSE_POSITIVE
+ reason: Used as example in documentation
+- secret_pattern: "https://ansible:ansible@192.168.0.17"
+ category: FALSE_POSITIVE
+ reason: Used as example in documentation
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 0c13d2c..7f8844d 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -21,7 +21,17 @@
"ms-python.pylint",
"LittleFoxTeam.vscode-python-test-adapter",
"njqdev.vscode-python-typehint",
- "hbenl.vscode-test-explorer"
+ "hbenl.vscode-test-explorer",
+ "codezombiech.gitignore",
+ "ms-python.isort",
+ "eriklynd.json-tools",
+ "ms-python.vscode-pylance",
+ "tuxtina.json2yaml",
+ "christian-kohler.path-intellisense",
+ "ms-python.vscode-pylance",
+ "njqdev.vscode-python-typehint",
+ "LittleFoxTeam.vscode-python-test-adapter",
+ "donjayamanne.python-environment-manager"
]
}
},
diff --git a/.devcontainer/startup.sh b/.devcontainer/startup.sh
index fb9f6f1..ec424c4 100644
--- a/.devcontainer/startup.sh
+++ b/.devcontainer/startup.sh
@@ -9,5 +9,8 @@ pip install --upgrade pip
echo "Installing ANTA package from git"
pip install -e .
+echo "Installing ANTA CLI package from git"
+pip install -e ".[cli]"
+
echo "Installing development tools"
pip install -e ".[dev]"
diff --git a/.github/release.md b/.github/release.md
index 15db226..14c7d44 100644
--- a/.github/release.md
+++ b/.github/release.md
@@ -83,7 +83,7 @@ This is to be executed at the top of the repo
git push origin HEAD
gh pr create --title 'bump: ANTA vx.x.x'
```
-9. Merge PR after review and wait for [workflow](https://github.com/arista-netdevops-community/anta/actions/workflows/release.yml) to be executed.
+9. Merge PR after review and wait for [workflow](https://github.com/aristanetworks/anta/actions/workflows/release.yml) to be executed.
```bash
gh pr merge --squash
diff --git a/.github/workflows/code-testing.yml b/.github/workflows/code-testing.yml
index 5c06d45..d8b2879 100644
--- a/.github/workflows/code-testing.yml
+++ b/.github/workflows/code-testing.yml
@@ -59,30 +59,19 @@ jobs:
pip install .
- name: install dev requirements
run: pip install .[dev]
- missing-documentation:
- name: "Warning documentation is missing"
- runs-on: ubuntu-20.04
- needs: [file-changes]
- if: needs.file-changes.outputs.cli == 'true' && needs.file-changes.outputs.docs == 'false'
- steps:
- - name: Documentation is missing
- uses: GrantBirki/comment@v2.0.10
- with:
- body: |
- Please consider that documentation is missing under `docs/` folder.
- You should update documentation to reflect your change, or maybe not :)
- lint-yaml:
- name: Run linting for yaml files
- runs-on: ubuntu-20.04
- needs: [file-changes, check-requirements]
- if: needs.file-changes.outputs.code == 'true'
- steps:
- - uses: actions/checkout@v4
- - name: yaml-lint
- uses: ibiqlik/action-yamllint@v3
- with:
- config_file: .yamllint.yml
- file_or_dir: .
+ # @gmuloc: commenting this out for now
+ #missing-documentation:
+ # name: "Warning documentation is missing"
+ # runs-on: ubuntu-20.04
+ # needs: [file-changes]
+ # if: needs.file-changes.outputs.cli == 'true' && needs.file-changes.outputs.docs == 'false'
+ # steps:
+ # - name: Documentation is missing
+ # uses: GrantBirki/comment@v2.0.10
+ # with:
+ # body: |
+ # Please consider that documentation is missing under `docs/` folder.
+ # You should update documentation to reflect your change, or maybe not :)
lint-python:
name: Check the code style
runs-on: ubuntu-20.04
diff --git a/.github/workflows/secret-scanner.yml b/.github/workflows/secret-scanner.yml
new file mode 100644
index 0000000..8210953
--- /dev/null
+++ b/.github/workflows/secret-scanner.yml
@@ -0,0 +1,30 @@
+# Secret-scanner workflow from Arista Networks.
+on:
+ pull_request:
+ types: [synchronize]
+ push:
+ branches:
+ - main
+name: Secret Scanner (go/secret-scanner)
+jobs:
+ scan_secret:
+ name: Scan incoming changes
+ runs-on: ubuntu-latest
+ container:
+ image: ghcr.io/aristanetworks/secret-scanner-service:main
+ options: --name sss-scanner
+ steps:
+ - name: Checkout ${{ github.ref }}
+ # Hitting https://github.com/actions/checkout/issues/334 so trying v1
+ uses: actions/checkout@v1
+ with:
+ fetch-depth: 0
+ - name: Run scanner
+ run: |
+ git config --global --add safe.directory $GITHUB_WORKSPACE
+ scanner commit . github ${{ github.repository }} \
+ --markdown-file job_summary.md \
+ ${{ github.event_name == 'pull_request' && format('--since-commit {0}', github.event.pull_request.base.sha) || ''}}
+ - name: Write result to summary
+ run: cat ./job_summary.md >> $GITHUB_STEP_SUMMARY
+ if: ${{ always() }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bdfb5ab..8a31d2b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ files: ^(anta|docs|scripts|tests|asynceapi)/
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.6.0
hooks:
- id: trailing-whitespace
exclude: docs/.*.svg
@@ -15,7 +15,7 @@ repos:
- id: check-merge-conflict
- repo: https://github.com/Lucas-C/pre-commit-hooks
- rev: v1.5.4
+ rev: v1.5.5
hooks:
- name: Check and insert license on Python files
id: insert-license
@@ -43,7 +43,7 @@ repos:
- '<!--| ~| -->'
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.4.2
+ rev: v0.4.8
hooks:
- id: ruff
name: Run Ruff linter
@@ -51,11 +51,10 @@ repos:
- id: ruff-format
name: Run Ruff formatter
- - repo: local # as per https://pylint.pycqa.org/en/latest/user_guide/installation/pre-commit-integration.html
+ - repo: https://github.com/pycqa/pylint
+ rev: "v3.2.3"
hooks:
- id: pylint
- entry: pylint
- language: python
name: Check code style with pylint
description: This hook runs pylint.
types: [python]
@@ -63,9 +62,16 @@ repos:
- -rn # Only display messages
- -sn # Don't display the score
- --rcfile=pyproject.toml # Link to config file
+ additional_dependencies:
+ - anta[cli]
+ - types-PyYAML
+ - types-requests
+ - types-pyOpenSSL
+ - pylint_pydantic
+ - pytest
- repo: https://github.com/codespell-project/codespell
- rev: v2.2.6
+ rev: v2.3.0
hooks:
- id: codespell
name: Checks for common misspellings in text files.
diff --git a/.sonarcloud.properties b/.sonarcloud.properties
new file mode 100644
index 0000000..4cd2ea5
--- /dev/null
+++ b/.sonarcloud.properties
@@ -0,0 +1,18 @@
+# Path to sources
+sonar.sources=anta/,asynceapi/
+#sonar.exclusions=
+#sonar.inclusions=
+
+# Path to tests
+sonar.tests=tests/
+#sonar.test.exclusions=
+#sonar.test.inclusions=
+
+# Source encoding
+#sonar.sourceEncoding=UTF-8
+
+# Python version (for python projects only)
+sonar.python.version=3.9,3.10,3.11,3.12
+
+# Exclusions for copy-paste detection
+#sonar.cpd.exclusions=,
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 8428c00..dd63eea 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -3,18 +3,27 @@
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"pylint.importStrategy": "fromEnvironment",
- "mypy-type-checker.importStrategy": "fromEnvironment",
- "mypy-type-checker.args": [
- "--config-file=pyproject.toml"
- ],
"pylint.severity": {
"refactor": "Warning"
},
"pylint.args": [
- "--load-plugins", "pylint_pydantic",
- "--rcfile=pylintrc"
+ "--load-plugins",
+ "pylint_pydantic",
+ "--rcfile=pyproject.toml"
],
"python.testing.pytestArgs": [
"tests"
],
+ "autoDocstring.docstringFormat": "numpy",
+ "autoDocstring.includeName": false,
+ "autoDocstring.includeExtendedSummary": true,
+ "autoDocstring.startOnNewLine": true,
+ "autoDocstring.guessTypes": true,
+ "python.languageServer": "Pylance",
+ "githubIssues.issueBranchTitle": "issues/${issueNumber}-${issueTitle}",
+ "editor.formatOnPaste": true,
+ "files.trimTrailingWhitespace": true,
+ "mypy.configFile": "pyproject.toml",
+ "workbench.remoteIndicator.showExtensionRecommendations": true,
+
}
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 873bc1c..c7cdf65 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -37,11 +37,11 @@ RUN adduser --system anta
LABEL "org.opencontainers.image.title"="anta" \
"org.opencontainers.artifact.description"="network-test-automation in a Python package and Python scripts to test Arista devices." \
"org.opencontainers.image.description"="network-test-automation in a Python package and Python scripts to test Arista devices." \
- "org.opencontainers.image.source"="https://github.com/arista-netdevops-community/anta" \
+ "org.opencontainers.image.source"="https://github.com/aristanetworks/anta" \
"org.opencontainers.image.url"="https://www.anta.ninja" \
- "org.opencontainers.image.documentation"="https://www.anta.ninja" \
+ "org.opencontainers.image.documentation"="https://anta.arista.com" \
"org.opencontainers.image.licenses"="Apache-2.0" \
- "org.opencontainers.image.vendor"="The anta contributors." \
+ "org.opencontainers.image.vendor"="Arista Networks" \
"org.opencontainers.image.authors"="Khelil Sator, Angélique Phillipps, Colin MacGiollaEáin, Matthieu Tache, Onur Gashi, Paul Lavelle, Guillaume Mulocher, Thomas Grimonet" \
"org.opencontainers.image.base.name"="python" \
"org.opencontainers.image.revision"="dev" \
diff --git a/anta/__init__.py b/anta/__init__.py
index 4dbc107..e7111e9 100644
--- a/anta/__init__.py
+++ b/anta/__init__.py
@@ -45,4 +45,4 @@ RICH_COLOR_THEME = {
"unset": RICH_COLOR_PALETTE.UNSET,
}
-GITHUB_SUGGESTION = "Please reach out to the maintainer team or open an issue on Github: https://github.com/arista-netdevops-community/anta."
+GITHUB_SUGGESTION = "Please reach out to the maintainer team or open an issue on Github: https://github.com/aristanetworks/anta."
diff --git a/anta/cli/debug/utils.py b/anta/cli/debug/utils.py
index 26fef45..04a7a38 100644
--- a/anta/cli/debug/utils.py
+++ b/anta/cli/debug/utils.py
@@ -48,7 +48,7 @@ def debug_options(f: Callable[..., Any]) -> Callable[..., Any]:
device: str,
**kwargs: Any,
) -> Any:
- # TODO: @gmuloc - tags come from context https://github.com/arista-netdevops-community/anta/issues/584
+ # TODO: @gmuloc - tags come from context https://github.com/aristanetworks/anta/issues/584
# pylint: disable=unused-argument
# ruff: noqa: ARG001
if (d := inventory.get(device)) is None:
diff --git a/anta/cli/get/commands.py b/anta/cli/get/commands.py
index a4125db..bfe94e6 100644
--- a/anta/cli/get/commands.py
+++ b/anta/cli/get/commands.py
@@ -13,6 +13,7 @@ from pathlib import Path
from typing import TYPE_CHECKING, Any
import click
+import requests
from cvprac.cvp_client import CvpClient
from cvprac.cvp_client_errors import CvpApiError
from rich.pretty import pretty_repr
@@ -36,14 +37,27 @@ logger = logging.getLogger(__name__)
@click.option("--username", "-u", help="CloudVision username", type=str, required=True)
@click.option("--password", "-p", help="CloudVision password", type=str, required=True)
@click.option("--container", "-c", help="CloudVision container where devices are configured", type=str)
-def from_cvp(ctx: click.Context, output: Path, host: str, username: str, password: str, container: str | None) -> None:
+@click.option(
+ "--ignore-cert",
+ help="Ignore verifying the SSL certificate when connecting to CloudVision",
+ show_envvar=True,
+ is_flag=True,
+ default=False,
+)
+def from_cvp(ctx: click.Context, output: Path, host: str, username: str, password: str, container: str | None, *, ignore_cert: bool) -> None:
# pylint: disable=too-many-arguments
- """Build ANTA inventory from Cloudvision.
+ """Build ANTA inventory from CloudVision.
- TODO - handle get_inventory and get_devices_in_container failure
+ NOTE: Only username/password authentication is supported for on-premises CloudVision instances.
+ Token authentication for both on-premises and CloudVision as a Service (CVaaS) is not supported.
"""
+ # TODO: - Handle get_cv_token, get_inventory and get_devices_in_container failures.
logger.info("Getting authentication token for user '%s' from CloudVision instance '%s'", username, host)
- token = get_cv_token(cvp_ip=host, cvp_username=username, cvp_password=password)
+ try:
+ token = get_cv_token(cvp_ip=host, cvp_username=username, cvp_password=password, verify_cert=not ignore_cert)
+ except requests.exceptions.SSLError as error:
+ logger.error("Authentication to CloudVision failed: %s.", error)
+ ctx.exit(ExitCode.USAGE_ERROR)
clnt = CvpClient()
try:
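The hunk above wires a new `--ignore-cert` flag into `from_cvp` and catches `requests.exceptions.SSLError` before exiting with a usage error. A minimal, hedged sketch of that click pattern, with illustrative names only:

```python
# Illustrative sketch (not part of this commit): a boolean --ignore-cert flag
# passed keyword-only to the command function, as in from_cvp above.
import click


@click.command()
@click.option("--ignore-cert", is_flag=True, default=False, help="Ignore verifying the SSL certificate")
def demo(*, ignore_cert: bool) -> None:
    """Echo the certificate-verification setting derived from the flag."""
    click.echo(f"verify_cert={not ignore_cert}")


if __name__ == "__main__":
    demo()
```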
diff --git a/anta/cli/get/utils.py b/anta/cli/get/utils.py
index 1d56cfa..5308f44 100644
--- a/anta/cli/get/utils.py
+++ b/anta/cli/get/utils.py
@@ -77,16 +77,33 @@ def inventory_output_options(f: Callable[..., Any]) -> Callable[..., Any]:
return wrapper
-def get_cv_token(cvp_ip: str, cvp_username: str, cvp_password: str) -> str:
- """Generate AUTH token from CVP using password."""
- # TODO: need to handle requests error
+def get_cv_token(cvp_ip: str, cvp_username: str, cvp_password: str, *, verify_cert: bool) -> str:
+ """Generate the authentication token from CloudVision using username and password.
+ TODO: need to handle requests error
+
+ Args:
+ ----
+ cvp_ip: IP address of CloudVision.
+ cvp_username: Username to connect to CloudVision.
+ cvp_password: Password to connect to CloudVision.
+ verify_cert: Enable or disable certificate verification when connecting to CloudVision.
+
+ Returns
+ -------
+ token(str): The token to use in further API calls to CloudVision.
+
+ Raises
+ ------
+ requests.ssl.SSLError: If the certificate verification fails
+
+ """
# use CVP REST API to generate a token
url = f"https://{cvp_ip}/cvpservice/login/authenticate.do"
payload = json.dumps({"userId": cvp_username, "password": cvp_password})
headers = {"Content-Type": "application/json", "Accept": "application/json"}
- response = requests.request("POST", url, headers=headers, data=payload, verify=False, timeout=10)
+ response = requests.request("POST", url, headers=headers, data=payload, verify=verify_cert, timeout=10)
return response.json()["sessionId"]
@@ -94,7 +111,7 @@ def write_inventory_to_file(hosts: list[AntaInventoryHost], output: Path) -> Non
"""Write a file inventory from pydantic models."""
i = AntaInventoryInput(hosts=hosts)
with output.open(mode="w", encoding="UTF-8") as out_fd:
- out_fd.write(yaml.dump({AntaInventory.INVENTORY_ROOT_KEY: i.model_dump(exclude_unset=True)}))
+ out_fd.write(yaml.dump({AntaInventory.INVENTORY_ROOT_KEY: yaml.safe_load(i.yaml())}))
logger.info("ANTA inventory file has been created: '%s'", output)
diff --git a/anta/inventory/models.py b/anta/inventory/models.py
index e26ea00..5796ef7 100644
--- a/anta/inventory/models.py
+++ b/anta/inventory/models.py
@@ -6,7 +6,9 @@
from __future__ import annotations
import logging
+import math
+import yaml
from pydantic import BaseModel, ConfigDict, IPvAnyAddress, IPvAnyNetwork
from anta.custom_types import Hostname, Port
@@ -82,3 +84,16 @@ class AntaInventoryInput(BaseModel):
networks: list[AntaInventoryNetwork] | None = None
hosts: list[AntaInventoryHost] | None = None
ranges: list[AntaInventoryRange] | None = None
+
+ def yaml(self) -> str:
+ """Return a YAML representation string of this model.
+
+ Returns
+ -------
+ The YAML representation string of this model.
+ """
+ # TODO: Pydantic and YAML serialization/deserialization is not supported natively.
+ # This could be improved.
+ # https://github.com/pydantic/pydantic/issues/1043
+ # Explore if this worth using this: https://github.com/NowanIlfideme/pydantic-yaml
+ return yaml.safe_dump(yaml.safe_load(self.model_dump_json(serialize_as_any=True, exclude_unset=True)), indent=2, width=math.inf)
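A short usage sketch of the new `AntaInventoryInput.yaml()` helper; the host values below are placeholders:

```python
# Hedged example: build a small inventory model and dump it as YAML.
# The method round-trips through JSON internally, as noted in the TODO above.
from anta.inventory.models import AntaInventoryHost, AntaInventoryInput

inventory = AntaInventoryInput(hosts=[AntaInventoryHost(name="leaf1", host="leaf1.example.com")])
print(inventory.yaml())
```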
diff --git a/anta/tests/routing/isis.py b/anta/tests/routing/isis.py
index addc083..afa75b5 100644
--- a/anta/tests/routing/isis.py
+++ b/anta/tests/routing/isis.py
@@ -7,6 +7,7 @@
# mypy: disable-error-code=attr-defined
from __future__ import annotations
+from ipaddress import IPv4Address, IPv4Network
from typing import Any, ClassVar, Literal
from pydantic import BaseModel
@@ -118,6 +119,20 @@ def _get_interface_data(interface: str, vrf: str, command_output: dict[str, Any]
return None
+def _get_adjacency_segment_data_by_neighbor(neighbor: str, instance: str, vrf: str, command_output: dict[str, Any]) -> dict[str, Any] | None:
+ """Extract data related to an IS-IS interface for testing."""
+ search_path = f"vrfs.{vrf}.isisInstances.{instance}.adjacencySegments"
+ if get_value(dictionary=command_output, key=search_path, default=None) is None:
+ return None
+
+ isis_instance = get_value(dictionary=command_output, key=search_path, default=None)
+
+ return next(
+ (segment_data for segment_data in isis_instance if neighbor == segment_data["ipAddress"]),
+ None,
+ )
+
+
class VerifyISISNeighborState(AntaTest):
"""Verifies all IS-IS neighbors are in UP state.
@@ -211,14 +226,15 @@ class VerifyISISNeighborCount(AntaTest):
isis_neighbor_count = _get_isis_neighbors_count(command_output)
if len(isis_neighbor_count) == 0:
self.result.is_skipped("No IS-IS neighbor detected")
+ return
for interface in self.inputs.interfaces:
eos_data = [ifl_data for ifl_data in isis_neighbor_count if ifl_data["interface"] == interface.name and ifl_data["level"] == interface.level]
if not eos_data:
self.result.is_failure(f"No neighbor detected for interface {interface.name}")
- return
+ continue
if eos_data[0]["count"] != interface.count:
self.result.is_failure(
- f"Interface {interface.name}:"
+ f"Interface {interface.name}: "
f"expected Level {interface.level}: count {interface.count}, "
f"got Level {eos_data[0]['level']}: count {eos_data[0]['count']}"
)
@@ -284,7 +300,8 @@ class VerifyISISInterfaceMode(AntaTest):
self.result.is_success()
if len(command_output["vrfs"]) == 0:
- self.result.is_failure("IS-IS is not configured on device")
+ self.result.is_skipped("IS-IS is not configured on device")
+ return
# Check for p2p interfaces
for interface in self.inputs.interfaces:
@@ -306,3 +323,409 @@ class VerifyISISInterfaceMode(AntaTest):
self.result.is_failure(f"Interface {interface.name} in VRF {interface.vrf} is not running in passive mode")
else:
self.result.is_failure(f"Interface {interface.name} not found in VRF {interface.vrf}")
+
+
+class VerifyISISSegmentRoutingAdjacencySegments(AntaTest):
+ """Verifies ISIS Segment Routing Adjacency Segments.
+
+ Verify that all expected Adjacency segments are correctly visible for each interface.
+
+ Expected Results
+ ----------------
+ * Success: The test will pass if all listed interfaces have correct adjacencies.
+ * Failure: The test will fail if any of the listed interfaces does not have the expected list of adjacencies.
+ * Skipped: The test will be skipped if no ISIS SR Adjacency is found.
+
+ Examples
+ --------
+ ```yaml
+ anta.tests.routing:
+ isis:
+ - VerifyISISSegmentRoutingAdjacencySegments:
+ instances:
+ - name: CORE-ISIS
+ vrf: default
+ segments:
+ - interface: Ethernet2
+ address: 10.0.1.3
+ sid_origin: dynamic
+
+ ```
+ """
+
+ name = "VerifyISISSegmentRoutingAdjacencySegments"
+ description = "Verify expected Adjacency segments are correctly visible for each interface."
+ categories: ClassVar[list[str]] = ["isis", "segment-routing"]
+ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis segment-routing adjacency-segments", ofmt="json")]
+
+ class Input(AntaTest.Input):
+ """Input model for the VerifyISISSegmentRoutingAdjacencySegments test."""
+
+ instances: list[IsisInstance]
+
+ class IsisInstance(BaseModel):
+ """ISIS Instance model definition."""
+
+ name: str
+ """ISIS instance name."""
+ vrf: str = "default"
+ """VRF name where ISIS instance is configured."""
+ segments: list[Segment]
+ """List of Adjacency segments configured in this instance."""
+
+ class Segment(BaseModel):
+ """Segment model definition."""
+
+ interface: Interface
+ """Interface name to check."""
+ level: Literal[1, 2] = 2
+ """ISIS level configured for interface. Default is 2."""
+ sid_origin: Literal["dynamic"] = "dynamic"
+ """Adjacency type"""
+ address: IPv4Address
+ """IP address of remote end of segment."""
+
+ @AntaTest.anta_test
+ def test(self) -> None:
+ """Main test function for VerifyISISSegmentRoutingAdjacencySegments."""
+ command_output = self.instance_commands[0].json_output
+ self.result.is_success()
+
+ if len(command_output["vrfs"]) == 0:
+ self.result.is_skipped("IS-IS is not configured on device")
+ return
+
+ # initiate defaults
+ failure_message = []
+ skip_vrfs = []
+ skip_instances = []
+
+ # Check if VRFs and instances are present in output.
+ for instance in self.inputs.instances:
+ vrf_data = get_value(
+ dictionary=command_output,
+ key=f"vrfs.{instance.vrf}",
+ default=None,
+ )
+ if vrf_data is None:
+ skip_vrfs.append(instance.vrf)
+ failure_message.append(f"VRF {instance.vrf} is not configured to run segment routging.")
+
+ elif get_value(dictionary=vrf_data, key=f"isisInstances.{instance.name}", default=None) is None:
+ skip_instances.append(instance.name)
+ failure_message.append(f"Instance {instance.name} is not found in vrf {instance.vrf}.")
+
+ # Check Adjacency segments
+ for instance in self.inputs.instances:
+ if instance.vrf not in skip_vrfs and instance.name not in skip_instances:
+ for input_segment in instance.segments:
+ eos_segment = _get_adjacency_segment_data_by_neighbor(
+ neighbor=str(input_segment.address),
+ instance=instance.name,
+ vrf=instance.vrf,
+ command_output=command_output,
+ )
+ if eos_segment is None:
+ failure_message.append(f"Your segment has not been found: {input_segment}.")
+
+ elif (
+ eos_segment["localIntf"] != input_segment.interface
+ or eos_segment["level"] != input_segment.level
+ or eos_segment["sidOrigin"] != input_segment.sid_origin
+ ):
+ failure_message.append(f"Your segment is not correct: Expected: {input_segment} - Found: {eos_segment}.")
+ if failure_message:
+ self.result.is_failure("\n".join(failure_message))
+
+
+class VerifyISISSegmentRoutingDataplane(AntaTest):
+ """
+ Verify dataplane of a list of ISIS-SR instances.
+
+ Expected Results
+ ----------------
+ * Success: The test will pass if all instances have correct dataplane configured
+ * Failure: The test will fail if one of the instances has incorrect dataplane configured
+ * Skipped: The test will be skipped if ISIS is not running
+
+ Examples
+ --------
+ ```yaml
+ anta.tests.routing:
+ isis:
+ - VerifyISISSegmentRoutingDataplane:
+ instances:
+ - name: CORE-ISIS
+ vrf: default
+ dataplane: MPLS
+ ```
+ """
+
+ name = "VerifyISISSegmentRoutingDataplane"
+ description = "Verify dataplane of a list of ISIS-SR instances"
+ categories: ClassVar[list[str]] = ["isis", "segment-routing"]
+ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis segment-routing", ofmt="json")]
+
+ class Input(AntaTest.Input):
+ """Input model for the VerifyISISSegmentRoutingDataplane test."""
+
+ instances: list[IsisInstance]
+
+ class IsisInstance(BaseModel):
+ """ISIS Instance model definition."""
+
+ name: str
+ """ISIS instance name."""
+ vrf: str = "default"
+ """VRF name where ISIS instance is configured."""
+ dataplane: Literal["MPLS", "mpls", "unset"] = "MPLS"
+ """Configured dataplane for the instance."""
+
+ @AntaTest.anta_test
+ def test(self) -> None:
+ """Main test function for VerifyISISSegmentRoutingDataplane."""
+ command_output = self.instance_commands[0].json_output
+ self.result.is_success()
+
+ if len(command_output["vrfs"]) == 0:
+ self.result.is_skipped("IS-IS-SR is not running on device.")
+ return
+
+ # initiate defaults
+ failure_message = []
+ skip_vrfs = []
+ skip_instances = []
+
+ # Check if VRFs and instances are present in output.
+ for instance in self.inputs.instances:
+ vrf_data = get_value(
+ dictionary=command_output,
+ key=f"vrfs.{instance.vrf}",
+ default=None,
+ )
+ if vrf_data is None:
+ skip_vrfs.append(instance.vrf)
+ failure_message.append(f"VRF {instance.vrf} is not configured to run segment routing.")
+
+ elif get_value(dictionary=vrf_data, key=f"isisInstances.{instance.name}", default=None) is None:
+ skip_instances.append(instance.name)
+ failure_message.append(f"Instance {instance.name} is not found in vrf {instance.vrf}.")
+
+ # Check Adjacency segments
+ for instance in self.inputs.instances:
+ if instance.vrf not in skip_vrfs and instance.name not in skip_instances:
+ eos_dataplane = get_value(dictionary=command_output, key=f"vrfs.{instance.vrf}.isisInstances.{instance.name}.dataPlane", default=None)
+ if instance.dataplane.upper() != eos_dataplane:
+ failure_message.append(f"ISIS instance {instance.name} is not running dataplane {instance.dataplane} ({eos_dataplane})")
+
+ if failure_message:
+ self.result.is_failure("\n".join(failure_message))
+
+
+class VerifyISISSegmentRoutingTunnels(AntaTest):
+ """
+ Verify ISIS-SR tunnels computed by device.
+
+ Expected Results
+ ----------------
+ * Success: The test will pass if all listed tunnels are computed on device.
+ * Failure: The test will fail if one of the listed tunnels is missing.
+ * Skipped: The test will be skipped if ISIS-SR is not configured.
+
+ Examples
+ --------
+ ```yaml
+ anta.tests.routing:
+ isis:
+ - VerifyISISSegmentRoutingTunnels:
+ entries:
+ # Check only endpoint
+ - endpoint: 1.0.0.122/32
+ # Check endpoint and via TI-LFA
+ - endpoint: 1.0.0.13/32
+ vias:
+ - type: tunnel
+ tunnel_id: ti-lfa
+ # Check endpoint and via IP routers
+ - endpoint: 1.0.0.14/32
+ vias:
+ - type: ip
+ nexthop: 1.1.1.1
+ ```
+ """
+
+ name = "VerifyISISSegmentRoutingTunnels"
+ description = "Verify ISIS-SR tunnels computed by device"
+ categories: ClassVar[list[str]] = ["isis", "segment-routing"]
+ commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show isis segment-routing tunnel", ofmt="json")]
+
+ class Input(AntaTest.Input):
+ """Input model for the VerifyISISSegmentRoutingTunnels test."""
+
+ entries: list[Entry]
+ """List of tunnels to check on device."""
+
+ class Entry(BaseModel):
+ """Definition of a tunnel entry."""
+
+ endpoint: IPv4Network
+ """Endpoint IP of the tunnel."""
+ vias: list[Vias] | None = None
+ """Optional list of path to reach endpoint."""
+
+ class Vias(BaseModel):
+ """Definition of a tunnel path."""
+
+ nexthop: IPv4Address | None = None
+ """Nexthop of the tunnel. If None, then it is not tested. Default: None"""
+ type: Literal["ip", "tunnel"] | None = None
+ """Type of the tunnel. If None, then it is not tested. Default: None"""
+ interface: Interface | None = None
+ """Interface of the tunnel. If None, then it is not tested. Default: None"""
+ tunnel_id: Literal["TI-LFA", "ti-lfa", "unset"] | None = None
+ """Computation method of the tunnel. If None, then it is not tested. Default: None"""
+
+ def _eos_entry_lookup(self, search_value: IPv4Network, entries: dict[str, Any], search_key: str = "endpoint") -> dict[str, Any] | None:
+ return next(
+ (entry_value for entry_id, entry_value in entries.items() if str(entry_value[search_key]) == str(search_value)),
+ None,
+ )
+
+ @AntaTest.anta_test
+ def test(self) -> None:
+ """Main test function for VerifyISISSegmentRoutingTunnels.
+
+ This method performs the main test logic for verifying ISIS Segment Routing tunnels.
+ It checks the command output, initiates defaults, and performs various checks on the tunnels.
+
+ Returns
+ -------
+ None
+ """
+ command_output = self.instance_commands[0].json_output
+ self.result.is_success()
+
+ # initiate defaults
+ failure_message = []
+
+ if len(command_output["entries"]) == 0:
+ self.result.is_skipped("IS-IS-SR is not running on device.")
+ return
+
+ for input_entry in self.inputs.entries:
+ eos_entry = self._eos_entry_lookup(search_value=input_entry.endpoint, entries=command_output["entries"])
+ if eos_entry is None:
+ failure_message.append(f"Tunnel to {input_entry} is not found.")
+ elif input_entry.vias is not None:
+ failure_src = []
+ for via_input in input_entry.vias:
+ if not self._check_tunnel_type(via_input, eos_entry):
+ failure_src.append("incorrect tunnel type")
+ if not self._check_tunnel_nexthop(via_input, eos_entry):
+ failure_src.append("incorrect nexthop")
+ if not self._check_tunnel_interface(via_input, eos_entry):
+ failure_src.append("incorrect interface")
+ if not self._check_tunnel_id(via_input, eos_entry):
+ failure_src.append("incorrect tunnel ID")
+
+ if failure_src:
+ failure_message.append(f"Tunnel to {input_entry.endpoint!s} is incorrect: {', '.join(failure_src)}")
+
+ if failure_message:
+ self.result.is_failure("\n".join(failure_message))
+
+ def _check_tunnel_type(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
+ """
+ Check if the tunnel type specified in `via_input` matches any of the tunnel types in `eos_entry`.
+
+ Args:
+ via_input (VerifyISISSegmentRoutingTunnels.Input.Entry.Vias): The input tunnel type to check.
+ eos_entry (dict[str, Any]): The EOS entry containing the tunnel types.
+
+ Returns
+ -------
+ bool: True if the tunnel type matches any of the tunnel types in `eos_entry`, False otherwise.
+ """
+ if via_input.type is not None:
+ return any(
+ via_input.type
+ == get_value(
+ dictionary=eos_via,
+ key="type",
+ default="undefined",
+ )
+ for eos_via in eos_entry["vias"]
+ )
+ return True
+
+ def _check_tunnel_nexthop(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
+ """
+ Check if the tunnel nexthop matches the given input.
+
+ Args:
+ via_input (VerifyISISSegmentRoutingTunnels.Input.Entry.Vias): The input via object.
+ eos_entry (dict[str, Any]): The EOS entry dictionary.
+
+ Returns
+ -------
+ bool: True if the tunnel nexthop matches, False otherwise.
+ """
+ if via_input.nexthop is not None:
+ return any(
+ str(via_input.nexthop)
+ == get_value(
+ dictionary=eos_via,
+ key="nexthop",
+ default="undefined",
+ )
+ for eos_via in eos_entry["vias"]
+ )
+ return True
+
+ def _check_tunnel_interface(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
+ """
+ Check if the tunnel interface exists in the given EOS entry.
+
+ Args:
+ via_input (VerifyISISSegmentRoutingTunnels.Input.Entry.Vias): The input via object.
+ eos_entry (dict[str, Any]): The EOS entry dictionary.
+
+ Returns
+ -------
+ bool: True if the tunnel interface exists, False otherwise.
+ """
+ if via_input.interface is not None:
+ return any(
+ via_input.interface
+ == get_value(
+ dictionary=eos_via,
+ key="interface",
+ default="undefined",
+ )
+ for eos_via in eos_entry["vias"]
+ )
+ return True
+
+ def _check_tunnel_id(self, via_input: VerifyISISSegmentRoutingTunnels.Input.Entry.Vias, eos_entry: dict[str, Any]) -> bool:
+ """
+ Check if the tunnel ID matches any of the tunnel IDs in the EOS entry's vias.
+
+ Args:
+ via_input (VerifyISISSegmentRoutingTunnels.Input.Entry.Vias): The input vias to check.
+ eos_entry (dict[str, Any]): The EOS entry to compare against.
+
+ Returns
+ -------
+ bool: True if the tunnel ID matches any of the tunnel IDs in the EOS entry's vias, False otherwise.
+ """
+ if via_input.tunnel_id is not None:
+ return any(
+ via_input.tunnel_id.upper()
+ == get_value(
+ dictionary=eos_via,
+ key="tunnelId.type",
+ default="undefined",
+ ).upper()
+ for eos_via in eos_entry["vias"]
+ )
+ return True
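The new `_get_adjacency_segment_data_by_neighbor` helper relies on a simple first-match scan; below is a standalone sketch of that lookup pattern with fabricated sample data:

```python
# Standalone sketch of the first-match lookup used by
# _get_adjacency_segment_data_by_neighbor(); the sample data is illustrative only.
from __future__ import annotations

from typing import Any


def find_segment(neighbor: str, segments: list[dict[str, Any]]) -> dict[str, Any] | None:
    """Return the first adjacency segment whose ipAddress matches neighbor, or None."""
    return next((segment for segment in segments if segment["ipAddress"] == neighbor), None)


segments = [{"ipAddress": "10.0.1.3", "localIntf": "Ethernet2", "level": 2, "sidOrigin": "dynamic"}]
assert find_segment("10.0.1.3", segments) is not None
assert find_segment("10.0.1.4", segments) is None
```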
diff --git a/docs/README.md b/docs/README.md
index 378867f..44b4455 100755
--- a/docs/README.md
+++ b/docs/README.md
@@ -6,23 +6,23 @@
# Arista Network Test Automation (ANTA) Framework
-| **Code** | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![Numpy](https://img.shields.io/badge/Docstring_format-numpy-blue)](https://numpydoc.readthedocs.io/en/latest/format.html) |
+| **Code** | [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![Numpy](https://img.shields.io/badge/Docstring_format-numpy-blue)](https://numpydoc.readthedocs.io/en/latest/format.html) [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=aristanetworks_anta&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=aristanetworks_anta) |
| :------------: | :-------|
-| **License** | [![License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/arista-netdevops-community/anta/blob/main/LICENSE) |
-| **GitHub** | [![CI](https://github.com/arista-netdevops-community/anta/actions/workflows/code-testing.yml/badge.svg)](https://github.com/arista-netdevops-community/anta/actions/workflows/code-testing.yml) ![Coverage](https://raw.githubusercontent.com/arista-netdevops-community/anta/coverage-badge/latest-release-coverage.svg) ![Commit](https://img.shields.io/github/last-commit/arista-netdevops-community/anta) ![GitHub commit activity (branch)](https://img.shields.io/github/commit-activity/m/arista-netdevops-community/anta) [![Github release](https://img.shields.io/github/release/arista-netdevops-community/anta.svg)](https://github.com/arista-netdevops-community/anta/releases/) [![Contributors](https://img.shields.io/github/contributors/arista-netdevops-community/anta)](https://github.com/arista-netdevops-community/anta/graphs/contributors) |
+| **License** | [![License](https://img.shields.io/badge/license-Apache%202.0-brightgreen.svg)](https://github.com/aristanetworks/anta/blob/main/LICENSE) |
+| **GitHub** | [![CI](https://github.com/aristanetworks/anta/actions/workflows/code-testing.yml/badge.svg)](https://github.com/aristanetworks/anta/actions/workflows/code-testing.yml) ![Coverage](https://raw.githubusercontent.com/aristanetworks/anta/coverage-badge/latest-release-coverage.svg) ![Commit](https://img.shields.io/github/last-commit/aristanetworks/anta) ![GitHub commit activity (branch)](https://img.shields.io/github/commit-activity/m/aristanetworks/anta) [![Github release](https://img.shields.io/github/release/aristanetworks/anta.svg)](https://github.com/aristanetworks/anta/releases/) [![Contributors](https://img.shields.io/github/contributors/aristanetworks/anta)](https://github.com/aristanetworks/anta/graphs/contributors) |
| **PyPi** | ![PyPi Version](https://img.shields.io/pypi/v/anta) ![Python Versions](https://img.shields.io/pypi/pyversions/anta) ![Python format](https://img.shields.io/pypi/format/anta) ![PyPI - Downloads](https://img.shields.io/pypi/dm/anta) |
ANTA is a Python framework that automates tests for Arista devices.
- ANTA provides a [set of tests](api/tests.md) to validate the state of your network
- ANTA can be used to:
- - Automate NRFU (Network Ready For Use) test on a preproduction network
- - Automate tests on a live network (periodically or on demand)
+ - Automate NRFU (Network Ready For Use) test on a preproduction network
+ - Automate tests on a live network (periodically or on demand)
- ANTA can be used with:
- - As a [Python library](advanced_usages/as-python-lib.md) in your own application
- - The [ANTA CLI](cli/overview.md)
+ - As a [Python library](advanced_usages/as-python-lib.md) in your own application
+ - The [ANTA CLI](cli/overview.md)
-![anta nrfu](https://raw.githubusercontent.com/arista-netdevops-community/anta/main/docs/imgs/anta-nrfu.svg)
+![anta nrfu](https://raw.githubusercontent.com/aristanetworks/anta/main/docs/imgs/anta-nrfu.svg)
## Install ANTA library
@@ -72,12 +72,12 @@ Commands:
You can also still choose to install it with directly with `pip`:
```bash
-$ pip install anta[cli]
+pip install anta[cli]
```
## Documentation
-The documentation is published on [ANTA package website](https://www.anta.ninja). Also, a [demo repository](https://github.com/titom73/atd-anta-demo) is available to facilitate your journey with ANTA.
+The documentation is published on [ANTA package website](https://anta.arista.com).
## Contribution guide
diff --git a/docs/advanced_usages/custom-tests.md b/docs/advanced_usages/custom-tests.md
index ba62636..c6a2fa8 100644
--- a/docs/advanced_usages/custom-tests.md
+++ b/docs/advanced_usages/custom-tests.md
@@ -334,10 +334,10 @@ For that, you need to create your own Python package as described in this [hitch
It is very similar to what is documented in [catalog section](../usage-inventory-catalog.md) but you have to use your own package name.
-Let say the custom Python package is `anta_titom73` and the test is defined in `anta_titom73.dc_project` Python module, the test catalog would look like:
+Let say the custom Python package is `anta_custom` and the test is defined in `anta_custom.dc_project` Python module, the test catalog would look like:
```yaml
-anta_titom73.dc_project:
+anta_custom.dc_project:
- VerifyFeatureX:
minimum: 1
```
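For completeness, a hypothetical `anta_custom/dc_project.py` skeleton matching the catalog entry above, modeled on the `AntaTest` pattern used in `anta/tests/routing/isis.py`; all names and the check itself are placeholders:

```python
# Hypothetical custom test module sketch; not part of this commit.
from __future__ import annotations

from typing import ClassVar

from anta.models import AntaCommand, AntaTemplate, AntaTest


class VerifyFeatureX(AntaTest):
    """Verifies that feature X is reported by the device."""

    name = "VerifyFeatureX"
    description = "Verify feature X is present"
    categories: ClassVar[list[str]] = ["custom"]
    commands: ClassVar[list[AntaCommand | AntaTemplate]] = [AntaCommand(command="show version", ofmt="json")]

    class Input(AntaTest.Input):
        """Input model for VerifyFeatureX."""

        minimum: int

    @AntaTest.anta_test
    def test(self) -> None:
        """Placeholder check: succeed when the command returned any output."""
        command_output = self.instance_commands[0].json_output
        if command_output:
            self.result.is_success()
        else:
            self.result.is_failure(f"Expected at least {self.inputs.minimum} instance(s) of feature X")
```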
diff --git a/docs/cli/inv-from-cvp.md b/docs/cli/inv-from-cvp.md
index 8897370..a37af62 100644
--- a/docs/cli/inv-from-cvp.md
+++ b/docs/cli/inv-from-cvp.md
@@ -8,21 +8,32 @@
In large setups, it might be beneficial to construct your inventory based on CloudVision. The `from-cvp` entrypoint of the `get` command enables the user to create an ANTA inventory from CloudVision.
+!!! info
+ The current implementation only works with on-premises CloudVision instances, not with CloudVision as a Service (CVaaS).
+
### Command overview
```bash
-anta get from-cvp --help
Usage: anta get from-cvp [OPTIONS]
- Build ANTA inventory from Cloudvision
+ Build ANTA inventory from CloudVision.
+
+ NOTE: Only username/password authentication is supported for on-premises CloudVision instances.
+ Token authentication for both on-premises and CloudVision as a Service (CVaaS) is not supported.
Options:
- -ip, --cvp-ip TEXT CVP IP Address [required]
- -u, --cvp-username TEXT CVP Username [required]
- -p, --cvp-password TEXT CVP Password / token [required]
- -c, --cvp-container TEXT Container where devices are configured
- -d, --inventory-directory PATH Path to save inventory file
- --help Show this message and exit.
+ -o, --output FILE Path to save inventory file [env var: ANTA_INVENTORY;
+ required]
+ --overwrite Do not prompt when overriding current inventory [env
+ var: ANTA_GET_FROM_CVP_OVERWRITE]
+ -host, --host TEXT CloudVision instance FQDN or IP [required]
+ -u, --username TEXT CloudVision username [required]
+ -p, --password TEXT CloudVision password [required]
+ -c, --container TEXT CloudVision container where devices are configured
+ --ignore-cert By default connection to CV will use HTTPS
+ certificate, set this flag to disable it [env var:
+ ANTA_GET_FROM_CVP_IGNORE_CERT]
+ --help Show this message and exit.
```
The output is an inventory where the name of the container is added as a tag for each host:
diff --git a/docs/cli/nrfu.md b/docs/cli/nrfu.md
index 76605cb..90b4a40 100644
--- a/docs/cli/nrfu.md
+++ b/docs/cli/nrfu.md
@@ -67,7 +67,7 @@ Options:
```bash
anta nrfu --device DC1-LEAF1A text
```
-[![anta nrfu text results](../imgs/anta-nrfu-text-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-text-output.png)
+![anta nrfu text results](../imgs/anta-nrfu-text-output.png){ loading=lazy width="1600" }
## Performing NRFU with table rendering
@@ -92,31 +92,31 @@ The `--group-by` option show a summarized view of the test results per host or p
```bash
anta nrfu --tags LEAF table
```
-[![anta nrfu table results](../imgs/anta-nrfu-table-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-table-output.png)
+![anta nrfu table results](../imgs/anta-nrfu-table-output.png){ loading=lazy width="1600" }
For larger setups, you can also group the results by host or test to get a summarized view:
```bash
anta nrfu table --group-by device
```
-[![anta nrfu table group_by_host_output](../imgs/anta-nrfu-table-group-by-host-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-table-group-by-host-output.png)
+![anta nrfu table group_by_host_output](../imgs/anta-nrfu-table-group-by-host-output.png){ loading=lazy width="1600" }
```bash
anta nrfu table --group-by test
```
-[![anta nrfu table group_by_test_output](../imgs/anta-nrfu-table-group-by-test-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-table-group-by-test-output.png)
+![anta nrfu table group_by_test_output](../imgs/anta-nrfu-table-group-by-test-output.png){ loading=lazy width="1600" }
To get more specific information, it is possible to filter on a single device or a single test:
```bash
anta nrfu --device spine1 table
```
-[![anta nrfu table filter_host_output](../imgs/anta-nrfu-table-filter-host-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-table-filter-host-output.png)
+![anta nrfu table filter_host_output](../imgs/anta-nrfu-table-filter-host-output.png){ loading=lazy width="1600" }
```bash
anta nrfu --test VerifyZeroTouch table
```
-[![anta nrfu table filter_test_output](../imgs/anta-nrfu-table-filter-test-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-table-filter-test-output.png)
+![anta nrfu table filter_test_output](../imgs/anta-nrfu-table-filter-test-output.png){ loading=lazy width="1600" }
## Performing NRFU with JSON rendering
@@ -143,7 +143,7 @@ The `--output` option allows you to save the JSON report as a file.
```bash
anta nrfu --tags LEAF json
```
-[![anta nrfu json results](../imgs/anta-nrfu-json-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-json-output.png)
+![anta nrfu json results](../imgs/anta-nrfu-json-output.png){ loading=lazy width="1600" }
## Performing NRFU with custom reports
@@ -173,7 +173,7 @@ The `--output` option allows you to choose the path where the final report will
```bash
anta nrfu --tags LEAF tpl-report --template ./custom_template.j2
```
-[![anta nrfu tpl_resultss](../imgs/anta-nrfu-tpl-report-output.png){ loading=lazy width="1600" }](../imgs/anta-nrfu-tpl-report-output.png)
+![anta nrfu tpl_results](../imgs/anta-nrfu-tpl-report-output.png){ loading=lazy width="1600" }
The template `./custom_template.j2` is a simple Jinja2 template:
@@ -205,4 +205,4 @@ cat nrfu-tpl-report.txt
It is possible to run `anta nrfu --dry-run` to execute ANTA up to the point where it should communicate with the network to execute the tests. When using `--dry-run`, all inventory devices are assumed to be online. This can be useful to check how many tests would be run using the catalog and inventory.
-[![anta nrfu dry_run](../imgs/anta_nrfu___dry_run.svg){ loading=lazy width="1600" }](../imgs/anta_nrfu___dry_run.svg)
+![anta nrfu dry_run](../imgs/anta_nrfu___dry_run.svg){ loading=lazy width="1600" }
diff --git a/docs/contribution.md b/docs/contribution.md
index cc3a1c0..ac5d026 100644
--- a/docs/contribution.md
+++ b/docs/contribution.md
@@ -6,7 +6,7 @@
# How to contribute to ANTA
-Contribution model is based on a fork-model. Don't push to arista-netdevops-community/anta directly. Always do a branch in your forked repository and create a PR.
+Contribution model is based on a fork-model. Don't push to aristanetworks/anta directly. Always do a branch in your forked repository and create a PR.
To help development, open your PR as soon as possible, even in draft mode. It helps others know what you are working on and avoids duplicate PRs.
@@ -16,7 +16,7 @@ Run the following commands to create an ANTA development environment:
```bash
# Clone repository
-$ git clone https://github.com/arista-netdevops-community/anta.git
+$ git clone https://github.com/aristanetworks/anta.git
$ cd anta
# Install ANTA in editable mode and its development tools
@@ -28,7 +28,7 @@ $ pip install -e .[dev,cli]
$ pip list -e
Package Version Editable project location
------- ------- -------------------------
-anta 0.15.0 /mnt/lab/projects/anta
+anta 1.0.0 /mnt/lab/projects/anta
```
Then, [`tox`](https://tox.wiki/) is configured with few environments to run CI locally:
@@ -229,4 +229,4 @@ muffet -c 2 --color=always http://127.0.0.1:8000 -e fonts.gstatic.com -b 8192
## Continuous Integration
-GitHub actions is used to test git pushes and pull requests. The workflows are defined in this [directory](https://github.com/arista-netdevops-community/anta/tree/main/.github/workflows). We can view the results [here](https://github.com/arista-netdevops-community/anta/actions).
+GitHub actions is used to test git pushes and pull requests. The workflows are defined in this [directory](https://github.com/aristanetworks/anta/tree/main/.github/workflows). We can view the results [here](https://github.com/aristanetworks/anta/actions).
diff --git a/docs/faq.md b/docs/faq.md
index c71d1c6..a699c84 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -126,4 +126,4 @@ toc_depth: 2
# Still facing issues?
-If you've tried the above solutions and continue to experience problems, please follow the [troubleshooting](troubleshooting.md) instructions and report the issue in our [GitHub repository](https://github.com/arista-netdevops-community/anta).
+If you've tried the above solutions and continue to experience problems, please follow the [troubleshooting](troubleshooting.md) instructions and report the issue in our [GitHub repository](https://github.com/aristanetworks/anta).
diff --git a/docs/getting-started.md b/docs/getting-started.md
index bab1cea..39b270c 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -121,12 +121,6 @@ anta.tests.configuration:
## Test your network
-### Basic usage in a python script
-
-```python
---8<-- "anta_runner.py"
-```
-
### CLI
ANTA comes with a generic CLI entrypoint to run tests in your network. It requires an inventory file as well as a test catalog.
@@ -264,3 +258,9 @@ $ anta nrfu \
```
You can find more information under the __usage__ section of the website
+
+### Basic usage in a Python script
+
+```python
+--8<-- "anta_runner.py"
+```
diff --git a/docs/overrides/main.html b/docs/overrides/main.html
index 2863221..ad3693f 100644
--- a/docs/overrides/main.html
+++ b/docs/overrides/main.html
@@ -15,3 +15,7 @@
</script>
{{app}}
{% endblock %}
+
+{% block announce %}
ANTA code has moved to a new house in aristanetworks organization and so has the documentation. <strong>Please update your bookmark to use <a href="https://anta.arista.com">anta.arista.com</a></strong>
+{% endblock %}
diff --git a/docs/requirements-and-installation.md b/docs/requirements-and-installation.md
index 75560d3..ffcb9aa 100644
--- a/docs/requirements-and-installation.md
+++ b/docs/requirements-and-installation.md
@@ -19,7 +19,7 @@ Python 3.11.8
This installation will deploy tests collection, scripts and all their Python requirements.
-The ANTA package and the cli require some packages that are not part of the Python standard library. They are indicated in the [pyproject.toml](https://github.com/arista-netdevops-community/anta/blob/main/pyproject.toml) file, under dependencies.
+The ANTA package and the cli require some packages that are not part of the Python standard library. They are indicated in the [pyproject.toml](https://github.com/aristanetworks/anta/blob/main/pyproject.toml) file, under dependencies.
### Install library from Pypi server
@@ -59,18 +59,18 @@ pip install anta[cli]
```bash
-pip install git+https://github.com/arista-netdevops-community/anta.git
-pip install git+https://github.com/arista-netdevops-community/anta.git#egg=anta[cli]
+pip install git+https://github.com/aristanetworks/anta.git
+pip install git+https://github.com/aristanetworks/anta.git#egg=anta[cli]
# You can even specify the branch, tag or commit:
-pip install git+https://github.com/arista-netdevops-community/anta.git@<cool-feature-branch>
-pip install git+https://github.com/arista-netdevops-community/anta.git@<cool-feature-branch>#egg=anta[cli]
+pip install git+https://github.com/aristanetworks/anta.git@<cool-feature-branch>
+pip install git+https://github.com/aristanetworks/anta.git@<cool-feature-branch>#egg=anta[cli]
-pip install git+https://github.com/arista-netdevops-community/anta.git@<cool-tag>
-pip install git+https://github.com/arista-netdevops-community/anta.git@<cool-tag>#egg=anta[cli]
+pip install git+https://github.com/aristanetworks/anta.git@<cool-tag>
+pip install git+https://github.com/aristanetworks/anta.git@<cool-tag>#egg=anta[cli]
-pip install git+https://github.com/arista-netdevops-community/anta.git@<more-or-less-cool-hash>
-pip install git+https://github.com/arista-netdevops-community/anta.git@<more-or-less-cool-hash>#egg=anta[cli]
+pip install git+https://github.com/aristanetworks/anta.git@<more-or-less-cool-hash>
+pip install git+https://github.com/aristanetworks/anta.git@<more-or-less-cool-hash>#egg=anta[cli]
```
### Check installation
@@ -93,7 +93,7 @@ which anta
```bash
# Check ANTA version
anta --version
-anta, version v0.15.0
+anta, version v1.0.0
```
## EOS Requirements
diff --git a/docs/scripts/generate_svg.py b/docs/scripts/generate_svg.py
index 0048160..e6bf87a 100644
--- a/docs/scripts/generate_svg.py
+++ b/docs/scripts/generate_svg.py
@@ -56,7 +56,8 @@ if __name__ == "__main__":
# stolen from https://github.com/ewels/rich-click/blob/main/src/rich_click/cli.py
args = sys.argv[1:]
script_name = args[0]
- scripts = {script.name: script for script in entry_points().get("console_scripts")}
+ console_scripts = entry_points(group="console_scripts")
+ scripts = {script.name: script for script in console_scripts}
if script_name in scripts:
# A VALID SCRIPT WAS passed
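The change above adopts the selectable `entry_points()` API; here is a minimal sketch of that pattern, assuming Python 3.10+ where `entry_points(group=...)` is available:

```python
# Minimal sketch: select console_scripts entry points directly instead of
# indexing the legacy dict-like return value of entry_points().
from importlib.metadata import entry_points

console_scripts = entry_points(group="console_scripts")
scripts = {script.name: script for script in console_scripts}
print("anta" in scripts)  # True once anta[cli] is installed in the environment
```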
diff --git a/docs/stylesheets/extra.material.css b/docs/stylesheets/extra.material.css
index 09d7c8d..1724da9 100644
--- a/docs/stylesheets/extra.material.css
+++ b/docs/stylesheets/extra.material.css
@@ -61,6 +61,12 @@
--md-code-border-color: #aec6db4f;
}
+.md-banner {
+ background-color: #f5c842;
+ color: #000000;
+
+}
+
@media only screen and (min-width: 76.25em) {
.md-main__inner, .md-header__inner {
max-width: 85%;
@@ -128,12 +134,8 @@
font-weight: bold;
}
- .md-typeset h4::before {
- content: ">> ";
- }
-
.md-typeset h4 {
- font-size: 1.1rem;
+ font-size: 0.9rem;
margin: 1em 0;
font-weight: 700;
letter-spacing: -.01em;
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
index f27de7a..40fc07f 100644
--- a/docs/troubleshooting.md
+++ b/docs/troubleshooting.md
@@ -11,9 +11,9 @@ A couple of things to check when hitting an issue with ANTA:
```mermaid
flowchart LR
A>Hitting an issue with ANTA] --> B{Is my issue <br >listed in the FAQ?}
- B -- Yes --> C{Does the FAQ solution<<br />works for me?}
+ B -- Yes --> C{Does the FAQ solution<br />work for me?}
C -- Yes --> V(((Victory)))
- B -->|No| E{Is my problem<br />mentioned in one<<br />of the open issues?}
+ B -->|No| E{Is my problem<br />mentioned in one<br />of the open issues?}
C -->|No| E
E -- Yes --> F{Has the issue been<br />fixed in a newer<br />release or in main?}
F -- Yes --> U[Upgrade]
@@ -24,8 +24,8 @@ flowchart LR
F -- No ----> G((Add a comment on the <br />issue indicating you<br >are hitting this and<br />describing your setup<br /> and adding your logs.))
click B "../faq" "FAQ"
- click E "https://github.com/arista-netdevops-community/anta/issues"
- click H "https://github.com/arista-netdevops-community/anta/issues"
+ click E "https://github.com/aristanetworks/anta/issues"
+ click H "https://github.com/aristanetworks/anta/issues"
style A stroke:#f00,stroke-width:2px
```
diff --git a/docs/usage-inventory-catalog.md b/docs/usage-inventory-catalog.md
index e698dca..f469933 100644
--- a/docs/usage-inventory-catalog.md
+++ b/docs/usage-inventory-catalog.md
@@ -129,7 +129,7 @@ anta.tests:
custom_field: "Test run by John Doe"
```
-[This test catalog example](https://github.com/arista-netdevops-community/anta/blob/main/examples/tests.yaml) is maintained with all the tests defined in the `anta.tests` Python module.
+[This test catalog example](https://github.com/aristanetworks/anta/blob/main/examples/tests.yaml) is maintained with all the tests defined in the `anta.tests` Python module.
### Test tags
@@ -205,10 +205,10 @@ anta.tests.configuration:
### Catalog with custom tests
In case you want to leverage your own tests collection, use your own Python package in the test catalog.
-So for instance, if my custom tests are defined in the `titom73.tests.system` Python module, the test catalog will be:
+So for instance, if my custom tests are defined in the `custom.tests.system` Python module, the test catalog will be:
```yaml
-titom73.tests.system:
+custom.tests.system:
- VerifyPlatform:
type: ['cEOS-LAB']
```
@@ -269,7 +269,7 @@ if __name__ == "__main__":
# Apply filters to all tests for this device
for test in c.tests:
test.inputs.filters = AntaTest.Input.Filters(tags=[device])
- catalog.merge(c)
+ catalog = catalog.merge(c)
with open(Path('anta-catalog.yml'), "w") as f:
f.write(catalog.dump().yaml())
```
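The one-line fix above reflects that `AntaCatalog.merge()` returns the merged catalog rather than mutating in place; a hedged sketch of the corrected usage, with placeholder file names:

```python
# Sketch under the assumption stated above: re-assign the result of merge().
from anta.catalog import AntaCatalog

catalog = AntaCatalog()
for path in ("catalog-leaf.yml", "catalog-spine.yml"):  # placeholder catalog files
    catalog = catalog.merge(AntaCatalog.parse(path))
```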
diff --git a/examples/README.md b/examples/README.md
index 3a166e2..0800b79 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -1,12 +1,12 @@
## Device Inventory
-The file [inventory.yaml](inventory.yaml) is an example of [device inventory](https://www.anta.ninja/stable/usage-inventory-catalog/#create-an-inventory-file).
+The file [inventory.yaml](inventory.yaml) is an example of [device inventory](https://anta.arista.com/stable/usage-inventory-catalog/#create-an-inventory-file).
## Test Catalog
-The file [tests.yaml](tests.yaml) is an example of a [test catalog](https://www.anta.ninja/stable/usage-inventory-catalog/#test-catalog).
+The file [tests.yaml](tests.yaml) is an example of a [test catalog](https://anta.arista.com/stable/usage-inventory-catalog/#test-catalog).
This file should contain all the tests implemented in [anta.tests](../anta/tests) with arbitrary parameters.
## eos-commands.yaml file
-The file [eos-commands.yaml](eos-commands.yaml) is an example of input given with the `--commands-list` option to the [anta exec snapshot](https://www.anta.ninja/stable/cli/exec/#collect-a-set-of-commands) command.
+The file [eos-commands.yaml](eos-commands.yaml) is an example of input given with the `--commands-list` option to the [anta exec snapshot](https://anta.arista.com/stable/cli/exec/#collect-a-set-of-commands) command.
diff --git a/mkdocs.yml b/mkdocs.yml
index 834c8ab..db08e73 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -6,7 +6,7 @@ copyright: Copyright &copy; 2019 - 2024 Arista Networks
# Repository
repo_name: ANTA on Github
-repo_url: https://github.com/arista-netdevops-community/anta
+repo_url: https://github.com/aristanetworks/anta
# Configuration
use_directory_urls: true
@@ -57,9 +57,9 @@ theme:
extra:
social:
- icon: fontawesome/brands/github
- link: https://github.com/arista-netdevops-community/anta
+ link: https://github.com/aristanetworks/anta
- icon: fontawesome/brands/docker
- link: https://github.com/arista-netdevops-community/anta/pkgs/container/anta
+ link: https://github.com/aristanetworks/anta/pkgs/container/anta
- icon: fontawesome/brands/python
link: https://pypi.org/project/anta/
version:
@@ -112,6 +112,14 @@ plugins:
- git-revision-date-localized:
type: date
- mike:
+ - glightbox:
+ background: none
+ shadow: true
+ touchNavigation: true
+ loop: false
+ effect: fade
+ slide_effect: slide
+ width: 90vw
markdown_extensions:
- attr_list
diff --git a/pyproject.toml b/pyproject.toml
index 0c7a915..49f699d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,14 +5,16 @@ build-backend = "setuptools.build_meta"
[project]
name = "anta"
-version = "v0.15.0"
+version = "v1.0.0"
readme = "docs/README.md"
-authors = [{ name = "Khelil Sator", email = "ksator@arista.com" }]
+authors = [{ name = "Arista Networks ANTA maintainers", email = "anta-dev@arista.com" }]
maintainers = [
+ { name = "Arista Networks ANTA maintainers", email = "anta-dev@arista.com" },
{ name = "Khelil Sator", email = "ksator@arista.com" },
{ name = "Matthieu Tâche", email = "mtache@arista.com" },
{ name = "Thomas Grimonet", email = "tgrimonet@arista.com" },
{ name = "Guillaume Mulocher", email = "gmulocher@arista.com" },
+ { name = "Carl Baillargeon", email = "carl.baillargeon@arista.com" },
]
description = "Arista Network Test Automation (ANTA) Framework"
license = { file = "LICENSE" }
@@ -33,7 +35,7 @@ keywords = ["test", "anta", "Arista", "network", "automation", "networking", "de
classifiers = [
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
- "Development Status :: 4 - Beta",
+ "Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Intended Audience :: Information Technology",
@@ -57,7 +59,7 @@ cli = [
]
dev = [
"bumpver>=2023.1129",
- "codespell~=2.2.6",
+ "codespell>=2.2.6,<2.4.0",
"mypy-extensions~=1.0",
"mypy~=1.10",
"pre-commit>=3.3.3",
@@ -89,12 +91,13 @@ doc = [
"mkdocs-material>=8.3.9",
"mkdocs>=1.3.1",
"mkdocstrings[python]>=0.20.0",
+ "mkdocs-glightbox>=0.4.0"
]
[project.urls]
-Homepage = "https://www.anta.ninja"
-"Bug Tracker" = "https://github.com/arista-netdevops-community/anta/issues"
-Contributing = "https://www.anta.ninja/main/contribution/"
+Homepage = "https://anta.arista.com"
+"Bug Tracker" = "https://github.com/aristanetworks/anta/issues"
+Contributing = "https://anta.arista.com/main/contribution/"
[project.scripts]
anta = "anta.cli:cli"
@@ -110,7 +113,7 @@ namespaces = false
# Version
################################
[tool.bumpver]
-current_version = "0.15.0"
+current_version = "1.0.0"
version_pattern = "MAJOR.MINOR.PATCH"
commit_message = "bump: Version {old_version} -> {new_version}"
commit = true
@@ -396,6 +399,10 @@ runtime-evaluated-base-classes = ["pydantic.BaseModel", "anta.models.AntaTest.In
"C901", # TODO: test function is too complex, needs a refactor
"PLR0911", # TODO: Too many return statements, same as above needs a refactor
]
+"anta/tests/routing/isis.py" = [
+ "C901", # TODO: test function is too complex, needs a refactor
+ "PLR0912" # Too many branches (15/12) (too-many-branches), needs a refactor
+]
"anta/decorators.py" = [
"ANN401", # Ok to use Any type hint in our decorators
]
diff --git a/tests/conftest.py b/tests/conftest.py
index d6b1b8c..e315338 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -44,7 +44,10 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
It will parametrize test cases based on the `DATA` data structure defined in `tests.units.anta_tests` modules.
See `tests/units/anta_tests/README.md` for more information on how to use it.
Test IDs are generated using the `build_test_id` function above.
+
+    Only the function named "test" is parametrized with data, so that helper functions in each module
+    can have their own dedicated unit tests.
"""
- if "tests.units.anta_tests" in metafunc.module.__package__:
+ if "tests.units.anta_tests" in metafunc.module.__package__ and metafunc.function.__name__ == "test":
# This is a unit test for an AntaTest subclass
metafunc.parametrize("data", metafunc.module.DATA, ids=build_test_id)
diff --git a/tests/units/anta_tests/routing/test_isis.py b/tests/units/anta_tests/routing/test_isis.py
index ec41105..2167ea4 100644
--- a/tests/units/anta_tests/routing/test_isis.py
+++ b/tests/units/anta_tests/routing/test_isis.py
@@ -3,11 +3,23 @@
# that can be found in the LICENSE file.
"""Tests for anta.tests.routing.ospf.py."""
+# pylint: disable=too-many-lines
+
from __future__ import annotations
from typing import Any
-from anta.tests.routing.isis import VerifyISISInterfaceMode, VerifyISISNeighborCount, VerifyISISNeighborState
+import pytest
+
+from anta.tests.routing.isis import (
+ VerifyISISInterfaceMode,
+ VerifyISISNeighborCount,
+ VerifyISISNeighborState,
+ VerifyISISSegmentRoutingAdjacencySegments,
+ VerifyISISSegmentRoutingDataplane,
+ VerifyISISSegmentRoutingTunnels,
+ _get_interface_data,
+)
from tests.lib.anta import test # noqa: F401; pylint: disable=W0611
DATA: list[dict[str, Any]] = [
@@ -155,6 +167,18 @@ DATA: list[dict[str, Any]] = [
},
},
{
+ "name": "skipped - no neighbor",
+ "test": VerifyISISNeighborState,
+ "eos_data": [
+ {"vrfs": {"default": {"isisInstances": {"CORE-ISIS": {"neighbors": {}}}}}},
+ ],
+ "inputs": None,
+ "expected": {
+ "result": "skipped",
+ "messages": ["No IS-IS neighbor detected"],
+ },
+ },
+ {
"name": "success only default vrf",
"test": VerifyISISNeighborCount,
"eos_data": [
@@ -227,6 +251,108 @@ DATA: list[dict[str, Any]] = [
"expected": {"result": "success"},
},
{
+ "name": "skipped - no neighbor",
+ "test": VerifyISISNeighborCount,
+ "eos_data": [
+ {"vrfs": {"default": {"isisInstances": {"CORE-ISIS": {"interfaces": {}}}}}},
+ ],
+ "inputs": {
+ "interfaces": [
+ {"name": "Ethernet1", "level": 2, "count": 1},
+ ]
+ },
+ "expected": {
+ "result": "skipped",
+ "messages": ["No IS-IS neighbor detected"],
+ },
+ },
+ {
+ "name": "failure - missing interface",
+ "test": VerifyISISNeighborCount,
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "interfaces": {
+ "Ethernet1": {
+ "intfLevels": {
+ "2": {
+ "ipv4Metric": 10,
+ "numAdjacencies": 0,
+ "linkId": "84",
+ "sharedSecretProfile": "",
+ "isisAdjacencies": [],
+ "passive": False,
+ "v4Protection": "link",
+ "v6Protection": "disabled",
+ }
+ },
+ "interfaceSpeed": 1000,
+ "areaProxyBoundary": False,
+ },
+ }
+ }
+ }
+ }
+ }
+ },
+ ],
+ "inputs": {
+ "interfaces": [
+ {"name": "Ethernet2", "level": 2, "count": 1},
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["No neighbor detected for interface Ethernet2"],
+ },
+ },
+ {
+ "name": "failure - wrong count",
+ "test": VerifyISISNeighborCount,
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "interfaces": {
+ "Ethernet1": {
+ "intfLevels": {
+ "2": {
+ "ipv4Metric": 10,
+ "numAdjacencies": 3,
+ "linkId": "84",
+ "sharedSecretProfile": "",
+ "isisAdjacencies": [],
+ "passive": False,
+ "v4Protection": "link",
+ "v6Protection": "disabled",
+ }
+ },
+ "interfaceSpeed": 1000,
+ "areaProxyBoundary": False,
+ },
+ }
+ }
+ }
+ }
+ }
+ },
+ ],
+ "inputs": {
+ "interfaces": [
+ {"name": "Ethernet1", "level": 2, "count": 1},
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Interface Ethernet1: expected Level 2: count 1, got Level 2: count 3"],
+ },
+ },
+ {
"name": "success VerifyISISInterfaceMode only default vrf",
"test": VerifyISISInterfaceMode,
"eos_data": [
@@ -567,4 +693,1226 @@ DATA: list[dict[str, Any]] = [
],
},
},
+ {
+ "name": "skipped VerifyISISInterfaceMode no vrf",
+ "test": VerifyISISInterfaceMode,
+ "eos_data": [{"vrfs": {}}],
+ "inputs": {
+ "interfaces": [
+ {"name": "Loopback0", "mode": "passive"},
+ {"name": "Ethernet2", "mode": "passive"},
+ {"name": "Ethernet1", "mode": "point-to-point", "vrf": "default"},
+ ]
+ },
+ "expected": {"result": "skipped", "messages": ["IS-IS is not configured on device"]},
+ },
+ {
+        "name": "Skipped VerifyISISSegmentRoutingAdjacencySegments when no VRF is configured.",
+ "test": VerifyISISSegmentRoutingAdjacencySegments,
+ "eos_data": [{"vrfs": {}}],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "default",
+ "segments": [
+ {
+ "interface": "Ethernet2",
+ "address": "10.0.1.3",
+ "sid_origin": "dynamic",
+ }
+ ],
+ }
+ ]
+ },
+ "expected": {"result": "skipped", "messages": ["IS-IS is not configured on device"]},
+ },
+ {
+ "test": VerifyISISSegmentRoutingAdjacencySegments,
+ "name": "Success of VerifyISISSegmentRoutingAdjacencySegments in default VRF.",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ "adjSidAllocationMode": "SrOnly",
+ "adjSidPoolBase": 116384,
+ "adjSidPoolSize": 16384,
+ "adjacencySegments": [
+ {
+ "ipAddress": "10.0.1.3",
+ "localIntf": "Ethernet2",
+ "sid": 116384,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ {
+ "ipAddress": "10.0.1.1",
+ "localIntf": "Ethernet1",
+ "sid": 116385,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ ],
+ "receivedGlobalAdjacencySegments": [],
+ "misconfiguredAdjacencySegments": [],
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "default",
+ "segments": [
+ {
+ "interface": "Ethernet2",
+ "address": "10.0.1.3",
+ "sid_origin": "dynamic",
+ }
+ ],
+ }
+ ]
+ },
+ "expected": {
+ "result": "success",
+ "messages": [],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingAdjacencySegments,
+ "name": "Failure of VerifyISISSegmentRoutingAdjacencySegments in default VRF for incorrect segment definition.",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ "adjSidAllocationMode": "SrOnly",
+ "adjSidPoolBase": 116384,
+ "adjSidPoolSize": 16384,
+ "adjacencySegments": [
+ {
+ "ipAddress": "10.0.1.3",
+ "localIntf": "Ethernet2",
+ "sid": 116384,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ {
+ "ipAddress": "10.0.1.1",
+ "localIntf": "Ethernet1",
+ "sid": 116385,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ ],
+ "receivedGlobalAdjacencySegments": [],
+ "misconfiguredAdjacencySegments": [],
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "default",
+ "segments": [
+ {
+ "interface": "Ethernet2",
+ "address": "10.0.1.3",
+ "sid_origin": "dynamic",
+ },
+ {
+ "interface": "Ethernet3",
+ "address": "10.0.1.2",
+ "sid_origin": "dynamic",
+ },
+ ],
+ }
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Your segment has not been found: interface='Ethernet3' level=2 sid_origin='dynamic' address=IPv4Address('10.0.1.2')."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingAdjacencySegments,
+ "name": "Failure of VerifyISISSegmentRoutingAdjacencySegments with incorrect VRF.",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ "adjSidAllocationMode": "SrOnly",
+ "adjSidPoolBase": 116384,
+ "adjSidPoolSize": 16384,
+ "adjacencySegments": [
+ {
+ "ipAddress": "10.0.1.3",
+ "localIntf": "Ethernet2",
+ "sid": 116384,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ {
+ "ipAddress": "10.0.1.1",
+ "localIntf": "Ethernet1",
+ "sid": 116385,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ ],
+ "receivedGlobalAdjacencySegments": [],
+ "misconfiguredAdjacencySegments": [],
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "custom",
+ "segments": [
+ {
+ "interface": "Ethernet2",
+ "address": "10.0.1.3",
+ "sid_origin": "dynamic",
+ },
+ {
+ "interface": "Ethernet3",
+ "address": "10.0.1.2",
+ "sid_origin": "dynamic",
+ },
+ ],
+ }
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["VRF custom is not configured to run segment routging."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingAdjacencySegments,
+ "name": "Failure of VerifyISISSegmentRoutingAdjacencySegments with incorrect Instance.",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ "adjSidAllocationMode": "SrOnly",
+ "adjSidPoolBase": 116384,
+ "adjSidPoolSize": 16384,
+ "adjacencySegments": [
+ {
+ "ipAddress": "10.0.1.3",
+ "localIntf": "Ethernet2",
+ "sid": 116384,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ {
+ "ipAddress": "10.0.1.1",
+ "localIntf": "Ethernet1",
+ "sid": 116385,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ ],
+ "receivedGlobalAdjacencySegments": [],
+ "misconfiguredAdjacencySegments": [],
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS2",
+ "vrf": "default",
+ "segments": [
+ {
+ "interface": "Ethernet2",
+ "address": "10.0.1.3",
+ "sid_origin": "dynamic",
+ },
+ {
+ "interface": "Ethernet3",
+ "address": "10.0.1.2",
+ "sid_origin": "dynamic",
+ },
+ ],
+ }
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Instance CORE-ISIS2 is not found in vrf default."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingAdjacencySegments,
+ "name": "Failure of VerifyISISSegmentRoutingAdjacencySegments with incorrect segment info.",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ "adjSidAllocationMode": "SrOnly",
+ "adjSidPoolBase": 116384,
+ "adjSidPoolSize": 16384,
+ "adjacencySegments": [
+ {
+ "ipAddress": "10.0.1.3",
+ "localIntf": "Ethernet2",
+ "sid": 116384,
+ "lan": False,
+ "sidOrigin": "dynamic",
+ "protection": "unprotected",
+ "flags": {
+ "b": False,
+ "v": True,
+ "l": True,
+ "f": False,
+ "s": False,
+ },
+ "level": 2,
+ },
+ ],
+ "receivedGlobalAdjacencySegments": [],
+ "misconfiguredAdjacencySegments": [],
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "default",
+ "segments": [
+ {
+ "interface": "Ethernet2",
+ "address": "10.0.1.3",
+ "sid_origin": "dynamic",
+ "level": 1, # Wrong level
+ },
+ ],
+ }
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": [
+ (
+ "Your segment is not correct: Expected: interface='Ethernet2' level=1 sid_origin='dynamic' address=IPv4Address('10.0.1.3') - "
+ "Found: {'ipAddress': '10.0.1.3', 'localIntf': 'Ethernet2', 'sid': 116384, 'lan': False, 'sidOrigin': 'dynamic', 'protection': "
+ "'unprotected', 'flags': {'b': False, 'v': True, 'l': True, 'f': False, 's': False}, 'level': 2}."
+ )
+ ],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingDataplane,
+ "name": "Check VerifyISISSegmentRoutingDataplane is running successfully",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "default",
+ "dataplane": "MPLS",
+ },
+ ]
+ },
+ "expected": {
+ "result": "success",
+ "messages": [],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingDataplane,
+ "name": "Check VerifyISISSegmentRoutingDataplane is failing with incorrect dataplane",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "default",
+ "dataplane": "unset",
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["ISIS instance CORE-ISIS is not running dataplane unset (MPLS)"],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingDataplane,
+ "name": "Check VerifyISISSegmentRoutingDataplane is failing for unknown instance",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS2",
+ "vrf": "default",
+ "dataplane": "unset",
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Instance CORE-ISIS2 is not found in vrf default."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingDataplane,
+ "name": "Check VerifyISISSegmentRoutingDataplane is failing for unknown VRF",
+ "eos_data": [
+ {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "dataPlane": "MPLS",
+ "routerId": "1.0.0.11",
+ "systemId": "0168.0000.0011",
+ "hostname": "s1-pe01",
+ }
+ }
+ }
+ }
+ }
+ ],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "wrong_vrf",
+ "dataplane": "unset",
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["VRF wrong_vrf is not configured to run segment routing."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingDataplane,
+ "name": "Check VerifyISISSegmentRoutingDataplane is skipped",
+ "eos_data": [{"vrfs": {}}],
+ "inputs": {
+ "instances": [
+ {
+ "name": "CORE-ISIS",
+ "vrf": "wrong_vrf",
+ "dataplane": "unset",
+ },
+ ]
+ },
+ "expected": {
+ "result": "skipped",
+ "messages": ["IS-IS-SR is not running on device"],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+ "name": "runs successfully",
+ "eos_data": [
+ {
+ "entries": {
+ "3": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "31": {
+ "endpoint": "1.0.0.13/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "32": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ {"endpoint": "1.0.0.13/32", "vias": [{"type": "ip"}]},
+ {
+ "endpoint": "1.0.0.111/32",
+ "vias": [{"type": "tunnel", "tunnel_id": "ti-lfa"}],
+ },
+ {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {"interface": "Ethernet1", "nexthop": "10.0.1.1"}, # Testing empty type
+ {"type": "ip", "interface": "Ethernet2", "nexthop": "10.0.1.3"},
+ ],
+ },
+ ]
+ },
+ "expected": {
+ "result": "success",
+ "messages": [],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+        "name": "is skipped if no entry found in EOS",
+ "eos_data": [{"entries": {}}],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ ]
+ },
+ "expected": {
+ "result": "skipped",
+ "messages": ["IS-IS-SR is not running on device."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+        "name": "fails if tunnel to endpoint is not found",
+ "eos_data": [
+ {
+ "entries": {
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Tunnel to endpoint=IPv4Network('1.0.0.122/32') vias=None is not found."],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+ "name": "fails with incorrect tunnel type",
+ "eos_data": [
+ {
+ "entries": {
+ "3": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "31": {
+ "endpoint": "1.0.0.13/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "32": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ {"endpoint": "1.0.0.13/32", "vias": [{"type": "tunnel"}]},
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Tunnel to 1.0.0.13/32 is incorrect: incorrect tunnel type"],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+ "name": "fails with incorrect nexthop",
+ "eos_data": [
+ {
+ "entries": {
+ "3": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "31": {
+ "endpoint": "1.0.0.13/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "32": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ {"endpoint": "1.0.0.13/32", "vias": [{"type": "ip"}]},
+ {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {"type": "ip", "interface": "Ethernet1", "nexthop": "10.0.1.2"},
+ {"type": "ip", "interface": "Ethernet2", "nexthop": "10.0.1.3"},
+ ],
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Tunnel to 1.0.0.122/32 is incorrect: incorrect nexthop"],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+        "name": "fails with incorrect interface",
+ "eos_data": [
+ {
+ "entries": {
+ "3": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "31": {
+ "endpoint": "1.0.0.13/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "32": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ {"endpoint": "1.0.0.13/32", "vias": [{"type": "ip"}]},
+ {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {"type": "ip", "interface": "Ethernet4", "nexthop": "10.0.1.1"},
+ {"type": "ip", "interface": "Ethernet2", "nexthop": "10.0.1.3"},
+ ],
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Tunnel to 1.0.0.122/32 is incorrect: incorrect interface"],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+        "name": "fails with incorrect nexthop",
+ "eos_data": [
+ {
+ "entries": {
+ "3": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "31": {
+ "endpoint": "1.0.0.13/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "32": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ {"endpoint": "1.0.0.13/32", "vias": [{"type": "ip"}]},
+ {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {"type": "ip", "interface": "Ethernet1", "nexthop": "10.0.1.2"},
+ {"type": "ip", "interface": "Ethernet2", "nexthop": "10.0.1.3"},
+ ],
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Tunnel to 1.0.0.122/32 is incorrect: incorrect nexthop"],
+ },
+ },
+ {
+ "test": VerifyISISSegmentRoutingTunnels,
+ "name": "fails with incorrect tunnel ID type",
+ "eos_data": [
+ {
+ "entries": {
+ "3": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "31": {
+ "endpoint": "1.0.0.13/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "32": {
+ "endpoint": "1.0.0.122/32",
+ "vias": [
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.1",
+ "interface": "Ethernet1",
+ "labels": ["900021"],
+ },
+ {
+ "type": "ip",
+ "nexthop": "10.0.1.3",
+ "interface": "Ethernet2",
+ "labels": ["900021"],
+ },
+ ],
+ },
+ "2": {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {
+ "type": "tunnel",
+ "tunnelId": {"type": "TI-LFA", "index": 4},
+ "labels": ["3"],
+ }
+ ],
+ },
+ }
+ }
+ ],
+ "inputs": {
+ "entries": [
+ {"endpoint": "1.0.0.122/32"},
+ {"endpoint": "1.0.0.13/32", "vias": [{"type": "ip"}]},
+ {
+ "endpoint": "1.0.0.111/32",
+ "vias": [
+ {"type": "tunnel", "tunnel_id": "unset"},
+ ],
+ },
+ ]
+ },
+ "expected": {
+ "result": "failure",
+ "messages": ["Tunnel to 1.0.0.111/32 is incorrect: incorrect tunnel ID"],
+ },
+ },
]
+
+
+COMMAND_OUTPUT = {
+ "vrfs": {
+ "default": {
+ "isisInstances": {
+ "CORE-ISIS": {
+ "interfaces": {
+ "Loopback0": {
+ "enabled": True,
+ "intfLevels": {
+ "2": {
+ "ipv4Metric": 10,
+ "sharedSecretProfile": "",
+ "isisAdjacencies": [],
+ "passive": True,
+ "v4Protection": "disabled",
+ "v6Protection": "disabled",
+ }
+ },
+ "areaProxyBoundary": False,
+ },
+ "Ethernet1": {
+ "intfLevels": {
+ "2": {
+ "ipv4Metric": 10,
+ "numAdjacencies": 1,
+ "linkId": "84",
+ "sharedSecretProfile": "",
+ "isisAdjacencies": [],
+ "passive": False,
+ "v4Protection": "link",
+ "v6Protection": "disabled",
+ }
+ },
+ "interfaceSpeed": 1000,
+ "areaProxyBoundary": False,
+ },
+ }
+ }
+ }
+ },
+ "EMPTY": {"isisInstances": {}},
+ "NO_INTERFACES": {"isisInstances": {"CORE-ISIS": {}}},
+ }
+}
+EXPECTED_LOOPBACK_0_OUTPUT = {
+ "enabled": True,
+ "intfLevels": {
+ "2": {
+ "ipv4Metric": 10,
+ "sharedSecretProfile": "",
+ "isisAdjacencies": [],
+ "passive": True,
+ "v4Protection": "disabled",
+ "v6Protection": "disabled",
+ }
+ },
+ "areaProxyBoundary": False,
+}
+
+
+@pytest.mark.parametrize(
+ ("interface", "vrf", "expected_value"),
+ [
+ pytest.param("Loopback0", "WRONG_VRF", None, id="VRF_not_found"),
+ pytest.param("Loopback0", "EMPTY", None, id="VRF_no_ISIS_instances"),
+ pytest.param("Loopback0", "NO_INTERFACES", None, id="ISIS_instance_no_interfaces"),
+ pytest.param("Loopback42", "default", None, id="interface_not_found"),
+ pytest.param("Loopback0", "default", EXPECTED_LOOPBACK_0_OUTPUT, id="interface_found"),
+ ],
+)
+def test__get_interface_data(interface: str, vrf: str, expected_value: dict[str, Any] | None) -> None:
+ """Test anta.tests.routing.isis._get_interface_data."""
+ assert _get_interface_data(interface, vrf, COMMAND_OUTPUT) == expected_value
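
For orientation, the DATA entries added above all share the same shape, which the generic `test` function consumes. The snippet below restates that shape with the values of the "skipped - no neighbor" case; it adds no new scenario.

```python
"""Shape of a DATA entry, mirroring the "skipped - no neighbor" case above."""
from typing import Any

from anta.tests.routing.isis import VerifyISISNeighborState

EXAMPLE_ENTRY: dict[str, Any] = {
    "name": "skipped - no neighbor",  # human-readable ID consumed by build_test_id
    "test": VerifyISISNeighborState,  # AntaTest subclass under test
    "eos_data": [  # canned output for each command the test collects
        {"vrfs": {"default": {"isisInstances": {"CORE-ISIS": {"neighbors": {}}}}}},
    ],
    "inputs": None,  # AntaTest.Input payload, if the test takes one
    "expected": {  # expected result and messages reported by the test
        "result": "skipped",
        "messages": ["No IS-IS neighbor detected"],
    },
}
```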
diff --git a/tests/units/cli/get/test_commands.py b/tests/units/cli/get/test_commands.py
index e0b17a0..1e8c6e9 100644
--- a/tests/units/cli/get/test_commands.py
+++ b/tests/units/cli/get/test_commands.py
@@ -11,6 +11,7 @@ from typing import TYPE_CHECKING
from unittest.mock import ANY, patch
import pytest
+import requests
from cvprac.cvp_client_errors import CvpApiError
from anta.cli._main import anta
@@ -24,19 +25,25 @@ DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
@pytest.mark.parametrize(
- ("cvp_container", "cvp_connect_failure"),
+ ("cvp_container", "verify_cert", "cv_token_failure", "cvp_connect_failure"),
[
- pytest.param(None, False, id="all devices"),
- pytest.param("custom_container", False, id="custom container"),
- pytest.param(None, True, id="cvp connect failure"),
+ pytest.param(None, True, False, False, id="all devices - verify cert"),
+ pytest.param(None, True, True, False, id="all devices - fail SSL check"),
+ pytest.param(None, False, False, False, id="all devices - do not verify cert"),
+ pytest.param("custom_container", False, False, False, id="custom container"),
+ pytest.param(None, False, False, True, id="cvp connect failure"),
],
)
def test_from_cvp(
tmp_path: Path,
click_runner: CliRunner,
cvp_container: str | None,
+ verify_cert: bool,
+ cv_token_failure: bool,
cvp_connect_failure: bool,
) -> None:
+ # pylint: disable=too-many-arguments
+ # ruff: noqa: C901
"""Test `anta get from-cvp`.
This test verifies that username and password are NOT mandatory to run this command
@@ -57,6 +64,12 @@ def test_from_cvp(
if cvp_container is not None:
cli_args.extend(["--container", cvp_container])
+ if not verify_cert:
+ cli_args.extend(["--ignore-cert"])
+
+ def mock_get_cv_token(*_args: str, **_kwargs: str) -> None:
+ if cv_token_failure:
+ raise requests.exceptions.SSLError
def mock_cvp_connect(_self: CvpClient, *_args: str, **_kwargs: str) -> None:
if cvp_connect_failure:
@@ -64,7 +77,7 @@ def test_from_cvp(
# always get a token
with (
- patch("anta.cli.get.commands.get_cv_token", return_value="dummy_token"),
+ patch("anta.cli.get.commands.get_cv_token", autospec=True, side_effect=mock_get_cv_token),
patch(
"cvprac.cvp_client.CvpClient.connect",
autospec=True,
@@ -79,20 +92,27 @@ def test_from_cvp(
):
result = click_runner.invoke(anta, cli_args)
- if not cvp_connect_failure:
+ if not cvp_connect_failure and not cv_token_failure:
assert output.exists()
+ if cv_token_failure:
+ assert "Authentication to CloudVison failed" in result.output
+ assert result.exit_code == ExitCode.USAGE_ERROR
+ return
+
mocked_cvp_connect.assert_called_once()
- if not cvp_connect_failure:
- assert "Connected to CloudVision" in result.output
- if cvp_container is not None:
- mocked_get_devices_in_container.assert_called_once_with(ANY, cvp_container)
- else:
- mocked_get_inventory.assert_called_once()
- assert result.exit_code == ExitCode.OK
- else:
+
+ if cvp_connect_failure:
assert "Error connecting to CloudVision" in result.output
assert result.exit_code == ExitCode.USAGE_ERROR
+ return
+
+ assert "Connected to CloudVision" in result.output
+ if cvp_container is not None:
+ mocked_get_devices_in_container.assert_called_once_with(ANY, cvp_container)
+ else:
+ mocked_get_inventory.assert_called_once()
+ assert result.exit_code == ExitCode.OK
@pytest.mark.parametrize(
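
A condensed look at the token-failure branch of the patching pattern used above; this is illustrative only, since the real test also patches `CvpClient.connect` and the inventory helpers and drives the command through `click_runner`.

```python
"""Sketch of mocking get_cv_token to simulate an SSL failure."""
from unittest.mock import patch

import requests


def mock_get_cv_token(*_args: str, **_kwargs: str) -> None:
    """Stand-in for anta.cli.get.commands.get_cv_token that simulates a failed TLS check."""
    raise requests.exceptions.SSLError


def run_with_ssl_failure() -> None:
    """Patch get_cv_token so invoking the CLI exercises the SSL failure path."""
    with patch("anta.cli.get.commands.get_cv_token", autospec=True, side_effect=mock_get_cv_token):
        ...  # invoke `anta get from-cvp` here and assert on ExitCode.USAGE_ERROR
```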
diff --git a/tests/units/cli/get/test_utils.py b/tests/units/cli/get/test_utils.py
index 7ce85dc..e105f94 100644
--- a/tests/units/cli/get/test_utils.py
+++ b/tests/units/cli/get/test_utils.py
@@ -19,7 +19,14 @@ from anta.inventory import AntaInventory
DATA_DIR: Path = Path(__file__).parents[3].resolve() / "data"
-def test_get_cv_token() -> None:
+@pytest.mark.parametrize(
+ "verify_cert",
+ [
+ pytest.param(True, id="Verify cert enabled"),
+ pytest.param(False, id="Verify cert disabled"),
+ ],
+)
+def test_get_cv_token(verify_cert: bool) -> None:
"""Test anta.get.utils.get_cv_token."""
ip_addr = "42.42.42.42"
username = "ant"
@@ -29,13 +36,13 @@ def test_get_cv_token() -> None:
mocked_ret = MagicMock(autospec=requests.Response)
mocked_ret.json.return_value = {"sessionId": "simple"}
patched_request.return_value = mocked_ret
- res = get_cv_token(ip_addr, username, password)
+ res = get_cv_token(ip_addr, username, password, verify_cert=verify_cert)
patched_request.assert_called_once_with(
"POST",
"https://42.42.42.42/cvpservice/login/authenticate.do",
headers={"Content-Type": "application/json", "Accept": "application/json"},
data='{"userId": "ant", "password": "formica"}',
- verify=False,
+ verify=verify_cert,
timeout=10,
)
assert res == "simple"
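
To make the updated assertions easier to read in isolation, here is a hedged reconstruction of the helper under test, inferred purely from the mocked call above rather than copied from `anta/cli/get/utils.py`; the `raise_for_status()` call is an assumption.

```python
"""Reconstruction (inferred, not copied) of a get_cv_token-style helper."""
import json

import requests


def get_cv_token(cvp_ip: str, cvp_username: str, cvp_password: str, verify_cert: bool) -> str:
    """Return a CloudVision session token, verifying the TLS certificate when requested."""
    url = f"https://{cvp_ip}/cvpservice/login/authenticate.do"
    payload = json.dumps({"userId": cvp_username, "password": cvp_password})
    headers = {"Content-Type": "application/json", "Accept": "application/json"}
    response = requests.request("POST", url, headers=headers, data=payload, verify=verify_cert, timeout=10)
    response.raise_for_status()  # assumption: surface HTTP errors to the caller
    return response.json()["sessionId"]
```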