Diffstat
-rw-r--r--  .bumpversion.cfg | 24
-rw-r--r--  .coveragerc | 8
-rw-r--r--  .devcontainer/Dockerfile | 26
-rw-r--r--  .devcontainer/devcontainer.json | 50
-rw-r--r--  .github/changelog.sh | 435
-rw-r--r--  .github/dependabot.yml | 28
-rw-r--r--  .github/workflows/on_demand.yml | 89
-rw-r--r--  .github/workflows/pr-management.yml | 104
-rw-r--r--  .github/workflows/release.yml | 136
-rw-r--r--  .gitignore | 348
-rw-r--r--  Dockerfile | 27
-rw-r--r--  Dockerfile.docker | 34
-rw-r--r--  LICENSE | 204
-rw-r--r--  Makefile | 10
-rw-r--r--  README.md | 183
-rw-r--r--  bin/README.md | 111
-rwxr-xr-x  bin/cvp-upload | 56
-rwxr-xr-x  bin/eos-download | 86
-rw-r--r--  docs/docker.md | 49
-rw-r--r--  eos_downloader/__init__.py | 47
-rw-r--r--  eos_downloader/cli/__init__.py | 0
-rw-r--r--  eos_downloader/cli/cli.py | 76
-rw-r--r--  eos_downloader/cli/debug/__init__.py | 0
-rw-r--r--  eos_downloader/cli/debug/commands.py | 53
-rw-r--r--  eos_downloader/cli/get/__init__.py | 0
-rw-r--r--  eos_downloader/cli/get/commands.py | 137
-rw-r--r--  eos_downloader/cli/info/__init__.py | 0
-rw-r--r--  eos_downloader/cli/info/commands.py | 87
-rw-r--r--  eos_downloader/cvp.py | 276
-rw-r--r--  eos_downloader/data.py | 93
-rw-r--r--  eos_downloader/download.py | 77
-rw-r--r--  eos_downloader/eos.py | 177
-rw-r--r--  eos_downloader/models/__init__.py | 0
-rw-r--r--  eos_downloader/models/version.py | 272
-rw-r--r--  eos_downloader/object_downloader.py | 513
-rw-r--r--  eos_downloader/tools.py | 13
-rw-r--r--  pylintrc | 25
-rw-r--r--  pyproject.toml | 189
-rw-r--r--  pytest.ini | 5
-rw-r--r--  tests/__init__.py | 0
-rw-r--r--  tests/lib/__init__.py | 0
-rw-r--r--  tests/lib/dataset.py | 116
-rw-r--r--  tests/lib/fixtures.py | 69
-rw-r--r--  tests/lib/helpers.py | 40
-rw-r--r--  tests/system/__init__.py | 0
-rw-r--r--  tests/system/test_eos_download.py.old | 48
-rw-r--r--  tests/unit/__init__.py | 0
-rw-r--r--  tests/unit/test_eos_version.py | 130
-rw-r--r--  tests/unit/test_object_downloader.py | 141
49 files changed, 4592 insertions, 0 deletions
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
new file mode 100644
index 0000000..9562ed5
--- /dev/null
+++ b/.bumpversion.cfg
@@ -0,0 +1,24 @@
+[bumpversion]
+commit = True
+tag = False
+tag_message = Bump version: {current_version} → {new_version}
+tag_name = v{new_version}
+current_version = 0.7.0
+parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)([-](?P<release>(dev|rc))+(?P<build>\d+))?
+serialize =
+ {major}.{minor}.{patch}-{release}{build}
+ {major}.{minor}.{patch}
+
+[bumpversion:part:release]
+first_value = dev
+optional_value = prod
+values =
+ dev
+ prod
+
+[bumpversion:part:build]
+first_value = 1
+
+[bumpversion:file:./eos_downloader/__init__.py]
+search = __version__ = '{current_version}'
+replace = __version__ = '{new_version}'
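+
+# Illustrative version flow, assuming standard bump2version part handling:
+#   bumpversion minor    0.7.0      -> 0.8.0-dev1
+#   bumpversion build    0.8.0-dev1 -> 0.8.0-dev2
+#   bumpversion release  0.8.0-dev2 -> 0.8.0   ('prod' is optional and therefore omitted)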
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..a987e6a
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,8 @@
+[html]
+directory = tests/htmlcov
+
+[tool:pytest]
+addopts = --cov=eos_downloader --cov-report html
+
+[run]
+omit = tests/*
\ No newline at end of file
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
new file mode 100644
index 0000000..c266dbc
--- /dev/null
+++ b/.devcontainer/Dockerfile
@@ -0,0 +1,26 @@
+# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.202.3/containers/python-3/.devcontainer/base.Dockerfile
+
+# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
+ARG VARIANT="3.9-bullseye"
+FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
+
+# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
+ARG NODE_VERSION="none"
+RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
+
+# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
+# COPY requirements.txt /tmp/pip-tmp/
+# RUN pip3 --disable-pip-version-check --no-cache-dir install -e .
+
+# [Optional] Uncomment this section to install additional OS packages.
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+ && apt-get -y install --no-install-recommends qemu-kvm qemu-utils libguestfs-tools
+
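+# Create stub EVE-NG paths (an empty unl_wrapper script and the qemu image directory) so
+# that EVE-NG-related features can be exercised inside the dev container (assumed intent).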
+RUN mkdir -p /opt/unetlab/wrappers/ \
+ && echo "#!/bin/bash" > /opt/unetlab/wrappers/unl_wrapper \
+ && chmod 755 /opt/unetlab/wrappers/unl_wrapper \
+ && mkdir -p /opt/unetlab/addons/qemu/ \
+ && chmod 777 /opt/unetlab/addons/qemu/
+
+# [Optional] Uncomment this line to install global node packages.
+# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
\ No newline at end of file
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..07f7023
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,50 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
+// https://github.com/microsoft/vscode-dev-containers/tree/v0.202.3/containers/python-3
+{
+ "name": "Python 3",
+ "runArgs": ["--init"],
+ "build": {
+ "dockerfile": "Dockerfile",
+ "context": "..",
+ "args": {
+ // Update 'VARIANT' to pick a Python version: 3, 3.9, 3.8, 3.7, 3.6.
+ // Append -bullseye or -buster to pin to an OS version.
+      // Use -bullseye variants on local arm64/Apple Silicon.
+ "VARIANT": "3.9",
+ // Options
+ "NODE_VERSION": "lts/*"
+ }
+ },
+
+ // Set *default* container specific settings.json values on container create.
+ "settings": {
+ "python.pythonPath": "/usr/local/bin/python",
+ "python.languageServer": "Pylance",
+ "python.linting.enabled": true,
+ "python.linting.pylintEnabled": true,
+ "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
+ "python.formatting.blackPath": "/usr/local/py-utils/bin/black",
+ "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
+ "python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
+ "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
+ "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
+ "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
+ "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
+ "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
+ },
+
+ // Add the IDs of extensions you want installed when the container is created.
+ "extensions": [
+ "ms-python.python",
+ "ms-python.vscode-pylance"
+ ],
+
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
+ // "forwardPorts": [],
+
+ // Use 'postCreateCommand' to run commands after the container is created.
+ "postCreateCommand": "pip3 install --user poetry",
+
+  // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
+ "remoteUser": "vscode"
+}
diff --git a/.github/changelog.sh b/.github/changelog.sh
new file mode 100644
index 0000000..2d43826
--- /dev/null
+++ b/.github/changelog.sh
@@ -0,0 +1,435 @@
+#!/usr/bin/env zsh
+
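+# Usage: changelog.sh [<until>] [<since>|--all] [--text|--raw|--md]
+# e.g. `zsh .github/changelog.sh v0.8.0 v0.7.0 --md > CHANGELOG.md` (tags are illustrative),
+# as wired (currently commented out) in .github/workflows/release.yml.
+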
+##############################
+# CHANGELOG SCRIPT CONSTANTS #
+##############################
+
+#* Holds the list of valid types recognized in a commit subject
+#* and the display string of such type
+local -A TYPES
+TYPES=(
+ BUILD "Build system"
+ CHORE "Chore"
+ CI "CI"
+ CUT "Features removed"
+ DOC "Documentation"
+ FEAT "Features"
+ FIX "Bug fixes"
+ LICENSE "License update"
+ MAKE "Build system"
+ OPTIMIZE "Code optimization"
+ PERF "Performance"
+ REFACTOR "Code Refactoring"
+  REFORMAT    "Code Reformatting"
+ REVERT "Revert"
+ TEST "Testing"
+)
+
+#* Types that will be displayed in their own section,
+#* in the order specified here.
+local -a MAIN_TYPES
+MAIN_TYPES=(FEAT FIX PERF REFACTOR DOCS DOC)
+
+#* Types that will be displayed under the category of other changes
+local -a OTHER_TYPES
+OTHER_TYPES=(MAKE TEST STYLE CI OTHER)
+
+#* Commit types that don't appear in $MAIN_TYPES nor $OTHER_TYPES
+#* will not be displayed and will simply be ignored.
+
+
+############################
+# COMMIT PARSING UTILITIES #
+############################
+
+function parse-commit {
+
+ # This function uses the following globals as output: commits (A),
+ # subjects (A), scopes (A) and breaking (A). All associative arrays (A)
+ # have $hash as the key.
+ # - commits holds the commit type
+ # - subjects holds the commit subject
+ # - scopes holds the scope of a commit
+ # - breaking holds the breaking change warning if a commit does
+ # make a breaking change
+
+ function commit:type {
+ local commit_message="$1"
+ local type="$(sed -E 's/^([a-zA-Z_\-]+)(\(.+\))?!?: .+$/\1/' <<< "$commit_message"| tr '[:lower:]' '[:upper:]')"
+ # If $type doesn't appear in $TYPES array mark it as 'other'
+ if [[ -n "${(k)TYPES[(i)${type}]}" ]]; then
+ echo $type
+ else
+ echo other
+ fi
+ }
+
+ function commit:scope {
+ local scope
+
+ # Try to find scope in "type(<scope>):" format
+ # Scope will be formatted in lower cases
+ scope=$(sed -nE 's/^[a-zA-Z_\-]+\((.+)\)!?: .+$/\1/p' <<< "$1")
+ if [[ -n "$scope" ]]; then
+ echo "$scope" | tr '[:upper:]' '[:lower:]'
+ return
+ fi
+
+ # If no scope found, try to find it in "<scope>:" format
+ # Make sure it's not a type before printing it
+ scope=$(sed -nE 's/^([a-zA-Z_\-]+): .+$/\1/p' <<< "$1")
+ if [[ -z "${(k)TYPES[(i)$scope]}" ]]; then
+ echo "$scope"
+ fi
+ }
+
+ function commit:subject {
+ # Only display the relevant part of the commit, i.e. if it has the format
+ # type[(scope)!]: subject, where the part between [] is optional, only
+ # displays subject. If it doesn't match the format, returns the whole string.
+ sed -E 's/^[a-zA-Z_\-]+(\(.+\))?!?: (.+)$/\2/' <<< "$1"
+ }
+
+ # Return subject if the body or subject match the breaking change format
+ function commit:is-breaking {
+ local subject="$1" body="$2" message
+
+ if [[ "$body" =~ "BREAKING CHANGE: (.*)" || \
+ "$subject" =~ '^[^ :\)]+\)?!: (.*)$' ]]; then
+ message="${match[1]}"
+ # remove CR characters (might be inserted in GitHub UI commit description form)
+ message="${message//$'\r'/}"
+ # skip next paragraphs (separated by two newlines or more)
+ message="${message%%$'\n\n'*}"
+ # ... and replace newlines with spaces
+ echo "${message//$'\n'/ }"
+ else
+ return 1
+ fi
+ }
+
+ # Return truncated hash of the reverted commit
+ function commit:is-revert {
+ local subject="$1" body="$2"
+
+ if [[ "$subject" = Revert* && \
+ "$body" =~ "This reverts commit ([^.]+)\." ]]; then
+ echo "${match[1]:0:7}"
+ else
+ return 1
+ fi
+ }
+
+ # Parse commit with hash $1
+ local hash="$1" subject body warning rhash
+ subject="$(command git show -s --format=%s $hash)"
+ body="$(command git show -s --format=%b $hash)"
+
+ # Commits following Conventional Commits (https://www.conventionalcommits.org/)
+ # have the following format, where parts between [] are optional:
+ #
+ # type[(scope)][!]: subject
+ #
+ # commit body
+ # [BREAKING CHANGE: warning]
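+  #
+  # For example, "feat(cli)!: drop Python 3.6 support" is parsed as
+  # type=FEAT, scope=cli, subject="drop Python 3.6 support", and is
+  # flagged as a breaking change because of the "!" marker.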
+
+ # commits holds the commit type
+ commits[$hash]="$(commit:type "$subject")"
+ # scopes holds the commit scope
+ scopes[$hash]="$(commit:scope "$subject")"
+ # subjects holds the commit subject
+ subjects[$hash]="$(commit:subject "$subject")"
+
+ # breaking holds whether a commit has breaking changes
+ # and its warning message if it does
+ if warning=$(commit:is-breaking "$subject" "$body"); then
+ breaking[$hash]="$warning"
+ fi
+
+ # reverts holds commits reverted in the same release
+ if rhash=$(commit:is-revert "$subject" "$body"); then
+ reverts[$hash]=$rhash
+ fi
+}
+
+#############################
+# RELEASE CHANGELOG DISPLAY #
+#############################
+
+function display-release {
+
+ # This function uses the following globals: output, version,
+ # commits (A), subjects (A), scopes (A), breaking (A) and reverts (A).
+ #
+ # - output is the output format to use when formatting (raw|text|md)
+ # - version is the version in which the commits are made
+ # - commits, subjects, scopes, breaking, and reverts are associative arrays
+ # with commit hashes as keys
+
+ # Remove commits that were reverted
+ local hash rhash
+ for hash rhash in ${(kv)reverts}; do
+ if (( ${+commits[$rhash]} )); then
+ # Remove revert commit
+ unset "commits[$hash]" "subjects[$hash]" "scopes[$hash]" "breaking[$hash]"
+ # Remove reverted commit
+ unset "commits[$rhash]" "subjects[$rhash]" "scopes[$rhash]" "breaking[$rhash]"
+ fi
+ done
+
+ # If no commits left skip displaying the release
+ if (( $#commits == 0 )); then
+ return
+ fi
+
+ ##* Formatting functions
+
+ # Format the hash according to output format
+ # If no parameter is passed, assume it comes from `$hash`
+ function fmt:hash {
+ #* Uses $hash from outer scope
+ local hash="${1:-$hash}"
+ case "$output" in
+ raw) printf "$hash" ;;
+      text) printf "\e[33m$hash\e[0m" ;; # yellow
+ md) printf "[\`$hash\`](https://github.com/aristanetworks/ansible-avd/commit/$hash)" ;;
+ esac
+ }
+
+ # Format headers according to output format
+ # Levels 1 to 2 are considered special, the rest are formatted
+ # the same, except in md output format.
+ function fmt:header {
+ local header="$1" level="$2"
+ case "$output" in
+ raw)
+ case "$level" in
+ 1) printf "$header\n$(printf '%.0s=' {1..${#header}})\n\n" ;;
+ 2) printf "$header\n$(printf '%.0s-' {1..${#header}})\n\n" ;;
+ *) printf "$header:\n\n" ;;
+ esac ;;
+ text)
+ case "$level" in
+ 1|2) printf "\e[1;4m$header\e[0m\n\n" ;; # bold, underlined
+ *) printf "\e[1m$header:\e[0m\n\n" ;; # bold
+ esac ;;
+ md) printf "$(printf '%.0s#' {1..${level}}) $header\n\n" ;;
+ esac
+ }
+
+ function fmt:scope {
+ #* Uses $scopes (A) and $hash from outer scope
+ local scope="${1:-${scopes[$hash]}}"
+
+ # Get length of longest scope for padding
+ local max_scope=0 padding=0
+ for hash in ${(k)scopes}; do
+ max_scope=$(( max_scope < ${#scopes[$hash]} ? ${#scopes[$hash]} : max_scope ))
+ done
+
+ # If no scopes, exit the function
+ if [[ $max_scope -eq 0 ]]; then
+ return
+ fi
+
+ # Get how much padding is required for this scope
+ padding=$(( max_scope < ${#scope} ? 0 : max_scope - ${#scope} ))
+ padding="${(r:$padding:: :):-}"
+
+ # If no scope, print padding and 3 spaces (equivalent to "[] ")
+ if [[ -z "$scope" ]]; then
+ printf "${padding} "
+ return
+ fi
+
+ # Print [scope]
+ case "$output" in
+ raw|md) printf "[$scope]${padding} " ;;
+ text) printf "[\e[38;5;9m$scope\e[0m]${padding} " ;; # red 9
+ esac
+ }
+
+ # If no parameter is passed, assume it comes from `$subjects[$hash]`
+ function fmt:subject {
+ #* Uses $subjects (A) and $hash from outer scope
+ local subject="${1:-${subjects[$hash]}}"
+
+ # Capitalize first letter of the subject
+ subject="${(U)subject:0:1}${subject:1}"
+
+ case "$output" in
+ raw) printf "$subject" ;;
+ # In text mode, highlight (#<issue>) and dim text between `backticks`
+ text) sed -E $'s|#([0-9]+)|\e[32m#\\1\e[0m|g;s|`([^`]+)`|`\e[2m\\1\e[0m`|g' <<< "$subject" ;;
+ # In markdown mode, link to (#<issue>) issues
+ md) sed -E 's|#([0-9]+)|[#\1](https://github.com/aristanetworks/ansible-avd/issues/\1)|g' <<< "$subject" ;;
+ esac
+ }
+
+ function fmt:type {
+ #* Uses $type from outer scope
+ local type="${1:-${TYPES[$type]:-${(C)type}}}"
+ [[ -z "$type" ]] && return 0
+ case "$output" in
+ raw|md) printf "$type: " ;;
+ text) printf "\e[4m$type\e[24m: " ;; # underlined
+ esac
+ }
+
+ ##* Section functions
+
+ function display:version {
+ fmt:header "$version" 2
+ }
+
+ function display:breaking {
+ (( $#breaking != 0 )) || return 0
+
+ case "$output" in
+ raw) fmt:header "BREAKING CHANGES" 3 ;;
+ text|md) fmt:header "⚠ BREAKING CHANGES" 3 ;;
+ esac
+
+ local hash subject
+ for hash message in ${(kv)breaking}; do
+ echo " - $(fmt:hash) $(fmt:scope)$(fmt:subject "${message}")"
+ done | sort
+ echo
+ }
+
+ function display:type {
+ local hash type="$1"
+
+ local -a hashes
+ hashes=(${(k)commits[(R)$type]})
+
+ # If no commits found of type $type, go to next type
+ (( $#hashes != 0 )) || return 0
+
+ fmt:header "${TYPES[$type]}" 3
+ for hash in $hashes; do
+ echo " - $(fmt:hash) $(fmt:scope)$(fmt:subject)"
+ done | sort -k3 # sort by scope
+ echo
+ }
+
+ function display:others {
+ local hash type
+
+ # Commits made under types considered other changes
+ local -A changes
+ changes=(${(kv)commits[(R)${(j:|:)OTHER_TYPES}]})
+
+ # If no commits found under "other" types, don't display anything
+ (( $#changes != 0 )) || return 0
+
+ fmt:header "Other changes" 3
+ for hash type in ${(kv)changes}; do
+ case "$type" in
+ other) echo " - $(fmt:hash) $(fmt:scope)$(fmt:subject)" ;;
+ *) echo " - $(fmt:hash) $(fmt:scope)$(fmt:type)$(fmt:subject)" ;;
+ esac
+ done | sort -k3 # sort by scope
+ echo
+ }
+
+ ##* Release sections order
+
+ # Display version header
+ display:version
+
+ # Display breaking changes first
+ display:breaking
+
+ # Display changes for commit types in the order specified
+ for type in $MAIN_TYPES; do
+ display:type "$type"
+ done
+
+ # Display other changes
+ display:others
+}
+
+function main {
+ # $1 = until commit, $2 = since commit
+ local until="$1" since="$2"
+
+ # $3 = output format (--text|--raw|--md)
+ # --md: uses markdown formatting
+ # --raw: outputs without style
+ # --text: uses ANSI escape codes to style the output
+ local output=${${3:-"--text"}#--*}
+
+ if [[ -z "$until" ]]; then
+ until=HEAD
+ fi
+
+ if [[ -z "$since" ]]; then
+ # If $since is not specified:
+ # 1) try to find the version used before updating
+ # 2) try to find the first version tag before $until
+ since=$(command git config --get ansible-avd.lastVersion 2>/dev/null) || \
+ since=$(command git describe --abbrev=0 --tags "$until^" 2>/dev/null) || \
+ unset since
+ elif [[ "$since" = --all ]]; then
+ unset since
+ fi
+
+ # Commit classification arrays
+ local -A commits subjects scopes breaking reverts
+ local truncate=0 read_commits=0
+ local hash version tag
+
+ # Get the first version name:
+ # 1) try tag-like version, or
+ # 2) try name-rev, or
+ # 3) try branch name, or
+ # 4) try short hash
+ version=$(command git describe --tags $until 2>/dev/null) \
+ || version=$(command git name-rev --no-undefined --name-only --exclude="remotes/*" $until 2>/dev/null) \
+ || version=$(command git symbolic-ref --quiet --short $until 2>/dev/null) \
+ || version=$(command git rev-parse --short $until 2>/dev/null)
+
+ # Get commit list from $until commit until $since commit, or until root
+ # commit if $since is unset, in short hash form.
+ # --first-parent is used when dealing with merges: it only prints the
+ # merge commit, not the commits of the merged branch.
+ command git rev-list --first-parent --abbrev-commit --abbrev=7 ${since:+$since..}$until | while read hash; do
+ # Truncate list on versions with a lot of commits
+ if [[ -z "$since" ]] && (( ++read_commits > 35 )); then
+ truncate=1
+ break
+ fi
+
+ # If we find a new release (exact tag)
+ if tag=$(command git describe --exact-match --tags $hash 2>/dev/null); then
+ # Output previous release
+ display-release
+ # Reinitialize commit storage
+ commits=()
+ subjects=()
+ scopes=()
+ breaking=()
+ reverts=()
+ # Start work on next release
+ version="$tag"
+ read_commits=1
+ fi
+
+ parse-commit "$hash"
+ done
+
+ display-release
+
+ if (( truncate )); then
+ echo " ...more commits omitted"
+ echo
+ fi
+}
+
+# Use raw output if stdout is not a tty
+if [[ ! -t 1 && -z "$3" ]]; then
+ main "$1" "$2" --raw
+else
+ main "$@"
+fi
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..9695146
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,28 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ labels:
+ - dependabot
+ schedule:
+ interval: "weekly"
+ commit-message:
+ prefix: "bump"
+ include: "ci"
+ open-pull-requests-limit: 10
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    labels:
+      - dependabot
+ commit-message:
+ prefix: "bump"
+ include: "requirements"
+ open-pull-requests-limit: 10
diff --git a/.github/workflows/on_demand.yml b/.github/workflows/on_demand.yml
new file mode 100644
index 0000000..1e41be2
--- /dev/null
+++ b/.github/workflows/on_demand.yml
@@ -0,0 +1,89 @@
+---
+name: "Build a docker image on-demand"
+on:
+ workflow_dispatch:
+ inputs:
+ tag:
+ description: "Tag to use during the build (default: dev)"
+ required: true
+ default: 'dev'
+
+jobs:
+ docker:
+ name: Docker Image Build
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Docker meta for TAG
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: |
+ ${{ secrets.DOCKER_IMAGE }}
+ ghcr.io/${{ secrets.DOCKER_IMAGE }}
+ tags: |
+ type=raw,value=${{ inputs.tag }}
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and push
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: Dockerfile
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+
+ docker_in_docker:
+ name: Docker Image Build with Docker support
+ runs-on: ubuntu-latest
+ needs: [docker]
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Docker meta for TAG
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: |
+ ${{ secrets.DOCKER_IMAGE }}
+ ghcr.io/${{ secrets.DOCKER_IMAGE }}
+ tags: |
+ type=raw,value=${{ inputs.tag }}-dind
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and push
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: Dockerfile.docker
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/pr-management.yml b/.github/workflows/pr-management.yml
new file mode 100644
index 0000000..508a870
--- /dev/null
+++ b/.github/workflows/pr-management.yml
@@ -0,0 +1,104 @@
+---
+name: code-testing
+on:
+ push:
+ branches:
+ - main
+ pull_request_target:
+ types: [assigned, opened, synchronize, reopened]
+
+jobs:
+ compiling:
+ name: Run installation process and code compilation supported Python versions
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.8", "3.9", "3.10"]
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: install requirements
+ run: |
+ pip install .
+
+ - name: install dev requirements
+ run: pip install .[dev]
+
+ - name: validate the syntax of python scripts
+ run: |
+ python -m py_compile $(git ls-files '*.py')
+
+ linting:
+ name: Run flake8, pylint for supported Python versions
+ runs-on: ubuntu-latest
+ needs: [compiling]
+
+ strategy:
+ matrix:
+ python: ["3.8", "3.9", "3.10"]
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Setup Python
+ uses: actions/setup-python@v3
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install dependencies
+ run: pip install tox tox-gh-actions
+
+ - name: "Run tox for ${{ matrix.python }}"
+ run: tox -e lint
+
+ typing:
+ name: Run mypy for supported Python versions
+ runs-on: ubuntu-latest
+ needs: [compiling]
+
+ strategy:
+ matrix:
+ python: ["3.8", "3.9", "3.10"]
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Setup Python
+ uses: actions/setup-python@v3
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install dependencies
+ run: pip install tox tox-gh-actions
+
+ - name: "Run tox for ${{ matrix.python }}"
+ run: tox -e type
+
+ pytest:
+ name: Run pytest validation
+ runs-on: ubuntu-latest
+ needs: [linting, typing]
+
+ strategy:
+ matrix:
+ python: ["3.8", "3.9", "3.10"]
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Setup Python
+ uses: actions/setup-python@v3
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install dependencies
+ run: pip install tox tox-gh-actions
+
+ - name: "Run tox for ${{ matrix.python }}"
+ run: tox -e testenv
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..70cce9c
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,136 @@
+---
+name: "Tag & Release management"
+on:
+ push:
+ # Sequence of patterns matched against refs/tags
+ tags:
+ - 'v[0-9]+.[0-9]+.[0-9]+' # Push events to matching v*, i.e. v1.0, v20.15.10
+jobs:
+ # release:
+ # name: Create Github Release
+ # runs-on: ubuntu-latest
+ # steps:
+ # - name: Checkout code
+ # uses: actions/checkout@v3
+ # with:
+ # fetch-depth: 0
+
+ # - name: Generate Changelog
+ # run: |
+ # sudo apt update && sudo apt install zsh
+ # export TAG_CURRENT=$(git describe --abbrev=0 --tags)
+ # export TAG_PREVIOUS=$(git describe --abbrev=0 --tags `git rev-list --tags --skip=1 --max-count=1`)
+ # echo "Previous tag is: ${TAG_PREVIOUS}"
+ # echo "Current tag is: ${TAG_CURRENT}"
+ # zsh .github/changelog.sh ${TAG_CURRENT} ${TAG_PREVIOUS} md > CHANGELOG.md
+ # cat CHANGELOG.md
+
+ # - name: Release on Github
+ # uses: softprops/action-gh-release@v1
+ # with:
+ # draft: false
+ # body_path: CHANGELOG.md
+
+ pypi:
+ name: Publish version to Pypi servers
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v3
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install setuptools wheel build
+
+ - name: Build package
+ run: |
+ python -m build
+
+ - name: Publish package to TestPyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_API_TOKEN }}
+
+ docker:
+ name: Docker Image Build
+ runs-on: ubuntu-latest
+ needs: [pypi]
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Docker meta for TAG
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+        images: |
+ ${{ secrets.DOCKER_IMAGE }}
+ ghcr.io/${{ secrets.DOCKER_IMAGE }}
+ tags: |
+ type=semver,pattern={{raw}}
+ type=raw,value=latest
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and push
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: Dockerfile
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+
+ docker_in_docker:
+ name: Docker Image Build with Docker support
+ runs-on: ubuntu-latest
+ needs: [docker]
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Docker meta for TAG
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+        images: |
+ ${{ secrets.DOCKER_IMAGE }}
+ ghcr.io/${{ secrets.DOCKER_IMAGE }}
+ tags: |
+ type=semver,pattern={{raw}}-dind
+ type=raw,value=latest-dind
+
+ - name: Login to DockerHub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+
+ - name: Login to GitHub Container Registry
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and push
+ uses: docker/build-push-action@v4
+ with:
+ context: .
+ file: Dockerfile.docker
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0afd524
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,348 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+*/report.html
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+.vscode/*
+.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+*.code-workspace
+
+# Local History for Visual Studio Code
+.history/
+
+*.swi
+*.sha512sum
+.idea
+
+.python-version
+
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+.idea/
+
+# vscode
+.vscode/*
+
+*.tar.xz
+
+report.html
+
+*.swp
+arista.xml
+tester.py
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..a2850f5
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,27 @@
+ARG PYTHON_VER=3.10
+
+FROM python:${PYTHON_VER}-slim
+
+RUN pip install --upgrade pip
+
+WORKDIR /local
+COPY . /local
+
+LABEL maintainer="Thomas Grimonet <tom@inetsix.net>"
+LABEL "org.opencontainers.image.title"="eos-downloader" \
+ "org.opencontainers.image.description"="eos-downloader container" \
+ "org.opencontainers.artifact.description"="A CLI to manage Arista EOS version download" \
+ "org.opencontainers.image.source"="https://github.com/titom73/eos-downloader" \
+ "org.opencontainers.image.url"="https://github.com/titom73/eos-downloader" \
+ "org.opencontainers.image.documentation"="https://github.com/titom73/eos-downloader" \
+ "org.opencontainers.image.licenses"="Apache-2.0" \
+ "org.opencontainers.image.vendor"="N/A" \
+ "org.opencontainers.image.authors"="Thomas Grimonet <tom@inetsix.net>" \
+ "org.opencontainers.image.base.name"="python" \
+ "org.opencontainers.image.revision"="dev" \
+ "org.opencontainers.image.version"="dev"
+
+ENV PYTHONPATH=/local
+RUN pip --no-cache-dir install .
+
+ENTRYPOINT [ "/usr/local/bin/ardl" ]
diff --git a/Dockerfile.docker b/Dockerfile.docker
new file mode 100644
index 0000000..72ef4e5
--- /dev/null
+++ b/Dockerfile.docker
@@ -0,0 +1,34 @@
+ARG PYTHON_VER=3.10
+
+FROM python:${PYTHON_VER}-slim
+
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends ca-certificates \
+ curl \
+ gnupg \
+ lsb-release \
+ && curl -fsSL https://download.docker.com/linux/ubuntu/gpg | \
+ gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg \
+ && echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian \
+ $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null \
+ && apt-get update \
+ && apt-get install -y --no-install-recommends docker-ce-cli \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -Rf /usr/share/doc && rm -Rf /usr/share/man \
+ && apt-get clean
+
+RUN pip install --upgrade pip
+
+WORKDIR /local
+COPY . /local
+
+LABEL maintainer="Thomas Grimonet <tom@inetsix.net>"
+LABEL com.example.version="edge"
+LABEL com.example.release-date="2022-04-05"
+LABEL com.example.version.is-production="False"
+
+ENV PYTHONPATH=/local
+RUN pip --no-cache-dir install .
+
+ENTRYPOINT [ "/usr/local/bin/ardl" ]
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..6db866a
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,204 @@
+Copyright (c) 2019, Arista Networks
+All rights reserved.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2019 Arista Networks
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..ccbc15c
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,10 @@
+CURRENT_DIR = $(shell pwd)
+
+DOCKER_NAME ?= titom73/eos-downloader
+DOCKER_TAG ?= dev
+DOCKER_FILE ?= Dockerfile
+PYTHON_VER ?= 3.9
+
+.PHONY: build
+build:
+	docker build -t $(DOCKER_NAME):$(DOCKER_TAG) --build-arg PYTHON_VER=$(PYTHON_VER) -f $(DOCKER_FILE) .
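+
+# Example usage (values are illustrative):
+#   make build DOCKER_TAG=edge PYTHON_VER=3.10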
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a85cf13
--- /dev/null
+++ b/README.md
@@ -0,0 +1,183 @@
+ [![code-testing](https://github.com/titom73/eos-downloader/actions/workflows/pr-management.yml/badge.svg?event=push)](https://github.com/titom73/eos-downloader/actions/workflows/pr-management.yml) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/eos-downloader) ![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/titom73/arista-downloader) ![PyPI - Downloads/month](https://img.shields.io/pypi/dm/eos-downloader) ![Docker Image Size (tag)](https://img.shields.io/docker/image-size/titom73/eos-downloader/edge)
+
+# Arista Software Downloader
+
+Script to download Arista software images to a local folder, CloudVision, or EVE-NG.
+
+```bash
+pip install eos-downloader
+```
+
+## CLI commands
+
+A new CLI is available to execute commands. This CLI is going to replace the [`eos-download`](./bin/README.md) script, which is now marked as __deprecated__.
+
+```bash
+ ardl
+Usage: ardl [OPTIONS] COMMAND [ARGS]...
+
+ Arista Network Download CLI
+
+Options:
+ --token TEXT Arista Token from your customer account [env var:
+ ARISTA_TOKEN]
+ --help Show this message and exit.
+
+Commands:
+ debug Debug commands to work with ardl
+ get Download Arista from Arista website
+ version Display version of ardl
+```
+
+> **Warning**
+> To use this CLI you need to get a valid token from your [Arista Account page](https://www.arista.com/en/users/profile).
+> For technical reasons, tokens are only available to customers with active maintenance contracts, not for personal accounts.
+
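+You can, for example, export the token once and then call the CLI (the token value below is a placeholder):
+
+```bash
+# Token is read from the ARISTA_TOKEN environment variable
+export ARISTA_TOKEN="xxxxxxxxxxxx"
+ardl get eos --version 4.28.3M --image-type cEOS
+```
+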
+### Download EOS Package
+
+
+> Supported packages are: EOS, cEOS, vEOS-lab, cEOS64
+
+You can download EOS packages with the following commands:
+
+```bash
+# Example for a cEOS package
+$ ardl get eos --version 4.28.3M --image-type cEOS
+```
+
+Available options are:
+
+```bash
+Usage: ardl get eos [OPTIONS]
+
+ Download EOS image from Arista website
+
+Options:
+ --image-type [64|INT|2GB-INT|cEOS|cEOS64|vEOS|vEOS-lab|EOS-2GB|default]
+ EOS Image type [required]
+ --version TEXT EOS version
+ -l, --latest Get latest version in given branch. If
+ --branch is not use, get the latest branch
+ with specific release type
+ -rtype, --release-type [F|M] EOS release type to search
+ -b, --branch TEXT EOS Branch to list releases
+ --docker-name TEXT Docker image name (default: arista/ceos)
+ [default: arista/ceos]
+ --output PATH Path to save image [default: .]
+ --log-level, --log [debug|info|warning|error|critical]
+ Logging level of the command
+ --eve-ng Run EVE-NG vEOS provisioning (only if CLI
+ runs on an EVE-NG server)
+ --disable-ztp Disable ZTP process in vEOS image (only
+ available with --eve-ng)
+ --import-docker Import docker image (only available with
+ --image_type cEOSlab)
+ --help Show this message and exit.
+```
+
+You can use the `--latest` and `--release-type` options to get the latest EOS version matching a specific release type:
+
+```bash
+# Get latest M release
+❯ ardl get eos --latest -rtype m
+🪐 eos-downloader is starting...
+ - Image Type: default
+ - Version: None
+🔎 Searching file EOS-4.29.3M.swi
+ -> Found file at /support/download/EOS-USA/Active Releases/4.29/EOS-4.29.3M/EOS-4.29.3M.swi
+...
+✅ Downloaded file is correct.
+✅ processing done !
+```
+
+### List available EOS versions from Arista website
+
+You can easily get the list of available versions using the CLI, as shown below:
+
+```bash
+❯ ardl info eos-versions
+Usage: ardl info eos-versions [OPTIONS]
+
+ List Available EOS version on Arista.com website.
+
+ Comes with some filters to get latest release (F or M) as well as branch
+ filtering
+
+ - To get latest M release available (without any branch): ardl info eos-
+ versions --latest -rtype m
+
+ - To get latest F release available: ardl info eos-versions --latest
+ -rtype F
+
+Options:
+ -l, --latest Get latest version in given branch. If
+ --branch is not use, get the latest branch
+ with specific release type
+ -rtype, --release-type [F|M] EOS release type to search
+ -b, --branch TEXT EOS Branch to list releases
+ -v, --verbose Human readable output. Default is none to
+ use output in script)
+ --log-level, --log [debug|info|warning|error|critical]
+ Logging level of the command
+ --help Show this message and exit.
+```
+
+__Example__
+
+```bash
+❯ ardl info eos-versions -rtype m --branch 4.28
+['4.28.6.1M', '4.28.6M', '4.28.5.1M', '4.28.5M', '4.28.4M', '4.28.3M']
+```
+
+### Download CVP package
+
+> Supported packages are: OVA, KVM, RPM, Upgrade
+
+```bash
+$ ardl get cvp --format upgrade --version 2022.2.1 --log-level debug --output ~/Downloads
+```
+
+Available options are:
+
+```bash
+ --format [ova|rpm|kvm|upgrade] CVP Image type [required]
+ --version TEXT CVP version [required]
+ --output PATH Path to save image [default: .]
+ --log-level, --log [debug|info|warning|error|critical]
+ Logging level of the command
+ --help Show this message and exit.
+```
+
+## Requirements
+
+The repository requires Python `>=3.6` with the following requirements:
+
+```requirements
+cvprac
+cryptography
+paramiko
+requests
+requests-toolbelt
+scp
+tqdm
+```
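+
+These dependencies are pulled in automatically by `pip install eos-downloader`. If you prefer working from a clone of the repository, a typical (illustrative) installation is:
+
+```bash
+git clone https://github.com/titom73/eos-downloader.git
+cd eos-downloader
+pip install .
+```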
+
+On EVE-NG, you may have to install or upgrade __pyOpenSSL__ to version `23.0.0`:
+
+```
+# Error when running ardl: AttributeError: module 'lib' has no attribute 'X509_V_FLAG_CB_ISSUER_CHECK'
+
+$ pip install pyopenssl --upgrade
+```
+
+## Docker
+
+Please refer to the [docker documentation](docs/docker.md).
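+
+As a quick illustration (image tag, token value, and mount path are placeholders; see the linked documentation for the supported options):
+
+```bash
+docker run --rm -it \
+  -e ARISTA_TOKEN="xxxxxxx" \
+  -v ${PWD}:/download \
+  titom73/eos-downloader:edge get eos --version 4.28.3M --image-type cEOS --output /download
+```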
+
+## Author
+
+Based on an original idea by [@Mark Rayson](https://github.com/Sparky-python) in [arista-netdevops-community/eos-scripts](https://github.com/arista-netdevops-community/eos-scripts).
+
+## License
+
+Code is released under the [Apache 2.0](LICENSE) license.
diff --git a/bin/README.md b/bin/README.md
new file mode 100644
index 0000000..7509633
--- /dev/null
+++ b/bin/README.md
@@ -0,0 +1,111 @@
+## scripts
+
+These scripts are deprecated and will be removed in a future version. Please use the CLI implemented in the package instead.
+
+### eos-download
+
+```bash
+usage: eos-download [-h]
+ --version VERSION
+ [--token TOKEN]
+ [--image IMAGE]
+ [--destination DESTINATION]
+ [--eve]
+ [--noztp]
+ [--import_docker]
+ [--docker_name DOCKER_NAME]
+ [--verbose VERBOSE]
+ [--log]
+
+EOS downloader script.
+
+optional arguments:
+ -h, --help show this help message and exit
+ --token TOKEN arista.com user API key - can use ENV:ARISTA_TOKEN
+ --image IMAGE Type of EOS image required
+ --version VERSION EOS version to download from website
+ --destination DESTINATION
+ Path where to save EOS package downloaded
+ --eve Option to install EOS package to EVE-NG
+ --noztp Option to deactivate ZTP when used with EVE-NG
+ --import_docker Option to import cEOS image to docker
+ --docker_name DOCKER_NAME
+ Docker image name to use
+ --verbose VERBOSE Script verbosity
+ --log Option to activate logging to eos-downloader.log file
+```
+
+- The token is read from `ENV:ARISTA_TOKEN` unless you specify one with the `--token` option.
+
+- Supported platforms:
+
+  - `INT`: International version
+  - `64`: 64-bit version
+  - `2GB`: for 2GB flash platforms
+  - `2GB-INT`: for 2GB flash platforms running the International version
+  - `vEOS`: Virtual EOS image
+  - `vEOS-lab`: Virtual lab EOS
+  - `vEOS64-lab`: Virtual lab EOS running in 64-bit mode
+  - `cEOS`: Docker version of EOS
+  - `cEOS64`: Docker version of EOS running in 64-bit mode
+
+#### Examples
+
+- Download vEOS-lab image and install in EVE-NG
+
+```bash
+$ eos-download --image vEOS-lab --version 4.25.7M --eve --noztp
+```
+
+- Download Docker image
+
+```bash
+$ eos-download --image cEOS --version 4.27.2F
+🪐 eos-downloader is starting...
+ - Image Type: cEOS
+ - Version: 4.27.2F
+✅ Authenticated on arista.com
+🔎 Searching file cEOS-lab-4.27.2F.tar.xz
+ -> Found file at /support/download/EOS-USA/Active Releases/4.27/EOS-4.27.2F/cEOS-lab/cEOS-lab-4.27.2F.tar.xz
+💾 Downloading cEOS-lab-4.27.2F.tar.xz ━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% • 17.1 MB/s • 451.6/451.6 MB • 0:00:19 •
+🚀 Running checksum validation
+🔎 Searching file cEOS-lab-4.27.2F.tar.xz.sha512sum
+ -> Found file at /support/download/EOS-USA/Active
+Releases/4.27/EOS-4.27.2F/cEOS-lab/cEOS-lab-4.27.2F.tar.xz.sha512sum
+💾 Downloading cEOS-lab-4.27.2F.tar.xz.sha512sum ━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% • ? • 154/154 bytes • 0:00:00 •
+✅ Downloaded file is correct.
+```
+
+__Note:__ `ARISTA_TOKEN` should be set in your `.profile` rather than exported for each command. If it is not set, you can use the `--token` option.
+
+```bash
+# Export Token
+export ARISTA_TOKEN="xxxxxxx"
+```
+
+### Cloudvision Image uploader
+
+Create an image bundle on Cloudvision.
+
+```bash
+cvp-upload -h
+usage: cvp-upload [-h]
+ [--token TOKEN]
+ [--image IMAGE]
+ --cloudvision CLOUDVISION
+ [--create_bundle]
+ [--timeout TIMEOUT]
+ [--verbose VERBOSE]
+
+Cloudvision Image uploader script.
+
+optional arguments:
+ -h, --help show this help message and exit
+ --token TOKEN CVP Authentication token - can use ENV:ARISTA_AVD_CV_TOKEN
+  --image IMAGE         Path to the local EOS image to upload
+ --cloudvision CLOUDVISION
+ Cloudvision instance where to upload image
+ --create_bundle Option to create image bundle with new uploaded image
+  --timeout TIMEOUT     Connection timeout. Default is 1200 seconds
+ --verbose VERBOSE Script verbosity
+``` \ No newline at end of file
diff --git a/bin/cvp-upload b/bin/cvp-upload
new file mode 100755
index 0000000..74213fe
--- /dev/null
+++ b/bin/cvp-upload
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+import sys
+import os
+import argparse
+from eos_downloader.cvp import CvFeatureManager, CvpAuthenticationItem
+from loguru import logger
+
+ARISTA_AVD_CV_TOKEN = os.getenv('ARISTA_AVD_CV_TOKEN', '')
+
+
+def read_cli():
+ parser = argparse.ArgumentParser(description='Cloudvision Image uploader script.')
+ parser.add_argument('--token', required=False,
+ default=ARISTA_AVD_CV_TOKEN,
+ help='CVP Authentication token - can use ENV:ARISTA_AVD_CV_TOKEN')
+ parser.add_argument('--image', required=False,
+                        default='EOS', help='Path to the image file to upload')
+ parser.add_argument('--cloudvision', required=True,
+ help='Cloudvision instance where to upload image')
+ parser.add_argument('--create_bundle', required=False, action='store_true',
+ help="Option to create image bundle with new uploaded image")
+ parser.add_argument('--timeout', required=False,
+ default=1200,
+                        help='Connection timeout. Default is 1200 seconds')
+ parser.add_argument('--verbose', required=False,
+ default='info', help='Script verbosity')
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+
+ cli_options = read_cli()
+
+ logger.remove()
+ logger.add(sys.stderr, level=str(cli_options.verbose).upper())
+
+ cv_authentication = CvpAuthenticationItem(
+ server=cli_options.cloudvision,
+ token=cli_options.token,
+ port=443,
+ timeout=cli_options.timeout,
+ validate_cert=False
+ )
+
+ my_cvp_uploader = CvFeatureManager(authentication=cv_authentication)
+ result_upload = my_cvp_uploader.upload_image(cli_options.image)
+ if result_upload and cli_options.create_bundle:
+ bundle_name = os.path.basename(cli_options.image)
+ logger.info('Creating image bundle {}'.format(bundle_name))
+ my_cvp_uploader.create_bundle(
+ name=bundle_name,
+ images_name=[bundle_name]
+ )
+
+ sys.exit(0)
diff --git a/bin/eos-download b/bin/eos-download
new file mode 100755
index 0000000..9826b31
--- /dev/null
+++ b/bin/eos-download
@@ -0,0 +1,86 @@
+#!/usr/bin/python
+
+import sys
+import os
+import argparse
+import eos_downloader.eos
+from loguru import logger
+from rich.console import Console
+
+ARISTA_TOKEN = os.getenv('ARISTA_TOKEN', '')
+
+
+def read_cli():
+ parser = argparse.ArgumentParser(description='EOS downloader script.')
+ parser.add_argument('--token', required=False,
+ default=ARISTA_TOKEN,
+ help='arista.com user API key - can use ENV:ARISTA_TOKEN')
+ parser.add_argument('--image', required=False,
+ default='EOS', help='Type of EOS image required')
+ parser.add_argument('--version', required=True,
+ default='', help='EOS version to download from website')
+
+ parser.add_argument('--destination', required=False,
+ default=str(os.getcwd()),
+ help='Path where to save EOS package downloaded')
+
+ parser.add_argument('--eve', required=False, action='store_true',
+ help="Option to install EOS package to EVE-NG")
+ parser.add_argument('--noztp', required=False, action='store_true',
+ help="Option to deactivate ZTP when used with EVE-NG")
+
+ parser.add_argument('--import_docker', required=False, action='store_true',
+ help="Option to import cEOS image to docker")
+ parser.add_argument('--docker_name', required=False,
+ default='arista/ceos',
+ help='Docker image name to use')
+
+ parser.add_argument('--verbose', required=False,
+ default='info', help='Script verbosity')
+ parser.add_argument('--log', required=False, action='store_true',
+ help="Option to activate logging to eos-downloader.log file")
+
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+
+ cli_options = read_cli()
+
+ console = Console()
+
+ console.print('\n[red]WARNING: This script is now deprecated. Please use ardl cli instead[/red]\n\n')
+
+ if cli_options.token is None or cli_options.token == '':
+ console.print('\n❗ Token is unset ! Please configure ARISTA_TOKEN or use --token option', style="bold red")
+ sys.exit(1)
+
+ logger.remove()
+ if cli_options.log:
+ logger.add("eos-downloader.log", rotation="10 MB", level=str(cli_options.verbose).upper())
+
+ console.print("🪐 [bold blue]eos-downloader[/bold blue] is starting...", )
+ console.print(f' - Image Type: {cli_options.image}')
+ console.print(f' - Version: {cli_options.version}')
+
+
+ my_download = eos_downloader.eos.EOSDownloader(
+ image=cli_options.image,
+ software='EOS',
+ version=cli_options.version,
+ token=cli_options.token,
+ hash_method='sha512sum')
+
+ my_download.authenticate()
+
+ if cli_options.eve:
+ my_download.provision_eve(noztp=cli_options.noztp, checksum=True)
+ else:
+ my_download.download_local(file_path=cli_options.destination, checksum=True)
+
+ if cli_options.import_docker:
+ my_download.docker_import(
+ image_name=cli_options.docker_name
+ )
+ console.print('✅ processing done !')
+ sys.exit(0)
diff --git a/docs/docker.md b/docs/docker.md
new file mode 100644
index 0000000..d2407f2
--- /dev/null
+++ b/docs/docker.md
@@ -0,0 +1,49 @@
+# Docker Image
+
+A [docker image](https://hub.docker.com/repository/docker/titom73/eos-downloader/tags?page=1&ordering=last_updated) is also available when Python cannot be used.
+
+## Connect to your docker container
+
+```bash
+$ docker pull titom73/eos-downloader:edge
+$ docker run -it --rm --entrypoint bash titom73/eos-downloader:edge
+root@a9a8ceb533df:/local# ardl get eos --help
+root@a9a8ceb533df:/local# cd /download
+root@a9a8ceb533df:/download# ardl --token xxxx get eos --image-type cEOS --version 4.28.3M
+```
+
+## Use CLI with docker
+
+```bash
+docker run --rm titom73/eos-downloader:edge get eos --help
+Usage: ardl get eos [OPTIONS]
+
+ Download EOS image from Arista website
+
+Options:
+ --image-type [64|INT|2GB-INT|cEOS|cEOS64|vEOS|vEOS-lab|EOS-2GB|default]
+ EOS Image type [required]
+ --version TEXT EOS version [required]
+ --docker-name TEXT Docker image name (default: arista/ceos)
+ [default: arista/ceos]
+ --output PATH Path to save image [default: .]
+ --log-level, --log [debug|info|warning|error|critical]
+ Logging level of the command
+ --eve-ng / --no-eve-ng Run EVE-NG vEOS provisioning (only if CLI
+ runs on an EVE-NG server)
+ --disable-ztp / --no-disable-ztp
+ Disable ZTP process in vEOS image (only
+ available with --eve-ng)
+ --import-docker / --no-import-docker
+ Import docker image (only available with
+ --image_type cEOSlab)
+ --help Show this message and exit.
+```
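+
+As a sketch, assuming the image entrypoint already invokes `ardl` (as in the help example above) and that `/download` is writable inside the container, a complete non-interactive download could look like the following. The token value and the mounted host directory are placeholders:
+
+```bash
+docker run --rm -v ${PWD}:/download titom73/eos-downloader:edge \
+    --token xxxx get eos --image-type cEOS --version 4.28.3M --output /download
+```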
+
+## Available tags
+
+- `edge`: Latest version built from the main branch
+- `latest`: Latest stable Version
+- `semver`: Version built from git tag
+- `latest-dind`: Latest stable Version with docker CLI
+- `semver-dind`: Version built from git tag with docker CLI
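+
+For instance, to run the CLI from a stable image that also ships the docker CLI (handy when using `--import-docker`), pull the `latest-dind` tag:
+
+```bash
+docker pull titom73/eos-downloader:latest-dind
+```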
diff --git a/eos_downloader/__init__.py b/eos_downloader/__init__.py
new file mode 100644
index 0000000..345ccf7
--- /dev/null
+++ b/eos_downloader/__init__.py
@@ -0,0 +1,47 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+
+"""
+EOS Downloader module.
+"""
+
+from __future__ import (absolute_import, division,
+ print_function, unicode_literals, annotations)
+import dataclasses
+from typing import Any
+import json
+import importlib.metadata
+
+__author__ = '@titom73'
+__email__ = 'tom@inetsix.net'
+__date__ = '2022-03-16'
+__version__ = importlib.metadata.version("eos-downloader")
+
+# __all__ = ["CvpAuthenticationItem", "CvFeatureManager", "EOSDownloader", "ObjectDownloader", "reverse"]
+
+ARISTA_GET_SESSION = "https://www.arista.com/custom_data/api/cvp/getSessionCode/"
+
+ARISTA_SOFTWARE_FOLDER_TREE = "https://www.arista.com/custom_data/api/cvp/getFolderTree/"
+
+ARISTA_DOWNLOAD_URL = "https://www.arista.com/custom_data/api/cvp/getDownloadLink/"
+
+MSG_TOKEN_EXPIRED = """The API token has expired. Please visit arista.com, click on your profile and
+select Regenerate Token then re-run the script with the new token.
+"""
+
+MSG_TOKEN_INVALID = """The API token is incorrect. Please visit arista.com, click on your profile and
+check the Access Token. Then re-run the script with the correct token.
+"""
+
+MSG_INVALID_DATA = """Invalid data returned by server
+"""
+
+EVE_QEMU_FOLDER_PATH = '/opt/unetlab/addons/qemu/'
+
+
+class EnhancedJSONEncoder(json.JSONEncoder):
+ """Custom JSon encoder."""
+ def default(self, o: Any) -> Any:
+ if dataclasses.is_dataclass(o):
+ return dataclasses.asdict(o)
+ return super().default(o)
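+
+
+# Usage sketch for the custom encoder (the Point dataclass below is purely
+# illustrative and not part of this package):
+#
+#   import dataclasses, json
+#
+#   @dataclasses.dataclass
+#   class Point:
+#       x: int = 1
+#
+#   json.dumps(Point(), cls=EnhancedJSONEncoder)   # -> '{"x": 1}'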
diff --git a/eos_downloader/cli/__init__.py b/eos_downloader/cli/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/eos_downloader/cli/__init__.py
diff --git a/eos_downloader/cli/cli.py b/eos_downloader/cli/cli.py
new file mode 100644
index 0000000..ddd0dea
--- /dev/null
+++ b/eos_downloader/cli/cli.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# coding: utf-8 -*-
+# pylint: disable=no-value-for-parameter
+# pylint: disable=cyclic-import
+# pylint: disable=too-many-arguments
+# pylint: disable=unused-argument
+
+
+"""
+ARDL CLI Baseline.
+"""
+
+import click
+from rich.console import Console
+import eos_downloader
+from eos_downloader.cli.get import commands as get_commands
+from eos_downloader.cli.debug import commands as debug_commands
+from eos_downloader.cli.info import commands as info_commands
+
+
+@click.group()
+@click.pass_context
+@click.option('--token', show_envvar=True, default=None, help='Arista Token from your customer account')
+def ardl(ctx: click.Context, token: str) -> None:
+ """Arista Network Download CLI"""
+ ctx.ensure_object(dict)
+ ctx.obj['token'] = token
+
+
+@click.command()
+def version() -> None:
+ """Display version of ardl"""
+ console = Console()
+ console.print(f'ardl is running version {eos_downloader.__version__}')
+
+
+@ardl.group(no_args_is_help=True)
+@click.pass_context
+def get(ctx: click.Context) -> None:
+ # pylint: disable=redefined-builtin
+ """Download Arista from Arista website"""
+
+
+@ardl.group(no_args_is_help=True)
+@click.pass_context
+def info(ctx: click.Context) -> None:
+ # pylint: disable=redefined-builtin
+ """List information from Arista website"""
+
+
+@ardl.group(no_args_is_help=True)
+@click.pass_context
+def debug(ctx: click.Context) -> None:
+ # pylint: disable=redefined-builtin
+ """Debug commands to work with ardl"""
+
+# ARDL CLI Execution
+
+
+def cli() -> None:
+ """Load ANTA CLI"""
+ # Load group commands
+ get.add_command(get_commands.eos)
+ get.add_command(get_commands.cvp)
+ info.add_command(info_commands.eos_versions)
+ debug.add_command(debug_commands.xml)
+ ardl.add_command(version)
+ # Load CLI
+ ardl(
+ obj={},
+ auto_envvar_prefix='arista'
+ )
+
+
+if __name__ == '__main__':
+ cli()
diff --git a/eos_downloader/cli/debug/__init__.py b/eos_downloader/cli/debug/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/eos_downloader/cli/debug/__init__.py
diff --git a/eos_downloader/cli/debug/commands.py b/eos_downloader/cli/debug/commands.py
new file mode 100644
index 0000000..107b8a0
--- /dev/null
+++ b/eos_downloader/cli/debug/commands.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# coding: utf-8 -*-
+# pylint: disable=no-value-for-parameter
+# pylint: disable=too-many-arguments
+# pylint: disable=line-too-long
+# pylint: disable=duplicate-code
+# flake8: noqa E501
+
+"""
+Commands for ARDL CLI to get data.
+"""
+
+import xml.etree.ElementTree as ET
+from xml.dom import minidom
+
+import click
+from loguru import logger
+from rich.console import Console
+
+import eos_downloader.eos
+
+
+@click.command()
+@click.pass_context
+@click.option('--output', default=str('arista.xml'), help='Path to save XML file', type=click.Path(), show_default=True)
+@click.option('--log-level', '--log', help='Logging level of the command', default=None, type=click.Choice(['debug', 'info', 'warning', 'error', 'critical'], case_sensitive=False))
+def xml(ctx: click.Context, output: str, log_level: str) -> None:
+ # sourcery skip: remove-unnecessary-cast
+ """Extract XML directory structure"""
+ console = Console()
+ # Get from Context
+ token = ctx.obj['token']
+
+ logger.remove()
+ if log_level is not None:
+ logger.add("eos-downloader.log", rotation="10 MB", level=log_level.upper())
+
+ my_download = eos_downloader.eos.EOSDownloader(
+ image='unset',
+ software='EOS',
+ version='unset',
+ token=token,
+ hash_method='sha512sum')
+
+ my_download.authenticate()
+ xml_object: ET.ElementTree = my_download._get_folder_tree() # pylint: disable=protected-access
+ xml_content = xml_object.getroot()
+
+ xmlstr = minidom.parseString(ET.tostring(xml_content)).toprettyxml(indent=" ", newl='')
+ with open(output, "w", encoding='utf-8') as f:
+ f.write(str(xmlstr))
+
+ console.print(f'XML file saved in: { output }')
diff --git a/eos_downloader/cli/get/__init__.py b/eos_downloader/cli/get/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/eos_downloader/cli/get/__init__.py
diff --git a/eos_downloader/cli/get/commands.py b/eos_downloader/cli/get/commands.py
new file mode 100644
index 0000000..13a8eec
--- /dev/null
+++ b/eos_downloader/cli/get/commands.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# coding: utf-8 -*-
+# pylint: disable=no-value-for-parameter
+# pylint: disable=too-many-arguments
+# pylint: disable=line-too-long
+# pylint: disable=redefined-builtin
+# flake8: noqa E501
+
+"""
+Commands for ARDL CLI to get data.
+"""
+
+import os
+import sys
+from typing import Union
+
+import click
+from loguru import logger
+from rich.console import Console
+
+import eos_downloader.eos
+from eos_downloader.models.version import BASE_VERSION_STR, RTYPE_FEATURE, RTYPES
+
+EOS_IMAGE_TYPE = ['64', 'INT', '2GB-INT', 'cEOS', 'cEOS64', 'vEOS', 'vEOS-lab', 'EOS-2GB', 'default']
+CVP_IMAGE_TYPE = ['ova', 'rpm', 'kvm', 'upgrade']
+
+@click.command(no_args_is_help=True)
+@click.pass_context
+@click.option('--image-type', default='default', help='EOS Image type', type=click.Choice(EOS_IMAGE_TYPE), required=True)
+@click.option('--version', default=None, help='EOS version', type=str, required=False)
+@click.option('--latest', '-l', is_flag=True, type=click.BOOL, default=False, help='Get latest version in given branch. If --branch is not used, get the latest branch for the given release type')
+@click.option('--release-type', '-rtype', type=click.Choice(RTYPES, case_sensitive=False), default=RTYPE_FEATURE, help='EOS release type to search')
+@click.option('--branch', '-b', type=click.STRING, default=None, help='EOS Branch to list releases')
+@click.option('--docker-name', default='arista/ceos', help='Docker image name (default: arista/ceos)', type=str, show_default=True)
+@click.option('--output', default=str(os.path.relpath(os.getcwd(), start=os.curdir)), help='Path to save image', type=click.Path(), show_default=True)
+# Debugging
+@click.option('--log-level', '--log', help='Logging level of the command', default=None, type=click.Choice(['debug', 'info', 'warning', 'error', 'critical'], case_sensitive=False))
+# Boolean triggers
+@click.option('--eve-ng', is_flag=True, help='Run EVE-NG vEOS provisioning (only if CLI runs on an EVE-NG server)', default=False)
+@click.option('--disable-ztp', is_flag=True, help='Disable ZTP process in vEOS image (only available with --eve-ng)', default=False)
+@click.option('--import-docker', is_flag=True, help='Import docker image (only available with --image_type cEOSlab)', default=False)
+def eos(
+ ctx: click.Context, image_type: str, output: str, log_level: str, eve_ng: bool, disable_ztp: bool,
+ import_docker: bool, docker_name: str, version: Union[str, None] = None, release_type: str = RTYPE_FEATURE,
+ latest: bool = False, branch: Union[str,None] = None
+ ) -> int:
+ """Download EOS image from Arista website"""
+ console = Console()
+ # Get from Context
+ token = ctx.obj['token']
+ if token is None or token == '':
+ console.print('❗ Token is unset ! Please configure ARISTA_TOKEN or use --token option', style="bold red")
+ sys.exit(1)
+
+ logger.remove()
+ if log_level is not None:
+ logger.add("eos-downloader.log", rotation="10 MB", level=log_level.upper())
+
+ console.print("🪐 [bold blue]eos-downloader[/bold blue] is starting...", )
+ console.print(f' - Image Type: {image_type}')
+ console.print(f' - Version: {version}')
+
+
+ if version is not None:
+ my_download = eos_downloader.eos.EOSDownloader(
+ image=image_type,
+ software='EOS',
+ version=version,
+ token=token,
+ hash_method='sha512sum')
+ my_download.authenticate()
+
+ elif latest:
+ my_download = eos_downloader.eos.EOSDownloader(
+ image=image_type,
+ software='EOS',
+ version='unset',
+ token=token,
+ hash_method='sha512sum')
+ my_download.authenticate()
+ if branch is None:
+ branch = str(my_download.latest_branch(rtype=release_type).branch)
+ latest_version = my_download.latest_eos(branch, rtype=release_type)
+ if str(latest_version) == BASE_VERSION_STR:
+ console.print(f'[red]Error[/red], cannot find any version in {branch} for {release_type} release type')
+ sys.exit(1)
+ my_download.version = str(latest_version)
+
+ if eve_ng:
+ my_download.provision_eve(noztp=disable_ztp, checksum=True)
+ else:
+ my_download.download_local(file_path=output, checksum=True)
+
+ if import_docker:
+ my_download.docker_import(
+ image_name=docker_name
+ )
+ console.print('✅ processing done !')
+ sys.exit(0)
+
+
+
+@click.command(no_args_is_help=True)
+@click.pass_context
+@click.option('--format', default='upgrade', help='CVP Image type', type=click.Choice(CVP_IMAGE_TYPE), required=True)
+@click.option('--version', default=None, help='CVP version', type=str, required=True)
+@click.option('--output', default=str(os.path.relpath(os.getcwd(), start=os.curdir)), help='Path to save image', type=click.Path(), show_default=True)
+@click.option('--log-level', '--log', help='Logging level of the command', default=None, type=click.Choice(['debug', 'info', 'warning', 'error', 'critical'], case_sensitive=False))
+def cvp(ctx: click.Context, version: str, format: str, output: str, log_level: str) -> int:
+ """Download CVP image from Arista website"""
+ console = Console()
+ # Get from Context
+ token = ctx.obj['token']
+ if token is None or token == '':
+ console.print('❗ Token is unset ! Please configure ARISTA_TOKEN or use --token option', style="bold red")
+ sys.exit(1)
+
+ logger.remove()
+ if log_level is not None:
+ logger.add("eos-downloader.log", rotation="10 MB", level=log_level.upper())
+
+ console.print("🪐 [bold blue]eos-downloader[/bold blue] is starting...", )
+ console.print(f' - Image Type: {format}')
+ console.print(f' - Version: {version}')
+
+ my_download = eos_downloader.eos.EOSDownloader(
+ image=format,
+ software='CloudVision',
+ version=version,
+ token=token,
+ hash_method='md5sum')
+
+ my_download.authenticate()
+
+ my_download.download_local(file_path=output, checksum=False)
+ console.print('✅ processing done !')
+ sys.exit(0)
diff --git a/eos_downloader/cli/info/__init__.py b/eos_downloader/cli/info/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/eos_downloader/cli/info/__init__.py
diff --git a/eos_downloader/cli/info/commands.py b/eos_downloader/cli/info/commands.py
new file mode 100644
index 0000000..b51003b
--- /dev/null
+++ b/eos_downloader/cli/info/commands.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# coding: utf-8 -*-
+# pylint: disable=no-value-for-parameter
+# pylint: disable=too-many-arguments
+# pylint: disable=line-too-long
+# pylint: disable=redefined-builtin
+# flake8: noqa E501
+
+"""
+Commands for ARDL CLI to list data.
+"""
+
+import sys
+from typing import Union
+
+import click
+from loguru import logger
+from rich.console import Console
+from rich.pretty import pprint
+
+import eos_downloader.eos
+from eos_downloader.models.version import BASE_VERSION_STR, RTYPE_FEATURE, RTYPES
+
+
+@click.command(no_args_is_help=True)
+@click.pass_context
+@click.option('--latest', '-l', is_flag=True, type=click.BOOL, default=False, help='Get latest version in given branch. If --branch is not used, get the latest branch for the given release type')
+@click.option('--release-type', '-rtype', type=click.Choice(RTYPES, case_sensitive=False), default=RTYPE_FEATURE, help='EOS release type to search')
+@click.option('--branch', '-b', type=click.STRING, default=None, help='EOS Branch to list releases')
+@click.option('--verbose', '-v', is_flag=True, type=click.BOOL, default=False, help='Human readable output. Disabled by default so the output can be used in scripts')
+@click.option('--log-level', '--log', help='Logging level of the command', default='warning', type=click.Choice(['debug', 'info', 'warning', 'error', 'critical'], case_sensitive=False))
+def eos_versions(ctx: click.Context, log_level: str, branch: Union[str,None] = None, release_type: str = RTYPE_FEATURE, latest: bool = False, verbose: bool = False) -> None:
+ # pylint: disable = too-many-branches
+ """
+ List Available EOS version on Arista.com website.
+
+ Comes with some filters to get latest release (F or M) as well as branch filtering
+
+ - To get latest M release available (without any branch): ardl info eos-versions --latest -rtype m
+
+ - To get latest F release available: ardl info eos-versions --latest -rtype F
+ """
+ console = Console()
+ # Get from Context
+ token = ctx.obj['token']
+
+ logger.remove()
+ if log_level is not None:
+ logger.add("eos-downloader.log", rotation="10 MB", level=log_level.upper())
+
+ my_download = eos_downloader.eos.EOSDownloader(
+ image='unset',
+ software='EOS',
+ version='unset',
+ token=token,
+ hash_method='sha512sum')
+
+ auth = my_download.authenticate()
+ if verbose and auth:
+ console.print('✅ Authenticated on arista.com')
+
+ if release_type is not None:
+ release_type = release_type.upper()
+
+ if latest:
+ if branch is None:
+ branch = str(my_download.latest_branch(rtype=release_type).branch)
+ latest_version = my_download.latest_eos(branch, rtype=release_type)
+ if str(latest_version) == BASE_VERSION_STR:
+ console.print(f'[red]Error[/red], cannot find any version in {branch} for {release_type} release type')
+ sys.exit(1)
+ if verbose:
+ console.print(f'Branch {branch} has been selected with release type {release_type}')
+ if branch is not None:
+ console.print(f'Latest release for {branch}: {latest_version}')
+ else:
+ console.print(f'Latest EOS release: {latest_version}')
+ else:
+ console.print(f'{ latest_version }')
+ else:
+ versions = my_download.get_eos_versions(branch=branch, rtype=release_type)
+ if verbose:
+ console.print(f'List of available versions for {branch if branch is not None else "all branches"}')
+ for version in versions:
+ console.print(f' → {str(version)}')
+ else:
+ pprint([str(version) for version in versions])
diff --git a/eos_downloader/cvp.py b/eos_downloader/cvp.py
new file mode 100644
index 0000000..6f14eb0
--- /dev/null
+++ b/eos_downloader/cvp.py
@@ -0,0 +1,276 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+
+"""
+CVP Uploader content
+"""
+
+import os
+from typing import List, Optional, Any
+from dataclasses import dataclass
+from loguru import logger
+from cvprac.cvp_client import CvpClient
+from cvprac.cvp_client_errors import CvpLoginError
+
+# from eos_downloader.tools import exc_to_str
+
+# logger = logging.getLogger(__name__)
+
+
+@dataclass
+class CvpAuthenticationItem:
+ """
+ Data structure to represent Cloudvision Authentication
+ """
+ server: str
+ port: int = 443
+ token: Optional[str] = None
+ timeout: int = 1200
+ validate_cert: bool = False
+
+
+class Filer():
+ # pylint: disable=too-few-public-methods
+ """
+ Filer Helper for file management
+ """
+ def __init__(self, path: str) -> None:
+ self.file_exist = False
+ self.filename = ''
+ self.absolute_path = ''
+ self.relative_path = path
+ if os.path.exists(path):
+ self.file_exist = True
+ self.filename = os.path.basename(path)
+ self.absolute_path = os.path.realpath(path)
+
+ def __repr__(self) -> str:
+ return self.absolute_path if self.file_exist else ''
+
+
+class CvFeatureManager():
+ """
+    CvFeatureManager Object to interact with Cloudvision
+ """
+ def __init__(self, authentication: CvpAuthenticationItem) -> None:
+ """
+ __init__ Class Creator
+
+ Parameters
+ ----------
+ authentication : CvpAuthenticationItem
+ Authentication information to use to connect to Cloudvision
+ """
+ self._authentication = authentication
+ # self._cv_instance = CvpClient()
+ self._cv_instance = self._connect(authentication=authentication)
+ self._cv_images = self.__get_images()
+ # self._cv_bundles = self.__get_bundles()
+
+ def _connect(self, authentication: CvpAuthenticationItem) -> CvpClient:
+ """
+ _connect Connection management
+
+ Parameters
+ ----------
+ authentication : CvpAuthenticationItem
+ Authentication information to use to connect to Cloudvision
+
+ Returns
+ -------
+ CvpClient
+ cvprac session to cloudvision
+ """
+ client = CvpClient()
+ if authentication.token is not None:
+ try:
+ client.connect(
+ nodes=[authentication.server],
+ username='',
+ password='',
+ api_token=authentication.token,
+ is_cvaas=True,
+ port=authentication.port,
+ cert=authentication.validate_cert,
+ request_timeout=authentication.timeout
+ )
+ except CvpLoginError as error_data:
+ logger.error(f'Cannot connect to Cloudvision server {authentication.server}')
+ logger.debug(f'Error message: {error_data}')
+ logger.info('connected to Cloudvision server')
+ logger.debug(f'Connection info: {authentication}')
+ return client
+
+ def __get_images(self) -> List[Any]:
+ """
+ __get_images Collect information about images on Cloudvision
+
+ Returns
+ -------
+ dict
+ Fact returned by Cloudvision
+ """
+ images = []
+ logger.debug(' -> Collecting images')
+ images = self._cv_instance.api.get_images()['data']
+ return images if self.__check_api_result(images) else []
+
+ # def __get_bundles(self):
+ # """
+ # __get_bundles [Not In use] Collect information about bundles on Cloudvision
+
+ # Returns
+ # -------
+ # dict
+ # Fact returned by Cloudvision
+ # """
+ # bundles = []
+ # logger.debug(' -> Collecting images bundles')
+ # bundles = self._cv_instance.api.get_image_bundles()['data']
+ # # bundles = self._cv_instance.post(url='/cvpservice/image/getImageBundles.do?queryparam=&startIndex=0&endIndex=0')['data']
+ # return bundles if self.__check_api_result(bundles) else None
+
+ def __check_api_result(self, arg0: Any) -> bool:
+ """
+ __check_api_result Check API calls return content
+
+ Parameters
+ ----------
+ arg0 : any
+ Element to test
+
+ Returns
+ -------
+ bool
+ True if data are correct False in other cases
+ """
+ logger.debug(arg0)
+ return len(arg0) > 0
+
+ def _does_image_exist(self, image_name: str) -> bool:
+ """
+ _does_image_exist Check if an image is referenced in Cloudvision facts
+
+ Parameters
+ ----------
+ image_name : str
+ Name of the image to search for
+
+ Returns
+ -------
+ bool
+ True if present
+ """
+ return any(image_name == image['name'] for image in self._cv_images) if isinstance(self._cv_images, list) else False
+
+ def _does_bundle_exist(self, bundle_name: str) -> bool:
+ # pylint: disable=unused-argument
+ """
+        _does_bundle_exist Check if a bundle is referenced in Cloudvision facts
+
+ Returns
+ -------
+ bool
+ True if present
+ """
+ # return any(bundle_name == bundle['name'] for bundle in self._cv_bundles)
+ return False
+
+ def upload_image(self, image_path: str) -> bool:
+ """
+ upload_image Upload an image to Cloudvision server
+
+ Parameters
+ ----------
+ image_path : str
+ Path to the local file to upload
+
+ Returns
+ -------
+ bool
+ True if succeeds
+ """
+ image_item = Filer(path=image_path)
+ if image_item.file_exist is False:
+ logger.error(f'File not found: {image_item.relative_path}')
+ return False
+ logger.info(f'File path for image: {image_item}')
+ if self._does_image_exist(image_name=image_item.filename):
+ logger.error("Image found in Cloudvision , Please delete it before running this script")
+ return False
+ try:
+ upload_result = self._cv_instance.api.add_image(filepath=image_item.absolute_path)
+ except Exception as e: # pylint: disable=broad-exception-caught
+ logger.error('An error occurred during upload, check CV connection')
+ logger.error(f'Exception message is: {e}')
+ return False
+ logger.debug(f'Upload Result is : {upload_result}')
+ return True
+
+ def build_image_list(self, image_list: List[str]) -> List[Any]:
+ """
+ Builds a list of the image data structures, for a given list of image names.
+ Parameters
+ ----------
+ image_list : list
+ List of software image names
+ Returns
+ -------
+        List:
+            Returns a list of images with complete data, or an empty list if any image is missing
+ """
+ internal_image_list = []
+ image_data = None
+ success = True
+
+ for entry in image_list:
+ for image in self._cv_images:
+ if image["imageFileName"] == entry:
+ image_data = image
+
+ if image_data is not None:
+ internal_image_list.append(image_data)
+ image_data = None
+ else:
+ success = False
+
+ return internal_image_list if success else []
+
+ def create_bundle(self, name: str, images_name: List[str]) -> bool:
+ """
+ create_bundle Create a bundle with a list of images.
+
+ Parameters
+ ----------
+ name : str
+ Name of the bundle
+ images_name : List[str]
+ List of images available on Cloudvision
+
+ Returns
+ -------
+ bool
+ True if succeeds
+ """
+ logger.debug(f'Init creation of an image bundle {name} with following images {images_name}')
+ all_images_present: List[bool] = []
+ self._cv_images = self.__get_images()
+ all_images_present.extend(
+ self._does_image_exist(image_name=image_name)
+ for image_name in images_name
+ )
+ # Bundle Create
+ if self._does_bundle_exist(bundle_name=name) is False:
+ logger.debug(f'Creating image bundle {name} with following images {images_name}')
+ images_data = self.build_image_list(image_list=images_name)
+            if images_data:
+                logger.debug(f'Images information: {images_data}')
+ try:
+ data = self._cv_instance.api.save_image_bundle(name=name, images=images_data)
+ except Exception as e: # pylint: disable=broad-exception-caught
+ logger.critical(f'{e}')
+ else:
+ logger.debug(data)
+ return True
+ logger.critical('No data found for images')
+ return False
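+
+
+# Usage sketch (server name, token and image path are placeholders, mirroring
+# the bin/cvp-upload script):
+#
+#   auth = CvpAuthenticationItem(server='cvp.example.org', token='xxxx', port=443)
+#   manager = CvFeatureManager(authentication=auth)
+#   if manager.upload_image('/tmp/EOS-4.28.3M.swi'):
+#       manager.create_bundle(name='EOS-4.28.3M.swi', images_name=['EOS-4.28.3M.swi'])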
diff --git a/eos_downloader/data.py b/eos_downloader/data.py
new file mode 100644
index 0000000..74f2f8e
--- /dev/null
+++ b/eos_downloader/data.py
@@ -0,0 +1,93 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+
+"""
+EOS Downloader Information to use in
+eos_downloader.object_downloader.ObjectDownloader._build_filename.
+
+Data is built from the content of the Arista XML file
+"""
+
+
+# [platform][image][version]
+DATA_MAPPING = {
+ "CloudVision": {
+ "ova": {
+ "extension": ".ova",
+ "prepend": "cvp",
+ "folder_level": 0
+ },
+ "rpm": {
+ "extension": "",
+ "prepend": "cvp-rpm-installer",
+ "folder_level": 0
+ },
+ "kvm": {
+ "extension": "-kvm.tgz",
+ "prepend": "cvp",
+ "folder_level": 0
+ },
+ "upgrade": {
+ "extension": ".tgz",
+ "prepend": "cvp-upgrade",
+ "folder_level": 0
+ },
+ },
+ "EOS": {
+ "64": {
+ "extension": ".swi",
+ "prepend": "EOS64",
+ "folder_level": 0
+ },
+ "INT": {
+ "extension": "-INT.swi",
+ "prepend": "EOS",
+ "folder_level": 1
+ },
+ "2GB-INT": {
+ "extension": "-INT.swi",
+ "prepend": "EOS-2GB",
+ "folder_level": 1
+ },
+ "cEOS": {
+ "extension": ".tar.xz",
+ "prepend": "cEOS-lab",
+ "folder_level": 0
+ },
+ "cEOS64": {
+ "extension": ".tar.xz",
+ "prepend": "cEOS64-lab",
+ "folder_level": 0
+ },
+ "vEOS": {
+ "extension": ".vmdk",
+ "prepend": "vEOS",
+ "folder_level": 0
+ },
+ "vEOS-lab": {
+ "extension": ".vmdk",
+ "prepend": "vEOS-lab",
+ "folder_level": 0
+ },
+ "EOS-2GB": {
+ "extension": ".swi",
+ "prepend": "EOS-2GB",
+ "folder_level": 0
+ },
+ "RN": {
+ "extension": "-",
+ "prepend": "RN",
+ "folder_level": 0
+ },
+ "SOURCE": {
+ "extension": "-source.tar",
+ "prepend": "EOS",
+ "folder_level": 0
+ },
+ "default": {
+ "extension": ".swi",
+ "prepend": "EOS",
+ "folder_level": 0
+ }
+ }
+}
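+
+# Example of how this mapping is consumed by ObjectDownloader._build_filename:
+# for software="EOS", image="cEOS" and version "4.27.2F", the filename is built
+# as prepend + "-" + version + extension, i.e. "cEOS-lab-4.27.2F.tar.xz".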
diff --git a/eos_downloader/download.py b/eos_downloader/download.py
new file mode 100644
index 0000000..2297b04
--- /dev/null
+++ b/eos_downloader/download.py
@@ -0,0 +1,77 @@
+# flake8: noqa: F811
+# pylint: disable=unused-argument
+# pylint: disable=too-few-public-methods
+
+"""download module"""
+
+import os.path
+import signal
+from concurrent.futures import ThreadPoolExecutor
+from threading import Event
+from typing import Iterable, Any
+
+import requests
+import rich
+from rich import console
+from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID,
+ TextColumn, TimeElapsedColumn, TransferSpeedColumn)
+
+console = rich.get_console()
+done_event = Event()
+
+
+def handle_sigint(signum: Any, frame: Any) -> None:
+ """Progress bar handler"""
+ done_event.set()
+
+
+signal.signal(signal.SIGINT, handle_sigint)
+
+
+class DownloadProgressBar():
+ """
+ Object to manage Download process with Progress Bar from Rich
+ """
+
+ def __init__(self) -> None:
+ """
+ Class Constructor
+ """
+ self.progress = Progress(
+ TextColumn("💾 Downloading [bold blue]{task.fields[filename]}", justify="right"),
+ BarColumn(bar_width=None),
+ "[progress.percentage]{task.percentage:>3.1f}%",
+ "•",
+ TransferSpeedColumn(),
+ "•",
+ DownloadColumn(),
+ "•",
+ TimeElapsedColumn(),
+ "•",
+ console=console
+ )
+
+ def _copy_url(self, task_id: TaskID, url: str, path: str, block_size: int = 1024) -> bool:
+ """Copy data from a url to a local file."""
+ response = requests.get(url, stream=True, timeout=5)
+ # This will break if the response doesn't contain content length
+ self.progress.update(task_id, total=int(response.headers['Content-Length']))
+ with open(path, "wb") as dest_file:
+ self.progress.start_task(task_id)
+ for data in response.iter_content(chunk_size=block_size):
+ dest_file.write(data)
+ self.progress.update(task_id, advance=len(data))
+ if done_event.is_set():
+ return True
+ # console.print(f"Downloaded {path}")
+ return False
+
+ def download(self, urls: Iterable[str], dest_dir: str) -> None:
+ """Download multuple files to the given directory."""
+ with self.progress:
+ with ThreadPoolExecutor(max_workers=4) as pool:
+ for url in urls:
+ filename = url.split("/")[-1].split('?')[0]
+ dest_path = os.path.join(dest_dir, filename)
+ task_id = self.progress.add_task("download", filename=filename, start=False)
+ pool.submit(self._copy_url, task_id, url, dest_path)
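+
+
+# Usage sketch (the URL and destination directory are placeholders):
+#
+#   downloader = DownloadProgressBar()
+#   downloader.download(urls=["https://example.org/EOS-4.28.3M.swi"], dest_dir="/tmp")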
diff --git a/eos_downloader/eos.py b/eos_downloader/eos.py
new file mode 100644
index 0000000..e5f3670
--- /dev/null
+++ b/eos_downloader/eos.py
@@ -0,0 +1,177 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# flake8: noqa: F811
+
+"""
+Specific EOS inheritance from object_downloader
+"""
+
+import os
+import xml.etree.ElementTree as ET
+from typing import List, Union
+
+import rich
+from loguru import logger
+from rich import console
+
+from eos_downloader.models.version import BASE_BRANCH_STR, BASE_VERSION_STR, REGEX_EOS_VERSION, RTYPE_FEATURE, EosVersion
+from eos_downloader.object_downloader import ObjectDownloader
+
+# logger = logging.getLogger(__name__)
+
+console = rich.get_console()
+
+class EOSDownloader(ObjectDownloader):
+ """
+ EOSDownloader Object to download EOS images from Arista.com website
+
+    Supercharge ObjectDownloader to support EOS-specific actions
+
+ Parameters
+ ----------
+ ObjectDownloader : ObjectDownloader
+ Base object
+ """
+
+ eos_versions: Union[List[EosVersion], None] = None
+
+ @staticmethod
+ def _disable_ztp(file_path: str) -> None:
+ """
+ _disable_ztp Method to disable ZTP in EOS image
+
+ Create a file in the EOS image to disable ZTP process during initial boot
+
+ Parameters
+ ----------
+ file_path : str
+ Path where EOS image is located
+ """
+ logger.info('Mounting volume to disable ZTP')
+ console.print('🚀 Mounting volume to disable ZTP')
+ raw_folder = os.path.join(file_path, "raw")
+ os.system(f"rm -rf {raw_folder}")
+ os.system(f"mkdir -p {raw_folder}")
+ os.system(
+ f'guestmount -a {os.path.join(file_path, "hda.qcow2")} -m /dev/sda2 {os.path.join(file_path, "raw")}')
+ ztp_file = os.path.join(file_path, 'raw/zerotouch-config')
+ with open(ztp_file, 'w', encoding='ascii') as zfile:
+ zfile.write('DISABLE=True')
+ logger.info(f'Unmounting volume in {file_path}')
+ os.system(f"guestunmount {os.path.join(file_path, 'raw')}")
+ os.system(f"rm -rf {os.path.join(file_path, 'raw')}")
+ logger.info(f"Volume has been successfully unmounted at {file_path}")
+
+ def _parse_xml_for_version(self,root_xml: ET.ElementTree, xpath: str = './/dir[@label="Active Releases"]/dir/dir/[@label]') -> List[EosVersion]:
+ """
+ Extract list of available EOS versions from Arista.com website
+
+ Create a list of EosVersion object for all versions available on Arista.com
+
+ Args:
+ root_xml (ET.ElementTree): XML file with all versions available
+ xpath (str, optional): XPATH to use to extract EOS version. Defaults to './/dir[@label="Active Releases"]/dir/dir/[@label]'.
+
+ Returns:
+ List[EosVersion]: List of EosVersion representing all available EOS versions
+ """
+ # XPATH: .//dir[@label="Active Releases"]/dir/dir/[@label]
+ if self.eos_versions is None:
+ logger.debug(f'Using xpath {xpath}')
+ eos_versions = []
+ for node in root_xml.findall(xpath):
+ if 'label' in node.attrib and node.get('label') is not None:
+ label = node.get('label')
+ if label is not None and REGEX_EOS_VERSION.match(label):
+ eos_version = EosVersion.from_str(label)
+ eos_versions.append(eos_version)
+ logger.debug(f"Found {label} - {eos_version}")
+ logger.debug(f'List of versions found on arista.com is: {eos_versions}')
+ self.eos_versions = eos_versions
+ else:
+            logger.debug('EOS version list already available, skipping parsing')
+ return self.eos_versions
+
+ def _get_branches(self, with_rtype: str = RTYPE_FEATURE) -> List[str]:
+ """
+ Extract all EOS branches available from arista.com
+
+ Call self._parse_xml_for_version and then build list of available branches
+
+ Args:
+            with_rtype (str, optional): Release type to find. Can be M or F, defaults to F
+
+ Returns:
+            List[str]: A list of strings representing all available EOS branches
+ """
+ root = self._get_folder_tree()
+ versions = self._parse_xml_for_version(root_xml=root)
+ return list({version.branch for version in versions if version.rtype == with_rtype})
+
+ def latest_branch(self, rtype: str = RTYPE_FEATURE) -> EosVersion:
+ """
+ Get latest branch from semver standpoint
+
+ Args:
+ rtype (str, optional): Release type to find. Can be M or F, default to F
+
+ Returns:
+ EosVersion: Latest Branch object
+ """
+ selected_branch = EosVersion.from_str(BASE_BRANCH_STR)
+ for branch in self._get_branches(with_rtype=rtype):
+ branch = EosVersion.from_str(branch)
+ if branch > selected_branch:
+ selected_branch = branch
+ return selected_branch
+
+ def get_eos_versions(self, branch: Union[str,None] = None, rtype: Union[str,None] = None) -> List[EosVersion]:
+ """
+ Get a list of available EOS version available on arista.com
+
+        If a branch is provided, only versions in this branch are listed.
+ Otherwise, all versions are provided.
+
+ Args:
+ branch (str, optional): An EOS branch to filter. Defaults to None.
+ rtype (str, optional): Release type to find. Can be M or F, default to F
+
+ Returns:
+ List[EosVersion]: A list of versions available
+ """
+ root = self._get_folder_tree()
+ result = []
+ for version in self._parse_xml_for_version(root_xml=root):
+ if branch is None and (version.rtype == rtype or rtype is None):
+ result.append(version)
+ elif branch is not None and version.is_in_branch(branch) and version.rtype == rtype:
+ result.append(version)
+ return result
+
+ def latest_eos(self, branch: Union[str,None] = None, rtype: str = RTYPE_FEATURE) -> EosVersion:
+ """
+ Get latest version of EOS
+
+        If a branch is provided, only versions in this branch are listed.
+ Otherwise, all versions are provided.
+ You can select what type of version to consider: M or F
+
+ Args:
+ branch (str, optional): An EOS branch to filter. Defaults to None.
+            rtype (str, optional): An EOS version type to filter. Can be M or F. Defaults to F.
+
+ Returns:
+ EosVersion: latest version selected
+ """
+ selected_version = EosVersion.from_str(BASE_VERSION_STR)
+ if branch is None:
+ latest_branch = self.latest_branch(rtype=rtype)
+ else:
+ latest_branch = EosVersion.from_str(branch)
+ for version in self.get_eos_versions(branch=str(latest_branch.branch), rtype=rtype):
+ if version > selected_version:
+ if rtype is not None and version.rtype == rtype:
+ selected_version = version
+ if rtype is None:
+ selected_version = version
+ return selected_version
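+
+
+# Usage sketch (the token value is a placeholder):
+#
+#   downloader = EOSDownloader(image='cEOS', software='EOS', version='unset',
+#                              token='xxxx', hash_method='sha512sum')
+#   downloader.authenticate()
+#   latest = downloader.latest_eos(branch='4.28', rtype='M')
+#   downloader.version = str(latest)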
diff --git a/eos_downloader/models/__init__.py b/eos_downloader/models/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/eos_downloader/models/__init__.py
diff --git a/eos_downloader/models/version.py b/eos_downloader/models/version.py
new file mode 100644
index 0000000..14448c1
--- /dev/null
+++ b/eos_downloader/models/version.py
@@ -0,0 +1,272 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+
+"""Module for EOS version management"""
+
+from __future__ import annotations
+
+import re
+import typing
+from typing import Any, Optional
+
+from loguru import logger
+from pydantic import BaseModel
+
+from eos_downloader.tools import exc_to_str
+
+# logger = logging.getLogger(__name__)
+
+BASE_VERSION_STR = '4.0.0F'
+BASE_BRANCH_STR = '4.0'
+
+RTYPE_FEATURE = 'F'
+RTYPE_MAINTENANCE = 'M'
+RTYPES = [RTYPE_FEATURE, RTYPE_MAINTENANCE]
+
+# Regular Expression to capture multiple EOS version format
+# 4.24
+# 4.23.0
+# 4.21.1M
+# 4.28.10.F
+# 4.28.6.1M
+REGEX_EOS_VERSION = re.compile(r"^.*(?P<major>4)\.(?P<minor>\d{1,2})\.(?P<patch>\d{1,2})(?P<other>\.\d*)*(?P<rtype>[M,F])*$")
+REGEX_EOS_BRANCH = re.compile(r"^.*(?P<major>4)\.(?P<minor>\d{1,2})(\.?P<patch>\d)*(\.\d)*(?P<rtype>[M,F])*$")
+
+
+class EosVersion(BaseModel):
+ """
+ EosVersion object to play with version management in code
+
+    Since EOS does not strictly follow the semver approach, this class mimics some functions from the semver library for Arista EOS versions
+ It is based on Pydantic and provides helpers for comparison:
+
+ Examples:
+ >>> eos_version_str = '4.23.2F'
+ >>> eos_version = EosVersion.from_str(eos_version_str)
+ >>> print(f'str representation is: {str(eos_version)}')
+ str representation is: 4.23.2F
+
+ >>> other_version = EosVersion.from_str(other_version_str)
+ >>> print(f'eos_version < other_version: {eos_version < other_version}')
+ eos_version < other_version: True
+
+ >>> print(f'Is eos_version match("<=4.23.3M"): {eos_version.match("<=4.23.3M")}')
+ Is eos_version match("<=4.23.3M"): True
+
+ >>> print(f'Is eos_version in branch 4.23: {eos_version.is_in_branch("4.23.0")}')
+ Is eos_version in branch 4.23: True
+
+ Args:
+ BaseModel (Pydantic): Pydantic Base Model
+ """
+ major: int = 4
+ minor: int = 0
+ patch: int = 0
+ rtype: Optional[str] = 'F'
+ other: Any
+
+ @classmethod
+ def from_str(cls, eos_version: str) -> EosVersion:
+ """
+ Class constructor from a string representing EOS version
+
+        Use regular expressions to extract fields from the string.
+ It supports following formats:
+ - 4.24
+ - 4.23.0
+ - 4.21.1M
+ - 4.28.10.F
+ - 4.28.6.1M
+
+ Args:
+ eos_version (str): EOS version in str format
+
+ Returns:
+ EosVersion object
+ """
+ logger.debug(f'receiving version: {eos_version}')
+ if REGEX_EOS_VERSION.match(eos_version):
+ matches = REGEX_EOS_VERSION.match(eos_version)
+ # assert matches is not None
+ assert matches is not None
+ return cls(**matches.groupdict())
+ if REGEX_EOS_BRANCH.match(eos_version):
+ matches = REGEX_EOS_BRANCH.match(eos_version)
+ # assert matches is not None
+ assert matches is not None
+ return cls(**matches.groupdict())
+        logger.error(f'Error occurred with {eos_version}')
+ return EosVersion()
+
+ @property
+ def branch(self) -> str:
+ """
+ Extract branch of version
+
+ Returns:
+ str: branch from version
+ """
+ return f'{self.major}.{self.minor}'
+
+ def __str__(self) -> str:
+ """
+ Standard str representation
+
+ Return string for EOS version like 4.23.3M
+
+ Returns:
+ str: A standard EOS version string representing <MAJOR>.<MINOR>.<PATCH><RTYPE>
+ """
+ if self.other is None:
+ return f'{self.major}.{self.minor}.{self.patch}{self.rtype}'
+ return f'{self.major}.{self.minor}.{self.patch}{self.other}{self.rtype}'
+
+ def _compare(self, other: EosVersion) -> float:
+ """
+ An internal comparison function to compare 2 EosVersion objects
+
+ Do a deep comparison from Major to Release Type
+ The return value is
+ - negative if ver1 < ver2,
+ - zero if ver1 == ver2
+ - strictly positive if ver1 > ver2
+
+ Args:
+ other (EosVersion): An EosVersion to compare with this object
+
+ Raises:
+ ValueError: Raise ValueError if input is incorrect type
+
+ Returns:
+ float: -1 if ver1 < ver2, 0 if ver1 == ver2, 1 if ver1 > ver2
+ """
+ if not isinstance(other, EosVersion):
+ raise ValueError(f'could not compare {other} as it is not an EosVersion object')
+ comparison_flag: float = 0
+ logger.warning(f'current version {self.__str__()} - other {str(other)}') # pylint: disable = unnecessary-dunder-call
+ for key, _ in self.dict().items():
+            if comparison_flag == 0 and (self.dict()[key] is None or other.dict()[key] is None):
+ logger.debug(f'{key}: local None - remote None')
+ logger.debug(f'{key}: local {self.dict()} - remote {other.dict()}')
+ return comparison_flag
+ logger.debug(f'{key}: local {self.dict()[key]} - remote {other.dict()[key]}')
+ if comparison_flag == 0 and self.dict()[key] < other.dict()[key]:
+ comparison_flag = -1
+ if comparison_flag == 0 and self.dict()[key] > other.dict()[key]:
+ comparison_flag = 1
+ if comparison_flag != 0:
+ logger.info(f'comparison result is {comparison_flag}')
+ return comparison_flag
+ logger.info(f'comparison result is {comparison_flag}')
+ return comparison_flag
+
+ @typing.no_type_check
+ def __eq__(self, other):
+ """ Implement __eq__ function (==) """
+ return self._compare(other) == 0
+
+ @typing.no_type_check
+ def __ne__(self, other):
+ # type: ignore
+ """ Implement __nw__ function (!=) """
+ return self._compare(other) != 0
+
+ @typing.no_type_check
+ def __lt__(self, other):
+ # type: ignore
+ """ Implement __lt__ function (<) """
+ return self._compare(other) < 0
+
+ @typing.no_type_check
+ def __le__(self, other):
+ # type: ignore
+ """ Implement __le__ function (<=) """
+ return self._compare(other) <= 0
+
+ @typing.no_type_check
+ def __gt__(self, other):
+ # type: ignore
+ """ Implement __gt__ function (>) """
+ return self._compare(other) > 0
+
+ @typing.no_type_check
+ def __ge__(self, other):
+ # type: ignore
+ """ Implement __ge__ function (>=) """
+ return self._compare(other) >= 0
+
+ def match(self, match_expr: str) -> bool:
+ """
+ Compare self to match a match expression.
+
+ Example:
+ >>> eos_version.match("<=4.23.3M")
+ True
+ >>> eos_version.match("==4.23.3M")
+ False
+
+ Args:
+ match_expr (str): optional operator and version; valid operators are
+              ``<`` less than
+              ``>`` greater than
+              ``>=`` greater than or equal to
+              ``<=`` less than or equal to
+              ``==`` equal
+              ``!=`` not equal
+
+ Raises:
+            ValueError: If match_expr is not in the expected <op><ver> format
+
+ Returns:
+ bool: True if the expression matches the version, otherwise False
+ """
+ prefix = match_expr[:2]
+ if prefix in (">=", "<=", "==", "!="):
+ match_version = match_expr[2:]
+ elif prefix and prefix[0] in (">", "<"):
+ prefix = prefix[0]
+ match_version = match_expr[1:]
+ elif match_expr and match_expr[0] in "0123456789":
+ prefix = "=="
+ match_version = match_expr
+ else:
+ raise ValueError(
+ "match_expr parameter should be in format <op><ver>, "
+ "where <op> is one of "
+ "['<', '>', '==', '<=', '>=', '!=']. "
+ f"You provided: {match_expr}"
+ )
+ logger.debug(f'work on comparison {prefix} with base release {match_version}')
+ possibilities_dict = {
+ ">": (1,),
+ "<": (-1,),
+ "==": (0,),
+ "!=": (-1, 1),
+ ">=": (0, 1),
+ "<=": (-1, 0),
+ }
+ possibilities = possibilities_dict[prefix]
+ cmp_res = self._compare(EosVersion.from_str(match_version))
+
+ return cmp_res in possibilities
+
+ def is_in_branch(self, branch_str: str) -> bool:
+ """
+ Check if current version is part of a branch version
+
+ Comparison is done across MAJOR and MINOR
+
+ Args:
+            branch_str (str): a string for EOS branch. It supports the following formats: 4.23 or 4.23.0
+
+ Returns:
+ bool: True if current version is in provided branch, otherwise False
+ """
+ try:
+ logger.debug(f'reading branch str:{branch_str}')
+ branch = EosVersion.from_str(branch_str)
+ except Exception as error: # pylint: disable = broad-exception-caught
+ logger.error(exc_to_str(error))
+ else:
+ return self.major == branch.major and self.minor == branch.minor
+ return False
diff --git a/eos_downloader/object_downloader.py b/eos_downloader/object_downloader.py
new file mode 100644
index 0000000..0420acb
--- /dev/null
+++ b/eos_downloader/object_downloader.py
@@ -0,0 +1,513 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# flake8: noqa: F811
+# pylint: disable=too-many-instance-attributes
+# pylint: disable=too-many-arguments
+
+"""
+eos_downloader class definition
+"""
+
+from __future__ import (absolute_import, division, print_function,
+ unicode_literals, annotations)
+
+import base64
+import glob
+import hashlib
+import json
+import os
+import sys
+import xml.etree.ElementTree as ET
+from typing import Union
+
+import requests
+import rich
+from loguru import logger
+from rich import console
+from tqdm import tqdm
+
+from eos_downloader import (ARISTA_DOWNLOAD_URL, ARISTA_GET_SESSION,
+ ARISTA_SOFTWARE_FOLDER_TREE, EVE_QEMU_FOLDER_PATH,
+ MSG_INVALID_DATA, MSG_TOKEN_EXPIRED)
+from eos_downloader.data import DATA_MAPPING
+from eos_downloader.download import DownloadProgressBar
+
+# logger = logging.getLogger(__name__)
+
+console = rich.get_console()
+
+
+class ObjectDownloader():
+ """
+ ObjectDownloader Generic Object to download from Arista.com
+ """
+ def __init__(self, image: str, version: str, token: str, software: str = 'EOS', hash_method: str = 'md5sum'):
+ """
+ __init__ Class constructor
+
+ generic class constructor
+
+ Parameters
+ ----------
+ image : str
+ Type of image to download
+ version : str
+ Version of the package to download
+ token : str
+ Arista API token
+ software : str, optional
+            Software family to download (EOS or CloudVision), by default 'EOS'
+ hash_method : str, optional
+ Hash protocol to use to check download, by default 'md5sum'
+ """
+ self.software = software
+ self.image = image
+ self._version = version
+ self.token = token
+ self.folder_level = 0
+ self.session_id = None
+ self.filename = self._build_filename()
+ self.hash_method = hash_method
+ self.timeout = 5
+ # Logging
+ logger.debug(f'Filename built by _build_filename is {self.filename}')
+
+ def __str__(self) -> str:
+ return f'{self.software} - {self.image} - {self.version}'
+
+ # def __repr__(self):
+ # return str(self.__dict__)
+
+ @property
+ def version(self) -> str:
+ """Get version."""
+ return self._version
+
+ @version.setter
+ def version(self, value: str) -> None:
+ """Set version."""
+ self._version = value
+ self.filename = self._build_filename()
+
+ # ------------------------------------------------------------------------ #
+ # Internal METHODS
+ # ------------------------------------------------------------------------ #
+
+ def _build_filename(self) -> str:
+ """
+ _build_filename Helper to build filename to search on arista.com
+
+ Returns
+ -------
+ str:
+ Filename to search for on Arista.com
+ """
+ logger.info('start build')
+ if self.software in DATA_MAPPING:
+ logger.info(f'software in data mapping: {self.software}')
+ if self.image in DATA_MAPPING[self.software]:
+ logger.info(f'image in data mapping: {self.image}')
+ return f"{DATA_MAPPING[self.software][self.image]['prepend']}-{self.version}{DATA_MAPPING[self.software][self.image]['extension']}"
+ return f"{DATA_MAPPING[self.software]['default']['prepend']}-{self.version}{DATA_MAPPING[self.software]['default']['extension']}"
+ raise ValueError(f'Incorrect value for software {self.software}')
+
+ def _parse_xml_for_path(self, root_xml: ET.ElementTree, xpath: str, search_file: str) -> str:
+ # sourcery skip: remove-unnecessary-cast
+ """
+ _parse_xml Read and extract data from XML using XPATH
+
+        Get all candidate nodes using XPATH and then select the node that matches search_file
+
+ Parameters
+ ----------
+ root_xml : ET.ElementTree
+ XML document
+ xpath : str
+ XPATH expression to filter XML
+ search_file : str
+ Filename to search for
+
+ Returns
+ -------
+ str
+ File Path on Arista server side
+ """
+ logger.debug(f'Using xpath {xpath}')
+ logger.debug(f'Search for file {search_file}')
+ console.print(f'🔎 Searching file {search_file}')
+ for node in root_xml.findall(xpath):
+ # logger.debug('Found {}', node.text)
+ if str(node.text).lower() == search_file.lower():
+ path = node.get('path')
+ console.print(f' -> Found file at {path}')
+ logger.info(f'Found {node.text} at {node.get("path")}')
+ return str(node.get('path')) if node.get('path') is not None else ''
+ logger.error(f'Requested file ({self.filename}) not found !')
+ return ''
+
+ def _get_hash(self, file_path: str) -> str:
+ """
+ _get_hash Download HASH file from Arista server
+
+ Parameters
+ ----------
+ file_path : str
+ Path of the HASH file
+
+ Returns
+ -------
+ str
+ Hash string read from HASH file downloaded from Arista.com
+ """
+ remote_hash_file = self._get_remote_hashpath(hash_method=self.hash_method)
+ hash_url = self._get_url(remote_file_path=remote_hash_file)
+ # hash_downloaded = self._download_file_raw(url=hash_url, file_path=file_path + "/" + os.path.basename(remote_hash_file))
+ dl_rich_progress_bar = DownloadProgressBar()
+ dl_rich_progress_bar.download(urls=[hash_url], dest_dir=file_path)
+ hash_downloaded = f"{file_path}/{os.path.basename(remote_hash_file)}"
+ hash_content = 'unset'
+ with open(hash_downloaded, 'r', encoding='utf-8') as f:
+ hash_content = f.read()
+ return hash_content.split(' ')[0]
+
+ @staticmethod
+ def _compute_hash_md5sum(file: str, hash_expected: str) -> bool:
+ """
+ _compute_hash_md5sum Compare MD5 sum
+
+ Do comparison between local md5 of the file and value provided by arista.com
+
+ Parameters
+ ----------
+ file : str
+ Local file to use for MD5 sum
+ hash_expected : str
+ MD5 from arista.com
+
+ Returns
+ -------
+ bool
+ True if both are equal, False if not
+ """
+ hash_md5 = hashlib.md5()
+ with open(file, "rb") as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ hash_md5.update(chunk)
+ if hash_md5.hexdigest() == hash_expected:
+ return True
+ logger.warning(f'Downloaded file is corrupt: local md5 ({hash_md5.hexdigest()}) is different to md5 from arista ({hash_expected})')
+ return False
+
+ @staticmethod
+ def _compute_hash_sh512sum(file: str, hash_expected: str) -> bool:
+ """
+ _compute_hash_sh512sum Compare SHA512 sum
+
+ Do comparison between local sha512 of the file and value provided by arista.com
+
+ Parameters
+ ----------
+ file : str
+            Local file to use for SHA512 sum
+ hash_expected : str
+ SHA512 from arista.com
+
+ Returns
+ -------
+ bool
+ True if both are equal, False if not
+ """
+ hash_sha512 = hashlib.sha512()
+ with open(file, "rb") as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ hash_sha512.update(chunk)
+ if hash_sha512.hexdigest() == hash_expected:
+ return True
+ logger.warning(f'Downloaded file is corrupt: local sha512 ({hash_sha512.hexdigest()}) is different to sha512 from arista ({hash_expected})')
+ return False
+
+ def _get_folder_tree(self) -> ET.ElementTree:
+ """
+ _get_folder_tree Download XML tree from Arista server
+
+ Returns
+ -------
+ ET.ElementTree
+ XML document
+ """
+ if self.session_id is None:
+ self.authenticate()
+ jsonpost = {'sessionCode': self.session_id}
+ result = requests.post(ARISTA_SOFTWARE_FOLDER_TREE, data=json.dumps(jsonpost), timeout=self.timeout)
+ try:
+ folder_tree = result.json()["data"]["xml"]
+ return ET.ElementTree(ET.fromstring(folder_tree))
+ except KeyError as error:
+ logger.error(MSG_INVALID_DATA)
+ logger.error(f'Server returned: {error}')
+ console.print(f'❌ {MSG_INVALID_DATA}', style="bold red")
+ sys.exit(1)
+
+ def _get_remote_filepath(self) -> str:
+ """
+ _get_remote_filepath Helper to get path of the file to download
+
+ Set XPATH and return result of _parse_xml for the file to download
+
+ Returns
+ -------
+ str
+ Remote path of the file to download
+ """
+ root = self._get_folder_tree()
+ logger.debug("GET XML content from ARISTA.com")
+ xpath = f'.//dir[@label="{self.software}"]//file'
+ return self._parse_xml_for_path(root_xml=root, xpath=xpath, search_file=self.filename)
+
+ def _get_remote_hashpath(self, hash_method: str = 'md5sum') -> str:
+ """
+ _get_remote_hashpath Helper to get path of the hash's file to download
+
+ Set XPATH and return result of _parse_xml for the file to download
+
+ Returns
+ -------
+ str
+ Remote path of the hash's file to download
+ """
+ root = self._get_folder_tree()
+ logger.debug("GET XML content from ARISTA.com")
+ xpath = f'.//dir[@label="{self.software}"]//file'
+ return self._parse_xml_for_path(
+ root_xml=root,
+ xpath=xpath,
+ search_file=f'{self.filename}.{hash_method}',
+ )
+
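+    # For reference, with software='EOS' and filename='EOS-4.26.3M.swi' the XPath above
+    # resolves to './/dir[@label="EOS"]//file' and the matched entries look like
+    # '/support/download/EOS-USA/Active Releases/4.26/EOS-4.26.3M/EOS-4.26.3M.swi'
+    # (hash file: same name suffixed with '.md5sum' or '.sha512sum'). Values are
+    # illustrative, taken from the test dataset.
+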
+ def _get_url(self, remote_file_path: str) -> str:
+ """
+ _get_url Get URL to use for downloading file from Arista server
+
+ Send remote_file_path to get correct URL to use for download
+
+ Parameters
+ ----------
+ remote_file_path : str
+ Filepath from XML to use to get correct download link
+
+ Returns
+ -------
+ str
+ URL link to use for download
+ """
+ if self.session_id is None:
+ self.authenticate()
+ jsonpost = {'sessionCode': self.session_id, 'filePath': remote_file_path}
+ result = requests.post(ARISTA_DOWNLOAD_URL, data=json.dumps(jsonpost), timeout=self.timeout)
+ if 'data' in result.json() and 'url' in result.json()['data']:
+ # logger.debug('URL to download file is: {}', result.json())
+ return result.json()["data"]["url"]
+ logger.critical(f'Server returns following message: {result.json()}')
+ return ''
+
+ @staticmethod
+ def _download_file_raw(url: str, file_path: str) -> str:
+ """
+        _download_file_raw Helper to download a file from Arista.com
+
+        Stream the file from the provided URL and report progress with a tqdm progress bar.
+
+ Parameters
+ ----------
+ url : str
+ URL provided by server for remote_file_path
+ file_path : str
+ Location where to save local file
+
+ Returns
+ -------
+ str
+ File path
+ """
+ chunkSize = 1024
+ r = requests.get(url, stream=True, timeout=5)
+ with open(file_path, 'wb') as f:
+ pbar = tqdm(unit="B", total=int(r.headers['Content-Length']), unit_scale=True, unit_divisor=1024)
+ for chunk in r.iter_content(chunk_size=chunkSize):
+ if chunk:
+ pbar.update(len(chunk))
+ f.write(chunk)
+ return file_path
+
+ def _download_file(self, file_path: str, filename: str, rich_interface: bool = True) -> Union[None, str]:
+ remote_file_path = self._get_remote_filepath()
+ logger.info(f'File found on arista server: {remote_file_path}')
+ file_url = self._get_url(remote_file_path=remote_file_path)
+        if file_url:  # _get_url returns an empty string on failure
+ if not rich_interface:
+ return self._download_file_raw(url=file_url, file_path=os.path.join(file_path, filename))
+ rich_downloader = DownloadProgressBar()
+ rich_downloader.download(urls=[file_url], dest_dir=file_path)
+ return os.path.join(file_path, filename)
+        logger.error(f'Cannot download file {filename} to {file_path}')
+ return None
+
+ @staticmethod
+ def _create_destination_folder(path: str) -> None:
+ # os.makedirs(path, mode, exist_ok=True)
+ os.system(f'mkdir -p {path}')
+
+ @staticmethod
+ def _disable_ztp(file_path: str) -> None:
+ pass
+
+ # ------------------------------------------------------------------------ #
+ # Public METHODS
+ # ------------------------------------------------------------------------ #
+
+ def authenticate(self) -> bool:
+ """
+ authenticate Authenticate user on Arista.com server
+
+ Send API token and get a session-id from remote server.
+ Session-id will be used by all other functions.
+
+ Returns
+ -------
+ bool
+            True if authentication succeeds, False in all other situations.
+ """
+ credentials = (base64.b64encode(self.token.encode())).decode("utf-8")
+ session_code_url = ARISTA_GET_SESSION
+ jsonpost = {'accessToken': credentials}
+
+ result = requests.post(session_code_url, data=json.dumps(jsonpost), timeout=self.timeout)
+
+        if result.json()["status"]["message"] in ['Access token expired', 'Invalid access token']:
+ console.print(f'❌ {MSG_TOKEN_EXPIRED}', style="bold red")
+ logger.error(MSG_TOKEN_EXPIRED)
+ return False
+
+ try:
+ if 'data' in result.json():
+ self.session_id = result.json()["data"]["session_code"]
+ logger.info('Authenticated on arista.com')
+ return True
+ logger.debug(f'{result.json()}')
+ return False
+ except KeyError as error_arista:
+ logger.error(f'Error: {error_arista}')
+ sys.exit(1)
+
+ def download_local(self, file_path: str, checksum: bool = False) -> bool:
+ # sourcery skip: move-assign
+ """
+ download_local Entrypoint for local download feature
+
+        Run the local download workflow:
+ - Get remote file path
+ - Get URL from Arista.com
+ - Download file
+ - Do HASH comparison (optional)
+
+ Parameters
+ ----------
+ file_path : str
+ Local path to save downloaded file
+ checksum : bool, optional
+ Execute checksum or not, by default False
+
+ Returns
+ -------
+ bool
+ True if everything went well, False if any problem appears
+ """
+ file_downloaded = str(self._download_file(file_path=file_path, filename=self.filename))
+
+ # Check file HASH
+ hash_result = False
+ if checksum:
+ logger.info('🚀 Running checksum validation')
+ console.print('🚀 Running checksum validation')
+ if self.hash_method == 'md5sum':
+ hash_expected = self._get_hash(file_path=file_path)
+ hash_result = self._compute_hash_md5sum(file=file_downloaded, hash_expected=hash_expected)
+ elif self.hash_method == 'sha512sum':
+ hash_expected = self._get_hash(file_path=file_path)
+ hash_result = self._compute_hash_sh512sum(file=file_downloaded, hash_expected=hash_expected)
+ if not hash_result:
+ logger.error('Downloaded file is corrupted, please check your connection')
+ console.print('❌ Downloaded file is corrupted, please check your connection')
+ return False
+ logger.info('Downloaded file is correct.')
+ console.print('✅ Downloaded file is correct.')
+ return True
+
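+    # Illustrative call, mirroring the test suite (the token value is a placeholder):
+    #   downloader = EOSDownloader(image='EOS', software='EOS', version='4.26.3M',
+    #                              token='<arista-token>', hash_method='sha512sum')
+    #   downloader.download_local(file_path='.', checksum=True)
+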
+ def provision_eve(self, noztp: bool = False, checksum: bool = True) -> None:
+ # pylint: disable=unused-argument
+ """
+ provision_eve Entrypoint for EVE-NG download and provisioning
+
+ Do following actions:
+ - Get remote file path
+ - Get URL from file path
+ - Download file
+ - Convert file to qcow2 format
+ - Create new version to EVE-NG
+ - Disable ZTP (optional)
+
+ Parameters
+ ----------
+ noztp : bool, optional
+ Flag to deactivate ZTP in EOS image, by default False
+ checksum : bool, optional
+ Flag to ask for hash validation, by default True
+ """
+        # Build image name to use in folder path
+        # (str.rstrip strips a set of characters, not a suffix, so remove '.vmdk' explicitly)
+        eos_image_name = self.filename.lower()
+        if eos_image_name.endswith('.vmdk'):
+            eos_image_name = eos_image_name[:-len('.vmdk')]
+ if noztp:
+ eos_image_name = f'{eos_image_name}-noztp'
+ # Create full path for EVE-NG
+ file_path = os.path.join(EVE_QEMU_FOLDER_PATH, eos_image_name.rstrip())
+ # Create folders in filesystem
+ self._create_destination_folder(path=file_path)
+
+ # Download file to local destination
+ file_downloaded = self._download_file(
+ file_path=file_path, filename=self.filename)
+
+ # Convert to QCOW2 format
+ file_qcow2 = os.path.join(file_path, "hda.qcow2")
+ logger.info('Converting VMDK to QCOW2 format')
+ console.print('🚀 Converting VMDK to QCOW2 format...')
+
+ os.system(f'$(which qemu-img) convert -f vmdk -O qcow2 {file_downloaded} {file_qcow2}')
+
+ logger.info('Applying unl_wrapper to fix permissions')
+ console.print('Applying unl_wrapper to fix permissions')
+
+ os.system('/opt/unetlab/wrappers/unl_wrapper -a fixpermissions')
+ os.system(f'rm -f {file_downloaded}')
+
+ if noztp:
+ self._disable_ztp(file_path=file_path)
+
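+    # Illustrative call on an EVE-NG host (qemu-img and unl_wrapper are expected, as used above):
+    #   downloader.provision_eve(noztp=True, checksum=False)
+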
+ def docker_import(self, image_name: str = "arista/ceos") -> None:
+ """
+ Import docker container to your docker server.
+
+ Import downloaded container to your local docker engine.
+
+        Args:
+            image_name (str, optional): Image name to use. Defaults to "arista/ceos".
+ """
+ docker_image = f'{image_name}:{self.version}'
+ logger.info(f'Importing image {self.filename} to {docker_image}')
+ console.print(f'🚀 Importing image {self.filename} to {docker_image}')
+ os.system(f'$(which docker) import {self.filename} {docker_image}')
+ for filename in glob.glob(f'{self.filename}*'):
+ try:
+ os.remove(filename)
+ except FileNotFoundError:
+ console.print(f'File not found: {filename}')
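+
+    # Illustrative call (the image archive must have been fetched beforehand, e.g. with download_local):
+    #   downloader.docker_import(image_name='arista/ceos')
+    #   # runs `docker import <downloaded archive> arista/ceos:<version>` and removes the local archive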
diff --git a/eos_downloader/tools.py b/eos_downloader/tools.py
new file mode 100644
index 0000000..a0f971a
--- /dev/null
+++ b/eos_downloader/tools.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+
+"""Module for tools related to ardl"""
+
+
+def exc_to_str(exception: Exception) -> str:
+ """
+ Helper function to parse Exceptions
+ """
+ return (
+ f"{type(exception).__name__}{f' ({str(exception)})' if str(exception) else ''}"
+ )
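+
+
+# Example (illustrative):
+#   exc_to_str(ValueError("invalid token"))  ->  "ValueError (invalid token)"
+#   exc_to_str(ValueError())                 ->  "ValueError"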
diff --git a/pylintrc b/pylintrc
new file mode 100644
index 0000000..a9fbc4a
--- /dev/null
+++ b/pylintrc
@@ -0,0 +1,25 @@
+[MESSAGES CONTROL]
+disable=
+ invalid-name,
+ logging-fstring-interpolation,
+ fixme
+
+[BASIC]
+good-names=runCmds, i, y, t, c, x, e, fd, ip, v
+
+[DESIGN]
+max-statements=61
+max-returns=8
+max-locals=23
+
+[FORMAT]
+max-line-length=165
+max-module-lines=1700
+
+[SIMILARITIES]
+# making similarity lines limit a bit higher than default 4
+min-similarity-lines=10
+
+[MAIN]
+load-plugins=pylint_pydantic
+extension-pkg-whitelist=pydantic \ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..2b510e3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,189 @@
+# content of pyproject.toml
+
+[build-system]
+requires = ["setuptools>=64.0.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "eos_downloader"
+version = "v0.8.1-dev1"
+readme = "README.md"
+authors = [{ name = "Thomas Grimonet", email = "thomas.grimonet@gmail.com" }]
+maintainers = [
+ { name = "Thomas Grimonet", email = "thomas.grimonet@gmail.com" },
+]
+description = "Arista EOS/CVP downloader script"
+license = { file = "LICENSE" }
+dependencies = [
+"cryptography",
+"paramiko",
+"requests>=2.20.0",
+"requests-toolbelt",
+"scp",
+"tqdm",
+"loguru",
+"rich==12.0.1",
+"cvprac>=1.0.7",
+"click==8.1.3",
+"click-help-colors==0.9.1",
+"pydantic==1.10.4",
+]
+keywords = ["eos_downloader", "Arista", "eos", "cvp", "network", "automation", "networking", "devops", "netdevops"]
+classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'Intended Audience :: Information Technology',
+ 'Topic :: System :: Software Distribution',
+ 'Topic :: Terminals',
+ 'Topic :: Utilities',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3 :: Only',
+ 'Programming Language :: Python :: Implementation :: PyPy',
+]
+requires-python = ">=3.8"
+
+[project.optional-dependencies]
+dev = [
+"isort==5.12.0",
+"mypy==0.991",
+"mypy-extensions>=0.4.3",
+"pre-commit>=2.20.0",
+"pylint",
+"pytest>=7.1.2",
+"pytest-cov>=2.11.1",
+"pytest-dependency",
+"pytest-html>=3.1.1",
+"pytest-metadata>=1.11.0",
+"pylint-pydantic>=0.1.4",
+"tox==4.0.11",
+"types-PyYAML",
+"types-paramiko",
+"types-requests",
+"typing-extensions",
+"yamllint",
+"flake8==4.0.1",
+"pyflakes==2.4.0"
+]
+
+[project.urls]
+Homepage = "https://www.github.com/titom73/eos-downloader"
+"Bug Tracker" = "https://www.github.com/titom73/eos-downloader/issues"
+Contributing = "https://www.github.com/titom73/eos-downloader"
+
+[project.scripts]
+ardl = "eos_downloader.cli.cli:cli"
+lard = "eos_downloader.cli.cli:cli"
+
+[tool.setuptools.packages.find]
+include = ["eos_downloader*"]
+namespaces = false
+
+# mypy as per https://pydantic-docs.helpmanual.io/mypy_plugin/#enabling-the-plugin
+[tool.mypy]
+plugins = [
+ "pydantic.mypy",
+ ]
+follow_imports = "skip"
+ignore_missing_imports = true
+warn_redundant_casts = true
+# Note: tox find some unused type ignore which are required for pre-commit.. to
+# investigate
+# warn_unused_ignores = true
+disallow_any_generics = true
+check_untyped_defs = true
+no_implicit_reexport = true
+strict_optional = true
+
+# for strict mypy: (this is the tricky one :-))
+disallow_untyped_defs = true
+
+mypy_path = "eos_downloader"
+
+[tool.pydantic-mypy]
+init_forbid_extra = true
+init_typed = true
+warn_required_dynamic_aliases = true
+warn_untyped_fields = true
+
+
+[tool.tox]
+legacy_tox_ini = """
+[tox]
+min_version = 4.0
+envlist =
+ clean,
+ lint,
+ type
+
+
+[tox-full]
+min_version = 4.0
+envlist =
+ clean,
+ py{38,39,310},
+ lint,
+ type,
+ report
+
+[gh-actions]
+python =
+ 3.8: lint, type
+ 3.9: lint, type
+ 3.10: lint, type
+
+[gh-actions-full]
+python =
+ 3.8: py38
+ 3.9: py39
+ 3.10: py310, lint, type, coverage
+
+[testenv]
+description = run the test driver with {basepython}
+extras = dev
+commands =
+    pytest -rA -q --cov-report term:skip-covered --html=report.html --self-contained-html --cov-report=html --color yes --cov=eos_downloader
+
+[testenv:lint]
+description = check the code style
+commands =
+ flake8 --max-line-length=165 --config=/dev/null eos_downloader
+ pylint eos_downloader
+
+[testenv:type]
+description = check typing
+commands =
+ type: mypy --config-file=pyproject.toml eos_downloader
+
+[testenv:clean]
+deps = coverage[toml]
+skip_install = true
+commands = coverage erase
+
+[testenv:report]
+deps = coverage[toml]
+commands = coverage report
+# add the following to make the report fail under some percentage
+# commands = coverage report --fail-under=80
+depends = py310
+
+"""
+
+[tool.pytest.ini_options]
+addopts = "-ra -q -s -vv --capture=tee-sys --cov --cov-append"
+log_level = "INFO"
+log_cli = "True"
+
+[tool.coverage.run]
+source = ['eos_downloader']
+# omit = []
+
+[tool.isort]
+profile = "black"
+line_length = 165
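+
+# Local development workflow (illustrative):
+#   pip install -e .[dev]   # install the project with the dev extras defined above
+#   tox                     # run the default envlist: clean, lint, type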
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..2ae2fae
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,5 @@
+[pytest]
+markers =
+ webtest: Tests that require connectivity to Arista.com.
+    slow: Tests that are slow to run and excluded by default.
+ eos_download: Testing of EOS-DOWNLOAD \ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/__init__.py
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/lib/__init__.py
diff --git a/tests/lib/dataset.py b/tests/lib/dataset.py
new file mode 100644
index 0000000..1286456
--- /dev/null
+++ b/tests/lib/dataset.py
@@ -0,0 +1,116 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# pylint: disable=logger-format-interpolation
+# pylint: disable=dangerous-default-value
+# flake8: noqa: W503
+# flake8: noqa: W1202
+
+from __future__ import (absolute_import, division, print_function)
+import os
+import eos_downloader
+from eos_downloader.eos import EOSDownloader
+from eos_downloader.data import DATA_MAPPING
+
+
+# --------------------------------------------------------------- #
+# MOCK data to use for testing
+# --------------------------------------------------------------- #
+
+# Get Auth token
+# eos_token = os.getenv('ARISTA_TOKEN')
+eos_token = os.getenv('ARISTA_TOKEN', 'invalid_token')
+eos_token_invalid = 'invalid_token'
+
+eos_dataset_valid = [
+ {
+ 'image': 'EOS',
+ 'version': '4.26.3M',
+ 'software': 'EOS',
+ 'filename': 'EOS-4.26.3M.swi',
+ 'expected_hash': 'sha512sum',
+ 'remote_path': '/support/download/EOS-USA/Active Releases/4.26/EOS-4.26.3M/EOS-4.26.3M.swi',
+ 'compute_checksum': True
+ },
+ {
+ 'image': 'EOS',
+ 'version': '4.25.6M',
+ 'software': 'EOS',
+ 'filename': 'EOS-4.25.6M.swi',
+ 'expected_hash': 'md5sum',
+ 'remote_path': '/support/download/EOS-USA/Active Releases/4.25/EOS-4.25.6M/EOS-4.25.6M.swi',
+ 'compute_checksum': True
+ },
+ {
+ 'image': 'vEOS-lab',
+ 'version': '4.25.6M',
+ 'software': 'EOS',
+ 'filename': 'vEOS-lab-4.25.6M.vmdk',
+ 'expected_hash': 'md5sum',
+ 'remote_path': '/support/download/EOS-USA/Active Releases/4.25/EOS-4.25.6M/vEOS-lab/vEOS-lab-4.25.6M.vmdk',
+ 'compute_checksum': False
+ }
+]
+
+
+eos_dataset_invalid = [
+ {
+ 'image': 'default',
+ 'version': '4.26.3M',
+ 'software': 'EOS',
+ 'filename': 'EOS-4.26.3M.swi',
+ 'expected_hash': 'sha512sum',
+ 'remote_path': '/support/download/EOS-USA/Active Releases/4.26/EOS-4.26.3M/EOS-4.26.3M.swi',
+ 'compute_checksum': True
+ }
+]
+
+eos_version = [
+ {
+ 'version': 'EOS-4.23.1F',
+ 'is_valid': True,
+ 'major': 4,
+ 'minor': 23,
+ 'patch': 1,
+ 'rtype': 'F'
+ },
+ {
+ 'version': 'EOS-4.23.0',
+ 'is_valid': True,
+ 'major': 4,
+ 'minor': 23,
+ 'patch': 0,
+ 'rtype': None
+ },
+ {
+ 'version': 'EOS-4.23',
+ 'is_valid': True,
+ 'major': 4,
+ 'minor': 23,
+ 'patch': 0,
+ 'rtype': None
+ },
+ {
+ 'version': 'EOS-4.23.1M',
+ 'is_valid': True,
+ 'major': 4,
+ 'minor': 23,
+ 'patch': 1,
+ 'rtype': 'M'
+ },
+ {
+ 'version': 'EOS-4.23.1.F',
+ 'is_valid': True,
+ 'major': 4,
+ 'minor': 23,
+ 'patch': 1,
+ 'rtype': 'F'
+ },
+ {
+ 'version': 'EOS-5.23.1F',
+ 'is_valid': False,
+ 'major': 4,
+ 'minor': 23,
+ 'patch': 1,
+ 'rtype': 'F'
+ },
+] \ No newline at end of file
diff --git a/tests/lib/fixtures.py b/tests/lib/fixtures.py
new file mode 100644
index 0000000..4515f9b
--- /dev/null
+++ b/tests/lib/fixtures.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# pylint: disable=logger-format-interpolation
+# pylint: disable=dangerous-default-value
+# flake8: noqa: W503
+# flake8: noqa: W1202
+
+from __future__ import (absolute_import, division, print_function)
+import os
+import pytest
+import eos_downloader
+from typing import Dict, Any, List
+from tests.lib.dataset import eos_dataset_valid, eos_dataset_invalid, eos_token, eos_token_invalid
+
+
+
+@pytest.fixture
+@pytest.mark.parametrize("DOWNLOAD_INFO", eos_dataset_valid)
+def create_download_instance(request, DOWNLOAD_INFO):
+ # logger.info("Execute fixture to create class elements")
+ request.cls.eos_downloader = eos_downloader.eos.EOSDownloader(
+ image=DOWNLOAD_INFO['image'],
+ software=DOWNLOAD_INFO['software'],
+ version=DOWNLOAD_INFO['version'],
+ token=eos_token,
+ hash_method='sha512sum')
+ yield
+ # logger.info('Cleanup test environment')
+ os.system('rm -f {}*'.format(DOWNLOAD_INFO['filename']))
+
+
+def generate_test_ids_dict(val: Dict[str, Any], key: str = 'name') -> str:
+ """
+    generate_test_ids_dict Helper to generate a test ID for parametrize
+
+    Return the value stored under `key` in a test dataset entry.
+
+    Parameters
+    ----------
+    val : Dict[str, Any]
+        A test dataset entry
+    key : str, optional
+        Key to use as test ID, by default 'name'
+
+    Returns
+    -------
+    str
+        Test ID for the entry, or "undefined_test" when the key is missing
+    """
+    if key in val.keys():
+        return val[key]
+ return "undefined_test"
+
+def generate_test_ids_list(val: List[Dict[str, Any]], key: str = 'name') -> List[str]:
+    """
+    generate_test_ids_list Helper to generate test IDs for parametrize
+
+    Build the list of test IDs from a list of test dataset entries.
+
+    Parameters
+    ----------
+    val : List[Dict[str, Any]]
+        List of test dataset entries
+    key : str, optional
+        Key to use as test ID, by default 'name'
+
+    Returns
+    -------
+    List[str]
+        One test ID per entry ('unset_entry' when the key is missing)
+    """
+ return [ entry[key] if key in entry.keys() else 'unset_entry' for entry in val ]
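+
+# Example (illustrative, using tests.lib.dataset.eos_version):
+#   generate_test_ids_list(eos_version, key='version')
+#   -> ['EOS-4.23.1F', 'EOS-4.23.0', 'EOS-4.23', 'EOS-4.23.1M', 'EOS-4.23.1.F', 'EOS-5.23.1F']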
diff --git a/tests/lib/helpers.py b/tests/lib/helpers.py
new file mode 100644
index 0000000..308f2a5
--- /dev/null
+++ b/tests/lib/helpers.py
@@ -0,0 +1,40 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# pylint: disable=logger-format-interpolation
+# pylint: disable=dangerous-default-value
+# flake8: noqa: W503
+# flake8: noqa: W1202
+
+from __future__ import (absolute_import, division, print_function)
+
+import os
+
+from eos_downloader.data import DATA_MAPPING
+
+
+
+def default_filename(version: str, info):
+ """
+ default_filename Helper to build default filename
+
+ Parameters
+ ----------
+ version : str
+ EOS version
+ info : dict
+ TEST Inputs
+
+ Returns
+ -------
+ str
+ Filename
+ """
+ if version is None or info is None:
+ return None
+ return DATA_MAPPING[info['software']]['default']['prepend'] + '-' + version + '.swi'
+
+
+def is_on_github_actions():
+ """Check if code is running on a CI runner"""
+    if "CI" not in os.environ or not os.environ["CI"] or "GITHUB_RUN_ID" not in os.environ:
+        return False
+    return True \ No newline at end of file
diff --git a/tests/system/__init__.py b/tests/system/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/system/__init__.py
diff --git a/tests/system/test_eos_download.py.old b/tests/system/test_eos_download.py.old
new file mode 100644
index 0000000..6ae56fe
--- /dev/null
+++ b/tests/system/test_eos_download.py.old
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# pylint: disable=logger-format-interpolation
+# pylint: disable=dangerous-default-value
+# flake8: noqa: W503
+# flake8: noqa: W1202
+
+from __future__ import (absolute_import, division, print_function)
+import sys
+import os
+import platform
+from loguru import logger
+import pytest
+import eos_downloader
+from eos_downloader.eos import EOSDownloader
+from eos_downloader.data import DATA_MAPPING
+from tests.lib.dataset import eos_dataset_valid, eos_token, eos_token_invalid
+from tests.lib.fixtures import create_download_instance
+from tests.lib.helpers import default_filename
+
+
+# --------------------------------------------------------------- #
+# TEST CASES
+# --------------------------------------------------------------- #
+
+
+@pytest.mark.usefixtures("create_download_instance")
+@pytest.mark.parametrize("DOWNLOAD_INFO", eos_dataset_valid, ids=['EOS-sha512', 'EOS-md5', 'vEOS-lab-no-hash'])
+@pytest.mark.eos_download
+class TestEosDownload_valid():
+ def test_data(self, DOWNLOAD_INFO):
+ print(str(DOWNLOAD_INFO))
+
+ @pytest.mark.dependency(name='authentication')
+ @pytest.mark.skipif(eos_token == eos_token_invalid, reason="Token is not set correctly")
+ @pytest.mark.skipif(platform.system() != 'Darwin', reason="Incorrect Hardware")
+ # @pytest.mark.xfail(reason="Deliberate - CI not set for testing AUTH")
+ @pytest.mark.webtest
+ def test_eos_download_authenticate(self):
+ assert self.eos_downloader.authenticate() is True
+
+ @pytest.mark.dependency(depends=["authentication"], scope='class')
+ @pytest.mark.webtest
+ @pytest.mark.slow
+ @pytest.mark.eos_download
+ def test_download_local(self, DOWNLOAD_INFO):
+ self.eos_downloader.download_local(file_path='.', checksum=DOWNLOAD_INFO['compute_checksum'])
+
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/__init__.py
diff --git a/tests/unit/test_eos_version.py b/tests/unit/test_eos_version.py
new file mode 100644
index 0000000..1b97ffc
--- /dev/null
+++ b/tests/unit/test_eos_version.py
@@ -0,0 +1,130 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# pylint: disable=logger-format-interpolation
+# pylint: disable=dangerous-default-value
+# flake8: noqa: W503
+# flake8: noqa: W1202
+
+from __future__ import (absolute_import, division, print_function)
+
+import sys
+from loguru import logger
+import pytest
+from eos_downloader.models.version import EosVersion, BASE_VERSION_STR
+from tests.lib.dataset import eos_version
+from tests.lib.fixtures import generate_test_ids_list
+
+logger.remove()
+logger.add(sys.stderr, level="DEBUG")
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_from_str(EOS_VERSION):
+ version = EosVersion.from_str(EOS_VERSION['version'])
+ if EOS_VERSION['is_valid']:
+ assert version.major == EOS_VERSION['major']
+ assert version.minor == EOS_VERSION['minor']
+ assert version.patch == EOS_VERSION['patch']
+ assert version.rtype == EOS_VERSION['rtype']
+ else:
+ assert str(version) == BASE_VERSION_STR
+
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_to_str(EOS_VERSION):
+ version = EosVersion(**EOS_VERSION)
+ if EOS_VERSION['is_valid']:
+ assert version.major == EOS_VERSION['major']
+ assert version.minor == EOS_VERSION['minor']
+ assert version.patch == EOS_VERSION['patch']
+ assert version.rtype == EOS_VERSION['rtype']
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_branch(EOS_VERSION):
+ if EOS_VERSION['is_valid']:
+ version = EosVersion(**EOS_VERSION)
+ assert version.branch == f'{EOS_VERSION["major"]}.{EOS_VERSION["minor"]}'
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_eq_operator(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ logger.warning(f'version is: {version.dict()}')
+ assert version == version
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_ge_operator(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ version_b = EosVersion.from_str(BASE_VERSION_STR)
+ assert version >= version_b
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_gs_operator(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ version_b = EosVersion.from_str(BASE_VERSION_STR)
+ assert version > version_b
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_le_operator(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ version_b = EosVersion.from_str(BASE_VERSION_STR)
+ assert version_b <= version
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_ls_operator(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ version_b = EosVersion.from_str(BASE_VERSION_STR)
+ assert version_b < version
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_ne_operator(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ version_b = EosVersion.from_str(BASE_VERSION_STR)
+ assert version_b != version
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_match(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ assert version.match(f'=={EOS_VERSION["version"]}')
+ assert version.match(f'!={BASE_VERSION_STR}')
+ assert version.match(f'>={BASE_VERSION_STR}')
+ assert version.match(f'>{BASE_VERSION_STR}')
+ assert version.match('<=4.99.0F')
+ assert version.match('<4.99.0F')
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_is_in_branch(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ version = EosVersion(**EOS_VERSION)
+ assert version.is_in_branch(f"{EOS_VERSION['major']}.{EOS_VERSION['minor']}")
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_match_exception(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ with pytest.raises(Exception) as e_info:
+ version = EosVersion(**EOS_VERSION)
+ assert version.match(f'+={EOS_VERSION["version"]}')
+ logger.info(f'receive exception: {e_info}')
+
+@pytest.mark.parametrize("EOS_VERSION", eos_version, ids=generate_test_ids_list(eos_version,key='version'))
+def test_eos_version_compare_exception(EOS_VERSION):
+ if not EOS_VERSION['is_valid']:
+ pytest.skip('not a valid version to test')
+ with pytest.raises(Exception) as e_info:
+ version = EosVersion(**EOS_VERSION)
+ version._compare(BASE_VERSION_STR)
+ logger.info(f'receive exception: {e_info}')
diff --git a/tests/unit/test_object_downloader.py b/tests/unit/test_object_downloader.py
new file mode 100644
index 0000000..8237b1c
--- /dev/null
+++ b/tests/unit/test_object_downloader.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python
+# coding: utf-8 -*-
+# pylint: disable=logger-format-interpolation
+# pylint: disable=dangerous-default-value
+# flake8: noqa: W503
+# flake8: noqa: W1202
+
+# import platform
+import sys
+
+import pytest
+from loguru import logger
+
+import eos_downloader
+from eos_downloader.data import DATA_MAPPING
+from eos_downloader.eos import EOSDownloader
+from tests.lib.dataset import eos_dataset_invalid, eos_dataset_valid, eos_token, eos_token_invalid
+from tests.lib.fixtures import create_download_instance
+from tests.lib.helpers import default_filename, is_on_github_actions
+
+logger.remove()
+logger.add(sys.stderr, level="DEBUG")
+
+@pytest.mark.usefixtures("create_download_instance")
+@pytest.mark.parametrize("DOWNLOAD_INFO", eos_dataset_valid, ids=['EOS-sha512', 'EOS-md5', 'vEOS-lab-no-hash'])
+@pytest.mark.eos_download
+class TestEosDownload_valid():
+ def test_data(self, DOWNLOAD_INFO):
+ logger.info(f'test input: {DOWNLOAD_INFO}')
+ logger.info(f'test build: {self.eos_downloader.__dict__}')
+
+ def test_eos_download_create(self, DOWNLOAD_INFO):
+ my_download = eos_downloader.eos.EOSDownloader(
+ image=DOWNLOAD_INFO['image'],
+ software=DOWNLOAD_INFO['software'],
+ version=DOWNLOAD_INFO['version'],
+ token=eos_token,
+ hash_method='sha512sum')
+ logger.info(my_download)
+ assert isinstance(my_download, eos_downloader.eos.EOSDownloader)
+
+ def test_eos_download_repr_string(self, DOWNLOAD_INFO):
+ expected = f"{DOWNLOAD_INFO['software']} - {DOWNLOAD_INFO['image']} - {DOWNLOAD_INFO['version']}"
+ logger.info(self.eos_downloader)
+ assert str(self.eos_downloader) == expected
+
+ def test_eos_download_build_filename(self, DOWNLOAD_INFO):
+ assert self.eos_downloader._build_filename() == DOWNLOAD_INFO['filename']
+
+ @pytest.mark.dependency(name='authentication')
+ @pytest.mark.skipif(eos_token == eos_token_invalid, reason="Token is not set correctly")
+ @pytest.mark.skipif(is_on_github_actions(), reason="Running on Github Runner")
+ # @pytest.mark.xfail(reason="Deliberate - CI not set for testing AUTH")
+ @pytest.mark.webtest
+ def test_eos_download_authenticate(self):
+ assert self.eos_downloader.authenticate() is True
+
+ @pytest.mark.dependency(depends=["authentication"], scope='class')
+ @pytest.mark.webtest
+ def test_eos_download_get_remote_file_path(self, DOWNLOAD_INFO):
+ assert self.eos_downloader._get_remote_filepath() == DOWNLOAD_INFO['remote_path']
+
+ @pytest.mark.dependency(depends=["authentication"], scope='class')
+ @pytest.mark.webtest
+ def test_eos_download_get_file_url(self, DOWNLOAD_INFO):
+ url = self.eos_downloader._get_url(remote_file_path = DOWNLOAD_INFO['remote_path'])
+ logger.info(url)
+ assert 'https://downloads.arista.com/EOS-USA/Active%20Releases/' in url
+
+@pytest.mark.usefixtures("create_download_instance")
+@pytest.mark.parametrize("DOWNLOAD_INFO", eos_dataset_invalid, ids=['EOS-FAKE'])
+class TestEosDownload_invalid():
+
+ def test_data(self, DOWNLOAD_INFO):
+ logger.info(f'test input: {dict(DOWNLOAD_INFO)}')
+ logger.info(f'test build: {self.eos_downloader.__dict__}')
+
+ def test_eos_download_login_error(self, DOWNLOAD_INFO):
+ my_download = eos_downloader.eos.EOSDownloader(
+ image=DOWNLOAD_INFO['image'],
+ software=DOWNLOAD_INFO['software'],
+ version=DOWNLOAD_INFO['version'],
+ token=eos_token_invalid,
+ hash_method=DOWNLOAD_INFO['expected_hash'])
+ assert my_download.authenticate() is False
+
+ @pytest.mark.dependency(name='authentication')
+ @pytest.mark.skipif(eos_token == eos_token_invalid, reason="Token is not set correctly")
+ @pytest.mark.skipif(is_on_github_actions(), reason="Running on Github Runner")
+ # @pytest.mark.xfail(reason="Deliberate - CI not set for testing AUTH")
+ @pytest.mark.webtest
+ def test_eos_download_authenticate(self):
+ assert self.eos_downloader.authenticate() is True
+
+ # SOFTWARE/PLATFORM TESTING
+
+ # @pytest.mark.skip(reason="Not yet implemented in lib")
+ def test_eos_file_name_with_incorrect_software(self, DOWNLOAD_INFO):
+ self.eos_downloader.software = 'FAKE'
+ logger.info(f'test build: {self.eos_downloader.__dict__}')
+ with pytest.raises(ValueError) as e_info:
+ result = self.eos_downloader._build_filename()
+ logger.info(f'receive exception: {e_info}')
+ self.eos_downloader.software = DOWNLOAD_INFO['software']
+
+ @pytest.mark.webtest
+ @pytest.mark.dependency(depends=["authentication"], scope='class')
+    def test_eos_download_get_remote_file_path_for_invalid_software(self, DOWNLOAD_INFO):
+ self.eos_downloader.software = 'FAKE'
+ logger.info(f'Platform set to: {self.eos_downloader.software}')
+ logger.info(f'test build: {self.eos_downloader.__dict__}')
+ with pytest.raises(ValueError) as e_info:
+ result = self.eos_downloader._build_filename()
+ logger.info(f'receive exception: {e_info}')
+ self.eos_downloader.software = DOWNLOAD_INFO['software']
+
+ # IMAGE TESTING
+
+ def test_eos_file_name_with_incorrect_image(self, DOWNLOAD_INFO):
+ self.eos_downloader.image = 'FAKE'
+ logger.info(f'Image set to: {self.eos_downloader.image}')
+ assert DOWNLOAD_INFO['filename'] == self.eos_downloader._build_filename()
+        self.eos_downloader.image = DOWNLOAD_INFO['image']
+
+ @pytest.mark.webtest
+ @pytest.mark.dependency(depends=["authentication"], scope='class')
+    def test_eos_download_get_remote_file_path_for_invalid_image(self, DOWNLOAD_INFO):
+ self.eos_downloader.image = 'FAKE'
+ logger.info(f'Image set to: {self.eos_downloader.image}')
+ assert self.eos_downloader.authenticate() is True
+ assert DOWNLOAD_INFO['filename'] == self.eos_downloader._build_filename()
+ self.eos_downloader.image = DOWNLOAD_INFO['image']
+
+ # VERSION TESTING
+
+ @pytest.mark.webtest
+ @pytest.mark.dependency(depends=["authentication"], scope='class')
+    def test_eos_download_get_remote_file_path_for_invalid_version(self, DOWNLOAD_INFO):
+ self.eos_downloader.version = 'FAKE'
+ logger.info(f'Version set to: {self.eos_downloader.version}')
+ assert self.eos_downloader._get_remote_filepath() == '' \ No newline at end of file