Diffstat (limited to '.github/scripts')
-rwxr-xr-x  .github/scripts/check_latest_versions.py              33
-rw-r--r--  .github/scripts/check_latest_versions_per_channel.py   9
-rw-r--r--  .github/scripts/modules/github_actions.py              27
-rw-r--r--  .github/scripts/modules/requirements.txt                1
-rw-r--r--  .github/scripts/modules/version_manipulation.py        141
-rwxr-xr-x  .github/scripts/upload-new-version-tags.sh             18
6 files changed, 229 insertions, 0 deletions
diff --git a/.github/scripts/check_latest_versions.py b/.github/scripts/check_latest_versions.py
new file mode 100755
index 000000000..67b11f8d5
--- /dev/null
+++ b/.github/scripts/check_latest_versions.py
@@ -0,0 +1,33 @@
+import sys
+import os
+import modules.version_manipulation as ndvm
+import modules.github_actions as cigh
+
+
+def main(command_line_args):
+ """
+ Inputs: Single version or multiple versions
+ Outputs:
+ Create files with the versions that needed update under temp_dir/staging-new-releases
+ Setting the GitHub outputs, 'versions_needs_update' to 'true'
+ """
+ versions = [str(arg) for arg in command_line_args]
+ # Create a temp output folder for the release that need update
+ staging = os.path.join(os.environ.get('TMPDIR', '/tmp'), 'staging-new-releases')
+ os.makedirs(staging, exist_ok=True)
+ for version in versions:
+ temp_value = ndvm.compare_version_with_remote(version)
+ if temp_value:
+ path, filename = ndvm.get_release_path_and_filename(version)
+ release_path = os.path.join(staging, path)
+ os.makedirs(release_path, exist_ok=True)
+ file_release_path = os.path.join(release_path, filename)
+ with open(file_release_path, "w") as file:
+ print("Creating local copy of the release version update at: ", file_release_path)
+ file.write(version)
+ if cigh.run_as_github_action():
+ cigh.update_github_output("versions_needs_update", "true")
+
+
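+# Example invocation (illustrative version strings):
+#   python3 check_latest_versions.py v1.42.4 v1.42.0-30-nightly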
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/.github/scripts/check_latest_versions_per_channel.py b/.github/scripts/check_latest_versions_per_channel.py
new file mode 100644
index 000000000..885e5a98c
--- /dev/null
+++ b/.github/scripts/check_latest_versions_per_channel.py
@@ -0,0 +1,9 @@
+import check_latest_versions
+import modules.version_manipulation as ndvm
+import sys
+
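+# Example invocation (channel is either "stable" or "nightly"):
+#   python3 check_latest_versions_per_channel.py stable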
+if __name__ == "__main__":
+    channel = sys.argv[1]
+    sorted_agents_by_major = ndvm.sort_and_grouby_major_agents_of_channel(channel)
+    latest_per_major = [values[0] for values in sorted_agents_by_major.values()]
+    check_latest_versions.main(latest_per_major)
diff --git a/.github/scripts/modules/github_actions.py b/.github/scripts/modules/github_actions.py
new file mode 100644
index 000000000..1d653a77b
--- /dev/null
+++ b/.github/scripts/modules/github_actions.py
@@ -0,0 +1,27 @@
+import os
+
+
+def update_github_env(key, value):
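+    """Append a key=value line to the file referenced by $GITHUB_ENV."""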
+    try:
+        env_file = os.getenv('GITHUB_ENV')
+        print(env_file)
+        with open(env_file, "a") as file:
+            file.write(f"{key}={value}\n")
+        print(f"Updated GITHUB_ENV with {key}={value}")
+    except Exception as e:
+        print(f"Error updating GITHUB_ENV. Error: {e}")
+
+
+def update_github_output(key, value):
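+    """Append a key=value line to the file referenced by $GITHUB_OUTPUT."""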
+    try:
+        env_file = os.getenv('GITHUB_OUTPUT')
+        print(env_file)
+        with open(env_file, "a") as file:
+            file.write(f"{key}={value}\n")
+        print(f"Updated GITHUB_OUTPUT with {key}={value}")
+    except Exception as e:
+        print(f"Error updating GITHUB_OUTPUT. Error: {e}")
+
+
+def run_as_github_action():
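+    """Return True when the script is running inside a GitHub Actions workflow."""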
+    return os.environ.get('GITHUB_ACTIONS') == 'true'
diff --git a/.github/scripts/modules/requirements.txt b/.github/scripts/modules/requirements.txt
new file mode 100644
index 000000000..fbec796fa
--- /dev/null
+++ b/.github/scripts/modules/requirements.txt
@@ -0,0 +1 @@
+PyGithub==2.1.1
diff --git a/.github/scripts/modules/version_manipulation.py b/.github/scripts/modules/version_manipulation.py
new file mode 100644
index 000000000..cc346fb54
--- /dev/null
+++ b/.github/scripts/modules/version_manipulation.py
@@ -0,0 +1,141 @@
+import os
+import re
+import requests
+from itertools import groupby
+from github import Github
+from github.GithubException import GithubException
+
+repos_URL = {
+ "stable": "netdata/netdata",
+ "nightly": "netdata/netdata-nightlies"
+}
+
+GH_TOKEN = os.getenv("GH_TOKEN")
+if GH_TOKEN is None or GH_TOKEN == "":
+    print("Token is not defined or is empty, continuing with the GitHub API rate limits for unauthenticated requests")
+
+
+def identify_channel(_version):
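+    """
+    Identify the release channel of a version string and return (channel, regex pattern),
+    e.g. "v1.42.0" -> stable, "v1.42.0-30-nightly" -> nightly; returns None for an
+    unrecognised format.
+    """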
+    nightly_pattern = r'v(\d+)\.(\d+)\.(\d+)-(\d+)-nightly'
+    stable_pattern = r'v(\d+)\.(\d+)\.(\d+)'
+    if re.match(nightly_pattern, _version):
+        _channel = "nightly"
+        _pattern = nightly_pattern
+    elif re.match(stable_pattern, _version):
+        _channel = "stable"
+        _pattern = stable_pattern
+    else:
+        print("Invalid version format.")
+        return None
+    return _channel, _pattern
+
+
+def padded_version(item):
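+    """
+    Build an integer sort key by zero-padding every component after the major version,
+    e.g. (1, 42, 0, 30) -> int('10000' + '00042' + '00000' + '00030').
+    """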
+    key_value = '10000'
+    for value in item[1:]:
+        key_value += f'{value:05}'
+    return int(key_value)
+
+
+def extract_version(title):
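+    """Parse a release title into a tuple of ints, e.g. "v1.42.0" -> (1, 42, 0)."""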
+    if identify_channel(title):
+        _, _pattern = identify_channel(title)
+        try:
+            match = re.match(_pattern, title)
+            if match:
+                return tuple(map(int, match.groups()))
+        except Exception as e:
+            print(f"Unexpected error: {e}")
+    return None
+
+
+def get_release_path_and_filename(_version):
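+    """
+    Map a version string to its remote (path, filename) pair,
+    e.g. "v1.42.0" -> ("stable", "v1") and "v1.42.0-30-nightly" -> ("nightly", "v1").
+    """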
+    nightly_pattern = r'v(\d+)\.(\d+)\.(\d+)-(\d+)-nightly'
+    stable_pattern = r'v(\d+)\.(\d+)\.(\d+)'
+    if match := re.match(nightly_pattern, _version):
+        msb = match.group(1)
+        _path = "nightly"
+        _filename = f"v{msb}"
+    elif match := re.match(stable_pattern, _version):
+        msb = match.group(1)
+        _path = "stable"
+        _filename = f"v{msb}"
+    else:
+        print("Invalid version format.")
+        exit(1)
+    return (_path, _filename)
+
+
+def compare_version_with_remote(version):
+ """
+ If the version = fun (version) you need to update the version in the
+ remote. If the version remote doesn't exist, returns the version
+ :param channel: any version of the agent
+ :return: the greater from version and version remote.
+ """
+
+ prefix = "https://packages.netdata.cloud/releases"
+ path, filename = get_release_path_and_filename(version)
+
+ remote_url = f"{prefix}/{path}/{filename}"
+ response = requests.get(remote_url)
+
+ if response.status_code == 200:
+ version_remote = response.text.rstrip()
+
+ version_components = extract_version(version)
+ remote_version_components = extract_version(version_remote)
+
+ absolute_version = padded_version(version_components)
+ absolute_remote_version = padded_version(remote_version_components)
+
+ if absolute_version > absolute_remote_version:
+ print(f"Version in the remote: {version_remote}, is older than the current: {version}, I need to update")
+ return (version)
+ else:
+ print(f"Version in the remote: {version_remote}, is newer than the current: {version}, no action needed")
+ return (None)
+ else:
+ # Remote version not found
+ print(f"Version in the remote not found, updating the predefined latest path with the version: {version}")
+ return (version)
+
+
+def sort_and_grouby_major_agents_of_channel(channel):
+ """
+ Fetches the GH API and read either netdata/netdata or netdata/netdata-nightlies repo. It fetches all of their
+ releases implements a grouping by their major release number.
+ Every k,v in this dictionary is in the form; "vX": [descending ordered list of Agents in this major release].
+ :param channel: "nightly" or "stable"
+ :return: None or dict() with the Agents grouped by major version # (vX)
+ """
+ try:
+ G = Github(GH_TOKEN)
+ repo = G.get_repo(repos_URL[channel])
+ releases = repo.get_releases()
+ except GithubException as e:
+ print(f"GitHub API request failed: {e}")
+ return None
+
+ except Exception as e:
+ print(f"An unexpected error occurred: {e}")
+ return None
+
+ extracted_titles = [extract_version(item.title) for item in releases if
+ extract_version(item.title) is not None]
+ # Necessary sorting for implement the group by
+ extracted_titles.sort(key=lambda x: x[0])
+ # Group titles by major version
+ grouped_by_major = {major: list(group) for major, group in groupby(extracted_titles, key=lambda x: x[0])}
+ sorted_grouped_by_major = {}
+ for key, values in grouped_by_major.items():
+ sorted_values = sorted(values, key=padded_version, reverse=True)
+ sorted_grouped_by_major[key] = sorted_values
+ # Transform them in the correct form
+ if channel == "stable":
+ result_dict = {f"v{key}": [f"v{a}.{b}.{c}" for a, b, c in values] for key, values in
+ sorted_grouped_by_major.items()}
+ else:
+ result_dict = {f"v{key}": [f"v{a}.{b}.{c}-{d}-nightly" for a, b, c, d in values] for key, values in
+ sorted_grouped_by_major.items()}
+ return result_dict
diff --git a/.github/scripts/upload-new-version-tags.sh b/.github/scripts/upload-new-version-tags.sh
new file mode 100755
index 000000000..a9b0cd303
--- /dev/null
+++ b/.github/scripts/upload-new-version-tags.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
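+# Copy any staged release version files (written by check_latest_versions.py under
+# ${TMPDIR:-/tmp}/staging-new-releases) to the matching release directory on the packages host.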
+set -e
+
+host="packages.netdata.cloud"
+user="netdatabot"
+
+prefix="/var/www/html/releases"
+staging="${TMPDIR:-/tmp}/staging-new-releases"
+
+mkdir -p "${staging}"
+
+for source_dir in "${staging}"/*; do
+    if [ -d "${source_dir}" ]; then
+        base_name=$(basename "${source_dir}")
+        scp -r "${source_dir}"/* "${user}@${host}:${prefix}/${base_name}"
+    fi
+done