author    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 02:57:58 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 02:57:58 +0000
commit    be1c7e50e1e8809ea56f2c9d472eccd8ffd73a97 (patch)
tree      9754ff1ca740f6346cf8483ec915d4054bc5da2d /.github/scripts
parent    Initial commit. (diff)
Adding upstream version 1.44.3. (tag: upstream/1.44.3, branch: upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '.github/scripts')

-rwxr-xr-x  .github/scripts/build-artifacts.sh                    |  82
-rwxr-xr-x  .github/scripts/build-dist.sh                         |  71
-rwxr-xr-x  .github/scripts/build-static.sh                       |  61
-rwxr-xr-x  .github/scripts/bump-packaging-version.sh             |   6
-rwxr-xr-x  .github/scripts/check-updater.sh                      |  49
-rwxr-xr-x  .github/scripts/check_latest_versions.py              |  33
-rw-r--r--  .github/scripts/check_latest_versions_per_channel.py  |   9
-rwxr-xr-x  .github/scripts/ci-support-pkgs.sh                    |  18
-rwxr-xr-x  .github/scripts/docker-test.sh                        |  64
-rw-r--r--  .github/scripts/functions.sh                          |  69
-rwxr-xr-x  .github/scripts/gen-docker-tags.py                    |  19
-rwxr-xr-x  .github/scripts/gen-matrix-build.py                   |  34
-rwxr-xr-x  .github/scripts/gen-matrix-eol-check.py               |  29
-rwxr-xr-x  .github/scripts/gen-matrix-packaging.py               |  36
-rwxr-xr-x  .github/scripts/gen-matrix-repoconfig.py              |  27
-rwxr-xr-x  .github/scripts/get-static-cache-key.sh               |  16
-rw-r--r--  .github/scripts/modules/github_actions.py             |  27
-rw-r--r--  .github/scripts/modules/requirements.txt              |   1
-rw-r--r--  .github/scripts/modules/version_manipulation.py       | 141
-rwxr-xr-x  .github/scripts/netdata-pkgcloud-cleanup.py           | 190
-rwxr-xr-x  .github/scripts/package-upload.sh                     |  43
-rwxr-xr-x  .github/scripts/package_cloud_wrapper.sh              |  48
-rwxr-xr-x  .github/scripts/pkg-test.sh                           | 162
-rwxr-xr-x  .github/scripts/platform-impending-eol.py             |  58
-rwxr-xr-x  .github/scripts/prepare-release-base.sh               | 180
-rwxr-xr-x  .github/scripts/run-updater-check.sh                  |  30
-rwxr-xr-x  .github/scripts/run_install_with_dist_file.sh         |  39
-rwxr-xr-x  .github/scripts/upload-new-version-tags.sh            |  18

28 files changed, 1560 insertions(+), 0 deletions(-)
diff --git a/.github/scripts/build-artifacts.sh b/.github/scripts/build-artifacts.sh
new file mode 100755
index 00000000..569c79a5
--- /dev/null
+++ b/.github/scripts/build-artifacts.sh
@@ -0,0 +1,82 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.tar.gz source tarball (dist)
+# and netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifacts.
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ echo "${VERSION}" > packaging/version
+ ) >&2
+}
+
+build_dist() {
+ progress "Building dist"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ autoreconf -ivf
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata \
+ --disable-dependency-tracking \
+ CFLAGS=-O2
+ make dist
+ mv "${BASENAME}.tar.gz" artifacts/
+ ) >&2
+}
+
+build_static_x86_64() {
+ progress "Building static x86_64"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ USER="" ./packaging/makeself/build-x86_64-static.sh
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+
+ cd artifacts || exit 1
+ ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+ ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
+ sha256sum -b ./* > "sha256sums.txt"
+ ) >&2
+}
+
+steps="prepare_build build_dist build_static_x86_64"
+steps="$steps prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "πŸŽ‰ All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
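
For reference, a typical local invocation might look like this (run from the repository root; the version string is illustrative, and VERSION falls back to "git describe" when unset):

    VERSION="v1.44.3" .github/scripts/build-artifacts.sh
    ls artifacts/    # netdata-v1.44.3.tar.gz, netdata-latest.gz.run, sha256sums.txt, ...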
diff --git a/.github/scripts/build-dist.sh b/.github/scripts/build-dist.sh
new file mode 100755
index 00000000..027b6214
--- /dev/null
+++ b/.github/scripts/build-dist.sh
@@ -0,0 +1,71 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.tar.gz source tarball (dist)
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe --always)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ echo "${VERSION}" > packaging/version
+ ) >&2
+}
+
+build_dist() {
+ progress "Building dist"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ autoreconf -ivf
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata \
+ --disable-dependency-tracking \
+ CFLAGS=-O2
+ make dist
+ mv "${BASENAME}.tar.gz" artifacts/
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+ cd artifacts || exit 1
+ ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+ sha256sum -b ./* > "sha256sums.txt"
+ ) >&2
+}
+
+steps="prepare_build build_dist prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "πŸŽ‰ All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/build-static.sh b/.github/scripts/build-static.sh
new file mode 100755
index 00000000..e8105143
--- /dev/null
+++ b/.github/scripts/build-static.sh
@@ -0,0 +1,61 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifact.
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+BUILDARCH="${1}"
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe)"}"
+BASENAME="$NAME-$BUILDARCH-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ ) >&2
+}
+
+build_static() {
+ progress "Building static ${BUILDARCH}"
+ (
+ USER="" ./packaging/makeself/build-static.sh "${BUILDARCH}"
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+
+ cd artifacts || exit 1
+ ln -s "${BASENAME}.gz.run" "netdata-${BUILDARCH}-latest.gz.run"
+ if [ "${BUILDARCH}" = "x86_64" ]; then
+ ln -s "${BASENAME}.gz.run" netdata-latest.gz.run
+ fi
+ ) >&2
+}
+
+steps="prepare_build build_static"
+steps="$steps prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "πŸŽ‰ All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
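
A minimal invocation sketch (run from the repository root; the architecture argument is passed straight through to packaging/makeself/build-static.sh, and x86_64 here is illustrative):

    .github/scripts/build-static.sh x86_64
    ls artifacts/    # netdata-x86_64-<version>.gz.run plus the netdata-latest.gz.run symlink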
diff --git a/.github/scripts/bump-packaging-version.sh b/.github/scripts/bump-packaging-version.sh
new file mode 100755
index 00000000..bffcb0c1
--- /dev/null
+++ b/.github/scripts/bump-packaging-version.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+VERSION="$(git describe)"
+echo "$VERSION" > packaging/version
+git add -A
+git commit -m "[netdata nightly] $VERSION"
diff --git a/.github/scripts/check-updater.sh b/.github/scripts/check-updater.sh
new file mode 100755
index 00000000..3df0c9de
--- /dev/null
+++ b/.github/scripts/check-updater.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+set -e
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+check_successful_update() {
+ progress "Check netdata version after update"
+ (
+ netdata_version=$(netdata -v | awk '{print $2}')
+ updater_version=$(cat packaging/version)
+ if [ "$netdata_version" = "$updater_version" ]; then
+ echo "Update successful!"
+ else
+ exit 1
+ fi
+ ) >&2
+}
+
+check_autoupdate_enabled() {
+ progress "Check autoupdate still enabled after update"
+ (
+ if [ -f /etc/periodic/daily/netdata-updater ] || [ -f /etc/cron.daily/netdata-updater ]; then
+      echo "Auto-update is still enabled."
+ else
+ exit 1
+ fi
+ ) >&2
+}
+
+steps="check_successful_update check_autoupdate_enabled"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+        fail "Update check failed"
+ fi
+ fi
+ done
+
+ echo "πŸŽ‰ All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/check_latest_versions.py b/.github/scripts/check_latest_versions.py
new file mode 100755
index 00000000..67b11f8d
--- /dev/null
+++ b/.github/scripts/check_latest_versions.py
@@ -0,0 +1,33 @@
+import sys
+import os
+import modules.version_manipulation as ndvm
+import modules.github_actions as cigh
+
+
+def main(command_line_args):
+ """
+    Inputs: one or more version strings
+    Outputs:
+        Creates a file for each version that needs updating under temp_dir/staging-new-releases
+        Sets the GitHub output 'versions_needs_update' to 'true'
+    """
+    versions = [str(arg) for arg in command_line_args]
+    # Create a temp output folder for the releases that need updating
+ staging = os.path.join(os.environ.get('TMPDIR', '/tmp'), 'staging-new-releases')
+ os.makedirs(staging, exist_ok=True)
+ for version in versions:
+ temp_value = ndvm.compare_version_with_remote(version)
+ if temp_value:
+ path, filename = ndvm.get_release_path_and_filename(version)
+ release_path = os.path.join(staging, path)
+ os.makedirs(release_path, exist_ok=True)
+ file_release_path = os.path.join(release_path, filename)
+ with open(file_release_path, "w") as file:
+ print("Creating local copy of the release version update at: ", file_release_path)
+ file.write(version)
+ if cigh.run_as_github_action():
+ cigh.update_github_output("versions_needs_update", "true")
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
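
A hedged usage sketch (version strings are illustrative; run with .github/scripts as the working directory so the modules package resolves, with the dependencies from modules/requirements.txt installed):

    cd .github/scripts
    python3 check_latest_versions.py v1.44.3 v1.43.2
    ls "${TMPDIR:-/tmp}/staging-new-releases"    # one file per channel/major that needed an update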
diff --git a/.github/scripts/check_latest_versions_per_channel.py b/.github/scripts/check_latest_versions_per_channel.py
new file mode 100644
index 00000000..885e5a98
--- /dev/null
+++ b/.github/scripts/check_latest_versions_per_channel.py
@@ -0,0 +1,9 @@
+import check_latest_versions
+import modules.version_manipulation as ndvm
+import sys
+
+if __name__ == "__main__":
+ channel = sys.argv[1]
+ sorted_agents_by_major = ndvm.sort_and_grouby_major_agents_of_channel(channel)
+ latest_per_major = [values[0] for values in sorted_agents_by_major.values()]
+ check_latest_versions.main(latest_per_major)
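
Likewise for the per-channel wrapper (GH_TOKEN is optional but avoids the unauthenticated GitHub API rate limits):

    cd .github/scripts
    GH_TOKEN="<github-token>" python3 check_latest_versions_per_channel.py stable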
diff --git a/.github/scripts/ci-support-pkgs.sh b/.github/scripts/ci-support-pkgs.sh
new file mode 100755
index 00000000..5cedbf3b
--- /dev/null
+++ b/.github/scripts/ci-support-pkgs.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# This script installs supporting packages needed for CI, which provide the following:
+# cron, pidof
+
+set -e
+
+. /etc/os-release
+
+case "${ID}" in
+ amzn|almalinux|centos|fedora)
+ dnf install -y procps-ng cronie cronie-anacron || \
+ yum install -y procps-ng cronie cronie-anacron
+ ;;
+ arch)
+ pacman -S --noconfirm cronie
+ ;;
+esac
diff --git a/.github/scripts/docker-test.sh b/.github/scripts/docker-test.sh
new file mode 100755
index 00000000..0f5fa469
--- /dev/null
+++ b/.github/scripts/docker-test.sh
@@ -0,0 +1,64 @@
+#!/bin/sh
+
+export DEBIAN_FRONTEND=noninteractive
+
+wait_for() {
+ host="${1}"
+ port="${2}"
+ name="${3}"
+ timeout="30"
+
+ if command -v nc > /dev/null ; then
+ netcat="nc"
+ elif command -v netcat > /dev/null ; then
+ netcat="netcat"
+ else
+ printf "Unable to find a usable netcat command.\n"
+ return 1
+ fi
+
+ printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
+
+ sleep 30
+
+ i=0
+ while ! ${netcat} -z "${host}" "${port}"; do
+ sleep 1
+ if [ "$i" -gt "$timeout" ]; then
+ printf "Timed out!\n"
+ docker ps -a
+ echo "::group::Netdata container logs"
+ docker logs netdata 2>&1
+ echo "::endgroup::"
+ return 1
+ fi
+ i="$((i + 1))"
+ done
+ printf "OK\n"
+}
+
+if [ -z "$(command -v nc 2>/dev/null)" ] && [ -z "$(command -v netcat 2>/dev/null)" ]; then
+ sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y netcat
+fi
+
+docker run -d --name=netdata \
+ -p 19999:19999 \
+ -v netdataconfig:/etc/netdata \
+ -v netdatalib:/var/lib/netdata \
+ -v netdatacache:/var/cache/netdata \
+ -v /etc/passwd:/host/etc/passwd:ro \
+ -v /etc/group:/host/etc/group:ro \
+ -v /proc:/host/proc:ro \
+ -v /sys:/host/sys:ro \
+ -v /etc/os-release:/host/etc/os-release:ro \
+ --cap-add SYS_PTRACE \
+ --security-opt apparmor=unconfined \
+ netdata/netdata:test
+
+wait_for localhost 19999 netdata || exit 1
+
+curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+
+cat ./response
+
+jq '.version' ./response || exit 1
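
The script assumes an image tagged netdata/netdata:test is already available locally; a typical sequence might be (the build command is illustrative):

    docker build -t netdata/netdata:test .
    .github/scripts/docker-test.sh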
diff --git a/.github/scripts/functions.sh b/.github/scripts/functions.sh
new file mode 100644
index 00000000..7cd2e080
--- /dev/null
+++ b/.github/scripts/functions.sh
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+# Shared helper functions, sourced by the build and check scripts in this directory.
+
+set -e
+
+color() {
+ fg="$1"
+ bg="${2}"
+ ft="${3:-0}"
+
+  printf "\033[%s;%s;%sm" "$ft" "$fg" "$bg"
+}
+
+color_reset() {
+ printf "\033[0m"
+}
+
+ok() {
+ if [ -t 1 ]; then
+    printf "%s[ OK ]%s\n" "$(color 37 42 1)" "$(color_reset)"
+ else
+ printf "%s\n" "[ OK ]"
+ fi
+}
+
+err() {
+ if [ -t 1 ]; then
+    printf "%s[ ERR ]%s\n" "$(color 37 41 1)" "$(color_reset)"
+ else
+ printf "%s\n" "[ ERR ]"
+ fi
+}
+
+run() {
+ retval=0
+ logfile="$(mktemp -t "run-XXXXXX")"
+ if "$@" 2> "$logfile"; then
+ ok
+ else
+ retval=$?
+ err
+ tail -n 100 "$logfile" || true
+ fi
+ rm -rf "$logfile"
+ return $retval
+}
+
+progress() {
+ printf "%-40s" "$(printf "%s ... " "$1")"
+}
+
+log() {
+ printf "%s\n" "$1"
+}
+
+error() {
+ log "ERROR: ${1}"
+}
+
+fail() {
+ log "FATAL: ${1}"
+ exit 1
+}
+
+debug() {
+ log "Dropping into a shell for debugging ..."
+ exec /bin/sh
+}
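
The intended usage pattern, mirrored from the build scripts above: each step prints its own label with progress and sends its command output to stderr, which run captures into a temporary log (dumped on failure) before appending the [ OK ]/[ ERR ] marker. A minimal sketch (the step name is illustrative):

    . .github/scripts/functions.sh

    do_thing() {
      progress "Doing the thing"
      (
        echo "actual work happens here"
      ) >&2
    }

    run do_thing || fail "Could not do the thing"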
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
new file mode 100755
index 00000000..8c88d3b5
--- /dev/null
+++ b/.github/scripts/gen-docker-tags.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+
+import sys
+
+version = sys.argv[1].split('.')
+suffix = sys.argv[2]
+
+REPO = f'netdata/netdata{suffix}'
+GHCR = f'ghcr.io/{REPO}'
+QUAY = f'quay.io/{REPO}'
+
+tags = []
+
+for repo in [REPO, GHCR, QUAY]:
+ tags.append(':'.join([repo, version[0]]))
+ tags.append(':'.join([repo, '.'.join(version[0:2])]))
+ tags.append(':'.join([repo, '.'.join(version[0:3])]))
+
+print(','.join(tags))
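
For reference, this is the output the loop above produces for a given version and an empty suffix:

    $ .github/scripts/gen-docker-tags.py 1.44.3 ''
    netdata/netdata:1,netdata/netdata:1.44,netdata/netdata:1.44.3,ghcr.io/netdata/netdata:1,ghcr.io/netdata/netdata:1.44,ghcr.io/netdata/netdata:1.44.3,quay.io/netdata/netdata:1,quay.io/netdata/netdata:1.44,quay.io/netdata/netdata:1.44.3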
diff --git a/.github/scripts/gen-matrix-build.py b/.github/scripts/gen-matrix-build.py
new file mode 100755
index 00000000..3185e883
--- /dev/null
+++ b/.github/scripts/gen-matrix-build.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = []
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for v in data['include']:
+ e = {
+ 'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
+ 'version': v['version'],
+ }
+
+ if 'base_image' in v:
+ e['distro'] = v['base_image']
+ else:
+ e['distro'] = ':'.join([v['distro'], str(v['version'])])
+
+ if 'env_prep' in v:
+ e['env_prep'] = v['env_prep']
+
+ if 'jsonc_removal' in v:
+ e['jsonc_removal'] = v['jsonc_removal']
+
+ entries.append(e)
+
+entries.sort(key=lambda k: k['distro'])
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
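
To inspect the generated matrix locally (assuming ruamel.yaml is installed and the command runs from the repository root, where .github/data/distros.yml exists):

    python3 .github/scripts/gen-matrix-build.py | python3 -m json.tool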
diff --git a/.github/scripts/gen-matrix-eol-check.py b/.github/scripts/gen-matrix-eol-check.py
new file mode 100755
index 00000000..63852728
--- /dev/null
+++ b/.github/scripts/gen-matrix-eol-check.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+'''Generate the build matrix for the EOL check jobs.'''
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = list()
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for item in data['include']:
+ if 'eol_check' in item and item['eol_check']:
+ if isinstance(item['eol_check'], str):
+ distro = item['eol_check']
+ else:
+ distro = item['distro']
+
+ entries.append({
+ 'distro': distro,
+ 'release': item['version'],
+ 'full_name': f'{ item["distro"] } { item["version"] }'
+ })
+
+entries.sort(key=lambda k: (k['distro'], k['release']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/gen-matrix-packaging.py b/.github/scripts/gen-matrix-packaging.py
new file mode 100755
index 00000000..9347cd76
--- /dev/null
+++ b/.github/scripts/gen-matrix-packaging.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+
+import json
+import sys
+
+from ruamel.yaml import YAML
+
+ALWAYS_RUN_ARCHES = ["amd64", "x86_64"]
+SHORT_RUN = sys.argv[1]
+yaml = YAML(typ='safe')
+entries = list()
+run_limited = False
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+if bool(int(SHORT_RUN)):
+ run_limited = True
+
+for i, v in enumerate(data['include']):
+ if 'packages' in data['include'][i]:
+ for arch in data['include'][i]['packages']['arches']:
+ if arch in ALWAYS_RUN_ARCHES or not run_limited:
+ entries.append({
+ 'distro': data['include'][i]['distro'],
+ 'version': data['include'][i]['version'],
+ 'repo_distro': data['include'][i]['packages']['repo_distro'],
+ 'format': data['include'][i]['packages']['type'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
+ 'platform': data['platform_map'][arch],
+ 'arch': arch
+ })
+
+entries.sort(key=lambda k: (data['arch_order'].index(k['arch']), k['distro'], k['version']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/gen-matrix-repoconfig.py b/.github/scripts/gen-matrix-repoconfig.py
new file mode 100755
index 00000000..264cd53e
--- /dev/null
+++ b/.github/scripts/gen-matrix-repoconfig.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = list()
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for i, v in enumerate(data['include']):
+ if 'packages' in data['include'][i]:
+ entries.append({
+ 'distro': data['include'][i]['distro'],
+ 'version': data['include'][i]['version'],
+ 'pkgclouddistro': data['include'][i]['packages']['repo_distro'],
+ 'format': data['include'][i]['packages']['type'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
+ 'platform': data['platform_map']['amd64'],
+ 'arches': ' '.join(['"' + x + '"' for x in data['include'][i]['packages']['arches']])
+ })
+
+entries.sort(key=lambda k: (k['distro'], k['version']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/get-static-cache-key.sh b/.github/scripts/get-static-cache-key.sh
new file mode 100755
index 00000000..5093b332
--- /dev/null
+++ b/.github/scripts/get-static-cache-key.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+arch="${1}"
+platform="$(packaging/makeself/uname2platform.sh "${arch}")"
+builder_rev="v1"
+
+docker pull --platform "${platform}" netdata/static-builder:${builder_rev}
+
+# shellcheck disable=SC2046
+cat $(find packaging/makeself/jobs -type f ! -regex '.*\(netdata\|-makeself\).*') > /tmp/static-cache-key-data
+
+docker run -it --rm --platform "${platform}" netdata/static-builder:${builder_rev} sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data
+
+h="$(sha256sum /tmp/static-cache-key-data | cut -f 1 -d ' ')"
+
+echo "key=static-${arch}-${h}" >> "${GITHUB_OUTPUT}"
diff --git a/.github/scripts/modules/github_actions.py b/.github/scripts/modules/github_actions.py
new file mode 100644
index 00000000..1d653a77
--- /dev/null
+++ b/.github/scripts/modules/github_actions.py
@@ -0,0 +1,27 @@
+import os
+
+
+def update_github_env(key, value):
+ try:
+ env_file = os.getenv('GITHUB_ENV')
+ print(env_file)
+ with open(env_file, "a") as file:
+            file.write(f"{key}={value}\n")
+ print(f"Updated GITHUB_ENV with {key}={value}")
+ except Exception as e:
+ print(f"Error updating GITHUB_ENV. Error: {e}")
+
+
+def update_github_output(key, value):
+ try:
+ env_file = os.getenv('GITHUB_OUTPUT')
+ print(env_file)
+ with open(env_file, "a") as file:
+            file.write(f"{key}={value}\n")
+ print(f"Updated GITHUB_OUTPUT with {key}={value}")
+ except Exception as e:
+ print(f"Error updating GITHUB_OUTPUT. Error: {e}")
+
+
+def run_as_github_action():
+ return os.environ.get('GITHUB_ACTIONS') == 'true'
diff --git a/.github/scripts/modules/requirements.txt b/.github/scripts/modules/requirements.txt
new file mode 100644
index 00000000..fbec796f
--- /dev/null
+++ b/.github/scripts/modules/requirements.txt
@@ -0,0 +1 @@
+PyGithub==2.1.1
diff --git a/.github/scripts/modules/version_manipulation.py b/.github/scripts/modules/version_manipulation.py
new file mode 100644
index 00000000..cc346fb5
--- /dev/null
+++ b/.github/scripts/modules/version_manipulation.py
@@ -0,0 +1,141 @@
+import os
+import re
+import requests
+from itertools import groupby
+from github import Github
+from github.GithubException import GithubException
+
+repos_URL = {
+ "stable": "netdata/netdata",
+ "nightly": "netdata/netdata-nightlies"
+}
+
+GH_TOKEN = os.getenv("GH_TOKEN")
+if GH_TOKEN is None or GH_TOKEN == "":
+    print("Token is not defined or empty, continuing with the unauthenticated GitHub API rate limits")
+
+
+def identify_channel(_version):
+ nightly_pattern = r'v(\d+)\.(\d+)\.(\d+)-(\d+)-nightly'
+ stable_pattern = r'v(\d+)\.(\d+)\.(\d+)'
+ if re.match(nightly_pattern, _version):
+ _channel = "nightly"
+ _pattern = nightly_pattern
+ elif re.match(stable_pattern, _version):
+ _channel = "stable"
+ _pattern = stable_pattern
+ else:
+ print("Invalid version format.")
+ return None
+ return _channel, _pattern
+
+
+def padded_version(item):
+    # Build a sortable integer from a version tuple: a fixed '10000' prefix,
+    # followed by every component after the major zero-padded to five digits.
+    # Versions are only ever compared within the same major, so the major
+    # itself is not encoded.
+    key_value = '10000'
+    for value in item[1:]:
+        key_value += f'{value:05}'
+    return int(key_value)
+
+
+def extract_version(title):
+ if identify_channel(title):
+ _, _pattern = identify_channel(title)
+ try:
+ match = re.match(_pattern, title)
+ if match:
+ return tuple(map(int, match.groups()))
+ except Exception as e:
+ print(f"Unexpected error: {e}")
+ return None
+
+
+def get_release_path_and_filename(_version):
+ nightly_pattern = r'v(\d+)\.(\d+)\.(\d+)-(\d+)-nightly'
+ stable_pattern = r'v(\d+)\.(\d+)\.(\d+)'
+ if match := re.match(nightly_pattern, _version):
+ msb = match.group(1)
+ _path = "nightly"
+ _filename = f"v{msb}"
+ elif match := re.match(stable_pattern, _version):
+ msb = match.group(1)
+ _path = "stable"
+ _filename = f"v{msb}"
+ else:
+ print("Invalid version format.")
+ exit(1)
+ return (_path, _filename)
+
+
+def compare_version_with_remote(version):
+ """
+    Compare the given version with the one stored at the remote release path.
+    If the local version is newer, or no remote version exists, return the
+    version (the remote needs updating); otherwise return None.
+    :param version: any version of the agent
+    :return: the version when the remote needs updating, otherwise None
+ """
+
+ prefix = "https://packages.netdata.cloud/releases"
+ path, filename = get_release_path_and_filename(version)
+
+ remote_url = f"{prefix}/{path}/{filename}"
+ response = requests.get(remote_url)
+
+ if response.status_code == 200:
+ version_remote = response.text.rstrip()
+
+ version_components = extract_version(version)
+ remote_version_components = extract_version(version_remote)
+
+ absolute_version = padded_version(version_components)
+ absolute_remote_version = padded_version(remote_version_components)
+
+ if absolute_version > absolute_remote_version:
+ print(f"Version in the remote: {version_remote}, is older than the current: {version}, I need to update")
+ return (version)
+ else:
+            print(f"Version in the remote: {version_remote}, is not older than the current: {version}, no action needed")
+ return (None)
+ else:
+ # Remote version not found
+ print(f"Version in the remote not found, updating the predefined latest path with the version: {version}")
+ return (version)
+
+
+def sort_and_grouby_major_agents_of_channel(channel):
+ """
+    Queries the GitHub API for either the netdata/netdata or netdata/netdata-nightlies
+    repo, fetches all of their releases, and groups them by major release number.
+    Every k,v in the resulting dictionary has the form "vX": [descending ordered list of Agents in this major release].
+    :param channel: "nightly" or "stable"
+    :return: None or dict() with the Agents grouped by major version # (vX)
+ """
+ try:
+ G = Github(GH_TOKEN)
+ repo = G.get_repo(repos_URL[channel])
+ releases = repo.get_releases()
+ except GithubException as e:
+ print(f"GitHub API request failed: {e}")
+ return None
+
+ except Exception as e:
+ print(f"An unexpected error occurred: {e}")
+ return None
+
+ extracted_titles = [extract_version(item.title) for item in releases if
+ extract_version(item.title) is not None]
+    # Sorting on the major version is required before the groupby below
+ extracted_titles.sort(key=lambda x: x[0])
+ # Group titles by major version
+ grouped_by_major = {major: list(group) for major, group in groupby(extracted_titles, key=lambda x: x[0])}
+ sorted_grouped_by_major = {}
+ for key, values in grouped_by_major.items():
+ sorted_values = sorted(values, key=padded_version, reverse=True)
+ sorted_grouped_by_major[key] = sorted_values
+    # Transform them into version-string form
+ if channel == "stable":
+ result_dict = {f"v{key}": [f"v{a}.{b}.{c}" for a, b, c in values] for key, values in
+ sorted_grouped_by_major.items()}
+ else:
+ result_dict = {f"v{key}": [f"v{a}.{b}.{c}-{d}-nightly" for a, b, c, d in values] for key, values in
+ sorted_grouped_by_major.items()}
+ return result_dict
diff --git a/.github/scripts/netdata-pkgcloud-cleanup.py b/.github/scripts/netdata-pkgcloud-cleanup.py
new file mode 100755
index 00000000..f6311e47
--- /dev/null
+++ b/.github/scripts/netdata-pkgcloud-cleanup.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python3
+
+import argparse
+import os
+import sys
+from datetime import datetime
+
+import requests
+from requests.auth import HTTPBasicAuth
+
+
+class PackageCloud:
+ NUM_PACKAGE_MINOR_TO_KEEP = 5
+ NUM_RETENTION_DAYS = 30
+ # number of pages to process. Use '0' to process all
+ MAX_PAGES = 0
+
+ def __init__(self, repo_type, dry_run=True, auth_token=None):
+ self.headers = {
+ "Accept" : "application/json",
+ "Content-Type" : "application/json",
+ }
+ self.dry_run = dry_run
+ self.repo_type = repo_type
+ if repo_type == "stable":
+ repo = "netdata/netdata"
+ elif repo_type == "devel":
+ repo = "netdata/netdata-devel"
+ elif repo_type == "edge":
+ repo = "netdata/netdata-edge"
+ else:
+ print(f"ERROR: unknown repo type '{repo_type}'!\nAccepted values are: stable,devel,edge")
+ sys.exit(1)
+ self.base_url = f"https://packagecloud.io/api/v1/repos/{repo}"
+ self.auth = HTTPBasicAuth(username=auth_token, password='') if auth_token else None
+
+ def get_all_packages(self):
+ page = 1
+ all_pkg_list = []
+ while True:
+ url = f"{self.base_url}/packages.json?page={page}"
+ if page > self.MAX_PAGES and self.MAX_PAGES != 0:
+ break
+ else:
+ pkg_list = requests.get(url, auth=self.auth, headers=self.headers).json()
+ if len(pkg_list) == 0:
+ break
+ else:
+ print(f"Processing page: {page}")
+ for element in pkg_list:
+ if element['name'] != 'netdata-repo' and element['name'] != 'netdata-repo-edge':
+ all_pkg_list.append(element)
+ page += 1
+ return all_pkg_list
+
+ def delete_package(self, destroy_url):
+ if self.dry_run:
+ print(f" - DRY_RUN mode. Not deleting package '{destroy_url}'.")
+ else:
+ print(f" - Deleting package: {destroy_url}")
+ url = f"https://packagecloud.io{destroy_url}"
+            response = requests.delete(url, auth=self.auth, headers=self.headers).json()
+            if not response:
+                print("   Package deleted successfully.")
+            else:
+                print(f"   Failed deleting package: {response}")
+
+ def get_destroy_url(self, pkg_url):
+ url = f"https://packagecloud.io{pkg_url}"
+ response = requests.get(url, auth=self.auth, headers=self.headers)
+ response.raise_for_status()
+ return response.json()['destroy_url']
+
+ def get_packages_for_distro(self, distro, all_pkg_list):
+ distro_pkg_list = [ pkg for pkg in all_pkg_list if pkg['distro_version'] == distro ]
+ return distro_pkg_list
+
+    def get_packages_for_arch(self, arch, all_pkg_list):
+        # The architecture is the 12th component of the package URL path.
+        arch_pkg_list = [ pkg for pkg in all_pkg_list if pkg['package_url'].split('/')[11] == arch ]
+        return arch_pkg_list
+
+ def get_arches(self, pkg_list):
+ arches = list(set([pkg['package_url'].split('/')[11] for pkg in pkg_list ]))
+ return arches
+
+ def get_pkg_list(self, pkg_name, pkg_list):
+ filtered_list = [ pkg for pkg in pkg_list if pkg['name'] == pkg_name ]
+ return filtered_list
+
+ def get_minor_versions(self, all_versions):
+ minor_versions = ['.'.join(version.split('.')[:-1]) for version in all_versions ]
+ minor_versions = list(set(minor_versions))
+ minor_versions.sort()
+ return minor_versions
+
+ def is_pkg_older_than_days(self, pkg, num_days):
+ pkg_create_date = datetime.strptime(pkg['created_at'], '%Y-%m-%dT%H:%M:%S.%fZ')
+ time_difference = datetime.now() - pkg_create_date
+ return time_difference.days > num_days
+
+ def cleanup_repo(self):
+ if self.repo_type == 'stable':
+ self.cleanup_stable_repo()
+ else:
+ self.cleanup_edge_repo()
+
+ def cleanup_edge_repo(self):
+ all_pkg_list = self.get_all_packages()
+ pkgs_to_delete = []
+ pkgs_to_keep = []
+ for package in all_pkg_list:
+ if self.is_pkg_older_than_days(package, self.NUM_RETENTION_DAYS):
+ pkgs_to_delete.append(package)
+ else:
+ pkgs_to_keep.append(package)
+ print(f"Keeping the following packages (newer than {self.NUM_RETENTION_DAYS} days):")
+ for pkg in pkgs_to_keep:
+ print(f" > pkg: {pkg['package_html_url']} / created_at: {pkg['created_at']}")
+ print(f"Deleting the following packages (older than {self.NUM_RETENTION_DAYS} days):")
+ for pkg in pkgs_to_delete:
+ print(f" > pkg: {pkg['package_html_url']} / created_at: {pkg['created_at']}")
+ self.delete_package(pkg['destroy_url'])
+
+ def cleanup_stable_repo(self):
+ all_pkg_list = self.get_all_packages()
+ all_distros = list(set([ pkg['distro_version'] for pkg in all_pkg_list ]))
+ all_distros = sorted(all_distros)
+ print(f"<> Distributions list: {all_distros}")
+
+ for distro in all_distros:
+ print(f">> Processing distro: {distro}")
+ pkg_list_distro = self.get_packages_for_distro(distro, all_pkg_list)
+ arches = self.get_arches(pkg_list_distro)
+ print(f" <> Arch list: {arches}")
+ for arch in arches:
+ print(f" >> Processing arch: {distro} -> {arch}")
+ pkg_list_arch = self.get_packages_for_arch(arch, pkg_list_distro)
+ pkg_names = [pkg['name'] for pkg in pkg_list_arch]
+ pkg_names = list(set(pkg_names))
+ print(f" <> Package names: {pkg_names}")
+ for pkg_name in pkg_names:
+ print(f" >> Processing package: {distro} -> {arch} -> {pkg_name}")
+ pkg_list = self.get_pkg_list(pkg_name, pkg_list_arch)
+ pkg_versions = [pkg['version'] for pkg in pkg_list]
+ pkg_minor_versions = self.get_minor_versions(pkg_versions)
+ pkg_minor_to_keep = pkg_minor_versions[-self.NUM_PACKAGE_MINOR_TO_KEEP:]
+ print(f" <> Minor Package Versions to Keep: {pkg_minor_to_keep}")
+ pkg_minor_to_delete = list(set(pkg_minor_versions) - set(pkg_minor_to_keep))
+ print(f" <> Minor Package Versions to Delete: {pkg_minor_to_delete}")
+ urls_to_keep = [pkg['package_url'] for pkg in pkg_list if '.'.join(pkg['version'].split('.')[:-1]) in pkg_minor_to_keep]
+ urls_to_delete = [pkg['package_url'] for pkg in pkg_list if '.'.join(pkg['version'].split('.')[:-1]) in pkg_minor_to_delete]
+ for pkg_url in urls_to_delete:
+ destroy_url = self.get_destroy_url(pkg_url)
+ self.delete_package(destroy_url)
+
+
+def configure():
+ parser = argparse.ArgumentParser()
+    parser.add_argument('--repo-type', '-r', required=True,
+                        help='Repository type to clean up (stable, devel or edge)')
+ parser.add_argument('--dry-run', '-d', action='store_true',
+ help='Dry-run Mode')
+ args = parser.parse_args()
+    try:
+        token = os.environ['PKGCLOUD_TOKEN']
+    except KeyError:
+        print("FATAL: 'PKGCLOUD_TOKEN' environment variable is not set!", file=sys.stderr)
+        sys.exit(1)
+ conf = {
+ 'repo_type': args.repo_type,
+ 'dry_run': args.dry_run,
+ 'token': token
+ }
+ return conf
+
+
+def main():
+ config = configure()
+ pkg_cloud = PackageCloud(config['repo_type'], config['dry_run'], config['token'])
+ pkg_cloud.cleanup_repo()
+
+
+if __name__ == "__main__":
+ main()
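
A cautious first run would use dry-run mode, which reports what would be deleted without touching the repository (the token value is illustrative):

    PKGCLOUD_TOKEN="<api-token>" .github/scripts/netdata-pkgcloud-cleanup.py --repo-type edge --dry-run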
diff --git a/.github/scripts/package-upload.sh b/.github/scripts/package-upload.sh
new file mode 100755
index 00000000..13d63b4a
--- /dev/null
+++ b/.github/scripts/package-upload.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+
+set -e
+
+host="packages.netdata.cloud"
+user="netdatabot"
+
+distro="${1}"
+arch="${2}"
+format="${3}"
+repo="${4}"
+
+staging="${TMPDIR:-/tmp}/package-staging"
+prefix="/home/netdatabot/incoming/${repo}/"
+
+packages="$(find artifacts -name "*.${format}")"
+
+mkdir -p "${staging}"
+
+case "${format}" in
+ deb)
+ src="${staging}/${distro}"
+ mkdir -p "${src}"
+
+ for pkg in ${packages}; do
+ cp "${pkg}" "${src}"
+ done
+ ;;
+ rpm)
+ src="${staging}/${distro}/${arch}/"
+ mkdir -p "${src}"
+
+ for pkg in ${packages}; do
+ cp "${pkg}" "${src}"
+ done
+ ;;
+ *)
+ echo "Unrecognized package format ${format}."
+ exit 1
+ ;;
+esac
+
+rsync -vrptO "${staging}/" "${user}@${host}:${prefix}"
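
A hedged invocation sketch (argument values are illustrative; the final rsync step assumes SSH access to packages.netdata.cloud as netdatabot):

    .github/scripts/package-upload.sh ubuntu/focal amd64 deb netdata/netdata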
diff --git a/.github/scripts/package_cloud_wrapper.sh b/.github/scripts/package_cloud_wrapper.sh
new file mode 100755
index 00000000..7640ef48
--- /dev/null
+++ b/.github/scripts/package_cloud_wrapper.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+#
+# This is a tool to help remove packages from packagecloud.io
+# It utilizes the package_cloud utility provided by packagecloud.io
+#
+# Depends on:
+# 1) package cloud gem (detects absence and installs it)
+#
+# Requires:
+# 1) PKG_CLOUD_TOKEN variable exported
+# 2) To properly install package_cloud when not found, it requires: ruby gcc gcc-c++ ruby-devel
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
+#shellcheck disable=SC2068,SC2145
+set -e
+PKG_CLOUD_CONFIG="$HOME/.package_cloud_configuration.cfg"
+
+# If we are not in netdata git repo, at the top level directory, fail
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
+ echo "Run as .github/scripts/$(basename "$0") from top level directory of netdata git repository"
+ echo "Docker build process aborted"
+ exit 1
+fi
+
+# Install dependency if not there
+if ! command -v package_cloud > /dev/null 2>&1; then
+ echo "No package cloud gem found, installing"
+  sudo gem install -V package_cloud || (echo "Package cloud installation failed. You might want to check that the required dependencies (ruby gcc gcc-c++ ruby-devel) are installed" && exit 1)
+else
+ echo "Found package_cloud gem, continuing"
+fi
+
+# Check for required token and prepare config
+if [ -z "${PKG_CLOUD_TOKEN}" ]; then
+ echo "Please set PKG_CLOUD_TOKEN to be able to use ${0}"
+ exit 1
+fi
+echo "{\"url\":\"https://packagecloud.io\",\"token\":\"${PKG_CLOUD_TOKEN}\"}" > "${PKG_CLOUD_CONFIG}"
+
+echo "Executing package_cloud with config ${PKG_CLOUD_CONFIG} and parameters $@"
+package_cloud $@ --config="${PKG_CLOUD_CONFIG}"
+
+rm -rf "${PKG_CLOUD_CONFIG}"
+echo "Done!"
diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh
new file mode 100755
index 00000000..35767bf2
--- /dev/null
+++ b/.github/scripts/pkg-test.sh
@@ -0,0 +1,162 @@
+#!/bin/sh
+
+install_debian_like() {
+ # This is needed to ensure package installs don't prompt for any user input.
+ export DEBIAN_FRONTEND=noninteractive
+
+ if apt-cache show netcat 2>&1 | grep -q "No packages found"; then
+ netcat="netcat-traditional"
+ else
+ netcat="netcat"
+ fi
+
+ apt-get update
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ # shellcheck disable=SC2046
+ apt-get install -y $(find /netdata/artifacts -type f -name 'netdata*.deb' \
+! -name '*dbgsym*' ! -name '*cups*' ! -name '*freeipmi*') || exit 3
+
+ # Install testing tools
+ apt-get install -y --no-install-recommends curl "${netcat}" jq || exit 1
+}
+
+install_fedora_like() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ "${PKGMGR}" install -y curl nc jq || exit 1
+}
+
+install_centos() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+  # Install EPEL (needed for `jq`)
+ "${PKGMGR}" install -y epel-release || exit 1
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ # shellcheck disable=SC2086
+ "${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+}
+
+install_amazon_linux() {
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ # shellcheck disable=SC2086
+ "${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+}
+
+install_suse_like() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ zypper install -y --allow-downgrade --allow-unsigned-rpm /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ zypper install -y --allow-downgrade --no-recommends curl netcat-openbsd jq || exit 1
+}
+
+dump_log() {
+ cat ./netdata.log
+}
+
+wait_for() {
+ host="${1}"
+ port="${2}"
+ name="${3}"
+ timeout="30"
+
+ if command -v nc > /dev/null ; then
+ netcat="nc"
+ elif command -v netcat > /dev/null ; then
+ netcat="netcat"
+ else
+ printf "Unable to find a usable netcat command.\n"
+ return 1
+ fi
+
+ printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
+
+ sleep 30
+
+ i=0
+ while ! ${netcat} -z "${host}" "${port}"; do
+ sleep 1
+ if [ "$i" -gt "$timeout" ]; then
+ printf "Timed out!\n"
+ return 1
+ fi
+ i="$((i + 1))"
+ done
+ printf "OK\n"
+}
+
+case "${DISTRO}" in
+ debian | ubuntu)
+ install_debian_like
+ ;;
+ fedora | oraclelinux)
+ install_fedora_like
+ ;;
+ centos| centos-stream | rockylinux | almalinux)
+ install_centos
+ ;;
+ amazonlinux)
+ install_amazon_linux
+ ;;
+ opensuse)
+ install_suse_like
+ ;;
+ *)
+ printf "ERROR: unsupported distro: %s_%s\n" "${DISTRO}" "${DISTRO_VERSION}"
+ exit 1
+ ;;
+esac
+
+trap dump_log EXIT
+
+/usr/sbin/netdata -D > ./netdata.log 2>&1 &
+
+wait_for localhost 19999 netdata || exit 1
+
+curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+
+cat ./response
+
+jq '.version' ./response || exit 1
+
+trap - EXIT
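
This script is meant to run inside a container of the distribution under test, with the repository (including the built packages under artifacts/) mounted at /netdata and DISTRO/DISTRO_VERSION set; a hedged sketch:

    docker run --rm -e DISTRO=debian -e DISTRO_VERSION=12 \
      -v "$PWD:/netdata" debian:12 /netdata/.github/scripts/pkg-test.sh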
diff --git a/.github/scripts/platform-impending-eol.py b/.github/scripts/platform-impending-eol.py
new file mode 100755
index 00000000..c57e5edd
--- /dev/null
+++ b/.github/scripts/platform-impending-eol.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+'''Check if a given distro is going to be EOL soon.
+
+ This queries the public API of https://endoflife.date to fetch EOL dates.
+
+   'soon' is defined by LEAD_DAYS, currently 30 days.'''
+
+import datetime
+import json
+import sys
+import urllib.request
+
+URL_BASE = 'https://endoflife.date/api'
+NOW = datetime.date.today()
+LEAD_DAYS = datetime.timedelta(days=30)
+
+DISTRO = sys.argv[1]
+RELEASE = sys.argv[2]
+
+EXIT_NOT_IMPENDING = 0
+EXIT_IMPENDING = 1
+EXIT_NO_DATA = 2
+EXIT_FAILURE = 3
+
+try:
+ with urllib.request.urlopen(f'{ URL_BASE }/{ DISTRO }/{ RELEASE }.json') as response:
+ match response.status:
+ case 200:
+ data = json.load(response)
+ case _:
+ print(
+ f'Failed to retrieve data for { DISTRO } { RELEASE } ' +
+ f'(status: { response.status }).',
+ file=sys.stderr
+ )
+ sys.exit(EXIT_FAILURE)
+except urllib.error.HTTPError as e:
+ match e.code:
+ case 404:
+ print(f'No data available for { DISTRO } { RELEASE }.', file=sys.stderr)
+ sys.exit(EXIT_NO_DATA)
+ case _:
+ print(
+ f'Failed to retrieve data for { DISTRO } { RELEASE } ' +
+ f'(status: { e.code }).',
+ file=sys.stderr
+ )
+ sys.exit(EXIT_FAILURE)
+
+eol = datetime.date.fromisoformat(data['eol'])
+
+# A platform whose EOL date has already passed also counts as impending.
+offset = eol - NOW
+
+if offset <= LEAD_DAYS:
+ print(data['eol'])
+ sys.exit(EXIT_IMPENDING)
+else:
+ sys.exit(EXIT_NOT_IMPENDING)
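
The exit code carries the result, so a caller might use it like this (distro and release are illustrative):

    python3 .github/scripts/platform-impending-eol.py ubuntu 23.04
    case $? in
      0) echo "not approaching EOL" ;;
      1) echo "EOL within 30 days (date printed above)" ;;
      2) echo "no data for this platform" ;;
      *) echo "lookup failed" ;;
    esac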
diff --git a/.github/scripts/prepare-release-base.sh b/.github/scripts/prepare-release-base.sh
new file mode 100755
index 00000000..06a2da16
--- /dev/null
+++ b/.github/scripts/prepare-release-base.sh
@@ -0,0 +1,180 @@
+#!/bin/sh
+
+set -e
+
+REPO="${1}"
+EVENT_NAME="${2}"
+EVENT_TYPE="${3}"
+EVENT_VERSION="${4}"
+RELEASE_TEST="${5}"
+
+##############################################################
+# Version validation functions
+
+check_version_format() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "::error::The supplied version (${EVENT_VERSION}) is not a valid version string."
+ return 1
+ fi
+}
+
+patch_is_zero() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.[[:digit:]]+\.0$'; then
+ echo "::error::The patch number for a ${EVENT_TYPE} build must be 0."
+ return 1
+ fi
+}
+
+minor_is_zero() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.0'; then
+ echo "::error::The minor version number for a ${EVENT_TYPE} build must be 0."
+ return 1
+ fi
+}
+
+major_matches() {
+ current_major="$(cut -f 1 -d '-' packaging/version | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+ target_major="$(echo "${EVENT_VERSION}" | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+
+ if [ "${target_major}" != "${current_major}" ]; then
+ echo "::error::Major version mismatch, expected ${current_major} but got ${target_major}."
+ return 1
+ fi
+}
+
+minor_matches() {
+ current_minor="$(cut -f 1 -d '-' packaging/version | cut -f 2 -d '.')"
+ target_minor="$(echo "${EVENT_VERSION}" | cut -f 2 -d '.')"
+
+ if [ "${target_minor}" != "${current_minor}" ]; then
+ echo "::error::Minor version mismatch, expected ${current_minor} but got ${target_minor}."
+ return 1
+ fi
+}
+
+check_for_existing_tag() {
+ if git tag | grep -qE "^${EVENT_VERSION}$"; then
+ echo "::error::A tag for version ${EVENT_VERSION} already exists."
+ return 1
+ fi
+}
+
+check_newer_major_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+check_newer_minor_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 2 -d '.')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 2 -d '.')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+check_newer_patch_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 3 -d '.')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 3 -d '.')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+##############################################################
+# Core logic
+
+git config user.name "netdatabot"
+git config user.email "bot@netdata.cloud"
+
+if [ "${REPO}" != "netdata/netdata" ] && [ -z "${RELEASE_TEST}" ]; then
+ echo "::notice::Not running in the netdata/netdata repository, not queueing a release build."
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+elif [ "${EVENT_NAME}" = 'schedule' ] || [ "${EVENT_TYPE}" = 'nightly' ]; then
+ echo "::notice::Preparing a nightly release build."
+ LAST_TAG=$(git describe --abbrev=0 --tags)
+ COMMITS_SINCE_RELEASE=$(git rev-list "${LAST_TAG}"..HEAD --count)
+ NEW_VERSION="${LAST_TAG}-$((COMMITS_SINCE_RELEASE + 1))-nightly"
+ LAST_VERSION_COMMIT="$(git rev-list -1 HEAD packaging/version)"
+ HEAD_COMMIT="$(git rev-parse HEAD)"
+ if [ "${EVENT_NAME}" = 'schedule' ] && [ "${LAST_VERSION_COMMIT}" = "${HEAD_COMMIT}" ] && grep -qE '.*-nightly$' packaging/version; then
+ echo "::notice::No commits since last nightly build, not publishing a new nightly build."
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ else
+ echo "${NEW_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo "message=Update changelog and version for nightly build: ${NEW_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=master" >> "${GITHUB_OUTPUT}"
+ echo "type=nightly" >> "${GITHUB_OUTPUT}"
+ echo "branch=master" >> "${GITHUB_OUTPUT}"
+ echo "version=nightly" >> "${GITHUB_OUTPUT}"
+ fi
+elif [ "${EVENT_TYPE}" = 'patch' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a patch release build."
+ check_version_format || exit 1
+ check_for_existing_tag || exit 1
+ branch_name="$(echo "${EVENT_VERSION}" | cut -f 1-2 -d '.')"
+ if ! git checkout "${branch_name}"; then
+ echo "::error::Could not find a branch for the ${branch_name}.x release series."
+ exit 1
+ fi
+ minor_matches || exit 1
+ major_matches || exit 1
+ check_newer_patch_version || exit 1
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo "message=Patch release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}"
+ echo "type=release" >> "${GITHUB_OUTPUT}"
+ echo "branch=${branch_name}" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+elif [ "${EVENT_TYPE}" = 'minor' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a minor release build."
+ check_version_format || exit 1
+ patch_is_zero || exit 1
+ major_matches || exit 1
+ check_newer_minor_version || exit 1
+ check_for_existing_tag || exit 1
+ branch_name="$(echo "${EVENT_VERSION}" | cut -f 1-2 -d '.')"
+ if [ -n "$(git branch --list "${branch_name}")" ]; then
+ echo "::error::A branch named ${branch_name} already exists in the repository."
+ exit 1
+ fi
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo "message=Minor release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}"
+ echo "type=release" >> "${GITHUB_OUTPUT}"
+ echo "branch=master" >> "${GITHUB_OUTPUT}"
+ echo "new-branch=${branch_name}" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+elif [ "${EVENT_TYPE}" = 'major' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a major release build."
+ check_version_format || exit 1
+ minor_is_zero || exit 1
+ patch_is_zero || exit 1
+ check_newer_major_version || exit 1
+ check_for_existing_tag || exit 1
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+  echo "message=Major release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}"
+ echo "type=release" >> "${GITHUB_OUTPUT}"
+ echo "branch=master" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+else
+ echo '::error::Unrecognized release type or invalid version.'
+ exit 1
+fi
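
A sketch of a manual invocation outside CI (version and output path are illustrative; a non-empty RELEASE_TEST argument bypasses the netdata/netdata repository check, and note the script writes packaging/version in the working tree):

    GITHUB_OUTPUT=/tmp/gh-output .github/scripts/prepare-release-base.sh \
      netdata/netdata workflow_dispatch minor v1.45.0 1
    cat /tmp/gh-output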
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
new file mode 100755
index 00000000..1224d8f6
--- /dev/null
+++ b/.github/scripts/run-updater-check.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+echo ">>> Installing CI support packages..."
+/netdata/.github/scripts/ci-support-pkgs.sh
+mkdir -p /etc/cron.daily # Needed to make auto-update checking work correctly on some platforms.
+echo ">>> Installing Netdata..."
+/netdata/packaging/installer/kickstart.sh --dont-wait --build-only --disable-telemetry || exit 1
+echo "::group::>>> Pre-Update Environment File Contents"
+cat /etc/netdata/.environment
+echo "::endgroup::"
+echo "::group::>>> Pre-Update Netdata Build Info"
+netdata -W buildinfo
+echo "::endgroup::"
+echo ">>> Updating Netdata..."
+export NETDATA_BASE_URL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
+timeout 3600 /netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update
+
+case "$?" in
+ 124) echo "!!! Updater timed out." ; exit 1 ;;
+ 0) ;;
+ *) echo "!!! Updater failed." ; exit 1 ;;
+esac
+echo "::group::>>> Post-Update Environment File Contents"
+cat /etc/netdata/.environment
+echo "::endgroup::"
+echo "::group::>>> Post-Update Netdata Build Info"
+netdata -W buildinfo
+echo "::endgroup::"
+echo ">>> Checking if update was successful..."
+/netdata/.github/scripts/check-updater.sh || exit 1
diff --git a/.github/scripts/run_install_with_dist_file.sh b/.github/scripts/run_install_with_dist_file.sh
new file mode 100755
index 00000000..74652efd
--- /dev/null
+++ b/.github/scripts/run_install_with_dist_file.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+#
+# This script evaluates a netdata installation built from the source tarball produced by make dist
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud>
+
+set -e
+
+if [ $# -ne 1 ]; then
+ printf >&2 "Usage: %s <dist_file>\n" "$(basename "$0")"
+ exit 1
+fi
+
+distfile="${1}"
+shift
+
+printf >&2 "Opening dist archive %s ... " "${distfile}"
+tar -xovf "${distfile}"
+distdir="$(echo "${distfile}" | rev | cut -d. -f3- | rev)"
+if [ ! -d "${distdir}" ]; then
+  printf >&2 "ERROR: %s is not a directory\n" "${distdir}"
+  exit 2
+fi
+cp -a packaging/installer/install-required-packages.sh "${distdir}/install-required-packages.sh"
+
+printf >&2 "Entering %s and starting docker run ..." "${distdir}"
+
+pushd "${distdir}" || exit 1
+docker run \
+ -e DISABLE_TELEMETRY=1 \
+ -v "${PWD}:/netdata" \
+ -w /netdata \
+ "ubuntu:latest" \
+  /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt-get install -y wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install-prefix /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"
+popd || exit 1
+
+echo "All Done!"
diff --git a/.github/scripts/upload-new-version-tags.sh b/.github/scripts/upload-new-version-tags.sh
new file mode 100755
index 00000000..a9b0cd30
--- /dev/null
+++ b/.github/scripts/upload-new-version-tags.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -e
+
+host="packages.netdata.cloud"
+user="netdatabot"
+
+prefix="/var/www/html/releases"
+staging="${TMPDIR:-/tmp}/staging-new-releases"
+
+mkdir -p "${staging}"
+
+for source_dir in "${staging}"/*; do
+ if [ -d "${source_dir}" ]; then
+ base_name=$(basename "${source_dir}")
+ scp -r "${source_dir}"/* "${user}@${host}:${prefix}/${base_name}"
+ fi
+done
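
This script only uploads whatever has already been staged under ${TMPDIR:-/tmp}/staging-new-releases by check_latest_versions.py, so a combined sketch looks like this (assumes SSH access to packages.netdata.cloud as netdatabot):

    (cd .github/scripts && python3 check_latest_versions_per_channel.py stable)
    .github/scripts/upload-new-version-tags.sh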