author Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 02:57:58 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-19 02:57:58 +0000
commit be1c7e50e1e8809ea56f2c9d472eccd8ffd73a97 (patch)
tree 9754ff1ca740f6346cf8483ec915d4054bc5da2d /.github
parent Initial commit. (diff)
download netdata-be1c7e50e1e8809ea56f2c9d472eccd8ffd73a97.tar.xz
         netdata-be1c7e50e1e8809ea56f2c9d472eccd8ffd73a97.zip
Adding upstream version 1.44.3. (tag: upstream/1.44.3, branch: upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '.github')
-rw-r--r--  .github/CODEOWNERS                                    |   56
-rw-r--r--  .github/ISSUE_TEMPLATE.md                             |   15
-rw-r--r--  .github/ISSUE_TEMPLATE/BUG_REPORT.yml                 |  101
-rw-r--r--  .github/ISSUE_TEMPLATE/FEAT_REQUEST.yml               |   62
-rw-r--r--  .github/ISSUE_TEMPLATE/config.yml                     |   12
-rw-r--r--  .github/PULL_REQUEST_TEMPLATE.md                      |   27
-rw-r--r--  .github/codeql/c-cpp-config.yml                       |    2
-rw-r--r--  .github/codeql/python-config.yml                      |   10
-rw-r--r--  .github/data/distros.yml                              |  353
-rw-r--r--  .github/dependabot.yml                                |    9
-rw-r--r--  .github/dockerfiles/Dockerfile.build_test             |   18
-rw-r--r--  .github/dockerfiles/Dockerfile.clang                  |   18
-rw-r--r--  .github/labeler.yml                                   |  158
-rwxr-xr-x  .github/scripts/build-artifacts.sh                    |   82
-rwxr-xr-x  .github/scripts/build-dist.sh                         |   71
-rwxr-xr-x  .github/scripts/build-static.sh                       |   61
-rwxr-xr-x  .github/scripts/bump-packaging-version.sh             |    6
-rwxr-xr-x  .github/scripts/check-updater.sh                      |   49
-rwxr-xr-x  .github/scripts/check_latest_versions.py              |   33
-rw-r--r--  .github/scripts/check_latest_versions_per_channel.py  |    9
-rwxr-xr-x  .github/scripts/ci-support-pkgs.sh                    |   18
-rwxr-xr-x  .github/scripts/docker-test.sh                        |   64
-rw-r--r--  .github/scripts/functions.sh                          |   69
-rwxr-xr-x  .github/scripts/gen-docker-tags.py                    |   19
-rwxr-xr-x  .github/scripts/gen-matrix-build.py                   |   34
-rwxr-xr-x  .github/scripts/gen-matrix-eol-check.py               |   29
-rwxr-xr-x  .github/scripts/gen-matrix-packaging.py               |   36
-rwxr-xr-x  .github/scripts/gen-matrix-repoconfig.py              |   27
-rwxr-xr-x  .github/scripts/get-static-cache-key.sh               |   16
-rw-r--r--  .github/scripts/modules/github_actions.py             |   27
-rw-r--r--  .github/scripts/modules/requirements.txt              |    1
-rw-r--r--  .github/scripts/modules/version_manipulation.py       |  141
-rwxr-xr-x  .github/scripts/netdata-pkgcloud-cleanup.py           |  190
-rwxr-xr-x  .github/scripts/package-upload.sh                     |   43
-rwxr-xr-x  .github/scripts/package_cloud_wrapper.sh              |   48
-rwxr-xr-x  .github/scripts/pkg-test.sh                           |  162
-rwxr-xr-x  .github/scripts/platform-impending-eol.py             |   58
-rwxr-xr-x  .github/scripts/prepare-release-base.sh               |  180
-rwxr-xr-x  .github/scripts/run-updater-check.sh                  |   30
-rwxr-xr-x  .github/scripts/run_install_with_dist_file.sh         |   39
-rwxr-xr-x  .github/scripts/upload-new-version-tags.sh            |   18
-rw-r--r--  .github/stale.yml                                     |   18
-rw-r--r--  .github/workflows/add-to-project.yml                  |   26
-rw-r--r--  .github/workflows/build.yml                           | 1004
-rw-r--r--  .github/workflows/checks.yml                          |  133
-rw-r--r--  .github/workflows/cloud_regression.yml                |   69
-rw-r--r--  .github/workflows/codeql.yml                          |  118
-rw-r--r--  .github/workflows/coverity.yml                        |   63
-rw-r--r--  .github/workflows/dashboard-pr.yml                    |   54
-rw-r--r--  .github/workflows/docker.yml                          |  413
-rw-r--r--  .github/workflows/docs.yml                            |   29
-rw-r--r--  .github/workflows/generate-integrations.yml           |  107
-rw-r--r--  .github/workflows/labeler.yml                         |   21
-rw-r--r--  .github/workflows/monitor-releases.yml                |   72
-rw-r--r--  .github/workflows/packagecloud.yml                    |   36
-rw-r--r--  .github/workflows/packaging.yml                       |  313
-rw-r--r--  .github/workflows/platform-eol-check.yml              |  153
-rw-r--r--  .github/workflows/release.yml                         |  214
-rw-r--r--  .github/workflows/repoconfig-packages.yml             |  155
-rw-r--r--  .github/workflows/review.yml                          |  254
-rw-r--r--  .github/workflows/tests.yml                           |   41
-rw-r--r--  .github/workflows/trigger-learn-update.yml            |   37
62 files changed, 5731 insertions, 0 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..7f368ceb
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,56 @@
+# Files which shouldn't be changed manually are owned by @netdatabot.
+# This way we prevent modifications which will be overwritten by automation.
+
+# Global (default) code owner
+* @Ferroin
+
+# Ownership by directory structure
+.github/ @Ferroin @tkatsoulas
+aclk/ @stelfrag @underhood
+build/ @Ferroin @tkatsoulas
+contrib/debian @Ferroin @tkatsoulas
+collectors/ @thiagoftsm
+collectors/ebpf.plugin/ @thiagoftsm
+collectors/charts.d.plugin/ @ilyam8 @Ferroin
+collectors/freebsd.plugin/ @thiagoftsm
+collectors/macos.plugin/ @thiagoftsm
+collectors/python.d.plugin/ @ilyam8
+collectors/cups.plugin/ @thiagoftsm
+exporting/ @thiagoftsm
+daemon/ @thiagoftsm @vkalintiris
+database/ @thiagoftsm @vkalintiris
+docs/ @tkatsoulas @Ancairon
+health/ @thiagoftsm @vkalintiris @MrZammler
+health/health.d/ @thiagoftsm @MrZammler
+health/notifications/ @Ferroin @thiagoftsm @MrZammler
+ml/ @vkalintiris
+libnetdata/ @thiagoftsm @vkalintiris
+packaging/ @Ferroin @tkatsoulas
+registry/ @novykh
+streaming/ @thiagoftsm
+system/ @Ferroin @tkatsoulas
+tests/ @Ferroin @vkalintiris @tkatsoulas
+web/ @thiagoftsm @vkalintiris
+web/gui/ @novykh
+logsmanagement/ @Dim-P @thiagoftsm
+
+# Ownership by filetype (overwrites ownership by directory)
+*.am @Ferroin @tkatsoulas
+*.md @tkatsoulas @Ancairon
+*.mdx @tkatsoulas @Ancairon
+Dockerfile* @Ferroin @tkatsoulas
+
+# Ownership of specific files
+.gitignore @Ferroin @tkatsoulas @vkalintiris
+.eslintrc @Ferroin @tkatsoulas
+.eslintignore @Ferroin @tkatsoulas
+.csslintrc @Ferroin @tkatsoulas
+.codeclimate.yml @Ferroin @tkatsoulas
+.codacy.yml @Ferroin @tkatsoulas
+.yamllint.yml @Ferroin @tkatsoulas
+netdata.spec.in @Ferroin @tkatsoulas
+netdata-installer.sh @Ferroin @tkatsoulas
+packaging/version @netdatabot @Ferroin @tkatsoulas
+
+LICENSE.md @Ferroin @tkatsoulas @vkalintiris
+CHANGELOG.md @netdatabot @Ferroin @tkatsoulas
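
A note on precedence in the CODEOWNERS rules above: GitHub applies the last matching pattern, so the filetype and specific-file sections really do override the directory rules that precede them. For example, a PR touching only collectors/python.d.plugin/README.md is routed to the *.md owners (@tkatsoulas and @Ancairon) rather than @ilyam8, because the *.md rule comes later in the file.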
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 00000000..bd939bab
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,15 @@
+---
+about: General issue template
+labels: "needs triage", "no changelog"
+---
+
+<!---
+This is a generic issue template. We usually prefer contributors to use one
+of the 3 other specific issue templates (bug report, feature request, question)
+so that our automation can classify the issue and you can get a response faster.
+However, if your issue doesn't fall into any of those 3 categories, use this
+generic template.
+--->
+
+#### Summary
+
diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
new file mode 100644
index 00000000..b63daba8
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
@@ -0,0 +1,101 @@
+name: "Netdata Agent: Bug report"
+description: "Submit a report and help us improve our free and open-source Netdata Agent"
+title: "[Bug]: "
+labels: ["bug", "needs triage"]
+body:
+ - type: markdown
+ attributes:
+ value: "### Thank you for contributing to our project!"
+ - type: markdown
+ attributes:
+ value: |
+ <img src="https://img.shields.io/github/v/release/netdata/netdata.svg?label=latest%20stable"> <img src="https://img.shields.io/badge/dynamic/xml?url=https://storage.googleapis.com/netdata-nightlies/latest-version.txt&label=latest%20nightly&query=/text()">
+ - type: markdown
+ attributes:
+ value: |
+ Before submitting, we'd appreciate it if you:
+ - Verify that your issue is not already reported on GitHub.
+ - Check if your Netdata Agent is up to date. If not, we recommend that you [update](https://learn.netdata.cloud/docs/agent/packaging/installer/update) first.
+ - type: textarea
+ id: bug-description
+ attributes:
+ label: Bug description
+ description: Provide a description of the bug you're experiencing.
+ validations:
+ required: true
+ - type: textarea
+ id: expected-behavior
+ attributes:
+ label: Expected behavior
+ description: Describe what you expected to happen.
+ validations:
+ required: true
+ - type: textarea
+ id: reproduce
+ attributes:
+ label: Steps to reproduce
+ description: Describe the steps to reproduce the bug.
+ value: |
+ 1.
+ 2.
+ 3.
+ ...
+ validations:
+ required: true
+ - type: dropdown
+ id: install-method
+ attributes:
+ label: Installation method
+ description: |
+        Select the [installation method](https://learn.netdata.cloud/docs/agent/packaging/installer#alternative-methods) you used.
+ Describe the method in the "Additional info" section if you chose "other".
+ options:
+ - "kickstart.sh"
+ - "docker"
+ - "helmchart (kubernetes)"
+ - "manual setup of official DEB/RPM packages"
+ - "from git"
+ - "from source"
+ - "other"
+ validations:
+ required: true
+ - type: textarea
+ id: system-info
+ attributes:
+ label: System info
+ description: |
+ Provide information about your system. To get this information, execute one of the following commands based on your OS:
+ ```shell
+ # Linux
+ uname -a; grep -HvE "^#|URL" /etc/*release
+ # BSD
+ uname -a; uname -K
+ # macOS
+ uname -a; sw_vers
+ ```
+ > NOTE: This will be automatically formatted into code, so no need for backticks.
+ render: shell
+ validations:
+ required: true
+ - type: textarea
+    id: netdata-buildinfo
+ attributes:
+ label: Netdata build info
+ description: |
+ Provide Netdata Agent version and build info. To get this information, execute:
+ ```shell
+ netdata -W buildinfo
+        # If you get "netdata: command not found", try this instead (requires a running Netdata):
+ $(ps aux | grep -m1 -E -o "[a-zA-Z/]+netdata ") -W buildinfo
+ ```
+ > NOTE: This will be automatically formatted into code, so no need for backticks.
+ render: shell
+ validations:
+ required: true
+ - type: textarea
+ id: additional-info
+ attributes:
+ label: Additional info
+ description: Any additional information related to the issue (ex. logs).
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml b/.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml
new file mode 100644
index 00000000..11d77a04
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml
@@ -0,0 +1,62 @@
+name: "Netdata Agent: Feature request"
+description: "Submit a feature request and help us improve our free and open-source Netdata Agent"
+title: "[Feat]: "
+labels: ["feature request", "needs triage"]
+body:
+ - type: markdown
+ attributes:
+ value: "### Thank you for contributing to our project!"
+ - type: markdown
+ attributes:
+ value: |
+ Submit a feature request and help us improve our free and open-source Netdata Agent.
+ - type: textarea
+ id: problem
+ attributes:
+ label: Problem
+ description: |
+ Is your feature request intended to solve a problem? If so, provide a description of the problem.
+ validations:
+ required: false
+ - type: textarea
+ id: description
+ attributes:
+ label: Description
+ description: |
+ Provide a clear and concise description of the feature you want or need.
+ validations:
+ required: true
+ - type: dropdown
+ id: importance
+ attributes:
+ label: Importance
+ description: |
+        Help us understand the importance of your feature request. Choose "blocker" if the lack of this feature stops you from using the Netdata Agent.
+ options:
+ - "nice to have"
+ - "really want"
+ - "must have"
+ - "blocker"
+ validations:
+ required: true
+ - type: textarea
+ id: value-proposition
+ attributes:
+ label: Value proposition
+ description: |
+        Help us understand why we need this feature. Describe the benefits users will receive if we implement this feature request.
+ value: |
+ 1.
+ 2.
+ 3.
+ ...
+ validations:
+ required: true
+ - type: textarea
+ id: proposed-implementation
+ attributes:
+ label: Proposed implementation
+ description: |
+ Share your proposal if you have any ideas on how this feature can be implemented.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 00000000..79678d7b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,12 @@
+# Ref: https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
+blank_issues_enabled: false
+contact_links:
+ - name: "Netdata Agent: Question"
+ url: https://github.com/netdata/netdata/discussions/new?category=q-a
+ about: Ask a question about Netdata Agent
+ - name: "Netdata Cloud"
+ url: https://github.com/netdata/netdata-cloud/issues/new/choose
+ about: Create a report to help us improve our web application
+ - name: Community
+ url: https://netdata.cloud/community
+ about: If you don't know where to start, visit our community page!
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 00000000..829d8e64
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,27 @@
+##### Summary
+<!--
+Describe the change in the summary section, including the rationale and design decisions.
+Include "Fixes #nnn" if you are fixing an existing issue.
+-->
+
+##### Test Plan
+
+<!--
+Provide enough detail so that your reviewer can understand which test cases you
+have covered, and recreate them if necessary. If our CI covers sufficient tests, then state which tests cover the change.
+-->
+
+##### Additional Information
+<!-- This is usually used to help others understand your
+motivation behind this change. A step-by-step reproduction of the problem is
+helpful if there is no related issue. -->
+
+<details> <summary>For users: How does this change affect me?</summary>
+ <!--
+Describe how the PR affects users:
+- Which area of Netdata is affected by the change?
+- Can they see the change, or is it under the hood? If they can see it, where?
+- How is the user impacted by the change?
+- What benefits does the change bring?
+-->
+</details>
diff --git a/.github/codeql/c-cpp-config.yml b/.github/codeql/c-cpp-config.yml
new file mode 100644
index 00000000..cd7c2401
--- /dev/null
+++ b/.github/codeql/c-cpp-config.yml
@@ -0,0 +1,2 @@
+paths-ignore:
+ - httpd/h2o
diff --git a/.github/codeql/python-config.yml b/.github/codeql/python-config.yml
new file mode 100644
index 00000000..c82727ce
--- /dev/null
+++ b/.github/codeql/python-config.yml
@@ -0,0 +1,10 @@
+paths-ignore:
+ - .github
+ - build_external/
+ - ml/dlib
+ - ml/json
+ - tests/api
+ - web/gui
+ - collectors/python.d.plugin/python_modules/pyyaml*
+ - collectors/python.d.plugin/python_modules/third_party
+ - collectors/python.d.plugin/python_modules/urllib3
diff --git a/.github/data/distros.yml b/.github/data/distros.yml
new file mode 100644
index 00000000..9175a5c7
--- /dev/null
+++ b/.github/data/distros.yml
@@ -0,0 +1,353 @@
+# This defines the full set of distros we run CI on.
+---
+platform_map: # map packaging architectures to docker platforms
+ aarch64: linux/arm64/v8
+ amd64: linux/amd64
+ arm64: linux/arm64/v8
+ armhf: linux/arm/v7
+ armhfp: linux/arm/v7
+ i386: linux/i386
+ x86_64: linux/amd64
+arch_order: # sort order for per-architecture jobs in CI
+ - amd64
+ - x86_64
+ - i386
+ - armhf
+ - armhfp
+ - arm64
+ - aarch64
+include:
+ - &alpine
+ distro: alpine
+ version: edge
+ support_type: Community
+ notes: ''
+ eol_check: false
+ env_prep: |
+ apk add -U bash
+ jsonc_removal: |
+ apk del json-c-dev
+ test:
+ ebpf-core: true
+ - <<: *alpine
+ version: "3.18"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ - <<: *alpine
+ version: "3.17"
+ support_type: Intermediate
+ notes: ''
+ eol_check: true
+ - <<: *alpine
+ version: "3.16"
+ support_type: Intermediate
+ notes: ''
+ eol_check: true
+
+ - distro: archlinux
+ version: latest
+ support_type: Intermediate
+ notes: ''
+ eol_check: false
+ env_prep: |
+ pacman --noconfirm -Syu && pacman --noconfirm -Sy grep libffi
+ test:
+ ebpf-core: true
+
+ - &amzn
+ distro: amazonlinux
+ version: "2"
+ support_type: Core
+ notes: ''
+ eol_check: 'amazon-linux'
+ packages: &amzn_packages
+ type: rpm
+ repo_distro: amazonlinux/2
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: false
+ - <<: *amzn
+ version: "2023"
+ packages:
+ <<: *amzn_packages
+ repo_distro: amazonlinux/2023
+
+ - distro: centos
+ version: "7"
+ support_type: Core
+ notes: ''
+ eol_check: false
+ packages:
+ type: rpm
+ repo_distro: el/7
+ alt_links:
+ - el/7Server
+ - el/7Client
+ arches:
+ - x86_64
+ test:
+ ebpf-core: false
+
+ - &centos_stream
+ distro: centos-stream
+ base_image: 'quay.io/centos/centos:stream9'
+ version: '9'
+ support_type: 'Community'
+ notes: ''
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ eol_check: true
+ packages: &cs_packages
+ type: rpm
+ repo_distro: el/c9s
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *centos_stream
+ version: '8'
+ base_image: 'quay.io/centos/centos:stream8'
+ packages:
+ <<: *cs_packages
+ repo_distro: el/c8s
+
+ - &debian
+ distro: debian
+ version: "12"
+ support_type: Core
+ notes: ''
+ base_image: debian:bookworm
+ eol_check: true
+ env_prep: |
+ apt-get update
+ jsonc_removal: |
+ apt-get purge -y libjson-c-dev
+ packages: &debian_packages
+ type: deb
+ repo_distro: debian/bookworm
+ arches:
+ - i386
+ - amd64
+ - armhf
+ - arm64
+ test:
+ ebpf-core: true
+ - <<: *debian
+ version: "11"
+ base_image: debian:bullseye
+ packages:
+ <<: *debian_packages
+ repo_distro: debian/bullseye
+ test:
+ ebpf-core: false
+ - <<: *debian
+ version: "10"
+ base_image: debian:buster
+ packages:
+ <<: *debian_packages
+ repo_distro: debian/buster
+ test:
+ ebpf-core: false
+
+ - &fedora
+ distro: fedora
+ version: "39"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ packages: &fedora_packages
+ type: rpm
+ repo_distro: fedora/39
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *fedora
+ version: "38"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/38
+ test:
+ ebpf-core: true
+ - <<: *fedora
+ version: "37"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/37
+ test:
+ ebpf-core: true
+
+ - &opensuse
+ distro: opensuse
+ version: "15.5"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ base_image: opensuse/leap:15.5
+ jsonc_removal: |
+ zypper rm -y libjson-c-devel
+ packages: &opensuse_packages
+ type: rpm
+ repo_distro: opensuse/15.5
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *opensuse
+ version: "15.4"
+ support_type: Core
+ notes: ''
+ base_image: opensuse/leap:15.4
+ packages:
+ <<: *opensuse_packages
+ repo_distro: opensuse/15.4
+
+ - &oracle
+ distro: oraclelinux
+ version: "8"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ packages: &oracle_packages
+ type: rpm
+ repo_distro: ol/8
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *oracle
+ version: "9"
+ packages:
+ <<: *oracle_packages
+ repo_distro: ol/9
+
+ - &rocky
+ distro: rockylinux
+ version: "9"
+ support_type: Core
+ notes: ''
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ eol_check: true
+ packages: &rocky_packages
+ type: rpm
+ repo_distro: el/9
+ alt_links:
+ - el/9Server
+ - el/9Client
+ - el/9RedHatVirtualizationHost
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *rocky
+ version: "8"
+ packages:
+ <<: *rocky_packages
+ repo_distro: el/8
+ alt_links:
+ - el/8Server
+ - el/8Client
+ - el/8RedHatVirtualizationHost
+
+ - &ubuntu
+ distro: ubuntu
+ version: "22.04"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ env_prep: |
+ rm -f /etc/apt/apt.conf.d/docker && apt-get update
+ jsonc_removal: |
+ apt-get remove -y libjson-c-dev
+ packages: &ubuntu_packages
+ type: deb
+ repo_distro: ubuntu/jammy
+ arches:
+ - amd64
+ - armhf
+ - arm64
+ test:
+ ebpf-core: true
+ - <<: *ubuntu
+ version: "23.10"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/mantic
+ - <<: *ubuntu
+ version: "23.04"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/lunar
+ - <<: *ubuntu
+ version: "20.04"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/focal
+no_include: # Info for platforms not covered in CI
+ - distro: docker
+ version: "19.03 or newer"
+ support_type: Core
+ notes: ''
+ packages:
+ arches:
+ - linux/i386
+ - linux/amd64
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
+
+ - distro: clearlinux
+ version: latest
+ support_type: Community
+ notes: ''
+
+ - &rhel
+ distro: rhel
+ version: "9.x"
+ support_type: Core
+ notes: ''
+ packages:
+ arches:
+ - x86_64
+ - aarch64
+ - <<: *rhel
+ version: "8.x"
+ - <<: *rhel
+ version: "7.x"
+ packages:
+ arches:
+ - x86_64
+
+ - &freebsd
+ distro: freebsd
+ version: 13-STABLE
+ support_type: Community
+ notes: ''
+
+ - &macos
+ distro: macos
+ version: '13'
+ support_type: Community
+ notes: ''
+ - <<: *macos
+ version: '12'
+ - <<: *macos
+ version: '11'
+
+ - distro: gentoo
+ version: latest
+ support_type: Community
+ notes: ''
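
As a minimal sketch of how the gen-matrix-* scripts further down consume this file (they use the same ruamel.yaml safe loader, which also resolves the YAML merge keys used above), resolving one distro's packaging arches to docker platforms looks roughly like this:

    from ruamel.yaml import YAML  # same loader the gen-matrix-* scripts use

    with open('.github/data/distros.yml') as f:
        data = YAML(typ='safe').load(f)

    # Map each packaging arch of debian 12 to its docker platform,
    # e.g. armhf -> linux/arm/v7 via platform_map.
    entry = next(e for e in data['include']
                 if e['distro'] == 'debian' and e['version'] == '12')
    print([data['platform_map'][a] for a in entry['packages']['arches']])
    # -> ['linux/i386', 'linux/amd64', 'linux/arm/v7', 'linux/arm64/v8']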
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..b02b155d
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,9 @@
+version: 2
+updates:
+ - package-ecosystem: github-actions
+ directory: /
+ schedule:
+ interval: weekly
+ labels:
+ - "no changelog"
+ - "area/ci"
diff --git a/.github/dockerfiles/Dockerfile.build_test b/.github/dockerfiles/Dockerfile.build_test
new file mode 100644
index 00000000..c275d61d
--- /dev/null
+++ b/.github/dockerfiles/Dockerfile.build_test
@@ -0,0 +1,18 @@
+ARG BASE
+
+FROM ${BASE}
+
+ARG PRE
+ENV PRE=${PRE}
+ARG RMJSONC
+ENV RMJSONC=${RMJSONC}
+ENV DISABLE_TELEMETRY=1
+ENV GITHUB_ACTIONS=true
+
+RUN echo "${PRE}" > /prep-cmd.sh && \
+ echo "${RMJSONC}" > /rmjsonc.sh && chmod +x /rmjsonc.sh && \
+ /bin/sh /prep-cmd.sh
+
+COPY . /netdata
+
+RUN /netdata/packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
diff --git a/.github/dockerfiles/Dockerfile.clang b/.github/dockerfiles/Dockerfile.clang
new file mode 100644
index 00000000..62bb0194
--- /dev/null
+++ b/.github/dockerfiles/Dockerfile.clang
@@ -0,0 +1,18 @@
+FROM debian:buster AS build
+
+# Disable apt/dpkg interactive mode
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install all build dependencies
+COPY packaging/installer/install-required-packages.sh /tmp/install-required-packages.sh
+RUN /tmp/install-required-packages.sh --dont-wait --non-interactive netdata-all
+
+# Install Clang and set as default CC
+RUN apt-get install -y clang && \
+ update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100
+
+WORKDIR /netdata
+COPY . .
+
+# Build Netdata
+RUN ./netdata-installer.sh --dont-wait --dont-start-it --disable-go --require-cloud
diff --git a/.github/labeler.yml b/.github/labeler.yml
new file mode 100644
index 00000000..0ea825ef
--- /dev/null
+++ b/.github/labeler.yml
@@ -0,0 +1,158 @@
+# This configures label matching for PRs.
+#
+# The keys are labels, and the values are lists of minimatch patterns
+# to which those labels apply.
+#
+# NOTE: This can only add labels, not remove them.
+# NOTE: Due to YAML syntax limitations, patterns or labels which start
+# with a character that is part of the standard YAML syntax must be
+# quoted.
+#
+# Please keep the labels sorted and deduplicated.
+
+area/ACLK:
+ - aclk/**
+ - database/sqlite/sqlite_aclk*
+ - mqtt_websockets
+
+area/claim:
+ - claim/*
+
+area/exporting:
+ - exporting/**
+
+area/build:
+ - build/**
+ - build_external/**
+ - CMakeLists.txt
+ - configure.ac
+ - Makefile.am
+ - "**/Makefile.am"
+
+area/ci:
+ - .github/**
+
+area/daemon:
+ - daemon/**
+
+area/database:
+ - database/**
+
+area/docs:
+ - "*.md"
+ - "**/*.md"
+ - "**/*.mdx"
+ - diagrams/**
+
+# -----------------collectors----------------------
+
+area/collectors:
+ - collectors/**
+
+collectors/plugins.d:
+ - collectors/plugins.d/**
+
+collectors/apps:
+ - collectors/apps.plugin/**
+
+collectors/cgroups:
+ - collectors/cgroups.plugin/**
+
+collectors/charts.d:
+ - collectors/charts.d.plugin/**
+
+collectors/cups:
+ - collectors/cups.plugin/**
+
+collectors/debugfs:
+ - collectors/debugfs.plugin/**
+
+collectors/diskspace:
+ - collectors/diskspace.plugin/**
+
+collectors/ebpf:
+ - collectors/ebpf.plugin/**
+
+collectors/freebsd:
+ - collectors/freebsd.plugin/**
+
+collectors/freeipmi:
+ - collectors/freeipmi.plugin/**
+
+collectors/idlejitter:
+ - collectors/idlejitter.plugin/**
+
+collectors/ioping:
+ - collectors/ioping.plugin/**
+
+collectors/macos:
+ - collectors/macos.plugin/**
+
+collectors/nfacct:
+ - collectors/nfacct.plugin/**
+
+collectors/perf:
+ - collectors/perf.plugin/**
+
+collectors/proc:
+ - collectors/proc.plugin/**
+
+collectors/python.d:
+ - collectors/python.d.plugin/**
+
+collectors/slabinfo:
+ - collectors/slabinfo.plugin/**
+
+collectors/statsd:
+ - collectors/statsd.plugin/**
+
+collectors/systemd-journal:
+ - collectors/systemd-journal.plugin/**
+
+collectors/tc:
+ - collectors/tc.plugin/**
+
+collectors/timex:
+ - collectors/timex.plugin/**
+
+collectors/xenstat:
+ - collectors/xenstat.plugin/**
+
+# ----------------/collectors----------------------
+
+area/health:
+ - health/**
+
+area/metadata:
+ - "**/*metadata.yaml"
+ - integrations/**
+
+area/ml:
+ - ml/**
+
+area/packaging:
+ - contrib/**
+ - packaging/**
+ - system/**
+ - Dockerfile*
+ - netdata-installer.sh
+ - netdata.spec.in
+
+area/registry:
+ - registry/**
+
+area/streaming:
+ - streaming/**
+
+area/tests:
+ - tests/**
+ - daemon/unit_test*
+ - coverity-scan.sh
+ - cppcheck.sh
+ - netdata.cppcheck
+
+area/web:
+ - web/**
+
+area/logs-management:
+ - logsmanagement/**
diff --git a/.github/scripts/build-artifacts.sh b/.github/scripts/build-artifacts.sh
new file mode 100755
index 00000000..569c79a5
--- /dev/null
+++ b/.github/scripts/build-artifacts.sh
@@ -0,0 +1,82 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.tar.gz source tarball (dist)
+# and netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifacts.
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ echo "${VERSION}" > packaging/version
+ ) >&2
+}
+
+build_dist() {
+ progress "Building dist"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ autoreconf -ivf
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata \
+ --disable-dependency-tracking \
+ CFLAGS=-O2
+ make dist
+ mv "${BASENAME}.tar.gz" artifacts/
+ ) >&2
+}
+
+build_static_x86_64() {
+ progress "Building static x86_64"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ USER="" ./packaging/makeself/build-x86_64-static.sh
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+
+ cd artifacts || exit 1
+ ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+ ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
+ sha256sum -b ./* > "sha256sums.txt"
+ ) >&2
+}
+
+steps="prepare_build build_dist build_static_x86_64"
+steps="$steps prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/build-dist.sh b/.github/scripts/build-dist.sh
new file mode 100755
index 00000000..027b6214
--- /dev/null
+++ b/.github/scripts/build-dist.sh
@@ -0,0 +1,71 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.tar.gz source tarball (dist)
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe --always)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ echo "${VERSION}" > packaging/version
+ ) >&2
+}
+
+build_dist() {
+ progress "Building dist"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ autoreconf -ivf
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata \
+ --disable-dependency-tracking \
+ CFLAGS=-O2
+ make dist
+ mv "${BASENAME}.tar.gz" artifacts/
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+ cd artifacts || exit 1
+ ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+ ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
+ sha256sum -b ./* > "sha256sums.txt"
+ ) >&2
+}
+
+steps="prepare_build build_dist prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/build-static.sh b/.github/scripts/build-static.sh
new file mode 100755
index 00000000..e8105143
--- /dev/null
+++ b/.github/scripts/build-static.sh
@@ -0,0 +1,61 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifact.
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+BUILDARCH="${1}"
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe)"}"
+BASENAME="$NAME-$BUILDARCH-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ ) >&2
+}
+
+build_static() {
+ progress "Building static ${BUILDARCH}"
+ (
+ USER="" ./packaging/makeself/build-static.sh "${BUILDARCH}"
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+
+ cd artifacts || exit 1
+ ln -s "${BASENAME}.gz.run" "netdata-${BUILDARCH}-latest.gz.run"
+ if [ "${BUILDARCH}" = "x86_64" ]; then
+ ln -s "${BASENAME}.gz.run" netdata-latest.gz.run
+ fi
+ ) >&2
+}
+
+steps="prepare_build build_static"
+steps="$steps prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/bump-packaging-version.sh b/.github/scripts/bump-packaging-version.sh
new file mode 100755
index 00000000..bffcb0c1
--- /dev/null
+++ b/.github/scripts/bump-packaging-version.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+VERSION="$(git describe)"
+echo "$VERSION" > packaging/version
+git add -A
+git commit -m "[netdata nightly] $VERSION"
diff --git a/.github/scripts/check-updater.sh b/.github/scripts/check-updater.sh
new file mode 100755
index 00000000..3df0c9de
--- /dev/null
+++ b/.github/scripts/check-updater.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+set -e
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+check_successful_update() {
+ progress "Check netdata version after update"
+ (
+ netdata_version=$(netdata -v | awk '{print $2}')
+ updater_version=$(cat packaging/version)
+ if [ "$netdata_version" = "$updater_version" ]; then
+ echo "Update successful!"
+ else
+ exit 1
+ fi
+ ) >&2
+}
+
+check_autoupdate_enabled() {
+ progress "Check autoupdate still enabled after update"
+ (
+ if [ -f /etc/periodic/daily/netdata-updater ] || [ -f /etc/cron.daily/netdata-updater ]; then
+      echo "Autoupdate is still enabled!"
+ else
+ exit 1
+ fi
+ ) >&2
+}
+
+steps="check_successful_update check_autoupdate_enabled"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+        fail "Check failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/check_latest_versions.py b/.github/scripts/check_latest_versions.py
new file mode 100755
index 00000000..67b11f8d
--- /dev/null
+++ b/.github/scripts/check_latest_versions.py
@@ -0,0 +1,33 @@
+import sys
+import os
+import modules.version_manipulation as ndvm
+import modules.github_actions as cigh
+
+
+def main(command_line_args):
+ """
+    Inputs: a single version or multiple versions
+    Outputs:
+        Creates files for the versions that need updating under temp_dir/staging-new-releases
+        Sets the GitHub output 'versions_needs_update' to 'true'
+ """
+ versions = [str(arg) for arg in command_line_args]
+    # Create a temp output folder for the releases that need updating
+ staging = os.path.join(os.environ.get('TMPDIR', '/tmp'), 'staging-new-releases')
+ os.makedirs(staging, exist_ok=True)
+ for version in versions:
+ temp_value = ndvm.compare_version_with_remote(version)
+ if temp_value:
+ path, filename = ndvm.get_release_path_and_filename(version)
+ release_path = os.path.join(staging, path)
+ os.makedirs(release_path, exist_ok=True)
+ file_release_path = os.path.join(release_path, filename)
+ with open(file_release_path, "w") as file:
+ print("Creating local copy of the release version update at: ", file_release_path)
+ file.write(version)
+ if cigh.run_as_github_action():
+ cigh.update_github_output("versions_needs_update", "true")
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
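
A usage sketch with hypothetical version arguments: running `python3 check_latest_versions.py v1.44.3 v1.43.4` writes one file per version that is newer than its remote counterpart (e.g. stable/v1 under $TMPDIR/staging-new-releases, containing the version string) and, when running under GitHub Actions, sets the `versions_needs_update` output to `true`.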
diff --git a/.github/scripts/check_latest_versions_per_channel.py b/.github/scripts/check_latest_versions_per_channel.py
new file mode 100644
index 00000000..885e5a98
--- /dev/null
+++ b/.github/scripts/check_latest_versions_per_channel.py
@@ -0,0 +1,9 @@
+import check_latest_versions
+import modules.version_manipulation as ndvm
+import sys
+
+if __name__ == "__main__":
+ channel = sys.argv[1]
+ sorted_agents_by_major = ndvm.sort_and_grouby_major_agents_of_channel(channel)
+ latest_per_major = [values[0] for values in sorted_agents_by_major.values()]
+ check_latest_versions.main(latest_per_major)
diff --git a/.github/scripts/ci-support-pkgs.sh b/.github/scripts/ci-support-pkgs.sh
new file mode 100755
index 00000000..5cedbf3b
--- /dev/null
+++ b/.github/scripts/ci-support-pkgs.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# This script installs supporting packages needed for CI, which provide the
+# following: cron, pidof
+
+set -e
+
+. /etc/os-release
+
+case "${ID}" in
+ amzn|almalinux|centos|fedora)
+ dnf install -y procps-ng cronie cronie-anacron || \
+ yum install -y procps-ng cronie cronie-anacron
+ ;;
+ arch)
+ pacman -S --noconfirm cronie
+ ;;
+esac
diff --git a/.github/scripts/docker-test.sh b/.github/scripts/docker-test.sh
new file mode 100755
index 00000000..0f5fa469
--- /dev/null
+++ b/.github/scripts/docker-test.sh
@@ -0,0 +1,64 @@
+#!/bin/sh
+
+export DEBIAN_FRONTEND=noninteractive
+
+wait_for() {
+ host="${1}"
+ port="${2}"
+ name="${3}"
+ timeout="30"
+
+ if command -v nc > /dev/null ; then
+ netcat="nc"
+ elif command -v netcat > /dev/null ; then
+ netcat="netcat"
+ else
+ printf "Unable to find a usable netcat command.\n"
+ return 1
+ fi
+
+ printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
+
+ sleep 30
+
+ i=0
+ while ! ${netcat} -z "${host}" "${port}"; do
+ sleep 1
+ if [ "$i" -gt "$timeout" ]; then
+ printf "Timed out!\n"
+ docker ps -a
+ echo "::group::Netdata container logs"
+ docker logs netdata 2>&1
+ echo "::endgroup::"
+ return 1
+ fi
+ i="$((i + 1))"
+ done
+ printf "OK\n"
+}
+
+if [ -z "$(command -v nc 2>/dev/null)" ] && [ -z "$(command -v netcat 2>/dev/null)" ]; then
+ sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y netcat
+fi
+
+docker run -d --name=netdata \
+ -p 19999:19999 \
+ -v netdataconfig:/etc/netdata \
+ -v netdatalib:/var/lib/netdata \
+ -v netdatacache:/var/cache/netdata \
+ -v /etc/passwd:/host/etc/passwd:ro \
+ -v /etc/group:/host/etc/group:ro \
+ -v /proc:/host/proc:ro \
+ -v /sys:/host/sys:ro \
+ -v /etc/os-release:/host/etc/os-release:ro \
+ --cap-add SYS_PTRACE \
+ --security-opt apparmor=unconfined \
+ netdata/netdata:test
+
+wait_for localhost 19999 netdata || exit 1
+
+curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+
+cat ./response
+
+jq '.version' ./response || exit 1
diff --git a/.github/scripts/functions.sh b/.github/scripts/functions.sh
new file mode 100644
index 00000000..7cd2e080
--- /dev/null
+++ b/.github/scripts/functions.sh
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+# This file is included by download.sh & build.sh
+
+set -e
+
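+# Emits an ANSI SGR escape sequence. Note that the trailing 'm' terminator is
+# supplied by callers as part of the background argument (e.g. "42m" below).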
+color() {
+ fg="$1"
+ bg="${2}"
+ ft="${3:-0}"
+
+ printf "\33[%s;%s;%s" "$ft" "$fg" "$bg"
+}
+
+color_reset() {
+ printf "\033[0m"
+}
+
+ok() {
+ if [ -t 1 ]; then
+ printf "%s[ OK ]%s\n" "$(color 37 42m 1)" "$(color_reset)"
+ else
+ printf "%s\n" "[ OK ]"
+ fi
+}
+
+err() {
+ if [ -t 1 ]; then
+ printf "%s[ ERR ]%s\n" "$(color 37 41m 1)" "$(color_reset)"
+ else
+ printf "%s\n" "[ ERR ]"
+ fi
+}
+
+run() {
+ retval=0
+ logfile="$(mktemp -t "run-XXXXXX")"
+ if "$@" 2> "$logfile"; then
+ ok
+ else
+ retval=$?
+ err
+ tail -n 100 "$logfile" || true
+ fi
+ rm -rf "$logfile"
+ return $retval
+}
+
+progress() {
+ printf "%-40s" "$(printf "%s ... " "$1")"
+}
+
+log() {
+ printf "%s\n" "$1"
+}
+
+error() {
+ log "ERROR: ${1}"
+}
+
+fail() {
+ log "FATAL: ${1}"
+ exit 1
+}
+
+debug() {
+ log "Dropping into a shell for debugging ..."
+ exec /bin/sh
+}
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
new file mode 100755
index 00000000..8c88d3b5
--- /dev/null
+++ b/.github/scripts/gen-docker-tags.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+
+import sys
+
+version = sys.argv[1].split('.')
+suffix = sys.argv[2]
+
+REPO = f'netdata/netdata{suffix}'
+GHCR = f'ghcr.io/{REPO}'
+QUAY = f'quay.io/{REPO}'
+
+tags = []
+
+for repo in [REPO, GHCR, QUAY]:
+ tags.append(':'.join([repo, version[0]]))
+ tags.append(':'.join([repo, '.'.join(version[0:2])]))
+ tags.append(':'.join([repo, '.'.join(version[0:3])]))
+
+print(','.join(tags))
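
As a hand-derived example of the output, running `gen-docker-tags.py 1.44.3 ""` prints:

    netdata/netdata:1,netdata/netdata:1.44,netdata/netdata:1.44.3,ghcr.io/netdata/netdata:1,ghcr.io/netdata/netdata:1.44,ghcr.io/netdata/netdata:1.44.3,quay.io/netdata/netdata:1,quay.io/netdata/netdata:1.44,quay.io/netdata/netdata:1.44.3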
diff --git a/.github/scripts/gen-matrix-build.py b/.github/scripts/gen-matrix-build.py
new file mode 100755
index 00000000..3185e883
--- /dev/null
+++ b/.github/scripts/gen-matrix-build.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = []
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for i, v in enumerate(data['include']):
+ e = {
+ 'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
+ 'version': v['version'],
+ }
+
+ if 'base_image' in v:
+ e['distro'] = v['base_image']
+ else:
+ e['distro'] = ':'.join([v['distro'], str(v['version'])])
+
+ if 'env_prep' in v:
+ e['env_prep'] = v['env_prep']
+
+ if 'jsonc_removal' in v:
+ e['jsonc_removal'] = v['jsonc_removal']
+
+ entries.append(e)
+
+entries.sort(key=lambda k: k['distro'])
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
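
For illustration, the debian 12 entry in the distros.yml above would produce roughly this matrix entry (base_image replaces the distro:version pair, and the YAML block scalars carry a trailing newline):

    {"artifact_key": "debian12", "distro": "debian:bookworm", "env_prep": "apt-get update\n", "jsonc_removal": "apt-get purge -y libjson-c-dev\n", "version": "12"}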
diff --git a/.github/scripts/gen-matrix-eol-check.py b/.github/scripts/gen-matrix-eol-check.py
new file mode 100755
index 00000000..63852728
--- /dev/null
+++ b/.github/scripts/gen-matrix-eol-check.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+'''Generate the build matrix for the EOL check jobs.'''
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = list()
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for item in data['include']:
+ if 'eol_check' in item and item['eol_check']:
+ if isinstance(item['eol_check'], str):
+ distro = item['eol_check']
+ else:
+ distro = item['distro']
+
+ entries.append({
+ 'distro': distro,
+ 'release': item['version'],
+ 'full_name': f'{ item["distro"] } { item["version"] }'
+ })
+
+entries.sort(key=lambda k: (k['distro'], k['release']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
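
For example, the amazonlinux 2 entry above, whose eol_check is the string 'amazon-linux', would yield roughly:

    {"distro": "amazon-linux", "full_name": "amazonlinux 2", "release": "2"}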
diff --git a/.github/scripts/gen-matrix-packaging.py b/.github/scripts/gen-matrix-packaging.py
new file mode 100755
index 00000000..9347cd76
--- /dev/null
+++ b/.github/scripts/gen-matrix-packaging.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+
+import json
+import sys
+
+from ruamel.yaml import YAML
+
+ALWAYS_RUN_ARCHES = ["amd64", "x86_64"]
+SHORT_RUN = sys.argv[1]
+yaml = YAML(typ='safe')
+entries = list()
+run_limited = False
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+if bool(int(SHORT_RUN)):
+ run_limited = True
+
+for i, v in enumerate(data['include']):
+ if 'packages' in data['include'][i]:
+ for arch in data['include'][i]['packages']['arches']:
+ if arch in ALWAYS_RUN_ARCHES or not run_limited:
+ entries.append({
+ 'distro': data['include'][i]['distro'],
+ 'version': data['include'][i]['version'],
+ 'repo_distro': data['include'][i]['packages']['repo_distro'],
+ 'format': data['include'][i]['packages']['type'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
+ 'platform': data['platform_map'][arch],
+ 'arch': arch
+ })
+
+entries.sort(key=lambda k: (data['arch_order'].index(k['arch']), k['distro'], k['version']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/gen-matrix-repoconfig.py b/.github/scripts/gen-matrix-repoconfig.py
new file mode 100755
index 00000000..264cd53e
--- /dev/null
+++ b/.github/scripts/gen-matrix-repoconfig.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = list()
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for i, v in enumerate(data['include']):
+ if 'packages' in data['include'][i]:
+ entries.append({
+ 'distro': data['include'][i]['distro'],
+ 'version': data['include'][i]['version'],
+ 'pkgclouddistro': data['include'][i]['packages']['repo_distro'],
+ 'format': data['include'][i]['packages']['type'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
+ 'platform': data['platform_map']['amd64'],
+ 'arches': ' '.join(['"' + x + '"' for x in data['include'][i]['packages']['arches']])
+ })
+
+entries.sort(key=lambda k: (k['distro'], k['version']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/get-static-cache-key.sh b/.github/scripts/get-static-cache-key.sh
new file mode 100755
index 00000000..5093b332
--- /dev/null
+++ b/.github/scripts/get-static-cache-key.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+arch="${1}"
+platform="$(packaging/makeself/uname2platform.sh "${arch}")"
+builder_rev="v1"
+
+docker pull --platform "${platform}" netdata/static-builder:${builder_rev}
+
+# shellcheck disable=SC2046
+cat $(find packaging/makeself/jobs -type f ! -regex '.*\(netdata\|-makeself\).*') > /tmp/static-cache-key-data
+
+docker run -it --rm --platform "${platform}" netdata/static-builder:${builder_rev} sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data
+
+h="$(sha256sum /tmp/static-cache-key-data | cut -f 1 -d ' ')"
+
+echo "key=static-${arch}-${h}" >> "${GITHUB_OUTPUT}"
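
The line appended to ${GITHUB_OUTPUT} has the form key=static-x86_64-<sha256> (the hash being a placeholder here for the SHA-256 of the collected build inputs), so the static build cache is invalidated whenever the makeself jobs or the builder image's package list change.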
diff --git a/.github/scripts/modules/github_actions.py b/.github/scripts/modules/github_actions.py
new file mode 100644
index 00000000..1d653a77
--- /dev/null
+++ b/.github/scripts/modules/github_actions.py
@@ -0,0 +1,27 @@
+import os
+
+
+def update_github_env(key, value):
+ try:
+ env_file = os.getenv('GITHUB_ENV')
+ print(env_file)
+ with open(env_file, "a") as file:
+            file.write(f"{key}={value}\n")
+ print(f"Updated GITHUB_ENV with {key}={value}")
+ except Exception as e:
+ print(f"Error updating GITHUB_ENV. Error: {e}")
+
+
+def update_github_output(key, value):
+ try:
+ env_file = os.getenv('GITHUB_OUTPUT')
+ print(env_file)
+ with open(env_file, "a") as file:
+            file.write(f"{key}={value}\n")
+ print(f"Updated GITHUB_OUTPUT with {key}={value}")
+ except Exception as e:
+ print(f"Error updating GITHUB_OUTPUT. Error: {e}")
+
+
+def run_as_github_action():
+ return os.environ.get('GITHUB_ACTIONS') == 'true'
diff --git a/.github/scripts/modules/requirements.txt b/.github/scripts/modules/requirements.txt
new file mode 100644
index 00000000..fbec796f
--- /dev/null
+++ b/.github/scripts/modules/requirements.txt
@@ -0,0 +1 @@
+PyGithub==2.1.1
diff --git a/.github/scripts/modules/version_manipulation.py b/.github/scripts/modules/version_manipulation.py
new file mode 100644
index 00000000..cc346fb5
--- /dev/null
+++ b/.github/scripts/modules/version_manipulation.py
@@ -0,0 +1,141 @@
+import os
+import re
+import requests
+from itertools import groupby
+from github import Github
+from github.GithubException import GithubException
+
+repos_URL = {
+ "stable": "netdata/netdata",
+ "nightly": "netdata/netdata-nightlies"
+}
+
+GH_TOKEN = os.getenv("GH_TOKEN")
+if GH_TOKEN is None or GH_TOKEN == "":
+    print("Token is not defined or empty, continuing with rate-limited requests to the GitHub API")
+
+
+def identify_channel(_version):
+ nightly_pattern = r'v(\d+)\.(\d+)\.(\d+)-(\d+)-nightly'
+ stable_pattern = r'v(\d+)\.(\d+)\.(\d+)'
+ if re.match(nightly_pattern, _version):
+ _channel = "nightly"
+ _pattern = nightly_pattern
+ elif re.match(stable_pattern, _version):
+ _channel = "stable"
+ _pattern = stable_pattern
+ else:
+ print("Invalid version format.")
+ return None
+ return _channel, _pattern
+
+
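+# Worked example: padded_version((1, 44, 3, 120)) builds the string
+# '10000' + '00044' + '00003' + '00120' and returns 10000000440000300120.
+# The constant prefix keeps the key a fixed-width integer; since the major
+# version (item[0]) is skipped, keys only order versions within one major.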
+def padded_version(item):
+ key_value = '10000'
+ for value in item[1:]:
+ key_value += f'{value:05}'
+ return int(key_value)
+
+
+def extract_version(title):
+ if identify_channel(title):
+ _, _pattern = identify_channel(title)
+ try:
+ match = re.match(_pattern, title)
+ if match:
+ return tuple(map(int, match.groups()))
+ except Exception as e:
+ print(f"Unexpected error: {e}")
+ return None
+
+
+def get_release_path_and_filename(_version):
+ nightly_pattern = r'v(\d+)\.(\d+)\.(\d+)-(\d+)-nightly'
+ stable_pattern = r'v(\d+)\.(\d+)\.(\d+)'
+ if match := re.match(nightly_pattern, _version):
+ msb = match.group(1)
+ _path = "nightly"
+ _filename = f"v{msb}"
+ elif match := re.match(stable_pattern, _version):
+ msb = match.group(1)
+ _path = "stable"
+ _filename = f"v{msb}"
+ else:
+ print("Invalid version format.")
+ exit(1)
+ return (_path, _filename)
+
+
+def compare_version_with_remote(version):
+ """
+    If the given version is newer than the one stored in the remote, the
+    remote needs to be updated. If the remote version doesn't exist, the
+    version itself is returned.
+    :param version: any version of the agent
+    :return: the version if the remote needs updating, otherwise None
+ """
+
+ prefix = "https://packages.netdata.cloud/releases"
+ path, filename = get_release_path_and_filename(version)
+
+ remote_url = f"{prefix}/{path}/{filename}"
+ response = requests.get(remote_url)
+
+ if response.status_code == 200:
+ version_remote = response.text.rstrip()
+
+ version_components = extract_version(version)
+ remote_version_components = extract_version(version_remote)
+
+ absolute_version = padded_version(version_components)
+ absolute_remote_version = padded_version(remote_version_components)
+
+        if absolute_version > absolute_remote_version:
+            print(f"Version in the remote: {version_remote}, is older than the current: {version}, updating")
+            return version
+        else:
+            print(f"Version in the remote: {version_remote}, is not older than the current: {version}, no action needed")
+            return None
+ else:
+ # Remote version not found
+ print(f"Version in the remote not found, updating the predefined latest path with the version: {version}")
+        return version
+
+
+def sort_and_grouby_major_agents_of_channel(channel):
+ """
+    Queries the GitHub API for either the netdata/netdata or the netdata/netdata-nightlies
+    repo, fetches all of their releases, and groups them by major release number.
+    Every k,v in this dictionary is of the form: "vX": [descending ordered list of Agents in this major release].
+ :param channel: "nightly" or "stable"
+ :return: None or dict() with the Agents grouped by major version # (vX)
+ """
+ try:
+ G = Github(GH_TOKEN)
+ repo = G.get_repo(repos_URL[channel])
+ releases = repo.get_releases()
+ except GithubException as e:
+ print(f"GitHub API request failed: {e}")
+ return None
+
+ except Exception as e:
+ print(f"An unexpected error occurred: {e}")
+ return None
+
+ extracted_titles = [extract_version(item.title) for item in releases if
+ extract_version(item.title) is not None]
+    # Sorting is necessary for the groupby below to work correctly
+ extracted_titles.sort(key=lambda x: x[0])
+ # Group titles by major version
+ grouped_by_major = {major: list(group) for major, group in groupby(extracted_titles, key=lambda x: x[0])}
+ sorted_grouped_by_major = {}
+ for key, values in grouped_by_major.items():
+ sorted_values = sorted(values, key=padded_version, reverse=True)
+ sorted_grouped_by_major[key] = sorted_values
+    # Transform them into the correct form
+ if channel == "stable":
+ result_dict = {f"v{key}": [f"v{a}.{b}.{c}" for a, b, c in values] for key, values in
+ sorted_grouped_by_major.items()}
+ else:
+ result_dict = {f"v{key}": [f"v{a}.{b}.{c}-{d}-nightly" for a, b, c, d in values] for key, values in
+ sorted_grouped_by_major.items()}
+ return result_dict
diff --git a/.github/scripts/netdata-pkgcloud-cleanup.py b/.github/scripts/netdata-pkgcloud-cleanup.py
new file mode 100755
index 00000000..f6311e47
--- /dev/null
+++ b/.github/scripts/netdata-pkgcloud-cleanup.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python3
+
+import requests
+from requests.auth import HTTPBasicAuth
+from datetime import datetime
+import os
+import sys
+import argparse
+
+
+class PackageCloud:
+ NUM_PACKAGE_MINOR_TO_KEEP = 5
+ NUM_RETENTION_DAYS = 30
+ # number of pages to process. Use '0' to process all
+ MAX_PAGES = 0
+
+ def __init__(self, repo_type, dry_run=True, auth_token=None):
+ self.headers = {
+ "Accept" : "application/json",
+ "Content-Type" : "application/json",
+ }
+ self.dry_run = dry_run
+ self.repo_type = repo_type
+ if repo_type == "stable":
+ repo = "netdata/netdata"
+ elif repo_type == "devel":
+ repo = "netdata/netdata-devel"
+ elif repo_type == "edge":
+ repo = "netdata/netdata-edge"
+ else:
+ print(f"ERROR: unknown repo type '{repo_type}'!\nAccepted values are: stable,devel,edge")
+ sys.exit(1)
+ self.base_url = f"https://packagecloud.io/api/v1/repos/{repo}"
+ self.auth = HTTPBasicAuth(username=auth_token, password='') if auth_token else None
+
+ def get_all_packages(self):
+ page = 1
+ all_pkg_list = []
+ while True:
+ url = f"{self.base_url}/packages.json?page={page}"
+ if page > self.MAX_PAGES and self.MAX_PAGES != 0:
+ break
+ else:
+ pkg_list = requests.get(url, auth=self.auth, headers=self.headers).json()
+ if len(pkg_list) == 0:
+ break
+ else:
+ print(f"Processing page: {page}")
+ for element in pkg_list:
+                    if element['name'] != 'netdata-repo' and element['name'] != 'netdata-repo-edge':
+ all_pkg_list.append(element)
+ page += 1
+ return all_pkg_list
+
+ def delete_package(self, destroy_url):
+ if self.dry_run:
+ print(f" - DRY_RUN mode. Not deleting package '{destroy_url}'.")
+ else:
+ print(f" - Deleting package: {destroy_url}")
+ url = f"https://packagecloud.io{destroy_url}"
+ response = requests.delete(url, auth=self.auth, headers=self.headers).json()
+            # The packagecloud.io API returns an empty JSON object on success.
+            if not response:
+                print("  Package deleted successfully.")
+            else:
+                print("  Failed deleting package!")
+
+ def get_destroy_url(self, pkg_url):
+ url = f"https://packagecloud.io{pkg_url}"
+ response = requests.get(url, auth=self.auth, headers=self.headers)
+ response.raise_for_status()
+ return response.json()['destroy_url']
+
+ def get_packages_for_distro(self, distro, all_pkg_list):
+ distro_pkg_list = [ pkg for pkg in all_pkg_list if pkg['distro_version'] == distro ]
+ return distro_pkg_list
+
+ def get_packages_for_arch(self, arch, all_pkg_list):
+ arch_pkg_list = [ pkg for pkg in all_pkg_list if pkg['package_url'].split('/')[11] == arch ]
+ return arch_pkg_list
+
+ def get_arches(self, pkg_list):
+ arches = list(set([pkg['package_url'].split('/')[11] for pkg in pkg_list ]))
+ return arches
+
+ def get_pkg_list(self, pkg_name, pkg_list):
+ filtered_list = [ pkg for pkg in pkg_list if pkg['name'] == pkg_name ]
+ return filtered_list
+
+ def get_minor_versions(self, all_versions):
+ minor_versions = ['.'.join(version.split('.')[:-1]) for version in all_versions ]
+ minor_versions = list(set(minor_versions))
+ minor_versions.sort()
+ return minor_versions
+
+ def is_pkg_older_than_days(self, pkg, num_days):
+ pkg_create_date = datetime.strptime(pkg['created_at'], '%Y-%m-%dT%H:%M:%S.%fZ')
+ time_difference = datetime.now() - pkg_create_date
+ return time_difference.days > num_days
+
+ def cleanup_repo(self):
+ if self.repo_type == 'stable':
+ self.cleanup_stable_repo()
+ else:
+ self.cleanup_edge_repo()
+
+ def cleanup_edge_repo(self):
+ all_pkg_list = self.get_all_packages()
+ pkgs_to_delete = []
+ pkgs_to_keep = []
+ for package in all_pkg_list:
+ if self.is_pkg_older_than_days(package, self.NUM_RETENTION_DAYS):
+ pkgs_to_delete.append(package)
+ else:
+ pkgs_to_keep.append(package)
+ print(f"Keeping the following packages (newer than {self.NUM_RETENTION_DAYS} days):")
+ for pkg in pkgs_to_keep:
+ print(f" > pkg: {pkg['package_html_url']} / created_at: {pkg['created_at']}")
+ print(f"Deleting the following packages (older than {self.NUM_RETENTION_DAYS} days):")
+ for pkg in pkgs_to_delete:
+ print(f" > pkg: {pkg['package_html_url']} / created_at: {pkg['created_at']}")
+ self.delete_package(pkg['destroy_url'])
+
+ def cleanup_stable_repo(self):
+ all_pkg_list = self.get_all_packages()
+ all_distros = list(set([ pkg['distro_version'] for pkg in all_pkg_list ]))
+ all_distros = sorted(all_distros)
+ print(f"<> Distributions list: {all_distros}")
+
+ for distro in all_distros:
+ print(f">> Processing distro: {distro}")
+ pkg_list_distro = self.get_packages_for_distro(distro, all_pkg_list)
+ arches = self.get_arches(pkg_list_distro)
+ print(f" <> Arch list: {arches}")
+ for arch in arches:
+ print(f" >> Processing arch: {distro} -> {arch}")
+ pkg_list_arch = self.get_packages_for_arch(arch, pkg_list_distro)
+ pkg_names = [pkg['name'] for pkg in pkg_list_arch]
+ pkg_names = list(set(pkg_names))
+ print(f" <> Package names: {pkg_names}")
+ for pkg_name in pkg_names:
+ print(f" >> Processing package: {distro} -> {arch} -> {pkg_name}")
+ pkg_list = self.get_pkg_list(pkg_name, pkg_list_arch)
+ pkg_versions = [pkg['version'] for pkg in pkg_list]
+ pkg_minor_versions = self.get_minor_versions(pkg_versions)
+ pkg_minor_to_keep = pkg_minor_versions[-self.NUM_PACKAGE_MINOR_TO_KEEP:]
+ print(f" <> Minor Package Versions to Keep: {pkg_minor_to_keep}")
+ pkg_minor_to_delete = list(set(pkg_minor_versions) - set(pkg_minor_to_keep))
+ print(f" <> Minor Package Versions to Delete: {pkg_minor_to_delete}")
+ urls_to_keep = [pkg['package_url'] for pkg in pkg_list if '.'.join(pkg['version'].split('.')[:-1]) in pkg_minor_to_keep]
+ urls_to_delete = [pkg['package_url'] for pkg in pkg_list if '.'.join(pkg['version'].split('.')[:-1]) in pkg_minor_to_delete]
+ for pkg_url in urls_to_delete:
+ destroy_url = self.get_destroy_url(pkg_url)
+ self.delete_package(destroy_url)
+
+
+def configure():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--repo-type', '-r', required=True,
+ help='Repository type against to perform cleanup')
+ parser.add_argument('--dry-run', '-d', action='store_true',
+ help='Dry-run Mode')
+ args = parser.parse_args()
+ try:
+ token = os.environ['PKGCLOUD_TOKEN']
+    except KeyError:
+        print("FATAL: 'PKGCLOUD_TOKEN' environment variable is not set!", file=sys.stderr)
+        sys.exit(1)
+ conf = {
+ 'repo_type': args.repo_type,
+ 'dry_run': args.dry_run,
+ 'token': token
+ }
+ return conf
+
+
+def main():
+ config = configure()
+ pkg_cloud = PackageCloud(config['repo_type'], config['dry_run'], config['token'])
+ pkg_cloud.cleanup_repo()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/scripts/package-upload.sh b/.github/scripts/package-upload.sh
new file mode 100755
index 00000000..13d63b4a
--- /dev/null
+++ b/.github/scripts/package-upload.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+
+set -e
+
+host="packages.netdata.cloud"
+user="netdatabot"
+
+distro="${1}"
+arch="${2}"
+format="${3}"
+repo="${4}"
+
+staging="${TMPDIR:-/tmp}/package-staging"
+prefix="/home/netdatabot/incoming/${repo}/"
+
+packages="$(find artifacts -name "*.${format}")"
+
+mkdir -p "${staging}"
+
+case "${format}" in
+ deb)
+ src="${staging}/${distro}"
+ mkdir -p "${src}"
+
+ for pkg in ${packages}; do
+ cp "${pkg}" "${src}"
+ done
+ ;;
+ rpm)
+ src="${staging}/${distro}/${arch}/"
+ mkdir -p "${src}"
+
+ for pkg in ${packages}; do
+ cp "${pkg}" "${src}"
+ done
+ ;;
+ *)
+ echo "Unrecognized package format ${format}."
+ exit 1
+ ;;
+esac
+
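+# Example staging layout produced above:
+#   deb: ${staging}/<distro>/netdata_*.deb
+#   rpm: ${staging}/<distro>/<arch>/netdata-*.rpm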
+rsync -vrptO "${staging}/" "${user}@${host}:${prefix}"
diff --git a/.github/scripts/package_cloud_wrapper.sh b/.github/scripts/package_cloud_wrapper.sh
new file mode 100755
index 00000000..7640ef48
--- /dev/null
+++ b/.github/scripts/package_cloud_wrapper.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+#
+# This is a tool to help remove packages from packagecloud.io.
+# It uses the package_cloud utility provided by packagecloud.io.
+#
+# Depends on:
+# 1) the package_cloud gem (installed automatically if it is absent)
+#
+# Requires:
+# 1) the PKG_CLOUD_TOKEN variable to be exported
+# 2) ruby, gcc, gcc-c++ and ruby-devel, so that package_cloud can be installed when it is not found
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
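+#
+# Example (illustrative arguments; everything is passed straight through to package_cloud):
+#   .github/scripts/package_cloud_wrapper.sh yank netdata/netdata/ubuntu/jammy netdata_1.43.0_amd64.deb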
+#shellcheck disable=SC2068,SC2145
+set -e
+PKG_CLOUD_CONFIG="$HOME/.package_cloud_configuration.cfg"
+
+# If we are not in netdata git repo, at the top level directory, fail
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ "${TOP_LEVEL}" != "netdata" ]; then
+  echo "Run as .github/scripts/$(basename "$0") from the top level directory of the netdata git repository"
+  echo "package_cloud wrapper aborted"
+  exit 1
+fi
+
+# Install dependency if not there
+if ! command -v package_cloud > /dev/null 2>&1; then
+ echo "No package cloud gem found, installing"
+  sudo gem install -V package_cloud || (echo "Package cloud installation failed. You might want to check that the required dependencies are installed (ruby gcc gcc-c++ ruby-devel)" && exit 1)
+else
+ echo "Found package_cloud gem, continuing"
+fi
+
+# Check for required token and prepare config
+if [ -z "${PKG_CLOUD_TOKEN}" ]; then
+ echo "Please set PKG_CLOUD_TOKEN to be able to use ${0}"
+ exit 1
+fi
+echo "{\"url\":\"https://packagecloud.io\",\"token\":\"${PKG_CLOUD_TOKEN}\"}" > "${PKG_CLOUD_CONFIG}"
+
+echo "Executing package_cloud with config ${PKG_CLOUD_CONFIG} and parameters $@"
+package_cloud $@ --config="${PKG_CLOUD_CONFIG}"
+
+rm -rf "${PKG_CLOUD_CONFIG}"
+echo "Done!"
diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh
new file mode 100755
index 00000000..35767bf2
--- /dev/null
+++ b/.github/scripts/pkg-test.sh
@@ -0,0 +1,162 @@
+#!/bin/sh
+
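+# Smoke-tests the freshly built native packages found under /netdata/artifacts.
+# Example invocation (illustrative values; CI sets these in the test container):
+#   DISTRO=debian DISTRO_VERSION=12 /netdata/.github/scripts/pkg-test.sh
+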
+install_debian_like() {
+ # This is needed to ensure package installs don't prompt for any user input.
+ export DEBIAN_FRONTEND=noninteractive
+
+ if apt-cache show netcat 2>&1 | grep -q "No packages found"; then
+ netcat="netcat-traditional"
+ else
+ netcat="netcat"
+ fi
+
+ apt-get update
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ # shellcheck disable=SC2046
+ apt-get install -y $(find /netdata/artifacts -type f -name 'netdata*.deb' \
+! -name '*dbgsym*' ! -name '*cups*' ! -name '*freeipmi*') || exit 3
+
+ # Install testing tools
+ apt-get install -y --no-install-recommends curl "${netcat}" jq || exit 1
+}
+
+install_fedora_like() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1
+
+  # Install testing tools
+  # shellcheck disable=SC2086
+  "${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+}
+
+install_centos() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+  # Install EPEL (needed for `jq`)
+ "${PKGMGR}" install -y epel-release || exit 1
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ # shellcheck disable=SC2086
+ "${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+}
+
+install_amazon_linux() {
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ # shellcheck disable=SC2086
+ "${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+}
+
+install_suse_like() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ zypper install -y --allow-downgrade --allow-unsigned-rpm /netdata/artifacts/netdata*.rpm || exit 1
+
+ # Install testing tools
+ zypper install -y --allow-downgrade --no-recommends curl netcat-openbsd jq || exit 1
+}
+
+dump_log() {
+ cat ./netdata.log
+}
+
+wait_for() {
+ host="${1}"
+ port="${2}"
+ name="${3}"
+ timeout="30"
+
+ if command -v nc > /dev/null ; then
+ netcat="nc"
+ elif command -v netcat > /dev/null ; then
+ netcat="netcat"
+ else
+ printf "Unable to find a usable netcat command.\n"
+ return 1
+ fi
+
+ printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
+
+ sleep 30
+
+ i=0
+ while ! ${netcat} -z "${host}" "${port}"; do
+ sleep 1
+ if [ "$i" -gt "$timeout" ]; then
+ printf "Timed out!\n"
+ return 1
+ fi
+ i="$((i + 1))"
+ done
+ printf "OK\n"
+}
+
+case "${DISTRO}" in
+ debian | ubuntu)
+ install_debian_like
+ ;;
+ fedora | oraclelinux)
+ install_fedora_like
+ ;;
+  centos | centos-stream | rockylinux | almalinux)
+ install_centos
+ ;;
+ amazonlinux)
+ install_amazon_linux
+ ;;
+ opensuse)
+ install_suse_like
+ ;;
+ *)
+ printf "ERROR: unsupported distro: %s_%s\n" "${DISTRO}" "${DISTRO_VERSION}"
+ exit 1
+ ;;
+esac
+
+trap dump_log EXIT
+
+/usr/sbin/netdata -D > ./netdata.log 2>&1 &
+
+wait_for localhost 19999 netdata || exit 1
+
+curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+
+cat ./response
+
+jq '.version' ./response || exit 1
+
+trap - EXIT
diff --git a/.github/scripts/platform-impending-eol.py b/.github/scripts/platform-impending-eol.py
new file mode 100755
index 00000000..c57e5edd
--- /dev/null
+++ b/.github/scripts/platform-impending-eol.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+'''Check if a given distro is going to be EOL soon.
+
+ This queries the public API of https://endoflife.date to fetch EOL dates.
+
+ ‘soon’ is defined by LEAD_DAYS, currently 30 days.'''
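+
+# Example invocation (illustrative release name): platform-impending-eol.py ubuntu 23.04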
+
+import datetime
+import json
+import sys
+import urllib.request
+
+URL_BASE = 'https://endoflife.date/api'
+NOW = datetime.date.today()
+LEAD_DAYS = datetime.timedelta(days=30)
+
+DISTRO = sys.argv[1]
+RELEASE = sys.argv[2]
+
+EXIT_NOT_IMPENDING = 0
+EXIT_IMPENDING = 1
+EXIT_NO_DATA = 2
+EXIT_FAILURE = 3
+
+try:
+ with urllib.request.urlopen(f'{ URL_BASE }/{ DISTRO }/{ RELEASE }.json') as response:
+ match response.status:
+ case 200:
+ data = json.load(response)
+ case _:
+ print(
+ f'Failed to retrieve data for { DISTRO } { RELEASE } ' +
+ f'(status: { response.status }).',
+ file=sys.stderr
+ )
+ sys.exit(EXIT_FAILURE)
+except urllib.error.HTTPError as e:
+ match e.code:
+ case 404:
+ print(f'No data available for { DISTRO } { RELEASE }.', file=sys.stderr)
+ sys.exit(EXIT_NO_DATA)
+ case _:
+ print(
+ f'Failed to retrieve data for { DISTRO } { RELEASE } ' +
+ f'(status: { e.code }).',
+ file=sys.stderr
+ )
+ sys.exit(EXIT_FAILURE)
+
+eol = datetime.date.fromisoformat(data['eol'])
+
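+# Using abs() means an EOL date that passed within the last LEAD_DAYS is still reported as impending.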
+offset = abs(eol - NOW)
+
+if offset <= LEAD_DAYS:
+ print(data['eol'])
+ sys.exit(EXIT_IMPENDING)
+else:
+ sys.exit(EXIT_NOT_IMPENDING)
diff --git a/.github/scripts/prepare-release-base.sh b/.github/scripts/prepare-release-base.sh
new file mode 100755
index 00000000..06a2da16
--- /dev/null
+++ b/.github/scripts/prepare-release-base.sh
@@ -0,0 +1,180 @@
+#!/bin/sh
+
+set -e
+
+REPO="${1}"
+EVENT_NAME="${2}"
+EVENT_TYPE="${3}"
+EVENT_VERSION="${4}"
+RELEASE_TEST="${5}"
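+
+# Example invocation (illustrative values; the release workflow passes these
+# through from the triggering GitHub event):
+#   prepare-release-base.sh netdata/netdata workflow_dispatch minor v1.45.0 ''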
+
+##############################################################
+# Version validation functions
+
+check_version_format() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "::error::The supplied version (${EVENT_VERSION}) is not a valid version string."
+ return 1
+ fi
+}
+
+patch_is_zero() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.[[:digit:]]+\.0$'; then
+ echo "::error::The patch number for a ${EVENT_TYPE} build must be 0."
+ return 1
+ fi
+}
+
+minor_is_zero() {
+  if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.0\.'; then
+ echo "::error::The minor version number for a ${EVENT_TYPE} build must be 0."
+ return 1
+ fi
+}
+
+major_matches() {
+ current_major="$(cut -f 1 -d '-' packaging/version | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+ target_major="$(echo "${EVENT_VERSION}" | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+
+ if [ "${target_major}" != "${current_major}" ]; then
+ echo "::error::Major version mismatch, expected ${current_major} but got ${target_major}."
+ return 1
+ fi
+}
+
+minor_matches() {
+ current_minor="$(cut -f 1 -d '-' packaging/version | cut -f 2 -d '.')"
+ target_minor="$(echo "${EVENT_VERSION}" | cut -f 2 -d '.')"
+
+ if [ "${target_minor}" != "${current_minor}" ]; then
+ echo "::error::Minor version mismatch, expected ${current_minor} but got ${target_minor}."
+ return 1
+ fi
+}
+
+check_for_existing_tag() {
+ if git tag | grep -qE "^${EVENT_VERSION}$"; then
+ echo "::error::A tag for version ${EVENT_VERSION} already exists."
+ return 1
+ fi
+}
+
+check_newer_major_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+check_newer_minor_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 2 -d '.')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 2 -d '.')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+check_newer_patch_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 3 -d '.')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 3 -d '.')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+##############################################################
+# Core logic
+
+git config user.name "netdatabot"
+git config user.email "bot@netdata.cloud"
+
+if [ "${REPO}" != "netdata/netdata" ] && [ -z "${RELEASE_TEST}" ]; then
+ echo "::notice::Not running in the netdata/netdata repository, not queueing a release build."
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+elif [ "${EVENT_NAME}" = 'schedule' ] || [ "${EVENT_TYPE}" = 'nightly' ]; then
+ echo "::notice::Preparing a nightly release build."
+ LAST_TAG=$(git describe --abbrev=0 --tags)
+ COMMITS_SINCE_RELEASE=$(git rev-list "${LAST_TAG}"..HEAD --count)
+ NEW_VERSION="${LAST_TAG}-$((COMMITS_SINCE_RELEASE + 1))-nightly"
+ LAST_VERSION_COMMIT="$(git rev-list -1 HEAD packaging/version)"
+ HEAD_COMMIT="$(git rev-parse HEAD)"
+ if [ "${EVENT_NAME}" = 'schedule' ] && [ "${LAST_VERSION_COMMIT}" = "${HEAD_COMMIT}" ] && grep -qE '.*-nightly$' packaging/version; then
+ echo "::notice::No commits since last nightly build, not publishing a new nightly build."
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ else
+ echo "${NEW_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo "message=Update changelog and version for nightly build: ${NEW_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=master" >> "${GITHUB_OUTPUT}"
+ echo "type=nightly" >> "${GITHUB_OUTPUT}"
+ echo "branch=master" >> "${GITHUB_OUTPUT}"
+ echo "version=nightly" >> "${GITHUB_OUTPUT}"
+ fi
+elif [ "${EVENT_TYPE}" = 'patch' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a patch release build."
+ check_version_format || exit 1
+ check_for_existing_tag || exit 1
+ branch_name="$(echo "${EVENT_VERSION}" | cut -f 1-2 -d '.')"
+ if ! git checkout "${branch_name}"; then
+ echo "::error::Could not find a branch for the ${branch_name}.x release series."
+ exit 1
+ fi
+ minor_matches || exit 1
+ major_matches || exit 1
+ check_newer_patch_version || exit 1
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo "message=Patch release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}"
+ echo "type=release" >> "${GITHUB_OUTPUT}"
+ echo "branch=${branch_name}" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+elif [ "${EVENT_TYPE}" = 'minor' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a minor release build."
+ check_version_format || exit 1
+ patch_is_zero || exit 1
+ major_matches || exit 1
+ check_newer_minor_version || exit 1
+ check_for_existing_tag || exit 1
+ branch_name="$(echo "${EVENT_VERSION}" | cut -f 1-2 -d '.')"
+ if [ -n "$(git branch --list "${branch_name}")" ]; then
+ echo "::error::A branch named ${branch_name} already exists in the repository."
+ exit 1
+ fi
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo "message=Minor release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}"
+ echo "type=release" >> "${GITHUB_OUTPUT}"
+ echo "branch=master" >> "${GITHUB_OUTPUT}"
+ echo "new-branch=${branch_name}" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+elif [ "${EVENT_TYPE}" = 'major' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a major release build."
+ check_version_format || exit 1
+ minor_is_zero || exit 1
+ patch_is_zero || exit 1
+ check_newer_major_version || exit 1
+ check_for_existing_tag || exit 1
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ # shellcheck disable=SC2129
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+  echo "message=Major release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}"
+ echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}"
+ echo "type=release" >> "${GITHUB_OUTPUT}"
+ echo "branch=master" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+else
+ echo '::error::Unrecognized release type or invalid version.'
+ exit 1
+fi
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
new file mode 100755
index 00000000..1224d8f6
--- /dev/null
+++ b/.github/scripts/run-updater-check.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+echo ">>> Installing CI support packages..."
+/netdata/.github/scripts/ci-support-pkgs.sh
+mkdir -p /etc/cron.daily # Needed to make auto-update checking work correctly on some platforms.
+echo ">>> Installing Netdata..."
+/netdata/packaging/installer/kickstart.sh --dont-wait --build-only --disable-telemetry || exit 1
+echo "::group::>>> Pre-Update Environment File Contents"
+cat /etc/netdata/.environment
+echo "::endgroup::"
+echo "::group::>>> Pre-Update Netdata Build Info"
+netdata -W buildinfo
+echo "::endgroup::"
+echo ">>> Updating Netdata..."
+export NETDATA_BASE_URL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
+timeout 3600 /netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update
+
+case "$?" in
+ 124) echo "!!! Updater timed out." ; exit 1 ;;
+ 0) ;;
+ *) echo "!!! Updater failed." ; exit 1 ;;
+esac
+echo "::group::>>> Post-Update Environment File Contents"
+cat /etc/netdata/.environment
+echo "::endgroup::"
+echo "::group::>>> Post-Update Netdata Build Info"
+netdata -W buildinfo
+echo "::endgroup::"
+echo ">>> Checking if update was successful..."
+/netdata/.github/scripts/check-updater.sh || exit 1
diff --git a/.github/scripts/run_install_with_dist_file.sh b/.github/scripts/run_install_with_dist_file.sh
new file mode 100755
index 00000000..74652efd
--- /dev/null
+++ b/.github/scripts/run_install_with_dist_file.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+#
+# This script validates the netdata installation using the source tarball produced by "make dist".
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud>
+
+set -e
+
+if [ $# -ne 1 ]; then
+ printf >&2 "Usage: %s <dist_file>\n" "$(basename "$0")"
+ exit 1
+fi
+
+distfile="${1}"
+shift
+
+printf >&2 "Opening dist archive %s ... " "${distfile}"
+tar -xovf "${distfile}"
+distdir="$(echo "${distfile}" | rev | cut -d. -f3- | rev)"
+if [ ! -d "${distdir}" ]; then
+  printf >&2 "ERROR: %s is not a directory\n" "${distdir}"
+  exit 2
+fi
+cp -a packaging/installer/install-required-packages.sh "${distdir}/install-required-packages.sh"
+
+printf >&2 "Entering %s and starting docker run ..." "${distdir}"
+
+pushd "${distdir}" || exit 1
+docker run \
+ -e DISABLE_TELEMETRY=1 \
+ -v "${PWD}:/netdata" \
+ -w /netdata \
+ "ubuntu:latest" \
+  /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt-get install -y wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install-prefix /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"
+popd || exit 1
+
+echo "All Done!"
diff --git a/.github/scripts/upload-new-version-tags.sh b/.github/scripts/upload-new-version-tags.sh
new file mode 100755
index 00000000..a9b0cd30
--- /dev/null
+++ b/.github/scripts/upload-new-version-tags.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -e
+
+host="packages.netdata.cloud"
+user="netdatabot"
+
+prefix="/var/www/html/releases"
+staging="${TMPDIR:-/tmp}/staging-new-releases"
+
+mkdir -p "${staging}"
+
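+# Note: this script only creates the staging directory; it is expected to be
+# populated (one subdirectory per release) by an earlier workflow step.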
+for source_dir in "${staging}"/*; do
+ if [ -d "${source_dir}" ]; then
+ base_name=$(basename "${source_dir}")
+ scp -r "${source_dir}"/* "${user}@${host}:${prefix}/${base_name}"
+ fi
+done
diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644
index 00000000..abf927a4
--- /dev/null
+++ b/.github/stale.yml
@@ -0,0 +1,18 @@
+---
+only: issues
+limitPerRun: 30
+daysUntilStale: 30
+daysUntilClose: 7
+exemptLabels:
+ - bug
+ - help wanted
+ - feature request
+exemptProjects: true
+exemptMilestones: true
+staleLabel: stale
+markComment: >
+  This issue has been inactive for 30 days.
+  It will be closed in one week unless it is updated.
+closeComment: >
+  This issue has been automatically closed due to an extended period of inactivity.
+ Please reopen if it is still valid. Thank you for your contributions.
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
new file mode 100644
index 00000000..986d836a
--- /dev/null
+++ b/.github/workflows/add-to-project.yml
@@ -0,0 +1,26 @@
+name: Add issues to Agent Board
+
+on:
+ issues:
+ types:
+ - opened
+ - transferred
+
+jobs:
+ add-to-project:
+ name: Add issue to project
+ if: github.repository == 'netdata/netdata'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Add issues to Agent project board
+ uses: actions/add-to-project@v0.5.0
+ with:
+ project-url: https://github.com/orgs/netdata/projects/32
+ github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
+
+ - name: Add issues to Product Bug project board
+ uses: actions/add-to-project@v0.5.0
+ with:
+ project-url: https://github.com/orgs/netdata/projects/45
+ github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
+ labeled: bug
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 00000000..4a6debc4
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,1004 @@
+---
+# CI code for building release artifacts.
+name: Build
+on:
+ push: # Master branch checks only validate the build and generate artifacts for testing.
+ branches:
+ - master
+ pull_request: null # PR checks only validate the build and generate artifacts for testing.
+ workflow_dispatch: # Dispatch runs build and validate, then push to the appropriate storage location.
+ inputs:
+ type:
+ description: Build Type
+ default: nightly
+ required: true
+ version:
+ description: Version Tag
+ default: nightly
+ required: true
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: build-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: check-files
+ uses: tj-actions/changed-files@v40
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **.c
+ **.cc
+ **.h
+ **.hh
+ **.in
+ configure.ac
+ netdata-installer.sh
+ **/Makefile*
+ Makefile*
+ .github/data/distros.yml
+ .github/workflows/build.yml
+ .github/scripts/build-static.sh
+ .github/scripts/get-static-cache-key.sh
+ .github/scripts/gen-matrix-build.py
+ .github/scripts/run-updater-check.sh
+ build/**
+ packaging/makeself/**
+ packaging/installer/**
+ aclk/aclk-schemas/
+ ml/dlib/
+ mqtt_websockets
+ web/server/h2o/libh2o
+ files_ignore: |
+ netdata.spec.in
+ **.md
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ build-dist: # Build the distribution tarball and store it as an artifact.
+ name: Build Distribution Tarball
+ runs-on: ubuntu-latest
+ needs:
+ - file-check
+ outputs:
+ distfile: ${{ steps.build.outputs.distfile }}
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Fix tags
+ id: fix-tags
+ if: github.event_name != 'push' && needs.file-check.outputs.run == 'true'
+ run: |
+ git fetch --tags --force
+ - name: Mark Stable
+ id: channel
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly' && needs.file-check.outputs.run == 'true'
+ run: |
+ sed -i 's/^RELEASE_CHANNEL="nightly"/RELEASE_CHANNEL="stable"/' netdata-installer.sh
+ - name: Build
+ id: build
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ git describe
+ mkdir -p artifacts
+ ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ autoreconf -ivf
+ ./configure --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata
+ make dist
+ echo "distfile=$(find . -name 'netdata-*.tar.gz')" >> "${GITHUB_OUTPUT}"
+ cp netdata-*.tar.gz artifacts/
+ - name: Store
+ id: store
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/upload-artifact@v3
+ with:
+ name: dist-tarball
+ path: artifacts/*.tar.gz
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Distribution tarball creation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create source tarball for distribution.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fix Tags: ${{ steps.fix-tags.outcome }}
+ Mark stable: ${{ steps.channel.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Store: ${{ steps.store.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ build-static: # Build the static binary archives, and store them as artifacts.
+ name: Build Static
+ runs-on: ubuntu-latest
+ needs:
+ - file-check
+ strategy:
+ matrix:
+ arch:
+ - x86_64
+ - armv7l
+ - aarch64
+ - ppc64le
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Fix tags
+ id: fix-tags
+ if: github.event_name != 'push' && needs.file-check.outputs.run == 'true'
+ run: |
+ git fetch --tags --force
+ - name: Mark Stable
+ id: channel
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly' && needs.file-check.outputs.run == 'true'
+ run: |
+ sed -i 's/^RELEASE_CHANNEL="nightly"/RELEASE_CHANNEL="stable"/' netdata-installer.sh packaging/makeself/install-or-update.sh
+ - name: Get Cache Key
+ if: (github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')) && needs.file-check.outputs.run == 'true'
+ id: cache-key
+ run: .github/scripts/get-static-cache-key.sh ${{ matrix.arch }} "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache') }}"
+ - name: Cache
+ if: (github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')) && needs.file-check.outputs.run == 'true'
+ id: cache
+ uses: actions/cache@v3
+ with:
+ path: artifacts/cache
+ key: ${{ steps.cache-key.outputs.key }}
+ - name: Build
+ if: github.event_name != 'workflow_dispatch' && needs.file-check.outputs.run == 'true' # Don’t use retries on PRs.
+ run: .github/scripts/build-static.sh ${{ matrix.arch }}
+ - name: Build
+ if: github.event_name == 'workflow_dispatch' && needs.file-check.outputs.run == 'true'
+ id: build
+ uses: nick-fields/retry@v2
+ with:
+ timeout_minutes: 180
+ max_attempts: 3
+ command: .github/scripts/build-static.sh ${{ matrix.arch }}
+ - name: Store
+ id: store
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/upload-artifact@v3
+ with:
+ name: static-archive
+ path: artifacts/*.gz.run
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Static build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create static installer archive for ${{ matrix.arch }}.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fix Tags: ${{ steps.fix-tags.outcome }}
+ Mark stable: ${{ steps.channel.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Store: ${{ steps.store.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ matrix: # Generate the shared build matrix for our build tests.
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ if: github.event_name != 'workflow_dispatch'
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ run: |
+ matrix="$(.github/scripts/gen-matrix-build.py)"
+ echo "Generated matrix: ${matrix}"
+ echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}"
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Build matrix preparation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to prepare build matrix for build checks.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare tools: ${{ steps.prepare.outcome }}
+ Read build matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ prepare-test-images: # Prepare the test environments for our build checks. This also checks dependency handling code for each tested environment.
+ name: Prepare Test Environments
+ runs-on: ubuntu-latest
+ if: github.event_name != 'workflow_dispatch'
+ needs:
+ - matrix
+ env:
+ RETRY_DELAY: 300
+ strategy:
+ # Unlike the actual build tests, this completes _very_ fast (average of about 3 minutes for each job), so we
+      # just run everything in parallel instead of limiting job concurrency.
+ fail-fast: false
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+ - name: Build test environment
+ id: build1
+ uses: docker/build-push-action@v5
+ continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here.
+ with:
+ push: false
+ load: false
+ file: .github/dockerfiles/Dockerfile.build_test
+ build-args: |
+ BASE=${{ matrix.distro }}
+ PRE=${{ matrix.env_prep }}
+ RMJSONC=${{ matrix.jsonc_removal }}
+ outputs: type=docker,dest=/tmp/image.tar
+ tags: test:${{ matrix.artifact_key }}
+ - name: Retry delay
+ if: ${{ steps.build1.outcome == 'failure' }}
+ run: sleep "${RETRY_DELAY}"
+ - name: Build test environment (attempt 2)
+ if: ${{ steps.build1.outcome == 'failure' }}
+ id: build2
+ uses: docker/build-push-action@v5
+ continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here.
+ with:
+ push: false
+ load: false
+ file: .github/dockerfiles/Dockerfile.build_test
+ build-args: |
+ BASE=${{ matrix.distro }}
+ PRE=${{ matrix.env_prep }}
+ RMJSONC=${{ matrix.jsonc_removal }}
+ outputs: type=docker,dest=/tmp/image.tar
+ tags: test:${{ matrix.artifact_key }}
+ - name: Retry delay
+ if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }}
+ run: sleep "${RETRY_DELAY}"
+ - name: Build test environment (attempt 3)
+ if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }}
+ id: build3
+ uses: docker/build-push-action@v5
+ with:
+ push: false
+ load: false
+ file: .github/dockerfiles/Dockerfile.build_test
+ build-args: |
+ BASE=${{ matrix.distro }}
+ PRE=${{ matrix.env_prep }}
+ RMJSONC=${{ matrix.jsonc_removal }}
+ outputs: type=docker,dest=/tmp/image.tar
+ tags: test:${{ matrix.artifact_key }}
+ - name: Upload image artifact
+ id: upload
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.artifact_key }}-test-env
+ path: /tmp/image.tar
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Test environment preparation for ${{ matrix.distro }} failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Test environment preparation for ${{ matrix.distro }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Set up Buildx: ${{ steps.buildx.outcome }}
+ Build test environment: ${{ steps.build1.outcome }}
+ Build test environment (attempt 2): ${{ steps.build2.outcome }}
+ Build test environment (attempt 3): ${{ steps.build3.outcome }}
+ Upload: ${{ steps.upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ source-build: # Test various source build arrangements.
+ name: Test Source Build
+ runs-on: ubuntu-latest
+ if: github.event_name != 'workflow_dispatch'
+ needs:
+ - matrix
+ - prepare-test-images
+ - file-check
+ strategy:
+ fail-fast: false
+ max-parallel: 8
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Fetch test environment
+ id: fetch
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact_key }}-test-env
+ - name: Load test environment
+ id: load
+ if: needs.file-check.outputs.run == 'true'
+ run: docker load --input image.tar
+ - name: Regular build on ${{ matrix.distro }}
+ id: build-basic
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
+ /bin/sh -c 'autoreconf -ivf && ./configure --disable-dependency-tracking && make -j2'
+ - name: netdata-installer on ${{ matrix.distro }}, disable cloud
+ id: build-no-cloud
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
+ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --disable-cloud --one-time-build'
+ - name: netdata-installer on ${{ matrix.distro }}, require cloud
+ id: build-cloud
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
+ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
+ - name: netdata-installer on ${{ matrix.distro }}, require cloud, no JSON-C
+ id: build-no-jsonc
+ if: matrix.jsonc_removal != '' && needs.file-check.outputs.run == 'true'
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
+ /bin/sh -c '/rmjsonc.sh && ./netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Build tests for ${{ matrix.distro }} failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Build tests for ${{ matrix.distro }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch test environment: ${{ steps.fetch.outcome }}
+ Load test environment: ${{ steps.load.outcome }}
+ Regular build: ${{ steps.build-basic.outcome }}
+ netdata-installer, disable cloud: ${{ steps.build-no-cloud.outcome }}
+ netdata-installer, require cloud: ${{ steps.build-cloud.outcome }}
+ netdata-installer, no JSON-C: ${{ steps.build-no-jsonc.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ updater-check: # Test the generated dist archive using the updater code.
+ name: Test Generated Distfile and Updater Code
+ runs-on: ubuntu-latest
+ if: github.event_name != 'workflow_dispatch'
+ needs:
+ - build-dist
+ - matrix
+ - prepare-test-images
+ - file-check
+ strategy:
+ fail-fast: false
+ max-parallel: 8
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ services:
+ apache: # This gets used to serve the dist tarball for the updater script.
+ image: httpd:2.4
+ ports:
+ - 8080:80
+ volumes:
+ - ${{ github.workspace }}:/usr/local/apache2/htdocs/
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ - name: Fetch dist tarball artifacts
+ id: fetch-tarball
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: dist-tarball
+ path: dist-tarball
+ - name: Prepare artifact directory
+ id: prepare
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ mkdir -p artifacts/download/latest || exit 1
+ echo "9999.0.0-0" > artifacts/download/latest/latest-version.txt || exit 1
+ cp dist-tarball/* artifacts/download/latest || exit 1
+ cd artifacts/download/latest || exit 1
+ ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
+ sha256sum -b ./* > "sha256sums.txt" || exit 1
+ cat sha256sums.txt
+ - name: Fetch test environment
+ id: fetch-test-environment
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact_key }}-test-env
+ - name: Load test environment
+ id: load
+ if: needs.file-check.outputs.run == 'true'
+ run: docker load --input image.tar
+ - name: Install netdata and run the updater on ${{ matrix.distro }}
+ id: updater-check
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata test:${{ matrix.artifact_key }} \
+ /netdata/.github/scripts/run-updater-check.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Updater checks for ${{ matrix.distro }} failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Updater checks for ${{ matrix.distro }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch dist tarball: ${{ steps.fetch-tarball.outcome }}
+ Prepare artifact directory: ${{ steps.prepare.outcome }}
+ Fetch test environment: ${{ steps.fetch-test-environment.outcome }}
+ Load test environment: ${{ steps.load.outcome }}
+ Updater check: ${{ steps.updater-check.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ prepare-upload: # Consolidate the artifacts for uploading or releasing.
+ name: Prepare Artifacts
+ runs-on: ubuntu-latest
+ needs:
+ - build-dist
+ - build-static
+ - file-check
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ - name: Prepare Environment
+ id: prepare
+ if: needs.file-check.outputs.run == 'true'
+ run: mkdir -p artifacts
+ - name: Retrieve Dist Tarball
+ id: fetch-dist
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: dist-tarball
+ path: dist-tarball
+ - name: Retrieve Static Build Artifacts
+ id: fetch-static
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: static-archive
+ path: static-archive
+ - name: Prepare Artifacts
+ id: consolidate
+ if: needs.file-check.outputs.run == 'true'
+ working-directory: ./artifacts/
+ run: |
+ mv ../dist-tarball/* . || exit 1
+ mv ../static-archive/* . || exit 1
+ ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
+ cp ../packaging/version ./latest-version.txt || exit 1
+ cp ../integrations/integrations.js ./integrations.js || exit 1
+ sha256sum -b ./* > sha256sums.txt || exit 1
+ cat sha256sums.txt
+ - name: Store Artifacts
+ id: store
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/upload-artifact@v3
+ with:
+ name: final-artifacts
+ path: artifacts/*
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to prepare release artifacts for upload:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to prepare release artifacts for upload.
+            Checkout: ${{ steps.checkout.outcome }}
+ Prepare environment: ${{ steps.prepare.outcome }}
+ Fetch dist tarball: ${{ steps.fetch-dist.outcome }}
+ Fetch static builds: ${{ steps.fetch-static.outcome }}
+ Consolidate artifacts: ${{ steps.consolidate.outcome }}
+ Store: ${{ steps.store.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ artifact-verification-dist: # Verify the regular installer works with the consolidated artifacts.
+ name: Test Consolidated Artifacts (Source)
+ runs-on: ubuntu-latest
+ needs:
+ - prepare-upload
+ - file-check
+ services:
+      apache: # This gets used to serve the dist tarball for the installer.
+ image: httpd:2.4
+ ports:
+ - 8080:80
+ volumes:
+ - ${{ github.workspace }}:/usr/local/apache2/htdocs/
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ - name: Fetch artifacts
+ id: fetch
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: artifacts
+ - name: Prepare artifacts directory
+ id: prepare
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ mkdir -p download/latest
+ mv artifacts/* download/latest
+ - name: Verify that artifacts work with installer
+ id: verify
+ if: needs.file-check.outputs.run == 'true'
+ env:
+ NETDATA_TARBALL_BASEURL: http://localhost:8080/
+ run: packaging/installer/kickstart.sh --build-only --dont-start-it --disable-telemetry --dont-wait
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Artifact verification for source tarball failed.'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Artifact verification for source tarball failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Verify artifacts: ${{ steps.verify.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ artifact-verification-static: # Verify the static installer works with the consolidated artifacts.
+ name: Test Consolidated Artifacts (Static)
+ runs-on: ubuntu-latest
+ needs:
+ - prepare-upload
+ - file-check
+ services:
+ apache: # This gets used to serve the static archives.
+ image: httpd:2.4
+ ports:
+ - 8080:80
+ volumes:
+ - ${{ github.workspace }}:/usr/local/apache2/htdocs/
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ - name: Fetch artifacts
+ id: fetch-artifacts
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: artifacts
+ - name: Prepare artifacts directory
+ id: prepare
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ mkdir -p download/latest
+ mv artifacts/* download/latest
+ - name: Verify that artifacts work with installer
+ id: verify
+ if: needs.file-check.outputs.run == 'true'
+ env:
+ NETDATA_TARBALL_BASEURL: http://localhost:8080/
+ run: packaging/installer/kickstart.sh --static-only --dont-start-it --disable-telemetry
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Artifact verification for static build failed.'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Artifact verification for static build failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch artifacts: ${{ steps.fetch-artifacts.outcome }}
+ Verify artifacts: ${{ steps.verify.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ upload-nightly: # Upload the nightly build artifacts to GCS.
+ name: Upload Nightly Artifacts
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'nightly' && github.repository == 'netdata/netdata'
+ needs:
+ - artifact-verification-dist
+ - artifact-verification-static
+ steps:
+ - name: Retrieve Artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: final-artifacts
+ - name: Authenticate to GCS
+ id: gcs-auth
+ uses: google-github-actions/auth@v1
+ with:
+ project_id: ${{ secrets.GCP_NIGHTLY_STORAGE_PROJECT }}
+ credentials_json: ${{ secrets.GCS_STORAGE_SERVICE_KEY_JSON }}
+ - name: Setup GCS
+ id: gcs-setup
+ uses: google-github-actions/setup-gcloud@v1.1.1
+ - name: Upload Artifacts
+ id: upload
+ uses: google-github-actions/upload-cloud-storage@v1.0.3
+ with:
+ destination: ${{ secrets.GCP_NIGHTLY_STORAGE_BUCKET }}
+ gzip: false
+ path: ./final-artifacts
+ parent: false
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to upload nightly release artifacts:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to upload nightly release artifacts.
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+            Authenticate to GCS: ${{ steps.gcs-auth.outcome }}
+ Setup GCS: ${{ steps.gcs-setup.outcome }}
+ Upload artifacts: ${{ steps.upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ }}
+
+ create-nightly: # Create a nightly build release in netdata/netdata-nightlies
+ name: Create Nightly Release
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'nightly' && github.repository == 'netdata/netdata'
+ needs:
+ - artifact-verification-dist
+ - artifact-verification-static
+ steps:
+ - name: Checkout Main Repo
+ id: checkout-main
+ uses: actions/checkout@v4
+ with:
+ path: main
+ - name: Checkout Nightly Repo
+ id: checkout-nightly
+ uses: actions/checkout@v4
+ with:
+ repository: netdata/netdata-nightlies
+ path: nightlies
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Retrieve Artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: final-artifacts
+ - name: Prepare version info
+ id: version
+ run: |
+ # shellcheck disable=SC2129
+ echo "version=$(cat main/packaging/version)" >> "${GITHUB_OUTPUT}"
+ echo "commit=$(cd nightlies && git rev-parse HEAD)" >> "${GITHUB_OUTPUT}"
+ echo "date=$(date +%F)" >> "${GITHUB_OUTPUT}"
+ - name: Create Release
+ id: create-release
+ uses: ncipollo/release-action@v1
+ with:
+ allowUpdates: false
+ artifactErrorsFailBuild: true
+ artifacts: 'final-artifacts/sha256sums.txt,final-artifacts/netdata-*.tar.gz,final-artifacts/netdata-*.gz.run,final-artifacts/integrations.js'
+ owner: netdata
+ repo: netdata-nightlies
+ body: Netdata nightly build for ${{ steps.version.outputs.date }}.
+ commit: ${{ steps.version.outputs.commit }}
+ makeLatest: true
+ tag: ${{ steps.version.outputs.version }}
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+      - name: Checkout netdata main Repo # Check out netdata/netdata again to update the latest packaged versions
+ id: checkout-netdata
+ uses: actions/checkout@v4
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Init python environment for publish release metadata
+ uses: actions/setup-python@v4
+ id: init-python
+ with:
+ python-version: "3.12"
+ - name: Setup python environment
+ id: setup-python
+ run: |
+ pip install -r .github/scripts/modules/requirements.txt
+ - name: Check if the version is latest and published
+ id: check-latest-version
+ run: |
+ python .github/scripts/check_latest_versions.py ${{ steps.version.outputs.version }}
+ - name: SSH setup
+ id: ssh-setup
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-latest-version.outputs.versions_needs_update == 'true'
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Sync newer releases
+ id: sync-releases
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-latest-version.outputs.versions_needs_update == 'true'
+ run: |
+ .github/scripts/upload-new-version-tags.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to draft release:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create nightly release or attach artifacts.
+ Checkout netdata/netdata: ${{ steps.checkout-main.outcome }}
+ Checkout netdata/netdata-nightlies: ${{ steps.checkout-nightly.outcome }}
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Prepare version info: ${{ steps.version.outcome }}
+ Create release: ${{ steps.create-release.outcome }}
+ Checkout back netdata/netdata: ${{ steps.checkout-netdata.outcome }}
+ Init python environment: ${{ steps.init-python.outcome }}
+ Setup python environment: ${{ steps.setup-python.outcome }}
+            Check the newly published release against the advertised one: ${{ steps.check-latest-version.outcome }}
+ Setup ssh: ${{ steps.ssh-setup.outcome }}
+ Sync with the releases: ${{ steps.sync-releases.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'workflow_dispatch'
+ }}
+
+ normalize-tag: # Fix the release tag if needed
+ name: Normalize Release Tag
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'release'
+ outputs:
+ tag: ${{ steps.tag.outputs.tag }}
+ steps:
+ - name: Normalize Tag
+ id: tag
+ run: |
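+          # e.g. an input of "1.44.3" becomes tag "v1.44.3"; anything else (such as "v1.44.3") passes through unchanged.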
+ if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "tag=v${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
+ else
+ echo "tag=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
+ fi
+
+ upload-release: # Create the draft release and upload the build artifacts.
+ name: Create Release Draft
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'release' && github.repository == 'netdata/netdata'
+ needs:
+ - artifact-verification-dist
+ - artifact-verification-static
+ - normalize-tag
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Retrieve Artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: final-artifacts
+ - name: Create Release
+ id: create-release
+ uses: ncipollo/release-action@v1
+ with:
+ allowUpdates: false
+ artifactErrorsFailBuild: true
+ artifacts: 'final-artifacts/sha256sums.txt,final-artifacts/netdata-*.tar.gz,final-artifacts/netdata-*.gz.run,final-artifacts/integrations.js'
+ draft: true
+ tag: ${{ needs.normalize-tag.outputs.tag }}
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to draft release:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create draft release or attach artifacts.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Create draft release: ${{ steps.create-release.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'workflow_dispatch'
+ }}
+ - name: Success Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'good'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Created agent draft release:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: "${{ github.repository }}: ${{ steps.create-release.outputs.html_url }}"
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ success()
+ && github.event_name == 'workflow_dispatch'
+ }}
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
new file mode 100644
index 00000000..1308f45f
--- /dev/null
+++ b/.github/workflows/checks.yml
@@ -0,0 +1,133 @@
+---
+name: Checks
+on:
+ push:
+ branches:
+ - master
+ pull_request: null
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: checks-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: check-files
+ uses: tj-actions/changed-files@v40
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **.c
+ **.cc
+ **.h
+ **.hh
+ **.in
+ configure.ac
+ **/Makefile*
+ Makefile*
+ .gitignore
+ .github/workflows/checks.yml
+ build/**
+ aclk/aclk-schemas/
+ ml/dlib/
+ mqtt_websockets
+ web/server/h2o/libh2o
+ files_ignore: |
+ netdata.spec.in
+ **.md
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ libressl-checks:
+ name: LibreSSL
+ needs:
+ - file-check
+ runs-on: ubuntu-latest
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Build
+ if: needs.file-check.outputs.run == 'true'
+ run: >
+ docker run -v "$PWD":/netdata -w /netdata alpine:latest /bin/sh -c
+ 'apk add bash;
+ ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata;
+ apk del openssl openssl-dev;
+ apk add libressl libressl-dev;
+ autoreconf -ivf;
+ ./configure --disable-dependency-tracking;
+ make;'
+
+ clang-checks:
+ name: Clang
+ needs:
+ - file-check
+ runs-on: ubuntu-latest
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Build
+ if: needs.file-check.outputs.run == 'true'
+ run: docker build -f .github/dockerfiles/Dockerfile.clang .
+
+ gitignore-check:
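+    # Builds netdata from source and verifies that the work tree stays clean, i.e. that .gitignore covers all build outputs.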
+ name: .gitignore
+ needs:
+ - file-check
+ runs-on: ubuntu-latest
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ if: needs.file-check.outputs.run == 'true'
+ run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ - name: Build netdata
+ if: needs.file-check.outputs.run == 'true'
+ run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
+ - name: Check that repo is clean
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ git status --porcelain=v1 > /tmp/porcelain
+ if [ -s /tmp/porcelain ]; then
+ cat /tmp/porcelain
+ exit 1
+ fi
diff --git a/.github/workflows/cloud_regression.yml b/.github/workflows/cloud_regression.yml
new file mode 100644
index 00000000..01fcdca4
--- /dev/null
+++ b/.github/workflows/cloud_regression.yml
@@ -0,0 +1,69 @@
+name: Trigger Cloud Regression E2E Tests
+on:
+ push:
+ branches: [master]
+ paths:
+ - 'CMakeLists.txt'
+ - '**.c'
+ - '**.cc'
+ - '**.cpp'
+ - '**.h'
+ - 'mqtt_websockets/**'
+ - 'aclk/aclk-schemas/**'
+jobs:
+ trigger_cloud_regression_tests:
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ steps:
+ - name: Evaluate workflow dispatch parameters
+ env:
+ PR_REPO_NAME: ${{ github.event.pull_request.head.repo.full_name }}
+ PR_BRANCH_NAME: ${{ github.event.pull_request.head.ref }}
+ PR_COMMIT_HASH: ${{ github.event.pull_request.head.sha }}
+ id: output-workflow-dispatch-params
+ run: |
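+ # Only 'push' is configured in the on: block above, so the pull_request_target branch below is currently dormant, presumably kept for reuse with other triggers.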
+ if [ ${{ github.event_name }} == 'pull_request_target' ]; then
+ NETDATA_CUSTOM_REPO="$PR_REPO_NAME"
+ NETDATA_CUSTOM_BRANCH="$PR_BRANCH_NAME"
+ NETDATA_CUSTOM_PR_NUMBER="${{ github.event.number }}"
+ NETDATA_CUSTOM_COMMIT_HASH="$PR_COMMIT_HASH"
+ elif [ ${{ github.event_name }} == 'push' ]; then
+ NETDATA_CUSTOM_REPO="netdata/netdata"
+ NETDATA_CUSTOM_BRANCH="master"
+ NETDATA_CUSTOM_PR_NUMBER=""
+ NETDATA_CUSTOM_COMMIT_HASH="${{ github.sha }}"
+ fi
+ echo "netdata_repo=${NETDATA_CUSTOM_REPO}" >> $GITHUB_OUTPUT
+ echo "netdata_branch=${NETDATA_CUSTOM_BRANCH}" >> $GITHUB_OUTPUT
+ echo "netdata_pr_number=${NETDATA_CUSTOM_PR_NUMBER}" >> $GITHUB_OUTPUT
+ echo "netdata_commit_hash=${NETDATA_CUSTOM_COMMIT_HASH}" >> $GITHUB_OUTPUT
+
+ - name: Trigger Full Cloud Regression
+ uses: aurelien-baudet/workflow-dispatch@v2
+ with:
+ repo: netdata/test-automation
+ ref: refs/heads/master
+ workflow: regression.yml
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ inputs: '{ "netdata_branch": "${{ steps.output-workflow-dispatch-params.outputs.netdata_branch }}",
+ "netdata_repo": "${{ steps.output-workflow-dispatch-params.outputs.netdata_repo }}",
+ "netdata_pr_number": "${{ steps.output-workflow-dispatch-params.outputs.netdata_pr_number }}",
+ "netdata_branch_commit_hash": "${{ steps.output-workflow-dispatch-params.outputs.netdata_commit_hash }}",
+ "custom_netdata_image": "true"
+ }'
+ wait-for-completion: false
+
+ - name: Trigger Agent Parent/Child with Cloud Integration tests
+ uses: aurelien-baudet/workflow-dispatch@v2
+ with:
+ repo: netdata/test-automation
+ ref: refs/heads/master
+ workflow: agent_smoke_tests.yml
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ inputs: '{ "netdata_branch": "${{ steps.output-workflow-dispatch-params.outputs.netdata_branch }}",
+ "netdata_repo": "${{ steps.output-workflow-dispatch-params.outputs.netdata_repo }}",
+ "netdata_pr_number": "${{ steps.output-workflow-dispatch-params.outputs.netdata_pr_number }}",
+ "netdata_branch_commit_hash": "${{ steps.output-workflow-dispatch-params.outputs.netdata_commit_hash }}",
+ "custom_netdata_image": "true"
+ }'
+ wait-for-completion: true
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 00000000..ae5818af
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,118 @@
+---
+# Run CodeQL to analyze C/C++ and Python code.
+name: CodeQL
+on:
+ pull_request:
+ types: [opened, reopened, labeled, synchronize]
+ branches: [master]
+ push:
+ branches: [master]
+ schedule:
+ - cron: "27 2 * * 1"
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: codeql-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ prepare:
+ name: Prepare Jobs
+ runs-on: ubuntu-latest
+ outputs:
+ cpp: ${{ steps.cpp.outputs.run }}
+ python: ${{ steps.python.outputs.run }}
+ steps:
+ - name: Clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Check if we should always run
+ id: always
+ run: |
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/codeql') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo '::notice::Found run-ci/codeql label, unconditionally running all CodeQL checks.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Check for C/C++ changes
+ id: cpp
+ run: |
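+ # Decide whether the C/C++ analysis is needed: skip the diff entirely when the check above already forced a run.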
+ if [ "${{ steps.always.outputs.run }}" = "false" ]; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.(c|cc|cpp|cxx|h|hh|hpp|hxx)$' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo '::notice::C/C++ code has changed, need to run CodeQL.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Check for python changes
+ id: python
+ run: |
+ if [ "${{ steps.always.outputs.run }}" = "false" ]; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'collectors/python.d.plugin/.*\.py' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo '::notice::Python code has changed, need to run CodeQL.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ fi
+
+ analyze-cpp:
+ name: Analyze C/C++
+ runs-on: ubuntu-latest
+ needs: prepare
+ if: needs.prepare.outputs.cpp == 'true'
+ permissions:
+ security-events: write
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ languages: cpp
+ config-file: ./.github/codeql/c-cpp-config.yml
+ - name: Prepare environment
+ run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ - name: Build netdata
+ run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
+ - name: Run CodeQL
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:cpp"
+
+ analyze-python:
+ name: Analyze Python
+ runs-on: ubuntu-latest
+ needs: prepare
+ if: needs.prepare.outputs.python == 'true'
+ permissions:
+ security-events: write
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ config-file: ./.github/codeql/python-config.yml
+ languages: python
+ - name: Run CodeQL
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:python"
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
new file mode 100644
index 00000000..eb68c302
--- /dev/null
+++ b/.github/workflows/coverity.yml
@@ -0,0 +1,63 @@
+---
+# Runs coverity-scan.sh every 24h on `master`
+name: Coverity Scan
+on:
+ schedule:
+ - cron: '0 1 * * *'
+ pull_request:
+ paths:
+ - .github/workflows/coverity.yml
+ - coverity-scan.sh
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: coverity-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ coverity:
+ if: github.repository == 'netdata/netdata'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ id: checkout
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ id: prepare
+ env:
+ DEBIAN_FRONTEND: 'noninteractive'
+ run: |
+ ./packaging/installer/install-required-packages.sh \
+ --dont-wait --non-interactive netdata
+ sudo apt-get install -y libjson-c-dev libyaml-dev libipmimonitoring-dev \
+ libcups2-dev libsnappy-dev libprotobuf-dev \
+ libprotoc-dev libssl-dev protobuf-compiler \
+ libnetfilter-acct-dev
+ - name: Run coverity-scan
+ id: run
+ env:
+ REPOSITORY: 'netdata/netdata'
+ COVERITY_SCAN_TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
+ COVERITY_SCAN_SUBMIT_MAIL: ${{ secrets.COVERITY_SCAN_SUBMIT_MAIL }}
+ run: |
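+ # coverity-scan.sh is assumed to build the agent and submit the results to Coverity Scan using the token and e-mail above.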
+ bash -x ./coverity-scan.sh --with-install
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Coverity run failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Coverity failed to run correctly.
+ Checkout: ${{ steps.checkout.outcome }}
+ Environment preparation: ${{ steps.prepare.outcome }}
+ Coverity run: ${{ steps.run.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ }}
diff --git a/.github/workflows/dashboard-pr.yml b/.github/workflows/dashboard-pr.yml
new file mode 100644
index 00000000..f02cfb69
--- /dev/null
+++ b/.github/workflows/dashboard-pr.yml
@@ -0,0 +1,54 @@
+---
+# Create a PR to update the react dashboard code.
+name: Dashboard Version PR
+
+on:
+ workflow_dispatch:
+ inputs:
+ dashboard_version:
+ # This must be specified, and must _exactly_ match the version
+ # tag for the release to be used for the update.
+ description: Dashboard Version
+ required: true
+
+env:
+ DISABLE_TELEMETRY: 1
+
+jobs:
+ dashboard-pr:
+ name: Generate Dashboard Version Bump PR
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Update Files
+ id: update
+ run: |
+ web/gui/bundle_dashboard_v1.py ${{ github.event.inputs.dashboard_version }}
+ - name: Create Pull Request
+ id: pr
+ uses: peter-evans/create-pull-request@v5
+ with:
+ title: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
+ body: 'See https://github.com/netdata/dashboard/releases/tag/${{ github.event.inputs.dashboard_version }} for changes.'
+ branch: dashboard-${{ github.event.inputs.dashboard_version }}
+ branch-suffix: timestamp
+ delete-branch: true
+ commit-message: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Dashboard update PR creation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create PR to update dashboard code to newest release.
+ Checkout: ${{ steps.checkout.outcome }}
+ Update files: ${{ steps.update.outcome }}
+ Create PR: ${{ steps.pr.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 00000000..b7fe0a86
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,413 @@
+---
+name: Docker
+on:
+ push:
+ branches:
+ - master
+ pull_request: null
+ workflow_dispatch:
+ inputs:
+ version:
+ description: Version Tag
+ default: nightly
+ required: true
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: docker-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: file-check
+ uses: tj-actions/changed-files@v40
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **.c
+ **.cc
+ **.h
+ **.hh
+ **.in
+ .dockerignore
+ configure.ac
+ netdata-installer.sh
+ **/Makefile*
+ Makefile*
+ .github/workflows/docker.yml
+ .github/scripts/docker-test.sh
+ build/**
+ packaging/docker/**
+ packaging/installer/**
+ aclk/aclk-schemas/
+ ml/dlib/
+ mqtt_websockets
+ web/server/h2o/libh2o
+ files_ignore: |
+ netdata.spec.in
+ **.md
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ docker-test:
+ name: Docker Runtime Test
+ needs:
+ - file-check
+ runs-on: ubuntu-latest
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Setup Buildx
+ id: prepare
+ if: needs.file-check.outputs.run == 'true'
+ uses: docker/setup-buildx-action@v3
+ - name: Test Build
+ id: build
+ if: needs.file-check.outputs.run == 'true'
+ uses: docker/build-push-action@v5
+ with:
+ load: true
+ push: false
+ tags: netdata/netdata:test
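+ # 'load: true' imports the freshly built single-arch image into the local Docker daemon so the next step can run it.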
+ - name: Test Image
+ id: test
+ if: needs.file-check.outputs.run == 'true'
+ run: .github/scripts/docker-test.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker runtime testing failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Building or testing Docker image for linux/amd64 failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ Test image: ${{ steps.test.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ docker-ci:
+ if: github.event_name != 'workflow_dispatch'
+ name: Docker Alt Arch Builds
+ needs:
+ - docker-test
+ - file-check
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ platforms:
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Setup QEMU
+ id: qemu
+ if: matrix.platforms != 'linux/i386' && needs.file-check.outputs.run == 'true'
+ uses: docker/setup-qemu-action@v3
+ - name: Setup Buildx
+ id: buildx
+ if: needs.file-check.outputs.run == 'true'
+ uses: docker/setup-buildx-action@v3
+ - name: Build
+ id: build
+ if: needs.file-check.outputs.run == 'true'
+ uses: docker/build-push-action@v5
+ with:
+ platforms: ${{ matrix.platforms }}
+ load: false
+ push: false
+ tags: netdata/netdata:test
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker build testing failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Building Docker image for ${{ matrix.platforms }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.buildx.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
+
+ normalize-tag: # Fix the release tag if needed
+ name: Normalize Release Tag
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch'
+ outputs:
+ tag: ${{ steps.tag.outputs.tag }}
+ steps:
+ - name: Normalize Tag
+ id: tag
+ run: |
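+ # A bare X.Y.Z version gets a 'v' prefix so it matches the repository's tag naming; anything else (e.g. 'nightly') passes through unchanged.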
+ if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "tag=v${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
+ else
+ echo "tag=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
+ fi
+
+ docker-publish:
+ if: github.event_name == 'workflow_dispatch'
+ name: Docker Build and Publish
+ needs:
+ - docker-test
+ - normalize-tag
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Determine which tags to use
+ id: release-tags
+ if: github.event.inputs.version != 'nightly'
+ run: |
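+ # Write to GITHUB_ENV rather than GITHUB_OUTPUT so the tag list is visible to later steps as env.tags.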
+ echo "tags=netdata/netdata:latest,netdata/netdata:stable,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:stable,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '')" \
+ >> "${GITHUB_ENV}"
+ - name: Determine which tags to use
+ id: nightly-tags
+ if: github.event.inputs.version == 'nightly'
+ run: |
+ echo "tags=netdata/netdata:latest,netdata/netdata:edge,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:edge,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:edge" >> "${GITHUB_ENV}"
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
+ - name: Setup QEMU
+ id: qemu
+ uses: docker/setup-qemu-action@v3
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+ - name: Docker Hub Login
+ id: docker-hub-login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: GitHub Container Registry Login
+ id: ghcr-login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ - name: Quay.io Login
+ id: quay-login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ registry: quay.io
+ username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
+ password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
+ - name: Docker Build
+ id: build
+ uses: docker/build-push-action@v5
+ with:
+ platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
+ push: ${{ github.repository == 'netdata/netdata' }}
+ tags: ${{ env.tags }}
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to build or publish Docker images.
+ Checkout: ${{ steps.checkout.outcome }}
+ Generate release tags: ${{ steps.release-tags.outcome }}
+ Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.buildx.outcome }}
+ Login to DockerHub: ${{ steps.docker-hub-login.outcome }}
+ Login to GHCR: ${{ steps.ghcr-login.outcome }}
+ Login to Quay: ${{ steps.quay-login.outcome }}
+ Build and publish images: ${{ steps.build.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+ - name: Trigger Helmchart PR
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/helmchart
+ workflow: Agent Version PR
+ ref: refs/heads/master
+ inputs: '{"agent_version": "${{ needs.normalize-tag.outputs.tag }}"}'
+ - name: Trigger MSI build
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/msi-installer
+ workflow: Build
+ ref: refs/heads/master
+ inputs: '{"tag": "${{ needs.normalize-tag.outputs.tag }}", "pwd": "${{ secrets.MSI_CODE_SIGNING_PASSWORD }}"}'
+
+ docker-dbg-publish:
+ if: github.event_name == 'workflow_dispatch'
+ name: Docker Build and Publish (Debugging Image)
+ needs:
+ - docker-test
+ - normalize-tag
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Determine which tags to use
+ id: release-tags
+ if: github.event.inputs.version != 'nightly'
+ run: |
+ echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:stable,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:stable,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '-debug')" \
+ >> "${GITHUB_ENV}"
+ - name: Determine which tags to use
+ id: nightly-tags
+ if: github.event.inputs.version == 'nightly'
+ run: |
+ echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:edge,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:edge,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:edge" >> "${GITHUB_ENV}"
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
+ - name: Setup QEMU
+ id: qemu
+ uses: docker/setup-qemu-action@v3
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+ - name: Docker Hub Login
+ id: docker-hub-login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: GitHub Container Registry Login
+ id: ghcr-login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ - name: Quay.io Login
+ id: quay-login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ registry: quay.io
+ username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
+ password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
+ - name: Docker Build
+ id: build
+ uses: docker/build-push-action@v5
+ with:
+ platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
+ push: ${{ github.repository == 'netdata/netdata' }}
+ tags: ${{ env.tags }}
+ build-args: |
+ OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ DEBUG_BUILD=1
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker Debug Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to build or publish Docker debug images.
+ Checkout: ${{ steps.checkout.outcome }}
+ Generate release tags: ${{ steps.release-tags.outcome }}
+ Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.buildx.outcome }}
+ Login to DockerHub: ${{ steps.docker-hub-login.outcome }}
+ Login to GHCR: ${{ steps.ghcr-login.outcome }}
+ Login to Quay: ${{ steps.quay-login.outcome }}
+ Build and publish images: ${{ steps.build.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 00000000..a0554b16
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,29 @@
+---
+name: Docs
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - '**.md'
+ pull_request:
+ paths:
+ - '**.md'
+env:
+ DISABLE_TELEMETRY: 1
+jobs:
+ markdown-link-check:
+ name: Broken Links
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Run link check
+ uses: gaurav-nelson/github-action-markdown-link-check@v1
+ with:
+ use-quiet-mode: 'no'
+ use-verbose-mode: 'yes'
+ check-modified-files-only: 'yes'
+ config-file: '.mlc_config.json'
diff --git a/.github/workflows/generate-integrations.yml b/.github/workflows/generate-integrations.yml
new file mode 100644
index 00000000..4128e992
--- /dev/null
+++ b/.github/workflows/generate-integrations.yml
@@ -0,0 +1,107 @@
+---
+# CI workflow used to regenerate `integrations/integrations.js` and accompanying documentation when relevant source files are changed.
+name: Generate Integrations
+on:
+ push:
+ branches:
+ - master
+ paths: # If any of these files change, we need to regenerate integrations.js.
+ - 'collectors/**/metadata.yaml'
+ - 'exporting/**/metadata.yaml'
+ - 'health/notifications/**/metadata.yaml'
+ - 'integrations/templates/**'
+ - 'integrations/categories.yaml'
+ - 'integrations/deploy.yaml'
+ - 'integrations/cloud-notifications/metadata.yaml'
+ - 'integrations/gen_integrations.py'
+ - 'packaging/go.d.version'
+ workflow_dispatch: null
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref.
+ group: integrations-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ generate-integrations:
+ name: Generate Integrations
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ steps:
+ - name: Checkout Agent
+ id: checkout-agent
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ submodules: recursive
+ - name: Get Go Ref
+ id: get-go-ref
+ run: echo "go_ref=$(cat packaging/go.d.version)" >> "${GITHUB_ENV}"
+ - name: Checkout Go
+ id: checkout-go
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ path: go.d.plugin
+ repository: netdata/go.d.plugin
+ ref: ${{ env.go_ref }}
+ - name: Prepare Dependencies
+ id: prep-deps
+ run: |
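+ # Install the generators' Python dependencies in a throwaway virtualenv; it is removed again in the cleanup step below.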
+ sudo apt-get install -y python3-venv
+ python3 -m venv ./virtualenv
+ source ./virtualenv/bin/activate
+ pip install jsonschema referencing jinja2 ruamel.yaml
+ - name: Generate Integrations
+ id: generate
+ run: |
+ source ./virtualenv/bin/activate
+ python3 integrations/gen_integrations.py
+ - name: Generate Integrations Documentation
+ id: generate-integrations-documentation
+ run: |
+ python3 integrations/gen_docs_integrations.py
+ - name: Generate collectors/COLLECTORS.md
+ id: generate-collectors-md
+ run: |
+ python3 integrations/gen_doc_collector_page.py
+ - name: Clean Up Temporary Data
+ id: clean
+ run: rm -rf go.d.plugin virtualenv
+ - name: Create PR
+ id: create-pr
+ uses: peter-evans/create-pull-request@v5
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ commit-message: Regenerate integrations.js
+ branch: integrations-regen
+ title: Regenerate integrations.js
+ body: |
+ Regenerate `integrations/integrations.js`, and documentation based on the
+ latest code.
+
+ This PR was auto-generated by
+ `.github/workflows/generate-integrations.yml`.
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Integrations regeneration failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create PR rebuilding integrations.js
+ Checkout Agent: ${{ steps.checkout-agent.outcome }}
+ Get Go Ref: ${{ steps.get-go-ref.outcome }}
+ Checkout Go: ${{ steps.checkout-go.outcome }}
+ Prepare Dependencies: ${{ steps.prep-deps.outcome }}
+ Generate Integrations: ${{ steps.generate.outcome }}
+ Generate Integrations Documentation: ${{ steps.generate-integrations-documentation.outcome }}
+ Generate collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }}
+ Clean Up Temporary Data: ${{ steps.clean.outcome }}
+ Create PR: ${{ steps.create-pr.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
new file mode 100644
index 00000000..a1e3b52f
--- /dev/null
+++ b/.github/workflows/labeler.yml
@@ -0,0 +1,21 @@
+---
+# Handles labelling of PRs.
+name: Pull Request Labeler
+on:
+ pull_request_target: null
+concurrency:
+ group: pr-label-${{ github.repository_id }}-${{ github.event.pull_request.number }}
+ cancel-in-progress: true
+jobs:
+ labeler:
+ name: Apply PR Labels
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ pull-requests: write
+ steps:
+ - uses: actions/labeler@v4
+ if: github.repository == 'netdata/netdata'
+ with:
+ repo-token: "${{ secrets.GITHUB_TOKEN }}"
+ sync-labels: true
diff --git a/.github/workflows/monitor-releases.yml b/.github/workflows/monitor-releases.yml
new file mode 100644
index 00000000..649cf68a
--- /dev/null
+++ b/.github/workflows/monitor-releases.yml
@@ -0,0 +1,72 @@
+---
+name: Monitor-releases
+
+on:
+ release:
+ types: [released, deleted]
+ workflow_dispatch:
+ inputs:
+ channel:
+ description: 'Specify the release channel'
+ required: true
+ default: 'stable'
+
+
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: monitor-${{ github.event.inputs.channel }}-releases-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+
+jobs:
+ update-stable-agents-metadata:
+ name: update-stable-agents-metadata
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Init python environment
+ uses: actions/setup-python@v4
+ id: init-python
+ with:
+ python-version: "3.12"
+ - name: Setup python environment
+ id: setup-python
+ run: |
+ pip install -r .github/scripts/modules/requirements.txt
+ - name: Check for newer versions
+ id: check-newer-releases
+ run: |
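+ # The script is assumed to compare released versions against what packages.netdata.cloud serves and to set the versions_needs_update output consumed below.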
+ python .github/scripts/check_latest_versions_per_channel.py "${{ github.event.inputs.channel }}"
+ - name: SSH setup
+ id: ssh-setup
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-newer-releases.outputs.versions_needs_update == 'true'
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Sync newer releases
+ id: sync-releases
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-newer-releases.outputs.versions_needs_update == 'true'
+ run: |
+ .github/scripts/upload-new-version-tags.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to update stable Agent metadata:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to update stable Agent's metadata.
+ Checkout: ${{ steps.checkout.outcome }}
+ Init python: ${{ steps.init-python.outcome }}
+ Setup python: ${{ steps.setup-python.outcome }}
+ Check for newer stable releases: ${{ steps.check-newer-releases.outcome }}
+ Setup ssh: ${{ steps.ssh-setup.outcome }}
+ Sync newer releases to packages.netdata.cloud: ${{ steps.sync-releases.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
diff --git a/.github/workflows/packagecloud.yml b/.github/workflows/packagecloud.yml
new file mode 100644
index 00000000..3c427756
--- /dev/null
+++ b/.github/workflows/packagecloud.yml
@@ -0,0 +1,36 @@
+---
+# Runs PackageCloud cleanup every day at 21:00 UTC
+name: PackageCloud Cleanup
+on:
+ schedule:
+ - cron: '0 21 * * *'
+ workflow_dispatch: null
+
+jobs:
+ cleanup:
+ name: PackageCloud Cleanup
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ strategy:
+ fail-fast: false
+ matrix:
+ repos:
+ - stable
+ - edge
+ - devel
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ id: checkout
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ id: prepare
+ run: |
+ pip3 install requests python-dateutil
+ - name: Run PackageCloud Cleanup
+ id: cleanup
+ env:
+ PKGCLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ python3 .github/scripts/netdata-pkgcloud-cleanup.py -r ${{ matrix.repos }}
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
new file mode 100644
index 00000000..eb936c4d
--- /dev/null
+++ b/.github/workflows/packaging.yml
@@ -0,0 +1,313 @@
+---
+# Handles building of binary packages for the agent.
+name: Packages
+on:
+ pull_request:
+ types:
+ - opened
+ - reopened
+ - labeled
+ - synchronize
+ push:
+ branches:
+ - master
+ workflow_dispatch:
+ inputs:
+ type:
+ description: Package build type
+ default: devel
+ required: true
+ version:
+ description: Package version
+ required: false
+env:
+ DISABLE_TELEMETRY: 1
+ REPO_PREFIX: netdata/netdata
+concurrency:
+ group: packages-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: file-check
+ uses: tj-actions/changed-files@v40
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **.c
+ **.cc
+ **.h
+ **.hh
+ **.in
+ netdata.spec.in
+ configure.ac
+ **/Makefile*
+ Makefile*
+ .github/data/distros.yml
+ .github/workflows/packaging.yml
+ .github/scripts/gen-matrix-packaging.py
+ .github/scripts/pkg-test.sh
+ build/**
+ packaging/*.sh
+ packaging/*.checksums
+ packaging/*.version
+ contrib/debian/**
+ aclk/aclk-schemas/
+ ml/dlib/
+ mqtt_websockets
+ web/server/h2o/libh2o
+ files_ignore: |
+ **.md
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ matrix:
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ run: |
+ if [ "${{ github.event_name }}" = "pull_request" ] && \
+ [ "${{ !contains(github.event.pull_request.labels.*.name, 'run-ci/packaging') }}" = "true" ]; then
+ matrix="$(.github/scripts/gen-matrix-packaging.py 1)"
+ else
+ matrix="$(.github/scripts/gen-matrix-packaging.py 0)"
+ fi
+ echo "Generated matrix: ${matrix}"
+ echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}"
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Package Build matrix generation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to generate build matrix for package build.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare Tools: ${{ steps.prepare.outcome }}
+ Read Build Matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ version-check:
+ name: Version check
+ runs-on: ubuntu-latest
+ outputs:
+ repo: ${{ steps.check-version.outputs.repo }}
+ version: ${{ steps.check-version.outputs.version }}
+ retention: ${{ steps.check-version.outputs.retention }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Check Version
+ id: check-version
+ run: |
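+ # Release builds target the main repo with 365-day retention; nightly builds go to the -edge repo and devel builds to the -devel repo, both with 30-day retention.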
+ if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ case "${{ github.event.inputs.type }}" in
+ "release")
+ echo "repo=${REPO_PREFIX}" >> "${GITHUB_OUTPUT}"
+ echo "version=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
+ echo "retention=365" >> "${GITHUB_OUTPUT}"
+ ;;
+ "nightly")
+ echo "repo=${REPO_PREFIX}-edge" >> "${GITHUB_OUTPUT}"
+ echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}"
+ echo "retention=30" >> "${GITHUB_OUTPUT}"
+ ;;
+ *)
+ echo "repo=${REPO_PREFIX}-devel" >> "${GITHUB_OUTPUT}"
+ echo "version=0.${GITHUB_SHA}" >> "${GITHUB_OUTPUT}"
+ echo "retention=30" >> "${GITHUB_OUTPUT}"
+ ;;
+ esac
+ else
+ echo "version=$(cut -d'-' -f 1 packaging/version | tr -d 'v')" >> "${GITHUB_OUTPUT}"
+ echo "retention=0" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Package Build version check failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to generate version information for package build.
+ Checkout: ${{ steps.checkout.outcome }}
+ Check Version: ${{ steps.check-version.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ build:
+ name: Build
+ runs-on: ubuntu-latest
+ env:
+ DOCKER_CLI_EXPERIMENTAL: enabled
+ needs:
+ - matrix
+ - version-check
+ - file-check
+ strategy:
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ # We intentionally disable the fail-fast behavior so that a
+ # build failure for one version doesn't prevent us from publishing
+ # successfully built and tested packages for another version.
+ fail-fast: false
+ max-parallel: 8
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Checkout
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # We need full history for versioning
+ submodules: recursive
+ - name: Setup QEMU
+ id: qemu
+ if: matrix.platform != 'linux/amd64' && matrix.platform != 'linux/i386' && needs.file-check.outputs.run == 'true'
+ uses: docker/setup-qemu-action@v3
+ - name: Prepare Docker Environment
+ id: docker-config
+ if: needs.file-check.outputs.run == 'true'
+ shell: bash
+ run: |
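+ # Put build containers in a dedicated cgroup slice and enable experimental daemon features, which are assumed necessary for the cross-platform pulls below.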
+ echo '{"cgroup-parent": "actions-job.slice", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null
+ sudo service docker restart
+ - name: Fetch images
+ id: fetch-images
+ if: needs.file-check.outputs.run == 'true'
+ uses: nick-invision/retry@v2
+ with:
+ max_attempts: 3
+ retry_wait_seconds: 30
+ timeout_seconds: 900
+ command: |
+ docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}
+ docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-v1
+ - name: Build Packages
+ id: build
+ if: needs.file-check.outputs.run == 'true'
+ shell: bash
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e VERSION=${{ needs.version-check.outputs.version }} \
+ --platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-v1
+ - name: Save Packages
+ id: artifacts
+ if: needs.file-check.outputs.run == 'true'
+ continue-on-error: true
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-packages
+ path: ${{ github.workspace }}/artifacts/*
+ - name: Test Packages
+ id: test
+ if: needs.file-check.outputs.run == 'true'
+ shell: bash
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e DISTRO=${{ matrix.distro }} \
+ -e VERSION=${{ needs.version-check.outputs.version }} -e DISTRO_VERSION=${{ matrix.version }} \
+ --platform=${{ matrix.platform }} -v "$PWD":/netdata ${{ matrix.base_image }} \
+ /netdata/.github/scripts/pkg-test.sh
+ - name: Upload to PackageCloud
+ id: upload
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true'
+ continue-on-error: true
+ shell: bash
+ env:
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ for pkgfile in artifacts/*.${{ matrix.format }} ; do
+ .github/scripts/package_cloud_wrapper.sh yank ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} "${pkgfile}"
+ done
+ - name: SSH setup
+ id: ssh-setup
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true'
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Upload to packages.netdata.cloud
+ id: package-upload
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true'
+ run: |
+ .github/scripts/package-upload.sh \
+ ${{ matrix.repo_distro }} \
+ ${{ matrix.arch }} \
+ ${{ matrix.format }} \
+ ${{ needs.version-check.outputs.repo }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Package Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: ${{ matrix.repo_distro }} ${{ matrix.version }} package build for ${{ matrix.arch }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup Docker: ${{ steps.docker-config.outcome }}
+ Fetch images: ${{ steps.fetch-images.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Test: ${{ steps.test.outcome }}
+ Publish to PackageCloud: ${{ steps.upload.outcome }}
+ Import SSH Key: ${{ steps.ssh-setup.outcome }}
+ Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ && needs.file-check.outputs.run == 'true'
+ }}
diff --git a/.github/workflows/platform-eol-check.yml b/.github/workflows/platform-eol-check.yml
new file mode 100644
index 00000000..ae290a97
--- /dev/null
+++ b/.github/workflows/platform-eol-check.yml
@@ -0,0 +1,153 @@
+---
+# Auto-generate issues for EOL of platforms that are approaching their EOL date.
+# Uses https://endoflife.date and its API to check for EOL dates.
+#
+# Issues are created when the EOL date is within the next 30 days.
+name: Check Platform EOL
+on: # Run weekly and whenever manually triggered
+ schedule:
+ - cron: '0 3 * * 1'
+ workflow_dispatch: null
+concurrency: # Simple single-instance concurrency.
+ group: eol-check-${{ github.repository }}
+ cancel-in-progress: true
+jobs:
+ # Prepare the build matrix.
+ # This uses output from .github/scripts/gen-matrix-eol-check.py
+ matrix:
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ run: |
+ matrix="$(.github/scripts/gen-matrix-eol-check.py)"
+ echo "Generated matrix: ${matrix}"
+ echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}"
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to generate build matrix for platform EOL checks:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Build matrix generation for scheduled platform EOL check has failed:
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare Tools: ${{ steps.prepare.outcome }}
+ Read Build Matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'schedule'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ eol-check:
+ name: EOL Check
+ runs-on: ubuntu-latest
+ needs:
+ - matrix
+ strategy:
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ fail-fast: false # We want to check everything, so don’t bail on the first failure.
+ max-parallel: 2 # Cap of two jobs at a time to limit impact on other CI.
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ # Actually check the EOL date for the platform.
+ - name: Check EOL Date
+ id: check
+ shell: sh {0}
+ run: |
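+ # 'sh {0}' avoids the default errexit behavior so the script can inspect the checker's exit code:
+ # 0 = not near EOL, 1 = EOL within the threshold (date on stdout), 2 = no EOL data, anything else = error.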
+ d="$(.github/scripts/platform-impending-eol.py ${{ matrix.distro }} ${{ matrix.release }})"
+ case $? in
+ 0) echo "pending=false" >> "${GITHUB_OUTPUT}" ;;
+ 1)
+ echo "pending=true" >> "${GITHUB_OUTPUT}"
+ echo "date=${d}" >> "${GITHUB_OUTPUT}"
+ ;;
+ 2)
+ echo "pending=false" >> "${GITHUB_OUTPUT}"
+ echo "::info::No EOL information found for ${{ matrix.full_name }}"
+ ;;
+ *)
+ echo "::error::Failed to check EOL date for ${{ matrix.full_name }}"
+ exit 1
+ ;;
+ esac
+ # Figure out the issue title.
+ # This is it’s own step so we only have to set it in one place.
+ - name: Determine Issue Title
+ id: title
+ if: steps.check.outputs.pending == 'true'
+ run: |
+ echo "title=[Platform EOL]: ${{ matrix.full_name }} will be EOL soon." >> "${GITHUB_OUTPUT}"
+ # Check if there is an existing issue in the repo for the platform EOL.
+ # The actual command line to make the check is unfortunately
+ # complicated because GitHub thinks that it’s sensible to exit
+ # with a status of 0 if there are no results for a search.
+ - name: Check for Existing Issue
+ id: existing
+ if: steps.check.outputs.pending == 'true'
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ set -e
+ count=$(gh issue list -R netdata/netdata -s all -S '${{ steps.title.outputs.title }} in:title' --json 'id' -q '. | length')
+ if [ "${count}" -ge 1 ]; then
+ echo 'exists=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'exists=false' >> "${GITHUB_OUTPUT}"
+ fi
+ # If the platform is near EOL and there is no existing issue, create one.
+ - name: Create EOL Issue
+ id: create-issue
+ if: steps.check.outputs.pending == 'true' && steps.existing.outputs.exists == 'false'
+ uses: imjohnbo/issue-bot@v3
+ with:
+ assignees: Ferroin, tkatsoulas
+ labels: area/packaging, needs triage
+ title: ${{ steps.title.outputs.title }}
+ body: |
+ Based on information from https://endoflife.date/${{ matrix.distro }}, upstream support for ${{ matrix.full_name }} will be ending on ${{ steps.check.outputs.date }}. A PR should be created to remove this platform from our platform support document, CI, and packaging code.
+
+ - [ ] Remove platform from `packaging/PLATFORM_SUPPORT.md`
+ - [ ] Remove platform from `.github/data/distros.yml`
+ - [ ] Remove platform package builder from helper-images repo (if applicable).
+ - [ ] Verify any other platform support code that needs to be cleaned up.
+ # Send a notification to Slack if a job failed.
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Platform EOL check failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: A scheduled check for the EOL status of ${{ matrix.full_name }} has failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Check EOL Status: ${{ steps.check.outcome }}
+ Generate Issue Title: ${{ steps.title.outcome }}
+ Check for Existing Issue: ${{ steps.existing.outcome }}
+ Create Issue: ${{ steps.create-issue.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'schedule'
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..2fa51cc5
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,214 @@
+---
+# Workflow for triggering a release.
+name: Release
+on:
+ schedule:
+ - cron: '0 0 * * *'
+ workflow_dispatch: # Dispatch runs build and validate, then push to the appropriate storage location.
+ inputs:
+ type:
+ description: Build Type
+ default: nightly
+ required: true
+ version:
+ description: Version Tag
+ default: nightly
+ required: true
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: release-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ update-changelogs:
+ name: Update changelog
+ runs-on: ubuntu-latest
+ outputs:
+ ref: ${{ steps.target.outputs.ref }}
+ version: ${{ steps.target.outputs.version }}
+ type: ${{ steps.target.outputs.type }}
+ run: ${{ steps.target.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
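+ # prepare-release-base.sh decides whether this run should proceed and emits the ref, version, type, and run outputs (plus branch and message details) consumed below.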
+ - name: Prepare base ref
+ id: target
+ run: >-
+ .github/scripts/prepare-release-base.sh \
+ ${{ github.repository }} \
+ ${{ github.event_name }} \
+ ${{ github.event.inputs.type }} \
+ ${{ github.event.inputs.version }} \
+ ${{ secrets.NETDATA_RELEASE_TEST }}
+ - name: Generate Nightly Changelog
+ id: nightly-changelog
+ if: steps.target.outputs.run == 'true' && steps.target.outputs.type == 'nightly'
+ uses: heinrichreimer/github-changelog-generator-action@v2.3
+ with:
+ bugLabels: IGNOREBUGS
+ excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
+ issues: false
+ sinceTag: v1.10.0
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ unreleasedLabel: "**Next release**"
+ verbose: true
+ maxIssues: 500
+ - name: Generate Release Changelog
+ id: release-changelog
+ if: steps.target.outputs.run == 'true' && steps.target.outputs.type != 'nightly'
+ uses: heinrichreimer/github-changelog-generator-action@v2.3
+ with:
+ bugLabels: IGNOREBUGS
+ excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
+ futureRelease: ${{ github.event.inputs.version }}
+ issues: false
+ sinceTag: v1.10.0
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ unreleasedLabel: "**Next release**"
+ verbose: true
+ maxIssues: 500
+ - name: Commit Changes
+ id: commit
+ if: steps.target.outputs.run == 'true'
+ env:
+ GITHUB_TOKEN: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ run: |
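+ # Commit the version bump and changelog as the bot, tag the release (non-nightly only), create the release branch if requested, and push everything back.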
+ git config user.name "netdatabot"
+ git config user.email "bot@netdata.cloud"
+ git add packaging/version CHANGELOG.md
+ git commit -m "[ci skip] ${{ steps.target.outputs.message }}"
+ if [ "${{ steps.target.outputs.type }}" != "nightly" ]; then
+ git tag -a "${{ github.event.inputs.version }}" -m "${{ steps.target.outputs.message }}"
+ fi
+ if [ -n "${{ steps.target.outputs.new-branch }}" ]; then
+ git branch "${{ steps.target.outputs.new-branch }}"
+ fi
+ git push --tags origin "${{ steps.target.outputs.branch }}"
+ if [ -n "${{ steps.target.outputs.new-branch }}" ]; then
+ git push origin "${{ steps.target.outputs.new-branch }}"
+ fi
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to prepare changelog:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to prepare changelog.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare base ref: ${{ steps.target.outcome }}
+ Generate nightly changelog: ${{ steps.nightly-changelog.outcome }}
+ Generate release changelog: ${{ steps.release-changelog.outcome }}
+ Commit changes: ${{ steps.commit.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
+
+ trigger-artifacts:
+ name: Trigger artifact builds
+ runs-on: ubuntu-latest
+ needs: update-changelogs
+ if: needs.update-changelogs.outputs.run == 'true'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ - name: Trigger build
+ id: trigger
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: ${{ github.repository }}
+ workflow: build.yml
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ inputs: '{"version": "${{ needs.update-changelogs.outputs.version }}", "type": "${{ needs.update-changelogs.outputs.type }}"}'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to trigger ${{ needs.update-changelogs.outputs.type }} artifact builds:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger ${{ needs.update-changelogs.outputs.type }} artifact builds.
+ Checkout: ${{ steps.checkout.outcome }}
+ Trigger build: ${{ steps.trigger.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
+
+ trigger-docker:
+ name: Trigger docker builds
+ runs-on: ubuntu-latest
+ needs: update-changelogs
+ if: needs.update-changelogs.outputs.run == 'true'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ - name: Trigger build
+ id: trigger
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: ${{ github.repository }}
+ workflow: docker.yml
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ inputs: '{"version": "${{ needs.update-changelogs.outputs.version }}"}'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to trigger ${{ needs.update-changelogs.outputs.type }} Docker builds:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger ${{ needs.update-changelogs.outputs.type }} Docker builds.
+ Checkout: ${{ steps.checkout.outcome }}
+ Trigger build: ${{ steps.trigger.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
+
+ trigger-packages:
+ name: Trigger package builds
+ runs-on: ubuntu-latest
+ needs: update-changelogs
+ if: needs.update-changelogs.outputs.run == 'true'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ - name: Trigger build
+ id: trigger
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: ${{ github.repository }}
+ workflow: packaging.yml
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ inputs: '{"version": "${{ needs.update-changelogs.outputs.version }}", "type": "${{ needs.update-changelogs.outputs.type }}"}'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to trigger ${{ needs.update-changelogs.outputs.type }} package builds:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger ${{ needs.update-changelogs.outputs.type }} package builds.
+ Checkout: ${{ steps.checkout.outcome }}
+ Trigger build: ${{ steps.trigger.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml
new file mode 100644
index 00000000..df8fac20
--- /dev/null
+++ b/.github/workflows/repoconfig-packages.yml
@@ -0,0 +1,155 @@
+---
+# Handles building of repository configuration packages for the agent.
+name: Repository Packages
+on:
+ workflow_dispatch: null
+ pull_request:
+ paths:
+ - packaging/repoconfig/**
+ - .github/workflows/repoconfig-packages.yml
+ - .github/data/distros.yml
+ push:
+ branches:
+ - master
+ paths:
+ - packaging/repoconfig/**
+ - .github/workflows/repoconfig-packages.yml
+ - .github/data/distros.yml
+env:
+ DISABLE_TELEMETRY: 1
+ REPO_PREFIX: netdata/netdata
+jobs:
+ matrix:
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ run: |
+ matrix="$(.github/scripts/gen-matrix-repoconfig.py)"
+ echo "Generated matrix: ${matrix}"
+ echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}"
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Repository Package Build matrix generation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to generate build matrix for repository package build.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare Tools: ${{ steps.prepare.outcome }}
+ Read Build Matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ build:
+ name: Build
+ runs-on: ubuntu-latest
+ env:
+ DISABLE_TELEMETRY: 1
+ DOCKER_CLI_EXPERIMENTAL: enabled
+ needs:
+ - matrix
+ strategy:
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ # We intentionally disable the fail-fast behavior so that a
+ # build failure for one version doesn't prevent us from publishing
+ # successfully built and tested packages for another version.
+ fail-fast: false
+ max-parallel: 8
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+      # Unlike in most of our workflows, we need neither a deep clone nor submodules here.
+ - name: Fetch base image
+ id: fetch-images
+ uses: nick-invision/retry@v2
+ with:
+ max_attempts: 3
+ retry_wait_seconds: 30
+ timeout_seconds: 900
+ command: docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}
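+      # seccomp=unconfined below is presumably needed so that the package
+      # tooling (including foreign-architecture builds emulated via qemu) is
+      # not blocked by Docker's default seccomp profile.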
+ - name: Build Packages
+ id: build
+ shell: bash
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --platform ${{ matrix.platform }} \
+ -v "$PWD":/netdata ${{ matrix.base_image }} \
+ /netdata/packaging/repoconfig/build-${{ matrix.format }}.sh
+ - name: Upload Packages
+ id: publish
+ if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
+ continue-on-error: true
+ shell: bash
+ env:
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ for pkgfile in artifacts/*.${{ matrix.format }} ; do
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ done
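+      # Each package is yanked before being pushed so that re-runs are
+      # effectively idempotent; the `|| true` keeps the yank from failing the
+      # job when no package of that name exists yet.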
+ - name: SSH setup
+ id: ssh-setup
+ if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Upload to packages.netdata.cloud
+ id: package-upload
+ if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
+ run: |
+ for arch in ${{ matrix.arches }}; do
+ for suffix in '' -edge -repoconfig ; do
+ .github/scripts/package-upload.sh \
+ ${{ matrix.pkgclouddistro }} \
+ ${arch} \
+ ${{ matrix.format }} \
+ netdata/netdata${suffix}
+ done
+ done
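+      # The empty, -edge, and -repoconfig suffixes mirror the three
+      # PackageCloud repositories targeted above, so packages.netdata.cloud
+      # stays in sync with them.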
+ - name: Failure Notification
+ if: ${{ failure() && github.repository == 'netdata/netdata' }}
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Repository Package Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: ${{ matrix.pkgclouddistro }} ${{ matrix.version }} repository package build failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch images: ${{ steps.fetch-images.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Publish to PackageCloud: ${{ steps.publish.outcome }}
+ Import SSH Key: ${{ steps.ssh-setup.outcome }}
+ Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
new file mode 100644
index 00000000..8cb61f2c
--- /dev/null
+++ b/.github/workflows/review.yml
@@ -0,0 +1,254 @@
+---
+# Runs various linter checks against PRs and suggests changes to improve quality.
+name: Review
+on:
+ pull_request:
+ types: [opened, reopened, labeled, synchronize]
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: review-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ prep-review:
+ name: Prepare Review Jobs
+ runs-on: ubuntu-latest
+ outputs:
+ actionlint: ${{ steps.actionlint.outputs.run }}
+ clangformat: ${{ steps.clangformat.outputs.run }}
+ eslint: ${{ steps.eslint.outputs.run }}
+ flake8: ${{ steps.flake8.outputs.run }}
+ hadolint: ${{ steps.hadolint.outputs.run }}
+ shellcheck: ${{ steps.shellcheck.outputs.run }}
+ yamllint: ${{ steps.yamllint.outputs.run }}
+ steps:
+ - name: Clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
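+      # Each check below runs if either the corresponding run-ci/* label is
+      # set on the PR, or files relevant to that linter changed relative to
+      # the base branch.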
+ - name: Check files for actionlint
+ id: actionlint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/actionlint') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.github/workflows/.*' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'GitHub Actions workflows have changed, need to run actionlint.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ # - name: Check files for clang-format
+ # id: clangformat
+ # run: |
+ # if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/clang-format') }}" = "true" ]; then
+ # echo "run=true" >> "${GITHUB_OUTPUT}"
+ # elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.cpp$|\.cxx$|\.c$|\.hpp$|\.hxx$|\.h$' ; then
+ # echo "run=true" >> "${GITHUB_OUTPUT}"
+ # echo 'C/C++ code has changed, need to run clang-format.'
+ # else
+ # echo "run=false" >> "${GITHUB_OUTPUT}"
+ # fi
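+      # Note that with the step above commented out, the clangformat output of
+      # this job is never set to 'true', so the clang-format job below is
+      # currently skipped even when the run-ci/clang-format label is present.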
+ - name: Check files for eslint
+ id: eslint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/eslint') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -v "web/gui/v1" | grep -v "web/gui/v2" | grep -v "integrations/" | grep -Eq '.*\.js' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'JS files have changed, need to run ESLint.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Check files for flake8
+ id: flake8
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/flake8') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.py' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'Python files have changed, need to run flake8.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Check files for hadolint
+ id: hadolint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/hadolint') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*Dockerfile.*' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'Dockerfiles have changed, need to run Hadolint.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Check files for shellcheck
+ id: shellcheck
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/shellcheck') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.sh.*' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'Shell scripts have changed, need to run shellcheck.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Check files for yamllint
+ id: yamllint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/yamllint') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.ya?ml|python\.d/.*\.conf' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'YAML files have changed, need to run yamllint.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+
+ actionlint:
+ name: actionlint
+ needs: prep-review
+ if: needs.prep-review.outputs.actionlint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Run actionlint
+ uses: reviewdog/action-actionlint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+
+ clang-format:
+ name: clang-format
+ needs: prep-review
+ if: needs.prep-review.outputs.clangformat == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: false
+ fetch-depth: 0
+ - name: Check for label
+ id: label
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/clang-format') }}" = "true" ]; then
+ echo 'check-all=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'check-all=false' >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Run clang-format
+ run: |
+ if [ "${{ steps.label.outputs.check-all }}" == 'true' ]; then
+ find . -regex '.*\.\(c\|cpp\|cxx\|h\|hpp\|hxx\)$' -exec clang-format -i --style=file '{}' \;
+ else
+ git diff --name-only origin/${{ github.base_ref }} HEAD | grep -E '\.cpp$|\.cxx$|\.c$|\.hpp$|\.hxx$|\.h$' | \
+ xargs -n 1 -r clang-format -i --style=file
+ fi
+ git status --porcelain=v1 > /tmp/porcelain
+ if [ -s /tmp/porcelain ]; then
+ cat /tmp/porcelain
+ exit 1
+ fi
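+      # A hypothetical local equivalent of the label-less path above, assuming
+      # clang-format is installed and origin/master is the base branch:
+      #   git diff --name-only origin/master HEAD \
+      #     | grep -E '\.(c|h)(pp|xx)?$' | xargs -r clang-format -i --style=file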
+
+ eslint:
+ name: eslint
+ needs: prep-review
+ if: needs.prep-review.outputs.eslint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Install eslint
+ run: npm install eslint -D
+ - name: Run eslint
+ uses: reviewdog/action-eslint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+ eslint_flags: '.'
+
+ flake8:
+ name: flake8
+ needs: prep-review
+ if: needs.prep-review.outputs.flake8 == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.10"
+ - name: Run flake8
+ uses: reviewdog/action-flake8@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+
+ hadolint:
+ name: hadolint
+ needs: prep-review
+ if: needs.prep-review.outputs.hadolint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - name: Run hadolint
+ uses: reviewdog/action-hadolint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+
+ shellcheck:
+ name: shellcheck
+ needs: prep-review
+ if: needs.prep-review.outputs.shellcheck == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Run shellcheck
+ uses: reviewdog/action-shellcheck@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+ path: "."
+ pattern: "*.sh*"
+ exclude: |
+ ./.git/*
+ packaging/makeself/makeself.sh
+ packaging/makeself/makeself-header.sh
+ ./fluent-bit/*
+
+ yamllint:
+ name: yamllint
+ needs: prep-review
+ if: needs.prep-review.outputs.yamllint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Run yamllint
+ uses: reviewdog/action-yamllint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 00000000..46384ffc
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,41 @@
+---
+# Runs tests on pushes to `master` and on pull requests.
+name: Tests
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - 'CMakeLists.txt'
+ - '**.c'
+ - '**.h'
+ pull_request:
+ paths:
+ - 'CMakeLists.txt'
+ - '**.c'
+ - '**.h'
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: tests-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ unit-tests-legacy:
+ name: Unit Tests (legacy)
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ run: |
+ ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
+ sudo apt-get install -y libjson-c-dev libyaml-dev libipmimonitoring-dev libcups2-dev libsnappy-dev \
+ libprotobuf-dev libprotoc-dev libssl-dev protobuf-compiler \
+ libnetfilter-acct-dev
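+      # The CFLAGS below build with light optimization while enabling the
+      # agent's internal consistency checks and lock verification (per the
+      # NETDATA_INTERNAL_CHECKS and NETDATA_VERIFY_LOCKS flag names).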
+ - name: Run ./tests/run-unit-tests.sh
+ env:
+ CFLAGS: "-O1 -DNETDATA_INTERNAL_CHECKS=1 -DNETDATA_VERIFY_LOCKS=1"
+ run: |
+ ./tests/run-unit-tests.sh
diff --git a/.github/workflows/trigger-learn-update.yml b/.github/workflows/trigger-learn-update.yml
new file mode 100644
index 00000000..3d39ebab
--- /dev/null
+++ b/.github/workflows/trigger-learn-update.yml
@@ -0,0 +1,37 @@
+---
+name: Trigger Netdata Learn documentation update
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - "**.mdx?"
+ - "packaging/installer/kickstart.sh"
+concurrency:
+ group: learn-trigger-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ trigger-ingest:
+    name: Trigger Netdata Learn ingest workflow
+ if: github.repository == 'netdata/netdata'
+ runs-on: ubuntu-latest
+ steps:
+      - name: Trigger Netdata Learn ingest workflow
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/learn
+ workflow: Ingest
+ ref: refs/heads/master
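+      # 'Ingest' here refers to the workflow's name in netdata/learn;
+      # benc-uk/workflow-dispatch resolves workflows by name as well as by
+      # filename.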
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Triggering Netdata Learn documentation update failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger Netdata Learn documentation update workflow.
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()