summaryrefslogtreecommitdiffstats
path: root/.github
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--.github/CODEOWNERS57
-rw-r--r--.github/ISSUE_TEMPLATE.md15
-rw-r--r--.github/ISSUE_TEMPLATE/BUG_REPORT.yml102
-rw-r--r--.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml62
-rw-r--r--.github/ISSUE_TEMPLATE/config.yml12
-rw-r--r--.github/PULL_REQUEST_TEMPLATE.md27
-rw-r--r--.github/codeql/python-config.yml10
-rw-r--r--.github/data/distros.yml214
-rw-r--r--.github/dependabot.yml9
-rw-r--r--.github/dockerfiles/Dockerfile.build_test18
-rw-r--r--.github/dockerfiles/Dockerfile.clang18
-rw-r--r--.github/labeler.yml190
-rwxr-xr-x.github/scripts/build-artifacts.sh82
-rwxr-xr-x.github/scripts/build-dist.sh71
-rwxr-xr-x.github/scripts/build-static.sh61
-rwxr-xr-x.github/scripts/bump-packaging-version.sh6
-rwxr-xr-x.github/scripts/check-updater.sh49
-rwxr-xr-x.github/scripts/ci-support-pkgs.sh14
-rwxr-xr-x.github/scripts/docker-test.sh60
-rw-r--r--.github/scripts/functions.sh69
-rwxr-xr-x.github/scripts/gen-docker-tags.py14
-rwxr-xr-x.github/scripts/get-static-cache-key.sh15
-rwxr-xr-x.github/scripts/netdata-pkgcloud-cleanup.py190
-rwxr-xr-x.github/scripts/package-upload.sh43
-rwxr-xr-x.github/scripts/package_cloud_wrapper.sh48
-rwxr-xr-x.github/scripts/pkg-test.sh138
-rwxr-xr-x.github/scripts/prepare-release-base.sh176
-rwxr-xr-x.github/scripts/run-updater-check.sh14
-rwxr-xr-x.github/scripts/run_install_with_dist_file.sh39
-rw-r--r--.github/stale.yml18
-rw-r--r--.github/workflows/add-to-project.yml26
-rw-r--r--.github/workflows/build.yml861
-rw-r--r--.github/workflows/checks.yml61
-rw-r--r--.github/workflows/cloud_regression.yml54
-rw-r--r--.github/workflows/codeql.yml117
-rw-r--r--.github/workflows/coverity.yml63
-rw-r--r--.github/workflows/dashboard-pr.yml54
-rw-r--r--.github/workflows/docker.yml298
-rw-r--r--.github/workflows/docs.yml29
-rw-r--r--.github/workflows/labeler.yml18
-rw-r--r--.github/workflows/packagecloud.yml36
-rw-r--r--.github/workflows/packaging.yml279
-rw-r--r--.github/workflows/release.yml214
-rw-r--r--.github/workflows/repoconfig-packages.yml183
-rw-r--r--.github/workflows/review.yml172
-rw-r--r--.github/workflows/tests.yml41
-rw-r--r--.github/workflows/trigger-learn-update.yml37
47 files changed, 4384 insertions, 0 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..c513b71
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,57 @@
+# Files which shouldn't be changed manually are owned by @netdatabot.
+# This way we prevent modifications which will be overwritten by automation.
+
+# Global (default) code owner
+* @Ferroin
+
+# Ownership by directory structure
+.travis/ @Ferroin
+.github/ @Ferroin
+aclk/ @stelfrag @underhood
+build/ @Ferroin
+contrib/debian @Ferroin
+collectors/ @thiagoftsm
+collectors/ebpf.plugin/ @thiagoftsm
+collectors/charts.d.plugin/ @ilyam8 @Ferroin
+collectors/freebsd.plugin/ @thiagoftsm
+collectors/macos.plugin/ @thiagoftsm
+collectors/python.d.plugin/ @ilyam8
+collectors/cups.plugin/ @simonnagl @thiagoftsm
+exporting/ @thiagoftsm
+daemon/ @thiagoftsm @vkalintiris
+database/ @thiagoftsm @vkalintiris
+docs/ @DShreve2
+health/ @thiagoftsm @vkalintiris @MrZammler
+health/health.d/ @thiagoftsm @MrZammler
+health/notifications/ @Ferroin @thiagoftsm @MrZammler
+ml/ @andrewm4894 @vkalintiris
+libnetdata/ @thiagoftsm @vkalintiris
+packaging/ @Ferroin
+registry/ @jacekkolasa
+streaming/ @thiagoftsm
+system/ @Ferroin
+tests/ @Ferroin @vkalintiris
+web/ @thiagoftsm @vkalintiris
+web/gui/ @jacekkolasa
+
+# Ownership by filetype (overwrites ownership by directory)
+*.am @Ferroin
+*.md @DShreve2
+Dockerfile* @Ferroin
+
+# Ownership of specific files
+.gitignore @Ferroin @vkalintiris
+.travis.yml @Ferroin
+.lgtm.yml @Ferroin
+.eslintrc @Ferroin
+.eslintignore @Ferroin
+.csslintrc @Ferroin
+.codeclimate.yml @Ferroin
+.codacy.yml @Ferroin
+.yamllint.yml @Ferroin
+netdata.spec.in @Ferroin
+netdata-installer.sh @Ferroin
+packaging/version @netdatabot @Ferroin
+
+LICENSE.md @DShreve2 @Ferroin @vkalintiris
+CHANGELOG.md @netdatabot @Ferroin
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000..bd939ba
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,15 @@
+---
+about: General issue template
+labels: "needs triage", "no changelog"
+---
+
+<!---
+This is a generic issue template. We usually prefer contributors to use one
+of 3 other specific issue templates (bug report, feature request, question)
+to allow our automation to classify them so you can get a response faster.
+However, if your issue doesn't fall into any of those 3 categories,
+use this generic template.
+--->
+
+#### Summary
+
diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
new file mode 100644
index 0000000..d82d463
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
@@ -0,0 +1,102 @@
+name: "Netdata Agent: Bug report"
+description: "Submit a report and help us improve our free and open-source Netdata Agent"
+title: "[Bug]: "
+labels: ["bug", "needs triage"]
+body:
+ - type: markdown
+ attributes:
+ value: "### Thank you for contributing to our project!"
+ - type: markdown
+ attributes:
+ value: |
+ <img src="https://img.shields.io/github/v/release/netdata/netdata.svg?label=latest%20stable"> <img src="https://img.shields.io/badge/dynamic/xml?url=https://storage.googleapis.com/netdata-nightlies/latest-version.txt&label=latest%20nightly&query=/text()">
+ - type: markdown
+ attributes:
+ value: |
+ Before submitting, we'd appreciate it if you:
+ - Verify that your issue is not already reported on GitHub.
+ - Check if your Netdata Agent is up to date. If not, we recommend that you [update](https://learn.netdata.cloud/docs/agent/packaging/installer/update) first.
+ - type: textarea
+ id: bug-description
+ attributes:
+ label: Bug description
+ description: Provide a description of the bug you're experiencing.
+ validations:
+ required: true
+ - type: textarea
+ id: expected-behavior
+ attributes:
+ label: Expected behavior
+ description: Describe what you expected to happen.
+ validations:
+ required: true
+ - type: textarea
+ id: reproduce
+ attributes:
+ label: Steps to reproduce
+ description: Describe the steps to reproduce the bug.
+ value: |
+ 1.
+ 2.
+ 3.
+ ...
+ validations:
+ required: true
+ - type: dropdown
+ id: install-method
+ attributes:
+ label: Installation method
+ description: |
+        Select the [installation method](https://learn.netdata.cloud/docs/agent/packaging/installer#alternative-methods) you used.
+ Describe the method in the "Additional info" section if you chose "other".
+ options:
+ - "kickstart.sh"
+ - "kickstart-static64.sh"
+ - "native binary packages (.deb/.rpm)"
+ - "from git"
+ - "from source"
+ - "docker"
+ - "helmchart (kubernetes)"
+ - "other"
+ validations:
+ required: true
+ - type: textarea
+ id: system-info
+ attributes:
+ label: System info
+ description: |
+ Provide information about your system. To get this information, execute one of the following commands based on your OS:
+ ```shell
+ # Linux
+ uname -a; grep -HvE "^#|URL" /etc/*release
+ # BSD
+ uname -a; uname -K
+ # macOS
+ uname -a; sw_vers
+ ```
+ > NOTE: This will be automatically formatted into code, so no need for backticks.
+ render: shell
+ validations:
+ required: true
+ - type: textarea
+ id: netdata-buildfinfo
+ attributes:
+ label: Netdata build info
+ description: |
+ Provide Netdata Agent version and build info. To get this information, execute:
+ ```shell
+ netdata -W buildinfo
+      # If you get "netdata: command not found", try (requires Netdata to be running)
+ $(ps aux | grep -m1 -E -o "[a-zA-Z/]+netdata ") -W buildinfo
+ ```
+ > NOTE: This will be automatically formatted into code, so no need for backticks.
+ render: shell
+ validations:
+ required: true
+ - type: textarea
+ id: additional-info
+ attributes:
+ label: Additional info
+ description: Any additional information related to the issue (ex. logs).
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml b/.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml
new file mode 100644
index 0000000..11d77a0
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/FEAT_REQUEST.yml
@@ -0,0 +1,62 @@
+name: "Netdata Agent: Feature request"
+description: "Submit a feature request and help us improve our free and open-source Netdata Agent"
+title: "[Feat]: "
+labels: ["feature request", "needs triage"]
+body:
+ - type: markdown
+ attributes:
+ value: "### Thank you for contributing to our project!"
+ - type: markdown
+ attributes:
+ value: |
+ Submit a feature request and help us improve our free and open-source Netdata Agent.
+ - type: textarea
+ id: problem
+ attributes:
+ label: Problem
+ description: |
+ Is your feature request intended to solve a problem? If so, provide a description of the problem.
+ validations:
+ required: false
+ - type: textarea
+ id: description
+ attributes:
+ label: Description
+ description: |
+ Provide a clear and concise description of the feature you want or need.
+ validations:
+ required: true
+ - type: dropdown
+ id: importance
+ attributes:
+ label: Importance
+ description: |
+ Help us to understand the importance of your feature request. Choose "blocker" if lack of this feature stops you from using Netdata Agent.
+ options:
+ - "nice to have"
+ - "really want"
+ - "must have"
+ - "blocker"
+ validations:
+ required: true
+ - type: textarea
+ id: value-proposition
+ attributes:
+ label: Value proposition
+ description: |
+        Help us to understand why we need this feature. Describe benefits that users receive if we implement this feature request.
+ value: |
+ 1.
+ 2.
+ 3.
+ ...
+ validations:
+ required: true
+ - type: textarea
+ id: proposed-implementation
+ attributes:
+ label: Proposed implementation
+ description: |
+ Share your proposal if you have any ideas on how this feature can be implemented.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..79678d7
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,12 @@
+# Ref: https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
+blank_issues_enabled: false
+contact_links:
+ - name: "Netdata Agent: Question"
+ url: https://github.com/netdata/netdata/discussions/new?category=q-a
+ about: Ask a question about Netdata Agent
+ - name: "Netdata Cloud"
+ url: https://github.com/netdata/netdata-cloud/issues/new/choose
+ about: Create a report to help us improve our web application
+ - name: Community
+ url: https://netdata.cloud/community
+ about: If you don't know where to start, visit our community page!
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..829d8e6
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,27 @@
+##### Summary
+<!--
+Describe the change in summary section, including rationale and design decisions.
+Include "Fixes #nnn" if you are fixing an existing issue.
+-->
+
+##### Test Plan
+
+<!--
+Provide enough detail so that your reviewer can understand which test cases you
+have covered, and recreate them if necessary. If our CI covers sufficient tests, then state which tests cover the change.
+-->
+
+##### Additional Information
+<!-- This is usually used to help others understand your
+motivation behind this change. A step-by-step reproduction of the problem is
+helpful if there is no related issue. -->
+
+<details> <summary>For users: How does this change affect me?</summary>
+ <!--
+Describe how the PR affects users:
+- Which area of Netdata is affected by the change?
+- Can they see the change, or is it under the hood? If they can see it, where?
+- How is the user impacted by the change?
+- What are the benefits of the change?
+-->
+</details>
diff --git a/.github/codeql/python-config.yml b/.github/codeql/python-config.yml
new file mode 100644
index 0000000..c82727c
--- /dev/null
+++ b/.github/codeql/python-config.yml
@@ -0,0 +1,10 @@
+paths-ignore:
+ - .github
+ - build_external/
+ - ml/dlib
+ - ml/json
+ - tests/api
+ - web/gui
+ - collectors/python.d.plugin/python_modules/pyyaml*
+ - collectors/python.d.plugin/python_modules/third_party
+ - collectors/python.d.plugin/python_modules/urllib3
diff --git a/.github/data/distros.yml b/.github/data/distros.yml
new file mode 100644
index 0000000..cc52752
--- /dev/null
+++ b/.github/data/distros.yml
@@ -0,0 +1,214 @@
+# This defines the full set of distros we run CI on.
+---
+platform_map: # map packaging architectures to docker platforms
+ aarch64: linux/arm64/v8
+ amd64: linux/amd64
+ arm64: linux/arm64/v8
+ armhf: linux/arm/v7
+ armhfp: linux/arm/v7
+ i386: linux/i386
+ x86_64: linux/amd64
+arch_order: # sort order for per-architecture jobs in CI
+ - amd64
+ - x86_64
+ - i386
+ - armhf
+ - armhfp
+ - arm64
+ - aarch64
+include:
+ - &alpine
+ distro: alpine
+ version: edge
+ env_prep: |
+ apk add -U bash
+ jsonc_removal: |
+ apk del json-c-dev
+ test:
+ ebpf-core: true
+ - <<: *alpine
+ version: "3.17"
+ - <<: *alpine
+ version: "3.16"
+ - <<: *alpine
+ version: "3.15"
+ - <<: *alpine
+ version: "3.14"
+
+ - distro: archlinux
+ version: latest
+ env_prep: |
+ pacman --noconfirm -Syu && pacman --noconfirm -Sy grep libffi
+ test:
+ ebpf-core: true
+
+ - &alma
+ distro: almalinux
+ version: "9"
+ base_image: almalinux
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ packages: &alma_packages
+ type: rpm
+ repo_distro: el/9
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *alma
+ version: "8"
+ packages:
+ <<: *alma_packages
+ repo_distro: el/8
+
+ - distro: centos
+ version: "7"
+ packages:
+ type: rpm
+ repo_distro: el/7
+ arches:
+ - x86_64
+ test:
+ ebpf-core: false
+
+ - &debian
+ distro: debian
+ version: "11"
+ env_prep: |
+ apt-get update
+ jsonc_removal: |
+ apt-get purge -y libjson-c-dev
+ packages: &debian_packages
+ type: deb
+ repo_distro: debian/bullseye
+ arches:
+ - i386
+ - amd64
+ - armhf
+ - arm64
+ test:
+ ebpf-core: true
+ - <<: *debian
+ version: "10"
+ packages:
+ <<: *debian_packages
+ repo_distro: debian/buster
+ test:
+ ebpf-core: false
+
+ - &fedora
+ distro: fedora
+ version: "37"
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ packages: &fedora_packages
+ type: rpm
+ repo_distro: fedora/37
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *fedora
+ version: "36"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/36
+ arches:
+ - x86_64
+ - armhfp
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *fedora
+ version: "35"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/35
+ arches:
+ - x86_64
+ - armhfp
+ - aarch64
+ test:
+ ebpf-core: true
+
+ - &opensuse
+ distro: opensuse
+ version: "15.4"
+ base_image: opensuse/leap
+ jsonc_removal: |
+ zypper rm -y libjson-c-devel
+ packages: &opensuse_packages
+ type: rpm
+ repo_distro: opensuse/15.4
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *opensuse
+ version: "15.3"
+ packages:
+ <<: *opensuse_packages
+ repo_distro: opensuse/15.3
+ test:
+ ebpf-core: false
+
+ - &oracle
+ distro: oraclelinux
+ version: "8"
+ jsonc_removal: |
+ dnf remove -y json-c-devel
+ packages: &oracle_packages
+ type: rpm
+ repo_distro: ol/8
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: true
+ - <<: *oracle
+ version: "9"
+ packages:
+ <<: *oracle_packages
+ repo_distro: ol/9
+
+ - &ubuntu
+ distro: ubuntu
+ version: "22.10"
+ env_prep: |
+ rm -f /etc/apt/apt.conf.d/docker && apt-get update
+ jsonc_removal: |
+ apt-get remove -y libjson-c-dev
+ packages: &ubuntu_packages
+ type: deb
+ repo_distro: ubuntu/kinetic
+ arches:
+ - amd64
+ - armhf
+ - arm64
+ test:
+ ebpf-core: true
+ - <<: *ubuntu
+ version: "22.04"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/jammy
+ - <<: *ubuntu
+ version: "20.04"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/focal
+ - <<: *ubuntu
+ version: "18.04"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/bionic
+ arches:
+ - i386
+ - amd64
+ - armhf
+ - arm64
+ test:
+ ebpf-core: false
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..b02b155
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,9 @@
+version: 2
+updates:
+ - package-ecosystem: github-actions
+ directory: /
+ schedule:
+ interval: weekly
+ labels:
+ - "no changelog"
+ - "area/ci"
diff --git a/.github/dockerfiles/Dockerfile.build_test b/.github/dockerfiles/Dockerfile.build_test
new file mode 100644
index 0000000..c275d61
--- /dev/null
+++ b/.github/dockerfiles/Dockerfile.build_test
@@ -0,0 +1,18 @@
+ARG BASE
+
+FROM ${BASE}
+
+ARG PRE
+ENV PRE=${PRE}
+ARG RMJSONC
+ENV RMJSONC=${RMJSONC}
+ENV DISABLE_TELEMETRY=1
+ENV GITHUB_ACTIONS=true
+
+RUN echo "${PRE}" > /prep-cmd.sh && \
+ echo "${RMJSONC}" > /rmjsonc.sh && chmod +x /rmjsonc.sh && \
+ /bin/sh /prep-cmd.sh
+
+COPY . /netdata
+
+RUN /netdata/packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
diff --git a/.github/dockerfiles/Dockerfile.clang b/.github/dockerfiles/Dockerfile.clang
new file mode 100644
index 0000000..62bb019
--- /dev/null
+++ b/.github/dockerfiles/Dockerfile.clang
@@ -0,0 +1,18 @@
+FROM debian:buster AS build
+
+# Disable apt/dpkg interactive mode
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install all build dependencies
+COPY packaging/installer/install-required-packages.sh /tmp/install-required-packages.sh
+RUN /tmp/install-required-packages.sh --dont-wait --non-interactive netdata-all
+
+# Install Clang and set as default CC
+RUN apt-get install -y clang && \
+ update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100
+
+WORKDIR /netdata
+COPY . .
+
+# Build Netdata
+RUN ./netdata-installer.sh --dont-wait --dont-start-it --disable-go --require-cloud
diff --git a/.github/labeler.yml b/.github/labeler.yml
new file mode 100644
index 0000000..c723250
--- /dev/null
+++ b/.github/labeler.yml
@@ -0,0 +1,190 @@
+# This configures label matching for PR's.
+#
+# The keys are labels, and the values are lists of minimatch patterns
+# to which those labels apply.
+#
+# NOTE: This can only add labels, not remove them.
+# NOTE: Due to YAML syntax limitations, patterns or labels which start
+# with a character that is part of the standard YAML syntax must be
+# quoted.
+#
+# Please keep the labels sorted and deduplicated.
+
+area/ACLK:
+ - aclk/*
+ - aclk/**/*
+ - database/sqlite/sqlite_aclk*
+ - mqtt_websockets
+
+area/claim:
+ - claim/*
+
+area/exporting:
+ - exporting/*
+ - exporting/**/*
+
+area/build:
+ - build/*
+ - build/**/*
+ - build_external/*
+ - build_external/**/*
+ - CMakeLists.txt
+ - configure.ac
+ - Makefile.am
+ - "**/Makefile.am"
+
+area/ci:
+ - .travis/*
+ - .travis/**/*
+ - .github/*
+ - .github/**/*
+
+area/daemon:
+ - daemon/*
+ - daemon/**/*
+
+area/database:
+ - database/*
+ - database/**/*
+
+area/docs:
+ - "*.md"
+ - "**/*.md"
+ - "**/*.mdx"
+ - diagrams/*
+ - diagrams/**/*
+
+# -----------------collectors----------------------
+
+area/collectors:
+ - collectors/*
+ - collectors/**/*
+
+collectors/plugins.d:
+ - collectors/plugins.d/*
+ - collectors/plugins.d/**/*
+
+collectors/apps:
+ - collectors/apps.plugin/*
+ - collectors/apps.plugin/**/*
+
+collectors/cgroups:
+ - collectors/cgroups.plugin/*
+ - collectors/cgroups.plugin/**/*
+
+collectors/charts.d:
+ - collectors/charts.d.plugin/*
+ - collectors/charts.d.plugin/**/*
+
+collectors/cups:
+ - collectors/cups.plugin/*
+ - collectors/cups.plugin/**/*
+
+collectors/diskspace:
+ - collectors/diskspace.plugin/*
+ - collectors/diskspace.plugin/**/*
+
+collectors/ebpf:
+ - collectors/ebpf.plugin/*
+ - collectors/ebpf.plugin/**/*
+
+collectors/fping:
+ - collectors/fping.plugin/*
+ - collectors/fping.plugin/**/*
+
+collectors/freebsd:
+ - collectors/freebsd.plugin/*
+ - collectors/freebsd.plugin/**/*
+
+collectors/freeipmi:
+ - collectors/freeipmi.plugin/*
+ - collectors/freeipmi.plugin/**/*
+
+collectors/idlejitter:
+ - collectors/idlejitter.plugin/*
+ - collectors/idlejitter.plugin/**/*
+
+collectors/ioping:
+ - collectors/ioping.plugin/*
+ - collectors/ioping.plugin/**/*
+
+collectors/macos:
+ - collectors/macos.plugin/*
+ - collectors/macos.plugin/**/*
+
+collectors/nfacct:
+ - collectors/nfacct.plugin/*
+ - collectors/nfacct.plugin/**/*
+
+collectors/perf:
+ - collectors/perf.plugin/*
+ - collectors/perf.plugin/**/*
+
+collectors/proc:
+ - collectors/proc.plugin/*
+ - collectors/proc.plugin/**/*
+
+collectors/python.d:
+ - collectors/python.d.plugin/*
+ - collectors/python.d.plugin/**/*
+
+collectors/slabinfo:
+ - collectors/slabinfo.plugin/*
+ - collectors/slabinfo.plugin/**/*
+
+collectors/statsd:
+ - collectors/statsd.plugin/*
+ - collectors/statsd.plugin/**/*
+
+collectors/tc:
+ - collectors/tc.plugin/*
+ - collectors/tc.plugin/**/*
+
+collectors/timex:
+ - collectors/timex.plugin/*
+ - collectors/timex.plugin/**/*
+
+collectors/xenstat:
+ - collectors/xenstat.plugin/*
+ - collectors/xenstat.plugin/**/*
+
+# ----------------/collectors----------------------
+
+area/health:
+ - health/*
+ - health/**/*
+
+area/ml:
+ - ml/*
+ - ml/**/*
+
+area/packaging:
+ - contrib/*
+ - contrib/**/*
+ - packaging/*
+ - packaging/**/*
+ - system/*
+ - system/**/*
+ - Dockerfile*
+ - netdata-installer.sh
+ - netdata.spec.in
+
+area/registry:
+ - registry/*
+ - registry/**/*
+
+area/streaming:
+ - streaming/*
+ - streaming/**/*
+
+area/tests:
+ - tests/*
+ - tests/**/*
+ - daemon/unit_test*
+ - coverity-scan.sh
+ - cppcheck.sh
+ - netdata.cppcheck
+
+area/web:
+ - web/*
+ - web/**/*
diff --git a/.github/scripts/build-artifacts.sh b/.github/scripts/build-artifacts.sh
new file mode 100755
index 0000000..569c79a
--- /dev/null
+++ b/.github/scripts/build-artifacts.sh
@@ -0,0 +1,82 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
+# and netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifacts.
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ echo "${VERSION}" > packaging/version
+ ) >&2
+}
+
+build_dist() {
+ progress "Building dist"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ autoreconf -ivf
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata \
+ --disable-dependency-tracking \
+ CFLAGS=-O2
+ make dist
+ mv "${BASENAME}.tar.gz" artifacts/
+ ) >&2
+}
+
+build_static_x86_64() {
+ progress "Building static x86_64"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ USER="" ./packaging/makeself/build-x86_64-static.sh
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+
+ cd artifacts || exit 1
+ ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+ ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
+ sha256sum -b ./* > "sha256sums.txt"
+ ) >&2
+}
+
+steps="prepare_build build_dist build_static_x86_64"
+steps="$steps prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/build-dist.sh b/.github/scripts/build-dist.sh
new file mode 100755
index 0000000..027b621
--- /dev/null
+++ b/.github/scripts/build-dist.sh
@@ -0,0 +1,71 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe --always)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ echo "${VERSION}" > packaging/version
+ ) >&2
+}
+
+build_dist() {
+ progress "Building dist"
+ (
+ command -v git > /dev/null && [ -d .git ] && git clean -d -f
+ autoreconf -ivf
+ ./configure \
+ --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata \
+ --disable-dependency-tracking \
+ CFLAGS=-O2
+ make dist
+ mv "${BASENAME}.tar.gz" artifacts/
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+ cd artifacts || exit 1
+ ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+ ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
+ sha256sum -b ./* > "sha256sums.txt"
+ ) >&2
+}
+
+steps="prepare_build build_dist prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/build-static.sh b/.github/scripts/build-static.sh
new file mode 100755
index 0000000..e810514
--- /dev/null
+++ b/.github/scripts/build-static.sh
@@ -0,0 +1,61 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifact.
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+BUILDARCH="${1}"
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe)"}"
+BASENAME="$NAME-$BUILDARCH-$VERSION"
+
+prepare_build() {
+ progress "Preparing build"
+ (
+ test -d artifacts || mkdir -p artifacts
+ ) >&2
+}
+
+build_static() {
+ progress "Building static ${BUILDARCH}"
+ (
+ USER="" ./packaging/makeself/build-static.sh "${BUILDARCH}"
+ ) >&2
+}
+
+prepare_assets() {
+ progress "Preparing assets"
+ (
+ cp packaging/version artifacts/latest-version.txt
+
+ cd artifacts || exit 1
+ ln -s "${BASENAME}.gz.run" "netdata-${BUILDARCH}-latest.gz.run"
+ if [ "${BUILDARCH}" = "x86_64" ]; then
+ ln -s "${BASENAME}.gz.run" netdata-latest.gz.run
+ fi
+ ) >&2
+}
+
+steps="prepare_build build_static"
+steps="$steps prepare_assets"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/bump-packaging-version.sh b/.github/scripts/bump-packaging-version.sh
new file mode 100755
index 0000000..bffcb0c
--- /dev/null
+++ b/.github/scripts/bump-packaging-version.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+VERSION="$(git describe)"
+echo "$VERSION" > packaging/version
+git add -A
+git ci -m "[netdata nightly] $VERSION"
diff --git a/.github/scripts/check-updater.sh b/.github/scripts/check-updater.sh
new file mode 100755
index 0000000..3df0c9d
--- /dev/null
+++ b/.github/scripts/check-updater.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+#
+set -e
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+check_successful_update() {
+ progress "Check netdata version after update"
+ (
+ netdata_version=$(netdata -v | awk '{print $2}')
+ updater_version=$(cat packaging/version)
+ if [ "$netdata_version" = "$updater_version" ]; then
+ echo "Update successful!"
+ else
+ exit 1
+ fi
+ ) >&2
+}
+
+check_autoupdate_enabled() {
+ progress "Check autoupdate still enabled after update"
+ (
+ if [ -f /etc/periodic/daily/netdata-updater ] || [ -f /etc/cron.daily/netdata-updater ]; then
+ echo "Update successful!"
+ else
+ exit 1
+ fi
+ ) >&2
+}
+
+steps="check_successful_update check_autoupdate_enabled"
+
+_main() {
+ for step in $steps; do
+ if ! run "$step"; then
+ if [ -t 1 ]; then
+ debug
+ else
+ fail "Build failed"
+ fi
+ fi
+ done
+
+ echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+ _main "$@"
+fi
diff --git a/.github/scripts/ci-support-pkgs.sh b/.github/scripts/ci-support-pkgs.sh
new file mode 100755
index 0000000..bfa9c83
--- /dev/null
+++ b/.github/scripts/ci-support-pkgs.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# This script installs supporting packages needed for CI, which provide following:
+# cron, pidof
+
+set -e
+
+if [ -f /etc/centos-release ] || [ -f /etc/redhat-release ] || [ -f /etc/fedora-release ] || [ -f /etc/almalinux-release ]; then
+ # Alma, Fedora, CentOS, Redhat
+ dnf install -y procps-ng cronie cronie-anacron || yum install -y procps-ng cronie cronie-anacron
+elif [ -f /etc/arch-release ]; then
+ # Arch
+ pacman -S --noconfirm cronie
+fi
diff --git a/.github/scripts/docker-test.sh b/.github/scripts/docker-test.sh
new file mode 100755
index 0000000..22821d1
--- /dev/null
+++ b/.github/scripts/docker-test.sh
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+export DEBIAN_FRONTEND=noninteractive
+
+wait_for() {
+ host="${1}"
+ port="${2}"
+ name="${3}"
+ timeout="30"
+
+ if command -v nc > /dev/null ; then
+ netcat="nc"
+ elif command -v netcat > /dev/null ; then
+ netcat="netcat"
+ else
+ printf "Unable to find a usable netcat command.\n"
+ return 1
+ fi
+
+ printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
+
+ sleep 30
+
+ i=0
+ while ! ${netcat} -z "${host}" "${port}"; do
+ sleep 1
+ if [ "$i" -gt "$timeout" ]; then
+ printf "Timed out!\n"
+ return 1
+ fi
+ i="$((i + 1))"
+ done
+ printf "OK\n"
+}
+
+if [ -z "$(command -v nc 2>/dev/null)" ] && [ -z "$(command -v netcat 2>/dev/null)" ]; then
+ sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y netcat
+fi
+
+docker run -d --name=netdata \
+ -p 19999:19999 \
+ -v netdataconfig:/etc/netdata \
+ -v netdatalib:/var/lib/netdata \
+ -v netdatacache:/var/cache/netdata \
+ -v /etc/passwd:/host/etc/passwd:ro \
+ -v /etc/group:/host/etc/group:ro \
+ -v /proc:/host/proc:ro \
+ -v /sys:/host/sys:ro \
+ -v /etc/os-release:/host/etc/os-release:ro \
+ --cap-add SYS_PTRACE \
+ --security-opt apparmor=unconfined \
+ netdata/netdata:test
+
+wait_for localhost 19999 netdata || exit 1
+
+curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+
+cat ./response
+
+jq '.version' ./response || exit 1
diff --git a/.github/scripts/functions.sh b/.github/scripts/functions.sh
new file mode 100644
index 0000000..7cd2e08
--- /dev/null
+++ b/.github/scripts/functions.sh
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+# This file is included by download.sh & build.sh
+
+set -e
+
+color() {
+ fg="$1"
+ bg="${2}"
+ ft="${3:-0}"
+
+ printf "\33[%s;%s;%s" "$ft" "$fg" "$bg"
+}
+
+color_reset() {
+ printf "\033[0m"
+}
+
+ok() {
+ if [ -t 1 ]; then
+ printf "%s[ OK ]%s\n" "$(color 37 42m 1)" "$(color_reset)"
+ else
+ printf "%s\n" "[ OK ]"
+ fi
+}
+
+err() {
+ if [ -t 1 ]; then
+ printf "%s[ ERR ]%s\n" "$(color 37 41m 1)" "$(color_reset)"
+ else
+ printf "%s\n" "[ ERR ]"
+ fi
+}
+
+run() {
+ retval=0
+ logfile="$(mktemp -t "run-XXXXXX")"
+ if "$@" 2> "$logfile"; then
+ ok
+ else
+ retval=$?
+ err
+ tail -n 100 "$logfile" || true
+ fi
+ rm -rf "$logfile"
+ return $retval
+}
+
+progress() {
+ printf "%-40s" "$(printf "%s ... " "$1")"
+}
+
+log() {
+ printf "%s\n" "$1"
+}
+
+error() {
+ log "ERROR: ${1}"
+}
+
+fail() {
+ log "FATAL: ${1}"
+ exit 1
+}
+
+debug() {
+ log "Dropping into a shell for debugging ..."
+ exec /bin/sh
+}
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
new file mode 100755
index 0000000..df4dc02
--- /dev/null
+++ b/.github/scripts/gen-docker-tags.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python3
+
+import sys
+
+version = sys.argv[1].split('.')
+suffix = sys.argv[2]
+
+REPO = f'netdata/netdata{suffix}'
+
+MAJOR = ':'.join([REPO, version[0]])
+MINOR = ':'.join([REPO, '.'.join(version[0:2])])
+PATCH = ':'.join([REPO, '.'.join(version[0:3])])
+
+print(','.join([MAJOR, MINOR, PATCH]))
diff --git a/.github/scripts/get-static-cache-key.sh b/.github/scripts/get-static-cache-key.sh
new file mode 100755
index 0000000..d9fa285
--- /dev/null
+++ b/.github/scripts/get-static-cache-key.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+arch="${1}"
+platform="$(packaging/makeself/uname2platform.sh "${arch}")"
+
+docker pull --platform "${platform}" netdata/static-builder
+
+# shellcheck disable=SC2046
+cat $(find packaging/makeself/jobs -type f ! -regex '.*\(netdata\|-makeself\).*') > /tmp/static-cache-key-data
+
+docker run -it --rm --platform "${platform}" netdata/static-builder sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data
+
+h="$(sha256sum /tmp/static-cache-key-data | cut -f 1 -d ' ')"
+
+echo "::set-output name=key::static-${arch}-${h}"
diff --git a/.github/scripts/netdata-pkgcloud-cleanup.py b/.github/scripts/netdata-pkgcloud-cleanup.py
new file mode 100755
index 0000000..f6311e4
--- /dev/null
+++ b/.github/scripts/netdata-pkgcloud-cleanup.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python3
+
+import requests
+from requests.auth import HTTPBasicAuth
+from datetime import date, datetime, timedelta
+import os
+import sys
+import argparse
+from pprint import pprint
+# (duplicate "from datetime import datetime" removed; already imported above)
+from dateutil import parser
+
+
+class PackageCloud:
+ NUM_PACKAGE_MINOR_TO_KEEP = 5
+ NUM_RETENTION_DAYS = 30
+ # number of pages to process. Use '0' to process all
+ MAX_PAGES = 0
+
+ def __init__(self, repo_type, dry_run=True, auth_token=None):
+ self.headers = {
+ "Accept" : "application/json",
+ "Content-Type" : "application/json",
+ }
+ self.dry_run = dry_run
+ self.repo_type = repo_type
+ if repo_type == "stable":
+ repo = "netdata/netdata"
+ elif repo_type == "devel":
+ repo = "netdata/netdata-devel"
+ elif repo_type == "edge":
+ repo = "netdata/netdata-edge"
+ else:
+ print(f"ERROR: unknown repo type '{repo_type}'!\nAccepted values are: stable,devel,edge")
+ sys.exit(1)
+ self.base_url = f"https://packagecloud.io/api/v1/repos/{repo}"
+ self.auth = HTTPBasicAuth(username=auth_token, password='') if auth_token else None
+
+ def get_all_packages(self):
+ page = 1
+ all_pkg_list = []
+ while True:
+ url = f"{self.base_url}/packages.json?page={page}"
+ if page > self.MAX_PAGES and self.MAX_PAGES != 0:
+ break
+ else:
+ pkg_list = requests.get(url, auth=self.auth, headers=self.headers).json()
+ if len(pkg_list) == 0:
+ break
+ else:
+ print(f"Processing page: {page}")
+ for element in pkg_list:
+ self.is_pkg_older_than_days(element, 30)
+ if element['name'] != 'netdata-repo' and element['name'] != 'netdata-repo-edge':
+ all_pkg_list.append(element)
+ page += 1
+ return all_pkg_list
+
+ def delete_package(self, destroy_url):
+ if self.dry_run:
+ print(f" - DRY_RUN mode. Not deleting package '{destroy_url}'.")
+ else:
+ print(f" - Deleting package: {destroy_url}")
+ url = f"https://packagecloud.io{destroy_url}"
+ response = requests.delete(url, auth=self.auth, headers=self.headers).json()
+ response = None
+ if not response:
+ print(f" Package deleted successfully.")
+ else:
+ print(f" Failed deleting package!")
+
+ def get_destroy_url(self, pkg_url):
+ url = f"https://packagecloud.io{pkg_url}"
+ response = requests.get(url, auth=self.auth, headers=self.headers)
+ response.raise_for_status()
+ return response.json()['destroy_url']
+
+ def get_packages_for_distro(self, distro, all_pkg_list):
+ distro_pkg_list = [ pkg for pkg in all_pkg_list if pkg['distro_version'] == distro ]
+ return distro_pkg_list
+
+ def get_packages_for_arch(self, arch, all_pkg_list):
+ arch_pkg_list = [ pkg for pkg in all_pkg_list if pkg['package_url'].split('/')[11] == arch ]
+ return arch_pkg_list
+
+ def get_arches(self, pkg_list):
+ arches = list(set([pkg['package_url'].split('/')[11] for pkg in pkg_list ]))
+ return arches
+
+ def get_pkg_list(self, pkg_name, pkg_list):
+ filtered_list = [ pkg for pkg in pkg_list if pkg['name'] == pkg_name ]
+ return filtered_list
+
+ def get_minor_versions(self, all_versions):
+ minor_versions = ['.'.join(version.split('.')[:-1]) for version in all_versions ]
+ minor_versions = list(set(minor_versions))
+ minor_versions.sort()
+ return minor_versions
+
+ def is_pkg_older_than_days(self, pkg, num_days):
+ pkg_create_date = datetime.strptime(pkg['created_at'], '%Y-%m-%dT%H:%M:%S.%fZ')
+ time_difference = datetime.now() - pkg_create_date
+ return time_difference.days > num_days
+
+ def cleanup_repo(self):
+ if self.repo_type == 'stable':
+ self.cleanup_stable_repo()
+ else:
+ self.cleanup_edge_repo()
+
+ def cleanup_edge_repo(self):
+ all_pkg_list = self.get_all_packages()
+ pkgs_to_delete = []
+ pkgs_to_keep = []
+ for package in all_pkg_list:
+ if self.is_pkg_older_than_days(package, self.NUM_RETENTION_DAYS):
+ pkgs_to_delete.append(package)
+ else:
+ pkgs_to_keep.append(package)
+ print(f"Keeping the following packages (newer than {self.NUM_RETENTION_DAYS} days):")
+ for pkg in pkgs_to_keep:
+ print(f" > pkg: {pkg['package_html_url']} / created_at: {pkg['created_at']}")
+ print(f"Deleting the following packages (older than {self.NUM_RETENTION_DAYS} days):")
+ for pkg in pkgs_to_delete:
+ print(f" > pkg: {pkg['package_html_url']} / created_at: {pkg['created_at']}")
+ self.delete_package(pkg['destroy_url'])
+
+ def cleanup_stable_repo(self):
+ all_pkg_list = self.get_all_packages()
+ all_distros = list(set([ pkg['distro_version'] for pkg in all_pkg_list ]))
+ all_distros = sorted(all_distros)
+ print(f"<> Distributions list: {all_distros}")
+
+ for distro in all_distros:
+ print(f">> Processing distro: {distro}")
+ pkg_list_distro = self.get_packages_for_distro(distro, all_pkg_list)
+ arches = self.get_arches(pkg_list_distro)
+ print(f" <> Arch list: {arches}")
+ for arch in arches:
+ print(f" >> Processing arch: {distro} -> {arch}")
+ pkg_list_arch = self.get_packages_for_arch(arch, pkg_list_distro)
+ pkg_names = [pkg['name'] for pkg in pkg_list_arch]
+ pkg_names = list(set(pkg_names))
+ print(f" <> Package names: {pkg_names}")
+ for pkg_name in pkg_names:
+ print(f" >> Processing package: {distro} -> {arch} -> {pkg_name}")
+ pkg_list = self.get_pkg_list(pkg_name, pkg_list_arch)
+ pkg_versions = [pkg['version'] for pkg in pkg_list]
+ pkg_minor_versions = self.get_minor_versions(pkg_versions)
+ pkg_minor_to_keep = pkg_minor_versions[-self.NUM_PACKAGE_MINOR_TO_KEEP:]
+ print(f" <> Minor Package Versions to Keep: {pkg_minor_to_keep}")
+ pkg_minor_to_delete = list(set(pkg_minor_versions) - set(pkg_minor_to_keep))
+ print(f" <> Minor Package Versions to Delete: {pkg_minor_to_delete}")
+ urls_to_keep = [pkg['package_url'] for pkg in pkg_list if '.'.join(pkg['version'].split('.')[:-1]) in pkg_minor_to_keep]
+ urls_to_delete = [pkg['package_url'] for pkg in pkg_list if '.'.join(pkg['version'].split('.')[:-1]) in pkg_minor_to_delete]
+ for pkg_url in urls_to_delete:
+ destroy_url = self.get_destroy_url(pkg_url)
+ self.delete_package(destroy_url)
+
+
+def configure():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--repo-type', '-r', required=True,
+ help='Repository type against to perform cleanup')
+ parser.add_argument('--dry-run', '-d', action='store_true',
+ help='Dry-run Mode')
+ args = parser.parse_args()
+ try:
+ token = os.environ['PKGCLOUD_TOKEN']
+ except Exception as e:
+ print(f"FATAL: 'PKGCLOUD_TOKEN' environment variable is not set!", file=sys.stderr)
+ sys.exit(1)
+ repo_type = args.repo_type
+ dry_run = args.dry_run
+ conf = {
+ 'repo_type': args.repo_type,
+ 'dry_run': args.dry_run,
+ 'token': token
+ }
+ return conf
+
+
+def main():
+ config = configure()
+ pkg_cloud = PackageCloud(config['repo_type'], config['dry_run'], config['token'])
+ pkg_cloud.cleanup_repo()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/scripts/package-upload.sh b/.github/scripts/package-upload.sh
new file mode 100755
index 0000000..fd8a8cd
--- /dev/null
+++ b/.github/scripts/package-upload.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+
+set -e
+
+host="packages.netdata.cloud"
+user="netdatabot"
+
+distro="${1}"
+arch="${2}"
+format="${3}"
+repo="${4}"
+
+staging="${TMPDIR:-/tmp}/package-staging"
+prefix="/home/netdatabot/incoming/${repo}/"
+
+packages="$(find artifacts -name "*.${format}")"
+
+mkdir -p "${staging}"
+
+case "${format}" in
+ deb)
+ src="${staging}/$(echo "${distro}" | cut -f 1 -d '/')/pool/"
+ mkdir -p "${src}"
+
+ for pkg in ${packages}; do
+ cp "${pkg}" "${src}"
+ done
+ ;;
+ rpm)
+ src="${staging}/${distro}/${arch}/"
+ mkdir -p "${src}"
+
+ for pkg in ${packages}; do
+ cp "${pkg}" "${src}"
+ done
+ ;;
+ *)
+ echo "Unrecognized package format ${format}."
+ exit 1
+ ;;
+esac
+
+rsync -vrptO "${staging}/" "${user}@${host}:${prefix}"
diff --git a/.github/scripts/package_cloud_wrapper.sh b/.github/scripts/package_cloud_wrapper.sh
new file mode 100755
index 0000000..7640ef4
--- /dev/null
+++ b/.github/scripts/package_cloud_wrapper.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+#
+# This is a tool to help removal of packages from packagecloud.io
+# It utilizes the package_cloud utility provided from packagecloud.io
+#
+# Depends on:
+# 1) package cloud gem (detects absence and installs it)
+#
+# Requires:
+# 1) PKG_CLOUD_TOKEN variable exported
+# 2) To properly install package_cloud when not found, it requires: ruby gcc gcc-c++ ruby-devel
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
+#shellcheck disable=SC2068,SC2145
+set -e
+PKG_CLOUD_CONFIG="$HOME/.package_cloud_configuration.cfg"
+
+# If we are not in netdata git repo, at the top level directory, fail
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
+ echo "Run as .github/scripts/$(basename "$0") from top level directory of netdata git repository"
+ echo "Docker build process aborted"
+ exit 1
+fi
+
+# Install dependency if not there
+if ! command -v package_cloud > /dev/null 2>&1; then
+ echo "No package cloud gem found, installing"
+ sudo gem install -V package_cloud || (echo "Package cloud installation failed. you might want to check if required dependencies are there (ruby gcc gcc-c++ ruby-devel)" && exit 1)
+else
+ echo "Found package_cloud gem, continuing"
+fi
+
+# Check for required token and prepare config
+if [ -z "${PKG_CLOUD_TOKEN}" ]; then
+ echo "Please set PKG_CLOUD_TOKEN to be able to use ${0}"
+ exit 1
+fi
+echo "{\"url\":\"https://packagecloud.io\",\"token\":\"${PKG_CLOUD_TOKEN}\"}" > "${PKG_CLOUD_CONFIG}"
+
+echo "Executing package_cloud with config ${PKG_CLOUD_CONFIG} and parameters $@"
+package_cloud $@ --config="${PKG_CLOUD_CONFIG}"
+
+rm -rf "${PKG_CLOUD_CONFIG}"
+echo "Done!"
diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh
new file mode 100755
index 0000000..e3bc3e7
--- /dev/null
+++ b/.github/scripts/pkg-test.sh
@@ -0,0 +1,138 @@
+#!/bin/sh
+
+install_debian_like() {
+ # This is needed to ensure package installs don't prompt for any user input.
+ export DEBIAN_FRONTEND=noninteractive
+
+ if apt-cache show netcat 2>&1 | grep -q "No packages found"; then
+ netcat="netcat-traditional"
+ else
+ netcat="netcat"
+ fi
+
+ apt-get update
+
+ # Install Netdata
+ apt-get install -y /netdata/artifacts/netdata_"${VERSION}"*_*.deb || exit 1
+
+ # Install testing tools
+ apt-get install -y --no-install-recommends curl "${netcat}" jq || exit 1
+}
+
+install_fedora_like() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ pkg_version="$(echo "${VERSION}" | tr - .)"
+
+ # Install Netdata
+ "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm
+
+ # Install testing tools
+ "$PKGMGR" install -y curl nc jq || exit 1
+}
+
+install_centos() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ pkg_version="$(echo "${VERSION}" | tr - .)"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+  # Install EPEL (needed for `jq`)
+ "$PKGMGR" install -y epel-release || exit 1
+
+ # Install Netdata
+ "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm
+
+ # Install testing tools
+ "$PKGMGR" install -y ${opts} curl nc jq || exit 1
+}
+
+install_suse_like() {
+ # Using a glob pattern here because I can't reliably determine what the
+ # resulting package name will be (TODO: There must be a better way!)
+
+ pkg_version="$(echo "${VERSION}" | tr - .)"
+
+ # Install Netdata
+ zypper install -y --allow-unsigned-rpm /netdata/artifacts/netdata-"${pkg_version}"-*.rpm
+
+ # Install testing tools
+ zypper install -y --no-recommends curl netcat-openbsd jq || exit 1
+}
+
+dump_log() {
+ cat ./netdata.log
+}
+
+wait_for() {
+ host="${1}"
+ port="${2}"
+ name="${3}"
+ timeout="30"
+
+ if command -v nc > /dev/null ; then
+ netcat="nc"
+ elif command -v netcat > /dev/null ; then
+ netcat="netcat"
+ else
+ printf "Unable to find a usable netcat command.\n"
+ return 1
+ fi
+
+ printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
+
+ sleep 30
+
+ i=0
+ while ! ${netcat} -z "${host}" "${port}"; do
+ sleep 1
+ if [ "$i" -gt "$timeout" ]; then
+ printf "Timed out!\n"
+ return 1
+ fi
+ i="$((i + 1))"
+ done
+ printf "OK\n"
+}
+
+case "${DISTRO}" in
+ debian | ubuntu)
+ install_debian_like
+ ;;
+ fedora | oraclelinux)
+ install_fedora_like
+ ;;
+ centos | rockylinux | almalinux)
+ install_centos
+ ;;
+ opensuse)
+ install_suse_like
+ ;;
+ *)
+ printf "ERROR: unsupported distro: %s_%s\n" "${DISTRO}" "${DISTRO_VERSION}"
+ exit 1
+ ;;
+esac
+
+trap dump_log EXIT
+
+/usr/sbin/netdata -D > ./netdata.log 2>&1 &
+
+wait_for localhost 19999 netdata || exit 1
+
+curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+
+cat ./response
+
+jq '.version' ./response || exit 1
+
+trap - EXIT
diff --git a/.github/scripts/prepare-release-base.sh b/.github/scripts/prepare-release-base.sh
new file mode 100755
index 0000000..7c24f6b
--- /dev/null
+++ b/.github/scripts/prepare-release-base.sh
@@ -0,0 +1,176 @@
+#!/bin/sh
+
+set -e
+
+REPO="${1}"
+EVENT_NAME="${2}"
+EVENT_TYPE="${3}"
+EVENT_VERSION="${4}"
+RELEASE_TEST="${5}"
+
+##############################################################
+# Version validation functions
+
+check_version_format() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "::error::The supplied version (${EVENT_VERSION}) is not a valid version string."
+ return 1
+ fi
+}
+
+patch_is_zero() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.[[:digit:]]+\.0$'; then
+ echo "::error::The patch number for a ${EVENT_TYPE} build must be 0."
+ return 1
+ fi
+}
+
+minor_is_zero() {
+ if ! echo "${EVENT_VERSION}" | grep -qE '^v[[:digit:]]+\.0'; then
+ echo "::error::The minor version number for a ${EVENT_TYPE} build must be 0."
+ return 1
+ fi
+}
+
+major_matches() {
+ current_major="$(cut -f 1 -d '-' packaging/version | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+ target_major="$(echo "${EVENT_VERSION}" | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+
+ if [ "${target_major}" != "${current_major}" ]; then
+ echo "::error::Major version mismatch, expected ${current_major} but got ${target_major}."
+ return 1
+ fi
+}
+
+minor_matches() {
+ current_minor="$(cut -f 1 -d '-' packaging/version | cut -f 2 -d '.')"
+ target_minor="$(echo "${EVENT_VERSION}" | cut -f 2 -d '.')"
+
+ if [ "${target_minor}" != "${current_minor}" ]; then
+ echo "::error::Minor version mismatch, expected ${current_minor} but got ${target_minor}."
+ return 1
+ fi
+}
+
+check_for_existing_tag() {
+ if git tag | grep -qE "^${EVENT_VERSION}$"; then
+ echo "::error::A tag for version ${EVENT_VERSION} already exists."
+ return 1
+ fi
+}
+
+check_newer_major_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 1 -d '.' | cut -f 2 -d 'v')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+check_newer_minor_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 2 -d '.')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 2 -d '.')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+check_newer_patch_version() {
+ current="$(cut -f 1 -d '-' packaging/version | cut -f 3 -d '.')"
+ target="$(echo "${EVENT_VERSION}" | cut -f 3 -d '.')"
+
+ if [ "${target}" -le "${current}" ]; then
+ echo "::error::Version ${EVENT_VERSION} is not newer than the current version."
+ return 1
+ fi
+}
+
+##############################################################
+# Core logic
+
+git config user.name "netdatabot"
+git config user.email "bot@netdata.cloud"
+
+if [ "${REPO}" != "netdata/netdata" ] && [ -z "${RELEASE_TEST}" ]; then
+ echo "::notice::Not running in the netdata/netdata repository, not queueing a release build."
+ echo "::set-output name=run::false"
+elif [ "${EVENT_NAME}" = 'schedule' ] || [ "${EVENT_TYPE}" = 'nightly' ]; then
+ echo "::notice::Preparing a nightly release build."
+ LAST_TAG=$(git describe --abbrev=0 --tags)
+ COMMITS_SINCE_RELEASE=$(git rev-list "${LAST_TAG}"..HEAD --count)
+ NEW_VERSION="${LAST_TAG}-$((COMMITS_SINCE_RELEASE + 1))-nightly"
+ LAST_VERSION_COMMIT="$(git rev-list -1 HEAD packaging/version)"
+ HEAD_COMMIT="$(git rev-parse HEAD)"
+ if [ "${EVENT_NAME}" = 'schedule' ] && [ "${LAST_VERSION_COMMIT}" = "${HEAD_COMMIT}" ] && grep -qE '.*-nightly$' packaging/version; then
+ echo "::notice::No commits since last nightly build, not publishing a new nightly build."
+ echo "::set-output name=run::false"
+ else
+ echo "${NEW_VERSION}" > packaging/version || exit 1
+ echo "::set-output name=run::true"
+ echo "::set-output name=message::Update changelog and version for nightly build: ${NEW_VERSION}."
+ echo "::set-output name=ref::master"
+ echo "::set-output name=type::nightly"
+ echo "::set-output name=branch::master"
+ echo "::set-output name=version::nightly"
+ fi
+elif [ "${EVENT_TYPE}" = 'patch' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a patch release build."
+ check_version_format || exit 1
+ check_for_existing_tag || exit 1
+ branch_name="$(echo "${EVENT_VERSION}" | cut -f 1-2 -d '.')"
+ if ! git checkout "${branch_name}"; then
+ echo "::error::Could not find a branch for the ${branch_name}.x release series."
+ exit 1
+ fi
+ minor_matches || exit 1
+ major_matches || exit 1
+ check_newer_patch_version || exit 1
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ echo "::set-output name=run::true"
+ echo "::set-output name=message::Patch release ${EVENT_VERSION}."
+ echo "::set-output name=ref::${EVENT_VERSION}"
+ echo "::set-output name=type::release"
+ echo "::set-output name=branch::${branch_name}"
+ echo "::set-output name=version::$(tr -d 'v' < packaging/version)"
+elif [ "${EVENT_TYPE}" = 'minor' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a minor release build."
+ check_version_format || exit 1
+ patch_is_zero || exit 1
+ major_matches || exit 1
+ check_newer_minor_version || exit 1
+ check_for_existing_tag || exit 1
+ branch_name="$(echo "${EVENT_VERSION}" | cut -f 1-2 -d '.')"
+ if [ -n "$(git branch --list "${branch_name}")" ]; then
+ echo "::error::A branch named ${branch_name} already exists in the repository."
+ exit 1
+ fi
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ echo "::set-output name=run::true"
+ echo "::set-output name=message::Minor release ${EVENT_VERSION}."
+ echo "::set-output name=ref::${EVENT_VERSION}"
+ echo "::set-output name=type::release"
+ echo "::set-output name=branch::master"
+ echo "::set-output name=new-branch::${branch_name}"
+ echo "::set-output name=version::$(tr -d 'v' < packaging/version)"
+elif [ "${EVENT_TYPE}" = 'major' ] && [ "${EVENT_VERSION}" != "nightly" ]; then
+ echo "::notice::Preparing a major release build."
+ check_version_format || exit 1
+ minor_is_zero || exit 1
+ patch_is_zero || exit 1
+ check_newer_major_version || exit 1
+ check_for_existing_tag || exit 1
+ echo "${EVENT_VERSION}" > packaging/version || exit 1
+ echo "::set-output name=run::true"
+ echo "::set-output name=message::Major release ${EVENT_VERSION}"
+ echo "::set-output name=ref::${EVENT_VERSION}"
+ echo "::set-output name=type::release"
+ echo "::set-output name=branch::master"
+ echo "::set-output name=version::$(tr -d 'v' < packaging/version)"
+else
+ echo '::error::Unrecognized release type or invalid version.'
+ exit 1
+fi
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
new file mode 100755
index 0000000..31ab71d
--- /dev/null
+++ b/.github/scripts/run-updater-check.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+echo ">>> Installing CI support packages..."
+/netdata/.github/scripts/ci-support-pkgs.sh
+echo ">>> Installing Netdata..."
+/netdata/packaging/installer/kickstart.sh --dont-wait --build-only --disable-telemetry || exit 1
+echo "::group::Environment File Contents"
+cat /etc/netdata/.environment
+echo "::endgroup::"
+echo ">>> Updating Netdata..."
+export NETDATA_NIGHTLIES_BASEURL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
+/netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update || exit 1
+echo ">>> Checking if update was successful..."
+/netdata/.github/scripts/check-updater.sh || exit 1
diff --git a/.github/scripts/run_install_with_dist_file.sh b/.github/scripts/run_install_with_dist_file.sh
new file mode 100755
index 0000000..d59e8b1
--- /dev/null
+++ b/.github/scripts/run_install_with_dist_file.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+#
+# This script evaluates a netdata installation built from the "make dist" source tarball
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud>
+
+set -e
+
+if [ $# -ne 1 ]; then
+ printf >&2 "Usage: %s <dist_file>\n" "$(basename "$0")"
+ exit 1
+fi
+
+distfile="${1}"
+shift
+
+printf >&2 "Opening dist archive %s ... " "${distfile}"
+tar -xovf "${distfile}"
+distdir="$(echo "${distfile}" | rev | cut -d. -f3- | rev)"
+cp -a packaging/installer/install-required-packages.sh "${distdir}/install-required-packages.sh"
+if [ ! -d "${distdir}" ]; then
+ printf >&2 "ERROR: %s is not a directory" "${distdir}"
+ exit 2
+fi
+
+printf >&2 "Entering %s and starting docker run ..." "${distdir}"
+
+pushd "${distdir}" || exit 1
+docker run \
+ -e DISABLE_TELEMETRY=1 \
+ -v "${PWD}:/netdata" \
+ -w /netdata \
+ "ubuntu:latest" \
+ /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt install wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"
+popd || exit 1
+
+echo "All Done!"
diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644
index 0000000..abf927a
--- /dev/null
+++ b/.github/stale.yml
@@ -0,0 +1,18 @@
+---
+only: issues
+limitPerRun: 30
+daysUntilStale: 30
+daysUntilClose: 7
+exemptLabels:
+ - bug
+ - help wanted
+ - feature request
+exemptProjects: true
+exemptMilestones: true
+staleLabel: stale
+markComment: >
+ This issue has been inactive for 30 days.
+ It will be closed in one week, unless it is updated.
+closeComment: >
+ This issue has been automatically closed due to extended period of inactivity.
+ Please reopen if it is still valid. Thank you for your contributions.
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
new file mode 100644
index 0000000..a80d8b4
--- /dev/null
+++ b/.github/workflows/add-to-project.yml
@@ -0,0 +1,26 @@
+name: Add issues to Agent Board
+
+on:
+ issues:
+ types:
+ - opened
+ - transferred
+
+jobs:
+ add-to-project:
+ name: Add issue to project
+ if: github.repository == 'netdata/netdata'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Add issues to Agent project board
+ uses: actions/add-to-project@v0.4.0
+ with:
+ project-url: https://github.com/orgs/netdata/projects/32
+ github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
+
+ - name: Add issues to Product Bug project board
+ uses: actions/add-to-project@v0.4.0
+ with:
+ project-url: https://github.com/orgs/netdata/projects/45
+ github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
+ labeled: bug
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..53f1590
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,861 @@
+---
+# Ci code for building release artifacts.
+name: Build
+on:
+ push: # Master branch checks only validate the build and generate artifacts for testing.
+ branches:
+ - master
+ pull_request: null # PR checks only validate the build and generate artifacts for testing.
+ workflow_dispatch: # Dispatch runs build and validate, then push to the appropriate storage location.
+ inputs:
+ type:
+ description: Build Type
+ default: nightly
+ required: true
+ version:
+ description: Version Tag
+ default: nightly
+ required: true
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: build-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ build-dist: # Build the distribution tarball and store it as an artifact.
+ name: Build Distribution Tarball
+ runs-on: ubuntu-latest
+ outputs:
+ distfile: ${{ steps.build.outputs.distfile }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Fix tags
+ id: fix-tags
+ if: github.event_name != 'push'
+ run: |
+ git fetch --tags --force
+ - name: Mark Stable
+ id: channel
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly'
+ run: |
+ sed -i 's/^RELEASE_CHANNEL="nightly" *#/RELEASE_CHANNEL="stable" #/' netdata-installer.sh
+ - name: Build
+ id: build
+ run: |
+ git describe
+ mkdir -p artifacts
+ ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ autoreconf -ivf
+ ./configure --prefix=/usr \
+ --sysconfdir=/etc \
+ --localstatedir=/var \
+ --libexecdir=/usr/libexec \
+ --with-zlib \
+ --with-math \
+ --with-user=netdata
+ make dist
+ echo "::set-output name=distfile::$(find . -name 'netdata-*.tar.gz')"
+ cp netdata-*.tar.gz artifacts/
+ - name: Store
+ id: store
+ uses: actions/upload-artifact@v3
+ with:
+ name: dist-tarball
+ path: artifacts/*.tar.gz
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Distribution tarball creation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create source tarball for distribution.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fix Tags: ${{ steps.fix-tags.outcome }}
+ Mark stable: ${{ steps.channel.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Store: ${{ steps.store.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ build-static: # Build the static binary archives, and store them as artifacts.
+ name: Build Static
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ arch:
+ - x86_64
+ - armv7l
+ - aarch64
+ - ppc64le
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Fix tags
+ id: fix-tags
+ if: github.event_name != 'push'
+ run: |
+ git fetch --tags --force
+ - name: Mark Stable
+ id: channel
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly'
+ run: |
+ sed -i 's/^RELEASE_CHANNEL="nightly" *#/RELEASE_CHANNEL="stable" #/' netdata-installer.sh packaging/makeself/install-or-update.sh
+ - name: Get Cache Key
+ id: cache-key
+ run: .github/scripts/get-static-cache-key.sh ${{ matrix.arch }}
+ - name: Cache
+ id: cache
+ uses: actions/cache@v3
+ with:
+ path: artifacts/cache
+ key: ${{ steps.cache-key.outputs.key }}
+ - name: Build
+ if: github.event_name != 'workflow_dispatch' # Don’t use retries on PRs.
+ run: .github/scripts/build-static.sh ${{ matrix.arch }}
+ - name: Build
+ if: github.event_name == 'workflow_dispatch'
+ id: build
+ uses: nick-fields/retry@v2
+ with:
+ timeout_minutes: 180
+ retries: 3
+ command: .github/scripts/build-static.sh ${{ matrix.arch }}
+ - name: Store
+ id: store
+ uses: actions/upload-artifact@v3
+ with:
+ name: static-archive
+ path: artifacts/*.gz.run
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Static build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create static installer archive for ${{ matrix.arch }}.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fix Tags: ${{ steps.fix-tags.outcome }}
+ Mark stable: ${{ steps.channel.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Store: ${{ steps.store.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ matrix: # Generate the shared build matrix for our build tests.
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ shell: python3 {0}
+ run: |
+ from ruamel.yaml import YAML
+ import json
+ yaml = YAML(typ='safe')
+ entries = list()
+
+ with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+ for i, v in enumerate(data['include']):
+ e = {
+ 'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
+ 'version': v['version'],
+ }
+
+ if 'base_image' in v:
+ e['distro'] = ':'.join([v['base_image'], str(v['version'])])
+ else:
+ e['distro'] = ':'.join([v['distro'], str(v['version'])])
+
+ if 'env_prep' in v:
+ e['env_prep'] = v['env_prep']
+
+ if 'jsonc_removal' in v:
+ e['jsonc_removal'] = v['jsonc_removal']
+
+ entries.append(e)
+
+ entries.sort(key=lambda k: k['distro'])
+ matrix = json.dumps({'include': entries}, sort_keys=True)
+ print('Generated Matrix: ' + matrix)
+ print('::set-output name=matrix::' + matrix)
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Build matrix preparation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to prepare build matrix for build checks.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare tools: ${{ steps.prepare.outcome }}
+ Read build matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ prepare-test-images: # Prepare the test environments for our build checks. This also checks dependency handling code for each tested environment.
+ name: Prepare Test Environments
+ runs-on: ubuntu-latest
+ needs:
+ - matrix
+ env:
+ RETRY_DELAY: 300
+ strategy:
+      # Unlike the actual build tests, this completes _very_ fast (average of about 3 minutes for each job), so we
+      # just run everything in parallel instead of limiting job concurrency.
+ fail-fast: false
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Build test environment
+ id: build1
+ uses: docker/build-push-action@v3
+ continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here.
+ with:
+ push: false
+ load: false
+ file: .github/dockerfiles/Dockerfile.build_test
+ build-args: |
+ BASE=${{ matrix.distro }}
+ PRE=${{ matrix.env_prep }}
+ RMJSONC=${{ matrix.jsonc_removal }}
+ outputs: type=oci,dest=/tmp/image.tar
+ tags: test:${{ matrix.artifact_key }}
+ - name: Retry delay
+ if: ${{ steps.build1.outcome == 'failure' }}
+ run: sleep "${RETRY_DELAY}"
+ - name: Build test environment (attempt 2)
+ if: ${{ steps.build1.outcome == 'failure' }}
+ id: build2
+ uses: docker/build-push-action@v3
+ continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here.
+ with:
+ push: false
+ load: false
+ file: .github/dockerfiles/Dockerfile.build_test
+ build-args: |
+ BASE=${{ matrix.distro }}
+ PRE=${{ matrix.env_prep }}
+ RMJSONC=${{ matrix.jsonc_removal }}
+ outputs: type=oci,dest=/tmp/image.tar
+ tags: test:${{ matrix.artifact_key }}
+ - name: Retry delay
+ if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }}
+ run: sleep "${RETRY_DELAY}"
+ - name: Build test environment (attempt 3)
+ if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }}
+ id: build3
+ uses: docker/build-push-action@v3
+ with:
+ push: false
+ load: false
+ file: .github/dockerfiles/Dockerfile.build_test
+ build-args: |
+ BASE=${{ matrix.distro }}
+ PRE=${{ matrix.env_prep }}
+ RMJSONC=${{ matrix.jsonc_removal }}
+ outputs: type=oci,dest=/tmp/image.tar
+ tags: test:${{ matrix.artifact_key }}
+ - name: Upload image artifact
+ id: upload
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.artifact_key }}-test-env
+ path: /tmp/image.tar
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Test environment preparation for ${{ matrix.distro }} failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Test environment preparation for ${{ matrix.distro }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Set up Buildx: ${{ steps.buildx.outcome }}
+ Build test environment: ${{ steps.build1.outcome }}
+ Build test environment (attempt 2): ${{ steps.build2.outcome }}
+ Build test environment (attempt 3): ${{ steps.build3.outcome }}
+ Upload: ${{ steps.upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ source-build: # Test various source build arrangements.
+ name: Test Source Build
+ runs-on: ubuntu-latest
+ needs:
+ - matrix
+ - prepare-test-images
+ strategy:
+ fail-fast: false
+ max-parallel: 8
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Fetch test environment
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact_key }}-test-env
+ - name: Load test environment
+ id: load
+ run: |
+ docker load --input image.tar | tee image-info.txt
+ echo "::set-output name=image::$(cut -d ':' -f 3 image-info.txt)"
+ - name: Regular build on ${{ matrix.distro }}
+ id: build-basic
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+ /bin/sh -c 'autoreconf -ivf && ./configure --disable-dependency-tracking && make -j2'
+ - name: netdata-installer on ${{ matrix.distro }}, disable cloud
+ id: build-no-cloud
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --disable-cloud --one-time-build'
+ - name: netdata-installer on ${{ matrix.distro }}, require cloud
+ id: build-cloud
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
+ - name: netdata-installer on ${{ matrix.distro }}, require cloud, no JSON-C
+ id: build-no-jsonc
+ if: matrix.jsonc_removal != ''
+ run: |
+ docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+ /bin/sh -c '/rmjsonc.sh && ./netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Build tests for ${{ matrix.distro }} failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Build tests for ${{ matrix.distro }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch test environment: ${{ steps.fetch.outcome }}
+ Load test environment: ${{ steps.load.outcome }}
+ Regular build: ${{ steps.build-basic.outcome }}
+ netdata-installer, disable cloud: ${{ steps.build-no-cloud.outcome }}
+ netdata-installer, require cloud: ${{ steps.build-cloud.outcome }}
+ netdata-installer, no JSON-C: ${{ steps.build-no-jsonc.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ updater-check: # Test the generated dist archive using the updater code.
+ name: Test Generated Distfile and Updater Code
+ runs-on: ubuntu-latest
+ needs:
+ - build-dist
+ - matrix
+ - prepare-test-images
+ strategy:
+ fail-fast: false
+ max-parallel: 8
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ services:
+ apache: # This gets used to serve the dist tarball for the updater script.
+ image: httpd:2.4
+ ports:
+ - 8080:80
+ volumes:
+ - ${{ github.workspace }}:/usr/local/apache2/htdocs/
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Fetch dist tarball artifacts
+ id: fetch-tarball
+ uses: actions/download-artifact@v3
+ with:
+ name: dist-tarball
+ path: dist-tarball
+ - name: Prepare artifact directory
+ id: prepare
+ run: |
+ mkdir -p artifacts || exit 1
+ echo "9999.0.0-0" > artifacts/latest-version.txt || exit 1
+ cp dist-tarball/* artifacts || exit 1
+ cd artifacts || exit 1
+ ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
+ sha256sum -b ./* > "sha256sums.txt" || exit 1
+ cat sha256sums.txt
+ - name: Fetch test environment
+ id: fetch-test-environment
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact_key }}-test-env
+ - name: Load test environment
+ id: load
+ run: |
+ docker load --input image.tar | tee image-info.txt
+ echo "::set-output name=image::$(cut -d ':' -f 3 image-info.txt)"
+ - name: Install netdata and run the updater on ${{ matrix.distro }}
+ id: updater-check
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata sha256:${{ steps.load.outputs.image }} \
+ /netdata/.github/scripts/run-updater-check.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Updater checks for ${{ matrix.distro }} failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Updater checks for ${{ matrix.distro }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch dist tarball: ${{ steps.fetch-tarball.outcome }}
+ Prepare artifact directory: ${{ steps.prepare.outcome }}
+ Fetch test environment: ${{ steps.fetch-test-environment.outcome }}
+ Load test environment: ${{ steps.load.outcome }}
+ Updater check: ${{ steps.updater-check.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ prepare-upload: # Consolidate the artifacts for uploading or releasing.
+ name: Prepare Artifacts
+ runs-on: ubuntu-latest
+ needs:
+ - build-dist
+ - build-static
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Prepare Environment
+ id: prepare
+ run: mkdir -p artifacts
+ - name: Retrieve Dist Tarball
+ id: fetch-dist
+ uses: actions/download-artifact@v3
+ with:
+ name: dist-tarball
+ path: dist-tarball
+ - name: Retrieve Static Build Artifacts
+ id: fetch-static
+ uses: actions/download-artifact@v3
+ with:
+ name: static-archive
+ path: static-archive
+ - name: Prepare Artifacts
+ id: consolidate
+ working-directory: ./artifacts/
+ run: |
+ mv ../dist-tarball/* . || exit 1
+ mv ../static-archive/* . || exit 1
+ ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
+ cp ../packaging/version ./latest-version.txt || exit 1
+ sha256sum -b ./* > sha256sums.txt || exit 1
+ cat sha256sums.txt
+ - name: Store Artifacts
+ id: store
+ uses: actions/upload-artifact@v3
+ with:
+ name: final-artifacts
+ path: artifacts/*
+ retention-days: 30
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to prepare release artifacts for upload:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to prepare release artifacts for upload.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare environment: ${{ steps.prepare.outcome }}
+ Fetch dist tarball: ${{ steps.fetch-dist.outcome }}
+ Fetch static builds: ${{ steps.fetch-static.outcome }}
+ Consolidate artifacts: ${{ steps.consolidate.outcome }}
+ Store: ${{ steps.store.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ artifact-verification-dist: # Verify the regular installer works with the consolidated artifacts.
+ name: Test Consolidated Artifacts (Source)
+ runs-on: ubuntu-latest
+ needs:
+ - prepare-upload
+ services:
+ apache: # This gets used to serve the dist tarball for the updater script.
+ image: httpd:2.4
+ ports:
+ - 8080:80
+ volumes:
+ - ${{ github.workspace }}:/usr/local/apache2/htdocs/
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Fetch artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: artifacts
+ - name: Verify that artifacts work with installer
+ id: verify
+ env:
+ NETDATA_TARBALL_BASEURL: http://localhost:8080/artifacts
+ run: packaging/installer/kickstart.sh --build-only --dont-start-it --disable-telemetry --dont-wait
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Artifact verification for source tarball failed.'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Artifact verification for source tarball failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Verify artifacts: ${{ steps.verify.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ artifact-verification-static: # Verify the static installer works with the consolidated artifacts.
+ name: Test Consolidated Artifacts (Static)
+ runs-on: ubuntu-latest
+ needs:
+ - prepare-upload
+ services:
+ apache: # This gets used to serve the static archives.
+ image: httpd:2.4
+ ports:
+ - 8080:80
+ volumes:
+ - ${{ github.workspace }}:/usr/local/apache2/htdocs/
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Fetch artifacts
+ id: fetch-artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: artifacts
+ - name: Verify that artifacts work with installer
+ id: verify
+ env:
+ NETDATA_TARBALL_BASEURL: http://localhost:8080/artifacts
+ run: packaging/installer/kickstart.sh --static-only --dont-start-it --disable-telemetry
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Artifact verification for static build failed.'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Artifact verification for static build failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch artifacts: ${{ steps.fetch-artifacts.outcome }}
+ Verify artifacts: ${{ steps.verify.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ upload-nightly: # Upload the nightly build artifacts to GCS.
+ name: Upload Nightly Artifacts
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'nightly' && github.repository == 'netdata/netdata'
+ needs:
+ - updater-check
+ - source-build
+ - artifact-verification-dist
+ - artifact-verification-static
+ steps:
+ - name: Retrieve Artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: final-artifacts
+ - name: Authenticate to GCS
+ id: gcs-auth
+ uses: google-github-actions/auth@v1
+ with:
+ project_id: ${{ secrets.GCP_NIGHTLY_STORAGE_PROJECT }}
+ credentials_json: ${{ secrets.GCS_STORAGE_SERVICE_KEY_JSON }}
+ - name: Setup GCS
+ id: gcs-setup
+ uses: google-github-actions/setup-gcloud@v1.0.1
+ - name: Upload Artifacts
+ id: upload
+ uses: google-github-actions/upload-cloud-storage@v1.0.0
+ with:
+ destination: ${{ secrets.GCP_NIGHTLY_STORAGE_BUCKET }}
+ gzip: false
+ path: ./final-artifacts
+ parent: false
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to upload nightly release artifacts:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to upload nightly release artifacts.
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Authenticate GCS: ${{ steps.gcs-auth.outcome }}
+ Setup GCS: ${{ steps.gcs-setup.outcome }}
+ Upload artifacts: ${{ steps.upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ }}
+
+ create-nightly: # Create a nightly build release in netdata/netdata-nightlies
+ name: Create Nightly Release
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'nightly' && github.repository == 'netdata/netdata'
+ needs:
+ - updater-check
+ - source-build
+ - artifact-verification-dist
+ - artifact-verification-static
+ steps:
+ - name: Checkout Main Repo
+ id: checkout-main
+ uses: actions/checkout@v3
+ with:
+ path: main
+ - name: Checkout Nightly Repo
+ id: checkout-nightly
+ uses: actions/checkout@v3
+ with:
+ repository: netdata/netdata-nightlies
+ path: nightlies
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Retrieve Artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: final-artifacts
+ - name: Prepare version info
+ id: version
+ run: |
+ # shellcheck disable=SC2129
+ echo "version=$(cat main/packaging/version)" >> "${GITHUB_OUTPUT}"
+ echo "commit=$(cd nightlies && git rev-parse HEAD)" >> "${GITHUB_OUTPUT}"
+ echo "date=$(date +%F)" >> "${GITHUB_OUTPUT}"
+ - name: Create Release
+ id: create-release
+ uses: ncipollo/release-action@v1
+ with:
+ allowUpdates: false
+ artifactErrorsFailBuild: true
+ artifacts: 'final-artifacts/sha256sums.txt,final-artifacts/netdata-*.tar.gz,final-artifacts/netdata-*.gz.run'
+ owner: netdata
+ repo: netdata-nightlies
+ body: Netdata nightly build for ${{ steps.version.outputs.date }}.
+ commit: ${{ steps.version.outputs.commit }}
+ tag: ${{ steps.version.outputs.version }}
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to create nightly release:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create nightly release or attach artifacts.
+ Checkout netdata/netdata: ${{ steps.checkout-main.outcome }}
+ Checkout netdata/netdata-nightlies: ${{ steps.checkout-nightly.outcome }}
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Prepare version info: ${{ steps.version.outcome }}
+ Create release: ${{ steps.create-release.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'workflow_dispatch'
+ }}
+
+ normalize-tag: # Fix the release tag if needed
+ name: Normalize Release Tag
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'release'
+ outputs:
+ tag: ${{ steps.tag.outputs.tag }}
+ steps:
+ - name: Normalize Tag
+ id: tag
+ run: |
+ if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "::set-output name=tag::v${{ github.event.inputs.version }}"
+ else
+ echo "::set-output name=tag::${{ github.event.inputs.version }}"
+ fi
+
+ upload-release: # Create the draft release and upload the build artifacts.
+ name: Create Release Draft
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'release' && github.repository == 'netdata/netdata'
+ needs:
+ - updater-check
+ - source-build
+ - artifact-verification-dist
+ - artifact-verification-static
+ - normalize-tag
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Retrieve Artifacts
+ id: fetch
+ uses: actions/download-artifact@v3
+ with:
+ name: final-artifacts
+ path: final-artifacts
+ - name: Create Release
+ id: create-release
+ uses: ncipollo/release-action@v1
+ with:
+ allowUpdates: false
+ artifactErrorsFailBuild: true
+ artifacts: 'final-artifacts/sha256sums.txt,final-artifacts/netdata-*.tar.gz,final-artifacts/netdata-*.gz.run'
+ draft: true
+ tag: ${{ needs.normalize-tag.outputs.tag }}
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to draft release:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create draft release or attach artifacts.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch artifacts: ${{ steps.fetch.outcome }}
+ Create draft release: ${{ steps.create-release.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'workflow_dispatch'
+ }}
+ - name: Success Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'good'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Created agent draft release:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: "${{ github.repository }}: ${{ steps.create-release.outputs.html_url }}"
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ success()
+ && github.event_name == 'workflow_dispatch'
+ }}
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
new file mode 100644
index 0000000..65ad6ac
--- /dev/null
+++ b/.github/workflows/checks.yml
@@ -0,0 +1,61 @@
+---
+name: Checks
+on:
+ push:
+ branches:
+ - master
+ pull_request: null
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: checks-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ libressl-checks:
+ name: LibreSSL
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Build
+ run: >
+ docker run -v "$PWD":/netdata -w /netdata alpine:latest /bin/sh -c
+ 'apk add bash;
+ ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata;
+ apk del openssl openssl-dev;
+ apk add libressl libressl-dev;
+ autoreconf -ivf;
+ ./configure --disable-dependency-tracking;
+ make;'
+ clang-checks:
+ name: Clang
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Build
+ run: |
+ docker build -f .github/dockerfiles/Dockerfile.clang .
+ gitignore-check:
+ name: .gitignore
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ - name: Build netdata
+ run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install /tmp/install --one-time-build
+ - name: Check that repo is clean
+ run: |
+ git status --porcelain=v1 > /tmp/porcelain
+ if [ -s /tmp/porcelain ]; then
+ cat /tmp/porcelain
+ exit 1
+ fi
diff --git a/.github/workflows/cloud_regression.yml b/.github/workflows/cloud_regression.yml
new file mode 100644
index 0000000..b6e321f
--- /dev/null
+++ b/.github/workflows/cloud_regression.yml
@@ -0,0 +1,54 @@
+name: Trigger Cloud Regression E2E Tests
+on:
+ push:
+ branches: [master]
+ paths:
+ - 'CMakeLists.txt'
+ - '**.c'
+ - '**.cc'
+ - '**.cpp'
+ - '**.h'
+ - 'mqtt_websockets/**'
+ - 'aclk/aclk-schemas/**'
+jobs:
+ trigger_cloud_regression_tests:
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ steps:
+ - name: Evaluate workflow dispatch parameters
+ env:
+ PR_REPO_NAME: ${{ github.event.pull_request.head.repo.full_name }}
+ PR_BRANCH_NAME: ${{ github.event.pull_request.head.ref }}
+ PR_COMMIT_HASH: ${{ github.event.pull_request.head.sha }}
+ id: output-workflow-dispatch-params
+ run: |
+ if [ ${{ github.event_name }} == 'pull_request_target' ]; then
+ NETDATA_CUSTOM_REPO="$PR_REPO_NAME"
+ NETDATA_CUSTOM_BRANCH="$PR_BRANCH_NAME"
+ NETDATA_CUSTOM_PR_NUMBER="${{ github.event.number }}"
+ NETDATA_CUSTOM_COMMIT_HASH="$PR_COMMIT_HASH"
+ elif [ ${{ github.event_name }} == 'push' ]; then
+ NETDATA_CUSTOM_REPO="netdata/netdata"
+ NETDATA_CUSTOM_BRANCH="master"
+ NETDATA_CUSTOM_PR_NUMBER=""
+ NETDATA_CUSTOM_COMMIT_HASH="${{ github.sha }}"
+ fi
+ echo "::set-output name=netdata_repo::${NETDATA_CUSTOM_REPO}"
+ echo "::set-output name=netdata_branch::${NETDATA_CUSTOM_BRANCH}"
+ echo "::set-output name=netdata_pr_number::${NETDATA_CUSTOM_PR_NUMBER}"
+ echo "::set-output name=netdata_commit_hash::${NETDATA_CUSTOM_COMMIT_HASH}"
+
+ - name: Trigger Cloud Regression
+ uses: aurelien-baudet/workflow-dispatch@v2
+ with:
+ repo: netdata/test-automation
+ ref: refs/heads/master
+ workflow: regression.yml
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ inputs: '{ "netdata_branch": "${{ steps.output-workflow-dispatch-params.outputs.netdata_branch }}",
+ "netdata_repo": "${{ steps.output-workflow-dispatch-params.outputs.netdata_repo }}",
+ "netdata_pr_number": "${{ steps.output-workflow-dispatch-params.outputs.netdata_pr_number }}",
+ "netdata_branch_commit_hash": "${{ steps.output-workflow-dispatch-params.outputs.netdata_commit_hash }}",
+ "custom_netdata_image": "true"
+ }'
+ wait-for-completion: false
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 0000000..021376a
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,117 @@
+---
+# Run CodeQL to analyze C/C++ and Python code.
+name: CodeQL
+on:
+ pull_request:
+ types: [opened, reopened, labeled, synchronize]
+ branches: [master]
+ push:
+ branches: [master]
+ schedule:
+ - cron: "27 2 * * 1"
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: codeql-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ prepare:
+ name: Prepare Jobs
+ runs-on: ubuntu-latest
+ outputs:
+ cpp: ${{ steps.cpp.outputs.run }}
+ python: ${{ steps.python.outputs.run }}
+ steps:
+ - name: Clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Check if we should always run
+ id: always
+ run: |
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/codeql') }}" = "true" ]; then
+ echo '::set-output name=run::true'
+ echo '::notice::Found ci/codeql label, unconditionally running all CodeQL checks.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ else
+ echo '::set-output name=run::true'
+ fi
+ - name: Check for C/C++ changes
+ id: cpp
+ run: |
+ if [ "${{ steps.always.outputs.run }}" = "false" ]; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.[ch](xx|\+\+)?' ; then
+ echo '::set-output name=run::true'
+ echo '::notice::C/C++ code has changed, need to run CodeQL.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ else
+ echo '::set-output name=run::true'
+ fi
+ - name: Check for python changes
+ id: python
+ run: |
+ if [ "${{ steps.always.outputs.run }}" = "false" ]; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'collectors/python.d.plugin/.*\.py' ; then
+ echo '::set-output name=run::true'
+ echo '::notice::Python code has changed, need to run CodeQL.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ else
+ echo '::set-output name=run::true'
+ fi
+
+ analyze-cpp:
+ name: Analyze C/C++
+ runs-on: ubuntu-latest
+ needs: prepare
+ if: needs.prepare.outputs.cpp == 'true'
+ permissions:
+ security-events: write
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ languages: cpp
+ - name: Prepare environment
+ run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ - name: Build netdata
+ run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install /tmp/install --one-time-build
+ - name: Run CodeQL
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:cpp"
+
+ analyze-python:
+ name: Analyze Python
+ runs-on: ubuntu-latest
+ needs: prepare
+ if: needs.prepare.outputs.python == 'true'
+ permissions:
+ security-events: write
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ config-file: ./.github/codeql/python-config.yml
+ languages: python
+ - name: Run CodeQL
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:python"
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
new file mode 100644
index 0000000..9d1119a
--- /dev/null
+++ b/.github/workflows/coverity.yml
@@ -0,0 +1,63 @@
+---
+# Runs coverity-scan.sh every 24h on `master`
+name: Coverity Scan
+on:
+ schedule:
+ - cron: '0 1 * * *'
+ pull_request:
+ paths:
+ - .github/workflows/coverity.yml
+ - coverity-scan.sh
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: coverity-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ coverity:
+ if: github.repository == 'netdata/netdata'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ id: checkout
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ id: prepare
+ env:
+ DEBIAN_FRONTEND: 'noninteractive'
+ run: |
+ ./packaging/installer/install-required-packages.sh \
+ --dont-wait --non-interactive netdata
+ sudo apt-get install -y libjson-c-dev libipmimonitoring-dev \
+ libcups2-dev libsnappy-dev libprotobuf-dev \
+ libprotoc-dev libssl-dev protobuf-compiler \
+ libnetfilter-acct-dev
+ - name: Run coverity-scan
+ id: run
+ env:
+ REPOSITORY: 'netdata/netdata'
+ COVERITY_SCAN_TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
+ COVERITY_SCAN_SUBMIT_MAIL: ${{ secrets.COVERITY_SCAN_SUBMIT_MAIL }}
+ run: |
+ bash -x ./coverity-scan.sh --with-install
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Coverity run failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Coverity failed to run correctly.
+ Checkout: ${{ steps.checkout.outcome }}
+ Environment preparation: ${{ steps.prepare.outcome }}
+ Coverity run: ${{ steps.run.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ }}
diff --git a/.github/workflows/dashboard-pr.yml b/.github/workflows/dashboard-pr.yml
new file mode 100644
index 0000000..c99f989
--- /dev/null
+++ b/.github/workflows/dashboard-pr.yml
@@ -0,0 +1,54 @@
+---
+# Create a PR to update the react dashboard code.
+name: Dashboard Version PR
+
+on:
+ workflow_dispatch:
+ inputs:
+ dashboard_version:
+ # This must be specified, and must _exactly_ match the version
+ # tag for the release to be used for the update.
+ description: Dashboard Version
+ required: true
+
+env:
+ DISABLE_TELEMETRY: 1
+
+jobs:
+ dashboard-pr:
+ name: Generate Dashboard Version Bump PR
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Update Files
+ id: update
+ run: |
+ web/gui/bundle_dashboard.py ${{ github.event.inputs.dashboard_version }}
+ - name: Create Pull Request
+ id: pr
+ uses: peter-evans/create-pull-request@v4
+ with:
+ title: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
+ body: 'See https://github.com/netdata/dashboard/releases/tag/${{ github.event.inputs.dashboard_version }} for changes.'
+ branch: dashboard-${{ github.event.inputs.dashboard_version }}
+ branch-suffix: timestamp
+ delete-branch: true
+ commit-message: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Dashboard update PR creation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to create PR to update dashboard code to newest release.
+ Checkout: ${{ steps.checkout.outcome }}
+ Update files: ${{ steps.update.outcome }}
+ Create PR: ${{ steps.pr.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 0000000..b7eb53c
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,298 @@
+---
+name: Docker
+on:
+ push:
+ branches:
+ - master
+ pull_request: null
+ workflow_dispatch:
+ inputs:
+ version:
+ description: Version Tag
+ default: nightly
+ required: true
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: docker-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ docker-test:
+ name: Docker Runtime Test
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v2
+ - name: Test Build
+ id: build
+ uses: docker/build-push-action@v3
+ with:
+ load: true
+ push: false
+ tags: netdata/netdata:test
+ - name: Test Image
+ id: test
+ run: .github/scripts/docker-test.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker runtime testing failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Building or testing Docker image for linux/amd64 failed.
+          Checkout: ${{ steps.checkout.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ Test image: ${{ steps.test.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ docker-ci:
+ if: github.event_name != 'workflow_dispatch'
+ name: Docker Alt Arch Builds
+ needs: docker-test
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ platforms:
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Setup QEMU
+ id: qemu
+ if: matrix.platforms != 'linux/i386'
+ uses: docker/setup-qemu-action@v2
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Build
+ id: build
+ uses: docker/build-push-action@v3
+ with:
+ platforms: ${{ matrix.platforms }}
+ load: false
+ push: false
+ tags: netdata/netdata:test
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker build testing failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Building Docker image for ${{ matrix.platforms }} failed.
+          Checkout: ${{ steps.checkout.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.buildx.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ normalize-tag: # Fix the release tag if needed
+ name: Normalize Release Tag
+ runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch'
+ outputs:
+ tag: ${{ steps.tag.outputs.tag }}
+ steps:
+ - name: Normalize Tag
+ id: tag
+ run: |
+ if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
+ echo "::set-output name=tag::v${{ github.event.inputs.version }}"
+ else
+ echo "::set-output name=tag::${{ github.event.inputs.version }}"
+ fi
+
+ docker-publish:
+ if: github.event_name == 'workflow_dispatch'
+ name: Docker Build and Publish
+ needs:
+ - docker-test
+ - normalize-tag
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Determine which tags to use
+ id: release-tags
+ if: github.event.inputs.version != 'nightly'
+ run: |
+ echo "tags=netdata/netdata:latest,netdata/netdata:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '')" \
+ >> "${GITHUB_ENV}"
+ - name: Determine which tags to use
+ id: nightly-tags
+ if: github.event.inputs.version == 'nightly'
+ run: |
+ echo "tags=netdata/netdata:latest,netdata/netdata:edge" >> "${GITHUB_ENV}"
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
+ - name: Setup QEMU
+ id: qemu
+ uses: docker/setup-qemu-action@v2
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Docker Hub Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: Docker Build
+ id: build
+ uses: docker/build-push-action@v3
+ with:
+ platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
+ push: ${{ github.repository == 'netdata/netdata' }}
+ tags: ${{ env.tags }}
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to build or publish Docker images.
+          Checkout: ${{ steps.checkout.outcome }}
+ Generate release tags: ${{ steps.release-tags.outcome }}
+ Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.buildx.outcome }}
+ Authenticate against DockerHub: ${{ steps.login.outcome }}
+ Build and publish images: ${{ steps.build.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+ - name: Trigger Helmchart PR
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/helmchart
+ workflow: Agent Version PR
+ ref: refs/heads/master
+ inputs: '{"agent_version": "${{ needs.normalize-tag.outputs.tag }}"}'
+
+ docker-dbg-publish:
+ if: github.event_name == 'workflow_dispatch'
+    name: Docker Build and Publish (Debugging Image)
+ needs:
+ - docker-test
+ - normalize-tag
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Determine which tags to use
+ id: release-tags
+ if: github.event.inputs.version != 'nightly'
+ run: |
+ echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '-debug')" \
+ >> "${GITHUB_ENV}"
+ - name: Determine which tags to use
+ id: nightly-tags
+ if: github.event.inputs.version == 'nightly'
+ run: |
+ echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:edge" >> "${GITHUB_ENV}"
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
+ - name: Setup QEMU
+ id: qemu
+ uses: docker/setup-qemu-action@v2
+ - name: Setup Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Docker Hub Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: Docker Build
+ id: build
+ uses: docker/build-push-action@v3
+ with:
+ platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
+ push: ${{ github.repository == 'netdata/netdata' }}
+ tags: ${{ env.tags }}
+ build-args: |
+ OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ DEBUG_BUILD=1
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Docker Debug Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to build or publish Docker debug images.
+          Checkout: ${{ steps.checkout.outcome }}
+ Generate release tags: ${{ steps.release-tags.outcome }}
+ Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.buildx.outcome }}
+ Authenticate against DockerHub: ${{ steps.login.outcome }}
+ Build and publish images: ${{ steps.build.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..69fda40
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,29 @@
+---
+name: Docs
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - '**.md'
+ pull_request:
+ paths:
+ - '**.md'
+env:
+ DISABLE_TELEMETRY: 1
+jobs:
+ markdown-link-check:
+ name: Broken Links
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Run link check
+ uses: gaurav-nelson/github-action-markdown-link-check@v1
+ with:
+ use-quiet-mode: 'no'
+ use-verbose-mode: 'yes'
+ check-modified-files-only: 'yes'
+ config-file: '.mlc_config.json'
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
new file mode 100644
index 0000000..0854080
--- /dev/null
+++ b/.github/workflows/labeler.yml
@@ -0,0 +1,18 @@
+---
+# Handles labelling of PRs.
+name: Pull Request Labeler
+on:
+ schedule:
+ - cron: '*/10 * * * *'
+env:
+ DISABLE_TELEMETRY: 1
+jobs:
+ labeler:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: docker://docker.io/ilyam8/periodic-pr-labeler:v0.1.0
+ if: github.repository == 'netdata/netdata'
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_REPOSITORY: ${{ github.repository }}
+ LABEL_MAPPINGS_FILE: .github/labeler.yml
diff --git a/.github/workflows/packagecloud.yml b/.github/workflows/packagecloud.yml
new file mode 100644
index 0000000..ba70c17
--- /dev/null
+++ b/.github/workflows/packagecloud.yml
@@ -0,0 +1,36 @@
+---
+# Runs PackageCloud cleanup every day at 9pm
+name: PackageCloud Cleanup
+on:
+ schedule:
+ - cron: '0 21 * * *'
+ workflow_dispatch: null
+
+jobs:
+ cleanup:
+ name: PackageCloud Cleanup
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ strategy:
+ fail-fast: false
+ matrix:
+ repos:
+ - stable
+ - edge
+ - devel
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ id: checkout
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ id: prepare
+ run: |
+ pip3 install requests python-dateutil
+ - name: Run PackageCloud Cleanup
+ id: cleanup
+ env:
+ PKGCLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ python3 .github/scripts/netdata-pkgcloud-cleanup.py -r ${{ matrix.repos }}
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
new file mode 100644
index 0000000..ddd8356
--- /dev/null
+++ b/.github/workflows/packaging.yml
@@ -0,0 +1,279 @@
+---
+# Handles building of binary packages for the agent.
+name: Packages
+on:
+ pull_request:
+ types:
+ - opened
+ - reopened
+ - labeled
+ - synchronize
+ branches:
+ - master
+ push:
+ branches:
+ - master
+ workflow_dispatch:
+ inputs:
+ type:
+ description: Package build type
+ default: devel
+ required: true
+ version:
+ description: Package version
+ required: false
+env:
+ DISABLE_TELEMETRY: 1
+ REPO_PREFIX: netdata/netdata
+concurrency:
+ group: packages-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ matrix:
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ shell: python3 {0}
+ run: |
+ from ruamel.yaml import YAML
+ import json
+ import re
+ import os
+ ALWAYS_RUN_ARCHES = ["amd64", "x86_64"]
+ yaml = YAML(typ='safe')
+ entries = list()
+ run_limited = False
+
+ with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+ if "${{ github.event_name }}" == "pull_request" and "${{ !contains(github.event.pull_request.labels.*.name, 'run-ci/packaging') }}":
+ run_limited = True
+
+ for i, v in enumerate(data['include']):
+ if 'packages' in data['include'][i]:
+ for arch in data['include'][i]['packages']['arches']:
+ if arch in ALWAYS_RUN_ARCHES or not run_limited:
+ entries.append({
+ 'distro': data['include'][i]['distro'],
+ 'version': data['include'][i]['version'],
+ 'repo_distro': data['include'][i]['packages']['repo_distro'],
+ 'format': data['include'][i]['packages']['type'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'],
+ 'platform': data['platform_map'][arch],
+ 'arch': arch
+ })
+
+ entries.sort(key=lambda k: (data['arch_order'].index(k['arch']), k['distro'], k['version']))
+ matrix = json.dumps({'include': entries}, sort_keys=True)
+ print('Generated Matrix: ' + matrix)
+ print('::set-output name=matrix::' + matrix)
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Package Build matrix generation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to generate build matrix for package build.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare Tools: ${{ steps.prepare.outcome }}
+ Read Build Matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ version-check:
+ name: Version check
+ runs-on: ubuntu-latest
+ outputs:
+ repo: ${{ steps.check-version.outputs.repo }}
+ version: ${{ steps.check-version.outputs.version }}
+ retention: ${{ steps.check-version.outputs.retention }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Check Version
+ id: check-version
+ run: |
+ if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ case "${{ github.event.inputs.type }}" in
+ "release")
+ echo "::set-output name=repo::${REPO_PREFIX}"
+ echo "::set-output name=version::${{ github.event.inputs.version }}"
+ echo "::set-output name=retention::365"
+ ;;
+ "nightly")
+ echo "::set-output name=repo::${REPO_PREFIX}-edge"
+ echo "::set-output name=version::$(tr -d 'v' < packaging/version)"
+ echo "::set-output name=retention::30"
+ ;;
+ *)
+ echo "::set-output name=repo::${REPO_PREFIX}-devel"
+ echo "::set-output name=version::0.${GITHUB_SHA}"
+ echo "::set-output name=retention::30"
+ ;;
+ esac
+ else
+ echo "::set-output name=version::$(cut -d'-' -f 1 packaging/version | tr -d 'v')"
+ echo "::set-output name=retention::0"
+ fi
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Package Build version check failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to generate version information for package build.
+ Checkout: ${{ steps.checkout.outcome }}
+ Check Version: ${{ steps.check-version.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ build:
+ name: Build
+ runs-on: ubuntu-latest
+ env:
+ DOCKER_CLI_EXPERIMENTAL: enabled
+ needs:
+ - matrix
+ - version-check
+ strategy:
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+      # We intentionally disable the fail-fast behavior so that a
+ # build failure for one version doesn't prevent us from publishing
+ # successfully built and tested packages for another version.
+ fail-fast: false
+ max-parallel: 8
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0 # We need full history for versioning
+ submodules: recursive
+ - name: Setup QEMU
+ id: qemu
+ if: matrix.platform != 'linux/amd64' && matrix.platform != 'linux/i386'
+ uses: docker/setup-qemu-action@v2
+ - name: Prepare Docker Environment
+ id: docker-config
+ shell: bash
+ run: |
+ echo '{"cgroup-parent": "/actions_job", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null
+ sudo service docker restart
+ - name: Fetch images
+ id: fetch-images
+ uses: nick-invision/retry@v2
+ with:
+ max_attempts: 3
+ retry_wait_seconds: 30
+ timeout_seconds: 900
+ command: |
+ docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}:${{ matrix.version }}
+ docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}
+ - name: Build Packages
+ id: build
+ shell: bash
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e VERSION=${{ needs.version-check.outputs.version }} \
+ --platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}
+ - name: Save Packages
+ id: artifacts
+ continue-on-error: true
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-packages
+ path: ${{ github.workspace }}/artifacts/*
+ - name: Test Packages
+ id: test
+ shell: bash
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e DISTRO=${{ matrix.distro }} \
+ -e VERSION=${{ needs.version-check.outputs.version }} -e DISTRO_VERSION=${{ matrix.version }} \
+ --platform=${{ matrix.platform }} -v "$PWD":/netdata ${{ matrix.base_image }}:${{ matrix.version }} \
+ /netdata/.github/scripts/pkg-test.sh
+ - name: SSH setup
+ id: ssh-setup
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
+ continue-on-error: true
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Upload to packages.netdata.cloud
+ id: package-upload
+ continue-on-error: true
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
+ run: |
+ .github/scripts/package-upload.sh \
+ ${{ matrix.repo_distro }} \
+ ${{ matrix.arch }} \
+ ${{ matrix.format }} \
+ ${{ needs.version-check.outputs.repo }}
+ - name: Upload to PackageCloud
+ id: upload
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
+ shell: bash
+ env:
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ for pkgfile in artifacts/*.${{ matrix.format }} ; do
+ .github/scripts/package_cloud_wrapper.sh yank ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} "${pkgfile}"
+ done
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Package Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: ${{ matrix.repo_distro }} ${{ matrix.version }} package build for ${{ matrix.arch }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup Docker: ${{ steps.docker-config.outcome }}
+ Fetch images: ${{ steps.fetch-images.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Test: ${{ steps.test.outcome }}
+ Import SSH Key: ${{ steps.ssh-setup.outcome }}
+ Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
+ Publish to PackageCloud: ${{ steps.upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..e16ecab
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,214 @@
+---
+# Workflow for triggering a release.
+name: Release
+on:
+ schedule:
+ - cron: '0 0 * * *'
+ workflow_dispatch: # Dispatch runs build and validate, then push to the appropriate storage location.
+ inputs:
+ type:
+ description: Build Type
+ default: nightly
+ required: true
+ version:
+ description: Version Tag
+ default: nightly
+ required: true
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: release-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ update-changelogs:
+ name: Update changelog
+ runs-on: ubuntu-latest
+ outputs:
+ ref: ${{ steps.target.outputs.ref }}
+ version: ${{ steps.target.outputs.version }}
+ type: ${{ steps.target.outputs.type }}
+ run: ${{ steps.target.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ - name: Prepare base ref
+ id: target
+ run: >-
+ .github/scripts/prepare-release-base.sh \
+ ${{ github.repository }} \
+ ${{ github.event_name }} \
+ ${{ github.event.inputs.type }} \
+ ${{ github.event.inputs.version }} \
+ ${{ secrets.NETDATA_RELEASE_TEST }}
+      - name: Generate Nightly Changelog
+ id: nightly-changelog
+ if: steps.target.outputs.run == 'true' && steps.target.outputs.type == 'nightly'
+ uses: heinrichreimer/github-changelog-generator-action@v2.3
+ with:
+ bugLabels: IGNOREBUGS
+ excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
+ issues: false
+ sinceTag: v1.10.0
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ unreleasedLabel: "**Next release**"
+ verbose: true
+ maxIssues: 500
+ - name: Generate Release Changelog
+ id: release-changelog
+ if: steps.target.outputs.run == 'true' && steps.target.outputs.type != 'nightly'
+ uses: heinrichreimer/github-changelog-generator-action@v2.3
+ with:
+ bugLabels: IGNOREBUGS
+ excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
+ futureRelease: ${{ github.event.inputs.version }}
+ issues: false
+ sinceTag: v1.10.0
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ unreleasedLabel: "**Next release**"
+ verbose: true
+ maxIssues: 500
+ - name: Commit Changes
+ id: commit
+ if: steps.target.outputs.run == 'true'
+ env:
+ GITHUB_TOKEN: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ run: |
+ git config user.name "netdatabot"
+ git config user.email "bot@netdata.cloud"
+ git add packaging/version CHANGELOG.md
+ git commit -m "[ci skip] ${{ steps.target.outputs.message }}"
+ if [ "${{ steps.target.outputs.type }}" != "nightly" ]; then
+ git tag -a "${{ github.event.inputs.version }}" -m "${{ steps.target.outputs.message }}"
+ fi
+ if [ -n "${{ steps.target.outputs.new-branch }}" ]; then
+ git branch "${{ steps.target.outputs.new-branch }}"
+ fi
+ git push --tags origin "${{ steps.target.outputs.branch }}"
+ if [ -n "${{ steps.target.outputs.new-branch }}" ]; then
+ git push origin "${{ steps.target.outputs.new-branch }}"
+ fi
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to prepare changelog:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to prepare changelog.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare base ref: ${{ steps.target.outcome }}
+ Generate nightly changelog: ${{ steps.nightly-changelog.outcome }}
+ Generate release changelog: ${{ steps.release-changelog.outcome }}
+ Commit changes: ${{ steps.commit.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
+
+ trigger-artifacts:
+ name: Trigger artifact builds
+ runs-on: ubuntu-latest
+ needs: update-changelogs
+ if: needs.update-changelogs.outputs.run == 'true'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v2
+ with:
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ - name: Trigger build
+ id: trigger
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: ${{ github.repository }}
+ workflow: Build
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ inputs: '{"version": "${{ needs.update-changelogs.outputs.version }}", "type": "${{ needs.update-changelogs.outputs.type }}"}'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to trigger ${{ needs.update-changelogs.outputs.type }} artifact builds:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger ${{ needs.update-changelogs.outputs.type }} artifact builds.
+ Checkout: ${{ steps.checkout.outcome }}
+ Trigger build: ${{ steps.trigger.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
+
+ trigger-docker:
+ name: Trigger docker builds
+ runs-on: ubuntu-latest
+ needs: update-changelogs
+ if: needs.update-changelogs.outputs.run == 'true'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v2
+ with:
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ - name: Trigger build
+ id: trigger
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: ${{ github.repository }}
+ workflow: Docker
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ inputs: '{"version": "${{ needs.update-changelogs.outputs.version }}"}'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to trigger ${{ needs.update-changelogs.outputs.type }} Docker builds:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger ${{ needs.update-changelogs.outputs.type }} Docker builds.
+ Checkout: ${{ steps.checkout.outcome }}
+ Trigger build: ${{ steps.trigger.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
+
+ trigger-packages:
+ name: Trigger package builds
+ runs-on: ubuntu-latest
+ needs: update-changelogs
+ if: needs.update-changelogs.outputs.run == 'true'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v2
+ with:
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ - name: Trigger build
+ id: trigger
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: ${{ github.repository }}
+ workflow: Packages
+ ref: ${{ needs.update-changelogs.outputs.ref }}
+ inputs: '{"version": "${{ needs.update-changelogs.outputs.version }}", "type": "${{ needs.update-changelogs.outputs.type }}"}'
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to trigger ${{ needs.update-changelogs.outputs.type }} package builds:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger ${{ needs.update-changelogs.outputs.type }} package builds.
+ Checkout: ${{ steps.checkout.outcome }}
+ Trigger build: ${{ steps.trigger.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()
diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml
new file mode 100644
index 0000000..824ddd3
--- /dev/null
+++ b/.github/workflows/repoconfig-packages.yml
@@ -0,0 +1,183 @@
+---
+# Handles building of binary packages for the agent.
+name: Repository Packages
+on:
+ workflow_dispatch: null
+ pull_request:
+ paths:
+ - packaging/repoconfig/**
+ - .github/workflows/repoconfig-packages.yml
+ - .github/data/distros.yml
+ push:
+ branches:
+ - master
+ paths:
+ - packaging/repoconfig/**
+ - .github/workflows/repoconfig-packages.yml
+ - .github/data/distros.yml
+env:
+ DISABLE_TELEMETRY: 1
+ REPO_PREFIX: netdata/netdata
+jobs:
+ matrix:
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ shell: python3 {0}
+ run: |
+ from ruamel.yaml import YAML
+ import json
+ yaml = YAML(typ='safe')
+ entries = list()
+
+ with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+ for i, v in enumerate(data['include']):
+ if 'packages' in data['include'][i]:
+ entries.append({
+ 'distro': data['include'][i]['distro'],
+ 'version': data['include'][i]['version'],
+ 'pkgclouddistro': data['include'][i]['packages']['repo_distro'],
+ 'format': data['include'][i]['packages']['type'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'],
+ 'platform': data['platform_map']['amd64']
+ })
+
+ entries.sort(key=lambda k: (k['distro'], k['version']))
+ matrix = json.dumps({'include': entries}, sort_keys=True)
+ print('Generated Matrix: ' + matrix)
+ print('::set-output name=matrix::' + matrix)
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Repository Package Build matrix generation failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to generate build matrix for repository package build.
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare Tools: ${{ steps.prepare.outcome }}
+ Read Build Matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name != 'pull_request'
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
+
+ build:
+ name: Build
+ runs-on: ubuntu-latest
+ env:
+ DISABLE_TELEMETRY: 1
+ DOCKER_CLI_EXPERIMENTAL: enabled
+ needs:
+ - matrix
+ strategy:
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+      # We intentionally disable the fail-fast behavior so that a
+ # build failure for one version doesn't prevent us from publishing
+ # successfully built and tested packages for another version.
+ fail-fast: false
+ max-parallel: 8
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ # Unlike normally, we do not need a deep clone or submodules for this.
+ - name: Fetch base image
+ id: fetch-images
+ uses: nick-invision/retry@v2
+ with:
+ max_attempts: 3
+ retry_wait_seconds: 30
+ timeout_seconds: 900
+ command: docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}:${{ matrix.version }}
+ - name: Build Packages
+ id: build
+ shell: bash
+ run: |
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --platform ${{ matrix.platform }} \
+ -v "$PWD":/netdata ${{ matrix.base_image }}:${{ matrix.version }} \
+ /netdata/packaging/repoconfig/build-${{ matrix.format }}.sh
+ - name: SSH setup
+ id: ssh-setup
+ if: github.event_name == 'workflow_dispatch'
+ continue-on-error: true
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Upload to packages.netdata.cloud
+ id: package-upload
+ continue-on-error: true
+ if: github.event_name == 'workflow_dispatch'
+ run: |
+ .github/scripts/package-upload.sh \
+ ${{ matrix.repo_distro }} \
+ ${{ matrix.arch }} \
+ ${{ matrix.format }} \
+ netdata/netdata
+ .github/scripts/package-upload.sh \
+ ${{ matrix.repo_distro }} \
+ ${{ matrix.arch }} \
+ ${{ matrix.format }} \
+ netdata/netdata-edge
+ .github/scripts/package-upload.sh \
+ ${{ matrix.repo_distro }} \
+ ${{ matrix.arch }} \
+ ${{ matrix.format }} \
+ netdata/netdata-repoconfig
+ - name: Upload Packages
+ id: publish
+ if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
+ shell: bash
+ env:
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ for pkgfile in artifacts/*.${{ matrix.format }} ; do
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ done
+ - name: Failure Notification
+ if: ${{ failure() && github.repository == 'netdata/netdata' }}
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Repository Package Build failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: ${{ matrix.pkgclouddistro }} ${{ matrix.version }} repository package build failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Fetch images: ${{ steps.fetch-images.outcome }}
+ Build: ${{ steps.build.outcome }}
+ Import SSH Key: ${{ steps.ssh-setup.outcome }}
+ Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
+ Publish to PackageCloud: ${{ steps.publish.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
new file mode 100644
index 0000000..5679b24
--- /dev/null
+++ b/.github/workflows/review.yml
@@ -0,0 +1,172 @@
+---
+# Runs various ReviewDog based checks against PR with suggested changes to improve quality
+name: Review
+on:
+ pull_request:
+ types: [opened, reopened, labeled, synchronize]
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: review-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ prep-review:
+ name: Prepare Review Jobs
+ runs-on: ubuntu-latest
+ outputs:
+ actionlint: ${{ steps.actionlint.outputs.run }}
+ eslint: ${{ steps.eslint.outputs.run }}
+ hadolint: ${{ steps.hadolint.outputs.run }}
+ shellcheck: ${{ steps.shellcheck.outputs.run }}
+ yamllint: ${{ steps.yamllint.outputs.run }}
+ steps:
+ - name: Clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Check files for actionlint
+ id: actionlint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/actionlint') }}" = "true" ]; then
+ echo '::set-output name=run::true'
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.github/workflows/.*' ; then
+ echo '::set-output name=run::true'
+ echo 'GitHub Actions workflows have changed, need to run actionlint.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ - name: Check files for eslint
+ id: eslint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/eslint') }}" = "true" ]; then
+ echo '::set-output name=run::true'
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -v "web/gui/dashboard" | grep -Eq '.*\.js|node\.d\.plugin\.in' ; then
+ echo '::set-output name=run::true'
+ echo 'JS files have changed, need to run ESLint.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ - name: Check files for hadolint
+ id: hadolint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/hadolint') }}" = "true" ]; then
+ echo '::set-output name=run::true'
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*Dockerfile.*' ; then
+ echo '::set-output name=run::true'
+ echo 'Dockerfiles have changed, need to run Hadolint.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ - name: Check files for shellcheck
+ id: shellcheck
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/shellcheck') }}" = "true" ]; then
+ echo '::set-output name=run::true'
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.sh.*' ; then
+ echo '::set-output name=run::true'
+ echo 'Shell scripts have changed, need to run shellcheck.'
+ else
+ echo '::set-output name=run::false'
+ fi
+ - name: Check files for yamllint
+ id: yamllint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/yamllint') }}" = "true" ]; then
+ echo '::set-output name=run::true'
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.ya?ml|python\.d/.*\.conf' ; then
+ echo '::set-output name=run::true'
+ echo 'YAML files have changed, need to run yamllint.'
+ else
+ echo '::set-output name=run::false'
+ fi
+
+ actionlint:
+ name: actionlint
+ needs: prep-review
+ if: needs.prep-review.outputs.actionlint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Run actionlint
+ uses: reviewdog/action-actionlint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+
+ eslint:
+ name: eslint
+ needs: prep-review
+ if: needs.prep-review.outputs.eslint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Install eslint
+ run: npm install eslint -D
+ - name: Run eslint
+ uses: reviewdog/action-eslint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+ eslint_flags: '.'
+
+ hadolint:
+ name: hadolint
+ needs: prep-review
+ if: needs.prep-review.outputs.hadolint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - name: Run hadolint
+ uses: reviewdog/action-hadolint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+
+ shellcheck:
+ name: shellcheck
+ needs: prep-review
+ if: needs.prep-review.outputs.shellcheck == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Run shellcheck
+ uses: reviewdog/action-shellcheck@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+ path: "."
+ pattern: "*.sh*"
+ exclude: "./.git/*"
+
+ yamllint:
+ name: yamllint
+ needs: prep-review
+ if: needs.prep-review.outputs.yamllint == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Run yamllint
+ uses: reviewdog/action-yamllint@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 0000000..d483868
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,41 @@
+---
+# Runs Tests on Pushes to `master` and Pull Requests
+name: Tests
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - 'CMakeLists.txt'
+ - '**.c'
+ - '**.h'
+ pull_request:
+ paths:
+ - 'CMakeLists.txt'
+ - '**.c'
+ - '**.h'
+env:
+ DISABLE_TELEMETRY: 1
+concurrency:
+ group: tests-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ unit-tests-legacy:
+ name: Unit Tests (legacy)
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Prepare environment
+ run: |
+ ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
+ sudo apt-get install -y libjson-c-dev libipmimonitoring-dev libcups2-dev libsnappy-dev \
+ libprotobuf-dev libprotoc-dev libssl-dev protobuf-compiler \
+ libnetfilter-acct-dev
+ - name: Run ./tests/run-unit-tests.sh
+ env:
+ CFLAGS: "-O1 -DNETDATA_INTERNAL_CHECKS=1 -DNETDATA_VERIFY_LOCKS=1"
+ run: |
+ ./tests/run-unit-tests.sh
diff --git a/.github/workflows/trigger-learn-update.yml b/.github/workflows/trigger-learn-update.yml
new file mode 100644
index 0000000..3d39eba
--- /dev/null
+++ b/.github/workflows/trigger-learn-update.yml
@@ -0,0 +1,37 @@
+---
+name: Trigger Netdata Learn documentation update
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - "**.mdx?"
+ - "packaging/installer/kickstart.sh"
+concurrency:
+ group: learn-trigger-${{ github.ref }}
+ cancel-in-progress: true
+jobs:
+ trigger-ingest:
+ name: Trigger Netdata Learn ingest workflow.
+ if: github.repository == 'netdata/netdata'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Trigger Netdata Learn ingest workflow.
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/learn
+ workflow: Ingest
+ ref: refs/heads/master
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Triggering Netdata Learn documentation update failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to trigger Netdata Learn documentation update workflow.
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: failure()