summaryrefslogtreecommitdiffstats
path: root/.github
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-03-21 17:19:04 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-03-21 17:19:04 +0000
commit310edf444908b09ea6d00c03baceb7925f3bb7a2 (patch)
tree7064577c7fa7a851e2e930beb606ea8237b0bbd2 /.github
parentReleasing debian version 1.44.3-2. (diff)
downloadnetdata-310edf444908b09ea6d00c03baceb7925f3bb7a2.tar.xz
netdata-310edf444908b09ea6d00c03baceb7925f3bb7a2.zip
Merging upstream version 1.45.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '.github')
-rw-r--r--.github/CODEOWNERS51
-rw-r--r--.github/ISSUE_TEMPLATE/config.yml3
-rw-r--r--.github/codeql/python-config.yml11
-rw-r--r--.github/data/distros.yml58
-rw-r--r--.github/dependabot.yml6
-rw-r--r--.github/dockerfiles/Dockerfile.clang5
-rw-r--r--.github/labeler.yml256
-rwxr-xr-x.github/scripts/build-artifacts.sh82
-rwxr-xr-x.github/scripts/build-dist.sh71
-rwxr-xr-x.github/scripts/docker-test.sh50
-rwxr-xr-x.github/scripts/gen-docker-imagetool-args.py27
-rwxr-xr-x.github/scripts/gen-docker-tags.py35
-rwxr-xr-x.github/scripts/gen-matrix-build.py3
-rwxr-xr-x.github/scripts/gen-matrix-packaging.py1
-rwxr-xr-x.github/scripts/get-go-version.py39
-rwxr-xr-x.github/scripts/get-static-cache-key.sh1
-rwxr-xr-x.github/scripts/pkg-test.sh41
-rwxr-xr-x.github/scripts/run-updater-check.sh3
-rwxr-xr-x.github/scripts/run_install_with_dist_file.sh39
-rw-r--r--.github/workflows/add-to-project.yml4
-rw-r--r--.github/workflows/build-macos.yml142
-rw-r--r--.github/workflows/build.yml211
-rw-r--r--.github/workflows/checks.yml54
-rw-r--r--.github/workflows/codeql.yml54
-rw-r--r--.github/workflows/coverity.yml7
-rw-r--r--.github/workflows/dashboard-pr.yml4
-rw-r--r--.github/workflows/docker.yml628
-rw-r--r--.github/workflows/docs.yml29
-rw-r--r--.github/workflows/generate-integrations.yml25
-rw-r--r--.github/workflows/go-tests.yml124
-rw-r--r--.github/workflows/kickstart-upload.yml54
-rw-r--r--.github/workflows/labeler.yml2
-rw-r--r--.github/workflows/monitor-releases.yml2
-rw-r--r--.github/workflows/packaging.yml65
-rw-r--r--.github/workflows/release.yml4
-rw-r--r--.github/workflows/repoconfig-packages.yml2
-rw-r--r--.github/workflows/review.yml66
37 files changed, 1446 insertions, 813 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 7f368ceb7..d038ce680 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -6,48 +6,45 @@
# Ownership by directory structure
.github/ @Ferroin @tkatsoulas
-aclk/ @stelfrag @underhood
-build/ @Ferroin @tkatsoulas
+src/aclk/ @stelfrag
contrib/debian @Ferroin @tkatsoulas
-collectors/ @thiagoftsm
-collectors/ebpf.plugin/ @thiagoftsm
-collectors/charts.d.plugin/ @ilyam8 @Ferroin
-collectors/freebsd.plugin/ @thiagoftsm
-collectors/macos.plugin/ @thiagoftsm
-collectors/python.d.plugin/ @ilyam8
-collectors/cups.plugin/ @thiagoftsm
-exporting/ @thiagoftsm
-daemon/ @thiagoftsm @vkalintiris
-database/ @thiagoftsm @vkalintiris
+src/collectors/ @thiagoftsm
+src/collectors/ebpf.plugin/ @thiagoftsm
+src/collectors/charts.d.plugin/ @ilyam8 @Ferroin
+src/collectors/freebsd.plugin/ @thiagoftsm
+src/collectors/macos.plugin/ @thiagoftsm
+src/collectors/python.d.plugin/ @ilyam8
+src/collectors/cups.plugin/ @thiagoftsm
+src/exporting/ @thiagoftsm
+src/daemon/ @thiagoftsm @vkalintiris
+src/database/ @thiagoftsm @vkalintiris
docs/ @tkatsoulas @Ancairon
-health/ @thiagoftsm @vkalintiris @MrZammler
-health/health.d/ @thiagoftsm @MrZammler
-health/notifications/ @Ferroin @thiagoftsm @MrZammler
-ml/ @vkalintiris
-libnetdata/ @thiagoftsm @vkalintiris
+src/go/ @ilyam8
+src/health/ @thiagoftsm @vkalintiris
+src/health/health.d/ @thiagoftsm
+src/health/notifications/ @Ferroin @thiagoftsm
+src/ml/ @vkalintiris
+src/libnetdata/ @thiagoftsm @vkalintiris
packaging/ @Ferroin @tkatsoulas
-registry/ @novykh
-streaming/ @thiagoftsm
+packaging/cmake @Ferroin @vkalintiris
+src/registry/ @novykh
+src/streaming/ @thiagoftsm
system/ @Ferroin @tkatsoulas
tests/ @Ferroin @vkalintiris @tkatsoulas
-web/ @thiagoftsm @vkalintiris
-web/gui/ @novykh
-logsmanagement/ @Dim-P @thiagoftsm
+src/web/ @thiagoftsm @vkalintiris
+src/web/gui/ @novykh
+src/logsmanagement/ @thiagoftsm
# Ownership by filetype (overwrites ownership by directory)
-*.am @Ferroin @tkatsoulas
*.md @tkatsoulas @Ancairon
*.mdx @tkatsoulas @Ancairon
Dockerfile* @Ferroin @tkatsoulas
# Ownership of specific files
.gitignore @Ferroin @tkatsoulas @vkalintiris
-.eslintrc @Ferroin @tkatsoulas
-.eslintignore @Ferroin @tkatsoulas
-.csslintrc @Ferroin @tkatsoulas
-.codeclimate.yml @Ferroin @tkatsoulas
.codacy.yml @Ferroin @tkatsoulas
.yamllint.yml @Ferroin @tkatsoulas
+CMakeLists.txt @Ferroin @vkalintiris
netdata.spec.in @Ferroin @tkatsoulas
netdata-installer.sh @Ferroin @tkatsoulas
packaging/version @netdatabot @Ferroin @tkatsoulas
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 79678d7b5..196717dba 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -7,6 +7,9 @@ contact_links:
- name: "Netdata Cloud"
url: https://github.com/netdata/netdata-cloud/issues/new/choose
about: Create a report to help us improve our web application
+ - name: "Netdata Mobile App"
+ url: https://github.com/netdata/netdata-cloud/issues/new/choose
+ about: Create a report to help us improve our Mobile App
- name: Community
url: https://netdata.cloud/community
about: If you don't know where to start, visit our community page!
diff --git a/.github/codeql/python-config.yml b/.github/codeql/python-config.yml
index c82727ce3..a31b3c805 100644
--- a/.github/codeql/python-config.yml
+++ b/.github/codeql/python-config.yml
@@ -1,10 +1,9 @@
paths-ignore:
- .github
- build_external/
- - ml/dlib
- - ml/json
+ - src/ml/dlib
- tests/api
- - web/gui
- - collectors/python.d.plugin/python_modules/pyyaml*
- - collectors/python.d.plugin/python_modules/third_party
- - collectors/python.d.plugin/python_modules/urllib3
+ - src/web/gui
+ - src/collectors/python.d.plugin/python_modules/pyyaml*
+ - src/collectors/python.d.plugin/python_modules/third_party
+ - src/collectors/python.d.plugin/python_modules/urllib3
diff --git a/.github/data/distros.yml b/.github/data/distros.yml
index 9175a5c73..811c78f07 100644
--- a/.github/data/distros.yml
+++ b/.github/data/distros.yml
@@ -30,6 +30,12 @@ include:
test:
ebpf-core: true
- <<: *alpine
+ version: "3.19"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ bundle_sentry: false
+ - <<: *alpine
version: "3.18"
support_type: Core
notes: ''
@@ -50,6 +56,7 @@ include:
support_type: Intermediate
notes: ''
eol_check: false
+ bundle_sentry: false
env_prep: |
pacman --noconfirm -Syu && pacman --noconfirm -Sy grep libffi
test:
@@ -61,25 +68,31 @@ include:
support_type: Core
notes: ''
eol_check: 'amazon-linux'
+ bundle_sentry: false
packages: &amzn_packages
type: rpm
repo_distro: amazonlinux/2
arches:
- x86_64
- aarch64
- test:
+ test: &amzn_test
ebpf-core: false
+ skip-local-build: true
- <<: *amzn
version: "2023"
packages:
<<: *amzn_packages
repo_distro: amazonlinux/2023
+ test:
+ <<: *amzn_test
+ skip-local-build: false
- distro: centos
version: "7"
support_type: Core
notes: ''
eol_check: false
+ bundle_sentry: false
packages:
type: rpm
repo_distro: el/7
@@ -90,6 +103,7 @@ include:
- x86_64
test:
ebpf-core: false
+ skip-local-build: true
- &centos_stream
distro: centos-stream
@@ -100,6 +114,7 @@ include:
jsonc_removal: |
dnf remove -y json-c-devel
eol_check: true
+ bundle_sentry: false
packages: &cs_packages
type: rpm
repo_distro: el/c9s
@@ -122,6 +137,7 @@ include:
notes: ''
base_image: debian:bookworm
eol_check: true
+ bundle_sentry: true
env_prep: |
apt-get update
jsonc_removal: |
@@ -139,6 +155,7 @@ include:
- <<: *debian
version: "11"
base_image: debian:bullseye
+ bundle_sentry: false
packages:
<<: *debian_packages
repo_distro: debian/bullseye
@@ -147,6 +164,7 @@ include:
- <<: *debian
version: "10"
base_image: debian:buster
+ bundle_sentry: false
packages:
<<: *debian_packages
repo_distro: debian/buster
@@ -159,6 +177,7 @@ include:
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry: false
jsonc_removal: |
dnf remove -y json-c-devel
packages: &fedora_packages
@@ -176,13 +195,6 @@ include:
repo_distro: fedora/38
test:
ebpf-core: true
- - <<: *fedora
- version: "37"
- packages:
- <<: *fedora_packages
- repo_distro: fedora/37
- test:
- ebpf-core: true
- &opensuse
distro: opensuse
@@ -190,6 +202,7 @@ include:
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry: false
base_image: opensuse/leap:15.5
jsonc_removal: |
zypper rm -y libjson-c-devel
@@ -201,14 +214,6 @@ include:
- aarch64
test:
ebpf-core: true
- - <<: *opensuse
- version: "15.4"
- support_type: Core
- notes: ''
- base_image: opensuse/leap:15.4
- packages:
- <<: *opensuse_packages
- repo_distro: opensuse/15.4
- &oracle
distro: oraclelinux
@@ -216,6 +221,7 @@ include:
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry: false
jsonc_removal: |
dnf remove -y json-c-devel
packages: &oracle_packages
@@ -240,6 +246,7 @@ include:
jsonc_removal: |
dnf remove -y json-c-devel
eol_check: true
+ bundle_sentry: false
packages: &rocky_packages
type: rpm
repo_distro: el/9
@@ -268,6 +275,7 @@ include:
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry: false
env_prep: |
rm -f /etc/apt/apt.conf.d/docker && apt-get update
jsonc_removal: |
@@ -287,15 +295,23 @@ include:
<<: *ubuntu_packages
repo_distro: ubuntu/mantic
- <<: *ubuntu
- version: "23.04"
- packages:
- <<: *ubuntu_packages
- repo_distro: ubuntu/lunar
- - <<: *ubuntu
version: "20.04"
packages:
<<: *ubuntu_packages
repo_distro: ubuntu/focal
+legacy: # Info for platforms we used to support and still need to handle packages for
+ - <<: *fedora
+ version: "37"
+ bundle_sentry: false
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/37
+ - <<: *opensuse
+ version: "15.4"
+ bundle_sentry: false
+ packages:
+ <<: *opensuse_packages
+ repo_distro: opensuse/15.4
no_include: # Info for platforms not covered in CI
- distro: docker
version: "19.03 or newer"
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index b02b155d3..48b729622 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -7,3 +7,9 @@ updates:
labels:
- "no changelog"
- "area/ci"
+ - package-ecosystem: gomod
+ directory: /src/go/collectors/go.d.plugin
+ schedule:
+ interval: weekly
+ labels:
+ - "area/go"
diff --git a/.github/dockerfiles/Dockerfile.clang b/.github/dockerfiles/Dockerfile.clang
index 62bb01941..869254198 100644
--- a/.github/dockerfiles/Dockerfile.clang
+++ b/.github/dockerfiles/Dockerfile.clang
@@ -1,4 +1,4 @@
-FROM debian:buster AS build
+FROM debian:12 AS build
# Disable apt/dpkg interactive mode
ENV DEBIAN_FRONTEND=noninteractive
@@ -9,7 +9,8 @@ RUN /tmp/install-required-packages.sh --dont-wait --non-interactive netdata-all
# Install Clang and set as default CC
RUN apt-get install -y clang && \
- update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100
+ update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100 && \
+ update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 100
WORKDIR /netdata
COPY . .
diff --git a/.github/labeler.yml b/.github/labeler.yml
index 0ea825ef4..d7c357509 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -11,148 +11,282 @@
# Please keep the labels sorted and deduplicated.
area/ACLK:
- - aclk/**
- - database/sqlite/sqlite_aclk*
- - mqtt_websockets
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/aclk/**
+ - src/database/sqlite/sqlite_aclk*
+ - src/aclk/mqtt_websockets
area/claim:
- - claim/*
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/claim/*
area/exporting:
- - exporting/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/exporting/**
area/build:
- - build/**
- - build_external/**
- - CMakeLists.txt
- - configure.ac
- - Makefile.am
- - "**/Makefile.am"
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - build/**
+ - build_external/**
+ - CMakeLists.txt
+ - configure.ac
+ - Makefile.am
+ - "**/Makefile.am"
area/ci:
- - .github/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - .github/**
area/daemon:
- - daemon/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/daemon/**
area/database:
- - database/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/database/**
area/docs:
- - "*.md"
- - "**/*.md"
- - "**/*.mdx"
- - diagrams/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - "*.md"
+ - "**/*.md"
+ - "**/*.mdx"
+ - docs/diagrams/**
# -----------------collectors----------------------
area/collectors:
- - collectors/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/**
+ - src/go/collectors/go.d.plugin/**
collectors/plugins.d:
- - collectors/plugins.d/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/plugins.d/**
collectors/apps:
- - collectors/apps.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/apps.plugin/**
collectors/cgroups:
- - collectors/cgroups.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/cgroups.plugin/**
collectors/charts.d:
- - collectors/charts.d.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/charts.d.plugin/**
collectors/cups:
- - collectors/cups.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/cups.plugin/**
collectors/debugfs:
- - collectors/debugfs.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/debugfs.plugin/**
collectors/diskspace:
- - collectors/diskspace.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/diskspace.plugin/**
collectors/ebpf:
- - collectors/ebpf.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/ebpf.plugin/**
collectors/freebsd:
- - collectors/freebsd.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/freebsd.plugin/**
collectors/freeipmi:
- - collectors/freeipmi.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/freeipmi.plugin/**
+
+collectors/go.d.plugin:
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/go/collectors/go.d.plugin/**
collectors/idlejitter:
- - collectors/idlejitter.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/idlejitter.plugin/**
collectors/ioping:
- - collectors/ioping.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/ioping.plugin/**
collectors/macos:
- - collectors/macos.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/macos.plugin/**
collectors/nfacct:
- - collectors/nfacct.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/nfacct.plugin/**
collectors/perf:
- - collectors/perf.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/perf.plugin/**
collectors/proc:
- - collectors/proc.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/proc.plugin/**
collectors/python.d:
- - collectors/python.d.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/python.d.plugin/**
collectors/slabinfo:
- - collectors/slabinfo.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/slabinfo.plugin/**
collectors/statsd:
- - collectors/statsd.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/statsd.plugin/**
collectors/systemd-journal:
- - collectors/systemd-journal.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/systemd-journal.plugin/**
collectors/tc:
- - collectors/tc.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/tc.plugin/**
collectors/timex:
- - collectors/timex.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/timex.plugin/**
collectors/xenstat:
- - collectors/xenstat.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/xenstat.plugin/**
# ----------------/collectors----------------------
+area/go:
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/go/**
+
area/health:
- - health/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/health/**
area/metadata:
- - "**/*metadata.yaml"
- - integrations/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - "**/*metadata.yaml"
+ - integrations/**
area/ml:
- - ml/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/ml/**
area/packaging:
- - contrib/**
- - packaging/**
- - system/**
- - Dockerfile*
- - netdata-installer.sh
- - netdata.spec.in
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - contrib/**
+ - packaging/**
+ - system/**
+ - Dockerfile*
+ - netdata-installer.sh
+ - netdata.spec.in
area/registry:
- - registry/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/registry/**
area/streaming:
- - streaming/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/streaming/**
area/tests:
- - tests/**
- - daemon/unit_test*
- - coverity-scan.sh
- - cppcheck.sh
- - netdata.cppcheck
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - tests/**
+ - src/daemon/unit_test*
+ - packaging/utils/coverity-scan.sh
area/web:
- - web/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/web/**
area/logs-management:
- - logsmanagement/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/logsmanagement/**
diff --git a/.github/scripts/build-artifacts.sh b/.github/scripts/build-artifacts.sh
deleted file mode 100755
index 569c79a5a..000000000
--- a/.github/scripts/build-artifacts.sh
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/bin/sh
-#
-# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
-# and netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifacts.
-
-set -e
-
-# shellcheck source=.github/scripts/functions.sh
-. "$(dirname "$0")/functions.sh"
-
-NAME="${NAME:-netdata}"
-VERSION="${VERSION:-"$(git describe)"}"
-BASENAME="$NAME-$VERSION"
-
-prepare_build() {
- progress "Preparing build"
- (
- test -d artifacts || mkdir -p artifacts
- echo "${VERSION}" > packaging/version
- ) >&2
-}
-
-build_dist() {
- progress "Building dist"
- (
- command -v git > /dev/null && [ -d .git ] && git clean -d -f
- autoreconf -ivf
- ./configure \
- --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --libexecdir=/usr/libexec \
- --with-zlib \
- --with-math \
- --with-user=netdata \
- --disable-dependency-tracking \
- CFLAGS=-O2
- make dist
- mv "${BASENAME}.tar.gz" artifacts/
- ) >&2
-}
-
-build_static_x86_64() {
- progress "Building static x86_64"
- (
- command -v git > /dev/null && [ -d .git ] && git clean -d -f
- USER="" ./packaging/makeself/build-x86_64-static.sh
- ) >&2
-}
-
-prepare_assets() {
- progress "Preparing assets"
- (
- cp packaging/version artifacts/latest-version.txt
-
- cd artifacts || exit 1
- ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
- ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
- sha256sum -b ./* > "sha256sums.txt"
- ) >&2
-}
-
-steps="prepare_build build_dist build_static_x86_64"
-steps="$steps prepare_assets"
-
-_main() {
- for step in $steps; do
- if ! run "$step"; then
- if [ -t 1 ]; then
- debug
- else
- fail "Build failed"
- fi
- fi
- done
-
- echo "🎉 All Done!"
-}
-
-if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
- _main "$@"
-fi
diff --git a/.github/scripts/build-dist.sh b/.github/scripts/build-dist.sh
deleted file mode 100755
index 027b62147..000000000
--- a/.github/scripts/build-dist.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/sh
-#
-# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
-
-set -e
-
-# shellcheck source=.github/scripts/functions.sh
-. "$(dirname "$0")/functions.sh"
-
-NAME="${NAME:-netdata}"
-VERSION="${VERSION:-"$(git describe --always)"}"
-BASENAME="$NAME-$VERSION"
-
-prepare_build() {
- progress "Preparing build"
- (
- test -d artifacts || mkdir -p artifacts
- echo "${VERSION}" > packaging/version
- ) >&2
-}
-
-build_dist() {
- progress "Building dist"
- (
- command -v git > /dev/null && [ -d .git ] && git clean -d -f
- autoreconf -ivf
- ./configure \
- --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --libexecdir=/usr/libexec \
- --with-zlib \
- --with-math \
- --with-user=netdata \
- --disable-dependency-tracking \
- CFLAGS=-O2
- make dist
- mv "${BASENAME}.tar.gz" artifacts/
- ) >&2
-}
-
-prepare_assets() {
- progress "Preparing assets"
- (
- cp packaging/version artifacts/latest-version.txt
- cd artifacts || exit 1
- ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
- ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
- sha256sum -b ./* > "sha256sums.txt"
- ) >&2
-}
-
-steps="prepare_build build_dist prepare_assets"
-
-_main() {
- for step in $steps; do
- if ! run "$step"; then
- if [ -t 1 ]; then
- debug
- else
- fail "Build failed"
- fi
- fi
- done
-
- echo "🎉 All Done!"
-}
-
-if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
- _main "$@"
-fi
diff --git a/.github/scripts/docker-test.sh b/.github/scripts/docker-test.sh
index 0f5fa469c..0ef064501 100755
--- a/.github/scripts/docker-test.sh
+++ b/.github/scripts/docker-test.sh
@@ -1,41 +1,8 @@
#!/bin/sh
-export DEBIAN_FRONTEND=noninteractive
-
-wait_for() {
- host="${1}"
- port="${2}"
- name="${3}"
- timeout="30"
-
- if command -v nc > /dev/null ; then
- netcat="nc"
- elif command -v netcat > /dev/null ; then
- netcat="netcat"
- else
- printf "Unable to find a usable netcat command.\n"
- return 1
- fi
-
- printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
-
- sleep 30
+SCRIPT_DIR="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd -P)"
- i=0
- while ! ${netcat} -z "${host}" "${port}"; do
- sleep 1
- if [ "$i" -gt "$timeout" ]; then
- printf "Timed out!\n"
- docker ps -a
- echo "::group::Netdata container logs"
- docker logs netdata 2>&1
- echo "::endgroup::"
- return 1
- fi
- i="$((i + 1))"
- done
- printf "OK\n"
-}
+export DEBIAN_FRONTEND=noninteractive
if [ -z "$(command -v nc 2>/dev/null)" ] && [ -z "$(command -v netcat 2>/dev/null)" ]; then
sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y netcat
@@ -55,10 +22,9 @@ docker run -d --name=netdata \
--security-opt apparmor=unconfined \
netdata/netdata:test
-wait_for localhost 19999 netdata || exit 1
-
-curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
-
-cat ./response
-
-jq '.version' ./response || exit 1
+if ! "${SCRIPT_DIR}/../../packaging/runtime-check.sh"; then
+ docker ps -a
+ echo "::group::Netdata container logs"
+ docker logs netdata 2>&1
+ echo "::endgroup::"
+fi
diff --git a/.github/scripts/gen-docker-imagetool-args.py b/.github/scripts/gen-docker-imagetool-args.py
new file mode 100755
index 000000000..c0eaa1cfc
--- /dev/null
+++ b/.github/scripts/gen-docker-imagetool-args.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+import sys
+
+from pathlib import Path
+
+DIGEST_PATH = Path(sys.argv[1])
+TAG_PREFIX = sys.argv[2]
+TAGS = sys.argv[3]
+
+if TAG_PREFIX:
+ PUSH_TAGS = tuple([
+ t for t in TAGS.split(',') if t.startswith(TAG_PREFIX)
+ ])
+else:
+ PUSH_TAGS = tuple([
+ t for t in TAGS.split(',') if t.startswith('netdata/')
+ ])
+
+IMAGE_NAME = PUSH_TAGS[0].split(':')[0]
+
+images = []
+
+for f in DIGEST_PATH.glob('*'):
+ images.append(f'{IMAGE_NAME}@sha256:{f.name}')
+
+print(f'-t {" -t ".join(PUSH_TAGS)} {" ".join(images)}')
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
index 8c88d3b5e..c45b991d9 100755
--- a/.github/scripts/gen-docker-tags.py
+++ b/.github/scripts/gen-docker-tags.py
@@ -2,18 +2,33 @@
import sys
-version = sys.argv[1].split('.')
-suffix = sys.argv[2]
+github_event = sys.argv[1]
+version = sys.argv[2]
-REPO = f'netdata/netdata{suffix}'
-GHCR = f'ghcr.io/{REPO}'
-QUAY = f'quay.io/{REPO}'
+REPO = 'netdata/netdata'
-tags = []
+REPOS = (
+ REPO,
+ f'quay.io/{REPO}',
+ f'ghcr.io/{REPO}',
+)
-for repo in [REPO, GHCR, QUAY]:
- tags.append(':'.join([repo, version[0]]))
- tags.append(':'.join([repo, '.'.join(version[0:2])]))
- tags.append(':'.join([repo, '.'.join(version[0:3])]))
+match version:
+ case '':
+ tags = (f'{REPO}:test',)
+ case 'nightly':
+ tags = tuple([
+ f'{r}:{t}' for r in REPOS for t in ('edge', 'latest')
+ ])
+ case _:
+ v = f'v{version}'.split('.')
+
+ tags = tuple([
+ f'{r}:{t}' for r in REPOS for t in (
+ v[0],
+ '.'.join(v[0:2]),
+ '.'.join(v[0:3]),
+ )
+ ])
print(','.join(tags))
diff --git a/.github/scripts/gen-matrix-build.py b/.github/scripts/gen-matrix-build.py
index 3185e8836..9a70a44ce 100755
--- a/.github/scripts/gen-matrix-build.py
+++ b/.github/scripts/gen-matrix-build.py
@@ -11,6 +11,9 @@ with open('.github/data/distros.yml') as f:
data = yaml.load(f)
for i, v in enumerate(data['include']):
+ if v['test'].get('skip-local-build', False):
+ continue
+
e = {
'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
'version': v['version'],
diff --git a/.github/scripts/gen-matrix-packaging.py b/.github/scripts/gen-matrix-packaging.py
index 9347cd767..b695e33ab 100755
--- a/.github/scripts/gen-matrix-packaging.py
+++ b/.github/scripts/gen-matrix-packaging.py
@@ -28,6 +28,7 @@ for i, v in enumerate(data['include']):
'format': data['include'][i]['packages']['type'],
'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
'platform': data['platform_map'][arch],
+ 'bundle_sentry': data['include'][i]['bundle_sentry'],
'arch': arch
})
diff --git a/.github/scripts/get-go-version.py b/.github/scripts/get-go-version.py
new file mode 100755
index 000000000..105c537c8
--- /dev/null
+++ b/.github/scripts/get-go-version.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+
+import json
+import os
+import pathlib
+
+from packaging.version import parse
+
+SCRIPT_PATH = pathlib.Path(__file__).parents[0]
+REPO_ROOT = SCRIPT_PATH.parents[1]
+GO_SRC = REPO_ROOT / 'src' / 'go'
+
+GITHUB_OUTPUT = pathlib.Path(os.environ['GITHUB_OUTPUT'])
+
+version = parse('1.0.0')
+modules = []
+
+for modfile in GO_SRC.glob('**/go.mod'):
+ moddata = modfile.read_text()
+
+ for line in moddata.splitlines():
+ if line.startswith('go '):
+ version = max(version, parse(line.split()[1]))
+ break
+
+ for main in modfile.parent.glob('**/main.go'):
+ mainpath = main.relative_to(modfile.parent).parent
+
+ if 'examples' in mainpath.parts:
+ continue
+
+ modules.append({
+ 'module': str(modfile.parent),
+ 'version': str(version),
+ 'build_target': f'github.com/netdata/netdata/go/{ modfile.parts[-2] }/{ str(mainpath) }/',
+ })
+
+with GITHUB_OUTPUT.open('a') as f:
+ f.write(f'matrix={ json.dumps({"include": modules}) }\n')
diff --git a/.github/scripts/get-static-cache-key.sh b/.github/scripts/get-static-cache-key.sh
index 5093b3327..e45ae30bd 100755
--- a/.github/scripts/get-static-cache-key.sh
+++ b/.github/scripts/get-static-cache-key.sh
@@ -8,6 +8,7 @@ docker pull --platform "${platform}" netdata/static-builder:${builder_rev}
# shellcheck disable=SC2046
cat $(find packaging/makeself/jobs -type f ! -regex '.*\(netdata\|-makeself\).*') > /tmp/static-cache-key-data
+cat packaging/makeself/bundled-packages.version >> /tmp/static-cache-key-data
docker run -it --rm --platform "${platform}" netdata/static-builder:${builder_rev} sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data
diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh
index 35767bf2e..6120d3fc3 100755
--- a/.github/scripts/pkg-test.sh
+++ b/.github/scripts/pkg-test.sh
@@ -1,5 +1,7 @@
#!/bin/sh
+SCRIPT_DIR="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd -P)"
+
install_debian_like() {
# This is needed to ensure package installs don't prompt for any user input.
export DEBIAN_FRONTEND=noninteractive
@@ -94,37 +96,6 @@ dump_log() {
cat ./netdata.log
}
-wait_for() {
- host="${1}"
- port="${2}"
- name="${3}"
- timeout="30"
-
- if command -v nc > /dev/null ; then
- netcat="nc"
- elif command -v netcat > /dev/null ; then
- netcat="netcat"
- else
- printf "Unable to find a usable netcat command.\n"
- return 1
- fi
-
- printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
-
- sleep 30
-
- i=0
- while ! ${netcat} -z "${host}" "${port}"; do
- sleep 1
- if [ "$i" -gt "$timeout" ]; then
- printf "Timed out!\n"
- return 1
- fi
- i="$((i + 1))"
- done
- printf "OK\n"
-}
-
case "${DISTRO}" in
debian | ubuntu)
install_debian_like
@@ -151,12 +122,6 @@ trap dump_log EXIT
/usr/sbin/netdata -D > ./netdata.log 2>&1 &
-wait_for localhost 19999 netdata || exit 1
-
-curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
-
-cat ./response
-
-jq '.version' ./response || exit 1
+"${SCRIPT_DIR}/../../packaging/runtime-check.sh" || exit 1
trap - EXIT
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
index 1224d8f67..2e70a10af 100755
--- a/.github/scripts/run-updater-check.sh
+++ b/.github/scripts/run-updater-check.sh
@@ -12,7 +12,8 @@ echo "::group::>>> Pre-Update Netdata Build Info"
netdata -W buildinfo
echo "::endgroup::"
echo ">>> Updating Netdata..."
-export NETDATA_BASE_URL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
+export NETDATA_BASE_URL="http://localhost:8080/artifacts" # Pull the tarball from the local web server.
+echo 'NETDATA_ACCEPT_MAJOR_VERSIONS="1 9999"' > /etc/netdata/netdata-updater.conf
timeout 3600 /netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update
case "$?" in
diff --git a/.github/scripts/run_install_with_dist_file.sh b/.github/scripts/run_install_with_dist_file.sh
deleted file mode 100755
index 74652efdd..000000000
--- a/.github/scripts/run_install_with_dist_file.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-#
-# This script is evaluating netdata installation with the source from make dist
-#
-# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
-#
-# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud)
-
-set -e
-
-if [ $# -ne 1 ]; then
- printf >&2 "Usage: %s <dist_file>\n" "$(basename "$0")"
- exit 1
-fi
-
-distfile="${1}"
-shift
-
-printf >&2 "Opening dist archive %s ... " "${distfile}"
-tar -xovf "${distfile}"
-distdir="$(echo "${distfile}" | rev | cut -d. -f3- | rev)"
-cp -a packaging/installer/install-required-packages.sh "${distdir}/install-required-packages.sh"
-if [ ! -d "${distdir}" ]; then
- printf >&2 "ERROR: %s is not a directory" "${distdir}"
- exit 2
-fi
-
-printf >&2 "Entering %s and starting docker run ..." "${distdir}"
-
-pushd "${distdir}" || exit 1
-docker run \
- -e DISABLE_TELEMETRY=1 \
- -v "${PWD}:/netdata" \
- -w /netdata \
- "ubuntu:latest" \
- /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt install wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install-prefix /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"
-popd || exit 1
-
-echo "All Done!"
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
index 986d836a1..838917146 100644
--- a/.github/workflows/add-to-project.yml
+++ b/.github/workflows/add-to-project.yml
@@ -13,13 +13,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Add issues to Agent project board
- uses: actions/add-to-project@v0.5.0
+ uses: actions/add-to-project@v0.6.0
with:
project-url: https://github.com/orgs/netdata/projects/32
github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
- name: Add issues to Product Bug project board
- uses: actions/add-to-project@v0.5.0
+ uses: actions/add-to-project@v0.6.0
with:
project-url: https://github.com/orgs/netdata/projects/45
github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml
new file mode 100644
index 000000000..dd6e41540
--- /dev/null
+++ b/.github/workflows/build-macos.yml
@@ -0,0 +1,142 @@
+---
+# CI code for build and test on macOS
+name: macOS Build and test
+on:
+ push: # Master branch checks only validate the build and generate artifacts for testing.
+ branches:
+ - master
+ pull_request: null # PR checks only validate the build and generate artifacts for testing.
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: check-files
+ uses: tj-actions/changed-files@v43
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
+ CMakeLists.txt
+ netdata-installer.sh
+ .github/workflows/build-macos.yml
+ .github/scripts/run-updater-check.sh
+ packaging/cmake/
+ packaging/installer/
+ packaging/*.sh
+ packaging/*.version
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
+ files_ignore: |
+ netdata.spec.in
+ **/*.md
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ build-test:
+ env:
+ DISABLE_TELEMETRY: 1
+ runs-on: ${{ matrix.runner }}
+ needs:
+ - file-check
+ strategy:
+ fail-fast: false
+ max-parallel: 3
+ matrix:
+ include:
+ - name: macos-12
+ runner: macos-12
+ - name: macos-13
+ runner: macos-13
+ - name: macos-14-M1
+ runner: macos-14
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - uses: actions/checkout@v4
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ with:
+ submodules: recursive
+ - name: Install latest bash
+ id: install-bash
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ brew install bash
+ - name: Install netdata dependencies
+ id: install-nd-dep
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ bash ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
+ - name: Build from source
+ id: build-source
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ sudo bash ./netdata-installer.sh --install-no-prefix /usr/local/netdata --dont-wait --dont-start-it --require-cloud --one-time-build
+ - name: Test Agent start up
+ id: test-agent
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ /usr/local/netdata/usr/sbin/netdata -D > ./netdata.log 2>&1 &
+ ./packaging/runtime-check.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Build & test from source macOS failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: macOS Build and test.
+ Checkout: ${{ steps.checkout.outcome }}
+ Setup runner: ${{ steps.install-bash.outcome }}
+ Install netdata required packages: ${{ steps.install-nd-dep.outcome }}
+ Build from source: ${{ steps.build-source.outcome }}
+ Test Agent runtime: ${{ steps.test-agent.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4a6debc46..7ca83d867 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -34,35 +34,46 @@ jobs:
submodules: recursive
- name: Check files
id: check-files
- uses: tj-actions/changed-files@v40
+ uses: tj-actions/changed-files@v43
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
- configure.ac
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
+ CMakeLists.txt
netdata-installer.sh
- **/Makefile*
- Makefile*
.github/data/distros.yml
.github/workflows/build.yml
.github/scripts/build-static.sh
.github/scripts/get-static-cache-key.sh
.github/scripts/gen-matrix-build.py
.github/scripts/run-updater-check.sh
- build/**
- packaging/makeself/**
- packaging/installer/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ packaging/cmake/
+ packaging/makeself/
+ packaging/installer/
+ packaging/*.sh
+ packaging/*.version
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
files_ignore: |
netdata.spec.in
- **.md
+ **/*.md
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
@@ -105,24 +116,17 @@ jobs:
id: build
if: needs.file-check.outputs.run == 'true'
run: |
- git describe
- mkdir -p artifacts
- ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
- autoreconf -ivf
- ./configure --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --libexecdir=/usr/libexec \
- --with-zlib \
- --with-math \
- --with-user=netdata
- make dist
+ mkdir -p artifacts/
+ tar --create --file "artifacts/netdata-$(git describe).tar.gz" \
+ --sort=name --posix --auto-compress --exclude=artifacts/ --exclude=.git \
+ --exclude=.gitignore --exclude=.gitattributes --exclude=.gitmodules \
+ --transform "s/^\\.\\//netdata-$(git describe)\\//" --verbose .
+ cd artifacts/
echo "distfile=$(find . -name 'netdata-*.tar.gz')" >> "${GITHUB_OUTPUT}"
- cp netdata-*.tar.gz artifacts/
- name: Store
id: store
if: needs.file-check.outputs.run == 'true'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: dist-tarball
path: artifacts/*.tar.gz
@@ -161,6 +165,7 @@ jobs:
matrix:
arch:
- x86_64
+ - armv6l
- armv7l
- aarch64
- ppc64le
@@ -193,7 +198,7 @@ jobs:
- name: Cache
if: (github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')) && needs.file-check.outputs.run == 'true'
id: cache
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: artifacts/cache
key: ${{ steps.cache-key.outputs.key }}
@@ -203,7 +208,7 @@ jobs:
- name: Build
if: github.event_name == 'workflow_dispatch' && needs.file-check.outputs.run == 'true'
id: build
- uses: nick-fields/retry@v2
+ uses: nick-fields/retry@v3
with:
timeout_minutes: 180
max_attempts: 3
@@ -211,9 +216,9 @@ jobs:
- name: Store
id: store
if: needs.file-check.outputs.run == 'true'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: static-archive
+ name: dist-static-${{ matrix.arch }}
path: artifacts/*.gz.run
retention-days: 30
- name: Failure Notification
@@ -354,7 +359,7 @@ jobs:
tags: test:${{ matrix.artifact_key }}
- name: Upload image artifact
id: upload
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: ${{ matrix.artifact_key }}-test-env
path: /tmp/image.tar
@@ -410,19 +415,18 @@ jobs:
- name: Fetch test environment
id: fetch
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: ${{ matrix.artifact_key }}-test-env
+ action: actions/download-artifact@v4
+ with: |
+ name: ${{ matrix.artifact_key }}-test-env
+ path: .
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Load test environment
id: load
if: needs.file-check.outputs.run == 'true'
run: docker load --input image.tar
- - name: Regular build on ${{ matrix.distro }}
- id: build-basic
- if: needs.file-check.outputs.run == 'true'
- run: |
- docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
- /bin/sh -c 'autoreconf -ivf && ./configure --disable-dependency-tracking && make -j2'
- name: netdata-installer on ${{ matrix.distro }}, disable cloud
id: build-no-cloud
if: needs.file-check.outputs.run == 'true'
@@ -454,7 +458,6 @@ jobs:
Checkout: ${{ steps.checkout.outcome }}
Fetch test environment: ${{ steps.fetch.outcome }}
Load test environment: ${{ steps.load.outcome }}
- Regular build: ${{ steps.build-basic.outcome }}
netdata-installer, disable cloud: ${{ steps.build-no-cloud.outcome }}
netdata-installer, require cloud: ${{ steps.build-cloud.outcome }}
netdata-installer, no JSON-C: ${{ steps.build-no-jsonc.outcome }}
@@ -500,27 +503,40 @@ jobs:
- name: Fetch dist tarball artifacts
id: fetch-tarball
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: dist-tarball
- path: dist-tarball
+ action: actions/download-artifact@v4
+ with: |
+ name: dist-tarball
+ path: dist-tarball
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare artifact directory
id: prepare
if: needs.file-check.outputs.run == 'true'
run: |
- mkdir -p artifacts/download/latest || exit 1
- echo "9999.0.0-0" > artifacts/download/latest/latest-version.txt || exit 1
- cp dist-tarball/* artifacts/download/latest || exit 1
- cd artifacts/download/latest || exit 1
+ mkdir -p artifacts/download/v9999.0.0 || exit 1
+ mkdir -p artifacts/latest || exit 1
+ echo "v9999.0.0" > artifacts/latest/latest-version.txt || exit 1
+ cp dist-tarball/* artifacts/download/v9999.0.0 || exit 1
+ cd artifacts/download/v9999.0.0 || exit 1
ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
+ ls -lFh
sha256sum -b ./* > "sha256sums.txt" || exit 1
cat sha256sums.txt
+ cd ../.. || exit 1
+ ls -lR
- name: Fetch test environment
id: fetch-test-environment
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: ${{ matrix.artifact_key }}-test-env
+ action: actions/download-artifact@v4
+ with: |
+ name: ${{ matrix.artifact_key }}-test-env
+ path: .
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Load test environment
id: load
if: needs.file-check.outputs.run == 'true'
@@ -577,27 +593,24 @@ jobs:
id: prepare
if: needs.file-check.outputs.run == 'true'
run: mkdir -p artifacts
- - name: Retrieve Dist Tarball
+ - name: Retrieve Build Artifacts
id: fetch-dist
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: dist-tarball
- path: dist-tarball
- - name: Retrieve Static Build Artifacts
- id: fetch-static
- if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
- with:
- name: static-archive
- path: static-archive
+ action: actions/download-artifact@v4
+ with: |
+ pattern: dist-*
+ path: dist-artifacts
+ merge-multiple: true
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare Artifacts
id: consolidate
if: needs.file-check.outputs.run == 'true'
working-directory: ./artifacts/
run: |
- mv ../dist-tarball/* . || exit 1
- mv ../static-archive/* . || exit 1
+ mv ../dist-artifacts/* . || exit 1
ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
cp ../packaging/version ./latest-version.txt || exit 1
cp ../integrations/integrations.js ./integrations.js || exit 1
@@ -606,7 +619,7 @@ jobs:
- name: Store Artifacts
id: store
if: needs.file-check.outputs.run == 'true'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: final-artifacts
path: artifacts/*
@@ -662,16 +675,21 @@ jobs:
- name: Fetch artifacts
id: fetch
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: final-artifacts
- path: artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare artifacts directory
id: prepare
if: needs.file-check.outputs.run == 'true'
run: |
mkdir -p download/latest
mv artifacts/* download/latest
+ ls -al download/latest
- name: Verify that artifacts work with installer
id: verify
if: needs.file-check.outputs.run == 'true'
@@ -726,16 +744,21 @@ jobs:
- name: Fetch artifacts
id: fetch-artifacts
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: final-artifacts
- path: artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare artifacts directory
id: prepare
if: needs.file-check.outputs.run == 'true'
run: |
mkdir -p download/latest
mv artifacts/* download/latest
+ ls -al download/latest
- name: Verify that artifacts work with installer
id: verify
if: needs.file-check.outputs.run == 'true'
@@ -775,26 +798,30 @@ jobs:
steps:
- name: Retrieve Artifacts
id: fetch
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: final-artifacts
- path: final-artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: final-artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Authenticate to GCS
id: gcs-auth
- uses: google-github-actions/auth@v1
+ uses: google-github-actions/auth@v2
with:
project_id: ${{ secrets.GCP_NIGHTLY_STORAGE_PROJECT }}
credentials_json: ${{ secrets.GCS_STORAGE_SERVICE_KEY_JSON }}
- name: Setup GCS
id: gcs-setup
- uses: google-github-actions/setup-gcloud@v1.1.1
+ uses: google-github-actions/setup-gcloud@v2.0.1
- name: Upload Artifacts
id: upload
- uses: google-github-actions/upload-cloud-storage@v1.0.3
+ uses: google-github-actions/upload-cloud-storage@v2.0.0
with:
destination: ${{ secrets.GCP_NIGHTLY_STORAGE_BUCKET }}
gzip: false
- path: ./final-artifacts
+ path: ./final-artifacts/latest-version.txt
parent: false
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
@@ -840,10 +867,14 @@ jobs:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- name: Retrieve Artifacts
id: fetch
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: final-artifacts
- path: final-artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: final-artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare version info
id: version
run: |
@@ -871,7 +902,7 @@ jobs:
with:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- name: Init python environment for publish release metadata
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
id: init-python
with:
python-version: "3.12"
@@ -954,10 +985,14 @@ jobs:
uses: actions/checkout@v4
- name: Retrieve Artifacts
id: fetch
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v1
with:
- name: final-artifacts
- path: final-artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: final-artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Create Release
id: create-release
uses: ncipollo/release-action@v1
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 1308f45fa..01ada7422 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -25,28 +25,43 @@ jobs:
submodules: recursive
- name: Check files
id: check-files
- uses: tj-actions/changed-files@v40
+ uses: tj-actions/changed-files@v43
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
- configure.ac
- **/Makefile*
- Makefile*
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
+ CMakeLists.txt
.gitignore
- .github/workflows/checks.yml
- build/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ .github/data/distros.yml
+ .github/workflows/build.yml
+ .github/scripts/build-static.sh
+ .github/scripts/get-static-cache-key.sh
+ .github/scripts/gen-matrix-build.py
+ .github/scripts/run-updater-check.sh
+ packaging/cmake/
+ packaging/*.version
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
files_ignore: |
netdata.spec.in
- **.md
+ **/*.md
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
@@ -78,11 +93,8 @@ jobs:
'apk add bash;
./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata;
apk del openssl openssl-dev;
- apk add libressl libressl-dev;
- autoreconf -ivf;
- ./configure --disable-dependency-tracking;
- make;'
-
+ apk add libressl libressl-dev protobuf-dev;
+ ./netdata-installer.sh --disable-telemetry --dont-start-it --dont-wait --one-time-build;'
clang-checks:
name: Clang
needs:
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index ae5818afc..6c2c36365 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -21,6 +21,7 @@ jobs:
outputs:
cpp: ${{ steps.cpp.outputs.run }}
python: ${{ steps.python.outputs.run }}
+ go: ${{ steps.go.outputs.run }}
steps:
- name: Clone repository
uses: actions/checkout@v4
@@ -57,7 +58,7 @@ jobs:
id: python
run: |
if [ "${{ steps.always.outputs.run }}" = "false" ]; then
- if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'collectors/python.d.plugin/.*\.py' ; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'src/collectors/python.d.plugin/.*\.py' ; then
echo "run=true" >> "${GITHUB_OUTPUT}"
echo '::notice::Python code has changed, need to run CodeQL.'
else
@@ -66,6 +67,19 @@ jobs:
else
echo "run=true" >> "${GITHUB_OUTPUT}"
fi
+ - name: Check for Go changes
+ id: go
+ run: |
+ if [ "${{ steps.always.outputs.run }}" = "false" ]; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'src/go/*\.go' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo '::notice::Go code has changed, need to run CodeQL.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ fi
analyze-cpp:
name: Analyze C/C++
@@ -81,7 +95,7 @@ jobs:
submodules: recursive
fetch-depth: 0
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
languages: cpp
config-file: ./.github/codeql/c-cpp-config.yml
@@ -90,7 +104,7 @@ jobs:
- name: Build netdata
run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
- name: Run CodeQL
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
with:
category: "/language:cpp"
@@ -108,11 +122,41 @@ jobs:
submodules: recursive
fetch-depth: 0
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
config-file: ./.github/codeql/python-config.yml
languages: python
- name: Run CodeQL
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
with:
category: "/language:python"
+
+ analyze-go:
+ name: Analyze Go
+ runs-on: ubuntu-latest
+ needs: prepare
+ if: needs.prepare.outputs.go == 'true'
+ strategy:
+ matrix:
+ tree:
+ - src/go/collectors/go.d.plugin
+ permissions:
+ security-events: write
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: go
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v3
+ with:
+ working-directory: ${{ matrix.tree }}
+ - name: Run CodeQL
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:go"
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
index eb68c302b..9113e179e 100644
--- a/.github/workflows/coverity.yml
+++ b/.github/workflows/coverity.yml
@@ -7,7 +7,7 @@ on:
pull_request:
paths:
- .github/workflows/coverity.yml
- - coverity-scan.sh
+ - packaging/utils/coverity-scan.sh
env:
DISABLE_TELEMETRY: 1
concurrency:
@@ -33,7 +33,8 @@ jobs:
sudo apt-get install -y libjson-c-dev libyaml-dev libipmimonitoring-dev \
libcups2-dev libsnappy-dev libprotobuf-dev \
libprotoc-dev libssl-dev protobuf-compiler \
- libnetfilter-acct-dev
+ libnetfilter-acct-dev libmongoc-dev libxen-dev \
+ libsystemd-dev ninja-build
- name: Run coverity-scan
id: run
env:
@@ -41,7 +42,7 @@ jobs:
COVERITY_SCAN_TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
COVERITY_SCAN_SUBMIT_MAIL: ${{ secrets.COVERITY_SCAN_SUBMIT_MAIL }}
run: |
- bash -x ./coverity-scan.sh --with-install
+ bash -x ./packaging/utils/coverity-scan.sh --with-install
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
diff --git a/.github/workflows/dashboard-pr.yml b/.github/workflows/dashboard-pr.yml
index f02cfb69d..418a8b8e6 100644
--- a/.github/workflows/dashboard-pr.yml
+++ b/.github/workflows/dashboard-pr.yml
@@ -25,10 +25,10 @@ jobs:
- name: Update Files
id: update
run: |
- web/gui/bundle_dashboard_v1.py ${{ github.event.inputs.dashboard_version }}
+ src/web/gui/bundle_dashboard_v1.py ${{ github.event.inputs.dashboard_version }}
- name: Create Pull Request
id: pr
- uses: peter-evans/create-pull-request@v5
+ uses: peter-evans/create-pull-request@v6
with:
title: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
body: 'See https://github.com/netdata/dashboard/releases/tag/${{ github.event.inputs.dashboard_version }} for changes.'
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index b7fe0a866..b9f865f13 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -1,4 +1,13 @@
---
+# Handle building docker images both for CI checks and for eleases.
+#
+# The case of releaases is unfortunately rather complicated, as Docker
+# tooling does not have great support for handling of multiarch images
+# published to multiple registries. As a result, we have to build the
+# images, export the cache, and then _rebuild_ the images using the exported
+# cache but with different output parameters for buildx. We also need to
+# do the second build step as a separate job for each registry so that a
+# failure to publish one place won’t break publishing elsewhere.
name: Docker
on:
push:
@@ -25,52 +34,78 @@ jobs:
steps:
- name: Checkout
id: checkout
+ if: github.event_name != 'workflow_dispatch'
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: recursive
- name: Check files
- id: file-check
- uses: tj-actions/changed-files@v40
+ id: check-files
+ if: github.event_name != 'workflow_dispatch'
+ uses: tj-actions/changed-files@v43
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
.dockerignore
- configure.ac
+ CMakeLists.txt
netdata-installer.sh
- **/Makefile*
- Makefile*
.github/workflows/docker.yml
.github/scripts/docker-test.sh
- build/**
- packaging/docker/**
- packaging/installer/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ .github/scripts/gen-docker-tags.py
+ .github/scripts/gen-docker-imagetool-args.py
+ packaging/cmake/
+ packaging/docker/
+ packaging/installer/
+ packaging/runtime-check.sh
+ packaging/*.version
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
files_ignore: |
netdata.spec.in
- **.md
+ **/*.md
+ - name: List all changed files in pattern
+ continue-on-error: true
+ if: github.event_name != 'workflow_dispatch'
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
- if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo 'run=true' >> "${GITHUB_OUTPUT}"
else
echo 'run=false' >> "${GITHUB_OUTPUT}"
fi
- docker-test:
- name: Docker Runtime Test
+ build-images:
+ name: Build Docker Images
needs:
- file-check
runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
+ # Fail fast on releases, but run everything to completion on other triggers.
+ fail-fast: ${{ github.event_name == 'workflow_dispatch' }}
steps:
- name: Skip Check
id: skip
@@ -81,333 +116,554 @@ jobs:
if: needs.file-check.outputs.run == 'true'
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
+ - name: Generate Artifact Name
+ id: artifact-name
+ if: github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' && github.event_name == 'workflow_dispatch'
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' && github.event_name == 'workflow_dispatch'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
+ - name: Setup QEMU
+ id: qemu
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64' && needs.file-check.outputs.run == 'true'
+ uses: docker/setup-qemu-action@v3
- name: Setup Buildx
id: prepare
if: needs.file-check.outputs.run == 'true'
uses: docker/setup-buildx-action@v3
- - name: Test Build
+ - name: Build Image
id: build
if: needs.file-check.outputs.run == 'true'
uses: docker/build-push-action@v5
with:
- load: true
- push: false
+ platforms: ${{ matrix.platform }}
tags: netdata/netdata:test
+ load: true
+ cache-to: type=local,dest=/tmp/build-cache,mode=max
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
- name: Test Image
id: test
- if: needs.file-check.outputs.run == 'true'
+ if: needs.file-check.outputs.run == 'true' && matrix.platform == 'linux/amd64'
run: .github/scripts/docker-test.sh
+ - name: Upload Cache
+ id: upload-cache
+ if: github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' && github.event_name == 'workflow_dispatch'
+ uses: actions/upload-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache/*
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker runtime testing failed:'
+ SLACK_TITLE: 'Docker build failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Building or testing Docker image for linux/amd64 failed.
- CHeckout: ${{ steps.checkout.outcome }}
+ ${{ github.repository }}: Building or testing Docker image for ${{ matrix.platform }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
Setup buildx: ${{ steps.prepare.outcome }}
Build image: ${{ steps.build.outcome }}
Test image: ${{ steps.test.outcome }}
+ Upload build cache: ${{ steps.upload-cache.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
&& github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
&& needs.file-check.outputs.run == 'true'
}}
- docker-ci:
- if: github.event_name != 'workflow_dispatch'
- name: Docker Alt Arch Builds
- needs:
- - docker-test
- - file-check
+ gen-tags:
+ name: Generate Docker Tags
runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch'
+ outputs:
+ tags: ${{ steps.tag.outputs.tags }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Generate Tags
+ id: tag
+ run: |
+ if [ ${{ github.event_name }} = 'workflow_dispatch' ]; then
+ echo "tags=$(.github/scripts/gen-docker-tags.py ${{ github.event_name }} ${{ github.event.inputs.version }})" >> "${GITHUB_OUTPUT}"
+ else
+ echo "tags=$(.github/scripts/gen-docker-tags.py ${{ github.event_name }} '')" >> "${GITHUB_OUTPUT}"
+ fi
+
+ build-images-docker-hub:
+ name: Push Images to Docker Hub
+ if: github.event_name == 'workflow_dispatch'
+ needs:
+ - build-images
+ - gen-tags
strategy:
matrix:
- platforms:
+ platform:
+ - linux/amd64
- linux/i386
- linux/arm/v7
- linux/arm64
- linux/ppc64le
+ runs-on: ubuntu-latest
steps:
- - name: Skip Check
- id: skip
- if: needs.file-check.outputs.run != 'true'
- run: echo "SKIPPED"
- name: Checkout
id: checkout
- if: needs.file-check.outputs.run == 'true'
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
+ - name: Generate Artifact Name
+ id: artifact-name
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Download Cache
+ id: fetch-cache
+ uses: actions/download-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
- name: Setup QEMU
id: qemu
- if: matrix.platforms != 'linux/i386' && needs.file-check.outputs.run == 'true'
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3
- name: Setup Buildx
- id: buildx
- if: needs.file-check.outputs.run == 'true'
+ id: prepare
uses: docker/setup-buildx-action@v3
- - name: Build
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: Build Image
id: build
- if: needs.file-check.outputs.run == 'true'
uses: docker/build-push-action@v5
with:
- platforms: ${{ matrix.platforms }}
- load: false
- push: false
- tags: netdata/netdata:test
+ platforms: ${{ matrix.platform }}
+ cache-from: type=local,src=/tmp/build-cache
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ outputs: type=image,name=netdata/netdata,push-by-digest=true,name-canonical=true,push=true
+ - name: Export Digest
+ id: export-digest
+ if: github.repository == 'netdata/netdata'
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+ - name: Upload digest
+ id: upload-digest
+ if: github.repository == 'netdata/netdata'
+ uses: actions/upload-artifact@v4
+ with:
+ name: docker-digests-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker build testing failed:'
+ SLACK_TITLE: 'Docker Hub upload failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Building Docker image for ${{ matrix.platforms }} failed.
- CHeckout: ${{ steps.checkout.outcome }}
+ ${{ github.repository }}: Creating or uploading Docker image for ${{ matrix.platform }} on Docker Hub failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Fetch build cache: ${{ steps.fetch-cache.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
Setup QEMU: ${{ steps.qemu.outcome }}
- Setup buildx: ${{ steps.buildx.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
Build image: ${{ steps.build.outcome }}
+ Export digest: ${{ steps.export-digest.outcome }}
+ Upload digest: ${{ steps.upload-digest.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
- && github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
- && needs.file-check.outputs.run == 'true'
}}
- normalize-tag: # Fix the release tag if needed
- name: Normalize Release Tag
- runs-on: ubuntu-latest
+ publish-docker-hub:
+ name: Consolidate and tag images for DockerHub
if: github.event_name == 'workflow_dispatch'
- outputs:
- tag: ${{ steps.tag.outputs.tag }}
+ needs:
+ - build-images-docker-hub
+ - gen-tags
+ runs-on: ubuntu-latest
steps:
- - name: Normalize Tag
- id: tag
- run: |
- if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
- echo "tag=v${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
- else
- echo "tag=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
- fi
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Download digests
+ id: fetch-digests
+ uses: actions/download-artifact@v4
+ with:
+ path: /tmp/digests
+ pattern: docker-digests-*
+ merge-multiple: true
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v3
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: Create and Push Manifest
+ id: manifest
+ if: github.repository == 'netdata/netdata'
+ run: docker buildx imagetools create $(.github/scripts/gen-docker-imagetool-args.py /tmp/digests '' ${{ needs.gen-tags.outputs.tags }})
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Publishing Docker images to Docker Hub failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Publishing Docker images to Docker Hub failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Download digests: ${{ steps.fetch-digests.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Create and push manifest: ${{ steps.manifest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
- docker-publish:
+ build-images-quay:
+ name: Push Images to Quay.io
if: github.event_name == 'workflow_dispatch'
- name: Docker Build and Publish
needs:
- - docker-test
- - normalize-tag
+ - build-images
+ - gen-tags
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
runs-on: ubuntu-latest
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
- - name: Determine which tags to use
- id: release-tags
- if: github.event.inputs.version != 'nightly'
- run: |
- echo "tags=netdata/netdata:latest,netdata/netdata:stable,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:stable,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '')" \
- >> "${GITHUB_ENV}"
- - name: Determine which tags to use
- id: nightly-tags
- if: github.event.inputs.version == 'nightly'
- run: |
- echo "tags=netdata/netdata:latest,netdata/netdata:edge,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:edge,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:edge" >> "${GITHUB_ENV}"
+ - name: Generate Artifact Name
+ id: artifact-name
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Download Cache
+ id: fetch-cache
+ uses: actions/download-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache
- name: Mark image as official
id: env
if: github.repository == 'netdata/netdata'
run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
- name: Setup QEMU
id: qemu
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3
- name: Setup Buildx
- id: buildx
+ id: prepare
uses: docker/setup-buildx-action@v3
- - name: Docker Hub Login
- id: docker-hub-login
- if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKER_HUB_USERNAME }}
- password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- - name: GitHub Container Registry Login
- id: ghcr-login
- if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
- with:
- registry: ghcr.io
- username: ${{ github.repository_owner }}
- password: ${{ secrets.GITHUB_TOKEN }}
- - name: Quay.io Login
- id: quay-login
+ - name: Registry Login
+ id: login
if: github.repository == 'netdata/netdata'
uses: docker/login-action@v3
with:
registry: quay.io
username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
- - name: Docker Build
+ - name: Build Image
id: build
uses: docker/build-push-action@v5
with:
- platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
- push: ${{ github.repository == 'netdata/netdata' }}
- tags: ${{ env.tags }}
+ platforms: ${{ matrix.platform }}
+ cache-from: type=local,src=/tmp/build-cache
build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ outputs: type=image,name=quay.io/netdata/netdata,push-by-digest=true,name-canonical=true,push=true
+ - name: Export Digest
+ id: export-digest
+ if: github.repository == 'netdata/netdata'
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+ - name: Upload digest
+ id: upload-digest
+ if: github.repository == 'netdata/netdata'
+ uses: actions/upload-artifact@v4
+ with:
+ name: quay-digests-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker Build failed:'
+ SLACK_TITLE: 'Quay.io upload failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Failed to build or publish Docker images.
- CHeckout: ${{ steps.checkout.outcome }}
- Generate release tags: ${{ steps.release-tags.outcome }}
- Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ ${{ github.repository }}: Creating or uploading Docker image for ${{ matrix.platform }} on Quay.io failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Fetch build cache: ${{ steps.fetch-cache.outcome }}
Setup environment: ${{ steps.env.outcome }}
Setup QEMU: ${{ steps.qemu.outcome }}
- Setup buildx: ${{ steps.buildx.outcome }}
- Login to DockerHub: ${{ steps.docker-hub-login.outcome }}
- Login to GHCR: ${{ steps.ghcr-login.outcome }}
- Login to Quay: ${{ steps.quay-login.outcome }}
- Build and publish images: ${{ steps.build.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ Export digest: ${{ steps.export-digest.outcome }}
+ Upload digest: ${{ steps.upload-digest.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
- && github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
}}
- - name: Trigger Helmchart PR
- if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
- uses: benc-uk/workflow-dispatch@v1
+
+ publish-quay:
+ name: Consolidate and tag images for Quay.io
+ if: github.event_name == 'workflow_dispatch'
+ needs:
+ - build-images-quay
+ - gen-tags
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Download digests
+ id: fetch-digests
+ uses: actions/download-artifact@v4
with:
- token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- repo: netdata/helmchart
- workflow: Agent Version PR
- ref: refs/heads/master
- inputs: '{"agent_version": "${{ needs.normalize-tag.outputs.tag }}"}'
- - name: Trigger MSI build
- if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
- uses: benc-uk/workflow-dispatch@v1
+ path: /tmp/digests
+ pattern: quay-digests-*
+ merge-multiple: true
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v3
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
with:
- token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- repo: netdata/msi-installer
- workflow: Build
- ref: refs/heads/master
- inputs: '{"tag": "${{ needs.normalize-tag.outputs.tag }}", "pwd": "${{ secrets.MSI_CODE_SIGNING_PASSWORD }}"}'
+ registry: quay.io
+ username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
+ password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
+ - name: Create and Push Manifest
+ id: manifest
+ if: github.repository == 'netdata/netdata'
+ run: docker buildx imagetools create $(.github/scripts/gen-docker-imagetool-args.py /tmp/digests 'quay.io' ${{ needs.gen-tags.outputs.tags }})
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Publishing Docker images on Quay.io failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Publishing Docker images on Quay.io failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Download digests: ${{ steps.fetch-digests.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Create and push manifest: ${{ steps.manifest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
- docker-dbg-publish:
+ build-images-ghcr:
+ name: Push Images to GHCR
if: github.event_name == 'workflow_dispatch'
- name: Docker Build and Publish (Debugging Image)
needs:
- - docker-test
- - normalize-tag
+ - build-images
+ - gen-tags
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
runs-on: ubuntu-latest
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
- - name: Determine which tags to use
- id: release-tags
- if: github.event.inputs.version != 'nightly'
- run: |
- echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:stable,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:stable,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '-debug')" \
- >> "${GITHUB_ENV}"
- - name: Determine which tags to use
- id: nightly-tags
- if: github.event.inputs.version == 'nightly'
- run: |
- echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:edge,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:edge,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:edge" >> "${GITHUB_ENV}"
+ - name: Generate Artifact Name
+ id: artifact-name
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Download Cache
+ id: fetch-cache
+ uses: actions/download-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache
- name: Mark image as official
id: env
if: github.repository == 'netdata/netdata'
run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
- name: Setup QEMU
id: qemu
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3
- name: Setup Buildx
- id: buildx
+ id: prepare
uses: docker/setup-buildx-action@v3
- - name: Docker Hub Login
- id: docker-hub-login
- if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKER_HUB_USERNAME }}
- password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- - name: GitHub Container Registry Login
- id: ghcr-login
+ - name: Registry Login
+ id: login
if: github.repository == 'netdata/netdata'
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- - name: Quay.io Login
- id: quay-login
- if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
- with:
- registry: quay.io
- username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
- password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
- - name: Docker Build
+ - name: Build Image
id: build
uses: docker/build-push-action@v5
with:
- platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
- push: ${{ github.repository == 'netdata/netdata' }}
- tags: ${{ env.tags }}
- build-args: |
- OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
- DEBUG_BUILD=1
+ platforms: ${{ matrix.platform }}
+ cache-from: type=local,src=/tmp/build-cache
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ outputs: type=image,name=ghcr.io/netdata/netdata,push-by-digest=true,name-canonical=true,push=true
+ - name: Export Digest
+ id: export-digest
+ if: github.repository == 'netdata/netdata'
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+ - name: Upload digest
+ id: upload-digest
+ if: github.repository == 'netdata/netdata'
+ uses: actions/upload-artifact@v4
+ with:
+ name: ghcr-digests-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker Debug Build failed:'
+ SLACK_TITLE: 'GHCR upload failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Failed to build or publish Docker debug images.
+ ${{ github.repository }}: Creating or uploading Docker image for ${{ matrix.platform }} on GHCR failed.
Checkout: ${{ steps.checkout.outcome }}
- Generate release tags: ${{ steps.release-tags.outcome }}
- Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Fetch build cache: ${{ steps.fetch-cache.outcome }}
Setup environment: ${{ steps.env.outcome }}
Setup QEMU: ${{ steps.qemu.outcome }}
- Setup buildx: ${{ steps.buildx.outcome }}
- Login to DockerHub: ${{ steps.docker-hub-login.outcome }}
- Login to GHCR: ${{ steps.ghcr-login.outcome }}
- Login to Quay: ${{ steps.quay-login.outcome }}
- Build and publish images: ${{ steps.build.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ Export digest: ${{ steps.export-digest.outcome }}
+ Upload digest: ${{ steps.upload-digest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
+
+ publish-ghcr:
+ name: Consolidate and tag images for GHCR
+ if: github.event_name == 'workflow_dispatch'
+ needs:
+      - build-images-ghcr
+ - gen-tags
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Download digests
+ id: fetch-digests
+ uses: actions/download-artifact@v4
+ with:
+ path: /tmp/digests
+ pattern: ghcr-digests-*
+ merge-multiple: true
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v3
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ - name: Create and Push Manifest
+ id: manifest
+ if: github.repository == 'netdata/netdata'
+ run: docker buildx imagetools create $(.github/scripts/gen-docker-imagetool-args.py /tmp/digests 'ghcr.io' ${{ needs.gen-tags.outputs.tags }})
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Publishing Docker images on GHCR failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Publishing Docker images on GHCR failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Download digests: ${{ steps.fetch-digests.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Create and push manifest: ${{ steps.manifest.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
- && github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
}}
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
deleted file mode 100644
index a0554b167..000000000
--- a/.github/workflows/docs.yml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-name: Docs
-on:
- push:
- branches:
- - master
- paths:
- - '**.md'
- pull_request:
- paths:
- - '**.md'
-env:
- DISABLE_TELEMETRY: 1
-jobs:
- markdown-link-check:
- name: Broken Links
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- with:
- submodules: recursive
- - name: Run link check
- uses: gaurav-nelson/github-action-markdown-link-check@v1
- with:
- use-quiet-mode: 'no'
- use-verbose-mode: 'yes'
- check-modified-files-only: 'yes'
- config-file: '.mlc_config.json'
diff --git a/.github/workflows/generate-integrations.yml b/.github/workflows/generate-integrations.yml
index 4128e9925..f7d4df866 100644
--- a/.github/workflows/generate-integrations.yml
+++ b/.github/workflows/generate-integrations.yml
@@ -6,15 +6,15 @@ on:
branches:
- master
paths: # If any of these files change, we need to regenerate integrations.js.
- - 'collectors/**/metadata.yaml'
- - 'exporting/**/metadata.yaml'
- - 'health/notifications/**/metadata.yaml'
+ - 'src/collectors/**/metadata.yaml'
+ - 'src/go/collectors/**/metadata.yaml'
+ - 'src/exporting/**/metadata.yaml'
+ - 'src/health/notifications/**/metadata.yaml'
- 'integrations/templates/**'
- 'integrations/categories.yaml'
- 'integrations/deploy.yaml'
- 'integrations/cloud-notifications/metadata.yaml'
- 'integrations/gen_integrations.py'
- - 'packaging/go.d.version'
workflow_dispatch: null
concurrency: # This keeps multiple instances of the job from running concurrently for the same ref.
group: integrations-${{ github.ref }}
@@ -31,17 +31,6 @@ jobs:
with:
fetch-depth: 1
submodules: recursive
- - name: Get Go Ref
- id: get-go-ref
- run: echo "go_ref=$(cat packaging/go.d.version)" >> "${GITHUB_ENV}"
- - name: Checkout Go
- id: checkout-go
- uses: actions/checkout@v4
- with:
- fetch-depth: 1
- path: go.d.plugin
- repository: netdata/go.d.plugin
- ref: ${{ env.go_ref }}
- name: Prepare Dependencies
id: prep-deps
run: |
@@ -58,7 +47,7 @@ jobs:
id: generate-integrations-documentation
run: |
python3 integrations/gen_docs_integrations.py
- - name: Generate collectors/COLLECTORS.md
+ - name: Generate src/collectors/COLLECTORS.md
id: generate-collectors-md
run: |
python3 integrations/gen_doc_collector_page.py
@@ -67,7 +56,7 @@ jobs:
run: rm -rf go.d.plugin virtualenv
- name: Create PR
id: create-pr
- uses: peter-evans/create-pull-request@v5
+ uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
commit-message: Regenerate integrations.js
@@ -95,7 +84,7 @@ jobs:
Prepare Dependencies: ${{ steps.prep-deps.outcome }}
Generate Integrations: ${{ steps.generate.outcome }}
Generate Integrations Documentation: ${{ steps.generate-integrations-documentation.outcome }}
- Generate collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }}
+ Generate src/collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }}
Clean Up Temporary Data: ${{ steps.clean.outcome }}
Create PR: ${{ steps.create-pr.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml
new file mode 100644
index 000000000..9e5550507
--- /dev/null
+++ b/.github/workflows/go-tests.yml
@@ -0,0 +1,124 @@
+---
+# Ci code for building release artifacts.
+name: Go Tests
+on:
+ push: # Master branch checks only validate the build and generate artifacts for testing.
+ branches:
+ - master
+ pull_request: null # PR checks only validate the build and generate artifacts for testing.
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: go-test-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: check-files
+ uses: tj-actions/changed-files@v43
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.cmake
+ CMakeLists.txt
+ .github/workflows/go-tests.yml
+ packaging/cmake/
+ src/go/**
+ files_ignore: |
+ **/*.md
+ src/go/**/metadata.yaml
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ matrix:
+ name: Generate Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.get-version.outputs.matrix }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Install dependencies
+ run: sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y python3-packaging
+ - name: Get Go version and modules
+ id: get-version
+ run: .github/scripts/get-go-version.py
+
+ tests:
+ name: Go toolchain tests
+ runs-on: ubuntu-latest
+ needs:
+ - file-check
+ - matrix
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Install Go
+ uses: actions/setup-go@v5
+ with:
+ go-version: ${{ matrix.version }}
+ - name: Checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Go mod download
+ if: needs.file-check.outputs.run == 'true'
+ run: go mod download
+ working-directory: ${{ matrix.module }}
+ - name: Compile
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ CGO_ENABLED=0 go build -o /tmp/go-test-build ${{ matrix.build_target }}
+ /tmp/go-test-build --help || true
+ working-directory: ${{ matrix.module }}
+ - name: Go fmt
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ go fmt ./... | tee modified-files
+ [ "$(wc -l modified-files | cut -f 1 -d ' ')" -eq 0 ] || exit 1
+ working-directory: ${{ matrix.module }}
+ - name: Go vet
+ if: needs.file-check.outputs.run == 'true'
+ run: go vet ./...
+ working-directory: ${{ matrix.module }}
+ - name: Set up gotestfmt
+ if: needs.file-check.outputs.run == 'true'
+ uses: GoTestTools/gotestfmt-action@v2
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ version: v2.0.0
+ - name: Go test
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ set -euo pipefail
+ go test -json ./... -race -count=1 2>&1 | gotestfmt -hide all
+ working-directory: ${{ matrix.module }}
diff --git a/.github/workflows/kickstart-upload.yml b/.github/workflows/kickstart-upload.yml
new file mode 100644
index 000000000..77c26d7bc
--- /dev/null
+++ b/.github/workflows/kickstart-upload.yml
@@ -0,0 +1,54 @@
+---
+# Upload the kickstart script to the repo server
+name: Upload Kickstart Script
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - .github/workflows/kickstart-upload.yml
+ - packaging/installer/kickstart.sh
+ workflow_dispatch: null
+concurrency:
+ group: kickstart-upload
+ cancel-in-progress: true
+jobs:
+ upload:
+ name: Upload Kickstart Script
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ - name: SSH setup
+ id: ssh-setup
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Upload
+ id: upload
+ run: rsync -vp packaging/installer/kickstart.sh netdatabot@packages.netdata.cloud:/home/netdatabot/incoming/kickstart.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Kickstart upload failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to upload updated kickstart script to repo server.
+ Checkout: ${{ steps.checkout.outcome }}
+ Import SSH Key: ${{ steps.ssh-setup.outcome }}
+ Upload: ${{ steps.upload.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index a1e3b52fe..3f66b98f4 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -14,7 +14,7 @@ jobs:
contents: read
pull-requests: write
steps:
- - uses: actions/labeler@v4
+ - uses: actions/labeler@v5
if: github.repository == 'netdata/netdata'
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
diff --git a/.github/workflows/monitor-releases.yml b/.github/workflows/monitor-releases.yml
index 649cf68aa..7962c9861 100644
--- a/.github/workflows/monitor-releases.yml
+++ b/.github/workflows/monitor-releases.yml
@@ -27,7 +27,7 @@ jobs:
with:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- name: Init python environment
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
id: init-python
with:
python-version: "3.12"
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
index eb936c4d9..7e72fb3f0 100644
--- a/.github/workflows/packaging.yml
+++ b/.github/workflows/packaging.yml
@@ -40,39 +40,47 @@ jobs:
fetch-depth: 0
submodules: recursive
- name: Check files
- id: file-check
- uses: tj-actions/changed-files@v40
+ id: check-files
+ uses: tj-actions/changed-files@v43
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
netdata.spec.in
- configure.ac
- **/Makefile*
- Makefile*
+ contrib/debian/
+ CMakeLists.txt
.github/data/distros.yml
.github/workflows/packaging.yml
.github/scripts/gen-matrix-packaging.py
.github/scripts/pkg-test.sh
- build/**
+ packaging/cmake/
packaging/*.sh
- packaging/*.checksums
packaging/*.version
- contrib/debian/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
files_ignore: |
- **.md
+ **/*.md
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
- if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo 'run=true' >> "${GITHUB_OUTPUT}"
else
echo 'run=false' >> "${GITHUB_OUTPUT}"
@@ -207,6 +215,18 @@ jobs:
with:
fetch-depth: 0 # We need full history for versioning
submodules: recursive
+ - name: Set Sentry telemetry env vars
+ id: set-telemetry-env-vars
+ run: |
+ if [ "${{ github.repository }}" = 'netdata/netdata' ] && \
+ [ "${{ matrix.bundle_sentry }}" = 'true' ] && \
+ [ "${{ github.event_name }}" = 'workflow_dispatch' ]; then
+ echo "RELEASE_PIPELINE=Production" >> "${GITHUB_ENV}"
+ echo "UPLOAD_SENTRY=true" >> "${GITHUB_ENV}"
+ else
+ echo "RELEASE_PIPELINE=Unknown" >> "${GITHUB_ENV}"
+ echo "UPLOAD_SENTRY=false" >> "${GITHUB_ENV}"
+ fi
- name: Setup QEMU
id: qemu
if: matrix.platform != 'linux/amd64' && matrix.platform != 'linux/i386' && needs.file-check.outputs.run == 'true'
@@ -221,7 +241,7 @@ jobs:
- name: Fetch images
id: fetch-images
if: needs.file-check.outputs.run == 'true'
- uses: nick-invision/retry@v2
+ uses: nick-invision/retry@v3
with:
max_attempts: 3
retry_wait_seconds: 30
@@ -235,12 +255,15 @@ jobs:
shell: bash
run: |
docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e VERSION=${{ needs.version-check.outputs.version }} \
+ -e ENABLE_SENTRY=${{ matrix.bundle_sentry }} -e RELEASE_PIPELINE=${{ env.RELEASE_PIPELINE }} \
+ -e BUILD_DESTINATION=${{ matrix.distro }}${{ matrix.version }}_${{ matrix.arch }} -e UPLOAD_SENTRY=${{ env.UPLOAD_SENTRY }} \
+ -e SENTRY_AUTH_TOKEN=${{ secrets.SENTRY_CLI_TOKEN }} -e NETDATA_SENTRY_DSN=${{ secrets.SENTRY_DSN }} \
--platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-v1
- name: Save Packages
id: artifacts
if: needs.file-check.outputs.run == 'true'
continue-on-error: true
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-packages
path: ${{ github.workspace }}/artifacts/*
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 2fa51cc52..91f2d5493 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -46,7 +46,7 @@ jobs:
- name: Generate Nightly Changleog
id: nightly-changelog
if: steps.target.outputs.run == 'true' && steps.target.outputs.type == 'nightly'
- uses: heinrichreimer/github-changelog-generator-action@v2.3
+ uses: heinrichreimer/github-changelog-generator-action@v2.4
with:
bugLabels: IGNOREBUGS
excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
@@ -59,7 +59,7 @@ jobs:
- name: Generate Release Changelog
id: release-changelog
if: steps.target.outputs.run == 'true' && steps.target.outputs.type != 'nightly'
- uses: heinrichreimer/github-changelog-generator-action@v2.3
+ uses: heinrichreimer/github-changelog-generator-action@v2.4
with:
bugLabels: IGNOREBUGS
excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml
index df8fac204..f3d65a3e1 100644
--- a/.github/workflows/repoconfig-packages.yml
+++ b/.github/workflows/repoconfig-packages.yml
@@ -81,7 +81,7 @@ jobs:
# Unlike normally, we do not need a deep clone or submodules for this.
- name: Fetch base image
id: fetch-images
- uses: nick-invision/retry@v2
+ uses: nick-invision/retry@v3
with:
max_attempts: 3
retry_wait_seconds: 30
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
index 8cb61f2cf..c7b038135 100644
--- a/.github/workflows/review.yml
+++ b/.github/workflows/review.yml
@@ -16,8 +16,8 @@ jobs:
outputs:
actionlint: ${{ steps.actionlint.outputs.run }}
clangformat: ${{ steps.clangformat.outputs.run }}
- eslint: ${{ steps.eslint.outputs.run }}
flake8: ${{ steps.flake8.outputs.run }}
+ golangci-lint: ${{ steps.golangci-lint.outputs.run }}
hadolint: ${{ steps.hadolint.outputs.run }}
shellcheck: ${{ steps.shellcheck.outputs.run }}
yamllint: ${{ steps.yamllint.outputs.run }}
@@ -49,17 +49,6 @@ jobs:
# else
# echo "run=false" >> "${GITHUB_OUTPUT}"
# fi
- - name: Check files for eslint
- id: eslint
- run: |
- if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/eslint') }}" = "true" ]; then
- echo "run=true" >> "${GITHUB_OUTPUT}"
- elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -v "web/gui/v1" | grep -v "web/gui/v2" | grep -v "integrations/" | grep -Eq '.*\.js' ; then
- echo "run=true" >> "${GITHUB_OUTPUT}"
- echo 'JS files have changed, need to run ESLint.'
- else
- echo "run=false" >> "${GITHUB_OUTPUT}"
- fi
- name: Check files for flake8
id: flake8
run: |
@@ -71,6 +60,17 @@ jobs:
else
echo "run=false" >> "${GITHUB_OUTPUT}"
fi
+ - name: Check files for golangci-lint
+ id: golangci-lint
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/golangci-lint') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD -- | grep -Eq '.*\.go' ; then
+          echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'Go code has changed, need to run golangci-lint.'
+ else
+          echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
- name: Check files for hadolint
id: hadolint
run: |
@@ -155,26 +155,6 @@ jobs:
exit 1
fi
- eslint:
- name: eslint
- needs: prep-review
- if: needs.prep-review.outputs.eslint == 'true'
- runs-on: ubuntu-latest
- steps:
- - name: Git clone repository
- uses: actions/checkout@v4
- with:
- submodules: recursive
- fetch-depth: 0
- - name: Install eslint
- run: npm install eslint -D
- - name: Run eslint
- uses: reviewdog/action-eslint@v1
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- reporter: github-pr-check
- eslint_flags: '.'
-
flake8:
name: flake8
needs: prep-review
@@ -187,7 +167,7 @@ jobs:
submodules: recursive
fetch-depth: 0
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Run flake8
@@ -196,6 +176,26 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
reporter: github-pr-check
+ golangci-lint:
+ name: golangci-lint
+ needs: prep-review
+ if: needs.prep-review.outputs.golangci-lint == 'true'
+ strategy:
+ matrix:
+ tree:
+ - src/go/collectors/go.d.plugin
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Run golangci-lint
+ uses: reviewdog/action-golangci-lint@v2
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+ golangci_lint_flags: '--timeout=10m'
+ workdir: ${{ matrix.tree }}
+
hadolint:
name: hadolint
needs: prep-review