summaryrefslogtreecommitdiffstats
path: root/.github
diff options
context:
space:
mode:
Diffstat (limited to '.github')
-rw-r--r--.github/CODEOWNERS85
-rw-r--r--.github/ISSUE_TEMPLATE/config.yml3
-rw-r--r--.github/codeql/python-config.yml11
-rw-r--r--.github/data/distros.yml118
-rw-r--r--.github/dependabot.yml6
-rw-r--r--.github/dockerfiles/Dockerfile.clang5
-rw-r--r--.github/labeler.yml261
-rwxr-xr-x.github/scripts/build-artifacts.sh82
-rwxr-xr-x.github/scripts/build-dist.sh71
-rwxr-xr-x.github/scripts/build-static.sh2
-rwxr-xr-x.github/scripts/docker-test.sh50
-rwxr-xr-x.github/scripts/gen-docker-imagetool-args.py27
-rwxr-xr-x.github/scripts/gen-docker-tags.py37
-rwxr-xr-x.github/scripts/gen-matrix-build.py3
-rwxr-xr-x.github/scripts/gen-matrix-packaging.py2
-rwxr-xr-x.github/scripts/get-go-version.py39
-rwxr-xr-x.github/scripts/get-static-cache-key.sh1
-rwxr-xr-x.github/scripts/package-upload.sh13
-rwxr-xr-x.github/scripts/pkg-test.sh60
-rwxr-xr-x.github/scripts/run-updater-check.sh5
-rwxr-xr-x.github/scripts/run_install_with_dist_file.sh39
-rwxr-xr-x.github/scripts/upload-new-version-tags.sh3
-rw-r--r--.github/workflows/add-to-project.yml4
-rw-r--r--.github/workflows/build-macos.yml143
-rw-r--r--.github/workflows/build.yml282
-rw-r--r--.github/workflows/checks.yml84
-rw-r--r--.github/workflows/cloud_regression.yml15
-rw-r--r--.github/workflows/codeql.yml54
-rw-r--r--.github/workflows/coverity.yml7
-rw-r--r--.github/workflows/dashboard-pr.yml4
-rw-r--r--.github/workflows/docker.yml708
-rw-r--r--.github/workflows/docs.yml29
-rw-r--r--.github/workflows/generate-integrations.yml29
-rw-r--r--.github/workflows/go-tests.yml127
-rw-r--r--.github/workflows/kickstart-upload.yml59
-rw-r--r--.github/workflows/labeler.yml2
-rw-r--r--.github/workflows/monitor-releases.yml34
-rw-r--r--.github/workflows/packaging.yml85
-rw-r--r--.github/workflows/platform-eol-check.yml5
-rw-r--r--.github/workflows/release.yml4
-rw-r--r--.github/workflows/repoconfig-packages.yml42
-rw-r--r--.github/workflows/review.yml130
42 files changed, 1830 insertions, 940 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 7f368ceb7..95116696f 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,52 +5,49 @@
* @Ferroin
# Ownership by directory structure
-.github/ @Ferroin @tkatsoulas
-aclk/ @stelfrag @underhood
-build/ @Ferroin @tkatsoulas
-contrib/debian @Ferroin @tkatsoulas
-collectors/ @thiagoftsm
-collectors/ebpf.plugin/ @thiagoftsm
-collectors/charts.d.plugin/ @ilyam8 @Ferroin
-collectors/freebsd.plugin/ @thiagoftsm
-collectors/macos.plugin/ @thiagoftsm
-collectors/python.d.plugin/ @ilyam8
-collectors/cups.plugin/ @thiagoftsm
-exporting/ @thiagoftsm
-daemon/ @thiagoftsm @vkalintiris
-database/ @thiagoftsm @vkalintiris
-docs/ @tkatsoulas @Ancairon
-health/ @thiagoftsm @vkalintiris @MrZammler
-health/health.d/ @thiagoftsm @MrZammler
-health/notifications/ @Ferroin @thiagoftsm @MrZammler
-ml/ @vkalintiris
-libnetdata/ @thiagoftsm @vkalintiris
-packaging/ @Ferroin @tkatsoulas
-registry/ @novykh
-streaming/ @thiagoftsm
-system/ @Ferroin @tkatsoulas
-tests/ @Ferroin @vkalintiris @tkatsoulas
-web/ @thiagoftsm @vkalintiris
-web/gui/ @novykh
-logsmanagement/ @Dim-P @thiagoftsm
+.github/ @netdata/agent-sre
+src/aclk/ @stelfrag
+src/collectors/ @thiagoftsm
+src/collectors/ebpf.plugin/ @thiagoftsm
+src/collectors/charts.d.plugin/ @ilyam8 @Ferroin
+src/collectors/freebsd.plugin/ @thiagoftsm
+src/collectors/macos.plugin/ @thiagoftsm
+src/collectors/python.d.plugin/ @ilyam8
+src/collectors/cups.plugin/ @thiagoftsm
+src/exporting/ @thiagoftsm
+src/daemon/ @thiagoftsm @vkalintiris
+src/database/ @thiagoftsm @vkalintiris
+docs/ @Ancairon
+src/go/ @ilyam8
+src/health/ @thiagoftsm @vkalintiris
+src/health/health.d/ @thiagoftsm
+src/health/notifications/ @Ferroin @thiagoftsm
+src/ml/ @vkalintiris
+src/libnetdata/ @thiagoftsm @vkalintiris
+packaging/ @netdata/agent-sre
+packaging/cmake @Ferroin @vkalintiris
+src/registry/ @novykh
+src/streaming/ @thiagoftsm
+system/ @netdata/agent-sre
+tests/ @netdata/agent-sre @vkalintiris
+src/web/ @thiagoftsm @vkalintiris
+src/web/gui/ @novykh
+src/logsmanagement/ @thiagoftsm
# Ownership by filetype (overwrites ownership by directory)
-*.am @Ferroin @tkatsoulas
-*.md @tkatsoulas @Ancairon
-*.mdx @tkatsoulas @Ancairon
-Dockerfile* @Ferroin @tkatsoulas
+*.md @Ancairon
+*.mdx @Ancairon
+Dockerfile* @netdata/agent-sre
# Ownership of specific files
-.gitignore @Ferroin @tkatsoulas @vkalintiris
-.eslintrc @Ferroin @tkatsoulas
-.eslintignore @Ferroin @tkatsoulas
-.csslintrc @Ferroin @tkatsoulas
-.codeclimate.yml @Ferroin @tkatsoulas
-.codacy.yml @Ferroin @tkatsoulas
-.yamllint.yml @Ferroin @tkatsoulas
-netdata.spec.in @Ferroin @tkatsoulas
-netdata-installer.sh @Ferroin @tkatsoulas
-packaging/version @netdatabot @Ferroin @tkatsoulas
+.gitignore @netdata/agent-sre @vkalintiris
+.codacy.yml @netdata/agent-sre
+.yamllint.yml @netdata/agent-sre
+CMakeLists.txt @netdata/agent-sre @vkalintiris
+netdata.spec.in @netdata/agent-sre
+netdata-installer.sh @netdata/agent-sre
+packaging/version @netdatabot @netdata/agent-sre
+packaging/repoconfig/CMakelists.txt @netdata/agent-sre
-LICENSE.md @Ferroin @tkatsoulas @vkalintiris
-CHANGELOG.md @netdatabot @Ferroin @tkatsoulas
+LICENSE.md @netdata/agent-sre @vkalintiris
+CHANGELOG.md @netdatabot @netdata/agent-sre
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 79678d7b5..196717dba 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -7,6 +7,9 @@ contact_links:
- name: "Netdata Cloud"
url: https://github.com/netdata/netdata-cloud/issues/new/choose
about: Create a report to help us improve our web application
+ - name: "Netdata Mobile App"
+ url: https://github.com/netdata/netdata-cloud/issues/new/choose
+ about: Create a report to help us improve our Mobile App
- name: Community
url: https://netdata.cloud/community
about: If you don't know where to start, visit our community page!
diff --git a/.github/codeql/python-config.yml b/.github/codeql/python-config.yml
index c82727ce3..a31b3c805 100644
--- a/.github/codeql/python-config.yml
+++ b/.github/codeql/python-config.yml
@@ -1,10 +1,9 @@
paths-ignore:
- .github
- build_external/
- - ml/dlib
- - ml/json
+ - src/ml/dlib
- tests/api
- - web/gui
- - collectors/python.d.plugin/python_modules/pyyaml*
- - collectors/python.d.plugin/python_modules/third_party
- - collectors/python.d.plugin/python_modules/urllib3
+ - src/web/gui
+ - src/collectors/python.d.plugin/python_modules/pyyaml*
+ - src/collectors/python.d.plugin/python_modules/third_party
+ - src/collectors/python.d.plugin/python_modules/urllib3
diff --git a/.github/data/distros.yml b/.github/data/distros.yml
index 9175a5c73..838cb72fc 100644
--- a/.github/data/distros.yml
+++ b/.github/data/distros.yml
@@ -16,6 +16,15 @@ arch_order: # sort order for per-architecture jobs in CI
- armhfp
- arm64
- aarch64
+default_sentry: &default_sentry # Default configuration for Sentry usage
+ amd64: false
+ x86_64: false
+ i386: false
+ armhf: false
+ armhfp: false
+ arm64: false
+ aarch64: false
+default_builder_rev: &def_builder_rev v1
include:
- &alpine
distro: alpine
@@ -23,6 +32,7 @@ include:
support_type: Community
notes: ''
eol_check: false
+ bundle_sentry: *default_sentry
env_prep: |
apk add -U bash
jsonc_removal: |
@@ -30,17 +40,22 @@ include:
test:
ebpf-core: true
- <<: *alpine
- version: "3.18"
+ version: "3.20"
support_type: Core
notes: ''
eol_check: true
- <<: *alpine
- version: "3.17"
+ version: "3.19"
+ support_type: Core
+ notes: ''
+ eol_check: true
+ - <<: *alpine
+ version: "3.18"
support_type: Intermediate
notes: ''
eol_check: true
- <<: *alpine
- version: "3.16"
+ version: "3.17"
support_type: Intermediate
notes: ''
eol_check: true
@@ -50,6 +65,7 @@ include:
support_type: Intermediate
notes: ''
eol_check: false
+ bundle_sentry: *default_sentry
env_prep: |
pacman --noconfirm -Syu && pacman --noconfirm -Sy grep libffi
test:
@@ -61,28 +77,37 @@ include:
support_type: Core
notes: ''
eol_check: 'amazon-linux'
+ bundle_sentry: *default_sentry
packages: &amzn_packages
type: rpm
repo_distro: amazonlinux/2
+ builder_rev: *def_builder_rev
arches:
- x86_64
- aarch64
- test:
+ test: &amzn_test
ebpf-core: false
+ skip-local-build: true
- <<: *amzn
version: "2023"
packages:
<<: *amzn_packages
repo_distro: amazonlinux/2023
+ test:
+ <<: *amzn_test
+ skip-local-build: false
- distro: centos
version: "7"
+ base_image: "netdata/legacy:centos7"
support_type: Core
notes: ''
eol_check: false
+ bundle_sentry: *default_sentry
packages:
type: rpm
repo_distro: el/7
+ builder_rev: *def_builder_rev
alt_links:
- el/7Server
- el/7Client
@@ -90,6 +115,7 @@ include:
- x86_64
test:
ebpf-core: false
+ skip-local-build: true
- &centos_stream
distro: centos-stream
@@ -100,20 +126,16 @@ include:
jsonc_removal: |
dnf remove -y json-c-devel
eol_check: true
+ bundle_sentry: *default_sentry
packages: &cs_packages
type: rpm
repo_distro: el/c9s
+ builder_rev: *def_builder_rev
arches:
- x86_64
- aarch64
test:
ebpf-core: true
- - <<: *centos_stream
- version: '8'
- base_image: 'quay.io/centos/centos:stream8'
- packages:
- <<: *cs_packages
- repo_distro: el/c8s
- &debian
distro: debian
@@ -122,6 +144,9 @@ include:
notes: ''
base_image: debian:bookworm
eol_check: true
+ bundle_sentry:
+ <<: *default_sentry
+ amd64: true
env_prep: |
apt-get update
jsonc_removal: |
@@ -129,6 +154,7 @@ include:
packages: &debian_packages
type: deb
repo_distro: debian/bookworm
+ builder_rev: v2
arches:
- i386
- amd64
@@ -147,6 +173,7 @@ include:
- <<: *debian
version: "10"
base_image: debian:buster
+ bundle_sentry: *default_sentry
packages:
<<: *debian_packages
repo_distro: debian/buster
@@ -155,60 +182,55 @@ include:
- &fedora
distro: fedora
- version: "39"
+ version: "40"
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry: *default_sentry
jsonc_removal: |
dnf remove -y json-c-devel
packages: &fedora_packages
type: rpm
- repo_distro: fedora/39
+ repo_distro: fedora/40
+ builder_rev: *def_builder_rev
arches:
- x86_64
- aarch64
test:
ebpf-core: true
- <<: *fedora
- version: "38"
- packages:
- <<: *fedora_packages
- repo_distro: fedora/38
- test:
- ebpf-core: true
- - <<: *fedora
- version: "37"
+ version: "39"
packages:
<<: *fedora_packages
- repo_distro: fedora/37
+ repo_distro: fedora/39
test:
ebpf-core: true
- &opensuse
distro: opensuse
- version: "15.5"
+ version: "15.6"
support_type: Core
notes: ''
eol_check: true
- base_image: opensuse/leap:15.5
+ bundle_sentry: *default_sentry
+ base_image: opensuse/leap:15.6
jsonc_removal: |
zypper rm -y libjson-c-devel
packages: &opensuse_packages
type: rpm
- repo_distro: opensuse/15.5
+ repo_distro: opensuse/15.6
+ builder_rev: *def_builder_rev
arches:
- x86_64
- aarch64
test:
ebpf-core: true
- <<: *opensuse
- version: "15.4"
- support_type: Core
- notes: ''
- base_image: opensuse/leap:15.4
+ version: "15.5"
+ base_image: opensuse/leap:15.5
packages:
<<: *opensuse_packages
- repo_distro: opensuse/15.4
+ repo_distro: opensuse/15.5
- &oracle
distro: oraclelinux
@@ -216,11 +238,13 @@ include:
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry: *default_sentry
jsonc_removal: |
dnf remove -y json-c-devel
packages: &oracle_packages
type: rpm
repo_distro: ol/8
+ builder_rev: *def_builder_rev
arches:
- x86_64
- aarch64
@@ -240,9 +264,11 @@ include:
jsonc_removal: |
dnf remove -y json-c-devel
eol_check: true
+ bundle_sentry: *default_sentry
packages: &rocky_packages
type: rpm
repo_distro: el/9
+ builder_rev: *def_builder_rev
alt_links:
- el/9Server
- el/9Client
@@ -264,17 +290,21 @@ include:
- &ubuntu
distro: ubuntu
- version: "22.04"
+ version: "24.04"
support_type: Core
notes: ''
eol_check: true
+ bundle_sentry:
+ <<: *default_sentry
+ amd64: true
env_prep: |
rm -f /etc/apt/apt.conf.d/docker && apt-get update
jsonc_removal: |
apt-get remove -y libjson-c-dev
packages: &ubuntu_packages
type: deb
- repo_distro: ubuntu/jammy
+ repo_distro: ubuntu/noble
+ builder_rev: v2
arches:
- amd64
- armhf
@@ -287,15 +317,37 @@ include:
<<: *ubuntu_packages
repo_distro: ubuntu/mantic
- <<: *ubuntu
- version: "23.04"
+ version: "22.04"
packages:
<<: *ubuntu_packages
- repo_distro: ubuntu/lunar
+ repo_distro: ubuntu/jammy
- <<: *ubuntu
version: "20.04"
packages:
<<: *ubuntu_packages
repo_distro: ubuntu/focal
+legacy: # Info for platforms we used to support and still need to handle packages for
+ - <<: *fedora
+ version: "37"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/37
+ - <<: *fedora
+ version: "38"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/38
+ - <<: *opensuse
+ version: "15.4"
+ packages:
+ <<: *opensuse_packages
+ repo_distro: opensuse/15.4
+ - <<: *centos_stream
+ version: '8'
+ base_image: 'quay.io/centos/centos:stream8'
+ packages:
+ <<: *cs_packages
+ repo_distro: el/c8s
no_include: # Info for platforms not covered in CI
- distro: docker
version: "19.03 or newer"
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index b02b155d3..48b729622 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -7,3 +7,9 @@ updates:
labels:
- "no changelog"
- "area/ci"
+ - package-ecosystem: gomod
+ directory: /src/go/collectors/go.d.plugin
+ schedule:
+ interval: weekly
+ labels:
+ - "area/go"
diff --git a/.github/dockerfiles/Dockerfile.clang b/.github/dockerfiles/Dockerfile.clang
index 62bb01941..869254198 100644
--- a/.github/dockerfiles/Dockerfile.clang
+++ b/.github/dockerfiles/Dockerfile.clang
@@ -1,4 +1,4 @@
-FROM debian:buster AS build
+FROM debian:12 AS build
# Disable apt/dpkg interactive mode
ENV DEBIAN_FRONTEND=noninteractive
@@ -9,7 +9,8 @@ RUN /tmp/install-required-packages.sh --dont-wait --non-interactive netdata-all
# Install Clang and set as default CC
RUN apt-get install -y clang && \
- update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100
+ update-alternatives --install /usr/bin/cc cc /usr/bin/clang 100 && \
+ update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 100
WORKDIR /netdata
COPY . .
diff --git a/.github/labeler.yml b/.github/labeler.yml
index 0ea825ef4..0cbec181d 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -11,148 +11,287 @@
# Please keep the labels sorted and deduplicated.
area/ACLK:
- - aclk/**
- - database/sqlite/sqlite_aclk*
- - mqtt_websockets
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/aclk/**
+ - src/database/sqlite/sqlite_aclk*
+ - src/aclk/mqtt_websockets
area/claim:
- - claim/*
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/claim/*
area/exporting:
- - exporting/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/exporting/**
area/build:
- - build/**
- - build_external/**
- - CMakeLists.txt
- - configure.ac
- - Makefile.am
- - "**/Makefile.am"
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - build/**
+ - build_external/**
+ - CMakeLists.txt
+ - configure.ac
+ - Makefile.am
+ - "**/Makefile.am"
area/ci:
- - .github/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - .github/**
area/daemon:
- - daemon/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/daemon/**
area/database:
- - database/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/database/**
area/docs:
- - "*.md"
- - "**/*.md"
- - "**/*.mdx"
- - diagrams/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - "*.md"
+ - "**/*.md"
+ - "**/*.mdx"
+ - docs/diagrams/**
# -----------------collectors----------------------
area/collectors:
- - collectors/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/**
+ - src/go/collectors/go.d.plugin/**
collectors/plugins.d:
- - collectors/plugins.d/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/plugins.d/**
collectors/apps:
- - collectors/apps.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/apps.plugin/**
collectors/cgroups:
- - collectors/cgroups.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/cgroups.plugin/**
collectors/charts.d:
- - collectors/charts.d.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/charts.d.plugin/**
collectors/cups:
- - collectors/cups.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/cups.plugin/**
collectors/debugfs:
- - collectors/debugfs.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/debugfs.plugin/**
collectors/diskspace:
- - collectors/diskspace.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/diskspace.plugin/**
collectors/ebpf:
- - collectors/ebpf.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/ebpf.plugin/**
collectors/freebsd:
- - collectors/freebsd.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/freebsd.plugin/**
collectors/freeipmi:
- - collectors/freeipmi.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/freeipmi.plugin/**
+
+collectors/go.d:
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/go/collectors/go.d.plugin/**
collectors/idlejitter:
- - collectors/idlejitter.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/idlejitter.plugin/**
collectors/ioping:
- - collectors/ioping.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/ioping.plugin/**
collectors/macos:
- - collectors/macos.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/macos.plugin/**
collectors/nfacct:
- - collectors/nfacct.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/nfacct.plugin/**
collectors/perf:
- - collectors/perf.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/perf.plugin/**
collectors/proc:
- - collectors/proc.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/proc.plugin/**
collectors/python.d:
- - collectors/python.d.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/python.d.plugin/**
collectors/slabinfo:
- - collectors/slabinfo.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/slabinfo.plugin/**
collectors/statsd:
- - collectors/statsd.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/statsd.plugin/**
collectors/systemd-journal:
- - collectors/systemd-journal.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/systemd-journal.plugin/**
collectors/tc:
- - collectors/tc.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/tc.plugin/**
collectors/timex:
- - collectors/timex.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/timex.plugin/**
+
+collectors/windows:
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/windows.plugin/**
collectors/xenstat:
- - collectors/xenstat.plugin/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/collectors/xenstat.plugin/**
# ----------------/collectors----------------------
+area/go:
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/go/**
+
area/health:
- - health/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/health/**
area/metadata:
- - "**/*metadata.yaml"
- - integrations/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - "**/*metadata.yaml"
+ - integrations/**
area/ml:
- - ml/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/ml/**
area/packaging:
- - contrib/**
- - packaging/**
- - system/**
- - Dockerfile*
- - netdata-installer.sh
- - netdata.spec.in
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - packaging/**
+ - system/**
+ - Dockerfile*
+ - netdata-installer.sh
+ - netdata.spec.in
area/registry:
- - registry/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/registry/**
area/streaming:
- - streaming/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/streaming/**
area/tests:
- - tests/**
- - daemon/unit_test*
- - coverity-scan.sh
- - cppcheck.sh
- - netdata.cppcheck
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - tests/**
+ - src/daemon/unit_test*
+ - packaging/utils/coverity-scan.sh
area/web:
- - web/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/web/**
area/logs-management:
- - logsmanagement/**
+ - any:
+ - changed-files:
+ - any-glob-to-any-file:
+ - src/logsmanagement/**
diff --git a/.github/scripts/build-artifacts.sh b/.github/scripts/build-artifacts.sh
deleted file mode 100755
index 569c79a5a..000000000
--- a/.github/scripts/build-artifacts.sh
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/bin/sh
-#
-# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
-# and netdata-vX.Y.Z-xxxx.gz.run (static x86_64) artifacts.
-
-set -e
-
-# shellcheck source=.github/scripts/functions.sh
-. "$(dirname "$0")/functions.sh"
-
-NAME="${NAME:-netdata}"
-VERSION="${VERSION:-"$(git describe)"}"
-BASENAME="$NAME-$VERSION"
-
-prepare_build() {
- progress "Preparing build"
- (
- test -d artifacts || mkdir -p artifacts
- echo "${VERSION}" > packaging/version
- ) >&2
-}
-
-build_dist() {
- progress "Building dist"
- (
- command -v git > /dev/null && [ -d .git ] && git clean -d -f
- autoreconf -ivf
- ./configure \
- --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --libexecdir=/usr/libexec \
- --with-zlib \
- --with-math \
- --with-user=netdata \
- --disable-dependency-tracking \
- CFLAGS=-O2
- make dist
- mv "${BASENAME}.tar.gz" artifacts/
- ) >&2
-}
-
-build_static_x86_64() {
- progress "Building static x86_64"
- (
- command -v git > /dev/null && [ -d .git ] && git clean -d -f
- USER="" ./packaging/makeself/build-x86_64-static.sh
- ) >&2
-}
-
-prepare_assets() {
- progress "Preparing assets"
- (
- cp packaging/version artifacts/latest-version.txt
-
- cd artifacts || exit 1
- ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
- ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
- sha256sum -b ./* > "sha256sums.txt"
- ) >&2
-}
-
-steps="prepare_build build_dist build_static_x86_64"
-steps="$steps prepare_assets"
-
-_main() {
- for step in $steps; do
- if ! run "$step"; then
- if [ -t 1 ]; then
- debug
- else
- fail "Build failed"
- fi
- fi
- done
-
- echo "🎉 All Done!"
-}
-
-if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
- _main "$@"
-fi
diff --git a/.github/scripts/build-dist.sh b/.github/scripts/build-dist.sh
deleted file mode 100755
index 027b62147..000000000
--- a/.github/scripts/build-dist.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/sh
-#
-# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
-
-set -e
-
-# shellcheck source=.github/scripts/functions.sh
-. "$(dirname "$0")/functions.sh"
-
-NAME="${NAME:-netdata}"
-VERSION="${VERSION:-"$(git describe --always)"}"
-BASENAME="$NAME-$VERSION"
-
-prepare_build() {
- progress "Preparing build"
- (
- test -d artifacts || mkdir -p artifacts
- echo "${VERSION}" > packaging/version
- ) >&2
-}
-
-build_dist() {
- progress "Building dist"
- (
- command -v git > /dev/null && [ -d .git ] && git clean -d -f
- autoreconf -ivf
- ./configure \
- --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --libexecdir=/usr/libexec \
- --with-zlib \
- --with-math \
- --with-user=netdata \
- --disable-dependency-tracking \
- CFLAGS=-O2
- make dist
- mv "${BASENAME}.tar.gz" artifacts/
- ) >&2
-}
-
-prepare_assets() {
- progress "Preparing assets"
- (
- cp packaging/version artifacts/latest-version.txt
- cd artifacts || exit 1
- ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
- ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
- sha256sum -b ./* > "sha256sums.txt"
- ) >&2
-}
-
-steps="prepare_build build_dist prepare_assets"
-
-_main() {
- for step in $steps; do
- if ! run "$step"; then
- if [ -t 1 ]; then
- debug
- else
- fail "Build failed"
- fi
- fi
- done
-
- echo "🎉 All Done!"
-}
-
-if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
- _main "$@"
-fi
diff --git a/.github/scripts/build-static.sh b/.github/scripts/build-static.sh
index e81051438..9b29a3d99 100755
--- a/.github/scripts/build-static.sh
+++ b/.github/scripts/build-static.sh
@@ -22,7 +22,7 @@ prepare_build() {
build_static() {
progress "Building static ${BUILDARCH}"
(
- USER="" ./packaging/makeself/build-static.sh "${BUILDARCH}"
+ EXTRA_INSTALL_FLAGS="${EXTRA_INSTALL_FLAGS}" USER="" ./packaging/makeself/build-static.sh "${BUILDARCH}"
) >&2
}
diff --git a/.github/scripts/docker-test.sh b/.github/scripts/docker-test.sh
index 0f5fa469c..0ef064501 100755
--- a/.github/scripts/docker-test.sh
+++ b/.github/scripts/docker-test.sh
@@ -1,41 +1,8 @@
#!/bin/sh
-export DEBIAN_FRONTEND=noninteractive
-
-wait_for() {
- host="${1}"
- port="${2}"
- name="${3}"
- timeout="30"
-
- if command -v nc > /dev/null ; then
- netcat="nc"
- elif command -v netcat > /dev/null ; then
- netcat="netcat"
- else
- printf "Unable to find a usable netcat command.\n"
- return 1
- fi
-
- printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
-
- sleep 30
+SCRIPT_DIR="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd -P)"
- i=0
- while ! ${netcat} -z "${host}" "${port}"; do
- sleep 1
- if [ "$i" -gt "$timeout" ]; then
- printf "Timed out!\n"
- docker ps -a
- echo "::group::Netdata container logs"
- docker logs netdata 2>&1
- echo "::endgroup::"
- return 1
- fi
- i="$((i + 1))"
- done
- printf "OK\n"
-}
+export DEBIAN_FRONTEND=noninteractive
if [ -z "$(command -v nc 2>/dev/null)" ] && [ -z "$(command -v netcat 2>/dev/null)" ]; then
sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y netcat
@@ -55,10 +22,9 @@ docker run -d --name=netdata \
--security-opt apparmor=unconfined \
netdata/netdata:test
-wait_for localhost 19999 netdata || exit 1
-
-curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
-
-cat ./response
-
-jq '.version' ./response || exit 1
+if ! "${SCRIPT_DIR}/../../packaging/runtime-check.sh"; then
+ docker ps -a
+ echo "::group::Netdata container logs"
+ docker logs netdata 2>&1
+ echo "::endgroup::"
+fi
diff --git a/.github/scripts/gen-docker-imagetool-args.py b/.github/scripts/gen-docker-imagetool-args.py
new file mode 100755
index 000000000..c0eaa1cfc
--- /dev/null
+++ b/.github/scripts/gen-docker-imagetool-args.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+import sys
+
+from pathlib import Path
+
+DIGEST_PATH = Path(sys.argv[1])
+TAG_PREFIX = sys.argv[2]
+TAGS = sys.argv[3]
+
+if TAG_PREFIX:
+ PUSH_TAGS = tuple([
+ t for t in TAGS.split(',') if t.startswith(TAG_PREFIX)
+ ])
+else:
+ PUSH_TAGS = tuple([
+ t for t in TAGS.split(',') if t.startswith('netdata/')
+ ])
+
+IMAGE_NAME = PUSH_TAGS[0].split(':')[0]
+
+images = []
+
+for f in DIGEST_PATH.glob('*'):
+ images.append(f'{IMAGE_NAME}@sha256:{f.name}')
+
+print(f'-t {" -t ".join(PUSH_TAGS)} {" ".join(images)}')
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
index 8c88d3b5e..f60e1cd41 100755
--- a/.github/scripts/gen-docker-tags.py
+++ b/.github/scripts/gen-docker-tags.py
@@ -2,18 +2,35 @@
import sys
-version = sys.argv[1].split('.')
-suffix = sys.argv[2]
+github_event = sys.argv[1]
+version = sys.argv[2]
-REPO = f'netdata/netdata{suffix}'
-GHCR = f'ghcr.io/{REPO}'
-QUAY = f'quay.io/{REPO}'
+REPO = 'netdata/netdata'
-tags = []
+REPOS = (
+ REPO,
+ f'quay.io/{REPO}',
+ f'ghcr.io/{REPO}',
+)
-for repo in [REPO, GHCR, QUAY]:
- tags.append(':'.join([repo, version[0]]))
- tags.append(':'.join([repo, '.'.join(version[0:2])]))
- tags.append(':'.join([repo, '.'.join(version[0:3])]))
+match version:
+ case '':
+ tags = (f'{REPO}:test',)
+ case 'nightly':
+ tags = tuple([
+ f'{r}:{t}' for r in REPOS for t in ('edge', 'latest')
+ ])
+ case _:
+ v = f'v{version}'.split('.')
+
+ tags = tuple([
+ f'{r}:{t}' for r in REPOS for t in (
+ v[0],
+ '.'.join(v[0:2]),
+ '.'.join(v[0:3]),
+ )
+ ])
+
+ tags = tags + tuple([f'{r}:stable' for r in REPOS])
print(','.join(tags))
diff --git a/.github/scripts/gen-matrix-build.py b/.github/scripts/gen-matrix-build.py
index 3185e8836..9a70a44ce 100755
--- a/.github/scripts/gen-matrix-build.py
+++ b/.github/scripts/gen-matrix-build.py
@@ -11,6 +11,9 @@ with open('.github/data/distros.yml') as f:
data = yaml.load(f)
for i, v in enumerate(data['include']):
+ if v['test'].get('skip-local-build', False):
+ continue
+
e = {
'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
'version': v['version'],
diff --git a/.github/scripts/gen-matrix-packaging.py b/.github/scripts/gen-matrix-packaging.py
index 9347cd767..c00ff3181 100755
--- a/.github/scripts/gen-matrix-packaging.py
+++ b/.github/scripts/gen-matrix-packaging.py
@@ -27,7 +27,9 @@ for i, v in enumerate(data['include']):
'repo_distro': data['include'][i]['packages']['repo_distro'],
'format': data['include'][i]['packages']['type'],
'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
+ 'builder_rev': data['include'][i]['packages']['builder_rev'],
'platform': data['platform_map'][arch],
+ 'bundle_sentry': data['include'][i]['bundle_sentry'][arch],
'arch': arch
})
diff --git a/.github/scripts/get-go-version.py b/.github/scripts/get-go-version.py
new file mode 100755
index 000000000..105c537c8
--- /dev/null
+++ b/.github/scripts/get-go-version.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+
+import json
+import os
+import pathlib
+
+from packaging.version import parse
+
+SCRIPT_PATH = pathlib.Path(__file__).parents[0]
+REPO_ROOT = SCRIPT_PATH.parents[1]
+GO_SRC = REPO_ROOT / 'src' / 'go'
+
+GITHUB_OUTPUT = pathlib.Path(os.environ['GITHUB_OUTPUT'])
+
+version = parse('1.0.0')
+modules = []
+
+for modfile in GO_SRC.glob('**/go.mod'):
+ moddata = modfile.read_text()
+
+ for line in moddata.splitlines():
+ if line.startswith('go '):
+ version = max(version, parse(line.split()[1]))
+ break
+
+ for main in modfile.parent.glob('**/main.go'):
+ mainpath = main.relative_to(modfile.parent).parent
+
+ if 'examples' in mainpath.parts:
+ continue
+
+ modules.append({
+ 'module': str(modfile.parent),
+ 'version': str(version),
+ 'build_target': f'github.com/netdata/netdata/go/{ modfile.parts[-2] }/{ str(mainpath) }/',
+ })
+
+with GITHUB_OUTPUT.open('a') as f:
+ f.write(f'matrix={ json.dumps({"include": modules}) }\n')
diff --git a/.github/scripts/get-static-cache-key.sh b/.github/scripts/get-static-cache-key.sh
index 5093b3327..e45ae30bd 100755
--- a/.github/scripts/get-static-cache-key.sh
+++ b/.github/scripts/get-static-cache-key.sh
@@ -8,6 +8,7 @@ docker pull --platform "${platform}" netdata/static-builder:${builder_rev}
# shellcheck disable=SC2046
cat $(find packaging/makeself/jobs -type f ! -regex '.*\(netdata\|-makeself\).*') > /tmp/static-cache-key-data
+cat packaging/makeself/bundled-packages.version >> /tmp/static-cache-key-data
docker run -it --rm --platform "${platform}" netdata/static-builder:${builder_rev} sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data
diff --git a/.github/scripts/package-upload.sh b/.github/scripts/package-upload.sh
index 13d63b4a7..9305ffa67 100755
--- a/.github/scripts/package-upload.sh
+++ b/.github/scripts/package-upload.sh
@@ -2,18 +2,19 @@
set -e
-host="packages.netdata.cloud"
user="netdatabot"
-distro="${1}"
-arch="${2}"
-format="${3}"
-repo="${4}"
+host="${1}"
+distro="${2}"
+arch="${3}"
+format="${4}"
+repo="${5}"
+pkg_src="${6:-./artifacts}"
staging="${TMPDIR:-/tmp}/package-staging"
prefix="/home/netdatabot/incoming/${repo}/"
-packages="$(find artifacts -name "*.${format}")"
+packages="$(find "${pkg_src}" -name "*.${format}")"
mkdir -p "${staging}"
diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh
index 35767bf2e..f0c0dc11a 100755
--- a/.github/scripts/pkg-test.sh
+++ b/.github/scripts/pkg-test.sh
@@ -1,5 +1,7 @@
#!/bin/sh
+SCRIPT_DIR="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd -P)"
+
install_debian_like() {
# This is needed to ensure package installs don't prompt for any user input.
export DEBIAN_FRONTEND=noninteractive
@@ -19,7 +21,9 @@ install_debian_like() {
! -name '*dbgsym*' ! -name '*cups*' ! -name '*freeipmi*') || exit 3
# Install testing tools
- apt-get install -y --no-install-recommends curl "${netcat}" jq || exit 1
+ apt-get install -y --no-install-recommends curl dpkg-dev "${netcat}" jq || exit 1
+
+ dpkg-architecture --equal amd64 || NETDATA_SKIP_EBPF=1
}
install_fedora_like() {
@@ -38,6 +42,8 @@ install_fedora_like() {
# Install testing tools
"${PKGMGR}" install -y curl nc jq || exit 1
+
+ [ "$(rpm --eval '%{_build_arch}')" = "x86_64" ] || NETDATA_SKIP_EBPF=1
}
install_centos() {
@@ -60,6 +66,8 @@ install_centos() {
# Install testing tools
# shellcheck disable=SC2086
"${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+
+ [ "$(rpm --eval '%{_build_arch}')" = "x86_64" ] || NETDATA_SKIP_EBPF=1
}
install_amazon_linux() {
@@ -76,6 +84,8 @@ install_amazon_linux() {
# Install testing tools
# shellcheck disable=SC2086
"${PKGMGR}" install -y ${opts} curl nc jq || exit 1
+
+ [ "$(rpm --eval '%{_build_arch}')" = "x86_64" ] || NETDATA_SKIP_EBPF=1
}
install_suse_like() {
@@ -88,43 +98,14 @@ install_suse_like() {
# Install testing tools
zypper install -y --allow-downgrade --no-recommends curl netcat-openbsd jq || exit 1
+
+ [ "$(rpm --eval '%{_build_arch}')" = "x86_64" ] || NETDATA_SKIP_EBPF=1
}
dump_log() {
cat ./netdata.log
}
-wait_for() {
- host="${1}"
- port="${2}"
- name="${3}"
- timeout="30"
-
- if command -v nc > /dev/null ; then
- netcat="nc"
- elif command -v netcat > /dev/null ; then
- netcat="netcat"
- else
- printf "Unable to find a usable netcat command.\n"
- return 1
- fi
-
- printf "Waiting for %s on %s:%s ... " "${name}" "${host}" "${port}"
-
- sleep 30
-
- i=0
- while ! ${netcat} -z "${host}" "${port}"; do
- sleep 1
- if [ "$i" -gt "$timeout" ]; then
- printf "Timed out!\n"
- return 1
- fi
- i="$((i + 1))"
- done
- printf "OK\n"
-}
-
case "${DISTRO}" in
debian | ubuntu)
install_debian_like
@@ -132,7 +113,7 @@ case "${DISTRO}" in
fedora | oraclelinux)
install_fedora_like
;;
- centos| centos-stream | rockylinux | almalinux)
+ centos | centos-stream | rockylinux | almalinux)
install_centos
;;
amazonlinux)
@@ -149,14 +130,15 @@ esac
trap dump_log EXIT
-/usr/sbin/netdata -D > ./netdata.log 2>&1 &
-
-wait_for localhost 19999 netdata || exit 1
+export NETDATA_LIBEXEC_PREFIX=/usr/libexec/netdata
+export NETDATA_SKIP_LIBEXEC_PARTS="logs-management|freeipmi|xenstat|nfacct|cups"
-curl -sS http://127.0.0.1:19999/api/v1/info > ./response || exit 1
+if [ -n "${NETDATA_SKIP_EBPF}" ]; then
+ export NETDATA_SKIP_LIBEXEC_PARTS="${NETDATA_SKIP_LIBEXEC_PARTS}|ebpf"
+fi
-cat ./response
+/usr/sbin/netdata -D > ./netdata.log 2>&1 &
-jq '.version' ./response || exit 1
+"${SCRIPT_DIR}/../../packaging/runtime-check.sh" || exit 1
trap - EXIT
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
index 1224d8f67..456a0e5d0 100755
--- a/.github/scripts/run-updater-check.sh
+++ b/.github/scripts/run-updater-check.sh
@@ -4,7 +4,7 @@ echo ">>> Installing CI support packages..."
/netdata/.github/scripts/ci-support-pkgs.sh
mkdir -p /etc/cron.daily # Needed to make auto-update checking work correctly on some platforms.
echo ">>> Installing Netdata..."
-/netdata/packaging/installer/kickstart.sh --dont-wait --build-only --disable-telemetry || exit 1
+/netdata/packaging/installer/kickstart.sh --dont-wait --build-only --dont-start-it --disable-telemetry ${EXTRA_INSTALL_FLAGS:+--local-build-options "${EXTRA_INSTALL_FLAGS}"} || exit 1
echo "::group::>>> Pre-Update Environment File Contents"
cat /etc/netdata/.environment
echo "::endgroup::"
@@ -12,7 +12,8 @@ echo "::group::>>> Pre-Update Netdata Build Info"
netdata -W buildinfo
echo "::endgroup::"
echo ">>> Updating Netdata..."
-export NETDATA_BASE_URL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
+export NETDATA_BASE_URL="http://localhost:8080/artifacts" # Pull the tarball from the local web server.
+echo 'NETDATA_ACCEPT_MAJOR_VERSIONS="1 9999"' > /etc/netdata/netdata-updater.conf
timeout 3600 /netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update
case "$?" in
diff --git a/.github/scripts/run_install_with_dist_file.sh b/.github/scripts/run_install_with_dist_file.sh
deleted file mode 100755
index 74652efdd..000000000
--- a/.github/scripts/run_install_with_dist_file.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-#
-# This script is evaluating netdata installation with the source from make dist
-#
-# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
-#
-# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud)
-
-set -e
-
-if [ $# -ne 1 ]; then
- printf >&2 "Usage: %s <dist_file>\n" "$(basename "$0")"
- exit 1
-fi
-
-distfile="${1}"
-shift
-
-printf >&2 "Opening dist archive %s ... " "${distfile}"
-tar -xovf "${distfile}"
-distdir="$(echo "${distfile}" | rev | cut -d. -f3- | rev)"
-cp -a packaging/installer/install-required-packages.sh "${distdir}/install-required-packages.sh"
-if [ ! -d "${distdir}" ]; then
- printf >&2 "ERROR: %s is not a directory" "${distdir}"
- exit 2
-fi
-
-printf >&2 "Entering %s and starting docker run ..." "${distdir}"
-
-pushd "${distdir}" || exit 1
-docker run \
- -e DISABLE_TELEMETRY=1 \
- -v "${PWD}:/netdata" \
- -w /netdata \
- "ubuntu:latest" \
- /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt install wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install-prefix /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"
-popd || exit 1
-
-echo "All Done!"
diff --git a/.github/scripts/upload-new-version-tags.sh b/.github/scripts/upload-new-version-tags.sh
index a9b0cd303..ffdfadfa0 100755
--- a/.github/scripts/upload-new-version-tags.sh
+++ b/.github/scripts/upload-new-version-tags.sh
@@ -2,9 +2,10 @@
set -e
-host="packages.netdata.cloud"
user="netdatabot"
+host="${1}"
+
prefix="/var/www/html/releases"
staging="${TMPDIR:-/tmp}/staging-new-releases"
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
index 986d836a1..7716cab07 100644
--- a/.github/workflows/add-to-project.yml
+++ b/.github/workflows/add-to-project.yml
@@ -13,13 +13,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Add issues to Agent project board
- uses: actions/add-to-project@v0.5.0
+ uses: actions/add-to-project@v1.0.1
with:
project-url: https://github.com/orgs/netdata/projects/32
github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
- name: Add issues to Product Bug project board
- uses: actions/add-to-project@v0.5.0
+ uses: actions/add-to-project@v1.0.1
with:
project-url: https://github.com/orgs/netdata/projects/45
github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
diff --git a/.github/workflows/build-macos.yml b/.github/workflows/build-macos.yml
new file mode 100644
index 000000000..d3bfa6d77
--- /dev/null
+++ b/.github/workflows/build-macos.yml
@@ -0,0 +1,143 @@
+---
+# CI code for build and test on macOS
+name: macOS Build and test
+on:
+ push: # Master branch checks only validate the build and generate artifacts for testing.
+ branches:
+ - master
+ pull_request: null # PR checks only validate the build and generate artifacts for testing.
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: check-files
+ uses: tj-actions/changed-files@v44
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
+ CMakeLists.txt
+ netdata-installer.sh
+ .github/workflows/build-macos.yml
+ .github/scripts/run-updater-check.sh
+ packaging/cmake/
+ packaging/installer/
+ packaging/*.sh
+ packaging/*.version
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
+ files_ignore: |
+ netdata.spec.in
+ **/*.md
+ packaging/repoconfig/
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ build-test:
+ env:
+ DISABLE_TELEMETRY: 1
+ runs-on: ${{ matrix.runner }}
+ needs:
+ - file-check
+ strategy:
+ fail-fast: false
+ max-parallel: 3
+ matrix:
+ include:
+ - name: macos-12
+ runner: macos-12
+ - name: macos-13
+ runner: macos-13
+ - name: macos-14-M1
+ runner: macos-14
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - uses: actions/checkout@v4
+ id: checkout
+ if: needs.file-check.outputs.run == 'true'
+ with:
+ submodules: recursive
+ - name: Install latest bash
+ id: install-bash
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ brew install bash
+ - name: Install netdata dependencies
+ id: install-nd-dep
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ bash ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
+ - name: Build from source
+ id: build-source
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ sudo bash ./netdata-installer.sh --install-no-prefix /usr/local/netdata --dont-wait --dont-start-it --require-cloud --one-time-build
+ - name: Test Agent start up
+ id: test-agent
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ /usr/local/netdata/usr/sbin/netdata -D > ./netdata.log 2>&1 &
+ ./packaging/runtime-check.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Build & test from source macOS failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: macOS Build and test.
+ Checkout: ${{ steps.checkout.outcome }}
+ Setup runner: ${{ steps.install-bash.outcome }}
+ Install netdata required packages: ${{ steps.install-nd-dep.outcome }}
+ Build from source: ${{ steps.build-source.outcome }}
+ Test Agent runtime: ${{ steps.test-agent.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.event_name != 'pull_request'
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4a6debc46..cd48a63bc 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -25,6 +25,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
run: ${{ steps.check-run.outputs.run }}
+ skip-go: ${{ steps.check-go.outputs.skip-go }}
steps:
- name: Checkout
id: checkout
@@ -32,45 +33,80 @@ jobs:
with:
fetch-depth: 0
submodules: recursive
- - name: Check files
- id: check-files
- uses: tj-actions/changed-files@v40
+ - name: Check source files
+ id: check-source-files
+ uses: tj-actions/changed-files@v44
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
- configure.ac
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
+ files_ignore: |
+ netdata.spec.in
+ **/*.md
+ - name: Check build files
+ id: check-build-files
+ uses: tj-actions/changed-files@v44
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.cmake
+ CMakeLists.txt
netdata-installer.sh
- **/Makefile*
- Makefile*
.github/data/distros.yml
.github/workflows/build.yml
.github/scripts/build-static.sh
.github/scripts/get-static-cache-key.sh
.github/scripts/gen-matrix-build.py
.github/scripts/run-updater-check.sh
- build/**
- packaging/makeself/**
- packaging/installer/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ packaging/cmake/
+ packaging/makeself/
+ packaging/installer/
+ packaging/*.sh
+ packaging/*.version
+ packaging/*.checksums
files_ignore: |
- netdata.spec.in
- **.md
+ **/*.md
+ packaging/repoconfig/
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ CHANGED_SOURCE_FILES: ${{ steps.check-source-files.outputs.all_changed_files }}
+ CHANGED_BUILD_FILES: ${{ steps.check-build-files.outputs.all_changed_files }}
+ run: |
+ for file in ${CHANGED_SOURCE_FILES} ${CHANGED_BUILD_FILES} ; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
- if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ if [ "${{ steps.check-source-files.outputs.any_modified }}" == "true" ] || [ "${{ steps.check-build-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo 'run=true' >> "${GITHUB_OUTPUT}"
else
echo 'run=false' >> "${GITHUB_OUTPUT}"
fi
+ - name: Check Go
+ id: check-go
+ env:
+ OTHER_CHANGED_FILES: ${{ steps.check-source-files.outputs.other_changed_files }}
+ run: |
+ if [ '${{ github.event_name }}' == 'pull_request' ]; then
+ if echo "${OTHER_CHANGED_FILES}" | grep -qE '(^|/)(.*\.go|go\.mod|go\.sum)$' || [ "${{ steps.check-build-files.outputs.any_modified }}" == "true" ]; then
+ echo 'skip-go=' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'skip-go=--disable-go' >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo 'skip-go=' >> "${GITHUB_OUTPUT}"
+ fi
build-dist: # Build the distribution tarball and store it as an artifact.
name: Build Distribution Tarball
@@ -105,24 +141,17 @@ jobs:
id: build
if: needs.file-check.outputs.run == 'true'
run: |
- git describe
- mkdir -p artifacts
- ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
- autoreconf -ivf
- ./configure --prefix=/usr \
- --sysconfdir=/etc \
- --localstatedir=/var \
- --libexecdir=/usr/libexec \
- --with-zlib \
- --with-math \
- --with-user=netdata
- make dist
+ mkdir -p artifacts/
+ tar --create --file "artifacts/netdata-$(git describe).tar.gz" \
+ --sort=name --posix --auto-compress --exclude=artifacts/ --exclude=.git \
+ --exclude=.gitignore --exclude=.gitattributes --exclude=.gitmodules \
+ --transform "s/^\\.\\//netdata-$(git describe)\\//" --verbose .
+ cd artifacts/
echo "distfile=$(find . -name 'netdata-*.tar.gz')" >> "${GITHUB_OUTPUT}"
- cp netdata-*.tar.gz artifacts/
- name: Store
id: store
if: needs.file-check.outputs.run == 'true'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: dist-tarball
path: artifacts/*.tar.gz
@@ -161,6 +190,7 @@ jobs:
matrix:
arch:
- x86_64
+ - armv6l
- armv7l
- aarch64
- ppc64le
@@ -193,27 +223,31 @@ jobs:
- name: Cache
if: (github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')) && needs.file-check.outputs.run == 'true'
id: cache
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: artifacts/cache
key: ${{ steps.cache-key.outputs.key }}
- name: Build
if: github.event_name != 'workflow_dispatch' && needs.file-check.outputs.run == 'true' # Don’t use retries on PRs.
- run: .github/scripts/build-static.sh ${{ matrix.arch }}
+ run: |
+ export EXTRA_INSTALL_FLAGS=${{ needs.file-check.outputs.skip-go }}
+ .github/scripts/build-static.sh ${{ matrix.arch }}
- name: Build
if: github.event_name == 'workflow_dispatch' && needs.file-check.outputs.run == 'true'
id: build
- uses: nick-fields/retry@v2
+ uses: nick-fields/retry@v3
with:
timeout_minutes: 180
max_attempts: 3
- command: .github/scripts/build-static.sh ${{ matrix.arch }}
+ command: |
+ export EXTRA_INSTALL_FLAGS=${{ needs.file-check.outputs.skip-go }}
+ .github/scripts/build-static.sh ${{ matrix.arch }}
- name: Store
id: store
if: needs.file-check.outputs.run == 'true'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: static-archive
+ name: dist-static-${{ matrix.arch }}
path: artifacts/*.gz.run
retention-days: 30
- name: Failure Notification
@@ -254,7 +288,8 @@ jobs:
- name: Prepare tools
id: prepare
run: |
- sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ sudo apt-get update || true
+ sudo apt-get install -y python3-ruamel.yaml
- name: Read build matrix
id: set-matrix
run: |
@@ -354,7 +389,7 @@ jobs:
tags: test:${{ matrix.artifact_key }}
- name: Upload image artifact
id: upload
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: ${{ matrix.artifact_key }}-test-env
path: /tmp/image.tar
@@ -410,37 +445,36 @@ jobs:
- name: Fetch test environment
id: fetch
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: ${{ matrix.artifact_key }}-test-env
+ action: actions/download-artifact@v4
+ with: |
+ name: ${{ matrix.artifact_key }}-test-env
+ path: .
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Load test environment
id: load
if: needs.file-check.outputs.run == 'true'
run: docker load --input image.tar
- - name: Regular build on ${{ matrix.distro }}
- id: build-basic
- if: needs.file-check.outputs.run == 'true'
- run: |
- docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
- /bin/sh -c 'autoreconf -ivf && ./configure --disable-dependency-tracking && make -j2'
- name: netdata-installer on ${{ matrix.distro }}, disable cloud
id: build-no-cloud
if: needs.file-check.outputs.run == 'true'
run: |
docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
- /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --disable-cloud --one-time-build'
+ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --disable-cloud --one-time-build ${{ needs.file-check.outputs.skip-go }}'
- name: netdata-installer on ${{ matrix.distro }}, require cloud
id: build-cloud
if: needs.file-check.outputs.run == 'true'
run: |
docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
- /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
+ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build ${{ needs.file-check.outputs.skip-go }}'
- name: netdata-installer on ${{ matrix.distro }}, require cloud, no JSON-C
id: build-no-jsonc
if: matrix.jsonc_removal != '' && needs.file-check.outputs.run == 'true'
run: |
docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
- /bin/sh -c '/rmjsonc.sh && ./netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
+ /bin/sh -c '/rmjsonc.sh && ./netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build ${{ needs.file-check.outputs.skip-go }}'
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
@@ -454,7 +488,6 @@ jobs:
Checkout: ${{ steps.checkout.outcome }}
Fetch test environment: ${{ steps.fetch.outcome }}
Load test environment: ${{ steps.load.outcome }}
- Regular build: ${{ steps.build-basic.outcome }}
netdata-installer, disable cloud: ${{ steps.build-no-cloud.outcome }}
netdata-installer, require cloud: ${{ steps.build-cloud.outcome }}
netdata-installer, no JSON-C: ${{ steps.build-no-jsonc.outcome }}
@@ -500,27 +533,40 @@ jobs:
- name: Fetch dist tarball artifacts
id: fetch-tarball
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: dist-tarball
- path: dist-tarball
+ action: actions/download-artifact@v4
+ with: |
+ name: dist-tarball
+ path: dist-tarball
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare artifact directory
id: prepare
if: needs.file-check.outputs.run == 'true'
run: |
- mkdir -p artifacts/download/latest || exit 1
- echo "9999.0.0-0" > artifacts/download/latest/latest-version.txt || exit 1
- cp dist-tarball/* artifacts/download/latest || exit 1
- cd artifacts/download/latest || exit 1
+ mkdir -p artifacts/download/v9999.0.0 || exit 1
+ mkdir -p artifacts/latest || exit 1
+ echo "v9999.0.0" > artifacts/latest/latest-version.txt || exit 1
+ cp dist-tarball/* artifacts/download/v9999.0.0 || exit 1
+ cd artifacts/download/v9999.0.0 || exit 1
ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
+ ls -lFh
sha256sum -b ./* > "sha256sums.txt" || exit 1
cat sha256sums.txt
+ cd ../.. || exit 1
+ ls -lR
- name: Fetch test environment
id: fetch-test-environment
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: ${{ matrix.artifact_key }}-test-env
+ action: actions/download-artifact@v4
+ with: |
+ name: ${{ matrix.artifact_key }}-test-env
+ path: .
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Load test environment
id: load
if: needs.file-check.outputs.run == 'true'
@@ -529,8 +575,9 @@ jobs:
id: updater-check
if: needs.file-check.outputs.run == 'true'
run: |
- docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata test:${{ matrix.artifact_key }} \
- /netdata/.github/scripts/run-updater-check.sh
+ docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata \
+ -e EXTRA_INSTALL_FLAGS=${{ needs.file-check.outputs.skip-go }} \
+ test:${{ matrix.artifact_key }} /netdata/.github/scripts/run-updater-check.sh
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
@@ -577,27 +624,24 @@ jobs:
id: prepare
if: needs.file-check.outputs.run == 'true'
run: mkdir -p artifacts
- - name: Retrieve Dist Tarball
+ - name: Retrieve Build Artifacts
id: fetch-dist
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: dist-tarball
- path: dist-tarball
- - name: Retrieve Static Build Artifacts
- id: fetch-static
- if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
- with:
- name: static-archive
- path: static-archive
+ action: actions/download-artifact@v4
+ with: |
+ pattern: dist-*
+ path: dist-artifacts
+ merge-multiple: true
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare Artifacts
id: consolidate
if: needs.file-check.outputs.run == 'true'
working-directory: ./artifacts/
run: |
- mv ../dist-tarball/* . || exit 1
- mv ../static-archive/* . || exit 1
+ mv ../dist-artifacts/* . || exit 1
ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
cp ../packaging/version ./latest-version.txt || exit 1
cp ../integrations/integrations.js ./integrations.js || exit 1
@@ -606,7 +650,7 @@ jobs:
- name: Store Artifacts
id: store
if: needs.file-check.outputs.run == 'true'
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: final-artifacts
path: artifacts/*
@@ -621,10 +665,9 @@ jobs:
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
${{ github.repository }}: Failed to prepare release artifacts for upload.
- CHeckout: ${{ steps.checkout.outcome }}
+ Checkout: ${{ steps.checkout.outcome }}
Prepare environment: ${{ steps.prepare.outcome }}
Fetch dist tarball: ${{ steps.fetch-dist.outcome }}
- Fetch static builds: ${{ steps.fetch-static.outcome }}
Consolidate artifacts: ${{ steps.consolidate.outcome }}
Store: ${{ steps.store.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
@@ -662,16 +705,21 @@ jobs:
- name: Fetch artifacts
id: fetch
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: final-artifacts
- path: artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare artifacts directory
id: prepare
if: needs.file-check.outputs.run == 'true'
run: |
mkdir -p download/latest
mv artifacts/* download/latest
+ ls -al download/latest
- name: Verify that artifacts work with installer
id: verify
if: needs.file-check.outputs.run == 'true'
@@ -726,16 +774,21 @@ jobs:
- name: Fetch artifacts
id: fetch-artifacts
if: needs.file-check.outputs.run == 'true'
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: final-artifacts
- path: artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare artifacts directory
id: prepare
if: needs.file-check.outputs.run == 'true'
run: |
mkdir -p download/latest
mv artifacts/* download/latest
+ ls -al download/latest
- name: Verify that artifacts work with installer
id: verify
if: needs.file-check.outputs.run == 'true'
@@ -775,26 +828,30 @@ jobs:
steps:
- name: Retrieve Artifacts
id: fetch
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: final-artifacts
- path: final-artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: final-artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Authenticate to GCS
id: gcs-auth
- uses: google-github-actions/auth@v1
+ uses: google-github-actions/auth@v2
with:
project_id: ${{ secrets.GCP_NIGHTLY_STORAGE_PROJECT }}
credentials_json: ${{ secrets.GCS_STORAGE_SERVICE_KEY_JSON }}
- name: Setup GCS
id: gcs-setup
- uses: google-github-actions/setup-gcloud@v1.1.1
+ uses: google-github-actions/setup-gcloud@v2.1.0
- name: Upload Artifacts
id: upload
- uses: google-github-actions/upload-cloud-storage@v1.0.3
+ uses: google-github-actions/upload-cloud-storage@v2.1.0
with:
destination: ${{ secrets.GCP_NIGHTLY_STORAGE_BUCKET }}
gzip: false
- path: ./final-artifacts
+ path: ./final-artifacts/latest-version.txt
parent: false
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
@@ -840,10 +897,14 @@ jobs:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- name: Retrieve Artifacts
id: fetch
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: final-artifacts
- path: final-artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: final-artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Prepare version info
id: version
run: |
@@ -871,7 +932,7 @@ jobs:
with:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- name: Init python environment for publish release metadata
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
id: init-python
with:
python-version: "3.12"
@@ -891,11 +952,17 @@ jobs:
key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
name: id_ecdsa
known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
- - name: Sync newer releases
+ - name: Sync release info to packages.netdata.cloud
id: sync-releases
+ continue-on-error: true
if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-latest-version.outputs.versions_needs_update == 'true'
run: |
- .github/scripts/upload-new-version-tags.sh
+ .github/scripts/upload-new-version-tags.sh packages.netdata.cloud
+ - name: Sync release info to packages2.netdata.cloud
+ id: sync-releases2
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-latest-version.outputs.versions_needs_update == 'true'
+ run: |
+ .github/scripts/upload-new-version-tags.sh packages2.netdata.cloud
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
@@ -916,7 +983,8 @@ jobs:
Setup python environment: ${{ steps.setup-python.outcome }}
Check the nearly published release against the advertised: ${{ steps.check-latest-version.outcome }}
Setup ssh: ${{ steps.ssh-setup.outcome }}
- Sync with the releases: ${{ steps.sync-releases.outcome }}
+ Sync release info to packages.netdata.cloud: ${{ steps.sync-releases.outcome }}
+ Sync release info to packages2.netdata.cloud: ${{ steps.sync-releases2.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
@@ -954,10 +1022,14 @@ jobs:
uses: actions/checkout@v4
- name: Retrieve Artifacts
id: fetch
- uses: actions/download-artifact@v3
+ uses: Wandalen/wretry.action@v3
with:
- name: final-artifacts
- path: final-artifacts
+ action: actions/download-artifact@v4
+ with: |
+ name: final-artifacts
+ path: final-artifacts
+ attempt_limit: 3
+ attempt_delay: 2000
- name: Create Release
id: create-release
uses: ncipollo/release-action@v1
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 1308f45fa..2ddcd822b 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -16,6 +16,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
run: ${{ steps.check-run.outputs.run }}
+ skip-go: ${{ steps.check-go.outputs.skip-go }}
steps:
- name: Checkout
id: checkout
@@ -23,38 +24,73 @@ jobs:
with:
fetch-depth: 0
submodules: recursive
- - name: Check files
- id: check-files
- uses: tj-actions/changed-files@v40
+ - name: Check source files
+ id: check-source-files
+ uses: tj-actions/changed-files@v44
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
- configure.ac
- **/Makefile*
- Makefile*
- .gitignore
- .github/workflows/checks.yml
- build/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
files_ignore: |
netdata.spec.in
- **.md
+ **/*.md
+ - name: Check build files
+ id: check-build-files
+ uses: tj-actions/changed-files@v44
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.cmake
+ CMakeLists.txt
+ .gitignore
+ .github/data/distros.yml
+ .github/workflows/build.yml
+ packaging/cmake/
+ packaging/*.version
+ packaging/*.checksums
+ files_ignore: |
+ **/*.md
+ packaging/repoconfig/
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ CHANGED_SOURCE_FILES: ${{ steps.check-source-files.outputs.all_changed_files }}
+ CHANGED_BUILD_FILES: ${{ steps.check-build-files.outputs.all_changed_files }}
+ run: |
+ for file in ${CHANGED_SOURCE_FILES} ${CHANGED_BUILD_FILES} ; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
- if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ if [ "${{ steps.check-source-files.outputs.any_modified }}" == "true" ] || [ "${{ steps.check-build-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo 'run=true' >> "${GITHUB_OUTPUT}"
else
echo 'run=false' >> "${GITHUB_OUTPUT}"
fi
+ - name: Check Go
+ id: check-go
+ env:
+ OTHER_CHANGED_FILES: ${{ steps.check-source-files.outputs.other_changed_files }}
+ run: |
+ if [ '${{ github.event_name }}' == 'pull_request' ]; then
+ if echo "${OTHER_CHANGED_FILES}" | grep -qE '(^|/)(.*\.go|go\.mod|go\.sum)$' || [ "${{ steps.check-build-files.outputs.any_modified }}" == "true" ]; then
+ echo 'skip-go=' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'skip-go=--disable-go' >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo 'skip-go=' >> "${GITHUB_OUTPUT}"
+ fi
libressl-checks:
name: LibreSSL
@@ -78,10 +114,8 @@ jobs:
'apk add bash;
./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata;
apk del openssl openssl-dev;
- apk add libressl libressl-dev;
- autoreconf -ivf;
- ./configure --disable-dependency-tracking;
- make;'
+ apk add libressl libressl-dev protobuf-dev;
+ ./netdata-installer.sh --disable-telemetry --dont-start-it --dont-wait --one-time-build --disable-go;'
clang-checks:
name: Clang
@@ -122,7 +156,7 @@ jobs:
run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
- name: Build netdata
if: needs.file-check.outputs.run == 'true'
- run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
+ run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build ${{ needs.file-check.outputs.skip-go }}
- name: Check that repo is clean
if: needs.file-check.outputs.run == 'true'
run: |
diff --git a/.github/workflows/cloud_regression.yml b/.github/workflows/cloud_regression.yml
index 01fcdca4d..67a55ed2a 100644
--- a/.github/workflows/cloud_regression.yml
+++ b/.github/workflows/cloud_regression.yml
@@ -22,21 +22,24 @@ jobs:
PR_COMMIT_HASH: ${{ github.event.pull_request.head.sha }}
id: output-workflow-dispatch-params
run: |
- if [ ${{ github.event_name }} == 'pull_request_target' ]; then
+ if [ "${{ github.event_name }}" == 'pull_request_target' ]; then
NETDATA_CUSTOM_REPO="$PR_REPO_NAME"
NETDATA_CUSTOM_BRANCH="$PR_BRANCH_NAME"
NETDATA_CUSTOM_PR_NUMBER="${{ github.event.number }}"
NETDATA_CUSTOM_COMMIT_HASH="$PR_COMMIT_HASH"
- elif [ ${{ github.event_name }} == 'push' ]; then
+ elif [ "${{ github.event_name }}" == 'push' ]; then
NETDATA_CUSTOM_REPO="netdata/netdata"
NETDATA_CUSTOM_BRANCH="master"
NETDATA_CUSTOM_PR_NUMBER=""
NETDATA_CUSTOM_COMMIT_HASH="${{ github.sha }}"
fi
- echo "netdata_repo=${NETDATA_CUSTOM_REPO}" >> $GITHUB_OUTPUT
- echo "netdata_branch=${NETDATA_CUSTOM_BRANCH}" >> $GITHUB_OUTPUT
- echo "netdata_pr_number=${NETDATA_CUSTOM_PR_NUMBER}" >> $GITHUB_OUTPUT
- echo "netdata_commit_hash=${NETDATA_CUSTOM_COMMIT_HASH}" >> $GITHUB_OUTPUT
+
+ {
+ echo "netdata_repo=${NETDATA_CUSTOM_REPO}"
+ echo "netdata_branch=${NETDATA_CUSTOM_BRANCH}"
+ echo "netdata_pr_number=${NETDATA_CUSTOM_PR_NUMBER}"
+ echo "netdata_commit_hash=${NETDATA_CUSTOM_COMMIT_HASH}"
+ } >> "$GITHUB_OUTPUT"
- name: Trigger Full Cloud Regression
uses: aurelien-baudet/workflow-dispatch@v2
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index ae5818afc..6c2c36365 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -21,6 +21,7 @@ jobs:
outputs:
cpp: ${{ steps.cpp.outputs.run }}
python: ${{ steps.python.outputs.run }}
+ go: ${{ steps.go.outputs.run }}
steps:
- name: Clone repository
uses: actions/checkout@v4
@@ -57,7 +58,7 @@ jobs:
id: python
run: |
if [ "${{ steps.always.outputs.run }}" = "false" ]; then
- if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'collectors/python.d.plugin/.*\.py' ; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'src/collectors/python.d.plugin/.*\.py' ; then
echo "run=true" >> "${GITHUB_OUTPUT}"
echo '::notice::Python code has changed, need to run CodeQL.'
else
@@ -66,6 +67,19 @@ jobs:
else
echo "run=true" >> "${GITHUB_OUTPUT}"
fi
+ - name: Check for Go changes
+ id: go
+ run: |
+ if [ "${{ steps.always.outputs.run }}" = "false" ]; then
+ if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'src/go/*\.go' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo '::notice::Go code has changed, need to run CodeQL.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ fi
analyze-cpp:
name: Analyze C/C++
@@ -81,7 +95,7 @@ jobs:
submodules: recursive
fetch-depth: 0
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
languages: cpp
config-file: ./.github/codeql/c-cpp-config.yml
@@ -90,7 +104,7 @@ jobs:
- name: Build netdata
run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
- name: Run CodeQL
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
with:
category: "/language:cpp"
@@ -108,11 +122,41 @@ jobs:
submodules: recursive
fetch-depth: 0
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
config-file: ./.github/codeql/python-config.yml
languages: python
- name: Run CodeQL
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
with:
category: "/language:python"
+
+ analyze-go:
+ name: Analyze Go
+ runs-on: ubuntu-latest
+ needs: prepare
+ if: needs.prepare.outputs.go == 'true'
+ strategy:
+ matrix:
+ tree:
+ - src/go/collectors/go.d.plugin
+ permissions:
+ security-events: write
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: go
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v3
+ with:
+ working-directory: ${{ matrix.tree }}
+ - name: Run CodeQL
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:go"
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
index eb68c302b..9113e179e 100644
--- a/.github/workflows/coverity.yml
+++ b/.github/workflows/coverity.yml
@@ -7,7 +7,7 @@ on:
pull_request:
paths:
- .github/workflows/coverity.yml
- - coverity-scan.sh
+ - packaging/utils/coverity-scan.sh
env:
DISABLE_TELEMETRY: 1
concurrency:
@@ -33,7 +33,8 @@ jobs:
sudo apt-get install -y libjson-c-dev libyaml-dev libipmimonitoring-dev \
libcups2-dev libsnappy-dev libprotobuf-dev \
libprotoc-dev libssl-dev protobuf-compiler \
- libnetfilter-acct-dev
+ libnetfilter-acct-dev libmongoc-dev libxen-dev \
+ libsystemd-dev ninja-build
- name: Run coverity-scan
id: run
env:
@@ -41,7 +42,7 @@ jobs:
COVERITY_SCAN_TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
COVERITY_SCAN_SUBMIT_MAIL: ${{ secrets.COVERITY_SCAN_SUBMIT_MAIL }}
run: |
- bash -x ./coverity-scan.sh --with-install
+ bash -x ./packaging/utils/coverity-scan.sh --with-install
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
diff --git a/.github/workflows/dashboard-pr.yml b/.github/workflows/dashboard-pr.yml
index f02cfb69d..418a8b8e6 100644
--- a/.github/workflows/dashboard-pr.yml
+++ b/.github/workflows/dashboard-pr.yml
@@ -25,10 +25,10 @@ jobs:
- name: Update Files
id: update
run: |
- web/gui/bundle_dashboard_v1.py ${{ github.event.inputs.dashboard_version }}
+ src/web/gui/bundle_dashboard_v1.py ${{ github.event.inputs.dashboard_version }}
- name: Create Pull Request
id: pr
- uses: peter-evans/create-pull-request@v5
+ uses: peter-evans/create-pull-request@v6
with:
title: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
body: 'See https://github.com/netdata/dashboard/releases/tag/${{ github.event.inputs.dashboard_version }} for changes.'
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index b7fe0a866..0d22aee98 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -1,4 +1,13 @@
---
+# Handle building docker images both for CI checks and for releases.
+#
+# The case of releases is unfortunately rather complicated, as Docker
+# tooling does not have great support for handling of multiarch images
+# published to multiple registries. As a result, we have to build the
+# images, export the cache, and then _rebuild_ the images using the exported
+# cache but with different output parameters for buildx. We also need to
+# do the second build step as a separate job for each registry so that a
+# failure to publish one place won’t break publishing elsewhere.
name: Docker
on:
push:
@@ -22,55 +31,106 @@ jobs:
runs-on: ubuntu-latest
outputs:
run: ${{ steps.check-run.outputs.run }}
+ skip-go: ${{ steps.check-go.outputs.skip-go }}
steps:
- name: Checkout
id: checkout
+ if: github.event_name != 'workflow_dispatch'
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: recursive
- - name: Check files
- id: file-check
- uses: tj-actions/changed-files@v40
+ - name: Check source files
+ id: check-source-files
+ if: github.event_name != 'workflow_dispatch'
+ uses: tj-actions/changed-files@v44
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
+ files_ignore: |
+ netdata.spec.in
+ **/*.md
+ - name: Check build system files
+ id: check-build-files
+ if: github.event_name != 'workflow_dispatch'
+ uses: tj-actions/changed-files@v44
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
.dockerignore
- configure.ac
+ CMakeLists.txt
netdata-installer.sh
- **/Makefile*
- Makefile*
.github/workflows/docker.yml
.github/scripts/docker-test.sh
- build/**
- packaging/docker/**
- packaging/installer/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ .github/scripts/gen-docker-tags.py
+ .github/scripts/gen-docker-imagetool-args.py
+ packaging/cmake/
+ packaging/docker/
+ packaging/installer/
+ packaging/runtime-check.sh
+ packaging/*.version
+ packaging/*.checksums
files_ignore: |
- netdata.spec.in
- **.md
+ **/*.md
+ packaging/repoconfig/
+ - name: List all changed files in pattern
+ continue-on-error: true
+ if: github.event_name != 'workflow_dispatch'
+ env:
+ CHANGED_SOURCE_FILES: ${{ steps.check-source-files.outputs.all_changed_files }}
+ CHANGED_BUILD_FILES: ${{ steps.check-build-files.outputs.all_changed_files }}
+ run: |
+ for file in ${CHANGED_SOURCE_FILES} ${CHANGED_BUILD_FILES} ; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
- if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ if [ "${{ steps.check-source-files.outputs.any_modified }}" == "true" ] || [ "${{ steps.check-build-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo 'run=true' >> "${GITHUB_OUTPUT}"
else
echo 'run=false' >> "${GITHUB_OUTPUT}"
fi
+ - name: Check Go
+ id: check-go
+ env:
+ OTHER_CHANGED_FILES: ${{ steps.check-source-files.outputs.other_changed_files }}
+ run: |
+ if [ '${{ github.event_name }}' == 'pull_request' ]; then
+ if echo "${OTHER_CHANGED_FILES}" | grep -q '.*/(.*\.go|go\.mod|go\.sum)$' || [ "${{ steps.check-build-files.outputs.any_modified }}" == "true" ]; then
+ echo 'skip-go=' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'skip-go=--disable-go' >> "${GITHUB_OUTPUT}"
+ fi
+ else
+ echo 'skip-go=' >> "${GITHUB_OUTPUT}"
+ fi
- docker-test:
- name: Docker Runtime Test
+ build-images:
+ name: Build Docker Images
needs:
- file-check
runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
+ # Fail fast on releases, but run everything to completion on other triggers.
+ fail-fast: ${{ github.event_name == 'workflow_dispatch' }}
steps:
- name: Skip Check
id: skip
@@ -81,328 +141,606 @@ jobs:
if: needs.file-check.outputs.run == 'true'
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
+ - name: Generate Artifact Name
+ id: artifact-name
+ if: github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' && github.event_name == 'workflow_dispatch'
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' && github.event_name == 'workflow_dispatch'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
+ - name: Setup QEMU
+ id: qemu
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64' && needs.file-check.outputs.run == 'true'
+ uses: docker/setup-qemu-action@v3
- name: Setup Buildx
id: prepare
if: needs.file-check.outputs.run == 'true'
uses: docker/setup-buildx-action@v3
- - name: Test Build
+ - name: Build Image
id: build
if: needs.file-check.outputs.run == 'true'
uses: docker/build-push-action@v5
with:
- load: true
- push: false
+ platforms: ${{ matrix.platform }}
tags: netdata/netdata:test
+ load: true
+ cache-to: type=local,dest=/tmp/build-cache,mode=max
+ build-args: |
+ OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ EXTRA_INSTALL_OPTS=${{ needs.file-check.outputs.skip-go }}
- name: Test Image
id: test
- if: needs.file-check.outputs.run == 'true'
+ if: needs.file-check.outputs.run == 'true' && matrix.platform == 'linux/amd64'
run: .github/scripts/docker-test.sh
+ - name: Upload Cache
+ id: upload-cache
+ if: github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' && github.event_name == 'workflow_dispatch'
+ uses: actions/upload-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache/*
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker runtime testing failed:'
+ SLACK_TITLE: 'Docker build failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Building or testing Docker image for linux/amd64 failed.
- CHeckout: ${{ steps.checkout.outcome }}
+ ${{ github.repository }}: Building or testing Docker image for ${{ matrix.platform }} failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
Setup buildx: ${{ steps.prepare.outcome }}
Build image: ${{ steps.build.outcome }}
Test image: ${{ steps.test.outcome }}
+ Upload build cache: ${{ steps.upload-cache.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
&& github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
&& needs.file-check.outputs.run == 'true'
}}
- docker-ci:
- if: github.event_name != 'workflow_dispatch'
- name: Docker Alt Arch Builds
- needs:
- - docker-test
- - file-check
+ gen-tags:
+ name: Generate Docker Tags
runs-on: ubuntu-latest
+ if: github.event_name == 'workflow_dispatch'
+ outputs:
+ tags: ${{ steps.tag.outputs.tags }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Generate Tags
+ id: tag
+ run: |
+ if [ ${{ github.event_name }} = 'workflow_dispatch' ]; then
+ echo "tags=$(.github/scripts/gen-docker-tags.py ${{ github.event_name }} ${{ github.event.inputs.version }})" >> "${GITHUB_OUTPUT}"
+ else
+ echo "tags=$(.github/scripts/gen-docker-tags.py ${{ github.event_name }} '')" >> "${GITHUB_OUTPUT}"
+ fi
+
+ build-images-docker-hub:
+ name: Push Images to Docker Hub
+ if: github.event_name == 'workflow_dispatch'
+ needs:
+ - build-images
+ - gen-tags
strategy:
matrix:
- platforms:
+ platform:
+ - linux/amd64
- linux/i386
- linux/arm/v7
- linux/arm64
- linux/ppc64le
+ runs-on: ubuntu-latest
steps:
- - name: Skip Check
- id: skip
- if: needs.file-check.outputs.run != 'true'
- run: echo "SKIPPED"
- name: Checkout
id: checkout
- if: needs.file-check.outputs.run == 'true'
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
+ - name: Generate Artifact Name
+ id: artifact-name
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Download Cache
+ id: fetch-cache
+ uses: actions/download-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache
+ - name: Mark image as official
+ id: env
+ if: github.repository == 'netdata/netdata'
+ run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
- name: Setup QEMU
id: qemu
- if: matrix.platforms != 'linux/i386' && needs.file-check.outputs.run == 'true'
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3
- name: Setup Buildx
- id: buildx
- if: needs.file-check.outputs.run == 'true'
+ id: prepare
uses: docker/setup-buildx-action@v3
- - name: Build
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: Build Image
id: build
- if: needs.file-check.outputs.run == 'true'
uses: docker/build-push-action@v5
with:
- platforms: ${{ matrix.platforms }}
- load: false
- push: false
- tags: netdata/netdata:test
+ platforms: ${{ matrix.platform }}
+ cache-from: type=local,src=/tmp/build-cache
+ outputs: type=image,name=netdata/netdata,push-by-digest=true,name-canonical=true,push=true
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ - name: Export Digest
+ id: export-digest
+ if: github.repository == 'netdata/netdata'
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+ - name: Upload digest
+ id: upload-digest
+ if: github.repository == 'netdata/netdata'
+ uses: actions/upload-artifact@v4
+ with:
+ name: docker-digests-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker build testing failed:'
+ SLACK_TITLE: 'Docker Hub upload failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Building Docker image for ${{ matrix.platforms }} failed.
- CHeckout: ${{ steps.checkout.outcome }}
+ ${{ github.repository }}: Creating or uploading Docker image for ${{ matrix.platform }} on Docker Hub failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Fetch build cache: ${{ steps.fetch-cache.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
Setup QEMU: ${{ steps.qemu.outcome }}
- Setup buildx: ${{ steps.buildx.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
Build image: ${{ steps.build.outcome }}
+ Export digest: ${{ steps.export-digest.outcome }}
+ Upload digest: ${{ steps.upload-digest.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
- && github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
- && needs.file-check.outputs.run == 'true'
}}
- normalize-tag: # Fix the release tag if needed
- name: Normalize Release Tag
- runs-on: ubuntu-latest
+ publish-docker-hub:
+ name: Consolidate and tag images for DockerHub
if: github.event_name == 'workflow_dispatch'
- outputs:
- tag: ${{ steps.tag.outputs.tag }}
+ needs:
+ - build-images-docker-hub
+ - gen-tags
+ runs-on: ubuntu-latest
steps:
- - name: Normalize Tag
- id: tag
- run: |
- if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
- echo "tag=v${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
- else
- echo "tag=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
- fi
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Download digests
+ id: fetch-digests
+ uses: actions/download-artifact@v4
+ with:
+ path: /tmp/digests
+ pattern: docker-digests-*
+ merge-multiple: true
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v3
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_HUB_USERNAME }}
+ password: ${{ secrets.DOCKER_HUB_PASSWORD }}
+ - name: Create and Push Manifest
+ id: manifest
+ if: github.repository == 'netdata/netdata'
+ run: docker buildx imagetools create $(.github/scripts/gen-docker-imagetool-args.py /tmp/digests '' "${{ needs.gen-tags.outputs.tags }}")
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Publishing Docker images to Docker Hub failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Publishing Docker images to Docker Hub failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Download digests: ${{ steps.fetch-digests.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Create and push manifest: ${{ steps.manifest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
- docker-publish:
+ build-images-quay:
+ name: Push Images to Quay.io
if: github.event_name == 'workflow_dispatch'
- name: Docker Build and Publish
needs:
- - docker-test
- - normalize-tag
+ - build-images
+ - gen-tags
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
runs-on: ubuntu-latest
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
- - name: Determine which tags to use
- id: release-tags
- if: github.event.inputs.version != 'nightly'
- run: |
- echo "tags=netdata/netdata:latest,netdata/netdata:stable,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:stable,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '')" \
- >> "${GITHUB_ENV}"
- - name: Determine which tags to use
- id: nightly-tags
- if: github.event.inputs.version == 'nightly'
- run: |
- echo "tags=netdata/netdata:latest,netdata/netdata:edge,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:edge,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:edge" >> "${GITHUB_ENV}"
+ - name: Generate Artifact Name
+ id: artifact-name
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Download Cache
+ id: fetch-cache
+ uses: actions/download-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache
- name: Mark image as official
id: env
if: github.repository == 'netdata/netdata'
run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
- name: Setup QEMU
id: qemu
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3
- name: Setup Buildx
- id: buildx
+ id: prepare
uses: docker/setup-buildx-action@v3
- - name: Docker Hub Login
- id: docker-hub-login
- if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
- with:
- username: ${{ secrets.DOCKER_HUB_USERNAME }}
- password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- - name: GitHub Container Registry Login
- id: ghcr-login
- if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
- with:
- registry: ghcr.io
- username: ${{ github.repository_owner }}
- password: ${{ secrets.GITHUB_TOKEN }}
- - name: Quay.io Login
- id: quay-login
+ - name: Registry Login
+ id: login
if: github.repository == 'netdata/netdata'
uses: docker/login-action@v3
with:
registry: quay.io
username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
- - name: Docker Build
+ - name: Build Image
id: build
uses: docker/build-push-action@v5
with:
- platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
- push: ${{ github.repository == 'netdata/netdata' }}
- tags: ${{ env.tags }}
+ platforms: ${{ matrix.platform }}
+ cache-from: type=local,src=/tmp/build-cache
build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ outputs: type=image,name=quay.io/netdata/netdata,push-by-digest=true,name-canonical=true,push=true
+ - name: Export Digest
+ id: export-digest
+ if: github.repository == 'netdata/netdata'
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+ - name: Upload digest
+ id: upload-digest
+ if: github.repository == 'netdata/netdata'
+ uses: actions/upload-artifact@v4
+ with:
+ name: quay-digests-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker Build failed:'
+ SLACK_TITLE: 'Quay.io upload failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Failed to build or publish Docker images.
- CHeckout: ${{ steps.checkout.outcome }}
- Generate release tags: ${{ steps.release-tags.outcome }}
- Generate nightly tags: ${{ steps.nightly-tags.outcome }}
+ ${{ github.repository }}: Creating or uploading Docker image for ${{ matrix.platform }} on Quay.io failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Fetch build cache: ${{ steps.fetch-cache.outcome }}
Setup environment: ${{ steps.env.outcome }}
Setup QEMU: ${{ steps.qemu.outcome }}
- Setup buildx: ${{ steps.buildx.outcome }}
- Login to DockerHub: ${{ steps.docker-hub-login.outcome }}
- Login to GHCR: ${{ steps.ghcr-login.outcome }}
- Login to Quay: ${{ steps.quay-login.outcome }}
- Build and publish images: ${{ steps.build.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ Export digest: ${{ steps.export-digest.outcome }}
+ Upload digest: ${{ steps.upload-digest.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
failure()
- && github.event_name != 'pull_request'
- && startsWith(github.ref, 'refs/heads/master')
&& github.repository == 'netdata/netdata'
}}
- - name: Trigger Helmchart PR
- if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
- uses: benc-uk/workflow-dispatch@v1
+
+ publish-quay:
+ name: Consolidate and tag images for Quay.io
+ if: github.event_name == 'workflow_dispatch'
+ needs:
+ - build-images-quay
+ - gen-tags
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Download digests
+ id: fetch-digests
+ uses: actions/download-artifact@v4
with:
- token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- repo: netdata/helmchart
- workflow: Agent Version PR
- ref: refs/heads/master
- inputs: '{"agent_version": "${{ needs.normalize-tag.outputs.tag }}"}'
- - name: Trigger MSI build
- if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
- uses: benc-uk/workflow-dispatch@v1
+ path: /tmp/digests
+ pattern: quay-digests-*
+ merge-multiple: true
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v3
+ - name: Registry Login
+ id: login
+ if: github.repository == 'netdata/netdata'
+ uses: docker/login-action@v3
with:
- token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- repo: netdata/msi-installer
- workflow: Build
- ref: refs/heads/master
- inputs: '{"tag": "${{ needs.normalize-tag.outputs.tag }}", "pwd": "${{ secrets.MSI_CODE_SIGNING_PASSWORD }}"}'
+ registry: quay.io
+ username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
+ password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
+ - name: Create and Push Manifest
+ id: manifest
+ if: github.repository == 'netdata/netdata'
+ run: docker buildx imagetools create $(.github/scripts/gen-docker-imagetool-args.py /tmp/digests 'quay.io' "${{ needs.gen-tags.outputs.tags }}")
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Publishing Docker images on Quay.io failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Publishing Docker images on Quay.io failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Download digests: ${{ steps.fetch-digests.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Create and push manifest: ${{ steps.manifest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
- docker-dbg-publish:
+ build-images-ghcr:
+ name: Push Images to GHCR
if: github.event_name == 'workflow_dispatch'
- name: Docker Build and Publish (Debugging Image)
needs:
- - docker-test
- - normalize-tag
+ - build-images
+ - gen-tags
+ strategy:
+ matrix:
+ platform:
+ - linux/amd64
+ - linux/i386
+ - linux/arm/v7
+ - linux/arm64
+ - linux/ppc64le
runs-on: ubuntu-latest
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v4
with:
+ fetch-depth: 0
submodules: recursive
- - name: Determine which tags to use
- id: release-tags
- if: github.event.inputs.version != 'nightly'
- run: |
- echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:stable,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:stable,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '-debug')" \
- >> "${GITHUB_ENV}"
- - name: Determine which tags to use
- id: nightly-tags
- if: github.event.inputs.version == 'nightly'
- run: |
- echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:edge,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:edge,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:edge" >> "${GITHUB_ENV}"
+ - name: Generate Artifact Name
+ id: artifact-name
+ run: echo "platform=$(echo ${{ matrix.platform }} | tr '/' '-' | cut -f 2- -d '-')" >> "${GITHUB_OUTPUT}"
+ - name: Download Cache
+ id: fetch-cache
+ uses: actions/download-artifact@v4
+ with:
+ name: cache-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/build-cache
- name: Mark image as official
id: env
if: github.repository == 'netdata/netdata'
run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}"
- name: Setup QEMU
id: qemu
+ if: matrix.platform != 'linux/i386' && matrix.platform != 'linux/amd64'
uses: docker/setup-qemu-action@v3
- name: Setup Buildx
- id: buildx
+ id: prepare
uses: docker/setup-buildx-action@v3
- - name: Docker Hub Login
- id: docker-hub-login
+ - name: Registry Login
+ id: login
if: github.repository == 'netdata/netdata'
uses: docker/login-action@v3
with:
- username: ${{ secrets.DOCKER_HUB_USERNAME }}
- password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- - name: GitHub Container Registry Login
- id: ghcr-login
+ registry: ghcr.io
+ username: ${{ github.repository_owner }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ - name: Build Image
+ id: build
+ uses: docker/build-push-action@v5
+ with:
+ platforms: ${{ matrix.platform }}
+ cache-from: type=local,src=/tmp/build-cache
+ build-args: OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
+ outputs: type=image,name=ghcr.io/netdata/netdata,push-by-digest=true,name-canonical=true,push=true
+ - name: Export Digest
+ id: export-digest
+ if: github.repository == 'netdata/netdata'
+ run: |
+ mkdir -p /tmp/digests
+ digest="${{ steps.build.outputs.digest }}"
+ touch "/tmp/digests/${digest#sha256:}"
+ - name: Upload digest
+ id: upload-digest
+ if: github.repository == 'netdata/netdata'
+ uses: actions/upload-artifact@v4
+ with:
+ name: ghcr-digests-${{ steps.artifact-name.outputs.platform }}
+ path: /tmp/digests/*
+ if-no-files-found: error
+ retention-days: 1
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'GHCR upload failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Creating or uploading Docker image for ${{ matrix.platform }} on GHCR failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Determine artifact name: ${{ steps.artifact-name.outcome }}
+ Fetch build cache: ${{ steps.fetch-cache.outcome }}
+ Setup environment: ${{ steps.env.outcome }}
+ Setup QEMU: ${{ steps.qemu.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Build image: ${{ steps.build.outcome }}
+ Export digest: ${{ steps.export-digest.outcome }}
+ Upload digest: ${{ steps.upload-digest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
+
+ publish-ghcr:
+ name: Consolidate and tag images for GHCR
+ if: github.event_name == 'workflow_dispatch'
+ needs:
+ - build-images-ghcr
+ - gen-tags
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ - name: Download digests
+ id: fetch-digests
+ uses: actions/download-artifact@v4
+ with:
+ path: /tmp/digests
+ pattern: ghcr-digests-*
+ merge-multiple: true
+ - name: Setup Buildx
+ id: prepare
+ uses: docker/setup-buildx-action@v3
+ - name: Registry Login
+ id: login
if: github.repository == 'netdata/netdata'
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- - name: Quay.io Login
- id: quay-login
+ - name: Create and Push Manifest
+ id: manifest
if: github.repository == 'netdata/netdata'
- uses: docker/login-action@v3
+ run: docker buildx imagetools create $(.github/scripts/gen-docker-imagetool-args.py /tmp/digests 'ghcr.io' "${{ needs.gen-tags.outputs.tags }}")
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Publishing Docker images on GHCR failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Publishing Docker images on GHCR failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Download digests: ${{ steps.fetch-digests.outcome }}
+ Setup buildx: ${{ steps.prepare.outcome }}
+ Login to registry: ${{ steps.login.outcome }}
+ Create and push manifest: ${{ steps.manifest.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.repository == 'netdata/netdata'
+ }}
+
+ trigger-subsequent-workflows:
+ if: github.event_name == 'workflow_dispatch'
+ name: Trigger subsequent workflows for newly added versions
+ needs:
+ - publish-docker-hub
+ - gen-tags
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
with:
- registry: quay.io
- username: ${{ secrets.NETDATABOT_QUAY_USERNAME }}
- password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
- - name: Docker Build
- id: build
- uses: docker/build-push-action@v5
+ submodules: recursive
+ - name: Trigger Helmchart PR
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
+ id: trigger-helmchart
+ uses: benc-uk/workflow-dispatch@v1
with:
- platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
- push: ${{ github.repository == 'netdata/netdata' }}
- tags: ${{ env.tags }}
- build-args: |
- OFFICIAL_IMAGE=${{ env.OFFICIAL_IMAGE }}
- DEBUG_BUILD=1
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/helmchart
+ workflow: Agent Version PR
+ ref: refs/heads/master
+ inputs: '{"agent_version": "v${{ inputs.version }}"}'
+ - name: Trigger MSI build
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
+ id: trigger-msi
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/msi-installer
+ workflow: Build
+ ref: refs/heads/master
+ inputs: '{"tag": "stable", "pwd": "${{ secrets.MSI_CODE_SIGNING_PASSWORD }}"}'
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
SLACK_COLOR: 'danger'
SLACK_FOOTER: ''
SLACK_ICON_EMOJI: ':github-actions:'
- SLACK_TITLE: 'Docker Debug Build failed:'
+ SLACK_TITLE: 'Version cascade failed:'
SLACK_USERNAME: 'GitHub Actions'
SLACK_MESSAGE: |-
- ${{ github.repository }}: Failed to build or publish Docker debug images.
+ ${{ github.repository }}: Version cascade failed
Checkout: ${{ steps.checkout.outcome }}
- Generate release tags: ${{ steps.release-tags.outcome }}
- Generate nightly tags: ${{ steps.nightly-tags.outcome }}
- Setup environment: ${{ steps.env.outcome }}
- Setup QEMU: ${{ steps.qemu.outcome }}
- Setup buildx: ${{ steps.buildx.outcome }}
- Login to DockerHub: ${{ steps.docker-hub-login.outcome }}
- Login to GHCR: ${{ steps.ghcr-login.outcome }}
- Login to Quay: ${{ steps.quay-login.outcome }}
- Build and publish images: ${{ steps.build.outcome }}
+ Trigger Helmchart PR: ${{ steps.trigger-helmchart.outcome }}
+ Trigger MSI build: ${{ steps.trigger-msi.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
deleted file mode 100644
index a0554b167..000000000
--- a/.github/workflows/docs.yml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-name: Docs
-on:
- push:
- branches:
- - master
- paths:
- - '**.md'
- pull_request:
- paths:
- - '**.md'
-env:
- DISABLE_TELEMETRY: 1
-jobs:
- markdown-link-check:
- name: Broken Links
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v4
- with:
- submodules: recursive
- - name: Run link check
- uses: gaurav-nelson/github-action-markdown-link-check@v1
- with:
- use-quiet-mode: 'no'
- use-verbose-mode: 'yes'
- check-modified-files-only: 'yes'
- config-file: '.mlc_config.json'
diff --git a/.github/workflows/generate-integrations.yml b/.github/workflows/generate-integrations.yml
index 4128e9925..8287f9295 100644
--- a/.github/workflows/generate-integrations.yml
+++ b/.github/workflows/generate-integrations.yml
@@ -6,15 +6,15 @@ on:
branches:
- master
paths: # If any of these files change, we need to regenerate integrations.js.
- - 'collectors/**/metadata.yaml'
- - 'exporting/**/metadata.yaml'
- - 'health/notifications/**/metadata.yaml'
+ - 'src/collectors/**/metadata.yaml'
+ - 'src/go/collectors/**/metadata.yaml'
+ - 'src/exporting/**/metadata.yaml'
+ - 'src/health/notifications/**/metadata.yaml'
- 'integrations/templates/**'
- 'integrations/categories.yaml'
- 'integrations/deploy.yaml'
- 'integrations/cloud-notifications/metadata.yaml'
- 'integrations/gen_integrations.py'
- - 'packaging/go.d.version'
workflow_dispatch: null
concurrency: # This keeps multiple instances of the job from running concurrently for the same ref.
group: integrations-${{ github.ref }}
@@ -31,17 +31,6 @@ jobs:
with:
fetch-depth: 1
submodules: recursive
- - name: Get Go Ref
- id: get-go-ref
- run: echo "go_ref=$(cat packaging/go.d.version)" >> "${GITHUB_ENV}"
- - name: Checkout Go
- id: checkout-go
- uses: actions/checkout@v4
- with:
- fetch-depth: 1
- path: go.d.plugin
- repository: netdata/go.d.plugin
- ref: ${{ env.go_ref }}
- name: Prepare Dependencies
id: prep-deps
run: |
@@ -58,7 +47,7 @@ jobs:
id: generate-integrations-documentation
run: |
python3 integrations/gen_docs_integrations.py
- - name: Generate collectors/COLLECTORS.md
+ - name: Generate src/collectors/COLLECTORS.md
id: generate-collectors-md
run: |
python3 integrations/gen_doc_collector_page.py
@@ -67,7 +56,7 @@ jobs:
run: rm -rf go.d.plugin virtualenv
- name: Create PR
id: create-pr
- uses: peter-evans/create-pull-request@v5
+ uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
commit-message: Regenerate integrations.js
@@ -90,12 +79,10 @@ jobs:
SLACK_MESSAGE: |-
${{ github.repository }}: Failed to create PR rebuilding integrations.js
Checkout Agent: ${{ steps.checkout-agent.outcome }}
- Get Go Ref: ${{ steps.get-go-ref.outcome }}
- Checkout Go: ${{ steps.checkout-go.outcome }}
- Prepare Dependencies: ${{ steps.prep-deps.outcome }}
+ Prep python env and deps: ${{ steps.prep-deps.outcome }}
Generate Integrations: ${{ steps.generate.outcome }}
Generate Integrations Documentation: ${{ steps.generate-integrations-documentation.outcome }}
- Generate collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }}
+ Generate src/collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }}
Clean Up Temporary Data: ${{ steps.clean.outcome }}
Create PR: ${{ steps.create-pr.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml
new file mode 100644
index 000000000..93596454b
--- /dev/null
+++ b/.github/workflows/go-tests.yml
@@ -0,0 +1,127 @@
+---
+# CI code for running tests against the Go toolchain components.
+name: Go Tests
+on:
+ push: # Master branch checks only validate the build and generate artifacts for testing.
+ branches:
+ - master
+ pull_request: null # PR checks only validate the build and generate artifacts for testing.
+concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+ group: go-test-${{ github.ref }}-${{ github.event_name }}
+ cancel-in-progress: true
+jobs:
+ file-check: # Check what files changed if we’re being run in a PR or on a push.
+ name: Check Modified Files
+ runs-on: ubuntu-latest
+ outputs:
+ run: ${{ steps.check-run.outputs.run }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ submodules: recursive
+ - name: Check files
+ id: check-files
+ uses: tj-actions/changed-files@v44
+ with:
+ since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
+ files: |
+ **/*.cmake
+ CMakeLists.txt
+ .github/workflows/go-tests.yml
+ packaging/cmake/
+ src/go/**
+ files_ignore: |
+ **/*.md
+ src/go/**/metadata.yaml
+ packaging/repoconfig/
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
+ - name: Check Run
+ id: check-run
+ run: |
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo 'run=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'run=false' >> "${GITHUB_OUTPUT}"
+ fi
+
+ matrix:
+ name: Generate Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.get-version.outputs.matrix }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Install dependencies
+ run: |
+ sudo apt-get update || true
+ sudo apt-get install -y python3-packaging
+ - name: Get Go version and modules
+ id: get-version
+ run: .github/scripts/get-go-version.py
+
+ tests:
+ name: Go toolchain tests
+ runs-on: ubuntu-latest
+ needs:
+ - file-check
+ - matrix
+ strategy:
+ fail-fast: false
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ steps:
+ - name: Skip Check
+ id: skip
+ if: needs.file-check.outputs.run != 'true'
+ run: echo "SKIPPED"
+ - name: Install Go
+ uses: actions/setup-go@v5
+ with:
+ go-version: ${{ matrix.version }}
+ - name: Checkout
+ if: needs.file-check.outputs.run == 'true'
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+ - name: Go mod download
+ if: needs.file-check.outputs.run == 'true'
+ run: go mod download
+ working-directory: ${{ matrix.module }}
+ - name: Compile
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ CGO_ENABLED=0 go build -o /tmp/go-test-build ${{ matrix.build_target }}
+ /tmp/go-test-build --help || true
+ working-directory: ${{ matrix.module }}
+ - name: Go fmt
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ go fmt ./... | tee modified-files
+ [ "$(wc -l modified-files | cut -f 1 -d ' ')" -eq 0 ] || exit 1
+ working-directory: ${{ matrix.module }}
+ - name: Go vet
+ if: needs.file-check.outputs.run == 'true'
+ run: go vet ./...
+ working-directory: ${{ matrix.module }}
+ - name: Set up gotestfmt
+ if: needs.file-check.outputs.run == 'true'
+ uses: GoTestTools/gotestfmt-action@v2
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ version: v2.0.0
+ - name: Go test
+ if: needs.file-check.outputs.run == 'true'
+ run: |
+ set -euo pipefail
+ go test -json ./... -race -count=1 2>&1 | gotestfmt -hide all
+ working-directory: ${{ matrix.module }}
diff --git a/.github/workflows/kickstart-upload.yml b/.github/workflows/kickstart-upload.yml
new file mode 100644
index 000000000..46505a4a3
--- /dev/null
+++ b/.github/workflows/kickstart-upload.yml
@@ -0,0 +1,59 @@
+---
+# Upload the kickstart script to the repo server
+name: Upload Kickstart Script
+on:
+ push:
+ branches:
+ - master
+ paths:
+ - .github/workflows/kickstart-upload.yml
+ - packaging/installer/kickstart.sh
+ workflow_dispatch: null
+concurrency:
+ group: kickstart-upload
+ cancel-in-progress: true
+jobs:
+ upload:
+ name: Upload Kickstart Script
+ runs-on: ubuntu-latest
+ if: github.repository == 'netdata/netdata'
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+ - name: SSH setup
+ id: ssh-setup
+ uses: shimataro/ssh-key-action@v2
+ with:
+ key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
+ name: id_ecdsa
+ known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
+ - name: Upload to packages.netdata.cloud
+ id: upload-primary
+ continue-on-error: true
+ run: rsync -vp packaging/installer/kickstart.sh netdatabot@packages.netdata.cloud:/home/netdatabot/incoming/kickstart.sh
+ - name: Upload to packages2.netdata.cloud
+ id: upload-packages2
+ run: rsync -vp packaging/installer/kickstart.sh netdatabot@packages2.netdata.cloud:/home/netdatabot/incoming/kickstart.sh
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Kickstart upload failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Failed to upload updated kickstart script to repo server.
+ Checkout: ${{ steps.checkout.outcome }}
+ Import SSH Key: ${{ steps.ssh-setup.outcome }}
+ Upload to packages.netdata.cloud: ${{ steps.upload-primary.outcome }}
+ Upload to packages2.netdata.cloud: ${{ steps.upload-packages2.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && startsWith(github.ref, 'refs/heads/master')
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index a1e3b52fe..3f66b98f4 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -14,7 +14,7 @@ jobs:
contents: read
pull-requests: write
steps:
- - uses: actions/labeler@v4
+ - uses: actions/labeler@v5
if: github.repository == 'netdata/netdata'
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
diff --git a/.github/workflows/monitor-releases.yml b/.github/workflows/monitor-releases.yml
index 649cf68aa..e4269f3c1 100644
--- a/.github/workflows/monitor-releases.yml
+++ b/.github/workflows/monitor-releases.yml
@@ -1,5 +1,5 @@
---
-name: Monitor-releases
+name: Monitor releases
on:
release:
@@ -12,13 +12,13 @@ on:
default: 'stable'
-concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type.
+concurrency:
group: monitor-{{ github.event.inputs.channel }}-releases-${{ github.ref }}-${{ github.event_name }}
cancel-in-progress: true
jobs:
- update-stable-agents-metadata:
- name: update-stable-agents-metadata
+ update-agents-metadata:
+ name: update-agents-metadata
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -26,8 +26,16 @@ jobs:
uses: actions/checkout@v4
with:
token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
- - name: Init python environment
- uses: actions/setup-python@v4
+ - name: Overwrite defaults
+ id: ow-defaults
+ run: |
+ if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ echo "RELEASE_CHANNEL=${{ github.event.inputs.channel }}" >> "${GITHUB_ENV}"
+ else
+ echo "RELEASE_CHANNEL=stable" >> "${GITHUB_ENV}"
+ fi
+ - name: Init Python environment
+ uses: actions/setup-python@v5
id: init-python
with:
python-version: "3.12"
@@ -38,7 +46,7 @@ jobs:
- name: Check for newer versions
id: check-newer-releases
run: |
- python .github/scripts/check_latest_versions_per_channel.py "${{ github.event.inputs.channel }}"
+ python .github/scripts/check_latest_versions_per_channel.py "${{ env.RELEASE_CHANNEL }}"
- name: SSH setup
id: ssh-setup
if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-newer-releases.outputs.versions_needs_update == 'true'
@@ -47,11 +55,17 @@ jobs:
key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
name: id_ecdsa
known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
- - name: Sync newer releases
+ - name: Sync newer releases to packages.netdata.cloud
id: sync-releases
+ continue-on-error: true
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-newer-releases.outputs.versions_needs_update == 'true'
+ run: |
+ .github/scripts/upload-new-version-tags.sh packages.netdata.cloud
+ - name: Sync newer releases to packages2.netdata.cloud
+ id: sync-releases2
if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && steps.check-newer-releases.outputs.versions_needs_update == 'true'
run: |
- .github/scripts/upload-new-version-tags.sh
+ .github/scripts/upload-new-version-tags.sh packages2.netdata.cloud
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
@@ -63,10 +77,12 @@ jobs:
SLACK_MESSAGE: |-
${{ github.repository }}: Failed to update stable Agent's metadata.
Checkout: ${{ steps.checkout.outcome }}
+ Overwrite inputs: ${{ steps.ow-defaults.outcome }}
Init python: ${{ steps.init-python.outcome }}
Setup python: ${{ steps.setup-python.outcome }}
Check for newer stable releaes: ${{ steps.check-newer-releases.outcome }}
Setup ssh: ${{ steps.ssh-setup.outcome }}
Syncing newer release to packages.netdata.cloud : ${{ steps.sync-releases.outcome }}
+ Syncing newer release to packages2.netdata.cloud: ${{ steps.sync-releases2.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: failure()
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
index eb936c4d9..ebfba8072 100644
--- a/.github/workflows/packaging.yml
+++ b/.github/workflows/packaging.yml
@@ -40,39 +40,47 @@ jobs:
fetch-depth: 0
submodules: recursive
- name: Check files
- id: file-check
- uses: tj-actions/changed-files@v40
+ id: check-files
+ uses: tj-actions/changed-files@v44
with:
since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
files: |
- **.c
- **.cc
- **.h
- **.hh
- **.in
+ **/*.c
+ **/*.cc
+ **/*.h
+ **/*.hh
+ **/*.in
+ **/*.patch
+ **/*.cmake
netdata.spec.in
- configure.ac
- **/Makefile*
- Makefile*
+ CMakeLists.txt
.github/data/distros.yml
.github/workflows/packaging.yml
.github/scripts/gen-matrix-packaging.py
.github/scripts/pkg-test.sh
- build/**
+ packaging/cmake/
packaging/*.sh
- packaging/*.checksums
packaging/*.version
- contrib/debian/**
- aclk/aclk-schemas/
- ml/dlib/
- mqtt_websockets
- web/server/h2o/libh2o
+ packaging/*.checksums
+ src/aclk/aclk-schemas/
+ src/ml/dlib/
+ src/fluent-bit/
+ src/web/server/h2o/libh2o/
files_ignore: |
- **.md
+ **/*.md
+ packaging/repoconfig/
+ - name: List all changed files in pattern
+ continue-on-error: true
+ env:
+ ALL_CHANGED_FILES: ${{ steps.check-files.outputs.all_changed_files }}
+ run: |
+ for file in ${ALL_CHANGED_FILES}; do
+ echo "$file was changed"
+ done
- name: Check Run
id: check-run
run: |
- if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+ if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo 'run=true' >> "${GITHUB_OUTPUT}"
else
echo 'run=false' >> "${GITHUB_OUTPUT}"
@@ -90,7 +98,8 @@ jobs:
- name: Prepare tools
id: prepare
run: |
- sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ sudo apt-get update || true
+ sudo apt-get install -y python3-ruamel.yaml
- name: Read build matrix
id: set-matrix
run: |
@@ -207,6 +216,18 @@ jobs:
with:
fetch-depth: 0 # We need full history for versioning
submodules: recursive
+ - name: Set Sentry telemetry env vars
+ id: set-telemetry-env-vars
+ run: |
+ if [ "${{ github.repository }}" = 'netdata/netdata' ] && \
+ [ "${{ matrix.bundle_sentry }}" = 'true' ] && \
+ [ "${{ github.event_name }}" = 'workflow_dispatch' ]; then
+ echo "RELEASE_PIPELINE=Production" >> "${GITHUB_ENV}"
+ echo "UPLOAD_SENTRY=true" >> "${GITHUB_ENV}"
+ else
+ echo "RELEASE_PIPELINE=Unknown" >> "${GITHUB_ENV}"
+ echo "UPLOAD_SENTRY=false" >> "${GITHUB_ENV}"
+ fi
- name: Setup QEMU
id: qemu
if: matrix.platform != 'linux/amd64' && matrix.platform != 'linux/i386' && needs.file-check.outputs.run == 'true'
@@ -221,26 +242,29 @@ jobs:
- name: Fetch images
id: fetch-images
if: needs.file-check.outputs.run == 'true'
- uses: nick-invision/retry@v2
+ uses: nick-invision/retry@v3
with:
max_attempts: 3
retry_wait_seconds: 30
timeout_seconds: 900
command: |
docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}
- docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-v1
+ docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-${{ matrix.builder_rev }}
- name: Build Packages
id: build
if: needs.file-check.outputs.run == 'true'
shell: bash
run: |
docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e VERSION=${{ needs.version-check.outputs.version }} \
- --platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-v1
+ -e ENABLE_SENTRY=${{ matrix.bundle_sentry }} -e RELEASE_PIPELINE=${{ env.RELEASE_PIPELINE }} \
+ -e BUILD_DESTINATION=${{ matrix.distro }}${{ matrix.version }}_${{ matrix.arch }} -e UPLOAD_SENTRY=${{ env.UPLOAD_SENTRY }} \
+ -e SENTRY_AUTH_TOKEN=${{ secrets.SENTRY_CLI_TOKEN }} -e NETDATA_SENTRY_DSN=${{ secrets.SENTRY_DSN }} \
+ --platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-${{ matrix.builder_rev }}
- name: Save Packages
id: artifacts
if: needs.file-check.outputs.run == 'true'
continue-on-error: true
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-packages
path: ${{ github.workspace }}/artifacts/*
@@ -277,9 +301,21 @@ jobs:
known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
- name: Upload to packages.netdata.cloud
id: package-upload
+ continue-on-error: true
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true'
+ run: |
+ .github/scripts/package-upload.sh \
+ packages.netdata.cloud \
+ ${{ matrix.repo_distro }} \
+ ${{ matrix.arch }} \
+ ${{ matrix.format }} \
+ ${{ needs.version-check.outputs.repo }}
+ - name: Upload to packages2.netdata.cloud
+ id: package2-upload
if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true'
run: |
.github/scripts/package-upload.sh \
+ packages2.netdata.cloud \
${{ matrix.repo_distro }} \
${{ matrix.arch }} \
${{ matrix.format }} \
@@ -302,6 +338,7 @@ jobs:
Publish to PackageCloud: ${{ steps.upload.outcome }}
Import SSH Key: ${{ steps.ssh-setup.outcome }}
Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
+ Publish to packages2.netdata.cloud: ${{ steps.package2-upload.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
diff --git a/.github/workflows/platform-eol-check.yml b/.github/workflows/platform-eol-check.yml
index ae290a973..185640f2f 100644
--- a/.github/workflows/platform-eol-check.yml
+++ b/.github/workflows/platform-eol-check.yml
@@ -26,7 +26,8 @@ jobs:
- name: Prepare tools
id: prepare
run: |
- sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ sudo apt-get update || true
+ sudo apt-get install -y python3-ruamel.yaml
- name: Read build matrix
id: set-matrix
run: |
@@ -118,7 +119,7 @@ jobs:
if: steps.check.outputs.pending == 'true' && steps.existing.outputs.exists == 'false'
uses: imjohnbo/issue-bot@v3
with:
- assignees: Ferroin, tkatsoulas
+ assignees: Ferroin
labels: area/packaging, needs triage
title: ${{ steps.title.outputs.title }}
body: |
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 2fa51cc52..91f2d5493 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -46,7 +46,7 @@ jobs:
- name: Generate Nightly Changleog
id: nightly-changelog
if: steps.target.outputs.run == 'true' && steps.target.outputs.type == 'nightly'
- uses: heinrichreimer/github-changelog-generator-action@v2.3
+ uses: heinrichreimer/github-changelog-generator-action@v2.4
with:
bugLabels: IGNOREBUGS
excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
@@ -59,7 +59,7 @@ jobs:
- name: Generate Release Changelog
id: release-changelog
if: steps.target.outputs.run == 'true' && steps.target.outputs.type != 'nightly'
- uses: heinrichreimer/github-changelog-generator-action@v2.3
+ uses: heinrichreimer/github-changelog-generator-action@v2.4
with:
bugLabels: IGNOREBUGS
excludeLabels: "stale,duplicate,question,invalid,wontfix,discussion,no changelog"
diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml
index df8fac204..8b43a2358 100644
--- a/.github/workflows/repoconfig-packages.yml
+++ b/.github/workflows/repoconfig-packages.yml
@@ -31,7 +31,8 @@ jobs:
- name: Prepare tools
id: prepare
run: |
- sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ sudo apt-get update || true
+ sudo apt-get install -y python3-ruamel.yaml
- name: Read build matrix
id: set-matrix
run: |
@@ -81,7 +82,7 @@ jobs:
# Unlike normally, we do not need a deep clone or submodules for this.
- name: Fetch base image
id: fetch-images
- uses: nick-invision/retry@v2
+ uses: nick-invision/retry@v3
with:
max_attempts: 3
retry_wait_seconds: 30
@@ -102,14 +103,8 @@ jobs:
env:
PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
run: |
- printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ printf "Packages to upload:\n%s" "$(ls packaging/repoconfig/artifacts/*.${{ matrix.format }})"
for pkgfile in artifacts/*.${{ matrix.format }} ; do
- .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" \
- "$(basename "${pkgfile}")" || true
- .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" "${pkgfile}"
- .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" \
- "$(basename "${pkgfile}")" || true
- .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" "${pkgfile}"
.github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" \
"$(basename "${pkgfile}")" || true
.github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" "${pkgfile}"
@@ -124,16 +119,32 @@ jobs:
known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
- name: Upload to packages.netdata.cloud
id: package-upload
+ continue-on-error: true
+ if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
+ run: |
+ # shellcheck disable=SC2043
+ for arch in ${{ matrix.arches }}; do
+ .github/scripts/package-upload.sh \
+ packages.netdata.cloud \
+ "${{ matrix.pkgclouddistro }}" \
+ "${arch}" \
+ "${{ matrix.format }}" \
+ netdata/netdata-repoconfig \
+ packaging/repoconfig/artifacts
+ done
+ - name: Upload to packages2.netdata.cloud
+ id: package2-upload
if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
run: |
+ # shellcheck disable=SC2043
for arch in ${{ matrix.arches }}; do
- for suffix in '' -edge -repoconfig ; do
.github/scripts/package-upload.sh \
- ${{ matrix.pkgclouddistro }} \
- ${arch} \
- ${{ matrix.format }} \
- netdata/netdata${suffix}
- done
+ packages2.netdata.cloud \
+ "${{ matrix.pkgclouddistro }}" \
+ "${arch}" \
+ "${{ matrix.format }}" \
+ netdata/netdata-repoconfig \
+ packaging/repoconfig/artifacts
done
- name: Failure Notification
if: ${{ failure() && github.repository == 'netdata/netdata' }}
@@ -152,4 +163,5 @@ jobs:
Publish to PackageCloud: ${{ steps.publish.outcome }}
Import SSH Key: ${{ steps.ssh-setup.outcome }}
Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
+ Publish to packages2.netdata.cloud: ${{ steps.package2-upload.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
index 8cb61f2cf..30b09ec06 100644
--- a/.github/workflows/review.yml
+++ b/.github/workflows/review.yml
@@ -15,9 +15,9 @@ jobs:
runs-on: ubuntu-latest
outputs:
actionlint: ${{ steps.actionlint.outputs.run }}
- clangformat: ${{ steps.clangformat.outputs.run }}
- eslint: ${{ steps.eslint.outputs.run }}
+ # clangformat: ${{ steps.clangformat.outputs.run }}
flake8: ${{ steps.flake8.outputs.run }}
+ golangci-lint: ${{ steps.golangci-lint.outputs.run }}
hadolint: ${{ steps.hadolint.outputs.run }}
shellcheck: ${{ steps.shellcheck.outputs.run }}
yamllint: ${{ steps.yamllint.outputs.run }}
@@ -49,25 +49,25 @@ jobs:
# else
# echo "run=false" >> "${GITHUB_OUTPUT}"
# fi
- - name: Check files for eslint
- id: eslint
+ - name: Check files for flake8
+ id: flake8
run: |
- if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/eslint') }}" = "true" ]; then
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/flake8') }}" = "true" ]; then
echo "run=true" >> "${GITHUB_OUTPUT}"
- elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -v "web/gui/v1" | grep -v "web/gui/v2" | grep -v "integrations/" | grep -Eq '.*\.js' ; then
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.py' ; then
echo "run=true" >> "${GITHUB_OUTPUT}"
- echo 'JS files have changed, need to run ESLint.'
+ echo 'Python files have changed, need to run flake8.'
else
echo "run=false" >> "${GITHUB_OUTPUT}"
fi
- - name: Check files for flake8
- id: flake8
+ - name: Check files for golangci-lint
+ id: golangci-lint
run: |
- if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/flake8') }}" = "true" ]; then
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/golangci-lint') }}" = "true" ]; then
echo "run=true" >> "${GITHUB_OUTPUT}"
- elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.py' ; then
+ elif git diff --name-only origin/"${{ github.base_ref }}" HEAD -- | grep -Eq '.*\.go'; then
echo "run=true" >> "${GITHUB_OUTPUT}"
- echo 'Python files have changed, need to run flake8.'
+ echo 'Go code has changed, need to run golangci-lint.'
else
echo "run=false" >> "${GITHUB_OUTPUT}"
fi
@@ -122,58 +122,38 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
reporter: github-pr-check
- clang-format:
- name: clang-format
- needs: prep-review
- if: needs.prep-review.outputs.clangformat == 'true'
- runs-on: ubuntu-latest
- steps:
- - name: Git clone repository
- uses: actions/checkout@v4
- with:
- submodules: false
- fetch-depth: 0
- - name: Check for label
- id: label
- run: |
- if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/clang-format') }}" = "true" ]; then
- echo 'check-all=true' >> "${GITHUB_OUTPUT}"
- else
- echo 'check-all=false' >> "${GITHUB_OUTPUT}"
- fi
- - name: Run clang-format
- run: |
- if [ "${{ steps.label.outputs.check-all }}" == 'true' ]; then
- find . -regex '.*\.\(c\|cpp\|cxx\|h\|hpp\|hxx\)$' -exec clang-format -i --style=file '{}' \;
- else
- git diff --name-only origin/${{ github.base_ref }} HEAD | grep -E '\.cpp$|\.cxx$|\.c$|\.hpp$|\.hxx$|\.h$' | \
- xargs -n 1 -r clang-format -i --style=file
- fi
- git status --porcelain=v1 > /tmp/porcelain
- if [ -s /tmp/porcelain ]; then
- cat /tmp/porcelain
- exit 1
- fi
-
- eslint:
- name: eslint
- needs: prep-review
- if: needs.prep-review.outputs.eslint == 'true'
- runs-on: ubuntu-latest
- steps:
- - name: Git clone repository
- uses: actions/checkout@v4
- with:
- submodules: recursive
- fetch-depth: 0
- - name: Install eslint
- run: npm install eslint -D
- - name: Run eslint
- uses: reviewdog/action-eslint@v1
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- reporter: github-pr-check
- eslint_flags: '.'
+# clang-format:
+# name: clang-format
+# needs: prep-review
+# if: needs.prep-review.outputs.clangformat == 'true'
+# runs-on: ubuntu-latest
+# steps:
+# - name: Git clone repository
+# uses: actions/checkout@v4
+# with:
+# submodules: false
+# fetch-depth: 0
+# - name: Check for label
+# id: label
+# run: |
+# if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/clang-format') }}" = "true" ]; then
+# echo 'check-all=true' >> "${GITHUB_OUTPUT}"
+# else
+# echo 'check-all=false' >> "${GITHUB_OUTPUT}"
+# fi
+# - name: Run clang-format
+# run: |
+# if [ "${{ steps.label.outputs.check-all }}" == 'true' ]; then
+# find . -regex '.*\.\(c\|cpp\|cxx\|h\|hpp\|hxx\)$' -exec clang-format -i --style=file '{}' \;
+# else
+# git diff --name-only origin/${{ github.base_ref }} HEAD | grep -E '\.cpp$|\.cxx$|\.c$|\.hpp$|\.hxx$|\.h$' | \
+# xargs -n 1 -r clang-format -i --style=file
+# fi
+# git status --porcelain=v1 > /tmp/porcelain
+# if [ -s /tmp/porcelain ]; then
+# cat /tmp/porcelain
+# exit 1
+# fi
flake8:
name: flake8
@@ -187,7 +167,7 @@ jobs:
submodules: recursive
fetch-depth: 0
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Run flake8
@@ -196,6 +176,26 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
reporter: github-pr-check
+ golangci-lint:
+ name: golangci-lint
+ needs: prep-review
+ if: needs.prep-review.outputs.golangci-lint == 'true'
+ strategy:
+ matrix:
+ tree:
+ - src/go/collectors/go.d.plugin
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Run golangci-lint
+ uses: reviewdog/action-golangci-lint@v2
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+ golangci_lint_flags: '--timeout=10m'
+ workdir: ${{ matrix.tree }}
+
hadolint:
name: hadolint
needs: prep-review