author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-10-17 09:30:23 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-10-17 09:30:23 +0000
commit    | 517a443636daa1e8085cb4e5325524a54e8a8fd7 (patch)
tree      | 5352109cc7cd5122274ab0cfc1f887b685f04edf /.github
parent    | Releasing debian version 1.42.4-1. (diff)
download  | netdata-517a443636daa1e8085cb4e5325524a54e8a8fd7.tar.xz
          | netdata-517a443636daa1e8085cb4e5325524a54e8a8fd7.zip
Merging upstream version 1.43.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '.github')
23 files changed, 519 insertions, 544 deletions
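
The bulk of this merge replaces the per-workflow `paths`/`paths-ignore` filters and the `*-dummy.yml` placeholder workflows with a shared `file-check` job that decides whether the rest of each workflow needs to run, plus per-step `if:` guards. Below is a condensed sketch of that pattern: the `file-check` steps, action versions, and shell snippet mirror the hunks in this diff, while the workflow name, trigger list, path list, and the guarded `build` job body are illustrative placeholders rather than a copy of any one workflow.

```yaml
# Condensed sketch of the shared file-check pattern this merge introduces.
# The file-check steps, action versions, and shell snippet mirror the diff;
# the workflow name, triggers, path list, and the "build" job body are
# illustrative placeholders.
name: Example
on:
  push:
    branches:
      - master
  pull_request: null
jobs:
  file-check: # Decide whether the rest of the workflow needs to run.
    name: Check Modified Files
    runs-on: ubuntu-latest
    outputs:
      run: ${{ steps.check-run.outputs.run }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: recursive
      - name: Check files
        id: check-files
        uses: tj-actions/changed-files@v39
        with:
          since_last_remote_commit: ${{ github.event_name != 'pull_request' }}
          files: |
            **.c
            configure.ac
          files_ignore: |
            **.md
      - name: Check Run
        id: check-run
        run: |
          if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
            echo 'run=true' >> "${GITHUB_OUTPUT}"
          else
            echo 'run=false' >> "${GITHUB_OUTPUT}"
          fi
  build: # Every downstream job declares the dependency and guards each step.
    name: Build
    runs-on: ubuntu-latest
    needs:
      - file-check
    steps:
      - name: Skip Check
        if: needs.file-check.outputs.run != 'true'
        run: echo "SKIPPED"
      - name: Checkout
        if: needs.file-check.outputs.run == 'true'
        uses: actions/checkout@v4
      - name: Build
        if: needs.file-check.outputs.run == 'true'
        run: echo "real build steps go here" # placeholder
```

Because every guarded step still reports success when it is skipped, the required status checks stay green on PRs that touch none of the listed files, which is the role the deleted `*-dummy.yml` workflows used to play.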
diff --git a/.github/data/distros.yml b/.github/data/distros.yml index cdd0faf06..bd71fd15f 100644 --- a/.github/data/distros.yml +++ b/.github/data/distros.yml @@ -60,34 +60,6 @@ include: test: ebpf-core: true - - &alma - distro: almalinux - version: "9" - support_type: Core - notes: '' - jsonc_removal: | - dnf remove -y json-c-devel - eol_check: true - packages: &alma_packages - type: rpm - repo_distro: el/9 - alt_links: - - el/9Server - - el/9Client - arches: - - x86_64 - - aarch64 - test: - ebpf-core: true - - <<: *alma - version: "8" - packages: - <<: *alma_packages - repo_distro: el/8 - alt_links: - - el/8Server - - el/8Client - - &amzn distro: amazonlinux version: "2" @@ -108,7 +80,6 @@ include: <<: *amzn_packages repo_distro: amazonlinux/2023 - - distro: centos version: "7" support_type: Core @@ -125,6 +96,30 @@ include: test: ebpf-core: false + - ¢os_stream + distro: centos-stream + base_image: 'quay.io/centos/centos:stream9' + version: '9' + support_type: 'Community' + notes: '' + jsonc_removal: | + dnf remove -y json-c-devel + eol_check: true + packages: &cs_packages + type: rpm + repo_distro: el/c9s + arches: + - x86_64 + - aarch64 + test: + ebpf-core: true + - <<: *centos_stream + version: '8' + base_image: 'quay.io/centos/centos:stream8' + packages: + <<: *cs_packages + repo_distro: el/c8s + - &debian distro: debian version: "12" @@ -165,7 +160,7 @@ include: - &fedora distro: fedora - version: "38" + version: "39" support_type: Core notes: '' eol_check: true @@ -173,13 +168,20 @@ include: dnf remove -y json-c-devel packages: &fedora_packages type: rpm - repo_distro: fedora/38 + repo_distro: fedora/39 arches: - x86_64 - aarch64 test: ebpf-core: true - <<: *fedora + version: "38" + packages: + <<: *fedora_packages + repo_distro: fedora/38 + test: + ebpf-core: true + - <<: *fedora version: "37" packages: <<: *fedora_packages @@ -198,7 +200,7 @@ include: zypper rm -y libjson-c-devel packages: &opensuse_packages type: rpm - repo_distro: opensuse/leap:15.5 + repo_distro: opensuse/15.5 arches: - x86_64 - aarch64 @@ -235,6 +237,34 @@ include: <<: *oracle_packages repo_distro: ol/9 + - &rocky + distro: rockylinux + version: "9" + support_type: Core + notes: '' + jsonc_removal: | + dnf remove -y json-c-devel + eol_check: true + packages: &rocky_packages + type: rpm + repo_distro: el/9 + alt_links: + - el/9Server + - el/9Client + arches: + - x86_64 + - aarch64 + test: + ebpf-core: true + - <<: *rocky + version: "8" + packages: + <<: *rocky_packages + repo_distro: el/8 + alt_links: + - el/8Server + - el/8Client + - &ubuntu distro: ubuntu version: "22.04" @@ -255,6 +285,11 @@ include: test: ebpf-core: true - <<: *ubuntu + version: "23.10" + packages: + <<: *ubuntu_packages + repo_distro: ubuntu/mantic + - <<: *ubuntu version: "23.04" packages: <<: *ubuntu_packages diff --git a/.github/scripts/ci-support-pkgs.sh b/.github/scripts/ci-support-pkgs.sh index 9ba11b68e..5cedbf3b9 100755 --- a/.github/scripts/ci-support-pkgs.sh +++ b/.github/scripts/ci-support-pkgs.sh @@ -9,7 +9,8 @@ set -e case "${ID}" in amzn|almalinux|centos|fedora) - dnf install -y procps-ng cronie cronie-anacron || yum install -y procps-ng cronie cronie-anacron + dnf install -y procps-ng cronie cronie-anacron || \ + yum install -y procps-ng cronie cronie-anacron ;; arch) pacman -S --noconfirm cronie diff --git a/.github/scripts/get-static-cache-key.sh b/.github/scripts/get-static-cache-key.sh index 3b07088f4..5093b3327 100755 --- a/.github/scripts/get-static-cache-key.sh +++ b/.github/scripts/get-static-cache-key.sh @@ 
-2,13 +2,14 @@ arch="${1}" platform="$(packaging/makeself/uname2platform.sh "${arch}")" +builder_rev="v1" -docker pull --platform "${platform}" netdata/static-builder +docker pull --platform "${platform}" netdata/static-builder:${builder_rev} # shellcheck disable=SC2046 cat $(find packaging/makeself/jobs -type f ! -regex '.*\(netdata\|-makeself\).*') > /tmp/static-cache-key-data -docker run -it --rm --platform "${platform}" netdata/static-builder sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data +docker run -it --rm --platform "${platform}" netdata/static-builder:${builder_rev} sh -c 'apk list -I 2>/dev/null' >> /tmp/static-cache-key-data h="$(sha256sum /tmp/static-cache-key-data | cut -f 1 -d ' ')" diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh index 85e8b2e8d..35767bf2e 100755 --- a/.github/scripts/pkg-test.sh +++ b/.github/scripts/pkg-test.sh @@ -14,7 +14,9 @@ install_debian_like() { # Install Netdata # Strange quoting is required here so that glob matching works. - apt-get install -y $(find /netdata/artifacts -type f -name 'netdata*.deb' ! -name '*dbgsym*' ! -name '*cups*' ! -name '*freeipmi*') || exit 3 + # shellcheck disable=SC2046 + apt-get install -y $(find /netdata/artifacts -type f -name 'netdata*.deb' \ +! -name '*dbgsym*' ! -name '*cups*' ! -name '*freeipmi*') || exit 3 # Install testing tools apt-get install -y --no-install-recommends curl "${netcat}" jq || exit 1 @@ -32,10 +34,10 @@ install_fedora_like() { # Install Netdata # Strange quoting is required here so that glob matching works. - "$PKGMGR" install -y /netdata/artifacts/netdata*.rpm || exit 1 + "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1 # Install testing tools - "$PKGMGR" install -y curl nc jq || exit 1 + "${PKGMGR}" install -y curl nc jq || exit 1 } install_centos() { @@ -49,15 +51,15 @@ install_centos() { fi # Install EPEL (needed for `jq` - "$PKGMGR" install -y epel-release || exit 1 + "${PKGMGR}" install -y epel-release || exit 1 # Install Netdata # Strange quoting is required here so that glob matching works. - "$PKGMGR" install -y /netdata/artifacts/netdata*.rpm || exit 1 + "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1 # Install testing tools # shellcheck disable=SC2086 - "$PKGMGR" install -y ${opts} curl nc jq || exit 1 + "${PKGMGR}" install -y ${opts} curl nc jq || exit 1 } install_amazon_linux() { @@ -69,11 +71,11 @@ install_amazon_linux() { # Install Netdata # Strange quoting is required here so that glob matching works. - "$PKGMGR" install -y /netdata/artifacts/netdata*.rpm || exit 1 + "${PKGMGR}" install -y /netdata/artifacts/netdata*.rpm || exit 1 # Install testing tools # shellcheck disable=SC2086 - "$PKGMGR" install -y ${opts} curl nc jq || exit 1 + "${PKGMGR}" install -y ${opts} curl nc jq || exit 1 } install_suse_like() { @@ -130,7 +132,7 @@ case "${DISTRO}" in fedora | oraclelinux) install_fedora_like ;; - centos | rockylinux | almalinux) + centos| centos-stream | rockylinux | almalinux) install_centos ;; amazonlinux) diff --git a/.github/workflows/build-dummy.yml b/.github/workflows/build-dummy.yml deleted file mode 100644 index 6bf327e2d..000000000 --- a/.github/workflows/build-dummy.yml +++ /dev/null @@ -1,127 +0,0 @@ ---- -# Ci code for building release artifacts. -# -# This workflow exists so we can require these checks to pass, but skip -# them on PRs that have nothing to do with the source code. -name: Build -on: - pull_request: # PR checks only validate the build and generate artifacts for testing. 
- paths-ignore: # This MUST be kept in-sync with the paths-ignore key for the build-dummy.yml workflow. - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - 'configure.ac' - - 'netdata-installer.sh' - - '**/Makefile*' - - 'Makefile*' - - '.github/workflows/build.yml' - - '.github/scripts/build-static.sh' - - '.github/scripts/get-static-cache-key.sh' - - '.github/scripts/gen-matrix-build.py' - - '.github/scripts/run-updater-check.sh' - - 'build/**' - - 'packaging/makeself/**' - - 'packaging/installer/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' - - '!**.md' -concurrency: # This keeps multiple instances of the job from running concurrently for the same ref and event type. - group: build-${{ github.ref }}-${{ github.event_name }} - cancel-in-progress: true -jobs: - build-dist: # Build the distribution tarball and store it as an artifact. - name: Build Distribution Tarball - runs-on: ubuntu-latest - steps: - - run: echo 'NOT REQUIRED' - - build-static: # Build the static binary archives, and store them as artifacts. - name: Build Static - runs-on: ubuntu-latest - strategy: - matrix: - arch: - - x86_64 - - armv7l - - aarch64 - - ppc64le - steps: - - run: echo 'NOT REQUIRED' - - matrix: # Generate the shared build matrix for our build tests. - name: Prepare Build Matrix - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - steps: - - name: Checkout - id: checkout - uses: actions/checkout@v3 - - name: Prepare tools - id: prepare - run: | - sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml - - name: Read build matrix - id: set-matrix - run: | - matrix="$(.github/scripts/gen-matrix-build.py)" - echo "Generated matrix: ${matrix}" - echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}" - - prepare-test-images: # Prepare the test environments for our build checks. This also checks dependency handling code for each tested environment. - name: Prepare Test Environments - runs-on: ubuntu-latest - needs: - - matrix - env: - RETRY_DELAY: 300 - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.matrix.outputs.matrix) }} - steps: - - run: echo 'NOT REQUIRED' - - source-build: # Test various source build arrangements. - name: Test Source Build - runs-on: ubuntu-latest - needs: - - matrix - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.matrix.outputs.matrix) }} - steps: - - run: echo 'NOT REQUIRED' - - updater-check: # Test the generated dist archive using the updater code. - name: Test Generated Distfile and Updater Code - runs-on: ubuntu-latest - needs: - - matrix - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.matrix.outputs.matrix) }} - steps: - - run: echo 'NOT REQUIRED' - - prepare-upload: # Consolidate the artifacts for uploading or releasing. - name: Prepare Artifacts - runs-on: ubuntu-latest - steps: - - run: echo 'NOT REQUIRED' - - artifact-verification-dist: # Verify the regular installer works with the consolidated artifacts. - name: Test Consolidated Artifacts (Source) - runs-on: ubuntu-latest - steps: - - run: echo 'NOT REQUIRED' - - artifact-verification-static: # Verify the static installer works with the consolidated artifacts. 
- name: Test Consolidated Artifacts (Static) - runs-on: ubuntu-latest - steps: - - run: echo 'NOT REQUIRED' diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 180574a3c..2aabbcf2d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,31 +5,7 @@ on: push: # Master branch checks only validate the build and generate artifacts for testing. branches: - master - pull_request: # PR checks only validate the build and generate artifacts for testing. - paths: # This MUST be kept in-sync with the paths-ignore key for the build-dummy.yml workflow. - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - 'configure.ac' - - 'netdata-installer.sh' - - '**/Makefile*' - - 'Makefile*' - - '.github/workflows/build.yml' - - '.github/scripts/build-static.sh' - - '.github/scripts/get-static-cache-key.sh' - - '.github/scripts/gen-matrix-build.py' - - '.github/scripts/run-updater-check.sh' - - 'build/**' - - 'packaging/makeself/**' - - 'packaging/installer/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' - - '!**.md' + pull_request: null # PR checks only validate the build and generate artifacts for testing. workflow_dispatch: # Dispatch runs build and validate, then push to the appropriate storage location. inputs: type: @@ -44,30 +20,90 @@ concurrency: # This keeps multiple instances of the job from running concurrentl group: build-${{ github.ref }}-${{ github.event_name }} cancel-in-progress: true jobs: + file-check: # Check what files changed if we’re being run in a PR or on a push. + name: Check Modified Files + runs-on: ubuntu-latest + outputs: + run: ${{ steps.check-run.outputs.run }} + steps: + - name: Checkout + id: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + submodules: recursive + - name: Check files + id: check-files + uses: tj-actions/changed-files@v39 + with: + since_last_remote_commit: ${{ github.event_name != 'pull_request' }} + files: | + **.c + **.cc + **.h + **.hh + **.in + configure.ac + netdata-installer.sh + **/Makefile* + Makefile* + .github/data/distros.yml + .github/workflows/build.yml + .github/scripts/build-static.sh + .github/scripts/get-static-cache-key.sh + .github/scripts/gen-matrix-build.py + .github/scripts/run-updater-check.sh + build/** + packaging/makeself/** + packaging/installer/** + aclk/aclk-schemas/ + ml/dlib/ + mqtt_websockets + web/server/h2o/libh2o + files_ignore: | + netdata.spec.in + **.md + - name: Check Run + id: check-run + run: | + if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo 'run=true' >> "${GITHUB_OUTPUT}" + else + echo 'run=false' >> "${GITHUB_OUTPUT}" + fi + build-dist: # Build the distribution tarball and store it as an artifact. 
name: Build Distribution Tarball runs-on: ubuntu-latest + needs: + - file-check outputs: distfile: ${{ steps.build.outputs.distfile }} steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: fetch-depth: 0 submodules: recursive - name: Fix tags id: fix-tags - if: github.event_name != 'push' + if: github.event_name != 'push' && needs.file-check.outputs.run == 'true' run: | git fetch --tags --force - name: Mark Stable id: channel - if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly' + if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly' && needs.file-check.outputs.run == 'true' run: | sed -i 's/^RELEASE_CHANNEL="nightly"/RELEASE_CHANNEL="stable"/' netdata-installer.sh - name: Build id: build + if: needs.file-check.outputs.run == 'true' run: | git describe mkdir -p artifacts @@ -85,6 +121,7 @@ jobs: cp netdata-*.tar.gz artifacts/ - name: Store id: store + if: needs.file-check.outputs.run == 'true' uses: actions/upload-artifact@v3 with: name: dist-tarball @@ -112,11 +149,14 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} build-static: # Build the static binary archives, and store them as artifacts. name: Build Static runs-on: ubuntu-latest + needs: + - file-check strategy: matrix: arch: @@ -125,38 +165,43 @@ jobs: - aarch64 - ppc64le steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: fetch-depth: 0 submodules: recursive - name: Fix tags id: fix-tags - if: github.event_name != 'push' + if: github.event_name != 'push' && needs.file-check.outputs.run == 'true' run: | git fetch --tags --force - name: Mark Stable id: channel - if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly' + if: github.event_name == 'workflow_dispatch' && github.event.inputs.type != 'nightly' && needs.file-check.outputs.run == 'true' run: | sed -i 's/^RELEASE_CHANNEL="nightly"/RELEASE_CHANNEL="stable"/' netdata-installer.sh packaging/makeself/install-or-update.sh - name: Get Cache Key - if: github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache') + if: (github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')) && needs.file-check.outputs.run == 'true' id: cache-key run: .github/scripts/get-static-cache-key.sh ${{ matrix.arch }} "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache') }}" - name: Cache - if: github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache') + if: (github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')) && needs.file-check.outputs.run == 'true' id: cache uses: actions/cache@v3 with: path: artifacts/cache key: ${{ steps.cache-key.outputs.key }} - name: Build - if: github.event_name != 'workflow_dispatch' # Don’t use retries on PRs. + if: github.event_name != 'workflow_dispatch' && needs.file-check.outputs.run == 'true' # Don’t use retries on PRs. 
run: .github/scripts/build-static.sh ${{ matrix.arch }} - name: Build - if: github.event_name == 'workflow_dispatch' + if: github.event_name == 'workflow_dispatch' && needs.file-check.outputs.run == 'true' id: build uses: nick-fields/retry@v2 with: @@ -165,6 +210,7 @@ jobs: command: .github/scripts/build-static.sh ${{ matrix.arch }} - name: Store id: store + if: needs.file-check.outputs.run == 'true' uses: actions/upload-artifact@v3 with: name: static-archive @@ -192,6 +238,7 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} matrix: # Generate the shared build matrix for our build tests. @@ -203,7 +250,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Prepare tools id: prepare run: | @@ -252,13 +299,13 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build test environment id: build1 - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here. with: push: false @@ -276,7 +323,7 @@ jobs: - name: Build test environment (attempt 2) if: ${{ steps.build1.outcome == 'failure' }} id: build2 - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here. with: push: false @@ -294,7 +341,7 @@ jobs: - name: Build test environment (attempt 3) if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }} id: build3 - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: push: false load: false @@ -344,42 +391,53 @@ jobs: needs: - matrix - prepare-test-images + - file-check strategy: fail-fast: false max-parallel: 8 matrix: ${{ fromJson(needs.matrix.outputs.matrix) }} steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: submodules: recursive - name: Fetch test environment id: fetch + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: ${{ matrix.artifact_key }}-test-env - name: Load test environment id: load + if: needs.file-check.outputs.run == 'true' run: docker load --input image.tar - name: Regular build on ${{ matrix.distro }} id: build-basic + if: needs.file-check.outputs.run == 'true' run: | docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \ /bin/sh -c 'autoreconf -ivf && ./configure --disable-dependency-tracking && make -j2' - name: netdata-installer on ${{ matrix.distro }}, disable cloud id: build-no-cloud + if: needs.file-check.outputs.run == 'true' run: | docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --disable-cloud --one-time-build' - name: netdata-installer on ${{ matrix.distro }}, require cloud id: build-cloud + if: needs.file-check.outputs.run == 'true' run: | docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \ /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it 
--require-cloud --one-time-build' - name: netdata-installer on ${{ matrix.distro }}, require cloud, no JSON-C id: build-no-jsonc - if: matrix.jsonc_removal != '' + if: matrix.jsonc_removal != '' && needs.file-check.outputs.run == 'true' run: | docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \ /bin/sh -c '/rmjsonc.sh && ./netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build' @@ -407,6 +465,7 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} updater-check: # Test the generated dist archive using the updater code. @@ -417,6 +476,7 @@ jobs: - build-dist - matrix - prepare-test-images + - file-check strategy: fail-fast: false max-parallel: 8 @@ -429,17 +489,24 @@ jobs: volumes: - ${{ github.workspace }}:/usr/local/apache2/htdocs/ steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 - name: Fetch dist tarball artifacts id: fetch-tarball + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: dist-tarball path: dist-tarball - name: Prepare artifact directory id: prepare + if: needs.file-check.outputs.run == 'true' run: | mkdir -p artifacts/download/latest || exit 1 echo "9999.0.0-0" > artifacts/download/latest/latest-version.txt || exit 1 @@ -450,14 +517,17 @@ jobs: cat sha256sums.txt - name: Fetch test environment id: fetch-test-environment + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: ${{ matrix.artifact_key }}-test-env - name: Load test environment id: load + if: needs.file-check.outputs.run == 'true' run: docker load --input image.tar - name: Install netdata and run the updater on ${{ matrix.distro }} id: updater-check + if: needs.file-check.outputs.run == 'true' run: | docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata test:${{ matrix.artifact_key }} \ /netdata/.github/scripts/run-updater-check.sh @@ -484,6 +554,7 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} prepare-upload: # Consolidate the artifacts for uploading or releasing. @@ -492,27 +563,37 @@ jobs: needs: - build-dist - build-static + - file-check steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 - name: Prepare Environment id: prepare + if: needs.file-check.outputs.run == 'true' run: mkdir -p artifacts - name: Retrieve Dist Tarball id: fetch-dist + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: dist-tarball path: dist-tarball - name: Retrieve Static Build Artifacts id: fetch-static + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: static-archive path: static-archive - name: Prepare Artifacts id: consolidate + if: needs.file-check.outputs.run == 'true' working-directory: ./artifacts/ run: | mv ../dist-tarball/* . 
|| exit 1 @@ -524,6 +605,7 @@ jobs: cat sha256sums.txt - name: Store Artifacts id: store + if: needs.file-check.outputs.run == 'true' uses: actions/upload-artifact@v3 with: name: final-artifacts @@ -552,6 +634,7 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} artifact-verification-dist: # Verify the regular installer works with the consolidated artifacts. @@ -559,6 +642,7 @@ jobs: runs-on: ubuntu-latest needs: - prepare-upload + - file-check services: apache: # This gets used to serve the dist tarball for the updater script. image: httpd:2.4 @@ -567,22 +651,30 @@ jobs: volumes: - ${{ github.workspace }}:/usr/local/apache2/htdocs/ steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 - name: Fetch artifacts id: fetch + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: final-artifacts path: artifacts - name: Prepare artifacts directory id: prepare + if: needs.file-check.outputs.run == 'true' run: | mkdir -p download/latest mv artifacts/* download/latest - name: Verify that artifacts work with installer id: verify + if: needs.file-check.outputs.run == 'true' env: NETDATA_TARBALL_BASEURL: http://localhost:8080/ run: packaging/installer/kickstart.sh --build-only --dont-start-it --disable-telemetry --dont-wait @@ -606,6 +698,7 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} artifact-verification-static: # Verify the static installer works with the consolidated artifacts. @@ -613,6 +706,7 @@ jobs: runs-on: ubuntu-latest needs: - prepare-upload + - file-check services: apache: # This gets used to serve the static archives. image: httpd:2.4 @@ -621,22 +715,30 @@ jobs: volumes: - ${{ github.workspace }}:/usr/local/apache2/htdocs/ steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 - name: Fetch artifacts id: fetch-artifacts + if: needs.file-check.outputs.run == 'true' uses: actions/download-artifact@v3 with: name: final-artifacts path: artifacts - name: Prepare artifacts directory id: prepare + if: needs.file-check.outputs.run == 'true' run: | mkdir -p download/latest mv artifacts/* download/latest - name: Verify that artifacts work with installer id: verify + if: needs.file-check.outputs.run == 'true' env: NETDATA_TARBALL_BASEURL: http://localhost:8080/ run: packaging/installer/kickstart.sh --static-only --dont-start-it --disable-telemetry @@ -660,6 +762,7 @@ jobs: && startsWith(github.ref, 'refs/heads/master') && github.event_name != 'pull_request' && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} upload-nightly: # Upload the nightly build artifacts to GCS. 
@@ -725,12 +828,12 @@ jobs: steps: - name: Checkout Main Repo id: checkout-main - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: main - name: Checkout Nightly Repo id: checkout-nightly - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: netdata/netdata-nightlies path: nightlies @@ -811,7 +914,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Retrieve Artifacts id: fetch uses: actions/download-artifact@v3 diff --git a/.github/workflows/checks-dummy.yml b/.github/workflows/checks-dummy.yml deleted file mode 100644 index 369d70ff9..000000000 --- a/.github/workflows/checks-dummy.yml +++ /dev/null @@ -1,42 +0,0 @@ ---- -name: Checks -on: - pull_request: - paths-ignore: # This MUST be kept in sync with the paths key for the checks.yml workflow. - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - 'configure.ac' - - '**/Makefile*' - - 'Makefile*' - - '.gitignore' - - '.github/workflows/checks.yml' - - 'build/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' -env: - DISABLE_TELEMETRY: 1 -concurrency: - group: checks-${{ github.ref }} - cancel-in-progress: true -jobs: - libressl-checks: - name: LibreSSL - runs-on: ubuntu-latest - steps: - - run: "echo 'NOT REQUIRED'" - clang-checks: - name: Clang - runs-on: ubuntu-latest - steps: - - run: "echo 'NOT REQUIRED'" - gitignore-check: - name: .gitignore - runs-on: ubuntu-latest - steps: - - run: "echo 'NOT REQUIRED'" diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 4c892ffce..e99704adb 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -2,58 +2,77 @@ name: Checks on: push: - paths: - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - 'configure.ac' - - '**/Makefile*' - - 'Makefile*' - - '.gitignore' - - '.github/workflows/checks.yml' - - 'build/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' branches: - master - pull_request: - paths: # This MUST be kept in-sync with the paths-ignore key for the checks-dummy.yml workflow. - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - 'configure.ac' - - '**/Makefile*' - - 'Makefile*' - - '.gitignore' - - '.github/workflows/checks.yml' - - 'build/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' + pull_request: null env: DISABLE_TELEMETRY: 1 concurrency: group: checks-${{ github.ref }} cancel-in-progress: true jobs: + file-check: # Check what files changed if we’re being run in a PR or on a push. 
+ name: Check Modified Files + runs-on: ubuntu-latest + outputs: + run: ${{ steps.check-run.outputs.run }} + steps: + - name: Checkout + id: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + submodules: recursive + - name: Check files + id: check-files + uses: tj-actions/changed-files@v39 + with: + since_last_remote_commit: ${{ github.event_name != 'pull_request' }} + files: | + **.c + **.cc + **.h + **.hh + **.in + configure.ac + **/Makefile* + Makefile* + .gitignore + .github/workflows/checks.yml + build/** + aclk/aclk-schemas/ + ml/dlib/ + mqtt_websockets + web/server/h2o/libh2o + files_ignore: | + netdata.spec.in + **.md + - name: Check Run + id: check-run + run: | + if [ "${{ steps.check-files.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo 'run=true' >> "${GITHUB_OUTPUT}" + else + echo 'run=false' >> "${GITHUB_OUTPUT}" + fi + libressl-checks: name: LibreSSL + needs: + - file-check runs-on: ubuntu-latest steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: submodules: recursive - name: Build + if: needs.file-check.outputs.run == 'true' run: > docker run -v "$PWD":/netdata -w /netdata alpine:latest /bin/sh -c 'apk add bash; @@ -63,30 +82,49 @@ jobs: autoreconf -ivf; ./configure --disable-dependency-tracking; make;' + clang-checks: name: Clang + needs: + - file-check runs-on: ubuntu-latest steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: submodules: recursive - name: Build - run: | - docker build -f .github/dockerfiles/Dockerfile.clang . + if: needs.file-check.outputs.run == 'true' + run: docker build -f .github/dockerfiles/Dockerfile.clang . 
+ gitignore-check: name: .gitignore + needs: + - file-check runs-on: ubuntu-latest steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: submodules: recursive - name: Prepare environment + if: needs.file-check.outputs.run == 'true' run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata - name: Build netdata + if: needs.file-check.outputs.run == 'true' run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build - name: Check that repo is clean + if: needs.file-check.outputs.run == 'true' run: | git status --porcelain=v1 > /tmp/porcelain if [ -s /tmp/porcelain ]; then diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 174f650ea..ae5818afc 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -23,7 +23,7 @@ jobs: python: ${{ steps.python.outputs.run }} steps: - name: Clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -76,7 +76,7 @@ jobs: security-events: write steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -103,7 +103,7 @@ jobs: security-events: write steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml index 8a1ee2486..eb68c302b 100644 --- a/.github/workflows/coverity.yml +++ b/.github/workflows/coverity.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 id: checkout with: submodules: recursive diff --git a/.github/workflows/dashboard-pr.yml b/.github/workflows/dashboard-pr.yml index ac414da10..f02cfb69d 100644 --- a/.github/workflows/dashboard-pr.yml +++ b/.github/workflows/dashboard-pr.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Update Files id: update run: | diff --git a/.github/workflows/docker-dummy.yml b/.github/workflows/docker-dummy.yml deleted file mode 100644 index 64131dac5..000000000 --- a/.github/workflows/docker-dummy.yml +++ /dev/null @@ -1,51 +0,0 @@ ---- -name: Docker -on: - pull_request: - paths-ignore: # This MUST be kept in-sync with the paths key for the dummy.yml workflow. 
- - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - '.dockerignore' - - 'configure.ac' - - 'netdata-installer.sh' - - '**/Makefile*' - - 'Makefile*' - - '.github/workflows/docker.yml' - - '.github/scripts/docker-test.sh' - - 'build/**' - - 'packaging/docker/**' - - 'packaging/installer/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' - - '!**.md' -env: - DISABLE_TELEMETRY: 1 -concurrency: - group: docker-${{ github.ref }}-${{ github.event_name }} - cancel-in-progress: true -jobs: - docker-test: - name: Docker Runtime Test - runs-on: ubuntu-latest - steps: - - run: echo 'NOT REQUIRED' - - docker-ci: - name: Docker Alt Arch Builds - needs: docker-test - runs-on: ubuntu-latest - strategy: - matrix: - platforms: - - linux/i386 - - linux/arm/v7 - - linux/arm64 - - linux/ppc64le - steps: - - run: echo 'NOT REQUIRED' diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index aad83ced5..c1dfc55eb 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -4,29 +4,7 @@ on: push: branches: - master - pull_request: - paths: # This MUST be kept in-sync with the paths-ignore key for the docker-dummy.yml workflow. - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - '!netdata.spec.in' - - '.dockerignore' - - 'configure.ac' - - 'netdata-installer.sh' - - '**/Makefile*' - - 'Makefile*' - - '.github/workflows/docker.yml' - - '.github/scripts/docker-test.sh' - - 'build/**' - - 'packaging/docker/**' - - 'packaging/installer/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' - - '!**.md' + pull_request: null workflow_dispatch: inputs: version: @@ -39,27 +17,86 @@ concurrency: group: docker-${{ github.ref }}-${{ github.event_name }} cancel-in-progress: true jobs: + file-check: # Check what files changed if we’re being run in a PR or on a push. 
+ name: Check Modified Files + runs-on: ubuntu-latest + outputs: + run: ${{ steps.check-run.outputs.run }} + steps: + - name: Checkout + id: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + submodules: recursive + - name: Check files + id: file-check + uses: tj-actions/changed-files@v39 + with: + since_last_remote_commit: ${{ github.event_name != 'pull_request' }} + files: | + **.c + **.cc + **.h + **.hh + **.in + .dockerignore + configure.ac + netdata-installer.sh + **/Makefile* + Makefile* + .github/workflows/docker.yml + .github/scripts/docker-test.sh + build/** + packaging/docker/** + packaging/installer/** + aclk/aclk-schemas/ + ml/dlib/ + mqtt_websockets + web/server/h2o/libh2o + files_ignore: | + netdata.spec.in + **.md + - name: Check Run + id: check-run + run: | + if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo 'run=true' >> "${GITHUB_OUTPUT}" + else + echo 'run=false' >> "${GITHUB_OUTPUT}" + fi + docker-test: name: Docker Runtime Test + needs: + - file-check runs-on: ubuntu-latest steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: submodules: recursive - name: Setup Buildx id: prepare - uses: docker/setup-buildx-action@v2 + if: needs.file-check.outputs.run == 'true' + uses: docker/setup-buildx-action@v3 - name: Test Build id: build - uses: docker/build-push-action@v4 + if: needs.file-check.outputs.run == 'true' + uses: docker/build-push-action@v5 with: load: true push: false tags: netdata/netdata:test - name: Test Image id: test + if: needs.file-check.outputs.run == 'true' run: .github/scripts/docker-test.sh - name: Failure Notification uses: rtCamp/action-slack-notify@v2 @@ -82,12 +119,15 @@ jobs: && github.event_name != 'pull_request' && startsWith(github.ref, 'refs/heads/master') && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} docker-ci: if: github.event_name != 'workflow_dispatch' name: Docker Alt Arch Builds - needs: docker-test + needs: + - docker-test + - file-check runs-on: ubuntu-latest strategy: matrix: @@ -97,21 +137,28 @@ jobs: - linux/arm64 - linux/ppc64le steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: submodules: recursive - name: Setup QEMU id: qemu - if: matrix.platforms != 'linux/i386' - uses: docker/setup-qemu-action@v2 + if: matrix.platforms != 'linux/i386' && needs.file-check.outputs.run == 'true' + uses: docker/setup-qemu-action@v3 - name: Setup Buildx id: buildx - uses: docker/setup-buildx-action@v2 + if: needs.file-check.outputs.run == 'true' + uses: docker/setup-buildx-action@v3 - name: Build id: build - uses: docker/build-push-action@v4 + if: needs.file-check.outputs.run == 'true' + uses: docker/build-push-action@v5 with: platforms: ${{ matrix.platforms }} load: false @@ -138,6 +185,7 @@ jobs: && github.event_name != 'pull_request' && startsWith(github.ref, 'refs/heads/master') && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} normalize-tag: # Fix the release tag if needed @@ -166,7 +214,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: 
submodules: recursive - name: Determine which tags to use @@ -186,21 +234,21 @@ jobs: run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}" - name: Setup QEMU id: qemu - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Setup Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Docker Hub Login id: docker-hub-login if: github.repository == 'netdata/netdata' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_PASSWORD }} - name: GitHub Container Registry Login id: ghcr-login if: github.repository == 'netdata/netdata' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -208,14 +256,14 @@ jobs: - name: Quay.io Login id: quay-login if: github.repository == 'netdata/netdata' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: quay.io username: ${{ secrets.NETDATABOT_QUAY_USERNAME }} password: ${{ secrets.NETDATABOT_QUAY_TOKEN }} - name: Docker Build id: build - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le push: ${{ github.repository == 'netdata/netdata' }} @@ -278,7 +326,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive - name: Determine which tags to use @@ -298,21 +346,21 @@ jobs: run: echo "OFFICIAL_IMAGE=true" >> "${GITHUB_ENV}" - name: Setup QEMU id: qemu - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Setup Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Docker Hub Login id: docker-hub-login if: github.repository == 'netdata/netdata' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_PASSWORD }} - name: GitHub Container Registry Login id: ghcr-login if: github.repository == 'netdata/netdata' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -320,14 +368,14 @@ jobs: - name: Quay.io Login id: quay-login if: github.repository == 'netdata/netdata' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: quay.io username: ${{ secrets.NETDATABOT_QUAY_USERNAME }} password: ${{ secrets.NETDATABOT_QUAY_TOKEN }} - name: Docker Build id: build - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le push: ${{ github.repository == 'netdata/netdata' }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 69fda40c3..a0554b167 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive - name: Run link check diff --git a/.github/workflows/generate-integrations.yml b/.github/workflows/generate-integrations.yml index 599cefbc0..4128e9925 100644 --- a/.github/workflows/generate-integrations.yml +++ b/.github/workflows/generate-integrations.yml @@ -1,6 +1,5 @@ --- -# CI workflow used to regenerate `integrations/integrations.js` when -# relevant source files are changed. 
+# CI workflow used to regenerate `integrations/integrations.js` and accompanying documentation when relevant source files are changed. name: Generate Integrations on: push: @@ -28,7 +27,7 @@ jobs: steps: - name: Checkout Agent id: checkout-agent - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 1 submodules: recursive @@ -37,7 +36,7 @@ jobs: run: echo "go_ref=$(cat packaging/go.d.version)" >> "${GITHUB_ENV}" - name: Checkout Go id: checkout-go - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 1 path: go.d.plugin @@ -55,6 +54,14 @@ jobs: run: | source ./virtualenv/bin/activate python3 integrations/gen_integrations.py + - name: Generate Integrations Documentation + id: generate-integrations-documentation + run: | + python3 integrations/gen_docs_integrations.py + - name: Generate collectors/COLLECTORS.md + id: generate-collectors-md + run: | + python3 integrations/gen_doc_collector_page.py - name: Clean Up Temporary Data id: clean run: rm -rf go.d.plugin virtualenv @@ -67,7 +74,7 @@ jobs: branch: integrations-regen title: Regenerate integrations.js body: | - Regenerate `integrations/integrations.js` based on the + Regenerate `integrations/integrations.js`, and documentation based on the latest code. This PR was auto-generated by @@ -87,6 +94,8 @@ jobs: Checkout Go: ${{ steps.checkout-go.outcome }} Prepare Dependencies: ${{ steps.prep-deps.outcome }} Generate Integrations: ${{ steps.generate.outcome }} + Generate Integrations Documentation: ${{ steps.generate-integrations-documentation.outcome }} + Generate collectors/COLLECTORS.md: ${{ steps.generate-collectors-md.outcome }} Clean Up Temporary Data: ${{ steps.clean.outcome }} Create PR: ${{ steps.create-pr.outcome }} SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/packagecloud.yml b/.github/workflows/packagecloud.yml index ba70c177b..3c427756a 100644 --- a/.github/workflows/packagecloud.yml +++ b/.github/workflows/packagecloud.yml @@ -20,7 +20,7 @@ jobs: - devel steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 id: checkout with: submodules: recursive diff --git a/.github/workflows/packaging-dummy.yml b/.github/workflows/packaging-dummy.yml deleted file mode 100644 index 653227e1c..000000000 --- a/.github/workflows/packaging-dummy.yml +++ /dev/null @@ -1,80 +0,0 @@ ---- -# Handles building of binary packages for the agent. -# -# This workflow exists so that we can make these required checks but -# still skip running them on PRs where they are not relevant. -name: Packages -on: - pull_request: - types: - - opened - - reopened - - labeled - - synchronize - paths-ignore: # This MUST be kept in-sync with the paths key for the packaging.yml workflow. 
- - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - 'netdata.spec.in' - - 'configure.ac' - - '**/Makefile*' - - 'Makefile*' - - '.github/workflows/packaging.yml' - - '.github/scripts/gen-matrix-packaging.py' - - '.github/scripts/pkg-test.sh' - - 'build/**' - - 'packaging/*.sh' - - 'packaging/*.checksums' - - 'packaging/*.version' - - 'contrib/debian/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' - - '!**.md' -env: - DISABLE_TELEMETRY: 1 - REPO_PREFIX: netdata/netdata -concurrency: - group: packages-${{ github.ref }}-${{ github.event_name }} - cancel-in-progress: true -jobs: - matrix: - name: Prepare Build Matrix - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - steps: - - name: Checkout - id: checkout - uses: actions/checkout@v3 - - name: Prepare tools - id: prepare - run: | - sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml - - name: Read build matrix - id: set-matrix - run: | - if [ "${{ github.event_name }}" = "pull_request" ] && \ - [ "${{ !contains(github.event.pull_request.labels.*.name, 'run-ci/packaging') }}" = "true" ]; then - matrix="$(.github/scripts/gen-matrix-packaging.py 1)" - else - matrix="$(.github/scripts/gen-matrix-packaging.py 0)" - fi - echo "Generated matrix: ${matrix}" - echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}" - - build: - name: Build - runs-on: ubuntu-latest - env: - DOCKER_CLI_EXPERIMENTAL: enabled - needs: - - matrix - strategy: - matrix: ${{ fromJson(needs.matrix.outputs.matrix) }} - fail-fast: false - steps: - - run: echo 'NOT REQUIRED' diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml index 7e8c7e527..9b15cf94b 100644 --- a/.github/workflows/packaging.yml +++ b/.github/workflows/packaging.yml @@ -8,31 +8,6 @@ on: - reopened - labeled - synchronize - paths: # This MUST be kept in-sync with the paths-ignore key for the packaging-dummy.yml workflow. - - '**.c' - - '**.cc' - - '**.h' - - '**.hh' - - '**.in' - - 'netdata.spec.in' - - 'configure.ac' - - '**/Makefile*' - - 'Makefile*' - - '.github/workflows/packaging.yml' - - '.github/scripts/gen-matrix-packaging.py' - - '.github/scripts/pkg-test.sh' - - 'build/**' - - 'packaging/*.sh' - - 'packaging/*.checksums' - - 'packaging/*.version' - - 'contrib/debian/**' - - 'aclk/aclk-schemas/' - - 'ml/dlib/' - - 'mqtt_websockets' - - 'web/server/h2o/libh2o' - - '!**.md' - branches: - - master push: branches: - master @@ -52,6 +27,57 @@ concurrency: group: packages-${{ github.ref }}-${{ github.event_name }} cancel-in-progress: true jobs: + file-check: # Check what files changed if we’re being run in a PR or on a push. 
+ name: Check Modified Files + runs-on: ubuntu-latest + outputs: + run: ${{ steps.check-run.outputs.run }} + steps: + - name: Checkout + id: checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + submodules: recursive + - name: Check files + id: file-check + uses: tj-actions/changed-files@v39 + with: + since_last_remote_commit: ${{ github.event_name != 'pull_request' }} + files: | + **.c + **.cc + **.h + **.hh + **.in + netdata.spec.in + configure.ac + **/Makefile* + Makefile* + .github/data/distros.yml + .github/workflows/packaging.yml + .github/scripts/gen-matrix-packaging.py + .github/scripts/pkg-test.sh + build/** + packaging/*.sh + packaging/*.checksums + packaging/*.version + contrib/debian/** + aclk/aclk-schemas/ + ml/dlib/ + mqtt_websockets + web/server/h2o/libh2o + files_ignore: | + **.md + - name: Check Run + id: check-run + run: | + if [ "${{ steps.file-check.outputs.any_modified }}" == "true" ] || [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo 'run=true' >> "${GITHUB_OUTPUT}" + else + echo 'run=false' >> "${GITHUB_OUTPUT}" + fi + matrix: name: Prepare Build Matrix runs-on: ubuntu-latest @@ -60,7 +86,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Prepare tools id: prepare run: | @@ -107,7 +133,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Check Version id: check-version run: | @@ -161,6 +187,7 @@ jobs: needs: - matrix - version-check + - file-check strategy: matrix: ${{ fromJson(needs.matrix.outputs.matrix) }} # We intentiaonally disable the fail-fast behavior so that a @@ -169,24 +196,31 @@ jobs: fail-fast: false max-parallel: 8 steps: + - name: Skip Check + id: skip + if: needs.file-check.outputs.run != 'true' + run: echo "SKIPPED" - name: Checkout id: checkout - uses: actions/checkout@v3 + if: needs.file-check.outputs.run == 'true' + uses: actions/checkout@v4 with: fetch-depth: 0 # We need full history for versioning submodules: recursive - name: Setup QEMU id: qemu - if: matrix.platform != 'linux/amd64' && matrix.platform != 'linux/i386' - uses: docker/setup-qemu-action@v2 + if: matrix.platform != 'linux/amd64' && matrix.platform != 'linux/i386' && needs.file-check.outputs.run == 'true' + uses: docker/setup-qemu-action@v3 - name: Prepare Docker Environment id: docker-config + if: needs.file-check.outputs.run == 'true' shell: bash run: | echo '{"cgroup-parent": "actions-job.slice", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null sudo service docker restart - name: Fetch images id: fetch-images + if: needs.file-check.outputs.run == 'true' uses: nick-invision/retry@v2 with: max_attempts: 3 @@ -194,15 +228,17 @@ jobs: timeout_seconds: 900 command: | docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }} - docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }} + docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}-v1 - name: Build Packages id: build + if: needs.file-check.outputs.run == 'true' shell: bash run: | docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e VERSION=${{ needs.version-check.outputs.version }} \ - --platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version }} + --platform=${{ matrix.platform }} -v "$PWD":/netdata netdata/package-builders:${{ matrix.distro }}${{ matrix.version 
}}-v1 - name: Save Packages id: artifacts + if: needs.file-check.outputs.run == 'true' continue-on-error: true uses: actions/upload-artifact@v3 with: @@ -210,6 +246,7 @@ jobs: path: ${{ github.workspace }}/artifacts/* - name: Test Packages id: test + if: needs.file-check.outputs.run == 'true' shell: bash run: | docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e DISTRO=${{ matrix.distro }} \ @@ -218,7 +255,7 @@ jobs: /netdata/.github/scripts/pkg-test.sh - name: Upload to PackageCloud id: upload - if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' + if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' continue-on-error: true shell: bash env: @@ -232,7 +269,7 @@ jobs: done - name: SSH setup id: ssh-setup - if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' + if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' uses: shimataro/ssh-key-action@v2 with: key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }} @@ -240,7 +277,7 @@ jobs: known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }} - name: Upload to packages.netdata.cloud id: package-upload - if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' + if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata' && needs.file-check.outputs.run == 'true' run: | .github/scripts/package-upload.sh \ ${{ matrix.repo_distro }} \ @@ -272,4 +309,5 @@ jobs: && github.event_name != 'pull_request' && startsWith(github.ref, 'refs/heads/master') && github.repository == 'netdata/netdata' + && needs.file-check.outputs.run == 'true' }} diff --git a/.github/workflows/platform-eol-check.yml b/.github/workflows/platform-eol-check.yml index d1f4416cd..ae290a973 100644 --- a/.github/workflows/platform-eol-check.yml +++ b/.github/workflows/platform-eol-check.yml @@ -22,7 +22,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Prepare tools id: prepare run: | @@ -66,7 +66,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Actually check the EOL date for the platform. 
- name: Check EOL Date id: check diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e675d789f..2fa51cc52 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -29,7 +29,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 submodules: recursive @@ -116,7 +116,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.update-changelogs.outputs.ref }} - name: Trigger build @@ -151,7 +151,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.update-changelogs.outputs.ref }} - name: Trigger build @@ -186,7 +186,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.update-changelogs.outputs.ref }} - name: Trigger build diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml index e2b41570f..df8fac204 100644 --- a/.github/workflows/repoconfig-packages.yml +++ b/.github/workflows/repoconfig-packages.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Prepare tools id: prepare run: | @@ -77,7 +77,7 @@ jobs: steps: - name: Checkout id: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Unlike normally, we do not need a deep clone or submodules for this. - name: Fetch base image id: fetch-images diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml index 7e76717ed..732a4a5a1 100644 --- a/.github/workflows/review.yml +++ b/.github/workflows/review.yml @@ -23,7 +23,7 @@ jobs: yamllint: ${{ steps.yamllint.outputs.run }} steps: - name: Clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -112,7 +112,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -129,7 +129,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: false fetch-depth: 0 @@ -162,7 +162,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -182,7 +182,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -203,7 +203,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Run hadolint @@ -219,7 +219,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -242,7 +242,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git clone repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5f83a4405..46384ffc5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: 
recursive - name: Prepare environment |
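
The `.github/data/distros.yml` hunks near the top of this diff (the removed `&alma` block and the added CentOS Stream and `&rocky` blocks) rely on YAML anchors and merge keys: the newest release of a distribution is defined in full and the older release is derived from it. A minimal standalone sketch of that layout follows; the distribution name and repository values here are invented for illustration only.

```yaml
# Minimal illustration of the anchor/merge-key layout used in distros.yml.
# "exampledistro" and its repo values are made up.
include:
  - &example                     # anchor the complete entry for the newest release
    distro: exampledistro
    version: "9"
    support_type: Core
    packages: &example_packages  # nested map gets its own anchor (merges are shallow)
      type: rpm
      repo_distro: el/9
      arches:
        - x86_64
        - aarch64
    test:
      ebpf-core: true
  - <<: *example                 # reuse everything from the "9" entry...
    version: "8"                 # ...overriding only the fields that differ
    packages:
      <<: *example_packages
      repo_distro: el/8
```

Since `<<:` merges only the top level of a mapping, an overridden nested map such as `packages` would otherwise replace the anchored one wholesale; anchoring `packages` separately and merging it again is what lets the derived entry change just `repo_distro`.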