author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-06 16:11:30 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2023-02-06 16:11:30 +0000
commit    | aa2fe8ccbfcb117efa207d10229eeeac5d0f97c7 (patch)
tree      | 941cbdd387b41c1a81587c20a6df9f0e5e0ff7ab /.github
parent    | Adding upstream version 1.37.1. (diff)
download  | netdata-aa2fe8ccbfcb117efa207d10229eeeac5d0f97c7.tar.xz, netdata-aa2fe8ccbfcb117efa207d10229eeeac5d0f97c7.zip
Adding upstream version 1.38.0. (tag: upstream/1.38.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '.github')
-rw-r--r-- | .github/CODEOWNERS                            | 49
-rw-r--r-- | .github/data/distros.yml                      | 31
-rw-r--r-- | .github/labeler.yml                           | 6
-rwxr-xr-x | .github/scripts/docker-test.sh                | 4
-rwxr-xr-x | .github/scripts/gen-docker-tags.py            | 13
-rwxr-xr-x | .github/scripts/gen-matrix-build.py           | 34
-rwxr-xr-x | .github/scripts/gen-matrix-packaging.py       | 36
-rwxr-xr-x | .github/scripts/gen-matrix-repoconfig.py      | 27
-rwxr-xr-x | .github/scripts/get-static-cache-key.sh       | 2
-rwxr-xr-x | .github/scripts/package-upload.sh             | 2
-rwxr-xr-x | .github/scripts/prepare-release-base.sh       | 58
-rwxr-xr-x | .github/scripts/run-updater-check.sh          | 21
-rwxr-xr-x | .github/scripts/run_install_with_dist_file.sh | 2
-rw-r--r-- | .github/workflows/build.yml                   | 100
-rw-r--r-- | .github/workflows/checks.yml                  | 2
-rw-r--r-- | .github/workflows/cloud_regression.yml        | 25
-rw-r--r-- | .github/workflows/codeql.yml                  | 20
-rw-r--r-- | .github/workflows/docker.yml                  | 60
-rw-r--r-- | .github/workflows/labeler.yml                 | 21
-rw-r--r-- | .github/workflows/packaging.yml               | 66
-rw-r--r-- | .github/workflows/repoconfig-packages.yml     | 55
-rw-r--r-- | .github/workflows/review.yml                  | 30
22 files changed, 372 insertions, 292 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index c513b71dc..34c934550 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,53 +5,50 @@
 * @Ferroin

 # Ownership by directory structure
-.travis/ @Ferroin
-.github/ @Ferroin
+.github/ @Ferroin @tkatsoulas
 aclk/ @stelfrag @underhood
-build/ @Ferroin
-contrib/debian @Ferroin
+build/ @Ferroin @tkatsoulas
+contrib/debian @Ferroin @tkatsoulas
 collectors/ @thiagoftsm
 collectors/ebpf.plugin/ @thiagoftsm
 collectors/charts.d.plugin/ @ilyam8 @Ferroin
 collectors/freebsd.plugin/ @thiagoftsm
 collectors/macos.plugin/ @thiagoftsm
 collectors/python.d.plugin/ @ilyam8
-collectors/cups.plugin/ @simonnagl @thiagoftsm
+collectors/cups.plugin/ @thiagoftsm
 exporting/ @thiagoftsm
 daemon/ @thiagoftsm @vkalintiris
 database/ @thiagoftsm @vkalintiris
-docs/ @DShreve2
+docs/ @tkatsoulas
 health/ @thiagoftsm @vkalintiris @MrZammler
 health/health.d/ @thiagoftsm @MrZammler
 health/notifications/ @Ferroin @thiagoftsm @MrZammler
 ml/ @andrewm4894 @vkalintiris
 libnetdata/ @thiagoftsm @vkalintiris
-packaging/ @Ferroin
+packaging/ @Ferroin @tkatsoulas
 registry/ @jacekkolasa
 streaming/ @thiagoftsm
-system/ @Ferroin
-tests/ @Ferroin @vkalintiris
+system/ @Ferroin @tkatsoulas
+tests/ @Ferroin @vkalintiris @tkatsoulas
 web/ @thiagoftsm @vkalintiris
 web/gui/ @jacekkolasa

 # Ownership by filetype (overwrites ownership by directory)
-*.am @Ferroin
-*.md @DShreve2
-Dockerfile* @Ferroin
+*.am @Ferroin @tkatsoulas
+*.md @tkatsoulas
+Dockerfile* @Ferroin @tkatsoulas

 # Ownership of specific files
-.gitignore @Ferroin @vkalintiris
-.travis.yml @Ferroin
-.lgtm.yml @Ferroin
-.eslintrc @Ferroin
-.eslintignore @Ferroin
-.csslintrc @Ferroin
-.codeclimate.yml @Ferroin
-.codacy.yml @Ferroin
-.yamllint.yml @Ferroin
-netdata.spec.in @Ferroin
-netdata-installer.sh @Ferroin
-packaging/version @netdatabot @Ferroin
+.gitignore @Ferroin @tkatsoulas @vkalintiris
+.eslintrc @Ferroin @tkatsoulas
+.eslintignore @Ferroin @tkatsoulas
+.csslintrc @Ferroin @tkatsoulas
+.codeclimate.yml @Ferroin @tkatsoulas
+.codacy.yml @Ferroin @tkatsoulas
+.yamllint.yml @Ferroin @tkatsoulas
+netdata.spec.in @Ferroin @tkatsoulas
+netdata-installer.sh @Ferroin @tkatsoulas
+packaging/version @netdatabot @Ferroin @tkatsoulas

-LICENSE.md @DShreve2 @Ferroin @vkalintiris
-CHANGELOG.md @netdatabot @Ferroin
+LICENSE.md @Ferroin @tkatsoulas @vkalintiris
+CHANGELOG.md @netdatabot @Ferroin @tkatsoulas
diff --git a/.github/data/distros.yml b/.github/data/distros.yml
index cc5275298..452170c07 100644
--- a/.github/data/distros.yml
+++ b/.github/data/distros.yml
@@ -51,6 +51,9 @@ include:
       packages: &alma_packages
         type: rpm
         repo_distro: el/9
+        alt_links:
+          - el/9Server
+          - el/9Client
       arches:
         - x86_64
         - aarch64
@@ -61,12 +64,18 @@ include:
       packages:
         <<: *alma_packages
         repo_distro: el/8
+        alt_links:
+          - el/8Server
+          - el/8Client

   - distro: centos
     version: "7"
     packages:
       type: rpm
       repo_distro: el/7
+      alt_links:
+        - el/7Server
+        - el/7Client
     arches:
       - x86_64
     test:
@@ -115,21 +124,6 @@ include:
     packages:
       <<: *fedora_packages
       repo_distro: fedora/36
-    arches:
-      - x86_64
-      - armhfp
-      - aarch64
-    test:
-      ebpf-core: true
-  - <<: *fedora
-    version: "35"
-    packages:
-      <<: *fedora_packages
-      repo_distro: fedora/35
-    arches:
-      - x86_64
-      - armhfp
-      - aarch64
     test:
       ebpf-core: true
@@ -147,13 +141,6 @@ include:
       - aarch64
     test:
       ebpf-core: true
-  - <<: *opensuse
-    version: "15.3"
-    packages:
-      <<: *opensuse_packages
-      repo_distro: opensuse/15.3
-    test:
-      ebpf-core: false

   - &oracle
     distro: oraclelinux
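The `alt_links` entries above only matter to the packaging pipeline, and the same distros.yml file is the single input for the new matrix-generation scripts added further down. A quick, illustrative way to see which distributions carry the new repository aliases (this assumes `ruamel.yaml` is installed, as the CI jobs do; it is not part of the upstream change):

```sh
python3 - <<'EOF'
from ruamel.yaml import YAML

data = YAML(typ='safe').load(open('.github/data/distros.yml'))
for entry in data['include']:
    pkgs = entry.get('packages') or {}
    if 'alt_links' in pkgs:
        print(entry['distro'], entry['version'], pkgs['repo_distro'], pkgs['alt_links'])
EOF
```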
diff --git a/.github/labeler.yml b/.github/labeler.yml
index c72325076..4d3a614d4 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -34,8 +34,6 @@ area/build:
   - "**/Makefile.am"

 area/ci:
-  - .travis/*
-  - .travis/**/*
   - .github/*
   - .github/**/*
@@ -88,10 +86,6 @@ collectors/ebpf:
   - collectors/ebpf.plugin/*
   - collectors/ebpf.plugin/**/*

-collectors/fping:
-  - collectors/fping.plugin/*
-  - collectors/fping.plugin/**/*
-
 collectors/freebsd:
   - collectors/freebsd.plugin/*
   - collectors/freebsd.plugin/**/*
diff --git a/.github/scripts/docker-test.sh b/.github/scripts/docker-test.sh
index 22821d17e..0f5fa469c 100755
--- a/.github/scripts/docker-test.sh
+++ b/.github/scripts/docker-test.sh
@@ -26,6 +26,10 @@ wait_for() {
     sleep 1
     if [ "$i" -gt "$timeout" ]; then
       printf "Timed out!\n"
+      docker ps -a
+      echo "::group::Netdata container logs"
+      docker logs netdata 2>&1
+      echo "::endgroup::"
       return 1
     fi
     i="$((i + 1))"
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
index df4dc0263..8c88d3b5e 100755
--- a/.github/scripts/gen-docker-tags.py
+++ b/.github/scripts/gen-docker-tags.py
@@ -6,9 +6,14 @@
 version = sys.argv[1].split('.')
 suffix = sys.argv[2]

 REPO = f'netdata/netdata{suffix}'
+GHCR = f'ghcr.io/{REPO}'
+QUAY = f'quay.io/{REPO}'

-MAJOR = ':'.join([REPO, version[0]])
-MINOR = ':'.join([REPO, '.'.join(version[0:2])])
-PATCH = ':'.join([REPO, '.'.join(version[0:3])])
+tags = []

-print(','.join([MAJOR, MINOR, PATCH]))
+for repo in [REPO, GHCR, QUAY]:
+    tags.append(':'.join([repo, version[0]]))
+    tags.append(':'.join([repo, '.'.join(version[0:2])]))
+    tags.append(':'.join([repo, '.'.join(version[0:3])]))
+
+print(','.join(tags))
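For reference, a hypothetical invocation of the updated tag generator: with an empty suffix it now emits Docker Hub, GHCR, and Quay tags in one comma-separated list. The version shown is purely illustrative; the workflows pass in the normalized release tag.

```sh
$ .github/scripts/gen-docker-tags.py 1.38.0 ''
netdata/netdata:1,netdata/netdata:1.38,netdata/netdata:1.38.0,ghcr.io/netdata/netdata:1,ghcr.io/netdata/netdata:1.38,ghcr.io/netdata/netdata:1.38.0,quay.io/netdata/netdata:1,quay.io/netdata/netdata:1.38,quay.io/netdata/netdata:1.38.0
```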
diff --git a/.github/scripts/gen-matrix-build.py b/.github/scripts/gen-matrix-build.py
new file mode 100755
index 000000000..28406470f
--- /dev/null
+++ b/.github/scripts/gen-matrix-build.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = []
+
+with open('.github/data/distros.yml') as f:
+    data = yaml.load(f)
+
+for i, v in enumerate(data['include']):
+    e = {
+        'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
+        'version': v['version'],
+    }
+
+    if 'base_image' in v:
+        e['distro'] = ':'.join([v['base_image'], str(v['version'])])
+    else:
+        e['distro'] = ':'.join([v['distro'], str(v['version'])])
+
+    if 'env_prep' in v:
+        e['env_prep'] = v['env_prep']
+
+    if 'jsonc_removal' in v:
+        e['jsonc_removal'] = v['jsonc_removal']
+
+    entries.append(e)
+
+entries.sort(key=lambda k: k['distro'])
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/gen-matrix-packaging.py b/.github/scripts/gen-matrix-packaging.py
new file mode 100755
index 000000000..01e9ec790
--- /dev/null
+++ b/.github/scripts/gen-matrix-packaging.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+
+import json
+import sys
+
+from ruamel.yaml import YAML
+
+ALWAYS_RUN_ARCHES = ["amd64", "x86_64"]
+SHORT_RUN = sys.argv[1]
+yaml = YAML(typ='safe')
+entries = list()
+run_limited = False
+
+with open('.github/data/distros.yml') as f:
+    data = yaml.load(f)
+
+if bool(int(SHORT_RUN)):
+    run_limited = True
+
+for i, v in enumerate(data['include']):
+    if 'packages' in data['include'][i]:
+        for arch in data['include'][i]['packages']['arches']:
+            if arch in ALWAYS_RUN_ARCHES or not run_limited:
+                entries.append({
+                    'distro': data['include'][i]['distro'],
+                    'version': data['include'][i]['version'],
+                    'repo_distro': data['include'][i]['packages']['repo_distro'],
+                    'format': data['include'][i]['packages']['type'],
+                    'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'],
+                    'platform': data['platform_map'][arch],
+                    'arch': arch
+                })
+
+entries.sort(key=lambda k: (data['arch_order'].index(k['arch']), k['distro'], k['version']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/gen-matrix-repoconfig.py b/.github/scripts/gen-matrix-repoconfig.py
new file mode 100755
index 000000000..46f671697
--- /dev/null
+++ b/.github/scripts/gen-matrix-repoconfig.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = list()
+
+with open('.github/data/distros.yml') as f:
+    data = yaml.load(f)
+
+for i, v in enumerate(data['include']):
+    if 'packages' in data['include'][i]:
+        entries.append({
+            'distro': data['include'][i]['distro'],
+            'version': data['include'][i]['version'],
+            'pkgclouddistro': data['include'][i]['packages']['repo_distro'],
+            'format': data['include'][i]['packages']['type'],
+            'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'],
+            'platform': data['platform_map']['amd64'],
+            'arches': ' '.join(['"' + x + '"' for x in data['include'][i]['packages']['arches']])
+        })
+
+entries.sort(key=lambda k: (k['distro'], k['version']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
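The three scripts above replace Python blocks that previously lived inline in the workflow files; each prints a single JSON document of the form {"include": [...]} on stdout. A minimal sketch of how they can be exercised locally (piping through json.tool is just for readability and is not part of the CI change):

```sh
# Build-test matrix: one entry per distro in distros.yml.
.github/scripts/gen-matrix-build.py | python3 -m json.tool

# Packaging matrix: the single argument selects a short run.
# 1 = only the always-run arches (amd64/x86_64), used for unlabelled PRs;
# 0 = the full arch matrix.
.github/scripts/gen-matrix-packaging.py 0 | python3 -m json.tool

# Repo-config matrix: amd64 only, with the arch list flattened into a string.
.github/scripts/gen-matrix-repoconfig.py | python3 -m json.tool
```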
- echo "::set-output name=run::false" + echo "run=false" >> "${GITHUB_OUTPUT}" else echo "${NEW_VERSION}" > packaging/version || exit 1 - echo "::set-output name=run::true" - echo "::set-output name=message::Update changelog and version for nightly build: ${NEW_VERSION}." - echo "::set-output name=ref::master" - echo "::set-output name=type::nightly" - echo "::set-output name=branch::master" - echo "::set-output name=version::nightly" + # shellcheck disable=SC2129 + echo "run=true" >> "${GITHUB_OUTPUT}" + echo "message=Update changelog and version for nightly build: ${NEW_VERSION}." >> "${GITHUB_OUTPUT}" + echo "ref=master" >> "${GITHUB_OUTPUT}" + echo "type=nightly" >> "${GITHUB_OUTPUT}" + echo "branch=master" >> "${GITHUB_OUTPUT}" + echo "version=nightly" >> "${GITHUB_OUTPUT}" fi elif [ "${EVENT_TYPE}" = 'patch' ] && [ "${EVENT_VERSION}" != "nightly" ]; then echo "::notice::Preparing a patch release build." @@ -130,12 +131,13 @@ elif [ "${EVENT_TYPE}" = 'patch' ] && [ "${EVENT_VERSION}" != "nightly" ]; then major_matches || exit 1 check_newer_patch_version || exit 1 echo "${EVENT_VERSION}" > packaging/version || exit 1 - echo "::set-output name=run::true" - echo "::set-output name=message::Patch release ${EVENT_VERSION}." - echo "::set-output name=ref::${EVENT_VERSION}" - echo "::set-output name=type::release" - echo "::set-output name=branch::${branch_name}" - echo "::set-output name=version::$(tr -d 'v' < packaging/version)" + # shellcheck disable=SC2129 + echo "run=true" >> "${GITHUB_OUTPUT}" + echo "message=Patch release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}" + echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}" + echo "type=release" >> "${GITHUB_OUTPUT}" + echo "branch=${branch_name}" >> "${GITHUB_OUTPUT}" + echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}" elif [ "${EVENT_TYPE}" = 'minor' ] && [ "${EVENT_VERSION}" != "nightly" ]; then echo "::notice::Preparing a minor release build." check_version_format || exit 1 @@ -149,13 +151,14 @@ elif [ "${EVENT_TYPE}" = 'minor' ] && [ "${EVENT_VERSION}" != "nightly" ]; then exit 1 fi echo "${EVENT_VERSION}" > packaging/version || exit 1 - echo "::set-output name=run::true" - echo "::set-output name=message::Minor release ${EVENT_VERSION}." - echo "::set-output name=ref::${EVENT_VERSION}" - echo "::set-output name=type::release" - echo "::set-output name=branch::master" - echo "::set-output name=new-branch::${branch_name}" - echo "::set-output name=version::$(tr -d 'v' < packaging/version)" + # shellcheck disable=SC2129 + echo "run=true" >> "${GITHUB_OUTPUT}" + echo "message=Minor release ${EVENT_VERSION}." >> "${GITHUB_OUTPUT}" + echo "ref=${EVENT_VERSION}" >> "${GITHUB_OUTPUT}" + echo "type=release" >> "${GITHUB_OUTPUT}" + echo "branch=master" >> "${GITHUB_OUTPUT}" + echo "new-branch=${branch_name}" >> "${GITHUB_OUTPUT}" + echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}" elif [ "${EVENT_TYPE}" = 'major' ] && [ "${EVENT_VERSION}" != "nightly" ]; then echo "::notice::Preparing a major release build." 
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
index 31ab71de8..a96a1d6ef 100755
--- a/.github/scripts/run-updater-check.sh
+++ b/.github/scripts/run-updater-check.sh
@@ -4,11 +4,26 @@ echo ">>> Installing CI support packages..."
 /netdata/.github/scripts/ci-support-pkgs.sh
 echo ">>> Installing Netdata..."
 /netdata/packaging/installer/kickstart.sh --dont-wait --build-only --disable-telemetry || exit 1
-echo "::group::Environment File Contents"
+echo "::group::>>> Pre-Update Environment File Contents"
 cat /etc/netdata/.environment
 echo "::endgroup::"
+echo "::group::>>> Pre-Update Netdata Build Info"
+netdata -W buildinfo
+echo "::endgroup::"
 echo ">>> Updating Netdata..."
-export NETDATA_NIGHTLIES_BASEURL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
-/netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update || exit 1
+export NETDATA_BASE_URL="http://localhost:8080/artifacts/" # Pull the tarball from the local web server.
+timeout 3600 /netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update
+
+case "$?" in
+    124) echo "!!! Updater timed out." ; exit 1 ;;
+    0) ;;
+    *) echo "!!! Updater failed." ; exit 1 ;;
+esac
+echo "::group::>>> Post-Update Environment File Contents"
+cat /etc/netdata/.environment
+echo "::endgroup::"
+echo "::group::>>> Post-Update Netdata Build Info"
+netdata -W buildinfo
+echo "::endgroup::"
 echo ">>> Checking if update was successful..."
 /netdata/.github/scripts/check-updater.sh || exit 1
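The updater is now wrapped in coreutils `timeout`, which exits with status 124 when it kills a command for overrunning, so the `case "$?"` above can distinguish a hang from an ordinary failure. The same pattern in isolation (the command name is a placeholder):

```sh
timeout 3600 ./some-long-running-step.sh
case "$?" in
    124) echo "!!! Timed out." ; exit 1 ;;
    0) ;;
    *) echo "!!! Failed." ; exit 1 ;;
esac
```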
diff --git a/.github/scripts/run_install_with_dist_file.sh b/.github/scripts/run_install_with_dist_file.sh
index d59e8b134..74652efdd 100755
--- a/.github/scripts/run_install_with_dist_file.sh
+++ b/.github/scripts/run_install_with_dist_file.sh
@@ -33,7 +33,7 @@ docker run \
   -v "${PWD}:/netdata" \
   -w /netdata \
   "ubuntu:latest" \
-  /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt install wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"
+  /bin/bash -c "./install-required-packages.sh --dont-wait --non-interactive netdata && apt install wget && ./netdata-installer.sh --dont-wait --require-cloud --disable-telemetry --install-prefix /tmp --one-time-build && echo \"Validating netdata instance is running\" && wget -O - 'http://127.0.0.1:19999/api/v1/info' | grep version"

 popd || exit 1
 echo "All Done!"
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 53f1590f8..c3924fb0c 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -57,7 +57,7 @@ jobs:
             --with-math \
             --with-user=netdata
           make dist
-          echo "::set-output name=distfile::$(find . -name 'netdata-*.tar.gz')"
+          echo "distfile=$(find . -name 'netdata-*.tar.gz')" >> "${GITHUB_OUTPUT}"
           cp netdata-*.tar.gz artifacts/
       - name: Store
         id: store
@@ -171,6 +171,7 @@ jobs:
   matrix: # Generate the shared build matrix for our build tests.
     name: Prepare Build Matrix
     runs-on: ubuntu-latest
+    if: github.event_name != 'workflow_dispatch'
     outputs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
@@ -183,39 +184,10 @@
           sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
       - name: Read build matrix
         id: set-matrix
-        shell: python3 {0}
         run: |
-          from ruamel.yaml import YAML
-          import json
-          yaml = YAML(typ='safe')
-          entries = list()
-
-          with open('.github/data/distros.yml') as f:
-              data = yaml.load(f)
-
-          for i, v in enumerate(data['include']):
-              e = {
-                  'artifact_key': v['distro'] + str(v['version']).replace('.', ''),
-                  'version': v['version'],
-              }
-
-              if 'base_image' in v:
-                  e['distro'] = ':'.join([v['base_image'], str(v['version'])])
-              else:
-                  e['distro'] = ':'.join([v['distro'], str(v['version'])])
-
-              if 'env_prep' in v:
-                  e['env_prep'] = v['env_prep']
-
-              if 'jsonc_removal' in v:
-                  e['jsonc_removal'] = v['jsonc_removal']
-
-              entries.append(e)
-
-          entries.sort(key=lambda k: k['distro'])
-          matrix = json.dumps({'include': entries}, sort_keys=True)
-          print('Generated Matrix: ' + matrix)
-          print('::set-output name=matrix::' + matrix)
+          matrix="$(.github/scripts/gen-matrix-build.py)"
+          echo "Generated matrix: ${matrix}"
+          echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}"
       - name: Failure Notification
         uses: rtCamp/action-slack-notify@v2
         env:
@@ -241,12 +213,13 @@ jobs:
   prepare-test-images: # Prepare the test environments for our build checks. This also checks dependency handling code for each tested environment.
     name: Prepare Test Environments
     runs-on: ubuntu-latest
+    if: github.event_name != 'workflow_dispatch'
     needs:
       - matrix
     env:
       RETRY_DELAY: 300
     strategy:
-      # Unlike the actal build tests, this completes _very_ fast (average of about 3 minutes for each job), so we
+      # Unlike the actual build tests, this completes _very_ fast (average of about 3 minutes for each job), so we
       # just run everything in parallel instead lof limiting job concurrency.
       fail-fast: false
       matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
@@ -269,7 +242,7 @@
             BASE=${{ matrix.distro }}
             PRE=${{ matrix.env_prep }}
             RMJSONC=${{ matrix.jsonc_removal }}
-          outputs: type=oci,dest=/tmp/image.tar
+          outputs: type=docker,dest=/tmp/image.tar
           tags: test:${{ matrix.artifact_key }}
       - name: Retry delay
         if: ${{ steps.build1.outcome == 'failure' }}
@@ -287,7 +260,7 @@
             BASE=${{ matrix.distro }}
             PRE=${{ matrix.env_prep }}
            RMJSONC=${{ matrix.jsonc_removal }}
-          outputs: type=oci,dest=/tmp/image.tar
+          outputs: type=docker,dest=/tmp/image.tar
           tags: test:${{ matrix.artifact_key }}
       - name: Retry delay
         if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }}
@@ -304,7 +277,7 @@
             BASE=${{ matrix.distro }}
             PRE=${{ matrix.env_prep }}
             RMJSONC=${{ matrix.jsonc_removal }}
-          outputs: type=oci,dest=/tmp/image.tar
+          outputs: type=docker,dest=/tmp/image.tar
           tags: test:${{ matrix.artifact_key }}
       - name: Upload image artifact
         id: upload
@@ -341,6 +314,7 @@ jobs:
   source-build: # Test various source build arrangements.
     name: Test Source Build
     runs-on: ubuntu-latest
+    if: github.event_name != 'workflow_dispatch'
     needs:
       - matrix
       - prepare-test-images
@@ -361,29 +335,27 @@
           name: ${{ matrix.artifact_key }}-test-env
       - name: Load test environment
         id: load
-        run: |
-          docker load --input image.tar | tee image-info.txt
-          echo "::set-output name=image::$(cut -d ':' -f 3 image-info.txt)"
+        run: docker load --input image.tar
       - name: Regular build on ${{ matrix.distro }}
         id: build-basic
         run: |
-          docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+          docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
               /bin/sh -c 'autoreconf -ivf && ./configure --disable-dependency-tracking && make -j2'
       - name: netdata-installer on ${{ matrix.distro }}, disable cloud
         id: build-no-cloud
         run: |
-          docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+          docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
              /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --disable-cloud --one-time-build'
       - name: netdata-installer on ${{ matrix.distro }}, require cloud
         id: build-cloud
         run: |
-          docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+          docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
              /bin/sh -c './netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
       - name: netdata-installer on ${{ matrix.distro }}, require cloud, no JSON-C
         id: build-no-jsonc
         if: matrix.jsonc_removal != ''
         run: |
-          docker run --security-opt seccomp=unconfined -w /netdata sha256:${{ steps.load.outputs.image }} \
+          docker run --security-opt seccomp=unconfined -w /netdata test:${{ matrix.artifact_key }} \
              /bin/sh -c '/rmjsonc.sh && ./netdata-installer.sh --dont-wait --dont-start-it --require-cloud --one-time-build'
       - name: Failure Notification
         uses: rtCamp/action-slack-notify@v2
@@ -414,6 +386,7 @@ jobs:
   updater-check: # Test the generated dist archive using the updater code.
     name: Test Generated Distfile and Updater Code
     runs-on: ubuntu-latest
+    if: github.event_name != 'workflow_dispatch'
     needs:
       - build-dist
       - matrix
@@ -442,10 +415,10 @@
       - name: Prepare artifact directory
         id: prepare
         run: |
-          mkdir -p artifacts || exit 1
-          echo "9999.0.0-0" > artifacts/latest-version.txt || exit 1
-          cp dist-tarball/* artifacts || exit 1
-          cd artifacts || exit 1
+          mkdir -p artifacts/download/latest || exit 1
+          echo "9999.0.0-0" > artifacts/download/latest/latest-version.txt || exit 1
+          cp dist-tarball/* artifacts/download/latest || exit 1
+          cd artifacts/download/latest || exit 1
           ln -s ${{ needs.build-dist.outputs.distfile }} netdata-latest.tar.gz || exit 1
           sha256sum -b ./* > "sha256sums.txt" || exit 1
           cat sha256sums.txt
@@ -456,13 +429,11 @@
           name: ${{ matrix.artifact_key }}-test-env
       - name: Load test environment
         id: load
-        run: |
-          docker load --input image.tar | tee image-info.txt
-          echo "::set-output name=image::$(cut -d ':' -f 3 image-info.txt)"
+        run: docker load --input image.tar
       - name: Install netdata and run the updater on ${{ matrix.distro }}
         id: updater-check
         run: |
-          docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata sha256:${{ steps.load.outputs.image }} \
+          docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --network host -w /netdata test:${{ matrix.artifact_key }} \
              /netdata/.github/scripts/run-updater-check.sh
       - name: Failure Notification
         uses: rtCamp/action-slack-notify@v2
@@ -578,10 +549,15 @@
         with:
           name: final-artifacts
           path: artifacts
+      - name: Prepare artifacts directory
+        id: prepare
+        run: |
+          mkdir -p download/latest
+          mv artifacts/* download/latest
       - name: Verify that artifacts work with installer
         id: verify
         env:
-          NETDATA_TARBALL_BASEURL: http://localhost:8080/artifacts
+          NETDATA_TARBALL_BASEURL: http://localhost:8080/
         run: packaging/installer/kickstart.sh --build-only --dont-start-it --disable-telemetry --dont-wait
       - name: Failure Notification
         uses: rtCamp/action-slack-notify@v2
@@ -627,10 +603,15 @@
         with:
           name: final-artifacts
           path: artifacts
+      - name: Prepare artifacts directory
+        id: prepare
+        run: |
+          mkdir -p download/latest
+          mv artifacts/* download/latest
       - name: Verify that artifacts work with installer
         id: verify
         env:
-          NETDATA_TARBALL_BASEURL: http://localhost:8080/artifacts
+          NETDATA_TARBALL_BASEURL: http://localhost:8080/
         run: packaging/installer/kickstart.sh --static-only --dont-start-it --disable-telemetry
       - name: Failure Notification
         uses: rtCamp/action-slack-notify@v2
@@ -659,8 +640,6 @@
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'nightly' && github.repository == 'netdata/netdata'
     needs:
-      - updater-check
-      - source-build
       - artifact-verification-dist
       - artifact-verification-static
     steps:
@@ -714,8 +693,6 @@
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'nightly' && github.repository == 'netdata/netdata'
     needs:
-      - updater-check
-      - source-build
       - artifact-verification-dist
       - artifact-verification-static
     steps:
@@ -755,6 +732,7 @@
           repo: netdata-nightlies
           body: Netdata nightly build for ${{ steps.version.outputs.date }}.
           commit: ${{ steps.version.outputs.commit }}
+          makeLatest: true
           tag: ${{ steps.version.outputs.version }}
           token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
       - name: Failure Notification
@@ -790,9 +768,9 @@ jobs:
         id: tag
         run: |
           if echo ${{ github.event.inputs.version }} | grep -qE '^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+$'; then
-            echo "::set-output name=tag::v${{ github.event.inputs.version }}"
+            echo "tag=v${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
           else
-            echo "::set-output name=tag::${{ github.event.inputs.version }}"
+            echo "tag=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}"
           fi

   upload-release: # Create the draft release and upload the build artifacts.
@@ -800,8 +778,6 @@
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch' && github.event.inputs.type == 'release' && github.repository == 'netdata/netdata'
     needs:
-      - updater-check
-      - source-build
       - artifact-verification-dist
       - artifact-verification-static
       - normalize-tag
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 65ad6acbc..799f8d991 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -51,7 +51,7 @@ jobs:
       - name: Prepare environment
         run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
       - name: Build netdata
-        run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install /tmp/install --one-time-build
+        run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
       - name: Check that repo is clean
         run: |
           git status --porcelain=v1 > /tmp/porcelain
diff --git a/.github/workflows/cloud_regression.yml b/.github/workflows/cloud_regression.yml
index b6e321fe1..01fcdca4d 100644
--- a/.github/workflows/cloud_regression.yml
+++ b/.github/workflows/cloud_regression.yml
@@ -33,12 +33,12 @@ jobs:
             NETDATA_CUSTOM_PR_NUMBER=""
             NETDATA_CUSTOM_COMMIT_HASH="${{ github.sha }}"
           fi
-          echo "::set-output name=netdata_repo::${NETDATA_CUSTOM_REPO}"
-          echo "::set-output name=netdata_branch::${NETDATA_CUSTOM_BRANCH}"
-          echo "::set-output name=netdata_pr_number::${NETDATA_CUSTOM_PR_NUMBER}"
-          echo "::set-output name=netdata_commit_hash::${NETDATA_CUSTOM_COMMIT_HASH}"
+          echo "netdata_repo=${NETDATA_CUSTOM_REPO}" >> $GITHUB_OUTPUT
+          echo "netdata_branch=${NETDATA_CUSTOM_BRANCH}" >> $GITHUB_OUTPUT
+          echo "netdata_pr_number=${NETDATA_CUSTOM_PR_NUMBER}" >> $GITHUB_OUTPUT
+          echo "netdata_commit_hash=${NETDATA_CUSTOM_COMMIT_HASH}" >> $GITHUB_OUTPUT

-      - name: Trigger Cloud Regression
+      - name: Trigger Full Cloud Regression
        uses: aurelien-baudet/workflow-dispatch@v2
        with:
          repo: netdata/test-automation
@@ -52,3 +52,18 @@ jobs:
                     "custom_netdata_image": "true"
                   }'
          wait-for-completion: false
+
+      - name: Trigger Agent Parent/Child with Cloud Integration tests
+        uses: aurelien-baudet/workflow-dispatch@v2
+        with:
+          repo: netdata/test-automation
+          ref: refs/heads/master
+          workflow: agent_smoke_tests.yml
+          token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+          inputs: '{ "netdata_branch": "${{ steps.output-workflow-dispatch-params.outputs.netdata_branch }}",
+                     "netdata_repo": "${{ steps.output-workflow-dispatch-params.outputs.netdata_repo }}",
+                     "netdata_pr_number": "${{ steps.output-workflow-dispatch-params.outputs.netdata_pr_number }}",
+                     "netdata_branch_commit_hash": "${{ steps.output-workflow-dispatch-params.outputs.netdata_commit_hash }}",
+                     "custom_netdata_image": "true"
+                   }'
+          wait-for-completion: true
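Both dispatch steps above drive workflows in netdata/test-automation through the workflow-dispatch action. For illustration only, a roughly equivalent manual trigger sketched with the GitHub CLI; the input values are placeholders, since the real ones are filled in from the step outputs:

```sh
gh workflow run agent_smoke_tests.yml \
    --repo netdata/test-automation \
    --ref master \
    -f netdata_branch=my-branch \
    -f netdata_repo=https://github.com/netdata/netdata \
    -f custom_netdata_image=true
```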
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 021376a2d..b2af615e4 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -32,39 +32,39 @@ jobs:
         run: |
           if [ "${{ github.event_name }}" = "pull_request" ]; then
             if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/codeql') }}" = "true" ]; then
-              echo '::set-output name=run::true'
+              echo "run=true" >> "${GITHUB_OUTPUT}"
               echo '::notice::Found ci/codeql label, unconditionally running all CodeQL checks.'
             else
-              echo '::set-output name=run::false'
+              echo "run=false" >> "${GITHUB_OUTPUT}"
             fi
           else
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           fi
       - name: Check for C/C++ changes
         id: cpp
         run: |
           if [ "${{ steps.always.outputs.run }}" = "false" ]; then
             if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.[ch](xx|\+\+)?' ; then
-              echo '::set-output name=run::true'
+              echo "run=true" >> "${GITHUB_OUTPUT}"
               echo '::notice::C/C++ code has changed, need to run CodeQL.'
             else
-              echo '::set-output name=run::false'
+              echo "run=false" >> "${GITHUB_OUTPUT}"
             fi
           else
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           fi
       - name: Check for python changes
         id: python
         run: |
           if [ "${{ steps.always.outputs.run }}" = "false" ]; then
             if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq 'collectors/python.d.plugin/.*\.py' ; then
-              echo '::set-output name=run::true'
+              echo "run=true" >> "${GITHUB_OUTPUT}"
               echo '::notice::Python code has changed, need to run CodeQL.'
             else
-              echo '::set-output name=run::false'
+              echo "run=false" >> "${GITHUB_OUTPUT}"
             fi
           else
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           fi

   analyze-cpp:
@@ -87,7 +87,7 @@ jobs:
       - name: Prepare environment
         run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
       - name: Build netdata
-        run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install /tmp/install --one-time-build
+        run: ./netdata-installer.sh --dont-start-it --disable-telemetry --dont-wait --install-prefix /tmp/install --one-time-build
       - name: Run CodeQL
         uses: github/codeql-action/analyze@v2
         with:
"tags=netdata/netdata:latest,netdata/netdata:edge,ghcr.io/netdata/netdata:latest,ghcr.io/netdata/netdata:edge,quay.io/netdata/netdata:latest,quay.io/netdata/netdata:edge" >> "${GITHUB_ENV}" - name: Mark image as official id: env if: github.repository == 'netdata/netdata' @@ -169,12 +169,28 @@ jobs: id: buildx uses: docker/setup-buildx-action@v2 - name: Docker Hub Login - id: login + id: docker-hub-login if: github.repository == 'netdata/netdata' uses: docker/login-action@v2 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_PASSWORD }} + - name: GitHub Container Registry Login + id: ghcr-login + if: github.repository == 'netdata/netdata' + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Quay.io Login + id: quay-login + if: github.repository == 'netdata/netdata' + uses: docker/login-action@v2 + with: + registry: quay.io + username: ${{ secrets.NETDATABOT_QUAY_USERNAME }} + password: ${{ secrets.NETDATABOT_QUAY_TOKEN }} - name: Docker Build id: build uses: docker/build-push-action@v3 @@ -199,7 +215,9 @@ jobs: Setup environment: ${{ steps.env.outcome }} Setup QEMU: ${{ steps.qemu.outcome }} Setup buildx: ${{ steps.buildx.outcome }} - Authenticate against DockerHub: ${{ steps.login.outcome }} + Login to DockerHub: ${{ steps.docker-hub-login.outcome }} + Login to GHCR: ${{ steps.ghcr-login.outcome }} + Login to Quay: ${{ steps.quay-login.outcome }} Build and publish images: ${{ steps.build.outcome }} SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }} if: >- @@ -221,7 +239,7 @@ jobs: docker-dbg-publish: if: github.event_name == 'workflow_dispatch' - name: Docker Build and Publish (Debuging Image) + name: Docker Build and Publish (Debugging Image) needs: - docker-test - normalize-tag @@ -236,13 +254,13 @@ jobs: id: release-tags if: github.event.inputs.version != 'nightly' run: | - echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '-debug')" \ + echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:stable,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:stable,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:stable,$(.github/scripts/gen-docker-tags.py ${{ needs.normalize-tag.outputs.tag }} '-debug')" \ >> "${GITHUB_ENV}" - name: Determine which tags to use id: nightly-tags if: github.event.inputs.version == 'nightly' run: | - echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:edge" >> "${GITHUB_ENV}" + echo "tags=netdata/netdata-debug:latest,netdata/netdata-debug:edge,ghcr.io/netdata/netdata-debug:latest,ghcr.io/netdata/netdata-debug:edge,quay.io/netdata/netdata-debug:latest,quay.io/netdata/netdata-debug:edge" >> "${GITHUB_ENV}" - name: Mark image as official id: env if: github.repository == 'netdata/netdata' @@ -254,12 +272,28 @@ jobs: id: buildx uses: docker/setup-buildx-action@v2 - name: Docker Hub Login - id: login + id: docker-hub-login if: github.repository == 'netdata/netdata' uses: docker/login-action@v2 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_PASSWORD }} + - name: GitHub Container Registry Login + id: ghcr-login + if: github.repository == 'netdata/netdata' + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Quay.io Login + id: quay-login + if: github.repository == 
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index 0854080a7..2b8b41fcb 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -2,17 +2,20 @@
 # Handles labelling of PR's.
 name: Pull Request Labeler
 on:
-  schedule:
-    - cron: '*/10 * * * *'
-env:
-  DISABLE_TELEMETRY: 1
+  pull_request_target: null
+concurrency:
+  group: pr-label-${{ github.ref }}
+  cancel-in-progress: true
 jobs:
   labeler:
+    name: Apply PR Labels
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      pull-requests: write
     steps:
-      - uses: docker://docker.io/ilyam8/periodic-pr-labeler:v0.1.0
+      - uses: actions/labeler@v4
         if: github.repository == 'netdata/netdata'
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          GITHUB_REPOSITORY: ${{ github.repository }}
-          LABEL_MAPPINGS_FILE: .github/labeler.yml
+        with:
+          repo-token: "${{ secrets.GITHUB_TOKEN }}"
+          sync-labels: true
github.event_name }}" = "pull_request" ] && \ + [ "${{ !contains(github.event.pull_request.labels.*.name, 'run-ci/packaging') }}" = "true" ]; then + matrix="$(.github/scripts/gen-matrix-packaging.py 1)" + else + matrix="$(.github/scripts/gen-matrix-packaging.py 0)" + fi + echo "Generated matrix: ${matrix}" + echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}" - name: Failure Notification uses: rtCamp/action-slack-notify@v2 env: @@ -117,24 +91,24 @@ jobs: if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then case "${{ github.event.inputs.type }}" in "release") - echo "::set-output name=repo::${REPO_PREFIX}" - echo "::set-output name=version::${{ github.event.inputs.version }}" - echo "::set-output name=retention::365" + echo "repo=${REPO_PREFIX}" >> "${GITHUB_OUTPUT}" + echo "version=${{ github.event.inputs.version }}" >> "${GITHUB_OUTPUT}" + echo "retention=365" >> "${GITHUB_OUTPUT}" ;; "nightly") - echo "::set-output name=repo::${REPO_PREFIX}-edge" - echo "::set-output name=version::$(tr -d 'v' < packaging/version)" - echo "::set-output name=retention::30" + echo "repo=${REPO_PREFIX}-edge" >> "${GITHUB_OUTPUT}" + echo "version=$(tr -d 'v' < packaging/version)" >> "${GITHUB_OUTPUT}" + echo "retention=30" >> "${GITHUB_OUTPUT}" ;; *) - echo "::set-output name=repo::${REPO_PREFIX}-devel" - echo "::set-output name=version::0.${GITHUB_SHA}" - echo "::set-output name=retention::30" + echo "repo=${REPO_PREFIX}-devel" >> "${GITHUB_OUTPUT}" + echo "version=0.${GITHUB_SHA}" >> "${GITHUB_OUTPUT}" + echo "retention=30" >> "${GITHUB_OUTPUT}" ;; esac else - echo "::set-output name=version::$(cut -d'-' -f 1 packaging/version | tr -d 'v')" - echo "::set-output name=retention::0" + echo "version=$(cut -d'-' -f 1 packaging/version | tr -d 'v')" >> "${GITHUB_OUTPUT}" + echo "retention=0" >> "${GITHUB_OUTPUT}" fi - name: Failure Notification uses: rtCamp/action-slack-notify@v2 @@ -186,7 +160,7 @@ jobs: id: docker-config shell: bash run: | - echo '{"cgroup-parent": "/actions_job", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null + echo '{"cgroup-parent": "actions-job.slice", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null sudo service docker restart - name: Fetch images id: fetch-images diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml index 824ddd341..f8a3dc406 100644 --- a/.github/workflows/repoconfig-packages.yml +++ b/.github/workflows/repoconfig-packages.yml @@ -34,31 +34,10 @@ jobs: sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml - name: Read build matrix id: set-matrix - shell: python3 {0} run: | - from ruamel.yaml import YAML - import json - yaml = YAML(typ='safe') - entries = list() - - with open('.github/data/distros.yml') as f: - data = yaml.load(f) - - for i, v in enumerate(data['include']): - if 'packages' in data['include'][i]: - entries.append({ - 'distro': data['include'][i]['distro'], - 'version': data['include'][i]['version'], - 'pkgclouddistro': data['include'][i]['packages']['repo_distro'], - 'format': data['include'][i]['packages']['type'], - 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'], - 'platform': data['platform_map']['amd64'] - }) - - entries.sort(key=lambda k: (k['distro'], k['version'])) - matrix = json.dumps({'include': entries}, sort_keys=True) - print('Generated Matrix: ' + matrix) - print('::set-output name=matrix::' + matrix) + matrix="$(.github/scripts/gen-matrix-repoconfig.py)" + echo 
"Generated matrix: ${matrix}" + echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}" - name: Failure Notification uses: rtCamp/action-slack-notify@v2 env: @@ -117,7 +96,7 @@ jobs: /netdata/packaging/repoconfig/build-${{ matrix.format }}.sh - name: SSH setup id: ssh-setup - if: github.event_name == 'workflow_dispatch' + if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata' continue-on-error: true uses: shimataro/ssh-key-action@v2 with: @@ -127,23 +106,17 @@ jobs: - name: Upload to packages.netdata.cloud id: package-upload continue-on-error: true - if: github.event_name == 'workflow_dispatch' + if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata' run: | - .github/scripts/package-upload.sh \ - ${{ matrix.repo_distro }} \ - ${{ matrix.arch }} \ - ${{ matrix.format }} \ - netdata/netdata - .github/scripts/package-upload.sh \ - ${{ matrix.repo_distro }} \ - ${{ matrix.arch }} \ - ${{ matrix.format }} \ - netdata/netdata-edge - .github/scripts/package-upload.sh \ - ${{ matrix.repo_distro }} \ - ${{ matrix.arch }} \ - ${{ matrix.format }} \ - netdata/netdata-repoconfig + for arch in ${{ matrix.arches }}; do + for suffix in '' -edge -repoconfig ; do + .github/scripts/package-upload.sh \ + ${{ matrix.pkgclouddistro }} \ + ${arch} \ + ${{ matrix.format }} \ + netdata/netdata${suffix} + done + done - name: Upload Packages id: publish if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata' diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml index 5679b246c..7f12aeecd 100644 --- a/.github/workflows/review.yml +++ b/.github/workflows/review.yml @@ -29,56 +29,56 @@ jobs: id: actionlint run: | if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/actionlint') }}" = "true" ]; then - echo '::set-output name=run::true' + echo "run=true" >> "${GITHUB_OUTPUT}" elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.github/workflows/.*' ; then - echo '::set-output name=run::true' + echo "run=true" >> "${GITHUB_OUTPUT}" echo 'GitHub Actions workflows have changed, need to run actionlint.' else - echo '::set-output name=run::false' + echo "run=false" >> "${GITHUB_OUTPUT}" fi - name: Check files for eslint id: eslint run: | if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/eslint') }}" = "true" ]; then - echo '::set-output name=run::true' + echo "run=true" >> "${GITHUB_OUTPUT}" elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -v "web/gui/dashboard" | grep -Eq '.*\.js|node\.d\.plugin\.in' ; then - echo '::set-output name=run::true' + echo "run=true" >> "${GITHUB_OUTPUT}" echo 'JS files have changed, need to run ESLint.' else - echo '::set-output name=run::false' + echo "run=false" >> "${GITHUB_OUTPUT}" fi - name: Check files for hadolint id: hadolint run: | if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/hadolint') }}" = "true" ]; then - echo '::set-output name=run::true' + echo "run=true" >> "${GITHUB_OUTPUT}" elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*Dockerfile.*' ; then - echo '::set-output name=run::true' + echo "run=true" >> "${GITHUB_OUTPUT}" echo 'Dockerfiles have changed, need to run Hadolint.' 
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
index 5679b246c..7f12aeecd 100644
--- a/.github/workflows/review.yml
+++ b/.github/workflows/review.yml
@@ -29,56 +29,56 @@ jobs:
         id: actionlint
         run: |
           if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/actionlint') }}" = "true" ]; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.github/workflows/.*' ; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
             echo 'GitHub Actions workflows have changed, need to run actionlint.'
           else
-            echo '::set-output name=run::false'
+            echo "run=false" >> "${GITHUB_OUTPUT}"
           fi
       - name: Check files for eslint
         id: eslint
         run: |
           if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/eslint') }}" = "true" ]; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -v "web/gui/dashboard" | grep -Eq '.*\.js|node\.d\.plugin\.in' ; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
             echo 'JS files have changed, need to run ESLint.'
           else
-            echo '::set-output name=run::false'
+            echo "run=false" >> "${GITHUB_OUTPUT}"
           fi
       - name: Check files for hadolint
         id: hadolint
         run: |
           if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/hadolint') }}" = "true" ]; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*Dockerfile.*' ; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
             echo 'Dockerfiles have changed, need to run Hadolint.'
           else
-            echo '::set-output name=run::false'
+            echo "run=false" >> "${GITHUB_OUTPUT}"
           fi
       - name: Check files for shellcheck
         id: shellcheck
         run: |
           if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/shellcheck') }}" = "true" ]; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.sh.*' ; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
             echo 'Shell scripts have changed, need to run shellcheck.'
           else
-            echo '::set-output name=run::false'
+            echo "run=false" >> "${GITHUB_OUTPUT}"
           fi
       - name: Check files for yamllint
         id: yamllint
         run: |
           if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/yamllint') }}" = "true" ]; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
           elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.ya?ml|python\.d/.*\.conf' ; then
-            echo '::set-output name=run::true'
+            echo "run=true" >> "${GITHUB_OUTPUT}"
             echo 'YAML files have changed, need to run yamllint.'
           else
-            echo '::set-output name=run::false'
+            echo "run=false" >> "${GITHUB_OUTPUT}"
           fi

   actionlint: