author     Daniel Baumann <daniel.baumann@progress-linux.org>  2023-05-08 16:27:08 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2023-05-08 16:27:08 +0000
commit     81581f9719bc56f01d5aa08952671d65fda9867a (patch)
tree       0f5c6b6138bf169c23c9d24b1fc0a3521385cb18 /.github
parent     Releasing debian version 1.38.1-1. (diff)
download   netdata-81581f9719bc56f01d5aa08952671d65fda9867a.tar.xz
           netdata-81581f9719bc56f01d5aa08952671d65fda9867a.zip
Merging upstream version 1.39.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '.github')
-rw-r--r--  .github/CODEOWNERS                             5
-rw-r--r--  .github/ISSUE_TEMPLATE/BUG_REPORT.yml          7
-rw-r--r--  .github/data/distros.yml                      82
-rwxr-xr-x  .github/scripts/ci-support-pkgs.sh            17
-rwxr-xr-x  .github/scripts/gen-matrix-build.py            2
-rwxr-xr-x  .github/scripts/gen-matrix-eol-check.py       29
-rwxr-xr-x  .github/scripts/gen-matrix-packaging.py        2
-rwxr-xr-x  .github/scripts/gen-matrix-repoconfig.py       2
-rwxr-xr-x  .github/scripts/pkg-test.sh                   36
-rwxr-xr-x  .github/scripts/platform-impending-eol.py     58
-rwxr-xr-x  .github/scripts/run-updater-check.sh           1
-rw-r--r--  .github/workflows/add-to-project.yml           4
-rw-r--r--  .github/workflows/build.yml                   16
-rw-r--r--  .github/workflows/coverity.yml                 2
-rw-r--r--  .github/workflows/dashboard-pr.yml             2
-rw-r--r--  .github/workflows/docker.yml                  17
-rw-r--r--  .github/workflows/labeler.yml                  2
-rw-r--r--  .github/workflows/packaging.yml               35
-rw-r--r--  .github/workflows/platform-eol-check.yml     153
-rw-r--r--  .github/workflows/release.yml                  8
-rw-r--r--  .github/workflows/repoconfig-packages.yml     47
-rw-r--r--  .github/workflows/review.yml                  85
-rw-r--r--  .github/workflows/tests.yml                    2
23 files changed, 508 insertions(+), 106 deletions(-)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 34c934550..7857b9a73 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -19,7 +19,7 @@ collectors/cups.plugin/ @thiagoftsm
exporting/ @thiagoftsm
daemon/ @thiagoftsm @vkalintiris
database/ @thiagoftsm @vkalintiris
-docs/ @tkatsoulas
+docs/ @tkatsoulas @andrewm4894 @cakrit
health/ @thiagoftsm @vkalintiris @MrZammler
health/health.d/ @thiagoftsm @MrZammler
health/notifications/ @Ferroin @thiagoftsm @MrZammler
@@ -35,7 +35,8 @@ web/gui/ @jacekkolasa
# Ownership by filetype (overwrites ownership by directory)
*.am @Ferroin @tkatsoulas
-*.md @tkatsoulas
+*.md @tkatsoulas @andrewm4894 @cakrit
+*.mdx @tkatsoulas @andrewm4894 @cakrit
Dockerfile* @Ferroin @tkatsoulas
# Ownership of specific files
diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
index d82d46331..b63daba8a 100644
--- a/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
+++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.yml
@@ -51,12 +51,11 @@ body:
Describe the method in the "Additional info" section if you chose "other".
options:
- "kickstart.sh"
- - "kickstart-static64.sh"
- - "native binary packages (.deb/.rpm)"
- - "from git"
- - "from source"
- "docker"
- "helmchart (kubernetes)"
+ - "manual setup of official DEB/RPM packages"
+ - "from git"
+ - "from source"
- "other"
validations:
required: true
diff --git a/.github/data/distros.yml b/.github/data/distros.yml
index 452170c07..bfe8b7615 100644
--- a/.github/data/distros.yml
+++ b/.github/data/distros.yml
@@ -1,6 +1,6 @@
# This defines the full set of distros we run CI on.
---
-platform_map: # map packaging architectures to docker platforms
+platform_map: # map packaging architectures to docker platforms
aarch64: linux/arm64/v8
amd64: linux/amd64
arm64: linux/arm64/v8
@@ -8,7 +8,7 @@ platform_map: # map packaging architectures to docker platforms
armhfp: linux/arm/v7
i386: linux/i386
x86_64: linux/amd64
-arch_order: # sort order for per-architecture jobs in CI
+arch_order: # sort order for per-architecture jobs in CI
- amd64
- x86_64
- i386
@@ -20,6 +20,7 @@ include:
- &alpine
distro: alpine
version: edge
+ eol_check: false
env_prep: |
apk add -U bash
jsonc_removal: |
@@ -28,15 +29,17 @@ include:
ebpf-core: true
- <<: *alpine
version: "3.17"
+ eol_check: true
- <<: *alpine
version: "3.16"
+ eol_check: true
- <<: *alpine
version: "3.15"
- - <<: *alpine
- version: "3.14"
+ eol_check: true
- distro: archlinux
version: latest
+ eol_check: false
env_prep: |
pacman --noconfirm -Syu && pacman --noconfirm -Sy grep libffi
test:
@@ -45,9 +48,9 @@ include:
- &alma
distro: almalinux
version: "9"
- base_image: almalinux
jsonc_removal: |
dnf remove -y json-c-devel
+ eol_check: true
packages: &alma_packages
type: rpm
repo_distro: el/9
@@ -68,8 +71,28 @@ include:
- el/8Server
- el/8Client
+ - &amzn
+ distro: amazonlinux
+ version: "2"
+ eol_check: 'amazon-linux'
+ packages: &amzn_packages
+ type: rpm
+ repo_distro: amazonlinux/2
+ arches:
+ - x86_64
+ - aarch64
+ test:
+ ebpf-core: false
+ - <<: *amzn
+ version: "2023"
+ packages:
+ <<: *amzn_packages
+ repo_distro: amazonlinux/2023
+
+
- distro: centos
version: "7"
+ eol_check: false
packages:
type: rpm
repo_distro: el/7
@@ -83,14 +106,16 @@ include:
- &debian
distro: debian
- version: "11"
+ version: "12"
+ base_image: debian:bookworm
+ eol_check: true
env_prep: |
apt-get update
jsonc_removal: |
apt-get purge -y libjson-c-dev
packages: &debian_packages
type: deb
- repo_distro: debian/bullseye
+ repo_distro: debian/bookworm
arches:
- i386
- amd64
@@ -99,7 +124,16 @@ include:
test:
ebpf-core: true
- <<: *debian
+ version: "11"
+ base_image: debian:bullseye
+ packages:
+ <<: *debian_packages
+ repo_distro: debian/bullseye
+ test:
+ ebpf-core: false
+ - <<: *debian
version: "10"
+ base_image: debian:buster
packages:
<<: *debian_packages
repo_distro: debian/buster
@@ -108,18 +142,26 @@ include:
- &fedora
distro: fedora
- version: "37"
+ version: "38"
+ eol_check: true
jsonc_removal: |
dnf remove -y json-c-devel
packages: &fedora_packages
type: rpm
- repo_distro: fedora/37
+ repo_distro: fedora/38
arches:
- x86_64
- aarch64
test:
ebpf-core: true
- <<: *fedora
+ version: "37"
+ packages:
+ <<: *fedora_packages
+ repo_distro: fedora/37
+ test:
+ ebpf-core: true
+ - <<: *fedora
version: "36"
packages:
<<: *fedora_packages
@@ -130,7 +172,8 @@ include:
- &opensuse
distro: opensuse
version: "15.4"
- base_image: opensuse/leap
+ eol_check: true
+ base_image: opensuse/leap:15.4
jsonc_removal: |
zypper rm -y libjson-c-devel
packages: &opensuse_packages
@@ -145,6 +188,7 @@ include:
- &oracle
distro: oraclelinux
version: "8"
+ eol_check: true
jsonc_removal: |
dnf remove -y json-c-devel
packages: &oracle_packages
@@ -164,6 +208,7 @@ include:
- &ubuntu
distro: ubuntu
version: "22.10"
+ eol_check: true
env_prep: |
rm -f /etc/apt/apt.conf.d/docker && apt-get update
jsonc_removal: |
@@ -178,6 +223,11 @@ include:
test:
ebpf-core: true
- <<: *ubuntu
+ version: "23.04"
+ packages:
+ <<: *ubuntu_packages
+ repo_distro: ubuntu/lunar
+ - <<: *ubuntu
version: "22.04"
packages:
<<: *ubuntu_packages
@@ -187,15 +237,3 @@ include:
packages:
<<: *ubuntu_packages
repo_distro: ubuntu/focal
- - <<: *ubuntu
- version: "18.04"
- packages:
- <<: *ubuntu_packages
- repo_distro: ubuntu/bionic
- arches:
- - i386
- - amd64
- - armhf
- - arm64
- test:
- ebpf-core: false
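
Note: the gen-matrix-* helper scripts touched in this commit read .github/data/distros.yml with ruamel.yaml's safe loader, which expands the anchors and merge keys used above ("&alpine", "<<: *alpine") before the matrix entries are built. A minimal sketch of that expansion, not part of the commit itself, using a trimmed copy of the alpine entries above:

# Sketch only: shows how a "<<: *alpine" entry expands when distros.yml is
# loaded the same way the gen-matrix-* scripts load it.
import io
import textwrap

from ruamel.yaml import YAML

snippet = textwrap.dedent('''\
    include:
      - &alpine
        distro: alpine
        version: edge
        eol_check: false
      - <<: *alpine
        version: "3.17"
        eol_check: true
''')

data = YAML(typ='safe').load(io.StringIO(snippet))
print(data['include'][1])
# {'distro': 'alpine', 'version': '3.17', 'eol_check': True}
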
diff --git a/.github/scripts/ci-support-pkgs.sh b/.github/scripts/ci-support-pkgs.sh
index bfa9c83a5..9ba11b68e 100755
--- a/.github/scripts/ci-support-pkgs.sh
+++ b/.github/scripts/ci-support-pkgs.sh
@@ -5,10 +5,13 @@
set -e
-if [ -f /etc/centos-release ] || [ -f /etc/redhat-release ] || [ -f /etc/fedora-release ] || [ -f /etc/almalinux-release ]; then
- # Alma, Fedora, CentOS, Redhat
- dnf install -y procps-ng cronie cronie-anacron || yum install -y procps-ng cronie cronie-anacron
-elif [ -f /etc/arch-release ]; then
- # Arch
- pacman -S --noconfirm cronie
-fi
+. /etc/os-release
+
+case "${ID}" in
+ amzn|almalinux|centos|fedora)
+ dnf install -y procps-ng cronie cronie-anacron || yum install -y procps-ng cronie cronie-anacron
+ ;;
+ arch)
+ pacman -S --noconfirm cronie
+ ;;
+esac
diff --git a/.github/scripts/gen-matrix-build.py b/.github/scripts/gen-matrix-build.py
index 28406470f..3185e8836 100755
--- a/.github/scripts/gen-matrix-build.py
+++ b/.github/scripts/gen-matrix-build.py
@@ -17,7 +17,7 @@ for i, v in enumerate(data['include']):
}
if 'base_image' in v:
- e['distro'] = ':'.join([v['base_image'], str(v['version'])])
+ e['distro'] = v['base_image']
else:
e['distro'] = ':'.join([v['distro'], str(v['version'])])
diff --git a/.github/scripts/gen-matrix-eol-check.py b/.github/scripts/gen-matrix-eol-check.py
new file mode 100755
index 000000000..638527284
--- /dev/null
+++ b/.github/scripts/gen-matrix-eol-check.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+'''Generate the build matrix for the EOL check jobs.'''
+
+import json
+
+from ruamel.yaml import YAML
+
+yaml = YAML(typ='safe')
+entries = list()
+
+with open('.github/data/distros.yml') as f:
+ data = yaml.load(f)
+
+for item in data['include']:
+ if 'eol_check' in item and item['eol_check']:
+ if isinstance(item['eol_check'], str):
+ distro = item['eol_check']
+ else:
+ distro = item['distro']
+
+ entries.append({
+ 'distro': distro,
+ 'release': item['version'],
+ 'full_name': f'{ item["distro"] } { item["version"] }'
+ })
+
+entries.sort(key=lambda k: (k['distro'], k['release']))
+matrix = json.dumps({'include': entries}, sort_keys=True)
+print(matrix)
diff --git a/.github/scripts/gen-matrix-packaging.py b/.github/scripts/gen-matrix-packaging.py
index 01e9ec790..9347cd767 100755
--- a/.github/scripts/gen-matrix-packaging.py
+++ b/.github/scripts/gen-matrix-packaging.py
@@ -26,7 +26,7 @@ for i, v in enumerate(data['include']):
'version': data['include'][i]['version'],
'repo_distro': data['include'][i]['packages']['repo_distro'],
'format': data['include'][i]['packages']['type'],
- 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
'platform': data['platform_map'][arch],
'arch': arch
})
diff --git a/.github/scripts/gen-matrix-repoconfig.py b/.github/scripts/gen-matrix-repoconfig.py
index 46f671697..264cd53e7 100755
--- a/.github/scripts/gen-matrix-repoconfig.py
+++ b/.github/scripts/gen-matrix-repoconfig.py
@@ -17,7 +17,7 @@ for i, v in enumerate(data['include']):
'version': data['include'][i]['version'],
'pkgclouddistro': data['include'][i]['packages']['repo_distro'],
'format': data['include'][i]['packages']['type'],
- 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else data['include'][i]['distro'],
+ 'base_image': data['include'][i]['base_image'] if 'base_image' in data['include'][i] else ':'.join([data['include'][i]['distro'], data['include'][i]['version']]),
'platform': data['platform_map']['amd64'],
'arches': ' '.join(['"' + x + '"' for x in data['include'][i]['packages']['arches']])
})
diff --git a/.github/scripts/pkg-test.sh b/.github/scripts/pkg-test.sh
index e3bc3e7d4..45b8c320b 100755
--- a/.github/scripts/pkg-test.sh
+++ b/.github/scripts/pkg-test.sh
@@ -13,6 +13,7 @@ install_debian_like() {
apt-get update
# Install Netdata
+ # Strange quoting is required here so that glob matching works.
apt-get install -y /netdata/artifacts/netdata_"${VERSION}"*_*.deb || exit 1
# Install testing tools
@@ -27,8 +28,13 @@ install_fedora_like() {
pkg_version="$(echo "${VERSION}" | tr - .)"
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
# Install Netdata
- "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm
+ # Strange quoting is required here so that glob matching works.
+ "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm || exit 1
# Install testing tools
"$PKGMGR" install -y curl nc jq || exit 1
@@ -50,9 +56,29 @@ install_centos() {
"$PKGMGR" install -y epel-release || exit 1
# Install Netdata
- "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm
+ # Strange quoting is required here so that glob matching works.
+ "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm || exit 1
# Install testing tools
+ # shellcheck disable=SC2086
+ "$PKGMGR" install -y ${opts} curl nc jq || exit 1
+}
+
+install_amazon_linux() {
+ PKGMGR="$( (command -v dnf > /dev/null && echo "dnf") || echo "yum")"
+
+ pkg_version="$(echo "${VERSION}" | tr - .)"
+
+ if [ "${PKGMGR}" = "dnf" ]; then
+ opts="--allowerasing"
+ fi
+
+ # Install Netdata
+ # Strange quoting is required here so that glob matching works.
+ "$PKGMGR" install -y /netdata/artifacts/netdata-"${pkg_version}"-*.rpm || exit 1
+
+ # Install testing tools
+ # shellcheck disable=SC2086
"$PKGMGR" install -y ${opts} curl nc jq || exit 1
}
@@ -63,7 +89,8 @@ install_suse_like() {
pkg_version="$(echo "${VERSION}" | tr - .)"
# Install Netdata
- zypper install -y --allow-unsigned-rpm /netdata/artifacts/netdata-"${pkg_version}"-*.rpm
+ # Strange quoting is required here so that glob matching works.
+ zypper install -y --allow-unsigned-rpm /netdata/artifacts/netdata-"${pkg_version}"-*.rpm || exit 1
# Install testing tools
zypper install -y --no-recommends curl netcat-openbsd jq || exit 1
@@ -114,6 +141,9 @@ case "${DISTRO}" in
centos | rockylinux | almalinux)
install_centos
;;
+ amazonlinux)
+ install_amazon_linux
+ ;;
opensuse)
install_suse_like
;;
diff --git a/.github/scripts/platform-impending-eol.py b/.github/scripts/platform-impending-eol.py
new file mode 100755
index 000000000..c57e5edde
--- /dev/null
+++ b/.github/scripts/platform-impending-eol.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+'''Check if a given distro is going to be EOL soon.
+
+ This queries the public API of https://endoflife.date to fetch EOL dates.
+
+ ‘soon’ is defined by LEAD_DAYS, currently 30 days.'''
+
+import datetime
+import json
+import sys
+import urllib.request
+
+URL_BASE = 'https://endoflife.date/api'
+NOW = datetime.date.today()
+LEAD_DAYS = datetime.timedelta(days=30)
+
+DISTRO = sys.argv[1]
+RELEASE = sys.argv[2]
+
+EXIT_NOT_IMPENDING = 0
+EXIT_IMPENDING = 1
+EXIT_NO_DATA = 2
+EXIT_FAILURE = 3
+
+try:
+ with urllib.request.urlopen(f'{ URL_BASE }/{ DISTRO }/{ RELEASE }.json') as response:
+ match response.status:
+ case 200:
+ data = json.load(response)
+ case _:
+ print(
+ f'Failed to retrieve data for { DISTRO } { RELEASE } ' +
+ f'(status: { response.status }).',
+ file=sys.stderr
+ )
+ sys.exit(EXIT_FAILURE)
+except urllib.error.HTTPError as e:
+ match e.code:
+ case 404:
+ print(f'No data available for { DISTRO } { RELEASE }.', file=sys.stderr)
+ sys.exit(EXIT_NO_DATA)
+ case _:
+ print(
+ f'Failed to retrieve data for { DISTRO } { RELEASE } ' +
+ f'(status: { e.code }).',
+ file=sys.stderr
+ )
+ sys.exit(EXIT_FAILURE)
+
+eol = datetime.date.fromisoformat(data['eol'])
+
+offset = abs(eol - NOW)
+
+if offset <= LEAD_DAYS:
+ print(data['eol'])
+ sys.exit(EXIT_IMPENDING)
+else:
+ sys.exit(EXIT_NOT_IMPENDING)
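
Note: the exit-code contract defined above (0 = not impending, 1 = impending, 2 = no data, 3 = failure) is what the new platform-eol-check.yml workflow later in this diff branches on. A rough sketch, not part of the commit, of exercising the script directly; the debian/11 arguments are only an example:

# Sketch only: run platform-impending-eol.py from the repository root and
# translate its exit code back into a label.
import subprocess

result = subprocess.run(
    ['.github/scripts/platform-impending-eol.py', 'debian', '11'],
    capture_output=True,
    text=True,
)
label = {0: 'not impending', 1: 'impending', 2: 'no EOL data'}.get(result.returncode, 'check failed')
# stdout carries the EOL date only when the release is impending (exit code 1).
print(label, result.stdout.strip())
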
diff --git a/.github/scripts/run-updater-check.sh b/.github/scripts/run-updater-check.sh
index a96a1d6ef..1224d8f67 100755
--- a/.github/scripts/run-updater-check.sh
+++ b/.github/scripts/run-updater-check.sh
@@ -2,6 +2,7 @@
echo ">>> Installing CI support packages..."
/netdata/.github/scripts/ci-support-pkgs.sh
+mkdir -p /etc/cron.daily # Needed to make auto-update checking work correctly on some platforms.
echo ">>> Installing Netdata..."
/netdata/packaging/installer/kickstart.sh --dont-wait --build-only --disable-telemetry || exit 1
echo "::group::>>> Pre-Update Environment File Contents"
diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml
index a80d8b41d..986d836a1 100644
--- a/.github/workflows/add-to-project.yml
+++ b/.github/workflows/add-to-project.yml
@@ -13,13 +13,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Add issues to Agent project board
- uses: actions/add-to-project@v0.4.0
+ uses: actions/add-to-project@v0.5.0
with:
project-url: https://github.com/orgs/netdata/projects/32
github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
- name: Add issues to Product Bug project board
- uses: actions/add-to-project@v0.4.0
+ uses: actions/add-to-project@v0.5.0
with:
project-url: https://github.com/orgs/netdata/projects/45
github-token: ${{ secrets.NETDATABOT_ORG_GITHUB_TOKEN }}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index c3924fb0c..c349e4fdd 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -118,9 +118,11 @@ jobs:
run: |
sed -i 's/^RELEASE_CHANNEL="nightly" *#/RELEASE_CHANNEL="stable" #/' netdata-installer.sh packaging/makeself/install-or-update.sh
- name: Get Cache Key
+ if: github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')
id: cache-key
- run: .github/scripts/get-static-cache-key.sh ${{ matrix.arch }}
+ run: .github/scripts/get-static-cache-key.sh ${{ matrix.arch }} "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache') }}"
- name: Cache
+ if: github.event_name != 'pull_request' || ! contains(github.event.pull_request.labels.*.name, 'run-ci/no-cache')
id: cache
uses: actions/cache@v3
with:
@@ -135,7 +137,7 @@ jobs:
uses: nick-fields/retry@v2
with:
timeout_minutes: 180
- retries: 3
+ max_attempts: 3
command: .github/scripts/build-static.sh ${{ matrix.arch }}
- name: Store
id: store
@@ -232,7 +234,7 @@ jobs:
uses: docker/setup-buildx-action@v2
- name: Build test environment
id: build1
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here.
with:
push: false
@@ -250,7 +252,7 @@ jobs:
- name: Build test environment (attempt 2)
if: ${{ steps.build1.outcome == 'failure' }}
id: build2
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
continue-on-error: true # We retry 3 times at 5 minute intervals if there is a failure here.
with:
push: false
@@ -268,7 +270,7 @@ jobs:
- name: Build test environment (attempt 3)
if: ${{ steps.build1.outcome == 'failure' && steps.build2.outcome == 'failure' }}
id: build3
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
push: false
load: false
@@ -657,10 +659,10 @@ jobs:
credentials_json: ${{ secrets.GCS_STORAGE_SERVICE_KEY_JSON }}
- name: Setup GCS
id: gcs-setup
- uses: google-github-actions/setup-gcloud@v1.0.1
+ uses: google-github-actions/setup-gcloud@v1.1.0
- name: Upload Artifacts
id: upload
- uses: google-github-actions/upload-cloud-storage@v1.0.0
+ uses: google-github-actions/upload-cloud-storage@v1.0.1
with:
destination: ${{ secrets.GCP_NIGHTLY_STORAGE_BUCKET }}
gzip: false
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
index 9d1119a82..8a1ee2486 100644
--- a/.github/workflows/coverity.yml
+++ b/.github/workflows/coverity.yml
@@ -30,7 +30,7 @@ jobs:
run: |
./packaging/installer/install-required-packages.sh \
--dont-wait --non-interactive netdata
- sudo apt-get install -y libjson-c-dev libipmimonitoring-dev \
+ sudo apt-get install -y libjson-c-dev libyaml-dev libipmimonitoring-dev \
libcups2-dev libsnappy-dev libprotobuf-dev \
libprotoc-dev libssl-dev protobuf-compiler \
libnetfilter-acct-dev
diff --git a/.github/workflows/dashboard-pr.yml b/.github/workflows/dashboard-pr.yml
index c99f98919..c7d14c486 100644
--- a/.github/workflows/dashboard-pr.yml
+++ b/.github/workflows/dashboard-pr.yml
@@ -28,7 +28,7 @@ jobs:
web/gui/bundle_dashboard.py ${{ github.event.inputs.dashboard_version }}
- name: Create Pull Request
id: pr
- uses: peter-evans/create-pull-request@v4
+ uses: peter-evans/create-pull-request@v5
with:
title: 'Update dashboard to version ${{ github.event.inputs.dashboard_version }}.'
body: 'See https://github.com/netdata/dashboard/releases/tag/${{ github.event.inputs.dashboard_version }} for changes.'
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 78a39d5a2..5eec3df74 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -31,7 +31,7 @@ jobs:
uses: docker/setup-buildx-action@v2
- name: Test Build
id: build
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
load: true
push: false
@@ -89,7 +89,7 @@ jobs:
uses: docker/setup-buildx-action@v2
- name: Build
id: build
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
platforms: ${{ matrix.platforms }}
load: false
@@ -193,7 +193,7 @@ jobs:
password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
- name: Docker Build
id: build
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
push: ${{ github.repository == 'netdata/netdata' }}
@@ -236,6 +236,15 @@ jobs:
workflow: Agent Version PR
ref: refs/heads/master
inputs: '{"agent_version": "${{ needs.normalize-tag.outputs.tag }}"}'
+ - name: Trigger MSI build
+ if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly' && github.repository == 'netdata/netdata'
+ uses: benc-uk/workflow-dispatch@v1
+ with:
+ token: ${{ secrets.NETDATABOT_GITHUB_TOKEN }}
+ repo: netdata/msi-installer
+ workflow: Build
+ ref: refs/heads/master
+ inputs: '{"tag": "${{ needs.normalize-tag.outputs.tag }}", "pwd": "${{ secrets.MSI_CODE_SIGNING_PASSWORD }}"}'
docker-dbg-publish:
if: github.event_name == 'workflow_dispatch'
@@ -296,7 +305,7 @@ jobs:
password: ${{ secrets.NETDATABOT_QUAY_TOKEN }}
- name: Docker Build
id: build
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64,linux/ppc64le
push: ${{ github.repository == 'netdata/netdata' }}
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index 2b8b41fcb..a1e3b52fe 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -4,7 +4,7 @@ name: Pull Request Labeler
on:
pull_request_target: null
concurrency:
- group: pr-label-${{ github.ref }}
+ group: pr-label-${{ github.repository_id }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
jobs:
labeler:
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
index c99f535ab..a8d502847 100644
--- a/.github/workflows/packaging.yml
+++ b/.github/workflows/packaging.yml
@@ -170,7 +170,7 @@ jobs:
retry_wait_seconds: 30
timeout_seconds: 900
command: |
- docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}:${{ matrix.version }}
+ docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}
docker pull --platform ${{ matrix.platform }} netdata/package-builders:${{ matrix.distro }}${{ matrix.version }}
- name: Build Packages
id: build
@@ -191,12 +191,25 @@ jobs:
run: |
docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 -e DISTRO=${{ matrix.distro }} \
-e VERSION=${{ needs.version-check.outputs.version }} -e DISTRO_VERSION=${{ matrix.version }} \
- --platform=${{ matrix.platform }} -v "$PWD":/netdata ${{ matrix.base_image }}:${{ matrix.version }} \
+ --platform=${{ matrix.platform }} -v "$PWD":/netdata ${{ matrix.base_image }} \
/netdata/.github/scripts/pkg-test.sh
+ - name: Upload to PackageCloud
+ id: upload
+ if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
+ continue-on-error: true
+ shell: bash
+ env:
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ for pkgfile in artifacts/*.${{ matrix.format }} ; do
+ .github/scripts/package_cloud_wrapper.sh yank ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} "${pkgfile}"
+ done
- name: SSH setup
id: ssh-setup
if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
- continue-on-error: true
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
@@ -204,7 +217,6 @@ jobs:
known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
- name: Upload to packages.netdata.cloud
id: package-upload
- continue-on-error: true
if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
run: |
.github/scripts/package-upload.sh \
@@ -212,19 +224,6 @@ jobs:
${{ matrix.arch }} \
${{ matrix.format }} \
${{ needs.version-check.outputs.repo }}
- - name: Upload to PackageCloud
- id: upload
- if: github.event_name == 'workflow_dispatch' && github.repository == 'netdata/netdata'
- shell: bash
- env:
- PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
- run: |
- printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
- for pkgfile in artifacts/*.${{ matrix.format }} ; do
- .github/scripts/package_cloud_wrapper.sh yank ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} \
- "$(basename "${pkgfile}")" || true
- .github/scripts/package_cloud_wrapper.sh push ${{ needs.version-check.outputs.repo }}/${{ matrix.repo_distro }} "${pkgfile}"
- done
- name: Failure Notification
uses: rtCamp/action-slack-notify@v2
env:
@@ -240,9 +239,9 @@ jobs:
Fetch images: ${{ steps.fetch-images.outcome }}
Build: ${{ steps.build.outcome }}
Test: ${{ steps.test.outcome }}
+ Publish to PackageCloud: ${{ steps.upload.outcome }}
Import SSH Key: ${{ steps.ssh-setup.outcome }}
Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
- Publish to PackageCloud: ${{ steps.upload.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
if: >-
${{
diff --git a/.github/workflows/platform-eol-check.yml b/.github/workflows/platform-eol-check.yml
new file mode 100644
index 000000000..d1f4416cd
--- /dev/null
+++ b/.github/workflows/platform-eol-check.yml
@@ -0,0 +1,153 @@
+---
+# Auto-generate issues for EOL of platforms that are approaching their EOL date.
+# Uses https://endoflife.date and their new API to check for EOL dates.
+#
+# Issues are created when the EOL date is within the next 30 days.
+name: Check Platform EOL
+on: # Run weekly and whenever manually triggered
+ schedule:
+ - cron: '0 3 * * 1'
+ workflow_dispatch: null
+concurrency: # Simple single-instance concurrency.
+ group: eol-check-${{ github.repository }}
+ cancel-in-progress: true
+jobs:
+ # Prepare the build matrix.
+ # This uses output from .github/scripts/gen-matrix-eol-check.py
+ matrix:
+ name: Prepare Build Matrix
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-matrix.outputs.matrix }}
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ - name: Prepare tools
+ id: prepare
+ run: |
+ sudo apt-get update && sudo apt-get install -y python3-ruamel.yaml
+ - name: Read build matrix
+ id: set-matrix
+ run: |
+ matrix="$(.github/scripts/gen-matrix-eol-check.py)"
+ echo "Generated matrix: ${matrix}"
+ echo "matrix=${matrix}" >> "${GITHUB_OUTPUT}"
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Failed to generate build matrix for platform EOL checks:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: Build matrix generation for scheduled platform EOL check has failed:
+ Checkout: ${{ steps.checkout.outcome }}
+ Prepare Tools: ${{ steps.prepare.outcome }}
+ Read Build Matrix: ${{ steps.set-matrix.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'schedule'
+ && github.repository == 'netdata/netdata'
+ }}
+
+ eol-check:
+ name: EOL Check
+ runs-on: ubuntu-latest
+ needs:
+ - matrix
+ strategy:
+ matrix: ${{ fromJson(needs.matrix.outputs.matrix) }}
+ fail-fast: false # We want to check everything, so don’t bail on the first failure.
+ max-parallel: 2 # Cap of two jobs at a time to limit impact on other CI.
+ steps:
+ - name: Checkout
+ id: checkout
+ uses: actions/checkout@v3
+ # Actually check the EOL date for the platform.
+ - name: Check EOL Date
+ id: check
+ shell: sh {0}
+ run: |
+ d="$(.github/scripts/platform-impending-eol.py ${{ matrix.distro }} ${{ matrix.release }})"
+ case $? in
+ 0) echo "pending=false" >> "${GITHUB_OUTPUT}" ;;
+ 1)
+ echo "pending=true" >> "${GITHUB_OUTPUT}"
+ echo "date=${d}" >> "${GITHUB_OUTPUT}"
+ ;;
+ 2)
+ echo "pending=false" >> "${GITHUB_OUTPUT}"
+ echo "::info::No EOL information found for ${{ matrix.full_name }}"
+ ;;
+ *)
+ echo "::error::Failed to check EOL date for ${{ matrix.full_name }}"
+ exit 1
+ ;;
+ esac
+ # Figure out the issue title.
+ # This is it’s own step so we only have to set it in one place.
+ - name: Determine Issue Title
+ id: title
+ if: steps.check.outputs.pending == 'true'
+ run: |
+ echo "title=[Platform EOL]: ${{ matrix.full_name }} will be EOL soon." >> "${GITHUB_OUTPUT}"
+ # Check if there is an existing issue in the repo for the platform EOL.
+ # The actual command line to make the check is unfortunately
+ # complicated because GitHub thinks that it’s sensible to exit
+ # with a status of 0 if there are no results for a search.
+ - name: Check for Existing Issue
+ id: existing
+ if: steps.check.outputs.pending == 'true'
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: |
+ set -e
+ count=$(gh issue list -R netdata/netdata -s all -S '${{ steps.title.outputs.title }} in:title' --json 'id' -q '. | length')
+ if [ "${count}" -ge 1 ]; then
+ echo 'exists=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'exists=false' >> "${GITHUB_OUTPUT}"
+ fi
+ # If the platform is near EOL and there is no existing issue, create one.
+ - name: Create EOL Issue
+ id: create-issue
+ if: steps.check.outputs.pending == 'true' && steps.existing.outputs.exists == 'false'
+ uses: imjohnbo/issue-bot@v3
+ with:
+ assignees: Ferroin, tkatsoulas
+ labels: area/packaging, needs triage
+ title: ${{ steps.title.outputs.title }}
+ body: |
+ Based on information from https://endoflife.date/${{ matrix.distro }}, upstream support for ${{ matrix.full_name }} will be ending on ${{ steps.check.outputs.date }}. A PR should be created to remove this platform from our platform support document, CI, and packaging code.
+
+ - [ ] Remove platform from `packaging/PLATFORM_SUPPORT.md`
+ - [ ] Remove platform from `.github/data/distros.yml`
+ - [ ] Remove platform package builder from helper-images repo (if applicable).
+ - [ ] Verify any other platform support code that needs to be cleaned up.
+ # Send a notification to Slack if a job failed.
+ - name: Failure Notification
+ uses: rtCamp/action-slack-notify@v2
+ env:
+ SLACK_COLOR: 'danger'
+ SLACK_FOOTER: ''
+ SLACK_ICON_EMOJI: ':github-actions:'
+ SLACK_TITLE: 'Platform EOL check failed:'
+ SLACK_USERNAME: 'GitHub Actions'
+ SLACK_MESSAGE: |-
+ ${{ github.repository }}: A scheduled check for the EOL status of ${{ matrix.full_name }} has failed.
+ Checkout: ${{ steps.checkout.outcome }}
+ Check EOL Status: ${{ steps.check.outcome }}
+ Generate Issue Title: ${{ steps.title.outcome }}
+ Check for Existing Issue: ${{ steps.existing.outcome }}
+ Create Issue: ${{ steps.create-issue.outcome }}
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+ if: >-
+ ${{
+ failure()
+ && github.event_name == 'schedule'
+ && github.repository == 'netdata/netdata'
+ }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index e16ecaba7..ef9bf94b4 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout
id: checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
fetch-depth: 0
submodules: recursive
@@ -116,7 +116,7 @@ jobs:
steps:
- name: Checkout
id: checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
ref: ${{ needs.update-changelogs.outputs.ref }}
- name: Trigger build
@@ -151,7 +151,7 @@ jobs:
steps:
- name: Checkout
id: checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
ref: ${{ needs.update-changelogs.outputs.ref }}
- name: Trigger build
@@ -186,7 +186,7 @@ jobs:
steps:
- name: Checkout
id: checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
with:
ref: ${{ needs.update-changelogs.outputs.ref }}
- name: Trigger build
diff --git a/.github/workflows/repoconfig-packages.yml b/.github/workflows/repoconfig-packages.yml
index f8a3dc406..e2b41570f 100644
--- a/.github/workflows/repoconfig-packages.yml
+++ b/.github/workflows/repoconfig-packages.yml
@@ -86,18 +86,37 @@ jobs:
max_attempts: 3
retry_wait_seconds: 30
timeout_seconds: 900
- command: docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}:${{ matrix.version }}
+ command: docker pull --platform ${{ matrix.platform }} ${{ matrix.base_image }}
- name: Build Packages
id: build
shell: bash
run: |
docker run --security-opt seccomp=unconfined -e DISABLE_TELEMETRY=1 --platform ${{ matrix.platform }} \
- -v "$PWD":/netdata ${{ matrix.base_image }}:${{ matrix.version }} \
+ -v "$PWD":/netdata ${{ matrix.base_image }} \
/netdata/packaging/repoconfig/build-${{ matrix.format }}.sh
+ - name: Upload Packages
+ id: publish
+ if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
+ continue-on-error: true
+ shell: bash
+ env:
+ PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+ run: |
+ printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
+ for pkgfile in artifacts/*.${{ matrix.format }} ; do
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" \
+ "$(basename "${pkgfile}")" || true
+ .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" "${pkgfile}"
+ done
- name: SSH setup
id: ssh-setup
if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
- continue-on-error: true
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.NETDATABOT_PACKAGES_SSH_KEY }}
@@ -105,7 +124,6 @@ jobs:
known_hosts: ${{ secrets.PACKAGES_KNOWN_HOSTS }}
- name: Upload to packages.netdata.cloud
id: package-upload
- continue-on-error: true
if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
run: |
for arch in ${{ matrix.arches }}; do
@@ -117,25 +135,6 @@ jobs:
netdata/netdata${suffix}
done
done
- - name: Upload Packages
- id: publish
- if: github.event_name != 'pull_request' && github.repository == 'netdata/netdata'
- shell: bash
- env:
- PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
- run: |
- printf "Packages to upload:\n%s" "$(ls artifacts/*.${{ matrix.format }})"
- for pkgfile in artifacts/*.${{ matrix.format }} ; do
- .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" \
- "$(basename "${pkgfile}")" || true
- .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}/${{ matrix.pkgclouddistro }}" "${pkgfile}"
- .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" \
- "$(basename "${pkgfile}")" || true
- .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-edge/${{ matrix.pkgclouddistro }}" "${pkgfile}"
- .github/scripts/package_cloud_wrapper.sh yank "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" \
- "$(basename "${pkgfile}")" || true
- .github/scripts/package_cloud_wrapper.sh push "${REPO_PREFIX}-repoconfig/${{ matrix.pkgclouddistro }}" "${pkgfile}"
- done
- name: Failure Notification
if: ${{ failure() && github.repository == 'netdata/netdata' }}
uses: rtCamp/action-slack-notify@v2
@@ -150,7 +149,7 @@ jobs:
Checkout: ${{ steps.checkout.outcome }}
Fetch images: ${{ steps.fetch-images.outcome }}
Build: ${{ steps.build.outcome }}
+ Publish to PackageCloud: ${{ steps.publish.outcome }}
Import SSH Key: ${{ steps.ssh-setup.outcome }}
Publish to packages.netdata.cloud: ${{ steps.package-upload.outcome }}
- Publish to PackageCloud: ${{ steps.publish.outcome }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
index 7f12aeecd..5756e4b21 100644
--- a/.github/workflows/review.yml
+++ b/.github/workflows/review.yml
@@ -1,5 +1,5 @@
---
-# Runs various ReviewDog based checks against PR with suggested changes to improve quality
+# Runs various linter checks against PR with suggested changes to improve quality
name: Review
on:
pull_request:
@@ -15,7 +15,9 @@ jobs:
runs-on: ubuntu-latest
outputs:
actionlint: ${{ steps.actionlint.outputs.run }}
+ clangformat: ${{ steps.clangformat.outputs.run }}
eslint: ${{ steps.eslint.outputs.run }}
+ flake8: ${{ steps.flake8.outputs.run }}
hadolint: ${{ steps.hadolint.outputs.run }}
shellcheck: ${{ steps.shellcheck.outputs.run }}
yamllint: ${{ steps.yamllint.outputs.run }}
@@ -36,6 +38,17 @@ jobs:
else
echo "run=false" >> "${GITHUB_OUTPUT}"
fi
+ - name: Check files for clang-format
+ id: clangformat
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/clang-format') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '\.cpp$|\.cxx$|\.c$|\.hpp$|\.hxx$|\.h$' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'C/C++ code has changed, need to run clang-format.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
- name: Check files for eslint
id: eslint
run: |
@@ -47,6 +60,17 @@ jobs:
else
echo "run=false" >> "${GITHUB_OUTPUT}"
fi
+ - name: Check files for flake8
+ id: flake8
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/flake8') }}" = "true" ]; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ elif git diff --name-only origin/${{ github.base_ref }} HEAD | grep -Eq '.*\.py' ; then
+ echo "run=true" >> "${GITHUB_OUTPUT}"
+ echo 'Python files have changed, need to run flake8.'
+ else
+ echo "run=false" >> "${GITHUB_OUTPUT}"
+ fi
- name: Check files for hadolint
id: hadolint
run: |
@@ -98,6 +122,39 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
reporter: github-pr-check
+ clang-format:
+ name: clang-format
+ needs: prep-review
+ if: needs.prep-review.outputs.clangformat == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: false
+ fetch-depth: 0
+ - name: Check for label
+ id: label
+ run: |
+ if [ "${{ contains(github.event.pull_request.labels.*.name, 'run-ci/clang-format') }}" = "true" ]; then
+ echo 'check-all=true' >> "${GITHUB_OUTPUT}"
+ else
+ echo 'check-all=false' >> "${GITHUB_OUTPUT}"
+ fi
+ - name: Run clang-format
+ run: |
+ if [ "${{ steps.label.outputs.check-all }}" == 'true' ]; then
+ find . -regex '.*\.\(c\|cpp\|cxx\|h\|hpp\|hxx\)$' -exec clang-format -i --style=file '{}' \;
+ else
+ git diff --name-only origin/${{ github.base_ref }} HEAD | grep -E '\.cpp$|\.cxx$|\.c$|\.hpp$|\.hxx$|\.h$' | \
+ xargs -n 1 -r clang-format -i --style=file
+ fi
+ git status --porcelain=v1 > /tmp/porcelain
+ if [ -s /tmp/porcelain ]; then
+ cat /tmp/porcelain
+ exit 1
+ fi
+
eslint:
name: eslint
needs: prep-review
@@ -118,6 +175,27 @@ jobs:
reporter: github-pr-check
eslint_flags: '.'
+ flake8:
+ name: flake8
+ needs: prep-review
+ if: needs.prep-review.outputs.flake8 == 'true'
+ runs-on: ubuntu-latest
+ steps:
+ - name: Git clone repository
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ fetch-depth: 0
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.10"
+ - name: Run flake8
+ uses: reviewdog/action-flake8@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ reporter: github-pr-check
+
hadolint:
name: hadolint
needs: prep-review
@@ -152,7 +230,10 @@ jobs:
reporter: github-pr-check
path: "."
pattern: "*.sh*"
- exclude: "./.git/*"
+ exclude: |
+ ./.git/*
+ packaging/makeself/makeself.sh
+ packaging/makeself/makeself-header.sh
yamllint:
name: yamllint
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index d48386855..5f83a4405 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -31,7 +31,7 @@ jobs:
- name: Prepare environment
run: |
./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
- sudo apt-get install -y libjson-c-dev libipmimonitoring-dev libcups2-dev libsnappy-dev \
+ sudo apt-get install -y libjson-c-dev libyaml-dev libipmimonitoring-dev libcups2-dev libsnappy-dev \
libprotobuf-dev libprotoc-dev libssl-dev protobuf-compiler \
libnetfilter-acct-dev
- name: Run ./tests/run-unit-tests.sh