Diffstat (limited to '.github')
-rw-r--r--  .github/ISSUE_TEMPLATE/bug_report.md        19
-rwxr-xr-x  .github/scripts/build-dist.sh               70
-rwxr-xr-x  .github/scripts/check-updater.sh            38
-rwxr-xr-x  .github/scripts/gen-docker-tags.py          13
-rwxr-xr-x  .github/scripts/old_package_purging.sh      88
-rwxr-xr-x  .github/scripts/package_cloud_wrapper.sh    48
-rw-r--r--  .github/workflows/build-and-install.yml     13
-rw-r--r--  .github/workflows/checks.yml                10
-rw-r--r--  .github/workflows/coverity.yml              16
-rw-r--r--  .github/workflows/docker.yml                28
-rw-r--r--  .github/workflows/docs.yml                   2
-rw-r--r--  .github/workflows/packaging.yml             149
-rw-r--r--  .github/workflows/review.yml                 3
-rw-r--r--  .github/workflows/tests.yml                  6
-rw-r--r--  .github/workflows/updater.yml               84
15 files changed, 575 insertions, 12 deletions
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index ecfd9fc47..799fba7a0 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,6 +1,6 @@
---
name: "Bug report: Netdata Agent"
-about: Submit a report and help us improve our free and open-source Netdata Agent
+about: "Submit a report and help us improve our free and open-source Netdata Agent"
labels: bug, needs triage
---
@@ -11,9 +11,11 @@ When creating a bug report please:
-->
##### Bug report summary
+
<!-- Provide a clear and concise description of the bug you're experiencing. -->
##### OS / Environment
+
<!--
Provide as much information about your environment (which operating system and distribution you're using, if Netdata is running in a container, etc.)
as possible to allow us reproduce this bug faster.
@@ -25,19 +27,30 @@ To get this information, execute the following commands based on your operating
Place the output from the command in the code section below.
-->
+
```
```
##### Netdata version
+
<!--
+Provide output of `netdata -W buildinfo`.
+
+If your Netdata Agent is older and doesn't support buildinfo, do the following:
Provide output of `netdata -V`.
If Netdata is running, execute: $(ps aux | grep -E -o "[a-zA-Z/]+netdata ") -V
-->
-
+
+##### Installation method
+
+<!--
+Tell us which installation method you used (https://learn.netdata.cloud/docs/agent/packaging/installer#alternative-methods).
+ -->
##### Component Name
+
<!--
Let us know which component is affected by the bug. Our code is structured according to its component,
so the component name is the same as the top level directory of the repository.
@@ -45,6 +58,7 @@ For example, a bug in the dashboard would be under the web component.
-->
##### Steps To Reproduce
+
<!--
Describe how you found this bug and how we can reproduce it, preferably with a minimal test-case scenario.
If you'd like to attach larger files, use gist.github.com and paste in links.
@@ -54,4 +68,5 @@ If you'd like to attach larger files, use gist.github.com and paste in links.
2. ...
##### Expected behavior
+
<!-- Provide a clear and concise description of what you expected to happen. -->
diff --git a/.github/scripts/build-dist.sh b/.github/scripts/build-dist.sh
new file mode 100755
index 000000000..f7e27324c
--- /dev/null
+++ b/.github/scripts/build-dist.sh
@@ -0,0 +1,70 @@
+#!/bin/sh
+#
+# Builds the netdata-vX.y.Z-xxxx.tar.gz source tarball (dist)
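+#
+# The resulting tarball, a netdata-latest.tar.gz hard link to it, latest-version.txt
+# and sha256sums.txt are placed under artifacts/ in the current working directory.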
+
+set -e
+
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+NAME="${NAME:-netdata}"
+VERSION="${VERSION:-"$(git describe --always)"}"
+BASENAME="$NAME-$VERSION"
+
+prepare_build() {
+  progress "Preparing build"
+  (
+    test -d artifacts || mkdir -p artifacts
+    echo "${VERSION}" > packaging/version
+  ) >&2
+}
+
+build_dist() {
+  progress "Building dist"
+  (
+    command -v git > /dev/null && [ -d .git ] && git clean -d -f
+    autoreconf -ivf
+    ./configure \
+      --prefix=/usr \
+      --sysconfdir=/etc \
+      --localstatedir=/var \
+      --libexecdir=/usr/libexec \
+      --with-zlib \
+      --with-math \
+      --with-user=netdata \
+      CFLAGS=-O2
+    make dist
+    mv "${BASENAME}.tar.gz" artifacts/
+  ) >&2
+}
+
+prepare_assets() {
+  progress "Preparing assets"
+  (
+    cp packaging/version artifacts/latest-version.txt
+    cd artifacts || exit 1
+    ln -f "${BASENAME}.tar.gz" netdata-latest.tar.gz
+    ln -f "${BASENAME}.gz.run" netdata-latest.gz.run
+    sha256sum -b ./* > "sha256sums.txt"
+  ) >&2
+}
+
+steps="prepare_build build_dist prepare_assets"
+
+_main() {
+  for step in $steps; do
+    if ! run "$step"; then
+      if [ -t 1 ]; then
+        debug
+      else
+        fail "Build failed"
+      fi
+    fi
+  done
+
+  echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+  _main "$@"
+fi
diff --git a/.github/scripts/check-updater.sh b/.github/scripts/check-updater.sh
new file mode 100755
index 000000000..3ef4857f9
--- /dev/null
+++ b/.github/scripts/check-updater.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
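+# Verifies that the installed netdata binary reports the same version as
+# packaging/version, i.e. that the updater actually performed the update.
+#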
+set -e
+# shellcheck source=.github/scripts/functions.sh
+. "$(dirname "$0")/functions.sh"
+
+check_successful_update() {
+  progress "Check netdata version after update"
+  (
+    netdata_version=$(netdata -v | awk '{print $2}')
+    updater_version=$(cat packaging/version)
+    if [ "$netdata_version" = "$updater_version" ]; then
+      echo "Update successful!"
+    else
+      exit 1
+    fi
+  ) >&2
+}
+
+steps="check_successfull_update"
+
+_main() {
+  for step in $steps; do
+    if ! run "$step"; then
+      if [ -t 1 ]; then
+        debug
+      else
+        fail "Update check failed"
+      fi
+    fi
+  done
+
+  echo "🎉 All Done!"
+}
+
+if [ -n "$0" ] && [ x"$0" != x"-bash" ]; then
+  _main "$@"
+fi
diff --git a/.github/scripts/gen-docker-tags.py b/.github/scripts/gen-docker-tags.py
new file mode 100755
index 000000000..6c6251155
--- /dev/null
+++ b/.github/scripts/gen-docker-tags.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+
+import sys
+
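+# Given a release version such as "1.30.1" (illustrative), print the Docker tags to apply:
+#   netdata/netdata:1,netdata/netdata:1.30,netdata/netdata:1.30.1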
+REPO = 'netdata/netdata'
+
+version = sys.argv[1].split('.')
+
+MAJOR = ':'.join([REPO, version[0]])
+MINOR = ':'.join([REPO, '.'.join(version[0:2])])
+PATCH = ':'.join([REPO, '.'.join(version[0:3])])
+
+print(','.join([MAJOR, MINOR, PATCH]))
diff --git a/.github/scripts/old_package_purging.sh b/.github/scripts/old_package_purging.sh
new file mode 100755
index 000000000..c90c4b780
--- /dev/null
+++ b/.github/scripts/old_package_purging.sh
@@ -0,0 +1,88 @@
+#!/usr/bin/env bash
+#
+# Script to handle package cloud retention policy
+# Our open source subscription is limited,
+# so we use this script to control the number of packages maintained historically
+#
+# Dependencies:
+# - PACKAGE_CLOUD_RETENTION_DAYS
+#   Indicates how many days back we keep the various RPM and DEB packages on Package Cloud
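+# - REPO
+#   The target Package Cloud repository (for example netdata/netdata)
+# - PKG_CLOUD_TOKEN
+#   Package Cloud API token used to list and yank packages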
+#
+# Copyright : SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis <paul@netdata.cloud>
+#
+set -e
+
+delete_files_for_version() {
+  local v="$1"
+
+  # Delete the selected filenames in version
+  FILES_IN_VERSION=$(jq --sort-keys --arg v "${v}" '.[] | select ( .version | contains($v))' "${PKG_LIST_FILE}" | grep filename | cut -d':' -f 2)
+
+  # Iterate through the files and delete them
+  for pkg in ${FILES_IN_VERSION/\\n/}; do
+    pkg=${pkg/,/}
+    pkg=${pkg/\"/}
+    pkg=${pkg/\"/}
+    echo "Attempting yank on ${pkg}.."
+    .github/scripts/package_cloud_wrapper.sh yank "${REPO}" "${pkg}" || echo "Nothing to yank or error on ${pkg}"
+  done
+}
+
+# If we are not in netdata git repo, at the top level directory, fail
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
+ echo "Run as .github/scripts/$(basename "$0") from top level directory of netdata git repository"
+ echo "Old packages yanking cancelled"
+ exit 1
+fi
+
+if [ -z "${REPO}" ]; then
+ echo "No REPO variable found"
+ exit 1
+fi
+
+if [ -z ${PKG_CLOUD_TOKEN} ]; then
+ echo "No PKG_CLOUD_TOKEN variable found"
+ exit 1
+fi
+
+if [ -z ${PACKAGE_CLOUD_RETENTION_DAYS} ]; then
+ echo "No PACKAGE_CLOUD_RETENTION_DAYS variable found"
+ exit 1
+fi
+
+TMP_DIR="$(mktemp -d /tmp/netdata-old-package-yanking-XXXXXX)"
+PKG_LIST_FILE="${TMP_DIR}/complete_package_list.json"
+DATE_EPOCH="1970-01-01"
+DATE_UNTIL_TO_DELETE=$(date --date="${PACKAGE_CLOUD_RETENTION_DAYS} day ago" +%Y-%m-%d)
+
+
+echo "Created temp directory: ${TMP_DIR}"
+echo "We will be purging contents up until ${DATE_UNTIL_TO_DELETE}"
+
+echo "Calling package could to retrieve all available packages on ${REPO}"
+curl -sS "https://${PKG_CLOUD_TOKEN}:@packagecloud.io/api/v1/repos/${REPO}/packages.json" > "${PKG_LIST_FILE}"
+
+# Get versions within the desired duration
+#
+VERSIONS_TO_PURGE=$(jq --arg s "${DATE_EPOCH}" --arg e "${DATE_UNTIL_TO_DELETE}" '
+[($s, $e) | strptime("%Y-%m-%d")[0:3]] as $r
+ | map(select(
+ (.created_at[:19] | strptime("%Y-%m-%dT%H:%M:%S")[0:3]) as $d
+ | $d >= $r[0] and $d <= $r[1]
+))' "${PKG_LIST_FILE}" | grep '"version":' | sort -u | sed -e 's/ //g' | cut -d':' -f2)
+
+echo "We will be deleting the following versions: ${VERSIONS_TO_PURGE}"
+for v in ${VERSIONS_TO_PURGE/\n//}; do
+  v=${v/\"/}
+  v=${v/\"/}
+  v=${v/,/}
+  echo "Remove all files for version $v"
+  delete_files_for_version "${v}"
+done
+
+# Done, clean up
+[ -d "${TMP_DIR}" ] && rm -rf "${TMP_DIR}"
diff --git a/.github/scripts/package_cloud_wrapper.sh b/.github/scripts/package_cloud_wrapper.sh
new file mode 100755
index 000000000..7640ef484
--- /dev/null
+++ b/.github/scripts/package_cloud_wrapper.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+#
+# This is a tool to help with removing packages from packagecloud.io.
+# It utilizes the package_cloud utility provided by packagecloud.io.
+#
+# Depends on:
+# 1) package cloud gem (detects absence and installs it)
+#
+# Requires:
+# 1) PKG_CLOUD_TOKEN variable exported
+# 2) To properly install package_cloud when not found, it requires: ruby gcc gcc-c++ ruby-devel
+#
+# Copyright: SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Author : Pavlos Emm. Katsoulakis (paul@netdata.cloud)
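+#
+# Usage (arguments are passed straight through to package_cloud), for example:
+#   .github/scripts/package_cloud_wrapper.sh push <user/repo>/<distro>/<version> <package-file>
+#   .github/scripts/package_cloud_wrapper.sh yank <user/repo> <package-file>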
+#shellcheck disable=SC2068,SC2145
+set -e
+PKG_CLOUD_CONFIG="$HOME/.package_cloud_configuration.cfg"
+
+# If we are not in netdata git repo, at the top level directory, fail
+TOP_LEVEL=$(basename "$(git rev-parse --show-toplevel)")
+CWD=$(git rev-parse --show-cdup)
+if [ -n "$CWD" ] || [ ! "${TOP_LEVEL}" == "netdata" ]; then
+ echo "Run as .github/scripts/$(basename "$0") from top level directory of netdata git repository"
+ echo "Docker build process aborted"
+ exit 1
+fi
+
+# Install dependency if not there
+if ! command -v package_cloud > /dev/null 2>&1; then
+  echo "No package cloud gem found, installing"
+  sudo gem install -V package_cloud || (echo "Package cloud installation failed. You might want to check if required dependencies are there (ruby gcc gcc-c++ ruby-devel)" && exit 1)
+else
+  echo "Found package_cloud gem, continuing"
+fi
+
+# Check for required token and prepare config
+if [ -z "${PKG_CLOUD_TOKEN}" ]; then
+ echo "Please set PKG_CLOUD_TOKEN to be able to use ${0}"
+ exit 1
+fi
+echo "{\"url\":\"https://packagecloud.io\",\"token\":\"${PKG_CLOUD_TOKEN}\"}" > "${PKG_CLOUD_CONFIG}"
+
+echo "Executing package_cloud with config ${PKG_CLOUD_CONFIG} and parameters $@"
+package_cloud $@ --config="${PKG_CLOUD_CONFIG}"
+
+rm -rf "${PKG_CLOUD_CONFIG}"
+echo "Done!"
diff --git a/.github/workflows/build-and-install.yml b/.github/workflows/build-and-install.yml
index 26a144acd..4c4693601 100644
--- a/.github/workflows/build-and-install.yml
+++ b/.github/workflows/build-and-install.yml
@@ -12,6 +12,8 @@ jobs:
     steps:
       - name: Git clone repository
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - run: |
           git fetch --prune --unshallow --tags
       - name: Build
@@ -34,10 +36,12 @@ jobs:
           - 'clearlinux:latest'
           - 'debian:10'
           - 'debian:9'
+          - 'fedora:34'
           - 'fedora:33'
           - 'fedora:32'
           - 'opensuse/leap:15.2'
           - 'opensuse/tumbleweed:latest'
+          - 'ubuntu:21.04'
           - 'ubuntu:20.10'
           - 'ubuntu:20.04'
           - 'ubuntu:18.04'
@@ -72,6 +76,8 @@ jobs:
            pre: 'apt-get update'
            rmjsonc: 'apt-get remove -y libjson-c-dev'
+          - distro: 'fedora:34'
+            rmjsonc: 'dnf remove -y json-c-devel'
          - distro: 'fedora:33'
            rmjsonc: 'dnf remove -y json-c-devel'
          - distro: 'fedora:32'
@@ -82,6 +88,9 @@ jobs:
          - distro: 'opensuse/tumbleweed:latest'
            rmjsonc: 'zypper rm -y libjson-c-devel'
+          - distro: 'ubuntu:21.04'
+            pre: 'apt-get update'
+            rmjsonc: 'apt-get remove -y libjson-c-dev'
          - distro: 'ubuntu:20.10'
            pre: 'apt-get update'
            rmjsonc: 'apt-get remove -y libjson-c-dev'
@@ -98,6 +107,8 @@ jobs:
     steps:
       - name: Git clone repository
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: install-required-packages.sh on ${{ matrix.distro }}
         env:
           PRE: ${{ matrix.pre }}
@@ -183,6 +194,8 @@ jobs:
     steps:
       - name: Git clone repository
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: install-required-packages.sh on ${{ matrix.distro }}
         env:
           PRE: ${{ matrix.pre }}
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index cf494e95c..7225d3dbe 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -12,6 +12,8 @@ jobs:
     steps:
       - name: Git clone repository
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Run checksum checks on kickstart files
         env:
           LOCAL_ONLY: "true"
@@ -23,6 +25,8 @@ jobs:
     steps:
       - name: Git clone repository
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Install required packages
         run: |
           ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
@@ -43,6 +47,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Build
         run: >
           docker run -v "$PWD":/netdata -w /netdata alpine:latest /bin/sh -c
@@ -68,6 +74,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Prepare environment
         run: |
           ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
@@ -98,6 +106,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Prepare environment
         run: ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata
       - name: Build netdata
diff --git a/.github/workflows/coverity.yml b/.github/workflows/coverity.yml
index 926257dc0..766275ed9 100644
--- a/.github/workflows/coverity.yml
+++ b/.github/workflows/coverity.yml
@@ -15,6 +15,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Prepare environment
         env:
           DEBIAN_FRONTEND: 'noninteractive'
@@ -32,3 +34,17 @@ jobs:
           COVERITY_SCAN_SUBMIT_MAIL: ${{ secrets.COVERITY_SCAN_SUBMIT_MAIL }}
         run: |
           ./coverity-scan.sh --with-install
+      - name: Failure Notification
+        uses: rtCamp/action-slack-notify@v2
+        env:
+          SLACK_COLOR: 'danger'
+          SLACK_FOOTER:
+          SLACK_ICON_EMOJI: ':github-actions:'
+          SLACK_TITLE: 'Coverity run failed:'
+          SLACK_USERNAME: 'GitHub Actions'
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+        if: ${{
+          failure()
+          && github.event_name != 'pull_request'
+          && startsWith(github.ref, 'refs/heads/master')
+          }}
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 04f91bff5..a5648d2a9 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -4,15 +4,7 @@ on:
   push:
     branches:
       - master
-    paths:
-      - '.github/workflows/docker.yml'
-      - 'netdata-installer.sh'
-      - 'packaging/**'
   pull_request:
-    paths:
-      - '.github/workflows/docker.yml'
-      - 'netdata-installer.sh'
-      - 'packaging/**'
   workflow_dispatch:
     inputs:
       version:
@@ -26,11 +18,13 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Determine if we should push changes and which tags to use
         if: github.event_name == 'workflow_dispatch' && github.event.inputs.version != 'nightly'
         run: |
           echo "publish=true" >> $GITHUB_ENV
-          echo "tags=netdata/netdata:latest,netdata/netdata:stable,netdata/netdata:${{ github.event.inputs.version }}" >> $GITHUB_ENV
+          echo "tags=netdata/netdata:latest,netdata/netdata:stable,$(.github/scripts/gen-docker-tags.py ${{ github.event.inputs.version }})" >> $GITHUB_ENV
       - name: Determine if we should push changes and which tags to use
         if: github.event_name == 'workflow_dispatch' && github.event.inputs.version == 'nightly'
         run: |
@@ -57,3 +51,19 @@ jobs:
           platforms: linux/amd64,linux/i386,linux/arm/v7,linux/arm64
           push: ${{ env.publish }}
           tags: ${{ env.tags }}
+      - name: Failure Notification
+        uses: rtCamp/action-slack-notify@v2
+        env:
+          SLACK_COLOR: 'danger'
+          SLACK_FOOTER:
+          SLACK_ICON_EMOJI: ':github-actions:'
+          SLACK_TITLE: 'Docker Build failed:'
+          SLACK_USERNAME: 'GitHub Actions'
+          SLACK_MESSAGE: "Docker image build failed."
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+        if: >-
+          ${{
+            failure()
+            && github.event_name != 'pull_request'
+            && startsWith(github.ref, 'refs/heads/master')
+          }}
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 2a4fe87e4..9f7234f92 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -16,6 +16,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Run link check
         uses: gaurav-nelson/github-action-markdown-link-check@v1
         with:
diff --git a/.github/workflows/packaging.yml b/.github/workflows/packaging.yml
new file mode 100644
index 000000000..7340c27bb
--- /dev/null
+++ b/.github/workflows/packaging.yml
@@ -0,0 +1,149 @@
+---
+# Handles building of binary packages for the agent.
+name: Packages
+on:
+  pull_request:
+    branches:
+      - master
+      - develop
+  workflow_dispatch:
+    inputs:
+      type:
+        name: Package build type
+        default: devel
+        required: true
+      version:
+        name: Package version
+        required: false
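+# The workflow_dispatch inputs above can also be supplied from the GitHub CLI,
+# for example (hypothetical version): gh workflow run packaging.yml -f type=release -f version=1.30.1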
+jobs:
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    env:
+      DOCKER_CLI_EXPERIMENTAL: enabled
+    strategy:
+      matrix:
+        include:
+          - {distro: debian, version: "9", pkgclouddistro: debian/stretch, format: deb, base_image: debian, platform: linux/amd64, arch: amd64}
+          - {distro: debian, version: "9", pkgclouddistro: debian/stretch, format: deb, base_image: debian, platform: linux/i386, arch: i386}
+          - {distro: debian, version: "10", pkgclouddistro: debian/buster, format: deb, base_image: debian, platform: linux/amd64, arch: amd64}
+          - {distro: debian, version: "10", pkgclouddistro: debian/buster, format: deb, base_image: debian, platform: linux/i386, arch: i386}
+          - {distro: ubuntu, version: "16.04", pkgclouddistro: ubuntu/xenial, format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+          - {distro: ubuntu, version: "16.04", pkgclouddistro: ubuntu/xenial, format: deb, base_image: ubuntu, platform: linux/i386, arch: i386}
+          - {distro: ubuntu, version: "18.04", pkgclouddistro: ubuntu/bionic, format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+          - {distro: ubuntu, version: "18.04", pkgclouddistro: ubuntu/bionic, format: deb, base_image: ubuntu, platform: linux/i386, arch: i386}
+          - {distro: ubuntu, version: "20.04", pkgclouddistro: ubuntu/focal, format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+          - {distro: ubuntu, version: "20.10", pkgclouddistro: ubuntu/groovy, format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+          - {distro: ubuntu, version: "21.04", pkgclouddistro: ubuntu/hirsute, format: deb, base_image: ubuntu, platform: linux/amd64, arch: amd64}
+          - {distro: centos, version: "7", pkgclouddistro: el/7, format: rpm, base_image: centos, platform: linux/amd64, arch: amd64}
+          - {distro: centos, version: "8", pkgclouddistro: el/8, format: rpm, base_image: centos, platform: linux/amd64, arch: amd64}
+          - {distro: fedora, version: "32", pkgclouddistro: fedora/32, format: rpm, base_image: fedora, platform: linux/amd64, arch: amd64}
+          - {distro: fedora, version: "33", pkgclouddistro: fedora/33, format: rpm, base_image: fedora, platform: linux/amd64, arch: amd64}
+          - {distro: fedora, version: "34", pkgclouddistro: fedora/34, format: rpm, base_image: fedora, platform: linux/amd64, arch: amd64}
+          - {distro: opensuse, version: "15.2", pkgclouddistro: opensuse/15.2, format: rpm, base_image: opensuse/leap, platform: linux/amd64, arch: amd64}
+      # We intentionally disable the fail-fast behavior so that a
+      # build failure for one version doesn't prevent us from publishing
+      # successfully built and tested packages for another version.
+      fail-fast: false
+    steps:
+      - name: Checkout PR # Checkout the PR if it's a PR.
+        if: github.event_name == 'pull_request'
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0 # We need full history for versioning
+          submodules: true
+      - name: Checkout Tag # Otherwise check out the tag that triggered this.
+        if: github.event_name == 'workflow_dispatch'
+        uses: actions/checkout@v2
+        with:
+          ref: ${{ github.event.ref }}
+          fetch-depth: 0 # We need full history for versioning
+          submodules: true
+      - name: Check Base Branch
+        run: |
+          if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
+            echo "runtype=${{ github.event.inputs.type }}" >> $GITHUB_ENV
+            case "${{ github.event.inputs.type }}" in
+              "release")
+                echo "repo=${{ secrets.PACKAGE_CLOUD_REPO }}" >> $GITHUB_ENV
+                echo "pkg_version=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+                echo "pkg_retention_days=365" >> $GITHUB_ENV
+                ;;
+              "nightly")
+                echo "repo=${{ secrets.PACKAGE_CLOUD_REPO }}-edge" >> $GITHUB_ENV
+                echo "pkg_version=${{ github.event.inputs.version }}" >> $GITHUB_ENV
+                echo "pkg_retention_days=30" >> $GITHUB_ENV
+                ;;
+              *)
+                echo "repo=${{ secrets.PACKAGE_CLOUD_REPO }}-devel" >> $GITHUB_ENV
+                echo "pkg_version=0.${GITHUB_SHA}" >> $GITHUB_ENV
+                echo "pkg_retention_days=30" >> $GITHUB_ENV
+                ;;
+            esac
+          else
+            echo "runtype=test" >> $GITHUB_ENV
+            echo "pkg_version=$(cut -d'-' -f 1 packaging/version | sed -e 's/^v//')" >> $GITHUB_ENV
+          fi
+      - name: Setup QEMU
+        if: matrix.platform != 'linux/amd64'
+        uses: docker/setup-qemu-action@v1
+      - name: Setup Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Prepare Docker Environment
+        shell: bash
+        run: |
+          echo '{"cgroup-parent": "/actions_job", "experimental": true}' | sudo tee /etc/docker/daemon.json 2>/dev/null
+          sudo service docker restart
+      - name: Build Packages
+        uses: docker/build-push-action@v2
+        with:
+          platforms: ${{ matrix.platform }}
+          file: packaging/Dockerfile.packager
+          tags: local/package-builder:${{ matrix.distro}}${{ matrix.version }}
+          push: false
+          load: true
+          build-args: |
+            ARCH=${{ matrix.arch }}
+            DISTRO=${{ matrix.distro }}
+            TEST_BASE=${{ matrix.base_image }}
+            DISTRO_VERSION=${{ matrix.version }}
+            PKG_VERSION=${{ env.pkg_version }}
+      - name: Extract Packages
+        shell: bash
+        run: |
+          mkdir -p artifacts
+          docker run --platform ${{ matrix.platform }} -v $PWD/artifacts:/artifacts local/package-builder:${{ matrix.distro }}${{ matrix.version }}
+      - name: Upload
+        if: github.event_name == 'workflow_dispatch'
+        shell: bash
+        env:
+          PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+        run: |
+          echo "Packages to upload:\n$(ls artifacts/*.${{ matrix.format }})"
+          for pkgfile in artifacts/*.${{ matrix.format }} ; do
+            .github/scripts/package_cloud_wrapper.sh push ${{ env.repo }}/${{ matrix.pkgclouddistro }} ${pkgfile}
+          done
+      - name: Clean
+        if: github.event_name == 'workflow_dispatch'
+        shell: bash
+        env:
+          REPO: ${{ env.repo }}
+          PKG_CLOUD_TOKEN: ${{ secrets.PACKAGE_CLOUD_API_KEY }}
+          PACKAGE_CLOUD_RETENTION_DAYS: ${{ env.pkg_retention_days }}
+        run: .github/scripts/old_package_purging.sh
+      - name: Failure Notification
+        uses: rtCamp/action-slack-notify@v2
+        env:
+          SLACK_COLOR: 'danger'
+          SLACK_FOOTER:
+          SLACK_ICON_EMOJI: ':github-actions:'
+          SLACK_TITLE: 'Package Build failed:'
+          SLACK_USERNAME: 'GitHub Actions'
+          SLACK_MESSAGE: "${{ matrix.pkgclouddistro }} ${{ matrix.version }} package build for ${{ matrix.arch }} failed."
+          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
+        if: >-
+          ${{
+            failure()
+            && github.event_name != 'pull_request'
+            && startsWith(github.ref, 'refs/heads/master')
+          }}
diff --git a/.github/workflows/review.yml b/.github/workflows/review.yml
index ca8f6de13..a267fea3f 100644
--- a/.github/workflows/review.yml
+++ b/.github/workflows/review.yml
@@ -16,6 +16,7 @@ jobs:
      - name: Git clone repository
        uses: actions/checkout@v2
        with:
+          submodules: recursive
          fetch-depth: 0
      - name: Check files
        run: |
@@ -57,6 +58,7 @@ jobs:
      - name: Git clone repository
        uses: actions/checkout@v2
        with:
+          submodules: recursive
          fetch-depth: 0
      - name: Check files
        run: |
@@ -80,6 +82,7 @@ jobs:
      - name: Git clone repository
        uses: actions/checkout@v2
        with:
+          submodules: recursive
          fetch-depth: 0
      - name: Check files
        run: |
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index ef6bfbc2a..c166c7442 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -21,6 +21,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Prepare environment
         run: |
           ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
@@ -39,6 +41,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v2
+        with:
+          submodules: recursive
       - name: Prepare environment
         run: |
           ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
@@ -48,7 +52,7 @@ jobs:
       - name: Configure
         run: |
           autoreconf -ivf
-          ./configure
+          ./configure --without-aclk-ng
      # XXX: Work-around for bug with libbson-1.0 in Ubuntu 18.04
      # See: https://bugs.launchpad.net/ubuntu/+source/libmongoc/+bug/1790771
      #      https://jira.mongodb.org/browse/CDRIVER-2818
diff --git a/.github/workflows/updater.yml b/.github/workflows/updater.yml
new file mode 100644
index 000000000..76cb2fdbe
--- /dev/null
+++ b/.github/workflows/updater.yml
@@ -0,0 +1,84 @@
+---
+name: Updater
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  source-build:
+    name: Install, Build & Update
+    strategy:
+      fail-fast: false
+      matrix:
+        distro:
+          - 'alpine:3.12'
+          - 'alpine:3.13'
+          - 'archlinux:latest'
+          - 'centos:7'
+          - 'centos:8'
+          - 'clearlinux:latest'
+          - 'debian:9'
+          - 'debian:10'
+          - 'fedora:33'
+          - 'fedora:34'
+          - 'ubuntu:16.04'
+          - 'ubuntu:18.04'
+          - 'ubuntu:20.04'
+          - 'ubuntu:20.10'
+          - 'ubuntu:21.04'
+        include:
+          - distro: 'alpine:3.12'
+            pre: 'apk add -U bash'
+          - distro: 'alpine:3.13'
+            pre: 'apk add -U bash'
+          - distro: 'debian:9'
+            pre: 'apt-get update'
+          - distro: 'debian:10'
+            pre: 'apt-get update'
+          - distro: 'ubuntu:16.04'
+            pre: 'apt-get update'
+          - distro: 'ubuntu:18.04'
+            pre: 'apt-get update'
+          - distro: 'ubuntu:20.04'
+            pre: 'apt-get update'
+          - distro: 'ubuntu:20.10'
+            pre: 'apt-get update'
+          - distro: 'ubuntu:21.04'
+            pre: 'apt-get update'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Git clone repository
+        uses: actions/checkout@v2
+        with:
+          submodules: recursive
+      - name: Install required packages & build tarball
+        run: |
+          ./packaging/installer/install-required-packages.sh --dont-wait --non-interactive netdata-all
+          .github/scripts/build-dist.sh
+      - name: Run a dockerised web server to serve files used by the custom update script
+        run: |
+          docker run -dit --name my-apache-app -p 8080:80 -v "$PWD":/usr/local/apache2/htdocs/ httpd:2.4
+      - name: Replace URLs in updater script to point at the local web server
+        run: |
+          ORIG_TARBALL="export NETDATA_TARBALL_URL=.*"
+          ORIG_CHECKSUM="export NETDATA_TARBALL_CHECKSUM_URL=.*"
+          CURRENT_VERSION="current_version=.*"
+          NEW_TARBALL="export NETDATA_TARBALL_URL=http://localhost:8080/artifacts/netdata-latest.tar.gz"
+          NEW_CHECKSUM="export NETDATA_TARBALL_CHECKSUM_URL=http://localhost:8080/artifacts/sha256sums.txt"
+          sed -i "s|${ORIG_TARBALL}|${NEW_TARBALL}|g" packaging/installer/netdata-updater.sh
+          sed -i "s|${ORIG_CHECKSUM}|${NEW_CHECKSUM}|g" packaging/installer/netdata-updater.sh
+ sed -i "s|"current_version=.*"|"current_version=1"|g" packaging/installer/netdata-updater.sh
+      - name: Install netdata and run the updater on ${{ matrix.distro }}
+        env:
+          PRE: ${{ matrix.pre }}
+        run: |
+          echo $PRE > ./prep-cmd.sh
+          docker build . -f .github/dockerfiles/Dockerfile.build_test -t test --build-arg BASE=${{ matrix.distro }}
+          docker run --network host -w /netdata test \
+            /bin/sh -c '/netdata/packaging/installer/kickstart.sh --dont-wait \
+            && /netdata/packaging/installer/netdata-updater.sh --not-running-from-cron --no-updater-self-update \
+            && bash /netdata/.github/scripts/check-updater.sh'