Diffstat (limited to 'fluent-bit/.github/workflows/call-build-linux-packages.yaml')
 fluent-bit/.github/workflows/call-build-linux-packages.yaml | 263 -------------------
 1 file changed, 0 insertions(+), 263 deletions(-)
diff --git a/fluent-bit/.github/workflows/call-build-linux-packages.yaml b/fluent-bit/.github/workflows/call-build-linux-packages.yaml
deleted file mode 100644
index b8000225a..000000000
--- a/fluent-bit/.github/workflows/call-build-linux-packages.yaml
+++ /dev/null
@@ -1,263 +0,0 @@
----
-name: Reusable workflow to build binary packages into S3 bucket
-
-on:
- workflow_call:
- inputs:
- version:
- description: The version of Fluent Bit to create.
- type: string
- required: true
- ref:
-        description: The commit, tag, or branch of Fluent Bit to check out for building the version above.
- type: string
- required: true
- build_matrix:
- description: The build targets to produce as a JSON matrix.
- type: string
- required: true
- environment:
-        description: The GitHub environment to run this workflow in.
- type: string
- required: false
- unstable:
-        description: Optional metadata to mark this as an unstable build; set it to the contents you want to add.
- type: string
- required: false
- default: ""
- ignore_failing_targets:
- description: Optionally ignore any failing builds in the matrix and continue.
- type: boolean
- required: false
- default: false
- secrets:
- token:
-        description: The GitHub token or similar to authenticate with.
- required: true
- bucket:
- description: The name of the S3 (US-East) bucket to push packages into.
- required: false
- access_key_id:
- description: The S3 access key id for the bucket.
- required: false
- secret_access_key:
- description: The S3 secret access key for the bucket.
- required: false
- gpg_private_key:
- description: The GPG key to use for signing the packages.
- required: false
- gpg_private_key_passphrase:
- description: The GPG key passphrase to use for signing the packages.
- required: false
-
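-# For reference, a caller might invoke this reusable workflow roughly as below
-# (a sketch only; the version, ref, and matrix values are hypothetical):
-#
-#   jobs:
-#     build-packages:
-#       uses: fluent/fluent-bit/.github/workflows/call-build-linux-packages.yaml@master
-#       with:
-#         version: "2.1.8"
-#         ref: "v2.1.8"
-#         build_matrix: '{"distro": ["ubuntu/22.04", "debian/bookworm.arm64v8"]}'
-#       secrets:
-#         token: ${{ secrets.GITHUB_TOKEN }}
-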
-jobs:
- call-build-capture-source:
-    # Capture the source tarball and generate checksums for it
- name: Extract any supporting metadata
- runs-on: ubuntu-latest
- environment: ${{ inputs.environment }}
- permissions:
- contents: read
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.ref }}
- path: source
-
- - name: Create tarball and checksums
- run: |
-          tar -czvf "$SOURCE_FILENAME_PREFIX.tar.gz" -C source --exclude-vcs .
-          md5sum "$SOURCE_FILENAME_PREFIX.tar.gz" > "$SOURCE_FILENAME_PREFIX.tar.gz.md5"
-          sha256sum "$SOURCE_FILENAME_PREFIX.tar.gz" > "$SOURCE_FILENAME_PREFIX.tar.gz.sha256"
-          # Move to a directory to simplify upload/sync
-          mkdir -p source-packages
-          cp -fv "$SOURCE_FILENAME_PREFIX"* source-packages/
- shell: bash
- env:
- SOURCE_FILENAME_PREFIX: source-${{ inputs.version }}
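-          # e.g. a (hypothetical) version 2.1.8 yields source-2.1.8.tar.gz plus matching .md5 and .sha256 files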
-
- - name: Upload the source artifacts
- uses: actions/upload-artifact@v3
- with:
- name: source-${{ inputs.version }}
- path: source-packages/*
- if-no-files-found: error
-
- # Pick up latest master version
- - name: Checkout code for action
- if: inputs.environment == 'staging'
- uses: actions/checkout@v4
- with:
- path: action-support
-
- - name: Push tarballs to S3
- # Only upload for staging
- if: inputs.environment == 'staging'
- uses: ./action-support/.github/actions/sync-to-bucket
- with:
- bucket: ${{ secrets.bucket }}
- access_key_id: ${{ secrets.access_key_id }}
- secret_access_key: ${{ secrets.secret_access_key }}
- bucket-directory: "${{ inputs.version }}/source"
- source-directory: "source-packages/"
-
- call-build-linux-packages:
- name: ${{ matrix.distro }} package build and stage to S3
- environment: ${{ inputs.environment }}
-    # For the OSS Fluent Bit repo, use Actuated runners for ARM builds; forks keep the existing ubuntu-latest runners.
-    runs-on: ${{ (contains(matrix.distro, 'arm') && (github.repository == 'fluent/fluent-bit') && 'actuated-aarch64') || 'ubuntu-latest' }}
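-    # e.g. a distro such as 'debian/bookworm.arm64v8' (hypothetical) on fluent/fluent-bit selects 'actuated-aarch64'; anything else gets 'ubuntu-latest'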
- permissions:
- contents: read
- strategy:
- fail-fast: false
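-      # build_matrix arrives as a JSON string, e.g. (hypothetical): '{"distro": ["ubuntu/22.04", "debian/bookworm.arm64v8"]}'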
- matrix: ${{ fromJSON(inputs.build_matrix) }}
-    # Optionally continue with the targets that succeed even if some fail
-    continue-on-error: ${{ inputs.ignore_failing_targets || false }}
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- ref: ${{ inputs.ref }}
-
- - name: Set up Actuated mirror
-        if: contains(matrix.distro, 'arm') && (github.repository == 'fluent/fluent-bit')
- uses: self-actuated/hub-mirror@master
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Replace all special characters with dashes
- id: formatted_distro
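-        # e.g. 'ubuntu/22.04' becomes 'ubuntu-22.04', so it is safe to use in artifact names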
-        run: |
-          output=${INPUT//[\/]/-}
-          echo "$INPUT --> $output"
-          echo "replaced=$output" >> "$GITHUB_OUTPUT"
- shell: bash
- env:
- INPUT: ${{ matrix.distro }}
-
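-      # Build the packages for the target distro via packaging/build.sh; the QEMU/Buildx setup above supports non-native architectures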
- - name: fluent-bit - ${{ matrix.distro }} artifacts
- run: |
- ./build.sh
- env:
- FLB_DISTRO: ${{ matrix.distro }}
- FLB_OUT_DIR: ${{ inputs.version }}/staging
- FLB_NIGHTLY_BUILD: ${{ inputs.unstable }}
- CMAKE_INSTALL_PREFIX: /opt/fluent-bit/
- working-directory: packaging
-
- - name: Upload the ${{ steps.formatted_distro.outputs.replaced }} artifacts
- uses: actions/upload-artifact@v3
- with:
- name: packages-${{ inputs.version }}-${{ steps.formatted_distro.outputs.replaced }}
- path: packaging/packages/
- if-no-files-found: error
-
- - name: Retrieve target info for repo creation
- id: get-target-info
-        # Remove any .arm64v8 suffix
-        # For Ubuntu, map the release to its codename using the distro-info list (CSV)
- run: |
- sudo apt-get update
- sudo apt-get install -y distro-info awscli
- TARGET=${DISTRO%*.arm64v8}
- if [[ "$TARGET" == "ubuntu/"* ]]; then
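-            # e.g. ubuntu/22.04 maps to ubuntu/jammy via /usr/share/distro-info/ubuntu.csv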
- UBUNTU_CODENAME=$(cut -d ',' -f 1,3 < "/usr/share/distro-info/ubuntu.csv"|grep "${TARGET##*/}"|cut -d ',' -f 2)
- if [[ -n "$UBUNTU_CODENAME" ]]; then
- TARGET="ubuntu/$UBUNTU_CODENAME"
- else
- echo "Unable to extract codename for $DISTRO"
- exit 1
- fi
- fi
- echo "$TARGET"
- echo "target=$TARGET" >> $GITHUB_OUTPUT
- env:
- DISTRO: ${{ matrix.distro }}
- shell: bash
-
- - name: Verify output target
-        # Fail early if the previous step produced an empty target
- run: |
- if [ -z "${{ steps.get-target-info.outputs.target }}" ]; then
- echo "Invalid (empty) target defined"
- exit 1
- fi
- shell: bash
-
- # Pick up latest master version
- - name: Checkout code for action
- if: inputs.environment == 'staging'
- uses: actions/checkout@v4
- with:
- path: action-support
-
-      - name: Push packages to S3
-        # Only upload for staging
-        # Do not use --delete on the sync as it would remove the other architecture's packages
-        if: inputs.environment == 'staging'
- uses: ./action-support/.github/actions/sync-to-bucket
- with:
- bucket: ${{ secrets.bucket }}
- access_key_id: ${{ secrets.access_key_id }}
- secret_access_key: ${{ secrets.secret_access_key }}
- bucket-directory: "${{ inputs.version }}/${{ steps.get-target-info.outputs.target }}/"
- source-directory: "packaging/packages/${{ matrix.distro }}/${{ inputs.version }}/staging/"
-
- call-build-linux-packages-repo:
- name: Create repo metadata in S3
- # Only upload for staging
- if: inputs.environment == 'staging'
-    # Uses createrepo-c, as plain createrepo is not packaged on recent Ubuntu releases
-    runs-on: ubuntu-22.04
- environment: ${{ inputs.environment }}
- needs:
- - call-build-linux-packages
- steps:
- - name: Install dependencies
- run: |
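-          # createrepo-c builds the RPM (yum) repo metadata, aptly the Debian (apt) repos; awscli syncs with S3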
- sudo apt-get update
- sudo apt-get install -y createrepo-c aptly awscli
-
- - name: Checkout code for repo metadata construction - always latest
- uses: actions/checkout@v4
-
- - name: Import GPG key for signing
- id: import_gpg
- uses: crazy-max/ghaction-import-gpg@v6
- with:
- gpg_private_key: ${{ secrets.gpg_private_key }}
- passphrase: ${{ secrets.gpg_private_key_passphrase }}
-
- - name: Create repositories on staging now
- # We sync down what we have for the release directories.
- # Create the repo metadata then upload to the root of the bucket.
- # This will wipe out any versioned directories in the process.
- run: |
- rm -rf ./latest/
- mkdir -p ./latest/
- if [ -n "${AWS_S3_ENDPOINT}" ]; then
- ENDPOINT="--endpoint-url ${AWS_S3_ENDPOINT}"
- fi
- aws s3 sync "s3://$AWS_S3_BUCKET/${{ inputs.version }}" ./latest/ --no-progress ${ENDPOINT}
-
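-          # Export the public signing key so it ships with the repos, and import it so rpm can verify signed packages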
- gpg --export -a "${{ steps.import_gpg.outputs.name }}" > ./latest/fluentbit.key
- rpm --import ./latest/fluentbit.key
-
- ./update-repos.sh "./latest/"
- echo "${{ inputs.version }}" > "./latest/latest-version.txt"
- aws s3 sync "./latest/" "s3://$AWS_S3_BUCKET" --delete --follow-symlinks --no-progress ${ENDPOINT}
- env:
- GPG_KEY: ${{ steps.import_gpg.outputs.name }}
- AWS_REGION: "us-east-1"
- AWS_ACCESS_KEY_ID: ${{ secrets.access_key_id }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.secret_access_key }}
- AWS_S3_BUCKET: ${{ secrets.bucket }}
- # To use with Minio locally (or update to whatever endpoint you want)
- # AWS_S3_ENDPOINT: http://localhost:9000
- shell: bash
- working-directory: packaging