---
name: Reusable workflow to build binary packages into an S3 bucket
on:
workflow_call:
inputs:
version:
description: The version of Fluent Bit to create.
type: string
required: true
ref:
        description: The commit, tag, or branch of Fluent Bit to check out and build to produce the version above.
type: string
required: true
build_matrix:
description: The build targets to produce as a JSON matrix.
type: string
required: true
environment:
        description: The GitHub environment to run this workflow in.
type: string
required: false
unstable:
        description: Optional metadata to mark this as an unstable build; set it to the contents you want to add.
type: string
required: false
default: ""
ignore_failing_targets:
description: Optionally ignore any failing builds in the matrix and continue.
type: boolean
required: false
default: false
secrets:
token:
        description: The GitHub token (or similar) to authenticate with.
required: true
bucket:
description: The name of the S3 (US-East) bucket to push packages into.
required: false
access_key_id:
        description: The S3 access key ID for the bucket.
required: false
secret_access_key:
description: The S3 secret access key for the bucket.
required: false
gpg_private_key:
description: The GPG key to use for signing the packages.
required: false
gpg_private_key_passphrase:
description: The GPG key passphrase to use for signing the packages.
required: false
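# For reference, a minimal sketch of how a caller workflow might invoke this
# reusable workflow. The file path, matrix contents, and secret names below are
# illustrative assumptions, not the actual values used by any release pipeline:
#
#   jobs:
#     packages:
#       uses: ./.github/workflows/call-build-linux-packages.yaml
#       with:
#         version: 2.1.0
#         ref: v2.1.0
#         build_matrix: '{"distro": ["ubuntu/22.04", "centos/7.arm64v8"]}'
#         environment: staging
#       secrets:
#         token: ${{ secrets.GITHUB_TOKEN }}
#         bucket: ${{ secrets.AWS_S3_BUCKET_STAGING }}
#         access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
#         secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
#         gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
#         gpg_private_key_passphrase: ${{ secrets.GPG_PASSPHRASE }}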
jobs:
call-build-capture-source:
    # Capture a source tarball and generate checksums for it
name: Extract any supporting metadata
runs-on: ubuntu-latest
environment: ${{ inputs.environment }}
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
path: source
- name: Create tarball and checksums
run: |
tar -czvf $SOURCE_FILENAME_PREFIX.tar.gz -C source --exclude-vcs .
md5sum $SOURCE_FILENAME_PREFIX.tar.gz > $SOURCE_FILENAME_PREFIX.tar.gz.md5
sha256sum $SOURCE_FILENAME_PREFIX.tar.gz > $SOURCE_FILENAME_PREFIX.tar.gz.sha256
# Move to a directory to simplify upload/sync
mkdir -p source-packages
cp -fv $SOURCE_FILENAME_PREFIX* source-packages/
shell: bash
env:
SOURCE_FILENAME_PREFIX: source-${{ inputs.version }}
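      # The step above leaves three files in source-packages/:
      # source-<version>.tar.gz plus its .md5 and .sha256 checksum files.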
- name: Upload the source artifacts
uses: actions/upload-artifact@v3
with:
name: source-${{ inputs.version }}
path: source-packages/*
if-no-files-found: error
# Pick up latest master version
- name: Checkout code for action
if: inputs.environment == 'staging'
uses: actions/checkout@v4
with:
path: action-support
- name: Push tarballs to S3
# Only upload for staging
if: inputs.environment == 'staging'
uses: ./action-support/.github/actions/sync-to-bucket
with:
bucket: ${{ secrets.bucket }}
access_key_id: ${{ secrets.access_key_id }}
secret_access_key: ${{ secrets.secret_access_key }}
bucket-directory: "${{ inputs.version }}/source"
source-directory: "source-packages/"
call-build-linux-packages:
name: ${{ matrix.distro }} package build and stage to S3
environment: ${{ inputs.environment }}
    # For the upstream fluent/fluent-bit repo, use Actuated runners for ARM builds; forks keep the existing ubuntu-latest usage.
    runs-on: ${{ (contains(matrix.distro, 'arm') && (github.repository == 'fluent/fluent-bit') && 'actuated-aarch64') || 'ubuntu-latest' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix: ${{ fromJSON(inputs.build_matrix) }}
    # Optionally continue with whatever targets build successfully
continue-on-error: ${{ inputs.ignore_failing_targets || false }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- name: Set up Actuated mirror
        if: contains(matrix.distro, 'arm') && (github.repository == 'fluent/fluent-bit')
uses: self-actuated/hub-mirror@master
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
      - name: Replace any slashes in the distro name with dashes
        id: formatted_distro
        run: |
          output=${INPUT//[\/]/-}
          echo "$INPUT --> $output"
          echo "replaced=$output" >> "$GITHUB_OUTPUT"
        shell: bash
        env:
          INPUT: ${{ matrix.distro }}
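      # Illustrative example: a matrix.distro of "ubuntu/22.04" yields
      # "ubuntu-22.04", since artifact names may not contain slashes.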
- name: fluent-bit - ${{ matrix.distro }} artifacts
run: |
./build.sh
env:
FLB_DISTRO: ${{ matrix.distro }}
FLB_OUT_DIR: ${{ inputs.version }}/staging
FLB_NIGHTLY_BUILD: ${{ inputs.unstable }}
CMAKE_INSTALL_PREFIX: /opt/fluent-bit/
working-directory: packaging
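      # The same script can be driven by hand to reproduce a single target locally
      # (illustrative values; FLB_NIGHTLY_BUILD can be left unset for a release build):
      #   cd packaging && FLB_DISTRO=ubuntu/22.04 FLB_OUT_DIR=local/staging ./build.sh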
- name: Upload the ${{ steps.formatted_distro.outputs.replaced }} artifacts
uses: actions/upload-artifact@v3
with:
name: packages-${{ inputs.version }}-${{ steps.formatted_distro.outputs.replaced }}
path: packaging/packages/
if-no-files-found: error
      - name: Retrieve target info for repo creation
        id: get-target-info
        # Remove any .arm64v8 suffix.
        # For Ubuntu targets, map the version to its codename using the distro-info list (CSV).
run: |
sudo apt-get update
sudo apt-get install -y distro-info awscli
TARGET=${DISTRO%*.arm64v8}
if [[ "$TARGET" == "ubuntu/"* ]]; then
UBUNTU_CODENAME=$(cut -d ',' -f 1,3 < "/usr/share/distro-info/ubuntu.csv"|grep "${TARGET##*/}"|cut -d ',' -f 2)
if [[ -n "$UBUNTU_CODENAME" ]]; then
TARGET="ubuntu/$UBUNTU_CODENAME"
else
echo "Unable to extract codename for $DISTRO"
exit 1
fi
fi
echo "$TARGET"
echo "target=$TARGET" >> $GITHUB_OUTPUT
env:
DISTRO: ${{ matrix.distro }}
shell: bash
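      # Illustrative mappings from the step above: "centos/7.arm64v8" becomes
      # "centos/7", and "ubuntu/22.04" becomes "ubuntu/jammy" via the distro-info CSV.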
      - name: Verify output target
        # Fail early if the computed target is empty, as the bucket sync path depends on it.
run: |
if [ -z "${{ steps.get-target-info.outputs.target }}" ]; then
echo "Invalid (empty) target defined"
exit 1
fi
shell: bash
# Pick up latest master version
- name: Checkout code for action
if: inputs.environment == 'staging'
uses: actions/checkout@v4
with:
path: action-support
      - name: Push packages to S3
        # Only upload for staging.
        # Do not use --delete on the sync, as it would remove the packages built for the other architecture.
        if: inputs.environment == 'staging'
uses: ./action-support/.github/actions/sync-to-bucket
with:
bucket: ${{ secrets.bucket }}
access_key_id: ${{ secrets.access_key_id }}
secret_access_key: ${{ secrets.secret_access_key }}
bucket-directory: "${{ inputs.version }}/${{ steps.get-target-info.outputs.target }}/"
source-directory: "packaging/packages/${{ matrix.distro }}/${{ inputs.version }}/staging/"
call-build-linux-packages-repo:
name: Create repo metadata in S3
# Only upload for staging
if: inputs.environment == 'staging'
    # Pin to an Ubuntu release that provides createrepo-c and aptly.
    runs-on: ubuntu-22.04
environment: ${{ inputs.environment }}
needs:
- call-build-linux-packages
steps:
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y createrepo-c aptly awscli
- name: Checkout code for repo metadata construction - always latest
uses: actions/checkout@v4
- name: Import GPG key for signing
id: import_gpg
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.gpg_private_key }}
passphrase: ${{ secrets.gpg_private_key_passphrase }}
- name: Create repositories on staging now
        # We sync down what we already have for the release directories,
        # create the repo metadata, then upload everything to the root of the bucket.
        # The final sync uses --delete, so any versioned directories at the bucket root are removed in the process.
run: |
rm -rf ./latest/
mkdir -p ./latest/
if [ -n "${AWS_S3_ENDPOINT}" ]; then
ENDPOINT="--endpoint-url ${AWS_S3_ENDPOINT}"
fi
aws s3 sync "s3://$AWS_S3_BUCKET/${{ inputs.version }}" ./latest/ --no-progress ${ENDPOINT}
gpg --export -a "${{ steps.import_gpg.outputs.name }}" > ./latest/fluentbit.key
rpm --import ./latest/fluentbit.key
./update-repos.sh "./latest/"
echo "${{ inputs.version }}" > "./latest/latest-version.txt"
aws s3 sync "./latest/" "s3://$AWS_S3_BUCKET" --delete --follow-symlinks --no-progress ${ENDPOINT}
env:
GPG_KEY: ${{ steps.import_gpg.outputs.name }}
AWS_REGION: "us-east-1"
AWS_ACCESS_KEY_ID: ${{ secrets.access_key_id }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.secret_access_key }}
AWS_S3_BUCKET: ${{ secrets.bucket }}
# To use with Minio locally (or update to whatever endpoint you want)
# AWS_S3_ENDPOINT: http://localhost:9000
shell: bash
working-directory: packaging
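      # Net effect for staging: the bucket root ends up mirroring ./latest/, i.e. the
      # per-target package directories with repo metadata, fluentbit.key, and latest-version.txt.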