Diffstat (limited to 'fluent-bit/lib/librdkafka-2.1.0/packaging')
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/RELEASE.md | 311
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/alpine/build-alpine.sh | 38
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/archlinux/PKGBUILD | 30
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Config.cmake.in | 37
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindLZ4.cmake | 38
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindZSTD.cmake | 27
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/LICENSE.FindZstd | 178
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/README.md | 38
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/config.h.in | 52
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/parseversion.cmake | 60
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/rdkafka.pc.in | 12
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_32_test.c | 8
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_64_test.c | 8
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/c11threads_test.c | 14
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/crc32c_hw_test.c | 27
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/dlopen_test.c | 11
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/libsasl2_test.c | 7
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_darwin_test.c | 6
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_freebsd_test.c | 7
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_gnu_test.c | 5
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rand_r_test.c | 7
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rdkafka_setup.cmake | 122
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/regex_test.c | 10
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/strndup_test.c | 5
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_32_test.c | 8
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_64_test.c | 8
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cp/README.md | 14
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/cp/check_features.c | 64
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-deb.sh | 34
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-packages.sh | 43
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-rpm.sh | 38
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/.gitignore | 6
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/changelog | 66
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/compat | 1
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/control | 49
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/copyright | 84
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/docs | 5
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/gbp.conf | 9
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.dirs | 2
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.examples | 2
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.install | 6
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.substvars | 1
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka.dsc | 16
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1-dbg.substvars | 1
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.dirs | 1
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.install | 2
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postinst.debhelper | 5
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postrm.debhelper | 5
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.symbols | 64
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/rules | 19
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/source/format | 1
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/debian/watch | 2
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/get_version.py | 21
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/README.md | 15
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/brew-update-pr.sh | 31
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw-static.sh | 52
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw.sh | 21
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/run-tests.sh | 6
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/semaphoreci-build.sh | 38
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/travis-before-install.sh | 20
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore | 7
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md | 78
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py | 177
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py | 143
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip | bin 0 -> 679055 bytes
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip | bin 0 -> 516022 bytes
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip | bin 0 -> 662837 bytes
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip | bin 0 -> 621912 bytes
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh | 21
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py | 286
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py | 448
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh | 21
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py | 167
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt | 3
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py | 178
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec | 21
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props | 18
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets | 19
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py | 0
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py | 98
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/.gitignore | 7
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/Makefile | 92
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/README.md | 23
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/el7-x86_64.cfg | 40
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/librdkafka.spec | 118
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/mock-on-docker.sh | 97
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/.gitignore | 2
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/Makefile | 25
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/README.md | 8
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/run-test.sh | 49
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test-on-docker.sh | 56
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.c | 77
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.cpp | 34
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-deb-package.sh | 64
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-debian.sh | 65
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-manylinux.sh | 68
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-release-artifacts.sh | 138
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/distro-build.sh | 38
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/gh-release-checksums.py | 39
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/rdutcoverage.sh | 25
-rw-r--r--  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/requirements.txt | 2
-rwxr-xr-x  fluent-bit/lib/librdkafka-2.1.0/packaging/tools/style-format.sh | 148
102 files changed, 4718 insertions(+), 0 deletions(-)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/RELEASE.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/RELEASE.md
new file mode 100644
index 000000000..930636db4
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/RELEASE.md
@@ -0,0 +1,311 @@
+# librdkafka release process
+
+This guide outlines the steps needed to release a new version of librdkafka
+and publish packages to the various channels (NuGet, Homebrew, etc.).
+
+Releases are done in two phases:
+ * release-candidate(s) - RC1 will be the first release candidate, and any
+ changes to the repository will require a new RC.
+ * final release - the final release is based directly on the last RC tag
+ followed by a single version-bump commit (see below).
+
+Release tag and version format:
+ * tagged release builds to verify CI release builders: vA.B.C-PREn
+ * release-candidate: vA.B.C-RCn
+ * final release: vA.B.C
+
+
+## Update protocol requests and error codes
+
+Check out the latest version of Apache Kafka (not trunk; it needs to be a
+released version since the protocol may change on trunk).
+
+### Protocol request types
+
+Generate protocol request type codes with:
+
+ $ src/generate_proto.sh ~/src/your-kafka-dir
+
+Cut'n'paste the new defines and strings to `rdkafka_protocol.h` and
+`rdkafka_proto.h`.
+
+### Error codes
+
+Error codes must currently be updated manually: open
+`clients/src/main/java/org/apache/kafka/common/protocol/Errors.java`
+in the Kafka source directory and update the `rd_kafka_resp_err_t` and
+`RdKafka::ErrorCode` enums in `rdkafka.h` and `rdkafkacpp.h`
+respectively.
+Add the error strings to `rdkafka.c`.
+The Kafka error strings are sometimes a bit too verbose for our taste,
+so feel free to rewrite them (usually removing a couple of 'the's).
+Error strings must not contain a trailing period.
+
+**NOTE**: Only add **new** error codes, do not alter existing ones since that
+ will be a breaking API change.
+
+
+## Run regression tests
+
+**Build tests:**
+
+ $ cd tests
+ $ make -j build
+
+**Run the full regression test suite:** (requires Linux and the trivup python package)
+
+ $ make full
+
+
+If all tests pass, carry on; otherwise identify and fix the bug and start over.
+
+
+
+## Write release notes / changelog
+
+All relevant PRs should also include an update to [CHANGELOG.md](../CHANGELOG.md)
+that in a user-centric fashion outlines what changed.
+It might not be practical for all contributors to write meaningful changelog
+entries, so it is okay to add them separately later after the PR has been
+merged (make sure to credit community contributors for their work).
+
+The changelog should include:
+ * A short intro to the release, describing the type of release
+   (maintenance or feature release) as well as fix or feature highlights.
+ * A section of **New features**, if any.
+ * A section of **Upgrade considerations**, if any, to outline important changes
+ that require user attention.
+ * A section of **Enhancements**, if any.
+ * A section of **Fixes**, if any, preferably with Consumer, Producer, and
+ Generic sub-sections.
+
+
+## Pre-release code tasks
+
+**Switch to the release branch which is of the format `A.B.C.x` or `A.B.x`.**
+
+ $ git checkout -b 0.11.1.x
+
+
+**Update in-code versions.**
+
+The last octet in the version hex number is the pre-build/release-candidate
+number, where 0xAABBCCff is the final release for version 0xAABBCC.
+Release candidates start at 200: the last octet is 200 plus the RC number,
+thus 0xAABBCCc9 is RC1, 0xAABBCCca is RC2, etc.
+
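+As a quick sanity check you can decode a version hex into its components;
+a minimal shell sketch (0x000b01c9 is v0.11.1-RC1):
+
+    $ v=0x000b01c9
+    $ printf 'major=%d minor=%d revision=%d pre=%d\n' \
+          $(( (v >> 24) & 0xff )) $(( (v >> 16) & 0xff )) \
+          $(( (v >> 8) & 0xff )) $(( v & 0xff ))
+    major=0 minor=11 revision=1 pre=201
+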
+Change the `RD_KAFKA_VERSION` defines in both `src/rdkafka.h` and
+`src-cpp/rdkafkacpp.h` to the version to build, such as 0x000b01c9
+for v0.11.1-RC1, or 0x000b01ff for the final v0.11.1 release.
+Update the librdkafka version in `vcpkg.json`.
+
+ # Update defines
+ $ $EDITOR src/rdkafka.h src-cpp/rdkafkacpp.h vcpkg.json
+
+ # Reconfigure and build
+ $ ./configure
+ $ make
+
+ # Check git diff for correctness
+ $ git diff
+
+ # Commit
+    $ git commit -m "Version v0.11.1-RC1" src/rdkafka.h src-cpp/rdkafkacpp.h vcpkg.json
+
+
+**Create tag.**
+
+    $ git tag v0.11.1-RC1   # for a release candidate
+    # or:
+    $ git tag v0.11.1       # for the final release
+
+
+**Push branch and commit to github**
+
+ # Dry-run first to make sure things look correct
+ $ git push --dry-run origin 0.11.1.x
+
+ # Live
+ $ git push origin 0.11.1.x
+
+**Push tag to github**
+
+ # Dry-run first to make sure things look correct.
+ $ git push --dry-run --tags origin v0.11.1-RC1
+
+ # Live
+ $ git push --tags origin v0.11.1-RC1
+
+
+## Creating packages
+
+As soon as a tag is pushed, the CI system (SemaphoreCI) will start its
+build pipeline and eventually upload packaging artifacts to the SemaphoreCI
+project artifact store.
+
+Monitor the Semaphore CI project page to know when the build pipeline
+is finished, then download the relevant artifacts for further use; see
+*The artifact pipeline* chapter below.
+
+
+## Publish release on github
+
+Create a release on github by going to https://github.com/edenhill/librdkafka/releases
+and clicking "Draft a new release".
+Name the release the same as the final release tag (e.g., `v1.9.0`) and set
+the tag to the same.
+Paste the CHANGELOG.md section for this release into the release description;
+look at the preview and fix any formatting issues.
+
+Run the following command to get checksums of the github release assets:
+
+ $ packaging/tools/gh-release-checksums.py <the-tag>
+
+It will take some time for the script to download the files; when done,
+paste the output at the end of the release page.
+
+Make sure the release page looks okay, is still correct (check for new commits),
+and has the correct tag, then click Publish release.
+
+
+
+### Homebrew recipe update
+
+**Note**: This is typically not needed since homebrew seems to pick up new
+          release versions quickly enough, so it is recommended to skip this step.
+
+The brew-update-pr.sh script automatically pushes a PR to homebrew-core
+with a patch to update the librdkafka version of the formula.
+This should only be done for final releases and not release candidates.
+
+On a macOS host with homebrew installed:
+
+    $ cd packaging/homebrew
+ # Dry-run first to see that things are okay.
+ $ ./brew-update-pr.sh v0.11.1
+ # If everything looks good, do the live push:
+ $ ./brew-update-pr.sh --upload v0.11.1
+
+
+### Deb and RPM packaging
+
+Debian and RPM packages are generated through Confluent packaging as
+so-called Independent client releases. This is a separate, non-public
+process, and the resulting packages are made available on Confluent's
+client deb and rpm repositories.
+
+That process is outside the scope of this document.
+
+See the Confluent docs for instructions on how to access these packages:
+https://docs.confluent.io/current/installation.html
+
+
+
+
+## Build and release artifacts
+
+The following chapter explains what, how, and where artifacts are built.
+It also outlines where these artifacts are used.
+
+### So what is an artifact?
+
+An artifact is a build of the librdkafka library, dynamic/shared and/or static,
+with a certain set of external or built-in dependencies, for a specific
+architecture and operating system (and sometimes even operating system version).
+
+If you build librdkafka from source with no special `./configure` arguments
+you will end up with:
+
+ * a dynamically linked library (e.g., `librdkafka.so.1`)
+ with a set of dynamically linked external dependencies (OpenSSL, zlib, etc),
+ all depending on what dependencies are available on the build host.
+
+ * a static library (`librdkafka.a`) that will have external dependencies
+   that need to be linked dynamically. There is no way for a static library
+   to express link dependencies, so an `rdkafka-static.pc` pkg-config
+   file is also generated that contains linker flags for the external
+   dependencies.
+   Those external dependencies are however most likely only available on the
+   build host, so this static library is not particularly useful for
+   repackaging purposes (such as for high-level clients using librdkafka).
+
+ * a self-contained static library (`librdkafka-static.a`) which attempts
+ to contain static versions of all external dependencies, effectively making
+ it possible to link just with `librdkafka-static.a` to get all
+ dependencies needed.
+ Since the state of static libraries in the various distro and OS packaging
+ systems is of varying quality and availability, it is usually not possible
+ for the librdkafka build system (mklove) to generate this completely
+ self-contained static library simply using dependencies available on the
+ build system, and the make phase of the build will emit warnings when it
+ can't bundle all external dependencies due to this.
+ To circumvent this problem it is possible for the build system (mklove)
+ to download and build static libraries of all needed external dependencies,
+ which in turn allows it to create a complete bundle of all dependencies.
+   This results in a `librdkafka-static.a` that has no external dependencies
+ other than the system libraries (libc, pthreads, rt, etc).
+ To achieve this you will need to pass
+ `--install-deps --source-deps-only --enable-static` to
+ librdkafka's `./configure`.
+
+ * `rdkafka.pc` and `rdkafka-static.pc` pkg-config files that tell
+   applications and libraries that depend on librdkafka what external
+   dependencies are needed to successfully link with librdkafka.
+   This is mainly useful for the dynamic librdkafka library
+   (`librdkafka.so.1`, or `librdkafka.1.dylib` on OSX); see the usage
+   sketch below.
+
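+For example, an application can pull in the right compiler and linker
+flags via pkg-config (a minimal sketch; `myapp.c` is a hypothetical
+source file):
+
+    # Link against the dynamic library
+    $ cc myapp.c $(pkg-config --cflags --libs rdkafka)
+
+    # Link against the self-contained static library
+    $ cc myapp.c $(pkg-config --static --cflags --libs rdkafka-static)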
+
+**NOTE**: Due to libsasl2/cyrus-sasl's dynamically loaded plugins, it is
+not possible for us to provide a self-contained static library with
+GSSAPI/Kerberos support.
+
+
+
+### The artifact pipeline
+
+We rely solely on CI systems to build our artifacts; no artifact may be built
+on a non-CI system (e.g., someone's work laptop, a random ec2 instance, etc.).
+
+The reasons for this are:
+
+ 1. Reproducible builds: we want a well-defined environment that doesn't change
+    (too much) without notice and on which we can rebuild artifacts at a later
+    time if required.
+ 2. Security: these CI systems provide at least some degree of security
+    guarantees, and they're managed by people who know what they're doing
+    most of the time. This minimizes the risk of an artifact being silently
+    compromised due to the developer's laptop being hacked.
+ 3. Logs: we have build logs for all artifacts, which contain checksums.
+    This way we can know how an artifact was built, what features were enabled
+    and what versions of dependencies were used, as well as know that an
+    artifact has not been tampered with after leaving the CI system.
+
+
+By default the CI jobs are triggered by branch pushes and pull requests
+and contain a set of jobs to validate that the pushed changes do not
+break compilation or functionality (by running parts of the test suite).
+These jobs do not produce any artifacts.
+
+
+For the artifact pipeline there are tag builds, which are triggered by pushing a
+tag to the git repository.
+These tag builds will generate artifacts which are used by the same pipeline
+to create NuGet and static library packages, which are then uploaded to
+SemaphoreCI's project artifact store.
+
+Once a tag build pipeline is done, you can download the relevant packages
+from the Semaphore CI project artifact store.
+
+The NuGet package, `librdkafka.redist.<version>.nupkg`, needs to be
+manually uploaded to NuGet.
+
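+One way to do the upload (a sketch, assuming the `dotnet` CLI and an API
+key in `$NUGET_API_KEY`; the repository also ships
+`packaging/nuget/push-to-nuget.sh` for this purpose):
+
+    $ dotnet nuget push librdkafka.redist.<version>.nupkg \
+          -k $NUGET_API_KEY -s https://api.nuget.org/v3/index.json
+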
+The `librdkafka-static-bundle-<version>.tgz` static library bundle
+needs to be manually imported into the confluent-kafka-go client using the
+import script that resides in the Go client repository.
+
+
+**Note**: You will need a NuGet API key to upload nuget packages.
+
+
+See [nuget/nugetpackage.py] and [nuget/staticpackage.py] to see how
+packages are assembled from build artifacts.
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/alpine/build-alpine.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/alpine/build-alpine.sh
new file mode 100755
index 000000000..e6d2471c9
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/alpine/build-alpine.sh
@@ -0,0 +1,38 @@
+#!/bin/sh
+#
+# Build librdkafka on Alpine.
+#
+
+set -x
+
+if [ "$1" = "--in-docker" ]; then
+ # Runs in docker, performs the actual build.
+ shift
+
+ apk add bash curl gcc g++ make musl-dev linux-headers bsd-compat-headers git python3 perl patch
+
+ git clone /v /librdkafka
+
+ cd /librdkafka
+    ./configure --install-deps --disable-gssapi --disable-lz4-ext --enable-static "$@"
+ make -j
+ examples/rdkafka_example -X builtin.features
+ CI=true make -C tests run_local_quick
+
+ # Create a tarball in artifacts/
+ cd src
+ ldd librdkafka.so.1
+ tar cvzf /v/artifacts/alpine-librdkafka.tgz librdkafka.so.1 librdkafka*.a rdkafka-static.pc
+ cd ../..
+
+else
+ # Runs on the host, simply spins up the in-docker build.
+ if [ ! -f configure.self ]; then
+ echo "Must be run from the top-level librdkafka dir"
+ exit 1
+ fi
+
+ mkdir -p artifacts
+
+    exec docker run -v "$PWD:/v" alpine:3.12 "/v/packaging/alpine/$(basename "$0")" --in-docker "$@"
+fi
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/archlinux/PKGBUILD b/fluent-bit/lib/librdkafka-2.1.0/packaging/archlinux/PKGBUILD
new file mode 100644
index 000000000..7063d5cef
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/archlinux/PKGBUILD
@@ -0,0 +1,30 @@
+pkgname=librdkafka
+pkgver=1.0.0.RC5.r11.g3cf68480
+pkgrel=1
+pkgdesc='The Apache Kafka C/C++ client library'
+url='https://github.com/edenhill/librdkafka'
+license=('BSD')
+arch=('x86_64')
+source=('git+https://github.com/edenhill/librdkafka#branch=master')
+sha256sums=('SKIP')
+depends=(glibc libsasl lz4 openssl zlib zstd)
+makedepends=(bash git python3)
+
+pkgver() {
+ cd "$pkgname"
+ git describe --long --tags --match "v[0-9]*" | sed 's/^v//;s/\([^-]*-g\)/r\1/;s/-/./g'
+}
+
+build() {
+ cd "$pkgname"
+ ./configure --prefix=/usr
+ make
+}
+
+package() {
+ cd "$pkgname"
+ make install DESTDIR="$pkgdir"
+ for f in $(find -type f -name 'LICENSE*'); do
+ install -D -m0644 "$f" "$pkgdir/usr/share/licenses/$pkgname/$f"
+ done
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Config.cmake.in b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Config.cmake.in
new file mode 100644
index 000000000..8a6522b06
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Config.cmake.in
@@ -0,0 +1,37 @@
+@PACKAGE_INIT@
+
+include(CMakeFindDependencyMacro)
+
+if(@WITH_ZLIB@)
+ find_dependency(ZLIB)
+endif()
+
+if(@WITH_CURL@)
+ find_dependency(CURL)
+endif()
+
+if(@WITH_ZSTD@)
+ find_library(ZSTD zstd)
+ if(NOT ZSTD)
+    message(FATAL_ERROR "ZSTD library not found!")
+ else()
+ message(STATUS "Found ZSTD: " ${ZSTD})
+ endif()
+endif()
+
+if(@WITH_SSL@)
+ if(@WITH_BUNDLED_SSL@)
+ # TODO: custom SSL library should be installed
+ else()
+ find_dependency(OpenSSL)
+ endif()
+endif()
+
+if(@WITH_LZ4_EXT@)
+ find_dependency(LZ4)
+endif()
+
+find_dependency(Threads)
+
+include("${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake")
+check_required_components("@PROJECT_NAME@")
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindLZ4.cmake b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindLZ4.cmake
new file mode 100644
index 000000000..594c4290c
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindLZ4.cmake
@@ -0,0 +1,38 @@
+find_path(LZ4_INCLUDE_DIR
+ NAMES lz4.h
+ DOC "lz4 include directory")
+mark_as_advanced(LZ4_INCLUDE_DIR)
+find_library(LZ4_LIBRARY
+ NAMES lz4
+ DOC "lz4 library")
+mark_as_advanced(LZ4_LIBRARY)
+
+if (LZ4_INCLUDE_DIR)
+ file(STRINGS "${LZ4_INCLUDE_DIR}/lz4.h" _lz4_version_lines
+ REGEX "#define[ \t]+LZ4_VERSION_(MAJOR|MINOR|RELEASE)")
+ string(REGEX REPLACE ".*LZ4_VERSION_MAJOR *\([0-9]*\).*" "\\1" _lz4_version_major "${_lz4_version_lines}")
+ string(REGEX REPLACE ".*LZ4_VERSION_MINOR *\([0-9]*\).*" "\\1" _lz4_version_minor "${_lz4_version_lines}")
+ string(REGEX REPLACE ".*LZ4_VERSION_RELEASE *\([0-9]*\).*" "\\1" _lz4_version_release "${_lz4_version_lines}")
+ set(LZ4_VERSION "${_lz4_version_major}.${_lz4_version_minor}.${_lz4_version_release}")
+ unset(_lz4_version_major)
+ unset(_lz4_version_minor)
+ unset(_lz4_version_release)
+ unset(_lz4_version_lines)
+endif ()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(LZ4
+ REQUIRED_VARS LZ4_LIBRARY LZ4_INCLUDE_DIR
+ VERSION_VAR LZ4_VERSION)
+
+if (LZ4_FOUND)
+ set(LZ4_INCLUDE_DIRS "${LZ4_INCLUDE_DIR}")
+ set(LZ4_LIBRARIES "${LZ4_LIBRARY}")
+
+ if (NOT TARGET LZ4::LZ4)
+ add_library(LZ4::LZ4 UNKNOWN IMPORTED)
+ set_target_properties(LZ4::LZ4 PROPERTIES
+ IMPORTED_LOCATION "${LZ4_LIBRARY}"
+ INTERFACE_INCLUDE_DIRECTORIES "${LZ4_INCLUDE_DIR}")
+ endif ()
+endif ()
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindZSTD.cmake b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindZSTD.cmake
new file mode 100644
index 000000000..7de137e0f
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/FindZSTD.cmake
@@ -0,0 +1,27 @@
+#
+# - Try to find Facebook zstd library
+# This will define
+# ZSTD_FOUND
+# ZSTD_INCLUDE_DIR
+# ZSTD_LIBRARY
+#
+
+find_path(ZSTD_INCLUDE_DIR NAMES zstd.h)
+
+find_library(ZSTD_LIBRARY_DEBUG NAMES zstdd zstd_staticd)
+find_library(ZSTD_LIBRARY_RELEASE NAMES zstd zstd_static)
+
+include(SelectLibraryConfigurations)
+SELECT_LIBRARY_CONFIGURATIONS(ZSTD)
+
+include(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ ZSTD DEFAULT_MSG
+ ZSTD_LIBRARY ZSTD_INCLUDE_DIR
+)
+
+if (ZSTD_FOUND)
+ message(STATUS "Found Zstd: ${ZSTD_LIBRARY}")
+endif()
+
+mark_as_advanced(ZSTD_INCLUDE_DIR ZSTD_LIBRARY)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/LICENSE.FindZstd b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/LICENSE.FindZstd
new file mode 100644
index 000000000..9561f469b
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/Modules/LICENSE.FindZstd
@@ -0,0 +1,178 @@
+FindZstd.cmake: git@github.com:facebook/folly.git 87f1a403b49552dae75ae94c8610dd5979913477
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/README.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/README.md
new file mode 100644
index 000000000..47ad2cb63
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/README.md
@@ -0,0 +1,38 @@
+# Build librdkafka with cmake
+
+The cmake build mode is experimental and not officially supported;
+the community is asked to maintain and support this mode through PRs.
+
+Set up build environment (from top-level librdkafka directory):
+
+ $ cmake -H. -B_cmake_build
+
+On macOS with OpenSSL from Homebrew you might need to do:
+
+ $ cmake -H. -B_cmake_build -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl
+
+
+Build the library:
+
+ $ cmake --build _cmake_build
+
+If you want to build a static library, pass the option at configure time:
+
+    $ cmake -H. -B_cmake_build -DRDKAFKA_BUILD_STATIC=1
+    $ cmake --build _cmake_build
+
+
+Run (local) tests:
+
+ $ (cd _cmake_build && ctest -VV -R RdKafkaTestBrokerLess)
+
+
+Install library:
+
+ $ cmake --build _cmake_build --target install
+
+
+If you use librdkafka as a submodule in a cmake project and want to link it statically:
+
+ set(RDKAFKA_BUILD_STATIC ON CACHE BOOL "")
+ add_subdirectory(librdkafka)
+ target_link_libraries(your_library_or_executable rdkafka)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/config.h.in b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/config.h.in
new file mode 100644
index 000000000..9e356c5f9
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/config.h.in
@@ -0,0 +1,52 @@
+#cmakedefine01 WITHOUT_OPTIMIZATION
+#cmakedefine01 ENABLE_DEVEL
+#cmakedefine01 ENABLE_REFCNT_DEBUG
+
+#cmakedefine01 HAVE_ATOMICS_32
+#cmakedefine01 HAVE_ATOMICS_32_SYNC
+
+#if (HAVE_ATOMICS_32)
+# if (HAVE_ATOMICS_32_SYNC)
+# define ATOMIC_OP32(OP1,OP2,PTR,VAL) __sync_ ## OP1 ## _and_ ## OP2(PTR, VAL)
+# else
+# define ATOMIC_OP32(OP1,OP2,PTR,VAL) __atomic_ ## OP1 ## _ ## OP2(PTR, VAL, __ATOMIC_SEQ_CST)
+# endif
+#endif
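+
+/* Example (hypothetical counter `cnt`):
+ *   ATOMIC_OP32(add, fetch, &cnt, 1)
+ * expands to __atomic_add_fetch(&cnt, 1, __ATOMIC_SEQ_CST) with the
+ * __atomic builtins, or to __sync_add_and_fetch(&cnt, 1) with the
+ * legacy __sync builtins. */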
+
+#cmakedefine01 HAVE_ATOMICS_64
+#cmakedefine01 HAVE_ATOMICS_64_SYNC
+
+#if (HAVE_ATOMICS_64)
+# if (HAVE_ATOMICS_64_SYNC)
+# define ATOMIC_OP64(OP1,OP2,PTR,VAL) __sync_ ## OP1 ## _and_ ## OP2(PTR, VAL)
+# else
+# define ATOMIC_OP64(OP1,OP2,PTR,VAL) __atomic_ ## OP1 ## _ ## OP2(PTR, VAL, __ATOMIC_SEQ_CST)
+# endif
+#endif
+
+#cmakedefine01 WITH_PKGCONFIG
+#cmakedefine01 WITH_HDRHISTOGRAM
+#cmakedefine01 WITH_ZLIB
+#cmakedefine01 WITH_CURL
+#cmakedefine01 WITH_OAUTHBEARER_OIDC
+#cmakedefine01 WITH_ZSTD
+#cmakedefine01 WITH_LIBDL
+#cmakedefine01 WITH_PLUGINS
+#define WITH_SNAPPY 1
+#define WITH_SOCKEM 1
+#cmakedefine01 WITH_SSL
+#cmakedefine01 WITH_SASL
+#cmakedefine01 WITH_SASL_SCRAM
+#cmakedefine01 WITH_SASL_OAUTHBEARER
+#cmakedefine01 WITH_SASL_CYRUS
+#cmakedefine01 WITH_LZ4_EXT
+#cmakedefine01 HAVE_REGEX
+#cmakedefine01 HAVE_STRNDUP
+#cmakedefine01 HAVE_RAND_R
+#cmakedefine01 HAVE_PTHREAD_SETNAME_GNU
+#cmakedefine01 HAVE_PTHREAD_SETNAME_DARWIN
+#cmakedefine01 HAVE_PTHREAD_SETNAME_FREEBSD
+#cmakedefine01 WITH_C11THREADS
+#cmakedefine01 WITH_CRC32C_HW
+#define SOLIB_EXT "${CMAKE_SHARED_LIBRARY_SUFFIX}"
+#define BUILT_WITH "${BUILT_WITH}"
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/parseversion.cmake b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/parseversion.cmake
new file mode 100644
index 000000000..592e8df54
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/parseversion.cmake
@@ -0,0 +1,60 @@
+# hex2dec(<out-var> <input>):
+# Convert a hexadecimal value <input> to decimal and write the result
+# to <out-var>.
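+# Example: hex2dec(dec "ff") sets ${dec} to 255.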
+macro(hex2dec var val)
+ set(${var} 0)
+
+ set(hex2dec_idx 0)
+ string(LENGTH "${val}" hex2dec_len)
+
+ while(hex2dec_idx LESS hex2dec_len)
+ string(SUBSTRING ${val} ${hex2dec_idx} 1 hex2dec_char)
+
+ if(hex2dec_char MATCHES "[0-9]")
+ set(hex2dec_char ${hex2dec_char})
+ elseif(hex2dec_char MATCHES "[aA]")
+ set(hex2dec_char 10)
+ elseif(hex2dec_char MATCHES "[bB]")
+ set(hex2dec_char 11)
+ elseif(hex2dec_char MATCHES "[cC]")
+ set(hex2dec_char 12)
+ elseif(hex2dec_char MATCHES "[dD]")
+ set(hex2dec_char 13)
+ elseif(hex2dec_char MATCHES "[eE]")
+ set(hex2dec_char 14)
+ elseif(hex2dec_char MATCHES "[fF]")
+ set(hex2dec_char 15)
+ else()
+      message(FATAL_ERROR "Invalid format for hexadecimal character: " ${hex2dec_char})
+ endif()
+
+ math(EXPR hex2dec_char "${hex2dec_char} << ((${hex2dec_len}-${hex2dec_idx}-1)*4)")
+ math(EXPR ${var} "${${var}}+${hex2dec_char}")
+ math(EXPR hex2dec_idx "${hex2dec_idx}+1")
+ endwhile()
+endmacro(hex2dec)
+
+# parseversion(<filepath>):
+# Parse the file given by <filepath> for the RD_KAFKA_VERSION constant
+# and convert the hex value to decimal version numbers.
+# Creates the following CMake variables:
+# * RDKAFKA_VERSION
+# * RDKAFKA_VERSION_MAJOR
+# * RDKAFKA_VERSION_MINOR
+# * RDKAFKA_VERSION_REVISION
+# * RDKAFKA_VERSION_PRERELEASE
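+# Example: parseversion("src/rdkafka.h") with RD_KAFKA_VERSION 0x000b01c9
+# sets RDKAFKA_VERSION to "0.11.1" and RDKAFKA_VERSION_PRERELEASE to 201.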
+macro(parseversion path)
+ file(STRINGS ${path} rdkafka_version_def REGEX "#define *RD_KAFKA_VERSION *\(0x[a-f0-9]*\)\.*")
+ string(REGEX REPLACE "#define *RD_KAFKA_VERSION *0x" "" rdkafka_version_hex ${rdkafka_version_def})
+
+ string(SUBSTRING ${rdkafka_version_hex} 0 2 rdkafka_version_major_hex)
+ string(SUBSTRING ${rdkafka_version_hex} 2 2 rdkafka_version_minor_hex)
+ string(SUBSTRING ${rdkafka_version_hex} 4 2 rdkafka_version_revision_hex)
+ string(SUBSTRING ${rdkafka_version_hex} 6 2 rdkafka_version_prerelease_hex)
+
+ hex2dec(RDKAFKA_VERSION_MAJOR ${rdkafka_version_major_hex})
+ hex2dec(RDKAFKA_VERSION_MINOR ${rdkafka_version_minor_hex})
+ hex2dec(RDKAFKA_VERSION_REVISION ${rdkafka_version_revision_hex})
+ hex2dec(RDKAFKA_VERSION_PRERELEASE ${rdkafka_version_prerelease_hex})
+ set(RDKAFKA_VERSION "${RDKAFKA_VERSION_MAJOR}.${RDKAFKA_VERSION_MINOR}.${RDKAFKA_VERSION_REVISION}")
+endmacro(parseversion)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/rdkafka.pc.in b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/rdkafka.pc.in
new file mode 100644
index 000000000..0eb17e856
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/rdkafka.pc.in
@@ -0,0 +1,12 @@
+prefix=@CMAKE_INSTALL_PREFIX@
+exec_prefix=${prefix}
+includedir=${prefix}/include
+libdir=${prefix}/lib
+
+Name: @PKG_CONFIG_NAME@
+Description: @PKG_CONFIG_DESCRIPTION@
+Version: @PKG_CONFIG_VERSION@
+Requires: @PKG_CONFIG_REQUIRES@
+Cflags: @PKG_CONFIG_CFLAGS@
+Libs: @PKG_CONFIG_LIBS@
+Libs.private: @PKG_CONFIG_LIBS_PRIVATE@
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_32_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_32_test.c
new file mode 100644
index 000000000..b3373bb8b
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_32_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int32_t foo(int32_t i) {
+ return __atomic_add_fetch(&i, 1, __ATOMIC_SEQ_CST);
+}
+
+int main() {
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_64_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_64_test.c
new file mode 100644
index 000000000..31922b85c
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/atomic_64_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int64_t foo(int64_t i) {
+ return __atomic_add_fetch(&i, 1, __ATOMIC_SEQ_CST);
+}
+
+int main() {
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/c11threads_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/c11threads_test.c
new file mode 100644
index 000000000..31681ae61
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/c11threads_test.c
@@ -0,0 +1,14 @@
+#include <threads.h>
+
+static int start_func(void *arg) {
+ int iarg = *(int *)arg;
+ return iarg;
+}
+
+int main(void) {
+ thrd_t thr;
+ int arg = 1;
+ if (thrd_create(&thr, start_func, (void *)&arg) != thrd_success) {
+ ;
+ }
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/crc32c_hw_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/crc32c_hw_test.c
new file mode 100644
index 000000000..e80097803
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/crc32c_hw_test.c
@@ -0,0 +1,27 @@
+#include <inttypes.h>
+#include <stdio.h>
+#define LONGx1 "8192"
+#define LONGx2 "16384"
+int main(void) {
+ const char *n = "abcdefghijklmnopqrstuvwxyz0123456789";
+ uint64_t c0 = 0, c1 = 1, c2 = 2;
+ uint64_t s;
+ uint32_t eax = 1, ecx;
+ __asm__("cpuid" : "=c"(ecx) : "a"(eax) : "%ebx", "%edx");
+ __asm__(
+ "crc32b\t"
+ "(%1), %0"
+ : "=r"(c0)
+ : "r"(n), "0"(c0));
+ __asm__(
+ "crc32q\t"
+ "(%3), %0\n\t"
+ "crc32q\t" LONGx1
+ "(%3), %1\n\t"
+ "crc32q\t" LONGx2 "(%3), %2"
+ : "=r"(c0), "=r"(c1), "=r"(c2)
+ : "r"(n), "0"(c0), "1"(c1), "2"(c2));
+ s = c0 + c1 + c2;
+ printf("avoiding unused code removal by printing %d, %d, %d\n", (int)s,
+ (int)eax, (int)ecx);
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/dlopen_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/dlopen_test.c
new file mode 100644
index 000000000..ecb478994
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/dlopen_test.c
@@ -0,0 +1,11 @@
+#include <string.h>
+#include <dlfcn.h>
+
+int main() {
+ void *h;
+ /* Try loading anything, we don't care if it works */
+ h = dlopen("__nothing_rdkafka.so", RTLD_NOW | RTLD_LOCAL);
+ if (h)
+ dlclose(h);
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/libsasl2_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/libsasl2_test.c
new file mode 100644
index 000000000..3f3ab3409
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/libsasl2_test.c
@@ -0,0 +1,7 @@
+#include <string.h>
+#include <sasl/sasl.h>
+
+int main() {
+ sasl_done();
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_darwin_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_darwin_test.c
new file mode 100644
index 000000000..73e31e069
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_darwin_test.c
@@ -0,0 +1,6 @@
+#include <pthread.h>
+
+int main() {
+ pthread_setname_np("abc");
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_freebsd_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_freebsd_test.c
new file mode 100644
index 000000000..329ace08e
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_freebsd_test.c
@@ -0,0 +1,7 @@
+#include <pthread.h>
+#include <pthread_np.h>
+
+int main() {
+ pthread_set_name_np(pthread_self(), "abc");
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_gnu_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_gnu_test.c
new file mode 100644
index 000000000..3be1b21bc
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/pthread_setname_gnu_test.c
@@ -0,0 +1,5 @@
+#include <pthread.h>
+
+int main() {
+ return pthread_setname_np(pthread_self(), "abc");
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rand_r_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rand_r_test.c
new file mode 100644
index 000000000..be722d0a0
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rand_r_test.c
@@ -0,0 +1,7 @@
+#include <stdlib.h>
+
+int main() {
+ unsigned int seed = 0xbeaf;
+ (void)rand_r(&seed);
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rdkafka_setup.cmake b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rdkafka_setup.cmake
new file mode 100644
index 000000000..5ea7f7dc6
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/rdkafka_setup.cmake
@@ -0,0 +1,122 @@
+try_compile(
+ HAVE_REGEX
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/regex_test.c"
+)
+
+try_compile(
+ HAVE_STRNDUP
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/strndup_test.c"
+)
+
+try_compile(
+ HAVE_RAND_R
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/rand_r_test.c"
+)
+
+try_compile(
+ HAVE_PTHREAD_SETNAME_GNU
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/pthread_setname_gnu_test.c"
+ COMPILE_DEFINITIONS "-D_GNU_SOURCE"
+ LINK_LIBRARIES "-lpthread"
+)
+
+try_compile(
+ HAVE_PTHREAD_SETNAME_DARWIN
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/pthread_setname_darwin_test.c"
+ COMPILE_DEFINITIONS "-D_DARWIN_C_SOURCE"
+ LINK_LIBRARIES "-lpthread"
+)
+
+try_compile(
+ HAVE_PTHREAD_SETNAME_FREEBSD
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/pthread_setname_freebsd_test.c"
+ LINK_LIBRARIES "-lpthread"
+)
+
+# Atomic 32 tests {
+set(LINK_ATOMIC NO)
+set(HAVE_ATOMICS_32 NO)
+set(HAVE_ATOMICS_32_SYNC NO)
+
+try_compile(
+ _atomics_32
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/atomic_32_test.c"
+)
+
+if(_atomics_32)
+ set(HAVE_ATOMICS_32 YES)
+else()
+ try_compile(
+ _atomics_32_lib
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/atomic_32_test.c"
+ LINK_LIBRARIES "-latomic"
+ )
+ if(_atomics_32_lib)
+ set(HAVE_ATOMICS_32 YES)
+ set(LINK_ATOMIC YES)
+ else()
+ try_compile(
+ HAVE_ATOMICS_32_SYNC
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/sync_32_test.c"
+ )
+ endif()
+endif()
+# }
+
+# Atomic 64 tests {
+set(HAVE_ATOMICS_64 NO)
+set(HAVE_ATOMICS_64_SYNC NO)
+
+try_compile(
+ _atomics_64
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/atomic_64_test.c"
+)
+
+if(_atomics_64)
+ set(HAVE_ATOMICS_64 YES)
+else()
+ try_compile(
+ _atomics_64_lib
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/atomic_64_test.c"
+ LINK_LIBRARIES "-latomic"
+ )
+ if(_atomics_64_lib)
+ set(HAVE_ATOMICS_64 YES)
+ set(LINK_ATOMIC YES)
+ else()
+ try_compile(
+ HAVE_ATOMICS_64_SYNC
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/sync_64_test.c"
+ )
+ endif()
+endif()
+# }
+
+# C11 threads
+try_compile(
+ WITH_C11THREADS
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/c11threads_test.c"
+ LINK_LIBRARIES "-pthread"
+)
+# }
+
+# CRC32C {
+try_compile(
+ WITH_CRC32C_HW
+ "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+ "${TRYCOMPILE_SRC_DIR}/crc32c_hw_test.c"
+)
+# }
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/regex_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/regex_test.c
new file mode 100644
index 000000000..329098d20
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/regex_test.c
@@ -0,0 +1,10 @@
+#include <stddef.h>
+#include <regex.h>
+
+int main() {
+ regcomp(NULL, NULL, 0);
+ regexec(NULL, NULL, 0, NULL, 0);
+ regerror(0, NULL, NULL, 0);
+ regfree(NULL);
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/strndup_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/strndup_test.c
new file mode 100644
index 000000000..a10b74526
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/strndup_test.c
@@ -0,0 +1,5 @@
+#include <string.h>
+
+int main() {
+ return strndup("hi", 2) ? 0 : 1;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_32_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_32_test.c
new file mode 100644
index 000000000..2bc80ab4c
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_32_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int32_t foo(int32_t i) {
+ return __sync_add_and_fetch(&i, 1);
+}
+
+int main() {
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_64_test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_64_test.c
new file mode 100644
index 000000000..4b6ad6d38
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cmake/try_compile/sync_64_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int64_t foo(int64_t i) {
+ return __sync_add_and_fetch(&i, 1);
+}
+
+int main() {
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/README.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/README.md
new file mode 100644
index 000000000..24a82f142
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/README.md
@@ -0,0 +1,14 @@
+# Confluent Platform package verification
+
+This small set of scripts verifies the librdkafka packages that
+are part of the Confluent Platform.
+
+The base_url is the HTTP S3 bucket path to a PR job, or similar.
+
+## How to use
+
+ $ ./verify-packages.sh 5.3 https://thes3bucketpath/X/Y
+
+
+Requires docker and patience.
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/check_features.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/check_features.c
new file mode 100644
index 000000000..4229402fd
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/check_features.c
@@ -0,0 +1,64 @@
+#include <stdio.h>
+#include <string.h>
+#include <librdkafka/rdkafka.h>
+
+int main(int argc, char **argv) {
+ rd_kafka_conf_t *conf;
+ char buf[512];
+ size_t sz = sizeof(buf);
+ rd_kafka_conf_res_t res;
+ static const char *expected_features = "ssl,sasl_gssapi,lz4,zstd";
+ char errstr[512];
+ int i;
+ int failures = 0;
+
+ printf("librdkafka %s (0x%x, define: 0x%x)\n", rd_kafka_version_str(),
+ rd_kafka_version(), RD_KAFKA_VERSION);
+
+ if (argc > 1 && !(argc & 1)) {
+ printf("Usage: %s [config.property config-value ..]\n",
+ argv[0]);
+ return 1;
+ }
+
+ conf = rd_kafka_conf_new();
+ res = rd_kafka_conf_get(conf, "builtin.features", buf, &sz);
+
+ if (res != RD_KAFKA_CONF_OK) {
+ printf("ERROR: conf_get failed: %d\n", res);
+ return 1;
+ }
+
+ printf("builtin.features: %s\n", buf);
+
+ /* librdkafka allows checking for expected features
+ * by setting the corresponding feature flags in builtin.features,
+ * which will return an error if one or more flags are not enabled. */
+ if (rd_kafka_conf_set(conf, "builtin.features", expected_features,
+ errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK) {
+ printf(
+ "ERROR: expected at least features: %s\n"
+ "got error: %s\n",
+ expected_features, errstr);
+ failures++;
+        } else {
+                printf("all expected features matched: %s\n",
+                       expected_features);
+        }
+
+
+ /* Apply config from argv key value pairs */
+ for (i = 1; i + 1 < argc; i += 2) {
+ printf("verifying config %s=%s\n", argv[i], argv[i + 1]);
+ if (rd_kafka_conf_set(conf, argv[i], argv[i + 1], errstr,
+ sizeof(errstr)) != RD_KAFKA_CONF_OK) {
+ printf("ERROR: failed to set %s=%s: %s\n", argv[i],
+ argv[i + 1], errstr);
+ failures++;
+ }
+ }
+
+ rd_kafka_conf_destroy(conf);
+
+ printf("%d failures\n", failures);
+
+ return !!failures;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-deb.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-deb.sh
new file mode 100755
index 000000000..1350d0655
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-deb.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+
+set -e
+
+cpver=$1
+base_url=$2
+
+if [[ -z $base_url ]]; then
+ echo "Usage: $0 <cp-base-ver> <base_url>"
+ exit 1
+fi
+
+apt-get update
+apt-get install -y apt-transport-https wget
+
+wget -qO - ${base_url}/deb/${cpver}/archive.key | apt-key add -
+
+
+cat >/etc/apt/sources.list.d/Confluent.list <<EOF
+deb [arch=amd64] $base_url/deb/${cpver} stable main
+EOF
+
+apt-get update
+apt-get install -y librdkafka-dev gcc
+
+gcc /v/check_features.c -o /tmp/check_features -lrdkafka
+
+/tmp/check_features
+
+# Verify plugins
+apt-get install -y confluent-librdkafka-plugins
+
+/tmp/check_features plugin.library.paths monitoring-interceptor
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-packages.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-packages.sh
new file mode 100755
index 000000000..ecddbd558
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-packages.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+#
+# Verifies RPM and DEB packages from Confluent Platform
+#
+
+cpver=$1
+base_url=$2
+
+if [[ -z $base_url ]]; then
+ echo "Usage: $0 <CP-M.m-version> <base-url>"
+ echo ""
+ echo " <CP-M.m-version> is the Major.minor version of CP, e.g., 5.3"
+ echo " <base-url> is the release base bucket URL"
+ exit 1
+fi
+
+thisdir="$( cd "$(dirname "$0")" ; pwd -P )"
+
+echo "#### Verifying RPM packages ####"
+docker run -v $thisdir:/v centos:7 /v/verify-rpm.sh $cpver $base_url
+rpm_status=$?
+
+echo "#### Verifying Debian packages ####"
+docker run -v $thisdir:/v ubuntu:16.04 /v/verify-deb.sh $cpver $base_url
+deb_status=$?
+
+
+if [[ $rpm_status == 0 ]]; then
+ echo "SUCCESS: RPM packages verified"
+else
+ echo "ERROR: RPM package verification failed"
+fi
+
+if [[ $deb_status == 0 ]]; then
+ echo "SUCCESS: Debian packages verified"
+else
+ echo "ERROR: Debian package verification failed"
+fi
+
+if [[ $deb_status != 0 || $rpm_status != 0 ]]; then
+ exit 1
+fi
+
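A usage sketch for this wrapper; 5.3 matches the Major.minor example from the
usage text above, and the bucket URL is a placeholder, not a real endpoint:

    # Verify both package types for CP 5.3 from a staging bucket:
    ./verify-packages.sh 5.3 https://staging-bucket.example.com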
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-rpm.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-rpm.sh
new file mode 100755
index 000000000..d7b3b1a14
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/cp/verify-rpm.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+#
+
+set -e
+
+cpver=$1
+base_url=$2
+
+if [[ -z $base_url ]]; then
+ echo "Usage: $0 <cp-base-ver> <base_url>"
+ exit 1
+fi
+
+cat >/etc/yum.repos.d/Confluent.repo <<EOF
+[Confluent.dist]
+name=Confluent repository (dist)
+baseurl=$base_url/rpm/${cpver}/7
+gpgcheck=0
+gpgkey=$base_url/rpm/${cpver}/archive.key
+enabled=1
+[Confluent]
+name=Confluent repository
+baseurl=$base_url/rpm/${cpver}
+gpgcheck=1
+gpgkey=$base_url/rpm/${cpver}/archive.key
+enabled=1
+EOF
+
+yum install -y librdkafka-devel gcc
+
+gcc /v/check_features.c -o /tmp/check_features -lrdkafka
+
+/tmp/check_features
+
+# Verify plugins
+yum install -y confluent-librdkafka-plugins
+
+/tmp/check_features plugin.library.paths monitoring-interceptor
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/.gitignore b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/.gitignore
new file mode 100644
index 000000000..eb66d4d31
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/.gitignore
@@ -0,0 +1,6 @@
+*.log
+files
+librdkafka-dev
+librdkafka1-dbg
+librdkafka1
+tmp
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/changelog b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/changelog
new file mode 100644
index 000000000..c50cb5aa8
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/changelog
@@ -0,0 +1,66 @@
+librdkafka (0.8.6-1) unstable; urgency=medium
+
+ * New upstream release.
+ * Backport upstream commit f6fd0da, adding --disable-silent-rules
+ compatibility support to mklove. (Closes: #788742)
+
+ -- Faidon Liambotis <paravoid@debian.org> Sun, 19 Jul 2015 01:36:18 +0300
+
+librdkafka (0.8.5-2) unstable; urgency=medium
+
+ * Install rdkafka.pc in the right, multiarch location. (Closes: #766759)
+
+ -- Faidon Liambotis <paravoid@debian.org> Sun, 26 Oct 2014 06:47:07 +0200
+
+librdkafka (0.8.5-1) unstable; urgency=medium
+
+ * New upstream release.
+ - Fixes kFreeBSD FTBFS.
+ * Ship rdkafka.pc pkg-config in librdkafka-dev.
+
+ -- Faidon Liambotis <paravoid@debian.org> Fri, 24 Oct 2014 18:03:22 +0300
+
+librdkafka (0.8.4-1) unstable; urgency=medium
+
+ * New upstream release, including a new build system.
+ - Add Build-Depends on perl, required by configure.
+ - Support multiarch library paths.
+ - Better detection of architecture atomic builtins, supporting more
+ architectures. (Closes: #739930)
+ - Various portability bugs fixed. (Closes: #730506)
+ - Update debian/librdkafka1.symbols.
+ * Convert to a multiarch package.
+ * Switch to Architecture: any, because of renewed upstream portability.
+ * Update debian/copyright to add src/ before Files: paths.
+ * Update Standards-Version to 3.9.6, no changes needed.
+ * Ship only the C library for now, not the new C++ library; the latter is
+ still in flux in some ways and will probably be shipped in a separate
+ package in a future release.
+
+ -- Faidon Liambotis <paravoid@debian.org> Wed, 22 Oct 2014 23:57:24 +0300
+
+librdkafka (0.8.3-1) unstable; urgency=medium
+
+ * New upstream release.
+ - Multiple internal symbols hidden; breaks ABI without a SONAME bump, but
+ these were internal and should not break any applications, packaged or
+ not.
+ * Update Standards-Version to 3.9.5, no changes needed.
+
+ -- Faidon Liambotis <paravoid@debian.org> Tue, 18 Feb 2014 02:21:43 +0200
+
+librdkafka (0.8.1-1) unstable; urgency=medium
+
+ * New upstream release.
+ - Multiple fixes to FTBFS on various architectures. (Closes: #730506)
+ - Remove dh_auto_clean override, fixed upstream.
+ * Limit the set of architectures: upstream currently relies on 64-bit atomic
+ operations that several Debian architectures do not support.
+
+ -- Faidon Liambotis <paravoid@debian.org> Thu, 05 Dec 2013 16:53:28 +0200
+
+librdkafka (0.8.0-1) unstable; urgency=low
+
+ * Initial release. (Closes: #710271)
+
+ -- Faidon Liambotis <paravoid@debian.org> Mon, 04 Nov 2013 16:50:07 +0200
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/compat b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/compat
new file mode 100644
index 000000000..ec635144f
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/compat
@@ -0,0 +1 @@
+9
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/control b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/control
new file mode 100644
index 000000000..510db8f23
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/control
@@ -0,0 +1,49 @@
+Source: librdkafka
+Priority: optional
+Maintainer: Faidon Liambotis <paravoid@debian.org>
+Build-Depends: debhelper (>= 9), zlib1g-dev, libssl-dev, libsasl2-dev, python3
+Standards-Version: 3.9.6
+Section: libs
+Homepage: https://github.com/edenhill/librdkafka
+Vcs-Git: git://github.com/edenhill/librdkafka.git -b debian
+Vcs-Browser: https://github.com/edenhill/librdkafka/tree/debian
+
+Package: librdkafka1
+Architecture: any
+Multi-Arch: same
+Depends: ${shlibs:Depends}, ${misc:Depends}
+Description: library implementing the Apache Kafka protocol
+ librdkafka is a C implementation of the Apache Kafka protocol. It currently
+ implements the 0.8 version of the protocol and can be used to develop both
+ Producers and Consumers.
+ .
+ More information about Apache Kafka can be found at http://kafka.apache.org/
+
+Package: librdkafka-dev
+Section: libdevel
+Architecture: any
+Multi-Arch: same
+Depends: librdkafka1 (= ${binary:Version}), ${misc:Depends}
+Description: library implementing the Apache Kafka protocol (development headers)
+ librdkafka is a C implementation of the Apache Kafka protocol. It currently
+ implements the 0.8 version of the protocol and can be used to develop both
+ Producers and Consumers.
+ .
+ More information about Apache Kafka can be found at http://kafka.apache.org/
+ .
+ This package contains the development headers.
+
+Package: librdkafka1-dbg
+Section: debug
+Priority: extra
+Architecture: any
+Multi-Arch: same
+Depends: librdkafka1 (= ${binary:Version}), ${misc:Depends}
+Description: library implementing the Apache Kafka protocol (debugging symbols)
+ librdkafka is a C implementation of the Apache Kafka protocol. It currently
+ implements the 0.8 version of the protocol and can be used to develop both
+ Producers and Consumers.
+ .
+ More information about Apache Kafka can be found at http://kafka.apache.org/
+ .
+ This package contains the debugging symbols.
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/copyright b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/copyright
new file mode 100644
index 000000000..20885d9f3
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/copyright
@@ -0,0 +1,84 @@
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: librdkafka
+Source: https://github.com/edenhill/librdkafka
+
+License: BSD-2-clause
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ .
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+Files: *
+Copyright: 2012-2015, Magnus Edenhill
+License: BSD-2-clause
+
+Files: src/rdcrc32.c src/rdcrc32.h
+Copyright: 2006-2012, Thomas Pircher <tehpeh@gmx.net>
+License: MIT
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+ .
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+ .
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+Files: src/snappy.c src/snappy.h src/snappy_compat.h
+Copyright: 2005, Google Inc.
+ 2011, Intel Corporation
+License: BSD-3-clause
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ .
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Files: debian/*
+Copyright: 2013 Faidon Liambotis <paravoid@debian.org>
+License: BSD-2-clause
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/docs b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/docs
new file mode 100644
index 000000000..0b76c34c4
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/docs
@@ -0,0 +1,5 @@
+README.md
+INTRODUCTION.md
+CONFIGURATION.md
+STATISTICS.md
+CHANGELOG.md
\ No newline at end of file
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/gbp.conf b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/gbp.conf
new file mode 100644
index 000000000..b2a0f02e3
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/gbp.conf
@@ -0,0 +1,9 @@
+[buildpackage]
+upstream-tree=tag
+upstream-branch=master
+debian-branch=debian
+upstream-tag=%(version)s
+debian-tag=debian/%(version)s
+no-create-orig = True
+tarball-dir = ../tarballs
+export-dir = ../build-area
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.dirs b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.dirs
new file mode 100644
index 000000000..44188162e
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.dirs
@@ -0,0 +1,2 @@
+usr/lib
+usr/include
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.examples b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.examples
new file mode 100644
index 000000000..b45032efe
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.examples
@@ -0,0 +1,2 @@
+examples/rdkafka_example.c
+examples/rdkafka_performance.c
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.install b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.install
new file mode 100644
index 000000000..478f660f5
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.install
@@ -0,0 +1,6 @@
+usr/include/*/rdkafka.h
+usr/include/*/rdkafkacpp.h
+usr/lib/*/librdkafka.a
+usr/lib/*/librdkafka.so
+usr/lib/*/librdkafka++.a
+usr/lib/*/librdkafka++.so
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.substvars b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.substvars
new file mode 100644
index 000000000..abd3ebebc
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka-dev.substvars
@@ -0,0 +1 @@
+misc:Depends=
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka.dsc b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka.dsc
new file mode 100644
index 000000000..447b9e656
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka.dsc
@@ -0,0 +1,16 @@
+Format: 3.0 (quilt)
+Source: librdkafka
+Binary: librdkafka1, librdkafka-dev, librdkafka1-dbg
+Architecture: any
+Version: 0.9.1-1pre1
+Maintainer: Magnus Edenhill <librdkafka@edenhill.se>
+Homepage: https://github.com/edenhill/librdkafka
+Standards-Version: 3.9.6
+Vcs-Browser: https://github.com/edenhill/librdkafka/tree/master
+Vcs-Git: git://github.com/edenhill/librdkafka.git -b master
+Build-Depends: debhelper (>= 9), zlib1g-dev, libssl-dev, libsasl2-dev, python3
+Package-List:
+ librdkafka-dev deb libdevel optional arch=any
+ librdkafka1 deb libs optional arch=any
+ librdkafka1-dbg deb debug extra arch=any
+Original-Maintainer: Faidon Liambotis <paravoid@debian.org>
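A .dsc file like this is consumed by dpkg-source; a minimal sketch of unpacking
it, assuming the tarballs it references sit in the same directory (the
extracted directory name follows the source name and upstream version):

    dpkg-source -x librdkafka.dsc
    cd librdkafka-0.9.1/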
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1-dbg.substvars b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1-dbg.substvars
new file mode 100644
index 000000000..abd3ebebc
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1-dbg.substvars
@@ -0,0 +1 @@
+misc:Depends=
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.dirs b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.dirs
new file mode 100644
index 000000000..68457717b
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.dirs
@@ -0,0 +1 @@
+usr/lib
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.install b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.install
new file mode 100644
index 000000000..7e86e5f18
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.install
@@ -0,0 +1,2 @@
+usr/lib/*/librdkafka.so.*
+usr/lib/*/librdkafka++.so.*
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postinst.debhelper b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postinst.debhelper
new file mode 100644
index 000000000..3d89d3ef6
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postinst.debhelper
@@ -0,0 +1,5 @@
+# Automatically added by dh_makeshlibs
+if [ "$1" = "configure" ]; then
+ ldconfig
+fi
+# End automatically added section
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postrm.debhelper b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postrm.debhelper
new file mode 100644
index 000000000..7f4404727
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.postrm.debhelper
@@ -0,0 +1,5 @@
+# Automatically added by dh_makeshlibs
+if [ "$1" = "remove" ]; then
+ ldconfig
+fi
+# End automatically added section
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.symbols b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.symbols
new file mode 100644
index 000000000..0ef576eb1
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/librdkafka1.symbols
@@ -0,0 +1,64 @@
+librdkafka.so.1 librdkafka1 #MINVER#
+* Build-Depends-Package: librdkafka-dev
+ rd_kafka_brokers_add@Base 0.8.0
+ rd_kafka_conf_destroy@Base 0.8.0
+ rd_kafka_conf_dump@Base 0.8.3
+ rd_kafka_conf_dump_free@Base 0.8.3
+ rd_kafka_conf_dup@Base 0.8.3
+ rd_kafka_conf_new@Base 0.8.0
+ rd_kafka_conf_properties_show@Base 0.8.0
+ rd_kafka_conf_set@Base 0.8.0
+ rd_kafka_conf_set_dr_cb@Base 0.8.0
+ rd_kafka_conf_set_dr_msg_cb@Base 0.8.4
+ rd_kafka_conf_set_error_cb@Base 0.8.0
+ rd_kafka_conf_set_log_cb@Base 0.8.4
+ rd_kafka_conf_set_opaque@Base 0.8.0
+ rd_kafka_conf_set_open_cb@Base 0.8.4
+ rd_kafka_conf_set_socket_cb@Base 0.8.4
+ rd_kafka_conf_set_stats_cb@Base 0.8.0
+ rd_kafka_consume@Base 0.8.0
+ rd_kafka_consume_batch@Base 0.8.0
+ rd_kafka_consume_batch_queue@Base 0.8.4
+ rd_kafka_consume_callback@Base 0.8.0
+ rd_kafka_consume_callback_queue@Base 0.8.4
+ rd_kafka_consume_queue@Base 0.8.4
+ rd_kafka_consume_start@Base 0.8.0
+ rd_kafka_consume_start_queue@Base 0.8.4
+ rd_kafka_consume_stop@Base 0.8.0
+ rd_kafka_destroy@Base 0.8.0
+ rd_kafka_dump@Base 0.8.0
+ rd_kafka_err2str@Base 0.8.0
+ rd_kafka_errno2err@Base 0.8.3
+ rd_kafka_log_print@Base 0.8.0
+ rd_kafka_log_syslog@Base 0.8.0
+ rd_kafka_message_destroy@Base 0.8.0
+ rd_kafka_metadata@Base 0.8.4
+ rd_kafka_metadata_destroy@Base 0.8.4
+ rd_kafka_msg_partitioner_random@Base 0.8.0
+ rd_kafka_name@Base 0.8.0
+ rd_kafka_new@Base 0.8.0
+ rd_kafka_offset_store@Base 0.8.3
+ rd_kafka_opaque@Base 0.8.4
+ rd_kafka_outq_len@Base 0.8.0
+ rd_kafka_poll@Base 0.8.0
+ rd_kafka_produce@Base 0.8.0
+ rd_kafka_produce_batch@Base 0.8.4
+ rd_kafka_queue_destroy@Base 0.8.4
+ rd_kafka_queue_new@Base 0.8.4
+ rd_kafka_set_log_level@Base 0.8.0
+ rd_kafka_set_logger@Base 0.8.0
+ rd_kafka_thread_cnt@Base 0.8.0
+ rd_kafka_topic_conf_destroy@Base 0.8.0
+ rd_kafka_topic_conf_dump@Base 0.8.3
+ rd_kafka_topic_conf_dup@Base 0.8.3
+ rd_kafka_topic_conf_new@Base 0.8.0
+ rd_kafka_topic_conf_set@Base 0.8.0
+ rd_kafka_topic_conf_set_opaque@Base 0.8.0
+ rd_kafka_topic_conf_set_partitioner_cb@Base 0.8.0
+ rd_kafka_topic_destroy@Base 0.8.0
+ rd_kafka_topic_name@Base 0.8.0
+ rd_kafka_topic_new@Base 0.8.0
+ rd_kafka_topic_partition_available@Base 0.8.0
+ rd_kafka_version@Base 0.8.1
+ rd_kafka_version_str@Base 0.8.1
+ rd_kafka_wait_destroyed@Base 0.8.0
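dpkg-gensymbols compares a built library against this symbols file during the
package build and prints a diff when the exported symbols have drifted; a
sketch of running the check by hand (the multiarch path is illustrative):

    dpkg-gensymbols -plibrdkafka1 \
        -edebian/librdkafka1/usr/lib/x86_64-linux-gnu/librdkafka.so.1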
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/rules b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/rules
new file mode 100755
index 000000000..a18c40d98
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/rules
@@ -0,0 +1,19 @@
+#!/usr/bin/make -f
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+
+%:
+ dh $@
+
+override_dh_strip:
+ dh_strip --dbg-package=librdkafka1-dbg
+
+override_dh_auto_install:
+ dh_auto_install
+ install -D -m 0644 rdkafka.pc \
+ debian/librdkafka-dev/usr/lib/${DEB_HOST_MULTIARCH}/pkgconfig/rdkafka.pc
+ install -D -m 0644 rdkafka-static.pc \
+ debian/librdkafka-dev/usr/lib/${DEB_HOST_MULTIARCH}/pkgconfig/rdkafka-static.pc
+
+.PHONY: override_dh_strip override_dh_auto_install
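These overrides are exercised by an ordinary debhelper build; a minimal sketch
from the unpacked source root:

    # Build unsigned binary packages, running debian/rules via dh:
    dpkg-buildpackage -us -uc -b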
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/source/format b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/source/format
new file mode 100644
index 000000000..163aaf8d8
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/source/format
@@ -0,0 +1 @@
+3.0 (quilt)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/watch b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/watch
new file mode 100644
index 000000000..fc9aec86f
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/debian/watch
@@ -0,0 +1,2 @@
+version=3
+http://github.com/edenhill/librdkafka/tags .*/(\d[\d\.]*)\.tar\.gz
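uscan reads this watch file to poll GitHub tags for newer upstream releases; a
sketch of a manual check from the source tree:

    # Report whether a newer upstream tarball is available, without downloading:
    uscan --no-download --verbose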
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/get_version.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/get_version.py
new file mode 100755
index 000000000..fad1d9718
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/get_version.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+
+import sys
+
+if len(sys.argv) != 2:
+ raise Exception('Usage: %s path/to/rdkafka.h' % sys.argv[0])
+
+kafka_h_file = sys.argv[1]
+f = open(kafka_h_file)
+for line in f:
+ if '#define RD_KAFKA_VERSION' in line:
+ version = line.split()[-1]
+ break
+else:
+ raise Exception('RD_KAFKA_VERSION not found in %s' % kafka_h_file)
+f.close()
+
+major = int(version[2:4], 16)
+minor = int(version[4:6], 16)
+patch = int(version[6:8], 16)
+version = '.'.join(str(item) for item in (major, minor, patch))
+
+print(version)
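RD_KAFKA_VERSION packs the major, minor and patch numbers into the top three
bytes of a hex literal, which the slicing above decodes; a usage sketch (the
hex value is illustrative):

    # Given a header line: #define RD_KAFKA_VERSION 0x020100ff
    $ ./get_version.py path/to/rdkafka.h
    2.1.0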
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/README.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/README.md
new file mode 100644
index 000000000..a23a08537
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/README.md
@@ -0,0 +1,15 @@
+# Update the Homebrew librdkafka package version
+
+The `./brew-update-pr.sh` script in this directory updates the
+brew formula for librdkafka and pushes a PR to the homebrew-core repository.
+
+You should run it in two steps: first in the default dry-run mode to
+check that everything looks correct, and then, if it does, in live
+upload mode to actually push the PR.
+
+ # Do a dry-run first, v0.11.0 is the librdkafka tag:
+ $ ./brew-update-pr.sh v0.11.0
+
+ # If everything looks okay, run the live upload mode:
+ $ ./brew-update-pr.sh --upload v0.11.0
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/brew-update-pr.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/brew-update-pr.sh
new file mode 100755
index 000000000..f756159cd
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/homebrew/brew-update-pr.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Automatically pushes a PR to homebrew-core to update
+# the librdkafka version.
+#
+# Usage:
+# # Dry-run:
+# ./brew-update-pr.sh v0.11.0
+# # if everything looks good:
+# ./brew-update-pr.sh --upload v0.11.0
+#
+
+
+DRY_RUN="--dry-run"
+if [[ $1 == "--upload" ]]; then
+ DRY_RUN=
+ shift
+fi
+
+TAG=$1
+
+if [[ -z $TAG ]]; then
+ echo "Usage: $0 [--upload] <librdkafka-tag>"
+ exit 1
+fi
+
+set -eu
+
+brew bump-formula-pr $DRY_RUN --strict \
+ --url=https://github.com/edenhill/librdkafka/archive/${TAG}.tar.gz \
+ librdkafka
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw-static.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw-static.sh
new file mode 100644
index 000000000..a5162caad
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw-static.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+set -e
+
+cmake \
+ -G "MinGW Makefiles" \
+ -D CMAKE_INSTALL_PREFIX="$PWD/dest/" \
+ -D RDKAFKA_BUILD_STATIC=ON \
+ .
+
+$mingw64 mingw32-make
+$mingw64 mingw32-make install
+
+# Bundle all the static dependencies with the static lib we just built
+mkdir mergescratch
+pushd mergescratch
+cp /C/msys64/mingw64/lib/libzstd.a ./
+cp /C/msys64/mingw64/lib/libcrypto.a ./
+cp /C/msys64/mingw64/lib/liblz4.a ./
+cp /C/msys64/mingw64/lib/libssl.a ./
+cp /C/msys64/mingw64/lib/libz.a ./
+cp ../src/librdkafka.a ./
+
+# Have to rename because ar won't work with + in the name
+cp ../src-cpp/librdkafka++.a ./librdkafkacpp.a
+ar -M << EOF
+create librdkafka-static.a
+addlib librdkafka.a
+addlib libzstd.a
+addlib libcrypto.a
+addlib liblz4.a
+addlib libssl.a
+addlib libz.a
+save
+end
+EOF
+
+ar -M << EOF
+create librdkafkacpp-static.a
+addlib librdkafka-static.a
+addlib librdkafkacpp.a
+save
+end
+EOF
+
+strip -g ./librdkafka-static.a
+strip -g ./librdkafkacpp-static.a
+cp ./librdkafka-static.a ../dest/lib/
+cp ./librdkafkacpp-static.a ../dest/lib/librdkafka++-static.a
+popd
+rm -rf ./mergescratch
+
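The ar MRI scripts above fold the dependency archives into a single
self-contained static library; a quick sanity check on the result could look
like this:

    # List archive members; objects from libzstd, libcrypto, liblz4,
    # libssl and libz should appear alongside librdkafka's own:
    ar t dest/lib/librdkafka-static.a | head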
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw.sh
new file mode 100644
index 000000000..b0b81fe0a
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/configure-build-msys2-mingw.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -e
+
+cmake \
+ -G "MinGW Makefiles" \
+ -D CMAKE_INSTALL_PREFIX="$PWD/dest/" \
+ -D WITHOUT_WIN32_CONFIG=ON \
+ -D RDKAFKA_BUILD_EXAMPLES=ON \
+ -D RDKAFKA_BUILD_TESTS=ON \
+ -D RDKAFKA_BUILD_STATIC=OFF \
+ -D CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS=TRUE .
+
+$mingw64 mingw32-make
+$mingw64 mingw32-make install
+
+cd tests
+cp ../dest/bin/librdkafka.dll ./
+cp ../dest/bin/librdkafka++.dll ./
+CI=true ./test-runner.exe -l -Q
+cd ..
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/run-tests.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/run-tests.sh
new file mode 100644
index 000000000..6749add5d
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/run-tests.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -e
+
+cd tests
+./test-runner.exe -l -Q -p1 0000
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/semaphoreci-build.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/semaphoreci-build.sh
new file mode 100644
index 000000000..378545b44
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/semaphoreci-build.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+#
+
+set -ex
+
+if [[ $1 == "--static" ]]; then
+ linkage="static"
+ shift
+else
+linkage="dynamic"
+fi
+
+if [[ -z $1 ]]; then
+ echo "Usage: $0 [--static] <relative-path-to-output-librdkafka.tgz>"
+ exit 1
+fi
+
+archive="${PWD}/$1"
+
+source ./packaging/mingw-w64/travis-before-install.sh
+
+if [[ $linkage == "static" ]]; then
+ ./packaging/mingw-w64/configure-build-msys2-mingw-static.sh
+else
+ ./packaging/mingw-w64/configure-build-msys2-mingw.sh
+fi
+
+
+./packaging/mingw-w64/run-tests.sh
+
+pushd dest
+tar cvzf $archive .
+sha256sum $archive
+popd
+
+
+
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/travis-before-install.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/travis-before-install.sh
new file mode 100644
index 000000000..e75507f93
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/mingw-w64/travis-before-install.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -e
+
+export msys2='cmd //C RefreshEnv.cmd '
+export msys2+='& set MSYS=winsymlinks:nativestrict '
+export msys2+='& C:\\msys64\\msys2_shell.cmd -defterm -no-start'
+export mingw64="$msys2 -mingw64 -full-path -here -c "\"\$@"\" --"
+export msys2+=" -msys2 -c "\"\$@"\" --"
+
+# Have to update pacman first or choco upgrade will fail due to the
+# migration to zstd instead of xz compression
+$msys2 pacman -Sy --noconfirm pacman
+
+## Install more MSYS2 packages from https://packages.msys2.org/base here
+$msys2 pacman --sync --noconfirm --needed mingw-w64-x86_64-gcc mingw-w64-x86_64-make mingw-w64-x86_64-cmake mingw-w64-x86_64-openssl mingw-w64-x86_64-lz4 mingw-w64-x86_64-zstd
+
+taskkill //IM gpg-agent.exe //F || true # https://travis-ci.community/t/4967
+export PATH=/C/msys64/mingw64/bin:$PATH
+export MAKE=mingw32-make # so that Autotools can find it
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore
new file mode 100644
index 000000000..56919a155
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/.gitignore
@@ -0,0 +1,7 @@
+dl-*
+out-*
+*.nupkg
+*.tgz
+*.key
+*.pyc
+__pycache__
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md
new file mode 100644
index 000000000..87b176930
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/README.md
@@ -0,0 +1,78 @@
+# Package assembly
+
+This set of scripts collects CI artifacts from a local directory or S3 and
+assembles them into a package structure defined by a packaging class in a
+staging directory.
+For the NugetPackage class the NuGet tool is then run (from within docker) on
+this staging directory to create a proper NuGet package (with all the
+metadata), while the StaticPackage class creates a tarball.
+
+The finalized NuGet package may be uploaded manually to NuGet.org.
+
+## Requirements
+
+ * Requires Python 3
+ * Requires Docker
+ * (if --s3) Requires private S3 access keys for the librdkafka-ci-packages bucket.
+
+
+
+## Usage
+
+1. Trigger CI builds by creating and pushing a new release (candidate) tag
+ in the librdkafka repo. Make sure the tag is created on the correct branch.
+
+ $ git tag v0.11.0-RC3
+ $ git push origin v0.11.0-RC3
+
+2. Wait for CI builds to finish, monitor the builds here:
+
+ * https://travis-ci.org/edenhill/librdkafka
+ * https://ci.appveyor.com/project/edenhill/librdkafka
+
+   Or, if using SemaphoreCI, just have the packaging job depend on prior
+   build jobs in the same pipeline.
+
+3. On a Linux host, run the release.py script to assemble the NuGet package
+
+ $ cd packaging/nuget
+ # Specify the tag
+ $ ./release.py v0.11.0-RC3
+ # Optionally, if the tag was moved and an exact sha is also required:
+ # $ ./release.py --sha <the-full-git-sha> v0.11.0-RC3
+
+4. If all artifacts were available, the NuGet package will be built
+ and reside in the current directory as librdkafka.redist.<v-less-tag>.nupkg
+
+5. Test the package manually
+
+6. Upload the package to NuGet
+
+ * https://www.nuget.org/packages/manage/upload
+
+7. If you trust this process you can have release.py upload the package
+ automatically to NuGet after building it:
+
+ $ ./release.py --retries 100 --upload your-nuget-api.key v0.11.0-RC3
+
+
+
+## Other uses
+
+### Create static library bundles
+
+To create a bundle (tarball) of librdkafka self-contained static library
+builds, use the following command:
+
+ $ ./release.py --class StaticPackage v1.1.0
+
+
+### Clean up S3 bucket
+
+To clean up old non-release/non-RC builds from the S3 bucket, first check with:
+
+ $ AWS_PROFILE=.. ./cleanup-s3.py --age 360
+
+Verify that the listed objects should really be deleted, then delete:
+
+ $ AWS_PROFILE=.. ./cleanup-s3.py --age 360 --delete
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py
new file mode 100755
index 000000000..c58e0c9c7
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/artifact.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+#
+#
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+# <token>-[<value>]__ (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+# p - project (e.g., "confluent-kafka-python")
+# bld - builder (e.g., "travis")
+# plat - platform ("osx", "linux", ..)
+# arch - arch ("x64", ..)
+# tag - git tag
+# sha - git sha
+# bid - builder's build-id
+# bldtype - Release, Debug (appveyor)
+#
+# Example:
+# p-confluent-kafka-python__bld-travis__plat-linux__tag-__sha-112130ce297656ea1c39e7c94c99286f95133a24__bid-271588764__/confluent_kafka-0.11.0-cp35-cp35m-manylinux1_x86_64.whl
+
+
+import re
+import os
+import boto3
+
+import packaging
+
+s3_bucket = 'librdkafka-ci-packages'
+dry_run = False
+
+
+class Artifact (object):
+ def __init__(self, arts, path, info=None):
+ self.path = path
+ # Remove unexpanded AppVeyor $(..) tokens from filename
+ self.fname = re.sub(r'\$\([^\)]+\)', '', os.path.basename(path))
+ slpath = os.path.join(os.path.dirname(path), self.fname)
+ if os.path.isfile(slpath):
+ # Already points to local file in correct location
+ self.lpath = slpath
+ else:
+ # Prepare download location in dlpath
+ self.lpath = os.path.join(arts.dlpath, slpath)
+
+ if info is None:
+ self.info = dict()
+ else:
+ # Assign the map and convert all keys to lower case
+ self.info = {k.lower(): v for k, v in info.items()}
+ # Rename values, e.g., 'plat':'windows' to 'plat':'win'
+ for k, v in self.info.items():
+ rdict = packaging.rename_vals.get(k, None)
+ if rdict is not None:
+ self.info[k] = rdict.get(v, v)
+
+ # Score value for sorting
+ self.score = 0
+
+ # AppVeyor symbol builds are of less value
+ if self.fname.find('.symbols.') != -1:
+ self.score -= 10
+
+ self.arts = arts
+ arts.artifacts.append(self)
+
+ def __repr__(self):
+ return self.path
+
+ def __lt__(self, other):
+ return self.score < other.score
+
+ def download(self):
+ """ Download artifact from S3 and store in local directory .lpath.
+ If the artifact is already downloaded nothing is done. """
+ if os.path.isfile(self.lpath) and os.path.getsize(self.lpath) > 0:
+ return
+ print('Downloading %s -> %s' % (self.path, self.lpath))
+ if dry_run:
+ return
+ ldir = os.path.dirname(self.lpath)
+ if not os.path.isdir(ldir):
+ os.makedirs(ldir, 0o755)
+ self.arts.s3_bucket.download_file(self.path, self.lpath)
+
+
+class Artifacts (object):
+ def __init__(self, match, dlpath):
+ super(Artifacts, self).__init__()
+ self.match = match
+ self.artifacts = list()
+ # Download directory (make sure it ends with a path separator)
+ if not dlpath.endswith(os.path.sep):
+ dlpath = os.path.join(dlpath, '')
+ self.dlpath = dlpath
+ if not os.path.isdir(self.dlpath):
+ if not dry_run:
+ os.makedirs(self.dlpath, 0o755)
+
+ def collect_single(self, path, req_tag=True):
+ """ Collect single artifact, be it in S3 or locally.
+ :param: path string: S3 or local (relative) path
+ :param: req_tag bool: Require tag to match.
+ """
+
+ print('? %s' % path)
+
+ # For local files, strip download path.
+ # Also ignore any parent directories.
+ if path.startswith(self.dlpath):
+ folder = os.path.basename(os.path.dirname(path[len(self.dlpath):]))
+ else:
+ folder = os.path.basename(os.path.dirname(path))
+
+ # The folder contains the tokens needed to perform
+ # matching of project, gitref, etc.
+ rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)__', folder)
+ if rinfo is None or len(rinfo) == 0:
+ print('Incorrect folder/file name format for %s' % folder)
+ return None
+
+ info = dict(rinfo)
+
+ # Ignore AppVeyor Debug builds
+ if info.get('bldtype', '').lower() == 'debug':
+ print('Ignoring debug artifact %s' % folder)
+ return None
+
+ tag = info.get('tag', None)
+ if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+ # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+ # with an empty value when not set, it leaves that token
+ # in the string - so translate that to no tag.
+ del info['tag']
+
+ # Match tag or sha to gitref
+ unmatched = list()
+ for m, v in self.match.items():
+ if m not in info or info[m] != v:
+ unmatched.append(m)
+
+ # Make sure all matches were satisfied, unless this is a
+ # common artifact.
+ if info.get('p', '') != 'common' and len(unmatched) > 0:
+ print(info)
+ print('%s: %s did not match %s' %
+ (info.get('p', None), folder, unmatched))
+ return None
+
+ return Artifact(self, path, info)
+
+ def collect_s3(self):
+ """ Collect and download build-artifacts from S3 based on
+ git reference """
+ print(
+ 'Collecting artifacts matching %s from S3 bucket %s' %
+ (self.match, s3_bucket))
+ self.s3 = boto3.resource('s3')
+ self.s3_bucket = self.s3.Bucket(s3_bucket)
+ self.s3_client = boto3.client('s3')
+ for item in self.s3_client.list_objects(
+ Bucket=s3_bucket, Prefix='librdkafka/').get('Contents'):
+ self.collect_single(item.get('Key'))
+
+ for a in self.artifacts:
+ a.download()
+
+ def collect_local(self, path, req_tag=True):
+ """ Collect artifacts from a local directory possibly previously
+ collected from s3 """
+ for f in [os.path.join(dp, f) for dp, dn,
+ filenames in os.walk(path) for f in filenames]:
+ if not os.path.isfile(f):
+ continue
+ self.collect_single(f, req_tag)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py
new file mode 100755
index 000000000..2093af0c1
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/cleanup-s3.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+#
+# Clean up test builds from librdkafka's S3 bucket.
+# This also covers python builds.
+
+import re
+from datetime import datetime, timezone
+import boto3
+import argparse
+
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory, or collecting already downloaded artifacts from
+# local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+# <token>-[<value>]__ (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+# p - project (e.g., "confluent-kafka-python")
+# bld - builder (e.g., "travis")
+# plat - platform ("osx", "linux", ..)
+# arch - arch ("x64", ..)
+# tag - git tag
+# sha - git sha
+# bid - builder's build-id
+# bldtype - Release, Debug (appveyor)
+# lnk - std, static
+#
+# Example:
+# librdkafka/p-librdkafka__bld-travis__plat-linux__arch-x64__tag-v0.0.62__sha-d051b2c19eb0c118991cd8bc5cf86d8e5e446cde__bid-1562.1/librdkafka.tar.gz
+
+
+s3_bucket = 'librdkafka-ci-packages'
+
+
+def may_delete(path):
+ """ Returns true if S3 object path is eligible for deletion, e.g.
+ has a non-release/rc tag. """
+
+ # The path contains the tokens needed to perform
+ # matching of project, gitref, etc.
+ rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)(?:__|$)', path)
+ if rinfo is None or len(rinfo) == 0:
+ print(f"Incorrect folder/file name format for {path}")
+ return False
+
+ info = dict(rinfo)
+
+ tag = info.get('tag', None)
+ if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+ # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+ # with an empty value when not set, it leaves that token
+ # in the string - so translate that to no tag.
+ del info['tag']
+ tag = None
+
+ if tag is None:
+ return True
+
+ if re.match(r'^v?\d+\.\d+\.\d+(-?RC\d+)?$', tag,
+ flags=re.IGNORECASE) is None:
+ return True
+
+ return False
+
+
+def collect_s3(s3, min_age_days=60):
+ """ Collect artifacts from S3 """
+ now = datetime.now(timezone.utc)
+ eligible = []
+ totcnt = 0
+ # note: list_objects will return at most 1000 objects per call,
+ # use continuation token to read full list.
+ cont_token = None
+ more = True
+ while more:
+ if cont_token is not None:
+ res = s3.list_objects_v2(Bucket=s3_bucket,
+ ContinuationToken=cont_token)
+ else:
+ res = s3.list_objects_v2(Bucket=s3_bucket)
+
+ if res.get('IsTruncated') is True:
+ cont_token = res.get('NextContinuationToken')
+ else:
+ more = False
+
+ for item in res.get('Contents'):
+ totcnt += 1
+ age = (now - item.get('LastModified')).days
+ path = item.get('Key')
+ if age >= min_age_days and may_delete(path):
+ eligible.append(path)
+
+ return (eligible, totcnt)
+
+
+def chunk_list(lst, cnt):
+ """ Split list into lists of cnt """
+ for i in range(0, len(lst), cnt):
+ yield lst[i:i + cnt]
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--delete",
+ help="WARNING! Don't just check, actually delete "
+ "S3 objects.",
+ action="store_true")
+ parser.add_argument("--age", help="Minimum object age in days.",
+ type=int, default=360)
+
+ args = parser.parse_args()
+ dry_run = args.delete is not True
+ min_age_days = args.age
+
+ if dry_run:
+ op = "Eligible for deletion"
+ else:
+ op = "Deleting"
+
+ s3 = boto3.client('s3')
+
+ # Collect eligible artifacts
+ eligible, totcnt = collect_s3(s3, min_age_days=min_age_days)
+ print(f"{len(eligible)}/{totcnt} eligible artifacts to delete")
+
+ # Delete in chunks of 1000 (max what the S3 API can do)
+ for chunk in chunk_list(eligible, 1000):
+ print(op + ":\n" + '\n'.join(chunk))
+ if dry_run:
+ continue
+
+ res = s3.delete_objects(Bucket=s3_bucket,
+ Delete={
+ 'Objects': [{'Key': x} for x in chunk],
+ 'Quiet': True
+ })
+ errors = res.get('Errors', [])
+ if len(errors) > 0:
+ raise Exception(f"Delete failed: {errors}")
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip
new file mode 100644
index 000000000..9bc5e9fbc
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip
Binary files differ
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip
new file mode 100644
index 000000000..152938138
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr140.zip
Binary files differ
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip
new file mode 100644
index 000000000..3609c0385
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip
Binary files differ
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip
new file mode 100644
index 000000000..b99e5ae5b
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr140.zip
Binary files differ
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh
new file mode 100755
index 000000000..032371231
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nuget.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+#
+# Front-end for nuget that runs nuget in a docker image.
+
+set -ex
+
+if [[ -f /.dockerenv ]]; then
+ echo "Inside docker"
+
+ pushd $(dirname $0)
+
+ nuget $*
+
+ popd
+
+else
+ echo "Running docker image"
+ docker run -v $(pwd):/io mono:latest /io/$0 $*
+fi
+
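nugetpackage.py drives this wrapper with a pack command (see the subprocess
call in nugetpackage.py below); invoked by hand it would look roughly like
this, with illustrative paths:

    ./nuget.sh pack out-release/librdkafka.redist.nuspec \
        -BasePath out-release -NonInteractive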
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py
new file mode 100644
index 000000000..aea05ade0
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/nugetpackage.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python3
+#
+# Create NuGet package
+#
+
+import os
+import tempfile
+import shutil
+import subprocess
+from packaging import Package, Mapping
+
+
+class NugetPackage (Package):
+ """ All platforms, archs, et.al, are bundled into one set of
+ NuGet output packages: "main", redist and symbols """
+
+ # See .semaphore/semaphore.yml for where these are built.
+ mappings = [
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafka.h',
+ 'build/native/include/librdkafka/rdkafka.h'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafkacpp.h',
+ 'build/native/include/librdkafka/rdkafkacpp.h'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafka_mock.h',
+ 'build/native/include/librdkafka/rdkafka_mock.h'),
+
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/README.md',
+ 'README.md'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/CONFIGURATION.md',
+ 'CONFIGURATION.md'),
+ Mapping({'arch': 'x64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/LICENSES.txt',
+ 'LICENSES.txt'),
+
+ # OSX x64
+ Mapping({'arch': 'x64',
+ 'plat': 'osx'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.dylib',
+ 'runtimes/osx-x64/native/librdkafka.dylib'),
+ # OSX arm64
+ Mapping({'arch': 'arm64',
+ 'plat': 'osx'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.1.dylib',
+ 'runtimes/osx-arm64/native/librdkafka.dylib'),
+
+ # Linux glibc centos6 x64 with GSSAPI
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/librdkafka.so'),
+ # Linux glibc centos6 x64 without GSSAPI (no external deps)
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/centos6-librdkafka.so'),
+ # Linux glibc centos7 x64 with GSSAPI
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'std'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/centos7-librdkafka.so'),
+ # Linux glibc centos7 arm64 without GSSAPI (no external deps)
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-arm64/native/librdkafka.so'),
+
+ # Linux musl alpine x64 without GSSAPI (no external deps)
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka.so.1',
+ 'runtimes/linux-x64/native/alpine-librdkafka.so'),
+
+ # Common Win runtime
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'vcruntime140.dll',
+ 'runtimes/win-x64/native/vcruntime140.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'msvcp140.dll', 'runtimes/win-x64/native/msvcp140.dll'),
+
+ # matches x64 librdkafka.redist.zip
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/librdkafka.dll',
+ 'runtimes/win-x64/native/librdkafka.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/librdkafkacpp.dll',
+ 'runtimes/win-x64/native/librdkafkacpp.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/libcrypto-3-x64.dll',
+ 'runtimes/win-x64/native/libcrypto-3-x64.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/libssl-3-x64.dll',
+ 'runtimes/win-x64/native/libssl-3-x64.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/zlib1.dll',
+ 'runtimes/win-x64/native/zlib1.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/zstd.dll',
+ 'runtimes/win-x64/native/zstd.dll'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/x64/Release/libcurl.dll',
+ 'runtimes/win-x64/native/libcurl.dll'),
+ # matches x64 librdkafka.redist.zip, lib files
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/x64/Release/librdkafka.lib',
+ 'build/native/lib/win/x64/win-x64-Release/v142/librdkafka.lib' # noqa: E501
+ ),
+ Mapping({'arch': 'x64',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/x64/Release/librdkafkacpp.lib',
+ 'build/native/lib/win/x64/win-x64-Release/v142/librdkafkacpp.lib' # noqa: E501
+ ),
+
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'vcruntime140.dll',
+ 'runtimes/win-x86/native/vcruntime140.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'msvcr140.zip',
+ 'msvcp140.dll', 'runtimes/win-x86/native/msvcp140.dll'),
+
+ # matches Win32 librdkafka.redist.zip
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/librdkafka.dll',
+ 'runtimes/win-x86/native/librdkafka.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/librdkafkacpp.dll',
+ 'runtimes/win-x86/native/librdkafkacpp.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/libcrypto-3.dll',
+ 'runtimes/win-x86/native/libcrypto-3.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/libssl-3.dll',
+ 'runtimes/win-x86/native/libssl-3.dll'),
+
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/zlib1.dll',
+ 'runtimes/win-x86/native/zlib1.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/zstd.dll',
+ 'runtimes/win-x86/native/zstd.dll'),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/bin/v142/Win32/Release/libcurl.dll',
+ 'runtimes/win-x86/native/libcurl.dll'),
+
+ # matches Win32 librdkafka.redist.zip, lib files
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/Win32/Release/librdkafka.lib',
+ 'build/native/lib/win/x86/win-x86-Release/v142/librdkafka.lib' # noqa: E501
+ ),
+ Mapping({'arch': 'x86',
+ 'plat': 'win'},
+ 'librdkafka.redist*',
+ 'build/native/lib/v142/Win32/Release/librdkafkacpp.lib',
+ 'build/native/lib/win/x86/win-x86-Release/v142/librdkafkacpp.lib' # noqa: E501
+ )
+ ]
+
+ def __init__(self, version, arts):
+ if version.startswith('v'):
+ version = version[1:] # Strip v prefix
+ super(NugetPackage, self).__init__(version, arts)
+
+ def cleanup(self):
+ if os.path.isdir(self.stpath):
+ shutil.rmtree(self.stpath)
+
+ def build(self, buildtype):
+ """ Build single NuGet package for all its artifacts. """
+
+ # NuGet removes the prefixing v from the version.
+ vless_version = self.kv['version']
+ if vless_version[0] == 'v':
+ vless_version = vless_version[1:]
+
+ self.stpath = tempfile.mkdtemp(prefix="out-", suffix="-%s" % buildtype,
+ dir=".")
+
+ self.render('librdkafka.redist.nuspec')
+ self.copy_template('librdkafka.redist.targets',
+ destpath=os.path.join('build', 'native'))
+ self.copy_template('librdkafka.redist.props',
+ destpath='build')
+
+ # Generate template tokens for artifacts
+ for a in self.arts.artifacts:
+ if 'bldtype' not in a.info:
+ a.info['bldtype'] = 'release'
+
+ a.info['variant'] = '%s-%s-%s' % (a.info.get('plat'),
+ a.info.get('arch'),
+ a.info.get('bldtype'))
+ if 'toolset' not in a.info:
+ a.info['toolset'] = 'v142'
+
+ # Apply mappings and extract files
+ self.apply_mappings()
+
+ print('Tree extracted to %s' % self.stpath)
+
+ # After creating a bare-bone nupkg layout containing the artifacts
+ # and some spec and props files, call the 'nuget' utility to
+ # make a proper nupkg of it (with all the metadata files).
+ subprocess.check_call("./nuget.sh pack %s -BasePath '%s' -NonInteractive" % # noqa: E501
+ (os.path.join(self.stpath,
+ 'librdkafka.redist.nuspec'),
+ self.stpath), shell=True)
+
+ return 'librdkafka.redist.%s.nupkg' % vless_version
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py
new file mode 100755
index 000000000..c4dab806d
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/packaging.py
@@ -0,0 +1,448 @@
+#!/usr/bin/env python3
+#
+# Packaging script.
+# Assembles packages using CI artifacts.
+#
+
+import sys
+import re
+import os
+import shutil
+from fnmatch import fnmatch
+from string import Template
+from zfile import zfile
+import boto3
+import magic
+
+if sys.version_info[0] < 3:
+ from urllib import unquote as _unquote
+else:
+ from urllib.parse import unquote as _unquote
+
+
+def unquote(path):
+ # Removes URL escapes, and normalizes the path by removing ./.
+ path = _unquote(path)
+ if path[:2] == './':
+ return path[2:]
+ return path
+
+
+# Rename token values
+rename_vals = {'plat': {'windows': 'win'},
+ 'arch': {'x86_64': 'x64',
+ 'amd64': 'x64',
+ 'i386': 'x86',
+ 'win32': 'x86'}}
+
+# Filemagic arch mapping.
+# key is (plat, arch, file_extension), value is a compiled filemagic regex.
+# This is used to verify that an artifact has the expected file type.
+magic_patterns = {
+ ('win', 'x64', '.dll'): re.compile('PE32.*DLL.* x86-64, for MS Windows'),
+ ('win', 'x86', '.dll'):
+ re.compile('PE32.*DLL.* Intel 80386, for MS Windows'),
+ ('win', 'x64', '.lib'): re.compile('current ar archive'),
+ ('win', 'x86', '.lib'): re.compile('current ar archive'),
+ ('linux', 'x64', '.so'): re.compile('ELF 64.* x86-64'),
+ ('linux', 'arm64', '.so'): re.compile('ELF 64.* ARM aarch64'),
+ ('osx', 'x64', '.dylib'): re.compile('Mach-O 64.* x86_64'),
+ ('osx', 'arm64', '.dylib'): re.compile('Mach-O 64.*arm64')}
+
+magic = magic.Magic()
+
+
+def magic_mismatch(path, a):
+ """ Verify that the filemagic for \\p path matches for artifact \\p a.
+ Returns True if the magic file info does NOT match.
+ Returns False if no matching is needed or the magic matches. """
+ k = (a.info.get('plat', None), a.info.get('arch', None),
+ os.path.splitext(path)[1])
+ pattern = magic_patterns.get(k, None)
+ if pattern is None:
+ return False
+
+ minfo = magic.id_filename(path)
+ if not pattern.match(minfo):
+ print(
+ f"Warning: {path} magic \"{minfo}\" "
+ f"does not match expected {pattern} for key {k}")
+ return True
+
+ return False
+
+
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory, or collecting already downloaded artifacts from
+# local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+# <token>-[<value>]__ (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+# p - project (e.g., "confluent-kafka-python")
+# bld - builder (e.g., "travis")
+# plat - platform ("osx", "linux", ..)
+# dist - distro or runtime ("centos6", "mingw", "msvcr", "alpine", ..).
+# arch - arch ("x64", ..)
+# tag - git tag
+# sha - git sha
+# bid - builder's build-id
+# bldtype - Release, Debug (appveyor)
+# lnk - Linkage ("std", "static", "all" (both std and static))
+# extra - Extra build options, typically "gssapi" (for cyrus-sasl linking).
+
+#
+# Example:
+# librdkafka/p-librdkafka__bld-travis__plat-linux__arch-x64__tag-v0.0.62__sha-d051b2c19eb0c118991cd8bc5cf86d8e5e446cde__bid-1562.1/librdkafka.tar.gz
+
+
+class MissingArtifactError(Exception):
+ pass
+
+
+s3_bucket = 'librdkafka-ci-packages'
+dry_run = False
+
+
+class Artifact (object):
+ def __init__(self, arts, path, info=None):
+ self.path = path
+ # Remove unexpanded AppVeyor $(..) tokens from filename
+ self.fname = re.sub(r'\$\([^\)]+\)', '', os.path.basename(path))
+ slpath = os.path.join(os.path.dirname(path), self.fname)
+ if os.path.isfile(slpath):
+ # Already points to local file in correct location
+ self.lpath = slpath
+ else:
+ # Prepare download location in dlpath
+ self.lpath = os.path.join(arts.dlpath, slpath)
+
+ if info is None:
+ self.info = dict()
+ else:
+ # Assign the map and convert all keys to lower case
+ self.info = {k.lower(): v for k, v in info.items()}
+ # Rename values, e.g., 'plat':'windows' to 'plat':'win'
+ for k, v in self.info.items():
+ rdict = rename_vals.get(k, None)
+ if rdict is not None:
+ self.info[k] = rdict.get(v, v)
+
+ # Score value for sorting
+ self.score = 0
+
+ # AppVeyor symbol builds are of less value
+ if self.fname.find('.symbols.') != -1:
+ self.score -= 10
+
+ self.arts = arts
+ arts.artifacts.append(self)
+
+ def __repr__(self):
+ return self.path
+
+ def __lt__(self, other):
+ return self.score < other.score
+
+ def download(self):
+ """ Download artifact from S3 and store in local directory .lpath.
+ If the artifact is already downloaded nothing is done. """
+ if os.path.isfile(self.lpath) and os.path.getsize(self.lpath) > 0:
+ return
+ print('Downloading %s' % self.path)
+ if dry_run:
+ return
+ ldir = os.path.dirname(self.lpath)
+ if not os.path.isdir(ldir):
+ os.makedirs(ldir, 0o755)
+ self.arts.s3_bucket.download_file(self.path, self.lpath)
+
+
+class Artifacts (object):
+ def __init__(self, match, dlpath):
+ super(Artifacts, self).__init__()
+ self.match = match
+ self.artifacts = list()
+ # Download directory (make sure it ends with a path separator)
+ if not dlpath.endswith(os.path.sep):
+ dlpath = os.path.join(dlpath, '')
+ self.dlpath = dlpath
+ if not os.path.isdir(self.dlpath):
+ if not dry_run:
+ os.makedirs(self.dlpath, 0o755)
+
+ def collect_single(self, path, req_tag=True):
+ """ Collect single artifact, be it in S3 or locally.
+        :param path: string: S3 or local (relative) path
+        :param req_tag: bool: Require tag to match.
+ """
+
+ # For local files, strip download path.
+ # Also ignore any parent directories.
+ if path.startswith(self.dlpath):
+ folder = os.path.basename(os.path.dirname(path[len(self.dlpath):]))
+ else:
+ folder = os.path.basename(os.path.dirname(path))
+
+ # The folder contains the tokens needed to perform
+ # matching of project, gitref, etc.
+ rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)(?:__|$)', folder)
+        if not rinfo:
+ print('Incorrect folder/file name format for %s' % folder)
+ return None
+
+ info = dict(rinfo)
+
+ # Ignore AppVeyor Debug builds
+ if info.get('bldtype', '').lower() == 'debug':
+ print('Ignoring debug artifact %s' % folder)
+ return None
+
+ tag = info.get('tag', None)
+ if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+            # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+            # with an empty value when the tag is not set; it leaves the
+            # token in the string, so translate that to no tag.
+ del info['tag']
+
+ # Perform matching
+ unmatched = list()
+ for m, v in self.match.items():
+ if m not in info or info[m] != v:
+ unmatched.append(f"{m} = {v}")
+
+ # Make sure all matches were satisfied, unless this is a
+ # common artifact.
+ if info.get('p', '') != 'common' and len(unmatched) > 0:
+ return None
+
+ return Artifact(self, path, info)
+
+ def collect_s3(self):
+ """ Collect and download build-artifacts from S3 based on
+ git reference """
+ print(
+ 'Collecting artifacts matching %s from S3 bucket %s' %
+ (self.match, s3_bucket))
+ self.s3 = boto3.resource('s3')
+ self.s3_bucket = self.s3.Bucket(s3_bucket)
+ self.s3_client = boto3.client('s3')
+
+        # Note: list_objects returns at most 1000 objects per call,
+        #       so use the continuation token to read the full list.
+ cont_token = None
+ more = True
+ while more:
+ if cont_token is not None:
+ res = self.s3_client.list_objects_v2(
+ Bucket=s3_bucket,
+ Prefix='librdkafka/',
+ ContinuationToken=cont_token)
+ else:
+ res = self.s3_client.list_objects_v2(Bucket=s3_bucket,
+ Prefix='librdkafka/')
+
+ if res.get('IsTruncated') is True:
+ cont_token = res.get('NextContinuationToken')
+ else:
+ more = False
+
+ for item in res.get('Contents'):
+ self.collect_single(item.get('Key'))
+
+ for a in self.artifacts:
+ a.download()
+
+ def collect_local(self, path, req_tag=True):
+        """ Collect artifacts from a local directory, possibly previously
+            collected from S3 """
+ for f in [os.path.join(dp, f) for dp, dn,
+ filenames in os.walk(path) for f in filenames]:
+ if not os.path.isfile(f):
+ continue
+ self.collect_single(f, req_tag)
+
+
+class Mapping (object):
+ """ Maps/matches a file in an input release artifact to
+ the output location of the package, based on attributes and paths. """
+
+ def __init__(self, attributes, artifact_fname_glob, path_in_artifact,
+ output_pkg_path=None, artifact_fname_excludes=[]):
+ """
+ @param attributes A dict of artifact attributes that must match.
+ If an attribute name (dict key) is prefixed
+ with "!" (e.g., "!plat") then the attribute
+ must not match.
+ @param artifact_fname_glob Match artifacts with this filename glob.
+ @param path_in_artifact On match, extract this file in the artifact,..
+ @param output_pkg_path ..and write it to this location in the package.
+ Defaults to path_in_artifact.
+ @param artifact_fname_excludes Exclude artifacts matching these
+ filenames.
+
+        Pass a list of Mapping objects to Package.apply_mappings() to
+        perform all mappings.
+ """
+ super(Mapping, self).__init__()
+ self.attributes = attributes
+ self.fname_glob = artifact_fname_glob
+ self.input_path = path_in_artifact
+ if output_pkg_path is None:
+ self.output_path = self.input_path
+ else:
+ self.output_path = output_pkg_path
+ self.name = self.output_path
+ self.fname_excludes = artifact_fname_excludes
+
+ def __str__(self):
+ return self.name
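+
+
+# Example (illustrative; mirrors the mappings in staticpackage.py):
+# extract the static library from a Linux x64 tarball artifact, skipping
+# artifacts built with gssapi:
+#
+#   Mapping({'arch': 'x64', 'plat': 'linux', '!extra': 'gssapi'},
+#           'librdkafka.tgz',
+#           './usr/local/lib/librdkafka-static.a',
+#           'librdkafka_glibc_linux_amd64.a')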
+
+
+class Package (object):
+ """ Generic Package class
+ A Package is a working container for one or more output
+ packages for a specific package type (e.g., nuget) """
+
+ def __init__(self, version, arts):
+ super(Package, self).__init__()
+ self.version = version
+ self.arts = arts
+ # These may be overwritten by specific sub-classes:
+ self.artifacts = arts.artifacts
+ # Staging path, filled in later.
+ self.stpath = None
+ self.kv = {'version': version}
+ self.files = dict()
+
+ def add_file(self, file):
+ self.files[file] = True
+
+ def build(self):
+        """ Build package output(s), return a list of paths
+ to built packages """
+ raise NotImplementedError
+
+ def cleanup(self):
+ """ Optional cleanup routine for removing temporary files, etc. """
+ pass
+
+ def render(self, fname, destpath='.'):
+ """ Render template in file fname and save to destpath/fname,
+ where destpath is relative to stpath """
+
+ outf = os.path.join(self.stpath, destpath, fname)
+
+ if not os.path.isdir(os.path.dirname(outf)):
+ os.makedirs(os.path.dirname(outf), 0o0755)
+
+ with open(os.path.join('templates', fname), 'r') as tf:
+ tmpl = Template(tf.read())
+ with open(outf, 'w') as of:
+ of.write(tmpl.substitute(self.kv))
+
+ self.add_file(outf)
+
+ def copy_template(self, fname, target_fname=None, destpath='.'):
+ """ Copy template file to destpath/fname
+ where destpath is relative to stpath """
+
+ if target_fname is None:
+ target_fname = fname
+ outf = os.path.join(self.stpath, destpath, target_fname)
+
+ if not os.path.isdir(os.path.dirname(outf)):
+ os.makedirs(os.path.dirname(outf), 0o0755)
+
+ shutil.copy(os.path.join('templates', fname), outf)
+
+ self.add_file(outf)
+
+ def apply_mappings(self):
+ """ Applies a list of Mapping to match and extract files from
+ matching artifacts. If any of the listed Mappings can not be
+ fulfilled an exception is raised. """
+
+ assert self.mappings
+ assert len(self.mappings) > 0
+
+ for m in self.mappings:
+
+ artifact = None
+ for a in self.arts.artifacts:
+ found = True
+
+ for attr in m.attributes:
+ if attr[0] == '!':
+ # Require attribute NOT to match
+ origattr = attr
+ attr = attr[1:]
+
+ if attr in a.info and \
+                                a.info[attr] == m.attributes[origattr]:
+ found = False
+ break
+ else:
+ # Require attribute to match
+ if attr not in a.info or \
+ a.info[attr] != m.attributes[attr]:
+ found = False
+ break
+
+ if not fnmatch(a.fname, m.fname_glob):
+ found = False
+
+ for exclude in m.fname_excludes:
+ if exclude in a.fname:
+ found = False
+ break
+
+ if found:
+ artifact = a
+ break
+
+ if artifact is None:
+ raise MissingArtifactError(
+ '%s: unable to find artifact with tags %s matching "%s"' %
+ (m, str(m.attributes), m.fname_glob))
+
+ output_path = os.path.join(self.stpath, m.output_path)
+
+ try:
+ zfile.ZFile.extract(artifact.lpath, m.input_path, output_path)
+# except KeyError:
+# continue
+ except Exception as e:
+ raise Exception(
+ '%s: file not found in archive %s: %s. Files in archive are:\n%s' % # noqa: E501
+ (m, artifact.lpath, e, '\n'.join(zfile.ZFile(
+ artifact.lpath).getnames())))
+
+ # Check that the file type matches.
+ if magic_mismatch(output_path, a):
+ os.unlink(output_path)
+ continue
+
+ # All mappings found and extracted.
+
+ def verify(self, path):
+ """ Verify package content based on the previously defined mappings """
+
+ missing = list()
+ with zfile.ZFile(path, 'r') as zf:
+ print('Verifying %s:' % path)
+
+            # Zip files may url-encode filenames; unquote them before matching.
+ pkgd = [unquote(x) for x in zf.getnames()]
+ missing = [x for x in self.mappings if x.output_path not in pkgd]
+
+ if len(missing) > 0:
+ print(
+ 'Missing files in package %s:\n%s' %
+ (path, '\n'.join([str(x) for x in missing])))
+ print('Actual: %s' % '\n'.join(pkgd))
+ return False
+
+ print('OK - %d expected files found' % len(self.mappings))
+ return True
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh
new file mode 100755
index 000000000..598dd4cd7
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/push-to-nuget.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Upload NuGet package to NuGet.org using provided NuGet API key
+#
+
+set -e
+
+key=$1
+pkg=$2
+
+if [[ -z $key || -z $pkg ]]; then
+ echo "Usage: $0 <nuget.org-api-key> <nuget-package>"
+ exit 1
+fi
+
+set -u
+
+docker run -t -v $PWD/$pkg:/$pkg mcr.microsoft.com/dotnet/sdk:3.1 \
+       dotnet nuget push /$pkg -n -s https://api.nuget.org/v3/index.json \
+       -k $key
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py
new file mode 100755
index 000000000..f230a580c
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/release.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+#
+#
+# NuGet release packaging tool.
+# Creates a NuGet package from CI artifacts on S3.
+#
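+# Usage sketch (tag value is illustrative):
+#   ./release.py --s3 v2.1.0
+#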
+
+
+import os
+import sys
+import argparse
+import time
+import packaging
+import nugetpackage
+import staticpackage
+
+
+dry_run = False
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--s3",
+ help="Collect artifacts from S3 bucket",
+ action="store_true")
+ parser.add_argument("--dry-run",
+ help="Locate artifacts but don't actually "
+ "download or do anything",
+ action="store_true")
+ parser.add_argument(
+ "--directory",
+ help="Download directory (default: dl-<tag>)",
+ default=None)
+ parser.add_argument(
+ "--no-cleanup",
+ help="Don't clean up temporary folders",
+ action="store_true")
+ parser.add_argument(
+ "--sha",
+ help="Also match on this git sha1",
+ default=None)
+ parser.add_argument(
+ "--ignore-tag",
+ help="Ignore the artifacts' tag attribute (for devel use only)",
+ action="store_true",
+ default=False)
+ parser.add_argument(
+ "--nuget-version",
+ help="The nuget package version (defaults to same as tag)",
+ default=None)
+    parser.add_argument("--upload",
+                        help="Upload package to NuGet after building, "
+ "using provided NuGet API key "
+ "(either file or the key itself)",
+ default=None,
+ type=str)
+ parser.add_argument(
+ "--class",
+ help="Packaging class (either NugetPackage or StaticPackage)",
+ default="NugetPackage",
+ dest="pkgclass")
+ parser.add_argument(
+ "--retries",
+ help="Number of retries to collect artifacts",
+ default=0,
+ type=int)
+ parser.add_argument("tag", help="Git tag to collect")
+
+ args = parser.parse_args()
+ dry_run = args.dry_run
+ retries = args.retries
+ if not args.directory:
+ args.directory = 'dl-%s' % args.tag
+
+ match = {}
+ if not args.ignore_tag:
+ match['tag'] = args.tag
+
+ if args.sha is not None:
+ match['sha'] = args.sha
+
+ if args.pkgclass == "NugetPackage":
+ pkgclass = nugetpackage.NugetPackage
+ elif args.pkgclass == "StaticPackage":
+ pkgclass = staticpackage.StaticPackage
+ else:
+ raise ValueError(f'Unknown packaging class {args.pkgclass}: '
+ 'should be one of NugetPackage or StaticPackage')
+
+ try:
+ match.update(getattr(pkgclass, 'match'))
+ except BaseException:
+ pass
+
+ arts = packaging.Artifacts(match, args.directory)
+
+ # Collect common local artifacts, such as support files.
+ arts.collect_local('common', req_tag=False)
+
+ while True:
+ if args.s3:
+ arts.collect_s3()
+
+ arts.collect_local(arts.dlpath)
+
+ if len(arts.artifacts) == 0:
+ raise ValueError('No artifacts found for %s' % match)
+
+ print('Collected artifacts (%s):' % (arts.dlpath))
+ for a in arts.artifacts:
+ print(' %s' % a.lpath)
+ print('')
+
+ if args.nuget_version is not None:
+ package_version = args.nuget_version
+ else:
+ package_version = args.tag
+
+ print('')
+
+ if dry_run:
+ sys.exit(0)
+
+ print('Building packages:')
+
+ try:
+ p = pkgclass(package_version, arts)
+ pkgfile = p.build(buildtype='release')
+ break
+ except packaging.MissingArtifactError as e:
+ if retries <= 0 or not args.s3:
+ if not args.no_cleanup:
+ p.cleanup()
+ raise e
+
+ p.cleanup()
+ retries -= 1
+ print(e)
+ print('Retrying in 30 seconds')
+ time.sleep(30)
+
+ if not args.no_cleanup:
+ p.cleanup()
+ else:
+ print(' --no-cleanup: leaving %s' % p.stpath)
+
+ print('')
+
+ if not p.verify(pkgfile):
+ print('Package failed verification.')
+ sys.exit(1)
+
+ print('Created package: %s' % pkgfile)
+
+ if args.upload is not None:
+ if os.path.isfile(args.upload):
+ with open(args.upload, 'r') as f:
+ nuget_key = f.read().replace('\n', '')
+ else:
+ nuget_key = args.upload
+
+ print('Uploading %s to NuGet' % pkgfile)
+ r = os.system("./push-to-nuget.sh '%s' %s" % (nuget_key, pkgfile))
+ assert int(r) == 0, \
+ f"NuGet upload failed with exit code {r}, see previous errors"
+ print('%s successfully uploaded to NuGet' % pkgfile)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt
new file mode 100644
index 000000000..0fa2fd19c
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/requirements.txt
@@ -0,0 +1,3 @@
+boto3==1.18.45
+rpmfile==1.0.8
+filemagic==1.6
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py
new file mode 100644
index 000000000..38567bb60
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/staticpackage.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+#
+# Create self-contained static-library tar-ball package
+#
+
+import os
+import tempfile
+import shutil
+import subprocess
+from packaging import Package, Mapping
+
+
+class StaticPackage (Package):
+ """ Create a tar-ball with self-contained static libraries.
+ These are later imported into confluent-kafka-go. """
+
+ # Make sure gssapi (cyrus-sasl) is not linked, since that is a
+ # dynamic linkage, by specifying negative match '!extra': 'gssapi'.
+    # Except on OSX, where cyrus-sasl is always available, and on
+    # Windows, where it is never linked.
+ #
+ # Match statically linked artifacts (which are included in 'all' builds)
+ mappings = [
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/include/librdkafka/rdkafka.h',
+ 'rdkafka.h'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/share/doc/librdkafka/LICENSES.txt',
+ 'LICENSES.txt'),
+
+ # glibc linux static lib and pkg-config file
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_glibc_linux_amd64.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'centos6',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_glibc_linux_amd64.pc'),
+
+ # glibc linux arm64 static lib and pkg-config file
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_glibc_linux_arm64.a'),
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'centos7',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_glibc_linux_arm64.pc'),
+
+ # musl linux static lib and pkg-config file
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_musl_linux_amd64.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_musl_linux_amd64.pc'),
+
+ # musl linux arm64 static lib and pkg-config file
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_musl_linux_arm64.a'),
+ Mapping({'arch': 'arm64',
+ 'plat': 'linux',
+ 'dist': 'alpine',
+ 'lnk': 'all',
+ '!extra': 'gssapi'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_musl_linux_arm64.pc'),
+
+ # osx x64 static lib and pkg-config file
+ Mapping({'arch': 'x64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_darwin_amd64.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_darwin_amd64.pc'),
+
+ # osx arm64 static lib and pkg-config file
+ Mapping({'arch': 'arm64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/librdkafka-static.a',
+ 'librdkafka_darwin_arm64.a'),
+ Mapping({'arch': 'arm64',
+ 'plat': 'osx',
+ 'lnk': 'all'},
+ 'librdkafka.tgz',
+ './usr/local/lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_darwin_arm64.pc'),
+
+ # win static lib and pkg-config file (mingw)
+ Mapping({'arch': 'x64',
+ 'plat': 'win',
+ 'dist': 'mingw',
+ 'lnk': 'static'},
+ 'librdkafka.tgz',
+ './lib/librdkafka-static.a', 'librdkafka_windows.a'),
+ Mapping({'arch': 'x64',
+ 'plat': 'win',
+ 'dist': 'mingw',
+ 'lnk': 'static'},
+ 'librdkafka.tgz',
+ './lib/pkgconfig/rdkafka-static.pc',
+ 'librdkafka_windows.pc'),
+ ]
+
+ def __init__(self, version, arts):
+ super(StaticPackage, self).__init__(version, arts)
+
+ def cleanup(self):
+ if os.path.isdir(self.stpath):
+ shutil.rmtree(self.stpath)
+
+ def build(self, buildtype):
+ """ Build single package for all artifacts. """
+
+ self.stpath = tempfile.mkdtemp(prefix="out-", dir=".")
+
+ self.apply_mappings()
+
+ print('Tree extracted to %s' % self.stpath)
+
+        # After creating a bare-bones layout, create a tarball.
+ outname = "librdkafka-static-bundle-%s.tgz" % self.version
+ print('Writing to %s in %s' % (outname, self.stpath))
+ subprocess.check_call("(cd %s && tar cvzf ../%s .)" %
+ (self.stpath, outname),
+ shell=True)
+
+ return outname
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec
new file mode 100644
index 000000000..dbfd7b1aa
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.nuspec
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package xmlns="http://schemas.microsoft.com/packaging/2011/10/nuspec.xsd">
+ <metadata>
+ <id>librdkafka.redist</id>
+ <version>${version}</version>
+ <title>librdkafka - redistributable</title>
+ <authors>Magnus Edenhill, edenhill</authors>
+ <owners>Confluent Inc.</owners>
+ <requireLicenseAcceptance>false</requireLicenseAcceptance>
+ <licenseUrl>https://github.com/confluentinc/librdkafka/blob/master/LICENSES.txt</licenseUrl>
+ <projectUrl>https://github.com/confluentinc/librdkafka</projectUrl>
+ <description>The Apache Kafka C/C++ client library - redistributable</description>
+ <summary>The Apache Kafka C/C++ client library</summary>
+ <releaseNotes>Release of librdkafka</releaseNotes>
+ <copyright>Copyright 2012-2023</copyright>
+ <tags>native apache kafka librdkafka C C++ nativepackage</tags>
+ </metadata>
+ <files>
+ <file src="**" />
+ </files>
+</package>
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props
new file mode 100644
index 000000000..c1615c61c
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.props
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ <Content Include="$(MSBuildThisFileDirectory)..\runtimes\win-x86\native\*">
+ <Link>librdkafka\x86\%(Filename)%(Extension)</Link>
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ <Content Include="$(MSBuildThisFileDirectory)..\runtimes\win-x64\native\*">
+ <Link>librdkafka\x64\%(Filename)%(Extension)</Link>
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+ </Content>
+ </ItemGroup>
+ <ItemDefinitionGroup>
+ <ClCompile>
+ <AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ </ClCompile>
+ </ItemDefinitionGroup>
+</Project>
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets
new file mode 100644
index 000000000..d174cda11
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/templates/librdkafka.redist.targets
@@ -0,0 +1,19 @@
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemDefinitionGroup>
+ <Link>
+ <AdditionalDependencies Condition="'$(Platform)' == 'x64'">$(MSBuildThisFileDirectory)lib\win\x64\win-x64-Release\v142\librdkafka.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalDependencies Condition="'$(Platform)' != 'x64'">$(MSBuildThisFileDirectory)lib\win\x86\win-x86-Release\v142\librdkafka.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories Condition="'$(Platform)' == 'x64'">$(MSBuildThisFileDirectory)lib\win\x64\win-x64-Release\v142;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <AdditionalLibraryDirectories Condition="'$(Platform)' != 'x64'">$(MSBuildThisFileDirectory)lib\win\x86\win-x86-Release\v142;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ </Link>
+ <ClCompile>
+ <AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ </ClCompile>
+ </ItemDefinitionGroup>
+ <ItemGroup Condition="'$(Platform)' == 'x64'">
+ <ReferenceCopyLocalPaths Include="$(MSBuildThisFileDirectory)..\..\runtimes\win-x64\native\*.dll" />
+ </ItemGroup>
+ <ItemGroup Condition="'$(Platform)' != 'x64'">
+ <ReferenceCopyLocalPaths Include="$(MSBuildThisFileDirectory)..\..\runtimes\win-x86\native\*.dll" />
+ </ItemGroup>
+</Project>
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/__init__.py
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py
new file mode 100644
index 000000000..51f2df25f
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/nuget/zfile/zfile.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+
+import os
+import tarfile
+import zipfile
+import rpmfile
+
+
+class ZFile (object):
+ def __init__(self, path, mode='r', ext=None):
+ super(ZFile, self).__init__()
+
+ if ext is not None:
+ _ext = ext
+ else:
+ _ext = os.path.splitext(path)[-1]
+ if _ext.startswith('.'):
+ _ext = _ext[1:]
+
+ if zipfile.is_zipfile(path) or _ext == 'zip':
+ self.f = zipfile.ZipFile(path, mode)
+ elif tarfile.is_tarfile(path) or _ext in ('tar', 'tgz', 'gz'):
+ self.f = tarfile.open(path, mode)
+ elif _ext == 'rpm':
+ self.f = rpmfile.open(path, mode + 'b')
+ else:
+ raise ValueError('Unsupported file extension: %s' % path)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ if callable(getattr(self.f, 'close', None)):
+ self.f.close()
+
+ def getnames(self):
+ if isinstance(self.f, zipfile.ZipFile):
+ return self.f.namelist()
+ elif isinstance(self.f, tarfile.TarFile):
+ return self.f.getnames()
+ elif isinstance(self.f, rpmfile.RPMFile):
+ return [x.name for x in self.f.getmembers()]
+ else:
+ raise NotImplementedError
+
+ def headers(self):
+ if isinstance(self.f, rpmfile.RPMFile):
+ return self.f.headers
+ else:
+ return dict()
+
+ def extract_to(self, member, path):
+        """ Extract compressed file's \\p member to \\p path.
+        If \\p path is a directory the member's basename will be used as
+        the filename, otherwise path is considered the full file path name. """
+
+ if not os.path.isdir(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+
+ if os.path.isdir(path):
+ path = os.path.join(path, os.path.basename(member))
+
+ with open(path, 'wb') as of:
+ if isinstance(self.f, zipfile.ZipFile):
+ zf = self.f.open(member)
+ else:
+ zf = self.f.extractfile(member)
+
+ while True:
+ b = zf.read(1024 * 100)
+ if b:
+ of.write(b)
+ else:
+ break
+
+ zf.close()
+
+ @classmethod
+ def extract(cls, zpath, member, outpath):
+ """
+ Extract file member (full internal path) to output from
+ archive zpath.
+ """
+
+ with ZFile(zpath) as zf:
+ zf.extract_to(member, outpath)
+
+ @classmethod
+ def compress(cls, zpath, paths, stripcnt=0, ext=None):
+ """
+ Create new compressed file \\p zpath containing files in \\p paths
+ """
+
+ with ZFile(zpath, 'w', ext=ext) as zf:
+ for p in paths:
+ outp = os.path.sep.join(p.split(os.path.sep)[stripcnt:])
+ print('zip %s to %s (stripcnt %d)' % (p, outp, stripcnt))
+ zf.f.write(p, outp)
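+
+
+# Usage sketch (paths are illustrative):
+#   ZFile.extract('librdkafka.tgz',
+#                 './usr/local/include/librdkafka/rdkafka.h',
+#                 'out/rdkafka.h')
+# opens the archive, locates the member by its full internal path and
+# writes it to out/rdkafka.h.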
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/.gitignore b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/.gitignore
new file mode 100644
index 000000000..4bfdf21ed
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/.gitignore
@@ -0,0 +1,7 @@
+*.log
+available_pkgs
+installed_pkgs
+pkgs-*
+arts-*
+cache
+output
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/Makefile b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/Makefile
new file mode 100644
index 000000000..c5c8f8c10
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/Makefile
@@ -0,0 +1,92 @@
+PACKAGE_NAME?= librdkafka
+VERSION?= $(shell ../get_version.py ../../src/rdkafka.h)
+
+# Jenkins CI integration
+BUILD_NUMBER?= 1
+
+MOCK_CONFIG?=default
+
+RESULT_DIR?=pkgs-$(VERSION)-$(BUILD_NUMBER)-$(MOCK_CONFIG)
+
+# Where built packages are copied with `make copy-artifacts`
+ARTIFACTS_DIR?=../../artifacts
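+
+# Example (illustrative): build for a specific mock config and copy the
+# resulting RPMs to $(ARTIFACTS_DIR):
+#   make MOCK_CONFIG=centos+epel-7-x86_64 all copy-artifacts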
+
+all: rpm
+
+
+SOURCES:
+ mkdir -p SOURCES
+
+archive: SOURCES
+ cd ../../ && \
+ git archive --prefix=$(PACKAGE_NAME)-$(VERSION)/ \
+ -o packaging/rpm/SOURCES/$(PACKAGE_NAME)-$(VERSION).tar.gz HEAD
+
+
+build_prepare: archive
+ mkdir -p $(RESULT_DIR)
+ rm -f $(RESULT_DIR)/$(PACKAGE_NAME)*.rpm
+
+
+srpm: build_prepare
+ /usr/bin/mock \
+ -r $(MOCK_CONFIG) \
+ $(MOCK_OPTIONS) \
+ --define "__version $(VERSION)" \
+ --define "__release $(BUILD_NUMBER)" \
+ --enable-network \
+ --resultdir=$(RESULT_DIR) \
+ --no-clean --no-cleanup-after \
+ --install epel-release \
+ --buildsrpm \
+ --spec=librdkafka.spec \
+ --sources=SOURCES || \
+ (tail -n 100 pkgs-$(VERSION)*/*log ; false)
+ @echo "======= Source RPM now available in $(RESULT_DIR) ======="
+
+rpm: srpm
+ /usr/bin/mock \
+ -r $(MOCK_CONFIG) \
+ $(MOCK_OPTIONS) \
+ --define "__version $(VERSION)"\
+ --define "__release $(BUILD_NUMBER)"\
+ --enable-network \
+ --resultdir=$(RESULT_DIR) \
+ --no-clean --no-cleanup-after \
+ --rebuild $(RESULT_DIR)/$(PACKAGE_NAME)*.src.rpm || \
+ (tail -n 100 pkgs-$(VERSION)*/*log ; false)
+ @echo "======= Binary RPMs now available in $(RESULT_DIR) ======="
+
+copy-artifacts:
+ cp $(RESULT_DIR)/*rpm $(ARTIFACTS_DIR)
+
+clean:
+ rm -rf SOURCES
+ /usr/bin/mock -r $(MOCK_CONFIG) --clean
+
+distclean: clean
+ rm -f build.log root.log state.log available_pkgs installed_pkgs \
+ *.rpm *.tar.gz
+
+# Prepare ubuntu 14.04 for building RPMs with mock.
+# - older versions of mock need the config file to reside in /etc/mock,
+#   so we copy it there.
+# - add a mock system group (if it does not already exist)
+# - add the current user to the mock group.
+# - prepare mock environment with some needed packages.
+# NOTE: This target should be run with sudo.
+prepare_ubuntu:
+ apt-get -qq update
+ apt-get install -y -qq mock make git python-lzma
+ cp *.cfg /etc/mock/
+ addgroup --system mock || true
+ adduser $$(whoami) mock
+ /usr/bin/mock -r $(MOCK_CONFIG) --init
+ /usr/bin/mock -r $(MOCK_CONFIG) \
+ --enable-network \
+ --no-cleanup-after \
+ --install epel-release shadow-utils
+
+prepare_centos:
+ yum install -y -q mock make git
+ cp *.cfg /etc/mock/
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/README.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/README.md
new file mode 100644
index 000000000..92a6eca95
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/README.md
@@ -0,0 +1,23 @@
+# RPM packages for librdkafka
+
+On a system with the mock RPM build tool installed, simply run make to create RPM packages:
+
+ $ make
+
+Additional mock options may be specified using MOCK_OPTIONS:
+
+ $ make MOCK_OPTIONS='--bootstrap-chroot'
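+
+The mock configuration may be selected with MOCK_CONFIG (default: "default"),
+for example:
+
+    $ make MOCK_CONFIG=centos+epel-7-x86_64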
+
+
+## Build with Mock on docker
+
+From the librdkafka top-level directory:
+
+ $ packaging/rpm/mock-on-docker.sh
+
+Wait for the packages to build; they will be copied to the top-level artifacts/ directory.
+
+Test the packages:
+
+ $ packaging/rpm/tests/test-on-docker.sh
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/el7-x86_64.cfg b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/el7-x86_64.cfg
new file mode 100644
index 000000000..502282749
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/el7-x86_64.cfg
@@ -0,0 +1,40 @@
+config_opts['root'] = 'el7-x86_64'
+config_opts['target_arch'] = 'x86_64'
+config_opts['legal_host_arches'] = ('x86_64',)
+config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
+config_opts['dist'] = 'el7' # only useful for --resultdir variable subst
+config_opts['releasever'] = '7'
+config_opts['docker_unshare_warning'] = False
+config_opts['nosync'] = True
+
+config_opts['yum.conf'] = """
+[main]
+keepcache=1
+debuglevel=2
+reposdir=/dev/null
+logfile=/var/log/yum.log
+retries=15
+obsoletes=1
+gpgcheck=0
+assumeyes=1
+syslog_ident=mock
+syslog_device=
+mdpolicy=group:primary
+
+# repos
+[base]
+name=BaseOS
+mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=os
+failovermethod=priority
+
+[updates]
+name=updates
+enabled=1
+mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=updates
+failovermethod=priority
+
+[epel]
+name=epel
+mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=epel-7&arch=x86_64
+failovermethod=priority
+"""
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/librdkafka.spec b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/librdkafka.spec
new file mode 100644
index 000000000..4f9e8c0d0
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/librdkafka.spec
@@ -0,0 +1,118 @@
+Name: librdkafka
+Version: %{__version}
+Release: %{__release}%{?dist}
+%define soname 1
+
+Summary: The Apache Kafka C library
+Group: Development/Libraries/C and C++
+License: BSD-2-Clause
+URL: https://github.com/edenhill/librdkafka
+Source: librdkafka-%{version}.tar.gz
+
+BuildRequires: zlib-devel libstdc++-devel gcc >= 4.1 gcc-c++ cyrus-sasl-devel
+BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
+
+%define _source_payload w9.gzdio
+%define _binary_payload w9.gzdio
+
+%description
+librdkafka is the C/C++ client library implementation of the Apache Kafka protocol, containing both Producer and Consumer support.
+
+
+%package -n %{name}%{soname}
+Summary: The Apache Kafka C library
+Group: Development/Libraries/C and C++
+Requires: zlib libstdc++ cyrus-sasl
+# openssl libraries were split out into openssl-libs in RHEL7
+%if 0%{?rhel} >= 7
+Requires: openssl-libs >= 1.0.2
+BuildRequires: openssl-devel >= 1.0.2 python3
+%else
+Requires: openssl
+# python34 is provided from epel-release, but that package needs to be installed
+# prior to rpmbuild working out these dependencies (such as from mock).
+BuildRequires: openssl-devel python34
+%endif
+
+%description -n %{name}%{soname}
+librdkafka is the C/C++ client library implementation of the Apache Kafka protocol, containing both Producer and Consumer support.
+
+
+%package -n %{name}-devel
+Summary: The Apache Kafka C library (Development Environment)
+Group: Development/Libraries/C and C++
+Requires: %{name}%{soname} = %{version}
+
+%description -n %{name}-devel
+librdkafka is the C/C++ client library implementation of the Apache Kafka protocol, containing both Producer and Consumer support.
+
+This package contains headers and libraries required to build applications
+using librdkafka.
+
+
+%prep
+%setup -q -n %{name}-%{version}
+
+# --install-deps will install missing dependencies that are not available
+# through BuildRequires, such as libzstd, which will be linked statically.
+%configure --install-deps --disable-lz4-ext
+
+%build
+cat config.log
+make
+examples/rdkafka_example -X builtin.features
+
+%install
+rm -rf %{buildroot}
+DESTDIR=%{buildroot} make install
+
+%clean
+rm -rf %{buildroot}
+
+%post -n %{name}%{soname} -p /sbin/ldconfig
+%postun -n %{name}%{soname} -p /sbin/ldconfig
+
+%files -n %{name}%{soname}
+%defattr(444,root,root)
+%{_libdir}/librdkafka.so.%{soname}
+%{_libdir}/librdkafka++.so.%{soname}
+%defattr(-,root,root)
+%doc %{_docdir}/librdkafka/README.md
+%doc %{_docdir}/librdkafka/LICENSE
+%doc %{_docdir}/librdkafka/CONFIGURATION.md
+%doc %{_docdir}/librdkafka/INTRODUCTION.md
+%doc %{_docdir}/librdkafka/STATISTICS.md
+%doc %{_docdir}/librdkafka/CHANGELOG.md
+%doc %{_docdir}/librdkafka/LICENSES.txt
+
+%defattr(-,root,root)
+#%{_bindir}/rdkafka_example
+#%{_bindir}/rdkafka_performance
+
+
+%files -n %{name}-devel
+%defattr(-,root,root)
+%{_includedir}/librdkafka
+%defattr(444,root,root)
+%{_libdir}/librdkafka.a
+%{_libdir}/librdkafka-static.a
+%{_libdir}/librdkafka.so
+%{_libdir}/librdkafka++.a
+%{_libdir}/librdkafka++.so
+%{_libdir}/pkgconfig/rdkafka++.pc
+%{_libdir}/pkgconfig/rdkafka.pc
+%{_libdir}/pkgconfig/rdkafka-static.pc
+%{_libdir}/pkgconfig/rdkafka++-static.pc
+
+%changelog
+* Thu Apr 09 2015 Eduard Iskandarov <e.iskandarov@corp.mail.ru> 0.8.6-0
+- 0.8.6 simplify build process
+
+* Fri Oct 24 2014 Magnus Edenhill <rdkafka@edenhill.se> 0.8.5-0
+- 0.8.5 release
+
+* Mon Aug 18 2014 Magnus Edenhill <rdkafka@edenhill.se> 0.8.4-0
+- 0.8.4 release
+
+* Mon Mar 17 2014 Magnus Edenhill <vk@edenhill.se> 0.8.3-0
+- Initial RPM package
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/mock-on-docker.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/mock-on-docker.sh
new file mode 100755
index 000000000..eec3d54a7
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/mock-on-docker.sh
@@ -0,0 +1,97 @@
+#!/bin/bash
+#
+#
+#
+# Run mock in docker to create RPM packages of librdkafka.
+#
+# Usage:
+# packaging/rpm/mock-on-docker.sh [<mock configs ..>]
+#
+
+set -ex
+
+_DOCKER_IMAGE=fedora:35
+_MOCK_CONFIGS="centos+epel-7-x86_64 centos-stream+epel-8-x86_64"
+
+if [[ $1 == "--build" ]]; then
+ on_builder=1
+ shift
+else
+ on_builder=0
+fi
+
+
+if [[ -n $* ]]; then
+ _MOCK_CONFIGS="$*"
+fi
+
+
+if [[ $on_builder == 0 ]]; then
+ #
+ # Running on host, fire up a docker container and run the latter
+ # part of this script in docker.
+ #
+
+ if [[ ! -f configure.self ]]; then
+ echo "$0 must be run from librdkafka top directory"
+ exit 1
+ fi
+
+ mkdir -p ${PWD}/packaging/rpm/cache/mock
+
+ docker run \
+ --privileged \
+ -t \
+ -v ${PWD}/packaging/rpm/cache/mock:/var/cache/mock \
+ -v ${PWD}:/io \
+ $_DOCKER_IMAGE \
+ /io/packaging/rpm/mock-on-docker.sh --build $_MOCK_CONFIGS
+
+ mkdir -p artifacts
+ for MOCK_CONFIG in $_MOCK_CONFIGS ; do
+ cp -vr --no-preserve=ownership packaging/rpm/arts-${MOCK_CONFIG}/*rpm artifacts/
+ done
+
+ echo "All Done"
+
+else
+ #
+ # Running in docker container.
+ #
+
+ dnf install -y -q mock mock-core-configs make git
+
+ echo "%_netsharedpath /sys:/proc" >> /etc/rpm/macros.netshared
+
+ pushd /io/packaging/rpm
+
+ for MOCK_CONFIG in $_MOCK_CONFIGS ; do
+ cfg_file=/etc/mock/${MOCK_CONFIG}.cfg
+ if [[ ! -f $cfg_file ]]; then
+ echo "Error: Mock config $cfg_file does not exist"
+ exit 1
+ fi
+
+ echo "config_opts['plugin_conf']['bind_mount_enable'] = False" >> $cfg_file
+ echo "config_opts['docker_unshare_warning'] = False" >> $cfg_file
+ echo "Building $MOCK_CONFIG in $PWD"
+ cat $cfg_file
+
+ echo "Setting git safe.directory"
+ git config --global --add safe.directory /io
+
+ export MOCK_CONFIG=$MOCK_CONFIG
+ make all
+
+ echo "Done building $MOCK_CONFIG: copying artifacts"
+ artdir="arts-$MOCK_CONFIG"
+ mkdir -p "$artdir"
+ make ARTIFACTS_DIR="$artdir" copy-artifacts
+
+ done
+
+ popd
+ echo "Done"
+fi
+
+exit 0
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/.gitignore b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/.gitignore
new file mode 100644
index 000000000..333a2b7ac
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/.gitignore
@@ -0,0 +1,2 @@
+test
+testcpp
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/Makefile b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/Makefile
new file mode 100644
index 000000000..edd457997
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/Makefile
@@ -0,0 +1,25 @@
+
+PROGS?=test test-static testcpp testcpp-static
+
+all: $(PROGS)
+
+test: test.c
+ $(CC) -O2 -Werror -Wall $^ -o $@ $$(pkg-config --libs rdkafka)
+
+test-static: test.c
+ $(CC) -O2 -Werror -Wall $^ -o $@ $$(pkg-config --libs --static rdkafka-static)
+
+testcpp: test.cpp
+ $(CXX) -O2 -Werror -Wall $^ -o $@ $$(pkg-config --libs rdkafka++)
+
+testcpp-static: test.cpp
+ $(CXX) -O2 -Werror -Wall $^ -o $@ $$(pkg-config --libs rdkafka++-static)
+
+run:
+ @(for p in $(PROGS); do \
+ echo "# Running $$p" ; \
+ ./$$p || (echo $$p failed ; exit 1) ; \
+ done)
+
+clean:
+ rm -f $(PROGS)
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/README.md b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/README.md
new file mode 100644
index 000000000..8d1107b66
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/README.md
@@ -0,0 +1,8 @@
+# Test librdkafka RPMs using docker
+
+After building the RPMs (see README.md in the parent directory), test
+the RPMs on the supported CentOS/RHEL versions using:
+
+ $ packaging/rpm/tests/test-on-docker.sh
+
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/run-test.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/run-test.sh
new file mode 100755
index 000000000..c1234a945
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/run-test.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+#
+# This script runs in the docker container, performing:
+# * install build toolchain
+# * install librdkafka rpms
+# * builds test apps
+# * runs test apps
+#
+# Usage: $0 <docker-image-name>
+
+set -ex
+
+pushd /v
+
+_IMG=$1
+
+echo "Testing on $_IMG"
+
+if [[ $_IMG == "centos:6" ]]; then
+ _EL=6
+ _INST="yum install -y -q"
+elif [[ $_IMG == "centos:7" ]]; then
+ _EL=7
+ _INST="yum install -y -q"
+ # centos:7 ships with openssl-libs 1.0.1 which is outdated and not
+ # ABI-compatible with 1.0.2 (which we build with).
+ # Upgrade openssl-libs, as users would, to prevent missing symbols.
+ _UPG="yum upgrade -y openssl-libs"
+else
+ _EL=8
+ _INST="dnf install -y -q"
+fi
+
+$_INST gcc gcc-c++ make pkg-config
+
+if [[ -n $_UPG ]]; then
+ $_UPG
+fi
+
+$_INST /rpms/librdkafka1-*el${_EL}.x86_64.rpm /rpms/librdkafka-devel-*el${_EL}.x86_64.rpm
+
+make clean all
+
+make run
+
+make clean
+
+echo "$_IMG is all good!"
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test-on-docker.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test-on-docker.sh
new file mode 100755
index 000000000..2c12ff792
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test-on-docker.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+#
+#
+# Test librdkafka packages in <rpmdirectory> using docker.
+# Must be executed from the librdkafka top-level directory.
+#
+# Usage:
+# packaging/rpm/test-on-docker.sh [<rpm-dir>]
+
+set -ex
+
+if [[ ! -f configure.self ]]; then
+ echo "Must be executed from the librdkafka top-level directory"
+ exit 1
+fi
+
+_DOCKER_IMAGES="centos:7 redhat/ubi8:8.5-226"
+_RPMDIR=artifacts
+
+if [[ -n $1 ]]; then
+ _RPMDIR="$1"
+fi
+
+_RPMDIR=$(readlink -f $_RPMDIR)
+
+if [[ ! -d $_RPMDIR ]]; then
+ echo "$_RPMDIR does not exist"
+ exit 1
+fi
+
+
+fails=""
+for _IMG in $_DOCKER_IMAGES ; do
+ if ! docker run \
+ -t \
+ -v $_RPMDIR:/rpms \
+ -v $(readlink -f packaging/rpm/tests):/v \
+ $_IMG \
+ /v/run-test.sh $_IMG ; then
+ echo "ERROR: $_IMG FAILED"
+ fails="${fails}$_IMG "
+ fi
+done
+
+if [[ -n $fails ]]; then
+ echo "##################################################"
+ echo "# Package verification failed for:"
+ echo "# $fails"
+ echo "# See previous errors"
+ echo "##################################################"
+ exit 1
+fi
+
+exit 0
+
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.c b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.c
new file mode 100644
index 000000000..cf39b6bcd
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.c
@@ -0,0 +1,77 @@
+#include <stdio.h>
+#include <string.h>
+#include <librdkafka/rdkafka.h>
+
+int main(int argc, char **argv) {
+ rd_kafka_conf_t *conf;
+ rd_kafka_t *rk;
+ char features[256];
+ size_t fsize = sizeof(features);
+ char errstr[512];
+ const char *exp_features[] = {
+ "gzip", "snappy", "ssl", "sasl", "regex",
+ "lz4", "sasl_gssapi", "sasl_plain", "sasl_scram", "plugins",
+ "zstd", "sasl_oauthbearer", NULL,
+ };
+ const char **exp;
+ int missing = 0;
+
+
+ printf("librdkafka %s\n", rd_kafka_version_str());
+
+ conf = rd_kafka_conf_new();
+ if (rd_kafka_conf_get(conf, "builtin.features", features, &fsize) !=
+ RD_KAFKA_CONF_OK) {
+ fprintf(stderr, "conf_get failed\n");
+ return 1;
+ }
+
+ printf("builtin.features %s\n", features);
+
+ /* Verify that expected features are enabled. */
+ for (exp = exp_features; *exp; exp++) {
+ const char *t = features;
+ size_t elen = strlen(*exp);
+ int match = 0;
+
+ while ((t = strstr(t, *exp))) {
+ if (t[elen] == ',' || t[elen] == '\0') {
+ match = 1;
+ break;
+ }
+ t += elen;
+ }
+
+ if (match)
+ continue;
+
+ fprintf(stderr, "ERROR: feature %s not found\n", *exp);
+ missing++;
+ }
+
+ if (rd_kafka_conf_set(conf, "security.protocol", "SASL_SSL", errstr,
+ sizeof(errstr)) ||
+ rd_kafka_conf_set(conf, "sasl.mechanism", "PLAIN", errstr,
+ sizeof(errstr)) ||
+ rd_kafka_conf_set(conf, "sasl.username", "username", errstr,
+ sizeof(errstr)) ||
+ rd_kafka_conf_set(conf, "sasl.password", "password", errstr,
+ sizeof(errstr)) ||
+ rd_kafka_conf_set(conf, "debug", "security", errstr,
+ sizeof(errstr))) {
+ fprintf(stderr, "conf_set failed: %s\n", errstr);
+ return 1;
+ }
+
+ rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf, errstr, sizeof(errstr));
+ if (!rk) {
+ fprintf(stderr, "rd_kafka_new failed: %s\n", errstr);
+ return 1;
+ }
+
+ printf("client name %s\n", rd_kafka_name(rk));
+
+ rd_kafka_destroy(rk);
+
+ return missing ? 1 : 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.cpp b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.cpp
new file mode 100644
index 000000000..d78a76710
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/rpm/tests/test.cpp
@@ -0,0 +1,34 @@
+#include <iostream>
+#include <librdkafka/rdkafkacpp.h>
+
+
+int main() {
+ std::cout << "librdkafka++ " << RdKafka::version_str() << std::endl;
+
+ RdKafka::Conf *conf = RdKafka::Conf::create(RdKafka::Conf::CONF_GLOBAL);
+
+ std::string features;
+
+ if (conf->get("builtin.features", features) != RdKafka::Conf::CONF_OK) {
+ std::cerr << "conf_get failed" << std::endl;
+ return 1;
+ }
+
+ std::cout << "builtin.features " << features << std::endl;
+
+ std::string errstr;
+ RdKafka::Producer *producer = RdKafka::Producer::create(conf, errstr);
+ if (!producer) {
+ std::cerr << "Producer::create failed: " << errstr << std::endl;
+ return 1;
+ }
+
+ delete conf;
+
+ std::cout << "client name " << producer->name() << std::endl;
+
+
+ delete producer;
+
+ return 0;
+}
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-deb-package.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-deb-package.sh
new file mode 100755
index 000000000..d9cad6d25
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-deb-package.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+#
+# Build the librdkafka Debian package on a bare-bones Debian host, such as ubuntu:16.04 (docker).
+#
+# Usage (from top-level librdkafka dir):
+# docker run -it -v $PWD:/v ubuntu:16.04 /v/packaging/tools/build-deb-package.sh 1.0.0 master
+#
+
+set -exu
+
+if [[ $# -ne 2 ]]; then
+ echo "Usage: $0 <package-version> <librdkafka-branch-or-tag>"
+ exit 1
+fi
+
+export VERSION=$1
+LRK_BRANCH=$2
+
+apt-get update
+
+# Install debian packaging tools and librdkafka build dependencies
+apt-get install -y git-buildpackage debhelper \
+ zlib1g-dev libssl-dev libsasl2-dev liblz4-dev
+
+
+# Clone the librdkafka git repo to a new location to avoid messing
+# up the librdkafka working directory.
+
+
+BUILD_DIR=$(mktemp -d)
+
+pushd $BUILD_DIR
+
+git clone /v librdkafka
+
+pushd librdkafka
+
+export DEBEMAIL="librdkafka packaging <rdkafka@edenhill.se>"
+git config user.email "rdkafka@edenhill.se"
+git config user.name "librdkafka packaging"
+
+DEB_BRANCH=origin/confluent-debian
+TMP_BRANCH=tmp-debian
+git checkout -b $TMP_BRANCH $LRK_BRANCH
+git merge --no-edit $DEB_BRANCH
+
+dch --newversion ${VERSION/-/\~}-1 "Release version $VERSION" --urgency low && dch --release --distribution unstable ""
+
+git commit -a -m "Tag Debian release $VERSION."
+
+make archive
+mkdir -p ../tarballs || true
+mv librdkafka-${VERSION}.tar.gz ../tarballs/librdkafka_${VERSION}.orig.tar.gz
+
+gbp buildpackage -us -uc --git-debian-branch=$TMP_BRANCH \
+ --git-upstream-tree=$LRK_BRANCH \
+ --git-verbose \
+ --git-builder="debuild --set-envvar=VERSION=$VERSION --set-envvar=SKIP_TESTS=y -i -I"
+
+
+popd # librdkafka
+
+popd # $BUILD_DIR
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-debian.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-debian.sh
new file mode 100755
index 000000000..e62ee5f67
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-debian.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+#
+# Build librdkafka on a bare-bones Debian host, such as the
+# mcr.microsoft.com/dotnet/sdk Docker image.
+#
+# Statically linked
+# WITH openssl 1.0, zlib
+# WITHOUT libsasl2, lz4(ext, using builtin instead)
+#
+# Usage (from top-level librdkafka dir):
+# docker run -it -v $PWD:/v mcr.microsoft.com/dotnet/sdk /v/packaging/tools/build-debian.sh /v /v/librdkafka-debian9.tgz
+#
+
+
+set -ex
+
+LRK_DIR=$1
+shift
+OUT_TGZ=$1
+shift
+CONFIG_ARGS=$*
+
+if [[ ! -f $LRK_DIR/configure.self || -z $OUT_TGZ ]]; then
+    echo "Usage: $0 <librdkafka-root-directory> <output-tgz> [<configure-args..>]"
+ exit 1
+fi
+
+set -u
+
+apt-get update
+apt-get install -y gcc g++ zlib1g-dev python3 git-core make patch
+
+
+# Copy the librdkafka git archive to a new location to avoid messing
+# up the librdkafka working directory.
+
+BUILD_DIR=$(mktemp -d)
+
+pushd $BUILD_DIR
+
+DEST_DIR=$PWD/dest
+mkdir -p $DEST_DIR
+
+# Workaround for newer Git not allowing clone directory to be owned by
+# another user (which is a questionable limitation for the read-only archive
+# command..)
+git config --global --add safe.directory /v
+
+(cd $LRK_DIR ; git archive --format tar HEAD) | tar xf -
+
+./configure --install-deps --disable-gssapi --disable-lz4-ext --enable-static --prefix=$DEST_DIR $CONFIG_ARGS
+make -j
+examples/rdkafka_example -X builtin.features
+CI=true make -C tests run_local_quick
+make install
+
+# Tar up the output directory
+pushd $DEST_DIR
+ldd lib/*.so.1
+tar cvzf $OUT_TGZ .
+popd # $DEST_DIR
+
+popd # $BUILD_DIR
+
+rm -rf "$BUILD_DIR"
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-manylinux.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-manylinux.sh
new file mode 100755
index 000000000..4aeaa9622
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-manylinux.sh
@@ -0,0 +1,68 @@
+#!/bin/bash
+#
+# Build on a manylinux (https://github.com/pypa/manylinux) docker container.
+#
+# This will provide a self-contained librdkafka shared library that works
+# on most glibc-based Linuxes.
+#
+# Statically linked
+# WITH openssl 1.1.1, zlib, lz4(bundled)
+# WITHOUT libsasl2
+#
+#
+# Run:
+#   docker run -t -v "$PWD:/v" quay.io/pypa/manylinux2010_x86_64 /v/packaging/tools/build-manylinux.sh /v /v/artifacts/librdkafka-manylinux2010_x86_64.tgz $config_args
+
+set -ex
+
+LRK_DIR=$1
+shift
+OUT_TGZ=$1
+shift
+CONFIG_ARGS=$*
+
+if [[ ! -f $LRK_DIR/configure.self || -z $OUT_TGZ ]]; then
+    echo "Usage: $0 <librdkafka-root-directory> <output-tgz> [<configure-args..>]"
+ exit 1
+fi
+
+set -u
+
+yum install -y libstdc++-devel gcc gcc-c++ python34
+
+# Copy the librdkafka git archive to a new location to avoid messing
+# up the librdkafka working directory.
+
+BUILD_DIR=$(mktemp -d)
+
+pushd $BUILD_DIR
+
+DEST_DIR=$PWD/dest
+mkdir -p $DEST_DIR
+
+# Workaround for newer Git not allowing clone directory to be owned by
+# another user (which is a questionable limitation for the read-only archive
+# command..)
+git config --global --add safe.directory /v
+
+(cd $LRK_DIR ; git archive --format tar HEAD) | tar xf -
+
+./configure --install-deps --source-deps-only --disable-gssapi --disable-lz4-ext --enable-static --prefix=$DEST_DIR $CONFIG_ARGS
+
+make -j
+
+examples/rdkafka_example -X builtin.features
+
+CI=true make -C tests run_local_quick
+
+make install
+
+# Tar up the output directory
+pushd $DEST_DIR
+ldd lib/*.so.1
+tar cvzf $OUT_TGZ .
+popd # $DEST_DIR
+
+popd # $BUILD_DIR
+
+rm -rf "$BUILD_DIR"
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-release-artifacts.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-release-artifacts.sh
new file mode 100755
index 000000000..ea09aaf96
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/build-release-artifacts.sh
@@ -0,0 +1,138 @@
+#!/bin/sh
+#
+# ^ NOTE: This needs to be sh, not bash, for alpine compatibility.
+#
+#
+# Build dynamic and statically linked librdkafka libraries useful for
+# release artifacts in high-level clients.
+#
+# Requires docker.
+# Supported docker images:
+# alpine:3.16
+# quay.io/pypa/manylinux2014_aarch64 (centos7)
+# quay.io/pypa/manylinux2014_x86_64 (centos7)
+# quay.io/pypa/manylinux2010_x86_64 (centos6)
+#
+# Usage:
+# packaging/tools/build-release-artifacts.sh [--disable-gssapi] <docker-image> <relative-output-tarball-path.tgz>
+#
+# The output path must be a relative path and inside the librdkafka directory
+# structure.
+#
+
+set -e
+
+docker_image=""
+extra_pkgs_rpm=""
+extra_pkgs_apk=""
+extra_config_args=""
+expected_features="gzip snappy ssl sasl regex lz4 sasl_plain sasl_scram plugins zstd sasl_oauthbearer http oidc"
+
+# Since cyrus-sasl is the only non-statically-linkable dependency,
+# we provide a --disable-gssapi option so that two different libraries
+# can be built: one with GSSAPI/Kerberos support, and one without, depending
+# on this option.
+if [ "$1" = "--disable-gssapi" ]; then
+ extra_config_args="${extra_config_args} --disable-gssapi"
+ disable_gssapi="$1"
+ shift
+else
+ extra_pkgs_rpm="${extra_pkgs_rpm} cyrus-sasl cyrus-sasl-devel"
+ extra_pkgs_apk="${extra_pkgs_apk} cyrus-sasl cyrus-sasl-dev"
+ expected_features="${expected_features} sasl_gssapi"
+ disable_gssapi=""
+fi
+
+# Check if we're running on the host or the (docker) build target.
+if [ "$1" = "--in-docker" -a $# -eq 2 ]; then
+ output="$2"
+elif [ $# -eq 2 ]; then
+ docker_image="$1"
+ output="$2"
+else
+ echo "Usage: $0 [--disable-gssapi] <manylinux-docker-image> <output-path.tgz>"
+ exit 1
+fi
+
+if [ -n "$docker_image" ]; then
+ # Running on the host, spin up the docker builder.
+ exec docker run -v "$PWD:/v" $docker_image /v/packaging/tools/build-release-artifacts.sh $disable_gssapi --in-docker "/v/$output"
+ # Only reached on exec error
+ exit $?
+fi
+
+
+########################################################################
+# Running in the docker instance, this is where we perform the build. #
+########################################################################
+
+
+# Packages required for building librdkafka (perl is for openssl).
+
+if grep -q alpine /etc/os-release 2>/dev/null ; then
+ # Alpine
+ apk add \
+ bash curl gcc g++ make musl-dev linux-headers bsd-compat-headers git \
+ python3 perl patch $extra_pkgs_apk
+
+else
+ # CentOS
+ yum install -y libstdc++-devel gcc gcc-c++ python3 git perl-IPC-Cmd $extra_pkgs_rpm
+fi
+
+
+# Clone the repo so other builds are unaffected by what we're doing
+# and we get a pristine build tree.
+git clone /v /librdkafka
+
+cd /librdkafka
+
+# Build librdkafka
+./configure \
+ --install-deps --source-deps-only --disable-lz4-ext \
+ --enable-static --enable-strip $extra_config_args
+
+make -j
+
+# Show library linkage (for troubleshooting) and checksums (for verification)
+for lib in src/librdkafka.so.1 src-cpp/librdkafka++.so.1; do
+ echo "$0: LINKAGE ${lib}:"
+    ldd "$lib"
+ echo "$0: SHA256 ${lib}:"
+ sha256sum "$lib"
+done
+
+# Verify that expected features are indeed built.
+features=$(examples/rdkafka_example -X builtin.features)
+echo "$0: FEATURES: $features"
+
+missing=""
+for f in $expected_features; do
+ if ! echo "$features" | grep -q "$f" ; then
+ echo "$0: BUILD IS MISSING FEATURE $f"
+ missing="${missing} $f"
+ fi
+done
+
+if [ -n "$missing" ]; then
+ exit 1
+fi
+
+
+# Run the quick test suite; mark it as CI so that time/resource-sensitive
+# tests don't fail if the worker is under-powered.
+CI=true make -C tests run_local_quick
+
+
+# Install librdkafka and then make a tar ball of the installed files.
+mkdir -p /destdir
+
+DESTDIR=/destdir make install
+
+cd /destdir
+tar cvzf "$output" .
+
+# Emit output hash so that build logs can be used to verify artifacts later.
+echo "$0: SHA256 $output:"
+sha256sum "$output"
+
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/distro-build.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/distro-build.sh
new file mode 100755
index 000000000..a4b5bfa61
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/distro-build.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+#
+# Build librdkafka for different distros to produce distro-specific artifacts.
+# Requires docker.
+#
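+# Usage:
+#   packaging/tools/distro-build.sh <distro> [<configure-args..>]
+#
+# e.g. (distro name is illustrative):
+#   packaging/tools/distro-build.sh manylinux2010_x86_64 --disable-gssapi
+#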
+
+set -e
+
+distro=$1
+shift
+config_args=$*
+
+case $distro in
+ manylinux*)
+ # Any pypa/manylinux docker image build.
+ docker run -t -v "$PWD:/v" quay.io/pypa/$distro /v/packaging/tools/build-manylinux.sh /v /v/artifacts/librdkafka-${distro}.tgz $config_args
+ ;;
+ centos)
+ if [[ -n $config_args ]]; then
+ echo "Warning: configure arguments ignored for centos RPM build"
+ fi
+ packaging/rpm/mock-on-docker.sh
+ packaging/rpm/tests/test-on-docker.sh
+ ;;
+ debian)
+ docker run -it -v "$PWD:/v" mcr.microsoft.com/dotnet/sdk:3.1 /v/packaging/tools/build-debian.sh /v /v/artifacts/librdkafka-debian9.tgz $config_args
+ ;;
+ alpine)
+ packaging/alpine/build-alpine.sh $config_args
+ ;;
+ alpine-static)
+ packaging/alpine/build-alpine.sh --enable-static --source-deps-only $config_args
+ ;;
+ *)
+        echo "Usage: $0 <manylinux*|centos|debian|alpine|alpine-static> [<configure-args..>]"
+ exit 1
+ ;;
+esac
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/gh-release-checksums.py b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/gh-release-checksums.py
new file mode 100755
index 000000000..e7259dc20
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/gh-release-checksums.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+#
+# Calculate checksums for GitHub release artifacts/assets.
+#
+# Use the direct links rather than getting the tarball URLs from
+# the GitHub API since the latter uses the git-sha1 rather than the tag
+# in its zipped up content, causing checksum mismatches.
+#
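+# Usage (tag value is illustrative):
+#   packaging/tools/gh-release-checksums.py v2.1.0
+#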
+
+import sys
+import requests
+import hashlib
+
+
+if __name__ == '__main__':
+
+ if len(sys.argv) != 2:
+ print("Usage: {} <tag>".format(sys.argv[0]))
+ sys.exit(1)
+
+ tag = sys.argv[1]
+
+ print("## Checksums")
+ print("Release asset checksums:")
+
+ for ftype in ["zip", "tar.gz"]:
+ url = "https://github.com/edenhill/librdkafka/archive/{}.{}".format(
+ tag, ftype)
+
+ h = hashlib.sha256()
+
+ r = requests.get(url, stream=True)
+ while True:
+ buf = r.raw.read(100 * 1000)
+ if len(buf) == 0:
+ break
+ h.update(buf)
+
+ print(" * {}.{} SHA256 `{}`".format(tag, ftype, h.hexdigest()))
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/rdutcoverage.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/rdutcoverage.sh
new file mode 100755
index 000000000..e99c51bdc
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/rdutcoverage.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Verify that code coverage numbers are not reused in multiple places.
+#
+
+set -e
+
+echo "Checking for duplicate coverage numbers:"
+cnt=0
+for d in $(egrep -Rsoh 'RD_UT_COVERAGE\([[:digit:]]+\)' src \
+        | sort | uniq -c | \
+        egrep -v '^[[:space:]]*1 ' | awk '{print $2}'); do
+    grep -RsnF "$d" src
+    cnt=$(expr $cnt + 1)
+done
+
+echo ""
+
+if [[ $cnt -gt 0 ]]; then
+    echo "$cnt duplicates found: please use unique numbers"
+    exit 1
+else
+    echo "No duplicates found"
+    exit 0
+fi
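+
+# Example of what this catches (file names and number illustrative): if
+# both src/rdfoo.c and src/rdbar.c contain RD_UT_COVERAGE(42), the loop
+# above prints both occurrences and the script fails with:
+#   1 duplicates found: please use unique numbers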
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/requirements.txt b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/requirements.txt
new file mode 100644
index 000000000..43603098a
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/requirements.txt
@@ -0,0 +1,2 @@
+flake8
+autopep8
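+# Install with: python3 -m pip install -r packaging/tools/requirements.txt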
diff --git a/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/style-format.sh b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/style-format.sh
new file mode 100755
index 000000000..c59ecbe6a
--- /dev/null
+++ b/fluent-bit/lib/librdkafka-2.1.0/packaging/tools/style-format.sh
@@ -0,0 +1,148 @@
+#!/bin/bash
+#
+# Check or apply/fix the project coding style to all files passed as arguments.
+# Uses clang-format for C/C++ and flake8 for Python.
+#
+# Requires clang-format version 10 (apt install clang-format-10).
+#
+
+
+CLANG_FORMAT=${CLANG_FORMAT:-clang-format}
+
+set -e
+
+ret=0
+
+if [[ -z $1 ]]; then
+ echo "Usage: $0 [--fix] srcfile1.c srcfile2.h srcfile3.c ..."
+ echo ""
+ exit 0
+fi
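+
+# Typical invocations (paths illustrative):
+#   packaging/tools/style-format.sh src/rdkafka.c        # check only
+#   packaging/tools/style-format.sh --fix src/rdkafka.c  # fix in place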
+
+if [[ $1 == "--fix" ]]; then
+    fix=1
+    shift
+else
+    fix=0
+fi
+
+clang_format_version=$(${CLANG_FORMAT} --version | sed -Ee 's/.*version ([[:digit:]]+)\.[[:digit:]]+\.[[:digit:]]+.*/\1/')
+if [[ $clang_format_version != "10" ]] ; then
+ echo "$0: clang-format version 10, '$clang_format_version' detected"
+ exit 1
+fi
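+
+# A different clang-format binary can be selected via the environment,
+# e.g. (binary name illustrative):
+#   CLANG_FORMAT=clang-format-10 packaging/tools/style-format.sh src/*.c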
+
+# Get the list of files to skip formatting for from .formatignore.
+ignore_files=( $(grep '^[^#]..' .formatignore) )
+
+function ignore {
+    local file=$1
+
+    local f
+    for f in "${ignore_files[@]}" ; do
+        [[ $file == $f ]] && return 0
+    done
+
+    return 1
+}
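+
+# .formatignore is expected to hold one glob pattern per line, with
+# '#' lines as comments, e.g. (patterns illustrative):
+#   src/snappy.c
+#   src/lz4*.c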
+
+# Read the C++ style from .clang-format-cpp and store it
+# in a json-like string which is passed to --style.
+# (It would be great if clang-format could take a file path for the
+# format file..)
+cpp_style="{ $(grep -v '^...$' .clang-format-cpp | grep -v '^$' | tr '\n' ',' | sed -e 's/,$//') }"
+if [[ -z $cpp_style ]]; then
+    echo "$0: Unable to read .clang-format-cpp"
+    exit 1
+fi
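+
+# For example (contents illustrative), if .clang-format-cpp holds:
+#   BasedOnStyle: Google
+#   IndentWidth: 2
+# then cpp_style becomes "{ BasedOnStyle: Google,IndentWidth: 2 }",
+# which clang-format accepts as an inline style definition.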
+
+extra_info=""
+
+for f in $*; do
+
+    if ignore $f ; then
+        echo "$f is ignored by .formatignore" 1>&2
+        continue
+    fi
+
+    lang="c"
+    if [[ $f == *.cpp ]]; then
+        style="$cpp_style"
+        stylename="C++"
+    elif [[ $f == *.h && $(basename $f) == *cpp* ]]; then
+        style="$cpp_style"
+        stylename="C++ (header)"
+    elif [[ $f == *.py ]]; then
+        lang="py"
+        style="pep8"
+        stylename="pep8"
+    else
+        style="file" # Use .clang-format
+        stylename="C"
+    fi
+
+    check=0
+
+    if [[ $fix == 1 ]]; then
+        # Convert tabs to 8 spaces first.
+        if grep -ql $'\t' "$f"; then
+            sed -i -e 's/\t/        /g' "$f"
+            echo "$f: tabs converted to spaces"
+        fi
+
+        if [[ $lang == c ]]; then
+            # Run clang-format to reformat the file.
+            ${CLANG_FORMAT} --style="$style" "$f" > _styletmp
+
+        else
+            # Run autopep8 to reformat the file.
+            python3 -m autopep8 -a "$f" > _styletmp
+            # autopep8 can't fix all errors, so we also perform a flake8 check.
+            check=1
+        fi
+
+        if ! cmp -s "$f" _styletmp; then
+            echo "$f: style fixed ($stylename)"
+            # Use cp to preserve the target file's mode/attrs.
+            cp _styletmp "$f"
+            rm _styletmp
+        fi
+    fi
+
+    if [[ $fix == 0 || $check == 1 ]]; then
+        # Check for tabs.
+        if grep -q $'\t' "$f" ; then
+            echo "$f: contains tabs: convert to 8 spaces instead"
+            ret=1
+        fi
+
+        # Check style.
+        if [[ $lang == c ]]; then
+            if ! ${CLANG_FORMAT} --style="$style" --Werror --dry-run "$f" ; then
+                echo "$f: had style errors ($stylename): see clang-format output above"
+                ret=1
+            fi
+        elif [[ $lang == py ]]; then
+            if ! python3 -m flake8 "$f"; then
+                echo "$f: had style errors ($stylename): see flake8 output above"
+                if [[ $fix == 1 ]]; then
+                    # autopep8 couldn't fix all errors; let the user know.
+                    extra_info="Error: autopep8 could not fix all errors, fix the flake8 errors manually and run again."
+                fi
+                ret=1
+            fi
+        fi
+    fi
+
+done
+
+rm -f _styletmp
+
+if [[ $ret != 0 ]]; then
+ echo ""
+ echo "You can run the following command to automatically fix the style:"
+ echo " $ make style-fix"
+ [[ -n $extra_info ]] && echo "$extra_info"
+fi
+
+exit $ret